1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
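/* For example, on x86-64 the CFA at function entry is %rsp + 8: the
value %rsp had just before the call insn, which has since pushed the
8-byte return address. As the prologue pushes registers and adjusts
%rsp, DW_CFA_def_cfa_offset / DW_CFA_def_cfa_register notes keep the
CFA rule current so the unwinder can always recompute it. */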
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIEs. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
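/* Concretely, with 32-bit DWARF (DWARF_OFFSET_SIZE == 4) the initial
length is a single 4-byte value; with 64-bit DWARF it is the 4-byte
escape 0xffffffff followed by an 8-byte length, 12 bytes in all. */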
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
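/* For instance, DWARF_ROUND (9, 4) is 12 and DWARF_ROUND (8, 4) is 8;
only integer arithmetic is used, so BOUNDARY need not be a power of
two. */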
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all of the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
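/* For example, with a 64-bit HOST_WIDE_INT a value whose minimum
precision is 64 bits needs one HOST_WIDE_INT, while one needing 65
bits needs two. */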
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
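/* These criteria come from the -femit-struct-debug-detailed family of
options (-femit-struct-debug-baseonly and -reduced are shorthands).
Roughly, a "base" criterion limits struct DIEs to types declared in a
file sharing the main unit's base name (the matches_main_base check
above), while "sys" additionally accepts system headers. */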
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
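/* For example, DW_CFA_offset takes a register number as its first
operand and a factored offset as its second, which is why it appears
under dw_cfi_oprnd_reg_num here and under dw_cfi_oprnd_offset in
dw_cfi_oprnd2_desc below. */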
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged along with them and not discarded
699 by link-time garbage collection. We need to do this on a per-function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
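/* So a typical EH CIE with a personality routine, LSDA references
and a non-absptr FDE encoding ends up with the augmentation
string "zPLR". */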
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
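/* On x86-64 GNU/Linux, for instance, the directives emitted here look
roughly like:

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA0

where 0x9b is DW_EH_PE_indirect|pcrel|sdata4 and 0x1b is
pcrel|sdata4. */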
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function because, without
1114 CFI asm, there is no current function anymore at the point where we
1115 emit the CFI data. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for this
1133 function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the start of the epilogue code generated for this
1156 function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges plus a location description for each range,
1297 so you can track a variable that lives in different places over
1298 its lifetime. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
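/* For instance, a variable that starts out in a register and is later
spilled to the stack gets a two-node list along the lines of
[.LVL0, .LVL1) -> DW_OP_reg3, [.LVL1, .LFE0) -> DW_OP_fbreg -20
(labels and operands purely illustrative). */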
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense comparing two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559 offset. Don't optimize if a signed integer overflow would happen. */
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
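/* E.g. adding 8 to an expression ending in DW_OP_fbreg -24 simply
rewrites that operand to -16, rather than appending a
DW_OP_plus_uconst 8 op. */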
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
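/* For example, new_reg_loc_descr (6, -24) produces the single op
DW_OP_breg6 -24 (register 6 is %rbp in the x86-64 DWARF numbering),
whereas a register number above 31 would use DW_OP_bregx instead. */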
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
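/* DW_FORM_ref_addr was address-sized in DWARF 2 and became offset-sized
(4 or 8 bytes) from DWARF 3 onwards, hence the version check above. */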
1611
1612 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for constructing ops that were GNU extensions
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 default:
1661 break;
1662 }
1663 return op;
1664 }
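/* E.g. dwarf_OP (DW_OP_entry_value) falls back to the vendor opcode
DW_OP_GNU_entry_value when targeting DWARF 2-4 and only uses the
standard opcode under -gdwarf-5 or later. */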
1665
1666 /* Similarly for attributes. */
1667 static inline enum dwarf_attribute
1668 dwarf_AT (enum dwarf_attribute at)
1669 {
1670 switch (at)
1671 {
1672 case DW_AT_call_return_pc:
1673 if (dwarf_version < 5)
1674 return DW_AT_low_pc;
1675 break;
1676
1677 case DW_AT_call_tail_call:
1678 if (dwarf_version < 5)
1679 return DW_AT_GNU_tail_call;
1680 break;
1681
1682 case DW_AT_call_origin:
1683 if (dwarf_version < 5)
1684 return DW_AT_abstract_origin;
1685 break;
1686
1687 case DW_AT_call_target:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_call_site_target;
1690 break;
1691
1692 case DW_AT_call_target_clobbered:
1693 if (dwarf_version < 5)
1694 return DW_AT_GNU_call_site_target_clobbered;
1695 break;
1696
1697 case DW_AT_call_parameter:
1698 if (dwarf_version < 5)
1699 return DW_AT_abstract_origin;
1700 break;
1701
1702 case DW_AT_call_value:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_value;
1705 break;
1706
1707 case DW_AT_call_data_value:
1708 if (dwarf_version < 5)
1709 return DW_AT_GNU_call_site_data_value;
1710 break;
1711
1712 case DW_AT_call_all_calls:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_all_call_sites;
1715 break;
1716
1717 case DW_AT_call_all_tail_calls:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_all_tail_call_sites;
1720 break;
1721
1722 case DW_AT_dwo_name:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_dwo_name;
1725 break;
1726
1727 default:
1728 break;
1729 }
1730 return at;
1731 }
1732
1733 /* And similarly for tags. */
1734 static inline enum dwarf_tag
1735 dwarf_TAG (enum dwarf_tag tag)
1736 {
1737 switch (tag)
1738 {
1739 case DW_TAG_call_site:
1740 if (dwarf_version < 5)
1741 return DW_TAG_GNU_call_site;
1742 break;
1743
1744 case DW_TAG_call_site_parameter:
1745 if (dwarf_version < 5)
1746 return DW_TAG_GNU_call_site_parameter;
1747 break;
1748
1749 default:
1750 break;
1751 }
1752 return tag;
1753 }
1754
1755 static unsigned long int get_base_type_offset (dw_die_ref);
1756
1757 /* Return the size of a location descriptor. */
1758
1759 static unsigned long
1760 size_of_loc_descr (dw_loc_descr_ref loc)
1761 {
1762 unsigned long size = 1;
1763
1764 switch (loc->dw_loc_opc)
1765 {
1766 case DW_OP_addr:
1767 size += DWARF2_ADDR_SIZE;
1768 break;
1769 case DW_OP_GNU_addr_index:
1770 case DW_OP_GNU_const_index:
1771 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1772 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1773 break;
1774 case DW_OP_const1u:
1775 case DW_OP_const1s:
1776 size += 1;
1777 break;
1778 case DW_OP_const2u:
1779 case DW_OP_const2s:
1780 size += 2;
1781 break;
1782 case DW_OP_const4u:
1783 case DW_OP_const4s:
1784 size += 4;
1785 break;
1786 case DW_OP_const8u:
1787 case DW_OP_const8s:
1788 size += 8;
1789 break;
1790 case DW_OP_constu:
1791 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1792 break;
1793 case DW_OP_consts:
1794 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1795 break;
1796 case DW_OP_pick:
1797 size += 1;
1798 break;
1799 case DW_OP_plus_uconst:
1800 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1801 break;
1802 case DW_OP_skip:
1803 case DW_OP_bra:
1804 size += 2;
1805 break;
1806 case DW_OP_breg0:
1807 case DW_OP_breg1:
1808 case DW_OP_breg2:
1809 case DW_OP_breg3:
1810 case DW_OP_breg4:
1811 case DW_OP_breg5:
1812 case DW_OP_breg6:
1813 case DW_OP_breg7:
1814 case DW_OP_breg8:
1815 case DW_OP_breg9:
1816 case DW_OP_breg10:
1817 case DW_OP_breg11:
1818 case DW_OP_breg12:
1819 case DW_OP_breg13:
1820 case DW_OP_breg14:
1821 case DW_OP_breg15:
1822 case DW_OP_breg16:
1823 case DW_OP_breg17:
1824 case DW_OP_breg18:
1825 case DW_OP_breg19:
1826 case DW_OP_breg20:
1827 case DW_OP_breg21:
1828 case DW_OP_breg22:
1829 case DW_OP_breg23:
1830 case DW_OP_breg24:
1831 case DW_OP_breg25:
1832 case DW_OP_breg26:
1833 case DW_OP_breg27:
1834 case DW_OP_breg28:
1835 case DW_OP_breg29:
1836 case DW_OP_breg30:
1837 case DW_OP_breg31:
1838 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1839 break;
1840 case DW_OP_regx:
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1842 break;
1843 case DW_OP_fbreg:
1844 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1845 break;
1846 case DW_OP_bregx:
1847 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1848 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1849 break;
1850 case DW_OP_piece:
1851 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1852 break;
1853 case DW_OP_bit_piece:
1854 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1855 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1856 break;
1857 case DW_OP_deref_size:
1858 case DW_OP_xderef_size:
1859 size += 1;
1860 break;
1861 case DW_OP_call2:
1862 size += 2;
1863 break;
1864 case DW_OP_call4:
1865 size += 4;
1866 break;
1867 case DW_OP_call_ref:
1868 case DW_OP_GNU_variable_value:
1869 size += DWARF_REF_SIZE;
1870 break;
1871 case DW_OP_implicit_value:
1872 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1873 + loc->dw_loc_oprnd1.v.val_unsigned;
1874 break;
1875 case DW_OP_implicit_pointer:
1876 case DW_OP_GNU_implicit_pointer:
1877 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1878 break;
1879 case DW_OP_entry_value:
1880 case DW_OP_GNU_entry_value:
1881 {
1882 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1883 size += size_of_uleb128 (op_size) + op_size;
1884 break;
1885 }
1886 case DW_OP_const_type:
1887 case DW_OP_GNU_const_type:
1888 {
1889 unsigned long o
1890 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1891 size += size_of_uleb128 (o) + 1;
1892 switch (loc->dw_loc_oprnd2.val_class)
1893 {
1894 case dw_val_class_vec:
1895 size += loc->dw_loc_oprnd2.v.val_vec.length
1896 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1897 break;
1898 case dw_val_class_const:
1899 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1900 break;
1901 case dw_val_class_const_double:
1902 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1903 break;
1904 case dw_val_class_wide_int:
1905 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1906 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1907 break;
1908 default:
1909 gcc_unreachable ();
1910 }
1911 break;
1912 }
1913 case DW_OP_regval_type:
1914 case DW_OP_GNU_regval_type:
1915 {
1916 unsigned long o
1917 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1918 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1919 + size_of_uleb128 (o);
1920 }
1921 break;
1922 case DW_OP_deref_type:
1923 case DW_OP_GNU_deref_type:
1924 {
1925 unsigned long o
1926 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1927 size += 1 + size_of_uleb128 (o);
1928 }
1929 break;
1930 case DW_OP_convert:
1931 case DW_OP_reinterpret:
1932 case DW_OP_GNU_convert:
1933 case DW_OP_GNU_reinterpret:
1934 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1935 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1936 else
1937 {
1938 unsigned long o
1939 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1940 size += size_of_uleb128 (o);
1941 }
1942 break;
1943 case DW_OP_GNU_parameter_ref:
1944 size += 4;
1945 break;
1946 default:
1947 break;
1948 }
1949
1950 return size;
1951 }
1952
1953 /* Return the size of a series of location descriptors. */
1954
1955 unsigned long
1956 size_of_locs (dw_loc_descr_ref loc)
1957 {
1958 dw_loc_descr_ref l;
1959 unsigned long size;
1960
1961 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1962 field, to avoid writing to a PCH file. */
1963 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1964 {
1965 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1966 break;
1967 size += size_of_loc_descr (l);
1968 }
1969 if (! l)
1970 return size;
1971
1972 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1973 {
1974 l->dw_loc_addr = size;
1975 size += size_of_loc_descr (l);
1976 }
1977
1978 return size;
1979 }
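/* Editorial aside, not part of the original source: a minimal standalone
   sketch of the LEB128 byte-count logic that size_of_loc_descr relies on
   through size_of_uleb128/size_of_sleb128 (defined elsewhere in GCC).  The
   names below are hypothetical, and the block is kept under "#if 0" so it
   does not enter the build.  */
#if 0
#include <stdint.h>

/* Bytes a ULEB128 encoding of VALUE occupies: one byte per 7-bit group,
   at least one byte.  */
static unsigned long
sketch_size_of_uleb128 (uint64_t value)
{
  unsigned long size = 0;
  do
    {
      value >>= 7;
      size++;
    }
  while (value != 0);
  return size;
}

/* Bytes an SLEB128 encoding of VALUE occupies.  Assumes arithmetic right
   shift of negative values.  */
static unsigned long
sketch_size_of_sleb128 (int64_t value)
{
  unsigned long size = 0;
  int more = 1;
  while (more)
    {
      int byte = value & 0x7f;
      value >>= 7;
      /* Stop once the remaining bits are pure sign extension.  */
      if ((value == 0 && !(byte & 0x40))
	  || (value == -1 && (byte & 0x40)))
	more = 0;
      size++;
    }
  return size;
}
#endif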
1980
1981 /* Return the size of the value in a DW_AT_discr_value attribute. */
1982
1983 static int
1984 size_of_discr_value (dw_discr_value *discr_value)
1985 {
1986 if (discr_value->pos)
1987 return size_of_uleb128 (discr_value->v.uval);
1988 else
1989 return size_of_sleb128 (discr_value->v.sval);
1990 }
1991
1992 /* Return the size of the value in a DW_AT_discr_list attribute. */
1993
1994 static int
1995 size_of_discr_list (dw_discr_list_ref discr_list)
1996 {
1997 int size = 0;
1998
1999 for (dw_discr_list_ref list = discr_list;
2000 list != NULL;
2001 list = list->dw_discr_next)
2002 {
2003 /* One byte for the discriminant value descriptor, and then one or two
2004 LEB128 numbers, depending on whether it's a single case label or a
2005 range label. */
2006 size += 1;
2007 size += size_of_discr_value (&list->dw_discr_lower_bound);
2008 if (list->dw_discr_range != 0)
2009 size += size_of_discr_value (&list->dw_discr_upper_bound);
2010 }
2011 return size;
2012 }
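/* Editorial worked example, not from the original source: a DW_DSC_range
   entry with unsigned bounds 1 and 300 takes 1 byte for the descriptor,
   1 byte for ULEB128(1) and 2 bytes for ULEB128(300), i.e. 4 bytes in
   total.  */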
2013
2014 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2015 static void get_ref_die_offset_label (char *, dw_die_ref);
2016 static unsigned long int get_ref_die_offset (dw_die_ref);
2017
2018 /* Output location description stack opcode's operands (if any).
2019 The for_eh_or_skip parameter controls whether register numbers are
2020 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2021 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2022 info). This should be suppressed for the cases that have not been converted
2023 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2024
2025 static void
2026 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2027 {
2028 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2029 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2030
2031 switch (loc->dw_loc_opc)
2032 {
2033 #ifdef DWARF2_DEBUGGING_INFO
2034 case DW_OP_const2u:
2035 case DW_OP_const2s:
2036 dw2_asm_output_data (2, val1->v.val_int, NULL);
2037 break;
2038 case DW_OP_const4u:
2039 if (loc->dtprel)
2040 {
2041 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2042 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2043 val1->v.val_addr);
2044 fputc ('\n', asm_out_file);
2045 break;
2046 }
2047 /* FALLTHRU */
2048 case DW_OP_const4s:
2049 dw2_asm_output_data (4, val1->v.val_int, NULL);
2050 break;
2051 case DW_OP_const8u:
2052 if (loc->dtprel)
2053 {
2054 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2055 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2056 val1->v.val_addr);
2057 fputc ('\n', asm_out_file);
2058 break;
2059 }
2060 /* FALLTHRU */
2061 case DW_OP_const8s:
2062 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2063 dw2_asm_output_data (8, val1->v.val_int, NULL);
2064 break;
2065 case DW_OP_skip:
2066 case DW_OP_bra:
2067 {
2068 int offset;
2069
2070 gcc_assert (val1->val_class == dw_val_class_loc);
2071 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2072
2073 dw2_asm_output_data (2, offset, NULL);
2074 }
2075 break;
2076 case DW_OP_implicit_value:
2077 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2078 switch (val2->val_class)
2079 {
2080 case dw_val_class_const:
2081 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2082 break;
2083 case dw_val_class_vec:
2084 {
2085 unsigned int elt_size = val2->v.val_vec.elt_size;
2086 unsigned int len = val2->v.val_vec.length;
2087 unsigned int i;
2088 unsigned char *p;
2089
2090 if (elt_size > sizeof (HOST_WIDE_INT))
2091 {
2092 elt_size /= 2;
2093 len *= 2;
2094 }
2095 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2096 i < len;
2097 i++, p += elt_size)
2098 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2099 "fp or vector constant word %u", i);
2100 }
2101 break;
2102 case dw_val_class_const_double:
2103 {
2104 unsigned HOST_WIDE_INT first, second;
2105
2106 if (WORDS_BIG_ENDIAN)
2107 {
2108 first = val2->v.val_double.high;
2109 second = val2->v.val_double.low;
2110 }
2111 else
2112 {
2113 first = val2->v.val_double.low;
2114 second = val2->v.val_double.high;
2115 }
2116 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2117 first, NULL);
2118 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2119 second, NULL);
2120 }
2121 break;
2122 case dw_val_class_wide_int:
2123 {
2124 int i;
2125 int len = get_full_len (*val2->v.val_wide);
2126 if (WORDS_BIG_ENDIAN)
2127 for (i = len - 1; i >= 0; --i)
2128 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2129 val2->v.val_wide->elt (i), NULL);
2130 else
2131 for (i = 0; i < len; ++i)
2132 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2133 val2->v.val_wide->elt (i), NULL);
2134 }
2135 break;
2136 case dw_val_class_addr:
2137 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2138 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2139 break;
2140 default:
2141 gcc_unreachable ();
2142 }
2143 break;
2144 #else
2145 case DW_OP_const2u:
2146 case DW_OP_const2s:
2147 case DW_OP_const4u:
2148 case DW_OP_const4s:
2149 case DW_OP_const8u:
2150 case DW_OP_const8s:
2151 case DW_OP_skip:
2152 case DW_OP_bra:
2153 case DW_OP_implicit_value:
2154 /* We currently don't make any attempt to make sure these are
2155 aligned properly like we do for the main unwind info, so
2156 don't support emitting things larger than a byte if we're
2157 only doing unwinding. */
2158 gcc_unreachable ();
2159 #endif
2160 case DW_OP_const1u:
2161 case DW_OP_const1s:
2162 dw2_asm_output_data (1, val1->v.val_int, NULL);
2163 break;
2164 case DW_OP_constu:
2165 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2166 break;
2167 case DW_OP_consts:
2168 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2169 break;
2170 case DW_OP_pick:
2171 dw2_asm_output_data (1, val1->v.val_int, NULL);
2172 break;
2173 case DW_OP_plus_uconst:
2174 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2175 break;
2176 case DW_OP_breg0:
2177 case DW_OP_breg1:
2178 case DW_OP_breg2:
2179 case DW_OP_breg3:
2180 case DW_OP_breg4:
2181 case DW_OP_breg5:
2182 case DW_OP_breg6:
2183 case DW_OP_breg7:
2184 case DW_OP_breg8:
2185 case DW_OP_breg9:
2186 case DW_OP_breg10:
2187 case DW_OP_breg11:
2188 case DW_OP_breg12:
2189 case DW_OP_breg13:
2190 case DW_OP_breg14:
2191 case DW_OP_breg15:
2192 case DW_OP_breg16:
2193 case DW_OP_breg17:
2194 case DW_OP_breg18:
2195 case DW_OP_breg19:
2196 case DW_OP_breg20:
2197 case DW_OP_breg21:
2198 case DW_OP_breg22:
2199 case DW_OP_breg23:
2200 case DW_OP_breg24:
2201 case DW_OP_breg25:
2202 case DW_OP_breg26:
2203 case DW_OP_breg27:
2204 case DW_OP_breg28:
2205 case DW_OP_breg29:
2206 case DW_OP_breg30:
2207 case DW_OP_breg31:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_regx:
2211 {
2212 unsigned r = val1->v.val_unsigned;
2213 if (for_eh_or_skip >= 0)
2214 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2215 gcc_assert (size_of_uleb128 (r)
2216 == size_of_uleb128 (val1->v.val_unsigned));
2217 dw2_asm_output_data_uleb128 (r, NULL);
2218 }
2219 break;
2220 case DW_OP_fbreg:
2221 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2222 break;
2223 case DW_OP_bregx:
2224 {
2225 unsigned r = val1->v.val_unsigned;
2226 if (for_eh_or_skip >= 0)
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (size_of_uleb128 (r)
2229 == size_of_uleb128 (val1->v.val_unsigned));
2230 dw2_asm_output_data_uleb128 (r, NULL);
2231 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2232 }
2233 break;
2234 case DW_OP_piece:
2235 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2236 break;
2237 case DW_OP_bit_piece:
2238 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2239 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2240 break;
2241 case DW_OP_deref_size:
2242 case DW_OP_xderef_size:
2243 dw2_asm_output_data (1, val1->v.val_int, NULL);
2244 break;
2245
2246 case DW_OP_addr:
2247 if (loc->dtprel)
2248 {
2249 if (targetm.asm_out.output_dwarf_dtprel)
2250 {
2251 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2252 DWARF2_ADDR_SIZE,
2253 val1->v.val_addr);
2254 fputc ('\n', asm_out_file);
2255 }
2256 else
2257 gcc_unreachable ();
2258 }
2259 else
2260 {
2261 #ifdef DWARF2_DEBUGGING_INFO
2262 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2263 #else
2264 gcc_unreachable ();
2265 #endif
2266 }
2267 break;
2268
2269 case DW_OP_GNU_addr_index:
2270 case DW_OP_GNU_const_index:
2271 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2272 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2273 "(index into .debug_addr)");
2274 break;
2275
2276 case DW_OP_call2:
2277 case DW_OP_call4:
2278 {
2279 unsigned long die_offset
2280 = get_ref_die_offset (val1->v.val_die_ref.die);
2281 /* Make sure the offset has been computed and that we can encode it as
2282 an operand. */
2283 gcc_assert (die_offset > 0
2284 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2285 ? 0xffff
2286 : 0xffffffff));
2287 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2288 die_offset, NULL);
2289 }
2290 break;
2291
2292 case DW_OP_call_ref:
2293 case DW_OP_GNU_variable_value:
2294 {
2295 char label[MAX_ARTIFICIAL_LABEL_BYTES
2296 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2297 gcc_assert (val1->val_class == dw_val_class_die_ref);
2298 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2299 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2300 }
2301 break;
2302
2303 case DW_OP_implicit_pointer:
2304 case DW_OP_GNU_implicit_pointer:
2305 {
2306 char label[MAX_ARTIFICIAL_LABEL_BYTES
2307 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2308 gcc_assert (val1->val_class == dw_val_class_die_ref);
2309 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2310 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2311 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2312 }
2313 break;
2314
2315 case DW_OP_entry_value:
2316 case DW_OP_GNU_entry_value:
2317 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2318 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2319 break;
2320
2321 case DW_OP_const_type:
2322 case DW_OP_GNU_const_type:
2323 {
2324 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2325 gcc_assert (o);
2326 dw2_asm_output_data_uleb128 (o, NULL);
2327 switch (val2->val_class)
2328 {
2329 case dw_val_class_const:
2330 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2331 dw2_asm_output_data (1, l, NULL);
2332 dw2_asm_output_data (l, val2->v.val_int, NULL);
2333 break;
2334 case dw_val_class_vec:
2335 {
2336 unsigned int elt_size = val2->v.val_vec.elt_size;
2337 unsigned int len = val2->v.val_vec.length;
2338 unsigned int i;
2339 unsigned char *p;
2340
2341 l = len * elt_size;
2342 dw2_asm_output_data (1, l, NULL);
2343 if (elt_size > sizeof (HOST_WIDE_INT))
2344 {
2345 elt_size /= 2;
2346 len *= 2;
2347 }
2348 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2349 i < len;
2350 i++, p += elt_size)
2351 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2352 "fp or vector constant word %u", i);
2353 }
2354 break;
2355 case dw_val_class_const_double:
2356 {
2357 unsigned HOST_WIDE_INT first, second;
2358 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2359
2360 dw2_asm_output_data (1, 2 * l, NULL);
2361 if (WORDS_BIG_ENDIAN)
2362 {
2363 first = val2->v.val_double.high;
2364 second = val2->v.val_double.low;
2365 }
2366 else
2367 {
2368 first = val2->v.val_double.low;
2369 second = val2->v.val_double.high;
2370 }
2371 dw2_asm_output_data (l, first, NULL);
2372 dw2_asm_output_data (l, second, NULL);
2373 }
2374 break;
2375 case dw_val_class_wide_int:
2376 {
2377 int i;
2378 int len = get_full_len (*val2->v.val_wide);
2379 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2380
2381 dw2_asm_output_data (1, len * l, NULL);
2382 if (WORDS_BIG_ENDIAN)
2383 for (i = len - 1; i >= 0; --i)
2384 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2385 else
2386 for (i = 0; i < len; ++i)
2387 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2388 }
2389 break;
2390 default:
2391 gcc_unreachable ();
2392 }
2393 }
2394 break;
2395 case DW_OP_regval_type:
2396 case DW_OP_GNU_regval_type:
2397 {
2398 unsigned r = val1->v.val_unsigned;
2399 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2400 gcc_assert (o);
2401 if (for_eh_or_skip >= 0)
2402 {
2403 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2404 gcc_assert (size_of_uleb128 (r)
2405 == size_of_uleb128 (val1->v.val_unsigned));
2406 }
2407 dw2_asm_output_data_uleb128 (r, NULL);
2408 dw2_asm_output_data_uleb128 (o, NULL);
2409 }
2410 break;
2411 case DW_OP_deref_type:
2412 case DW_OP_GNU_deref_type:
2413 {
2414 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2415 gcc_assert (o);
2416 dw2_asm_output_data (1, val1->v.val_int, NULL);
2417 dw2_asm_output_data_uleb128 (o, NULL);
2418 }
2419 break;
2420 case DW_OP_convert:
2421 case DW_OP_reinterpret:
2422 case DW_OP_GNU_convert:
2423 case DW_OP_GNU_reinterpret:
2424 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2425 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2426 else
2427 {
2428 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2429 gcc_assert (o);
2430 dw2_asm_output_data_uleb128 (o, NULL);
2431 }
2432 break;
2433
2434 case DW_OP_GNU_parameter_ref:
2435 {
2436 unsigned long o;
2437 gcc_assert (val1->val_class == dw_val_class_die_ref);
2438 o = get_ref_die_offset (val1->v.val_die_ref.die);
2439 dw2_asm_output_data (4, o, NULL);
2440 }
2441 break;
2442
2443 default:
2444 /* Other codes have no operands. */
2445 break;
2446 }
2447 }
2448
2449 /* Output a sequence of location operations.
2450 The for_eh_or_skip parameter controls whether register numbers are
2451 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2452 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2453 info). This should be suppressed for the cases that have not been converted
2454 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2455
2456 void
2457 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2458 {
2459 for (; loc != NULL; loc = loc->dw_loc_next)
2460 {
2461 enum dwarf_location_atom opc = loc->dw_loc_opc;
2462 /* Output the opcode. */
2463 if (for_eh_or_skip >= 0
2464 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2465 {
2466 unsigned r = (opc - DW_OP_breg0);
2467 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2468 gcc_assert (r <= 31);
2469 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2470 }
2471 else if (for_eh_or_skip >= 0
2472 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2473 {
2474 unsigned r = (opc - DW_OP_reg0);
2475 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2476 gcc_assert (r <= 31);
2477 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2478 }
2479
2480 dw2_asm_output_data (1, opc,
2481 "%s", dwarf_stack_op_name (opc));
2482
2483 /* Output the operand(s) (if any). */
2484 output_loc_operands (loc, for_eh_or_skip);
2485 }
2486 }
2487
2488 /* Output location description stack opcode's operands (if any).
2489 The output is single bytes on a line, suitable for .cfi_escape. */
2490
2491 static void
2492 output_loc_operands_raw (dw_loc_descr_ref loc)
2493 {
2494 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2495 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2496
2497 switch (loc->dw_loc_opc)
2498 {
2499 case DW_OP_addr:
2500 case DW_OP_GNU_addr_index:
2501 case DW_OP_GNU_const_index:
2502 case DW_OP_implicit_value:
2503 /* We cannot output addresses in .cfi_escape, only bytes. */
2504 gcc_unreachable ();
2505
2506 case DW_OP_const1u:
2507 case DW_OP_const1s:
2508 case DW_OP_pick:
2509 case DW_OP_deref_size:
2510 case DW_OP_xderef_size:
2511 fputc (',', asm_out_file);
2512 dw2_asm_output_data_raw (1, val1->v.val_int);
2513 break;
2514
2515 case DW_OP_const2u:
2516 case DW_OP_const2s:
2517 fputc (',', asm_out_file);
2518 dw2_asm_output_data_raw (2, val1->v.val_int);
2519 break;
2520
2521 case DW_OP_const4u:
2522 case DW_OP_const4s:
2523 fputc (',', asm_out_file);
2524 dw2_asm_output_data_raw (4, val1->v.val_int);
2525 break;
2526
2527 case DW_OP_const8u:
2528 case DW_OP_const8s:
2529 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2530 fputc (',', asm_out_file);
2531 dw2_asm_output_data_raw (8, val1->v.val_int);
2532 break;
2533
2534 case DW_OP_skip:
2535 case DW_OP_bra:
2536 {
2537 int offset;
2538
2539 gcc_assert (val1->val_class == dw_val_class_loc);
2540 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2541
2542 fputc (',', asm_out_file);
2543 dw2_asm_output_data_raw (2, offset);
2544 }
2545 break;
2546
2547 case DW_OP_regx:
2548 {
2549 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2550 gcc_assert (size_of_uleb128 (r)
2551 == size_of_uleb128 (val1->v.val_unsigned));
2552 fputc (',', asm_out_file);
2553 dw2_asm_output_data_uleb128_raw (r);
2554 }
2555 break;
2556
2557 case DW_OP_constu:
2558 case DW_OP_plus_uconst:
2559 case DW_OP_piece:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2562 break;
2563
2564 case DW_OP_bit_piece:
2565 fputc (',', asm_out_file);
2566 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2567 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2568 break;
2569
2570 case DW_OP_consts:
2571 case DW_OP_breg0:
2572 case DW_OP_breg1:
2573 case DW_OP_breg2:
2574 case DW_OP_breg3:
2575 case DW_OP_breg4:
2576 case DW_OP_breg5:
2577 case DW_OP_breg6:
2578 case DW_OP_breg7:
2579 case DW_OP_breg8:
2580 case DW_OP_breg9:
2581 case DW_OP_breg10:
2582 case DW_OP_breg11:
2583 case DW_OP_breg12:
2584 case DW_OP_breg13:
2585 case DW_OP_breg14:
2586 case DW_OP_breg15:
2587 case DW_OP_breg16:
2588 case DW_OP_breg17:
2589 case DW_OP_breg18:
2590 case DW_OP_breg19:
2591 case DW_OP_breg20:
2592 case DW_OP_breg21:
2593 case DW_OP_breg22:
2594 case DW_OP_breg23:
2595 case DW_OP_breg24:
2596 case DW_OP_breg25:
2597 case DW_OP_breg26:
2598 case DW_OP_breg27:
2599 case DW_OP_breg28:
2600 case DW_OP_breg29:
2601 case DW_OP_breg30:
2602 case DW_OP_breg31:
2603 case DW_OP_fbreg:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2606 break;
2607
2608 case DW_OP_bregx:
2609 {
2610 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2611 gcc_assert (size_of_uleb128 (r)
2612 == size_of_uleb128 (val1->v.val_unsigned));
2613 fputc (',', asm_out_file);
2614 dw2_asm_output_data_uleb128_raw (r);
2615 fputc (',', asm_out_file);
2616 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2617 }
2618 break;
2619
2620 case DW_OP_implicit_pointer:
2621 case DW_OP_entry_value:
2622 case DW_OP_const_type:
2623 case DW_OP_regval_type:
2624 case DW_OP_deref_type:
2625 case DW_OP_convert:
2626 case DW_OP_reinterpret:
2627 case DW_OP_GNU_implicit_pointer:
2628 case DW_OP_GNU_entry_value:
2629 case DW_OP_GNU_const_type:
2630 case DW_OP_GNU_regval_type:
2631 case DW_OP_GNU_deref_type:
2632 case DW_OP_GNU_convert:
2633 case DW_OP_GNU_reinterpret:
2634 case DW_OP_GNU_parameter_ref:
2635 gcc_unreachable ();
2636 break;
2637
2638 default:
2639 /* Other codes have no operands. */
2640 break;
2641 }
2642 }
2643
2644 void
2645 output_loc_sequence_raw (dw_loc_descr_ref loc)
2646 {
2647 while (1)
2648 {
2649 enum dwarf_location_atom opc = loc->dw_loc_opc;
2650 /* Output the opcode. */
2651 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2652 {
2653 unsigned r = (opc - DW_OP_breg0);
2654 r = DWARF2_FRAME_REG_OUT (r, 1);
2655 gcc_assert (r <= 31);
2656 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2657 }
2658 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2659 {
2660 unsigned r = (opc - DW_OP_reg0);
2661 r = DWARF2_FRAME_REG_OUT (r, 1);
2662 gcc_assert (r <= 31);
2663 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2664 }
2665 /* Output the opcode. */
2666 fprintf (asm_out_file, "%#x", opc);
2667 output_loc_operands_raw (loc);
2668
2669 if (!loc->dw_loc_next)
2670 break;
2671 loc = loc->dw_loc_next;
2672
2673 fputc (',', asm_out_file);
2674 }
2675 }
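/* Editorial aside, not part of the original source: a standalone sketch of
   how a consumer might walk the opcode/operand byte stream that the
   emitters above produce, limited to a few opcodes just to show the
   opcode-followed-by-LEB128-operands layout.  The function names are
   hypothetical; the numeric opcode values are the standard DWARF ones.
   Kept under "#if 0" so it stays out of the build.  */
#if 0
#include <stdio.h>
#include <stdint.h>

static uint64_t
sketch_read_uleb128 (const unsigned char **p)
{
  uint64_t result = 0;
  int shift = 0;
  unsigned char byte;
  do
    {
      byte = *(*p)++;
      result |= (uint64_t) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);
  return result;
}

static int64_t
sketch_read_sleb128 (const unsigned char **p)
{
  int64_t result = 0;
  int shift = 0;
  unsigned char byte;
  do
    {
      byte = *(*p)++;
      result |= (int64_t) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);
  if (shift < 64 && (byte & 0x40))
    result |= -((int64_t) 1 << shift);
  return result;
}

/* Print a location expression consisting of DW_OP_breg0..31,
   DW_OP_plus_uconst and DW_OP_deref operations.  */
static void
sketch_dump_loc_expr (const unsigned char *p, const unsigned char *end)
{
  while (p < end)
    {
      unsigned char op = *p++;
      if (op >= 0x70 && op <= 0x8f)		/* DW_OP_breg0..breg31 */
	printf ("DW_OP_breg%d %lld\n", op - 0x70,
		(long long) sketch_read_sleb128 (&p));
      else if (op == 0x23)			/* DW_OP_plus_uconst */
	printf ("DW_OP_plus_uconst %llu\n",
		(unsigned long long) sketch_read_uleb128 (&p));
      else if (op == 0x06)			/* DW_OP_deref */
	printf ("DW_OP_deref\n");
      else
	break;					/* opcode not handled here */
    }
}
#endif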
2676
2677 /* This function builds a dwarf location descriptor sequence from a
2678 dw_cfa_location, adding the given OFFSET to the result of the
2679 expression. */
2680
2681 struct dw_loc_descr_node *
2682 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2683 {
2684 struct dw_loc_descr_node *head, *tmp;
2685
2686 offset += cfa->offset;
2687
2688 if (cfa->indirect)
2689 {
2690 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2691 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2692 head->dw_loc_oprnd1.val_entry = NULL;
2693 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2694 add_loc_descr (&head, tmp);
2695 loc_descr_plus_const (&head, offset);
2696 }
2697 else
2698 head = new_reg_loc_descr (cfa->reg, offset);
2699
2700 return head;
2701 }
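/* Editorial illustration, not from the original source, assuming
   new_reg_loc_descr emits a DW_OP_breg<n>/DW_OP_bregx operation and
   loc_descr_plus_const appends a constant addition: a non-indirect CFA of
   register 6 with offset 16 and OFFSET 8 yields a single
   "DW_OP_breg6 24", while an indirect CFA yields
   "DW_OP_breg<reg> <base_offset>; DW_OP_deref" followed by the addition
   of cfa->offset + OFFSET.  */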
2702
2703 /* This function builds a dwarf location descriptor sequence for
2704 the address at OFFSET from the CFA when the stack is aligned to
2705 ALIGNMENT bytes. */
2706
2707 struct dw_loc_descr_node *
2708 build_cfa_aligned_loc (dw_cfa_location *cfa,
2709 poly_int64 offset, HOST_WIDE_INT alignment)
2710 {
2711 struct dw_loc_descr_node *head;
2712 unsigned int dwarf_fp
2713 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2714
2715 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2716 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2717 {
2718 head = new_reg_loc_descr (dwarf_fp, 0);
2719 add_loc_descr (&head, int_loc_descriptor (alignment));
2720 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2721 loc_descr_plus_const (&head, offset);
2722 }
2723 else
2724 head = new_reg_loc_descr (dwarf_fp, offset);
2725 return head;
2726 }
2727 \f
2728 /* And now, the support for symbolic debugging information. */
2729
2730 /* .debug_str support. */
2731
2732 static void dwarf2out_init (const char *);
2733 static void dwarf2out_finish (const char *);
2734 static void dwarf2out_early_finish (const char *);
2735 static void dwarf2out_assembly_start (void);
2736 static void dwarf2out_define (unsigned int, const char *);
2737 static void dwarf2out_undef (unsigned int, const char *);
2738 static void dwarf2out_start_source_file (unsigned, const char *);
2739 static void dwarf2out_end_source_file (unsigned);
2740 static void dwarf2out_function_decl (tree);
2741 static void dwarf2out_begin_block (unsigned, unsigned);
2742 static void dwarf2out_end_block (unsigned, unsigned);
2743 static bool dwarf2out_ignore_block (const_tree);
2744 static void dwarf2out_early_global_decl (tree);
2745 static void dwarf2out_late_global_decl (tree);
2746 static void dwarf2out_type_decl (tree, int);
2747 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2748 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2749 dw_die_ref);
2750 static void dwarf2out_abstract_function (tree);
2751 static void dwarf2out_var_location (rtx_insn *);
2752 static void dwarf2out_inline_entry (tree);
2753 static void dwarf2out_size_function (tree);
2754 static void dwarf2out_begin_function (tree);
2755 static void dwarf2out_end_function (unsigned int);
2756 static void dwarf2out_register_main_translation_unit (tree unit);
2757 static void dwarf2out_set_name (tree, tree);
2758 static void dwarf2out_register_external_die (tree decl, const char *sym,
2759 unsigned HOST_WIDE_INT off);
2760 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2761 unsigned HOST_WIDE_INT *off);
2762
2763 /* The debug hooks structure. */
2764
2765 const struct gcc_debug_hooks dwarf2_debug_hooks =
2766 {
2767 dwarf2out_init,
2768 dwarf2out_finish,
2769 dwarf2out_early_finish,
2770 dwarf2out_assembly_start,
2771 dwarf2out_define,
2772 dwarf2out_undef,
2773 dwarf2out_start_source_file,
2774 dwarf2out_end_source_file,
2775 dwarf2out_begin_block,
2776 dwarf2out_end_block,
2777 dwarf2out_ignore_block,
2778 dwarf2out_source_line,
2779 dwarf2out_begin_prologue,
2780 #if VMS_DEBUGGING_INFO
2781 dwarf2out_vms_end_prologue,
2782 dwarf2out_vms_begin_epilogue,
2783 #else
2784 debug_nothing_int_charstar,
2785 debug_nothing_int_charstar,
2786 #endif
2787 dwarf2out_end_epilogue,
2788 dwarf2out_begin_function,
2789 dwarf2out_end_function, /* end_function */
2790 dwarf2out_register_main_translation_unit,
2791 dwarf2out_function_decl, /* function_decl */
2792 dwarf2out_early_global_decl,
2793 dwarf2out_late_global_decl,
2794 dwarf2out_type_decl, /* type_decl */
2795 dwarf2out_imported_module_or_decl,
2796 dwarf2out_die_ref_for_decl,
2797 dwarf2out_register_external_die,
2798 debug_nothing_tree, /* deferred_inline_function */
2799 /* The DWARF 2 backend tries to reduce debugging bloat by not
2800 emitting the abstract description of inline functions until
2801 something tries to reference them. */
2802 dwarf2out_abstract_function, /* outlining_inline_function */
2803 debug_nothing_rtx_code_label, /* label */
2804 debug_nothing_int, /* handle_pch */
2805 dwarf2out_var_location,
2806 dwarf2out_inline_entry, /* inline_entry */
2807 dwarf2out_size_function, /* size_function */
2808 dwarf2out_switch_text_section,
2809 dwarf2out_set_name,
2810 1, /* start_end_main_source_file */
2811 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2812 };
2813
2814 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2815 {
2816 dwarf2out_init,
2817 debug_nothing_charstar,
2818 debug_nothing_charstar,
2819 dwarf2out_assembly_start,
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 debug_nothing_int_charstar,
2823 debug_nothing_int,
2824 debug_nothing_int_int, /* begin_block */
2825 debug_nothing_int_int, /* end_block */
2826 debug_true_const_tree, /* ignore_block */
2827 dwarf2out_source_line, /* source_line */
2828 debug_nothing_int_int_charstar, /* begin_prologue */
2829 debug_nothing_int_charstar, /* end_prologue */
2830 debug_nothing_int_charstar, /* begin_epilogue */
2831 debug_nothing_int_charstar, /* end_epilogue */
2832 debug_nothing_tree, /* begin_function */
2833 debug_nothing_int, /* end_function */
2834 debug_nothing_tree, /* register_main_translation_unit */
2835 debug_nothing_tree, /* function_decl */
2836 debug_nothing_tree, /* early_global_decl */
2837 debug_nothing_tree, /* late_global_decl */
2838 debug_nothing_tree_int, /* type_decl */
2839 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2840 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2841 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2842 debug_nothing_tree, /* deferred_inline_function */
2843 debug_nothing_tree, /* outlining_inline_function */
2844 debug_nothing_rtx_code_label, /* label */
2845 debug_nothing_int, /* handle_pch */
2846 debug_nothing_rtx_insn, /* var_location */
2847 debug_nothing_tree, /* inline_entry */
2848 debug_nothing_tree, /* size_function */
2849 debug_nothing_void, /* switch_text_section */
2850 debug_nothing_tree_tree, /* set_name */
2851 0, /* start_end_main_source_file */
2852 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2853 };
2854 \f
2855 /* NOTE: In the comments in this file, many references are made to
2856 "Debugging Information Entries". This term is abbreviated as `DIE'
2857 throughout the remainder of this file. */
2858
2859 /* An internal representation of the DWARF output is built, and then
2860 walked to generate the DWARF debugging info. The walk of the internal
2861 representation is done after the entire program has been compiled.
2862 The types below are used to describe the internal representation. */
2863
2864 /* Whether to put type DIEs into their own section .debug_types instead
2865 of making them part of the .debug_info section. Only supported for
2866 Dwarf V4 or higher, and only if the user didn't disable them through
2867 -fno-debug-types-section. It is more efficient to put them in
2868 separate comdat sections since the linker will then be able to
2869 remove duplicates. But not all tools support .debug_types sections
2870 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2871 such types use the DW_UT_type unit type in the .debug_info section. */
2872
2873 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2874
2875 /* Various DIE's use offsets relative to the beginning of the
2876 .debug_info section to refer to each other. */
2877
2878 typedef long int dw_offset;
2879
2880 struct comdat_type_node;
2881
2882 /* The entries in the line_info table more-or-less mirror the opcodes
2883 that are used in the real dwarf line table. Arrays of these entries
2884 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2885 supported. */
2886
2887 enum dw_line_info_opcode {
2888 /* Emit DW_LNE_set_address; the operand is the label index. */
2889 LI_set_address,
2890
2891 /* Emit a row to the matrix with the given line. This may be done
2892 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2893 special opcodes. */
2894 LI_set_line,
2895
2896 /* Emit a DW_LNS_set_file. */
2897 LI_set_file,
2898
2899 /* Emit a DW_LNS_set_column. */
2900 LI_set_column,
2901
2902 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2903 LI_negate_stmt,
2904
2905 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2906 LI_set_prologue_end,
2907 LI_set_epilogue_begin,
2908
2909 /* Emit a DW_LNE_set_discriminator. */
2910 LI_set_discriminator,
2911
2912 /* Output a Fixed Advance PC; the target PC is the label index; the
2913 base PC is the previous LI_adv_address or LI_set_address entry.
2914 We only use this when emitting debug views without assembler
2915 support, at explicit user request. Ideally, we should only use
2916 it when the offset might be zero but we can't tell: it's the only
2917 way to maybe change the PC without resetting the view number. */
2918 LI_adv_address
2919 };
2920
2921 typedef struct GTY(()) dw_line_info_struct {
2922 enum dw_line_info_opcode opcode;
2923 unsigned int val;
2924 } dw_line_info_entry;
2925
2926
2927 struct GTY(()) dw_line_info_table {
2928 /* The label that marks the end of this section. */
2929 const char *end_label;
2930
2931 /* The values for the last row of the matrix, as collected in the table.
2932 These are used to minimize the changes to the next row. */
2933 unsigned int file_num;
2934 unsigned int line_num;
2935 unsigned int column_num;
2936 int discrim_num;
2937 bool is_stmt;
2938 bool in_use;
2939
2940 /* This denotes the NEXT view number.
2941
2942 If it is 0, it is known that the NEXT view will be the first view
2943 at the given PC.
2944
2945 If it is -1, we're forcing the view number to be reset, e.g. at a
2946 function entry.
2947
2948 The meaning of other nonzero values depends on whether we're
2949 computing views internally or leaving it for the assembler to do
2950 so. If we're emitting them internally, view denotes the view
2951 number since the last known advance of PC. If we're leaving it
2952 for the assembler, it denotes the LVU label number that we're
2953 going to ask the assembler to assign. */
2954 var_loc_view view;
2955
2956 /* This counts the number of symbolic views emitted in this table
2957 since the latest view reset. Its max value, over all tables,
2958 sets symview_upper_bound. */
2959 var_loc_view symviews_since_reset;
2960
2961 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2962 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2963 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2964 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2965
2966 vec<dw_line_info_entry, va_gc> *entries;
2967 };
2968
2969 /* This is an upper bound for view numbers that the assembler may
2970 assign to symbolic views output in this translation unit. It is used to
2971 decide how big a field to use to represent view numbers in
2972 symview-classed attributes. */
2973
2974 static var_loc_view symview_upper_bound;
2975
2976 /* If we're keeping track of location views and their reset points, and
2977 INSN is a reset point (i.e., it necessarily advances the PC), mark
2978 the next view in TABLE as reset. */
2979
2980 static void
2981 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2982 {
2983 if (!debug_internal_reset_location_views)
2984 return;
2985
2986 /* Maybe turn (part of?) this test into a default target hook. */
2987 int reset = 0;
2988
2989 if (targetm.reset_location_view)
2990 reset = targetm.reset_location_view (insn);
2991
2992 if (reset)
2993 ;
2994 else if (JUMP_TABLE_DATA_P (insn))
2995 reset = 1;
2996 else if (GET_CODE (insn) == USE
2997 || GET_CODE (insn) == CLOBBER
2998 || GET_CODE (insn) == ASM_INPUT
2999 || asm_noperands (insn) >= 0)
3000 ;
3001 else if (get_attr_min_length (insn) > 0)
3002 reset = 1;
3003
3004 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3005 RESET_NEXT_VIEW (table->view);
3006 }
3007
3008 /* Each DIE attribute has a field specifying the attribute kind,
3009 a link to the next attribute in the chain, and an attribute value.
3010 Attributes are typically linked below the DIE they modify. */
3011
3012 typedef struct GTY(()) dw_attr_struct {
3013 enum dwarf_attribute dw_attr;
3014 dw_val_node dw_attr_val;
3015 }
3016 dw_attr_node;
3017
3018
3019 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3020 The children of each node form a circular list linked by
3021 die_sib. die_child points to the node *before* the "first" child node. */
3022
3023 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3024 union die_symbol_or_type_node
3025 {
3026 const char * GTY ((tag ("0"))) die_symbol;
3027 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3028 }
3029 GTY ((desc ("%0.comdat_type_p"))) die_id;
3030 vec<dw_attr_node, va_gc> *die_attr;
3031 dw_die_ref die_parent;
3032 dw_die_ref die_child;
3033 dw_die_ref die_sib;
3034 dw_die_ref die_definition; /* ref from a specification to its definition */
3035 dw_offset die_offset;
3036 unsigned long die_abbrev;
3037 int die_mark;
3038 unsigned int decl_id;
3039 enum dwarf_tag die_tag;
3040 /* Die is used and must not be pruned as unused. */
3041 BOOL_BITFIELD die_perennial_p : 1;
3042 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3043 /* For an external ref to die_symbol: set if die_offset contains an
3044 extra offset to that symbol. */
3045 BOOL_BITFIELD with_offset : 1;
3046 /* Whether this DIE was removed from the DIE tree, for example via
3047 prune_unused_types. We don't consider those present from the
3048 DIE lookup routines. */
3049 BOOL_BITFIELD removed : 1;
3050 /* Lots of spare bits. */
3051 }
3052 die_node;
3053
3054 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3055 static bool early_dwarf;
3056 static bool early_dwarf_finished;
3057 struct set_early_dwarf {
3058 bool saved;
3059 set_early_dwarf () : saved(early_dwarf)
3060 {
3061 gcc_assert (! early_dwarf_finished);
3062 early_dwarf = true;
3063 }
3064 ~set_early_dwarf () { early_dwarf = saved; }
3065 };
3066
3067 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3068 #define FOR_EACH_CHILD(die, c, expr) do { \
3069 c = die->die_child; \
3070 if (c) do { \
3071 c = c->die_sib; \
3072 expr; \
3073 } while (c != die->die_child); \
3074 } while (0)
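/* Editorial aside, not part of the original source: a standalone sketch of
   the circular sibling walk that FOR_EACH_CHILD performs.  die_child points
   at the last child (the node before the "first" child), so advancing
   through die_sib starting there visits the children in order until we
   return to die_child.  The names are hypothetical stand-ins for the real
   die_struct fields; kept under "#if 0" so it stays out of the build.  */
#if 0
struct sketch_die
{
  struct sketch_die *die_child;	/* last child, or NULL if childless */
  struct sketch_die *die_sib;	/* next sibling, circularly linked */
};

static void
sketch_visit_children (struct sketch_die *die,
		       void (*visit) (struct sketch_die *))
{
  struct sketch_die *c = die->die_child;
  if (c)
    do
      {
	c = c->die_sib;		/* first iteration yields the first child */
	visit (c);
      }
    while (c != die->die_child);
}
#endif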
3075
3076 /* The pubname structure */
3077
3078 typedef struct GTY(()) pubname_struct {
3079 dw_die_ref die;
3080 const char *name;
3081 }
3082 pubname_entry;
3083
3084
3085 struct GTY(()) dw_ranges {
3086 const char *label;
3087 /* If this is positive, it's a block number, otherwise it's a
3088 bitwise-negated index into dw_ranges_by_label. */
3089 int num;
3090 /* Index for the range list for DW_FORM_rnglistx. */
3091 unsigned int idx : 31;
3092 /* True if this range might be in a different section
3093 from the previous entry. */
3094 unsigned int maybe_new_sec : 1;
3095 };
3096
3097 /* A structure to hold a macinfo entry. */
3098
3099 typedef struct GTY(()) macinfo_struct {
3100 unsigned char code;
3101 unsigned HOST_WIDE_INT lineno;
3102 const char *info;
3103 }
3104 macinfo_entry;
3105
3106
3107 struct GTY(()) dw_ranges_by_label {
3108 const char *begin;
3109 const char *end;
3110 };
3111
3112 /* The comdat type node structure. */
3113 struct GTY(()) comdat_type_node
3114 {
3115 dw_die_ref root_die;
3116 dw_die_ref type_die;
3117 dw_die_ref skeleton_die;
3118 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3119 comdat_type_node *next;
3120 };
3121
3122 /* A list of DIEs for which we can't determine ancestry (parent_die
3123 field) just yet. Later in dwarf2out_finish we will fill in the
3124 missing bits. */
3125 typedef struct GTY(()) limbo_die_struct {
3126 dw_die_ref die;
3127 /* The tree for which this DIE was created. We use this to
3128 determine ancestry later. */
3129 tree created_for;
3130 struct limbo_die_struct *next;
3131 }
3132 limbo_die_node;
3133
3134 typedef struct skeleton_chain_struct
3135 {
3136 dw_die_ref old_die;
3137 dw_die_ref new_die;
3138 struct skeleton_chain_struct *parent;
3139 }
3140 skeleton_chain_node;
3141
3142 /* Define a macro which returns nonzero for a TYPE_DECL which was
3143 implicitly generated for a type.
3144
3145 Note that, unlike the C front-end (which generates a NULL-named
3146 TYPE_DECL node for each complete tagged type, each array type,
3147 and each function type node created) the C++ front-end generates
3148 a _named_ TYPE_DECL node for each tagged type node created.
3149 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3150 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3151 front-end, but for each type, tagged or not. */
3152
3153 #define TYPE_DECL_IS_STUB(decl) \
3154 (DECL_NAME (decl) == NULL_TREE \
3155 || (DECL_ARTIFICIAL (decl) \
3156 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3157 /* This is necessary for stub decls that \
3158 appear in nested inline functions. */ \
3159 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3160 && (decl_ultimate_origin (decl) \
3161 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3162
3163 /* Information concerning the compilation unit's programming
3164 language, and compiler version. */
3165
3166 /* Fixed size portion of the DWARF compilation unit header. */
3167 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3168 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3169 + (dwarf_version >= 5 ? 4 : 3))
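/* Editorial worked example, not from the original source: with 32-bit
   DWARF (4-byte initial length, 4-byte offsets) this is 4 + 4 + 3 == 11
   bytes for DWARF 2-4 (unit_length, version, debug_abbrev_offset,
   address_size) and 12 bytes for DWARF 5, which adds a unit_type byte.  */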
3170
3171 /* Fixed size portion of the DWARF comdat type unit header. */
3172 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3173 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3174 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3175
3176 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3177 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3178 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3179
3180 /* Fixed size portion of public names info. */
3181 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3182
3183 /* Fixed size portion of the address range info. */
3184 #define DWARF_ARANGES_HEADER_SIZE \
3185 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3186 DWARF2_ADDR_SIZE * 2) \
3187 - DWARF_INITIAL_LENGTH_SIZE)
3188
3189 /* Size of padding portion in the address range info. It must be
3190 aligned to twice the pointer size. */
3191 #define DWARF_ARANGES_PAD_SIZE \
3192 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3193 DWARF2_ADDR_SIZE * 2) \
3194 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
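/* Editorial worked example, not from the original source, assuming
   DWARF_ROUND rounds its first argument up to a multiple of its second:
   with 32-bit DWARF (4-byte initial length, 4-byte offsets) and 8-byte
   addresses, the 12-byte aranges header rounds up to 16, so
   DWARF_ARANGES_HEADER_SIZE is 16 - 4 == 12 and DWARF_ARANGES_PAD_SIZE
   is 16 - 12 == 4.  */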
3195
3196 /* Use assembler line directives if available. */
3197 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3198 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3199 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3200 #else
3201 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3202 #endif
3203 #endif
3204
3205 /* Use assembler views in line directives if available. */
3206 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3207 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3208 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3209 #else
3210 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3211 #endif
3212 #endif
3213
3214 /* Return true if GCC configure detected assembler support for .loc. */
3215
3216 bool
3217 dwarf2out_default_as_loc_support (void)
3218 {
3219 return DWARF2_ASM_LINE_DEBUG_INFO;
3220 #if (GCC_VERSION >= 3000)
3221 # undef DWARF2_ASM_LINE_DEBUG_INFO
3222 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3223 #endif
3224 }
3225
3226 /* Return true if GCC configure detected assembler support for views
3227 in .loc directives. */
3228
3229 bool
3230 dwarf2out_default_as_locview_support (void)
3231 {
3232 return DWARF2_ASM_VIEW_DEBUG_INFO;
3233 #if (GCC_VERSION >= 3000)
3234 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3235 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3236 #endif
3237 }
3238
3239 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3240 view computation, and it refers to a view identifier for which we
3241 will not emit a label because it is known to map to a view number
3242 zero. We won't allocate the bitmap if we're not using assembler
3243 support for location views, but we have to make the variable
3244 visible for GGC and for code that will be optimized out for lack of
3245 support but that's still parsed and compiled. We could abstract it
3246 out with macros, but it's not worth it. */
3247 static GTY(()) bitmap zero_view_p;
3248
3249 /* Evaluate to TRUE iff N is known to identify the first location view
3250 at its PC. When not using assembler location view computation,
3251 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3252 and the view label numbers recorded in it are the ones known to be
3253 zero. */
3254 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3255 || (N) == (var_loc_view)-1 \
3256 || (zero_view_p \
3257 && bitmap_bit_p (zero_view_p, (N))))
3258
3259 /* Return true iff we're to emit .loc directives for the assembler to
3260 generate line number sections.
3261
3262 When we're not emitting views, all we need from the assembler is
3263 support for .loc directives.
3264
3265 If we are emitting views, we can only use the assembler's .loc
3266 support if it also supports views.
3267
3268 When the compiler is emitting the line number programs and
3269 computing view numbers itself, it resets view numbers at known PC
3270 changes and counts from that, and then it emits view numbers as
3271 literal constants in locviewlists. There are cases in which the
3272 compiler is not sure about PC changes, e.g. when extra alignment is
3273 requested for a label. In these cases, the compiler may not reset
3274 the view counter, and the potential PC advance in the line number
3275 program will use an opcode that does not reset the view counter
3276 even if the PC actually changes, so that compiler and debug info
3277 consumer can keep view numbers in sync.
3278
3279 When the compiler defers view computation to the assembler, it
3280 emits symbolic view numbers in locviewlists, with the exception of
3281 views known to be zero (forced resets, or reset after
3282 compiler-visible PC changes): instead of emitting symbols for
3283 these, we emit literal zero and assert the assembler agrees with
3284 the compiler's assessment. We could use symbolic views everywhere,
3285 instead of special-casing zero views, but then we'd be unable to
3286 optimize out locviewlists that contain only zeros. */
3287
3288 static bool
3289 output_asm_line_debug_info (void)
3290 {
3291 return (dwarf2out_as_loc_support
3292 && (dwarf2out_as_locview_support
3293 || !debug_variable_location_views));
3294 }
3295
3296 /* Minimum line offset in a special line info. opcode.
3297 This value was chosen to give a reasonable range of values. */
3298 #define DWARF_LINE_BASE -10
3299
3300 /* First special line opcode - leave room for the standard opcodes. */
3301 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3302
3303 /* Range of line offsets in a special line info. opcode. */
3304 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3305
3306 /* Flag that indicates the initial value of the is_stmt_start flag.
3307 In the present implementation, we do not mark any lines as
3308 the beginning of a source statement, because that information
3309 is not made available by the GCC front-end. */
3310 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3311
3312 /* Maximum number of operations per instruction bundle. */
3313 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3314 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3315 #endif
3316
3317 /* This location is used by calc_die_sizes() to keep track of
3318 the offset of each DIE within the .debug_info section. */
3319 static unsigned long next_die_offset;
3320
3321 /* Record the root of the DIE's built for the current compilation unit. */
3322 static GTY(()) dw_die_ref single_comp_unit_die;
3323
3324 /* A list of type DIEs that have been separated into comdat sections. */
3325 static GTY(()) comdat_type_node *comdat_type_list;
3326
3327 /* A list of CU DIEs that have been separated. */
3328 static GTY(()) limbo_die_node *cu_die_list;
3329
3330 /* A list of DIEs with a NULL parent waiting to be relocated. */
3331 static GTY(()) limbo_die_node *limbo_die_list;
3332
3333 /* A list of DIEs for which we may have to generate
3334 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3335 static GTY(()) limbo_die_node *deferred_asm_name;
3336
3337 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3338 {
3339 typedef const char *compare_type;
3340
3341 static hashval_t hash (dwarf_file_data *);
3342 static bool equal (dwarf_file_data *, const char *);
3343 };
3344
3345 /* Filenames referenced by this compilation unit. */
3346 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3347
3348 struct decl_die_hasher : ggc_ptr_hash<die_node>
3349 {
3350 typedef tree compare_type;
3351
3352 static hashval_t hash (die_node *);
3353 static bool equal (die_node *, tree);
3354 };
3355 /* A hash table of references to DIE's that describe declarations.
3356 The key is a DECL_UID() which is a unique number identifying each decl. */
3357 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3358
3359 struct GTY ((for_user)) variable_value_struct {
3360 unsigned int decl_id;
3361 vec<dw_die_ref, va_gc> *dies;
3362 };
3363
3364 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3365 {
3366 typedef tree compare_type;
3367
3368 static hashval_t hash (variable_value_struct *);
3369 static bool equal (variable_value_struct *, tree);
3370 };
3371 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3372 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs which are
3373 the DECL_CONTEXT of the referenced VAR_DECLs. */
3374 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3375
3376 struct block_die_hasher : ggc_ptr_hash<die_struct>
3377 {
3378 static hashval_t hash (die_struct *);
3379 static bool equal (die_struct *, die_struct *);
3380 };
3381
3382 /* A hash table of references to DIE's that describe COMMON blocks.
3383 The key is DECL_UID() ^ die_parent. */
3384 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3385
3386 typedef struct GTY(()) die_arg_entry_struct {
3387 dw_die_ref die;
3388 tree arg;
3389 } die_arg_entry;
3390
3391
3392 /* Node of the variable location list. */
3393 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3394 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3395 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3396 in mode of the EXPR_LIST node and first EXPR_LIST operand
3397 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3398 location or NULL for padding. For larger bitsizes,
3399 mode is 0 and first operand is a CONCAT with bitsize
3400 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3401 NULL as second operand. */
3402 rtx GTY (()) loc;
3403 const char * GTY (()) label;
3404 struct var_loc_node * GTY (()) next;
3405 var_loc_view view;
3406 };
3407
3408 /* Variable location list. */
3409 struct GTY ((for_user)) var_loc_list_def {
3410 struct var_loc_node * GTY (()) first;
3411
3412 /* Pointer to the last or last-but-one element of the
3413 chained list. If the list is empty, both first and
3414 last are NULL. If the list contains just one node,
3415 or if the last node is certainly not redundant, this points
3416 to the last node; otherwise it points to the last but one.
3417 Do not mark it for GC because it is marked through the chain. */
3418 struct var_loc_node * GTY ((skip ("%h"))) last;
3419
3420 /* Pointer to the last element before section switch,
3421 if NULL, either sections weren't switched or first
3422 is after section switch. */
3423 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3424
3425 /* DECL_UID of the variable decl. */
3426 unsigned int decl_id;
3427 };
3428 typedef struct var_loc_list_def var_loc_list;
3429
3430 /* Call argument location list. */
3431 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3432 rtx GTY (()) call_arg_loc_note;
3433 const char * GTY (()) label;
3434 tree GTY (()) block;
3435 bool tail_call_p;
3436 rtx GTY (()) symbol_ref;
3437 struct call_arg_loc_node * GTY (()) next;
3438 };
3439
3440
3441 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3442 {
3443 typedef const_tree compare_type;
3444
3445 static hashval_t hash (var_loc_list *);
3446 static bool equal (var_loc_list *, const_tree);
3447 };
3448
3449 /* Table of decl location linked lists. */
3450 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3451
3452 /* Head and tail of call_arg_loc chain. */
3453 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3454 static struct call_arg_loc_node *call_arg_loc_last;
3455
3456 /* Number of call sites in the current function. */
3457 static int call_site_count = -1;
3458 /* Number of tail call sites in the current function. */
3459 static int tail_call_site_count = -1;
3460
3461 /* A cached location list. */
3462 struct GTY ((for_user)) cached_dw_loc_list_def {
3463 /* The DECL_UID of the decl that this entry describes. */
3464 unsigned int decl_id;
3465
3466 /* The cached location list. */
3467 dw_loc_list_ref loc_list;
3468 };
3469 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3470
3471 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3472 {
3473
3474 typedef const_tree compare_type;
3475
3476 static hashval_t hash (cached_dw_loc_list *);
3477 static bool equal (cached_dw_loc_list *, const_tree);
3478 };
3479
3480 /* Table of cached location lists. */
3481 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3482
3483 /* A vector of references to DIE's that are uniquely identified by their tag,
3484 presence/absence of children DIE's, and list of attribute/value pairs. */
3485 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3486
3487 /* A hash map to remember the stack usage for DWARF procedures. The value
3488 stored is the difference in stack size from before the DWARF procedure
3489 invocation to after it returned. In other words, for a DWARF procedure
3490 that consumes N stack slots and that pushes M ones, this stores M - N. */
3491 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3492
3493 /* A global counter for generating labels for line number data. */
3494 static unsigned int line_info_label_num;
3495
3496 /* The current table to which we should emit line number information
3497 for the current function. This will be set up at the beginning of
3498 assembly for the function. */
3499 static GTY(()) dw_line_info_table *cur_line_info_table;
3500
3501 /* The two default tables of line number info. */
3502 static GTY(()) dw_line_info_table *text_section_line_info;
3503 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3504
3505 /* The set of all non-default tables of line number info. */
3506 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3507
3508 /* A flag to tell pubnames/types export if there is an info section to
3509 refer to. */
3510 static bool info_section_emitted;
3511
3512 /* A pointer to the base of a table that contains a list of publicly
3513 accessible names. */
3514 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3515
3516 /* A pointer to the base of a table that contains a list of publicly
3517 accessible types. */
3518 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3519
3520 /* A pointer to the base of a table that contains a list of macro
3521 defines/undefines (and file start/end markers). */
3522 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3523
3524 /* True if .debug_macinfo or .debug_macros section is going to be
3525 emitted. */
3526 #define have_macinfo \
3527 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3528 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3529 && !macinfo_table->is_empty ())
3530
3531 /* Vector of dies for which we should generate .debug_ranges info. */
3532 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3533
3534 /* Vector of pairs of labels referenced in ranges_table. */
3535 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3536
3537 /* Whether we have location lists that need outputting. */
3538 static GTY(()) bool have_location_lists;
3539
3540 /* Unique label counter. */
3541 static GTY(()) unsigned int loclabel_num;
3542
3543 /* Unique label counter for point-of-call tables. */
3544 static GTY(()) unsigned int poc_label_num;
3545
3546 /* The last file entry emitted by maybe_emit_file(). */
3547 static GTY(()) struct dwarf_file_data * last_emitted_file;
3548
3549 /* Number of internal labels generated by gen_internal_sym(). */
3550 static GTY(()) int label_num;
3551
3552 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3553
3554 /* Instances of generic types for which we need to generate debug
3555 info that describe their generic parameters and arguments. That
3556 generation needs to happen once all types are properly laid out so
3557 we do it at the end of compilation. */
3558 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3559
3560 /* Offset from the "steady-state frame pointer" to the frame base,
3561 within the current function. */
3562 static poly_int64 frame_pointer_fb_offset;
3563 static bool frame_pointer_fb_offset_valid;
3564
3565 static vec<dw_die_ref> base_types;
3566
3567 /* Flags to represent a set of attribute classes for attributes that represent
3568 a scalar value (bounds, pointers, ...). */
3569 enum dw_scalar_form
3570 {
3571 dw_scalar_form_constant = 0x01,
3572 dw_scalar_form_exprloc = 0x02,
3573 dw_scalar_form_reference = 0x04
3574 };
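
/* A minimal usage sketch (kept under #if 0, not part of the build): the
   values above are bit flags, so callers describe the set of acceptable
   forms by OR-ing them together.  Going only by the forward declaration
   of add_scalar_info below, a caller asking for either a constant or an
   exprloc form might look like this; the function and variable names in
   the sketch are hypothetical.  */
#if 0
static void
sketch_add_lower_bound (dw_die_ref die, tree bound,
                        struct loc_descr_context *ctx)
{
  /* DIE, attribute, value tree, mask of acceptable forms, context.  */
  add_scalar_info (die, DW_AT_lower_bound, bound,
                   dw_scalar_form_constant | dw_scalar_form_exprloc, ctx);
}
#endif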
3575
3576 /* Forward declarations for functions defined in this file. */
3577
3578 static int is_pseudo_reg (const_rtx);
3579 static tree type_main_variant (tree);
3580 static int is_tagged_type (const_tree);
3581 static const char *dwarf_tag_name (unsigned);
3582 static const char *dwarf_attr_name (unsigned);
3583 static const char *dwarf_form_name (unsigned);
3584 static tree decl_ultimate_origin (const_tree);
3585 static tree decl_class_context (tree);
3586 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3587 static inline enum dw_val_class AT_class (dw_attr_node *);
3588 static inline unsigned int AT_index (dw_attr_node *);
3589 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3590 static inline unsigned AT_flag (dw_attr_node *);
3591 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3592 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3593 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3594 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3595 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3596 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3597 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3598 unsigned int, unsigned char *);
3599 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3600 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3601 static inline const char *AT_string (dw_attr_node *);
3602 static enum dwarf_form AT_string_form (dw_attr_node *);
3603 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3604 static void add_AT_specification (dw_die_ref, dw_die_ref);
3605 static inline dw_die_ref AT_ref (dw_attr_node *);
3606 static inline int AT_ref_external (dw_attr_node *);
3607 static inline void set_AT_ref_external (dw_attr_node *, int);
3608 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3609 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3610 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3611 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3612 dw_loc_list_ref);
3613 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3614 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3615 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3616 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3617 static void remove_addr_table_entry (addr_table_entry *);
3618 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3619 static inline rtx AT_addr (dw_attr_node *);
3620 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3621 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3622 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3623 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3624 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3625 const char *);
3626 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3627 unsigned HOST_WIDE_INT);
3628 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3629 unsigned long, bool);
3630 static inline const char *AT_lbl (dw_attr_node *);
3631 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3632 static const char *get_AT_low_pc (dw_die_ref);
3633 static const char *get_AT_hi_pc (dw_die_ref);
3634 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3635 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3636 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3637 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3638 static bool is_cxx (void);
3639 static bool is_cxx (const_tree);
3640 static bool is_fortran (void);
3641 static bool is_ada (void);
3642 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3643 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3644 static void add_child_die (dw_die_ref, dw_die_ref);
3645 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3646 static dw_die_ref lookup_type_die (tree);
3647 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3648 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3649 static void equate_type_number_to_die (tree, dw_die_ref);
3650 static dw_die_ref lookup_decl_die (tree);
3651 static var_loc_list *lookup_decl_loc (const_tree);
3652 static void equate_decl_number_to_die (tree, dw_die_ref);
3653 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3654 static void print_spaces (FILE *);
3655 static void print_die (dw_die_ref, FILE *);
3656 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3657 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3658 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3659 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3660 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3661 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3662 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3663 struct md5_ctx *, int *);
3664 struct checksum_attributes;
3665 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3666 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3667 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3668 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3669 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3670 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3671 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3672 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3673 static int is_type_die (dw_die_ref);
3674 static int is_comdat_die (dw_die_ref);
3675 static inline bool is_template_instantiation (dw_die_ref);
3676 static int is_declaration_die (dw_die_ref);
3677 static int should_move_die_to_comdat (dw_die_ref);
3678 static dw_die_ref clone_as_declaration (dw_die_ref);
3679 static dw_die_ref clone_die (dw_die_ref);
3680 static dw_die_ref clone_tree (dw_die_ref);
3681 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3682 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3683 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3684 static dw_die_ref generate_skeleton (dw_die_ref);
3685 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3686 dw_die_ref,
3687 dw_die_ref);
3688 static void break_out_comdat_types (dw_die_ref);
3689 static void copy_decls_for_unworthy_types (dw_die_ref);
3690
3691 static void add_sibling_attributes (dw_die_ref);
3692 static void output_location_lists (dw_die_ref);
3693 static int constant_size (unsigned HOST_WIDE_INT);
3694 static unsigned long size_of_die (dw_die_ref);
3695 static void calc_die_sizes (dw_die_ref);
3696 static void calc_base_type_die_sizes (void);
3697 static void mark_dies (dw_die_ref);
3698 static void unmark_dies (dw_die_ref);
3699 static void unmark_all_dies (dw_die_ref);
3700 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3701 static unsigned long size_of_aranges (void);
3702 static enum dwarf_form value_format (dw_attr_node *);
3703 static void output_value_format (dw_attr_node *);
3704 static void output_abbrev_section (void);
3705 static void output_die_abbrevs (unsigned long, dw_die_ref);
3706 static void output_die (dw_die_ref);
3707 static void output_compilation_unit_header (enum dwarf_unit_type);
3708 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3709 static void output_comdat_type_unit (comdat_type_node *);
3710 static const char *dwarf2_name (tree, int);
3711 static void add_pubname (tree, dw_die_ref);
3712 static void add_enumerator_pubname (const char *, dw_die_ref);
3713 static void add_pubname_string (const char *, dw_die_ref);
3714 static void add_pubtype (tree, dw_die_ref);
3715 static void output_pubnames (vec<pubname_entry, va_gc> *);
3716 static void output_aranges (void);
3717 static unsigned int add_ranges (const_tree, bool = false);
3718 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3719 bool *, bool);
3720 static void output_ranges (void);
3721 static dw_line_info_table *new_line_info_table (void);
3722 static void output_line_info (bool);
3723 static void output_file_names (void);
3724 static dw_die_ref base_type_die (tree, bool);
3725 static int is_base_type (tree);
3726 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3727 static int decl_quals (const_tree);
3728 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3729 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3730 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3731 static int type_is_enum (const_tree);
3732 static unsigned int dbx_reg_number (const_rtx);
3733 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3734 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3735 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3736 enum var_init_status);
3737 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3738 enum var_init_status);
3739 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3740 enum var_init_status);
3741 static int is_based_loc (const_rtx);
3742 static bool resolve_one_addr (rtx *);
3743 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3744 enum var_init_status);
3745 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3746 enum var_init_status);
3747 struct loc_descr_context;
3748 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3749 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3750 static dw_loc_list_ref loc_list_from_tree (tree, int,
3751 struct loc_descr_context *);
3752 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3753 struct loc_descr_context *);
3754 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3755 static tree field_type (const_tree);
3756 static unsigned int simple_type_align_in_bits (const_tree);
3757 static unsigned int simple_decl_align_in_bits (const_tree);
3758 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3759 struct vlr_context;
3760 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3761 HOST_WIDE_INT *);
3762 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3763 dw_loc_list_ref);
3764 static void add_data_member_location_attribute (dw_die_ref, tree,
3765 struct vlr_context *);
3766 static bool add_const_value_attribute (dw_die_ref, rtx);
3767 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3768 static void insert_wide_int (const wide_int &, unsigned char *, int);
3769 static void insert_float (const_rtx, unsigned char *);
3770 static rtx rtl_for_decl_location (tree);
3771 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3772 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3773 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3774 static void add_name_attribute (dw_die_ref, const char *);
3775 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3776 static void add_comp_dir_attribute (dw_die_ref);
3777 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3778 struct loc_descr_context *);
3779 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3780 struct loc_descr_context *);
3781 static void add_subscript_info (dw_die_ref, tree, bool);
3782 static void add_byte_size_attribute (dw_die_ref, tree);
3783 static void add_alignment_attribute (dw_die_ref, tree);
3784 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3785 struct vlr_context *);
3786 static void add_bit_size_attribute (dw_die_ref, tree);
3787 static void add_prototyped_attribute (dw_die_ref, tree);
3788 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3789 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3790 static void add_src_coords_attributes (dw_die_ref, tree);
3791 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3792 static void add_discr_value (dw_die_ref, dw_discr_value *);
3793 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3794 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3795 static void push_decl_scope (tree);
3796 static void pop_decl_scope (void);
3797 static dw_die_ref scope_die_for (tree, dw_die_ref);
3798 static inline int local_scope_p (dw_die_ref);
3799 static inline int class_scope_p (dw_die_ref);
3800 static inline int class_or_namespace_scope_p (dw_die_ref);
3801 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3802 static void add_calling_convention_attribute (dw_die_ref, tree);
3803 static const char *type_tag (const_tree);
3804 static tree member_declared_type (const_tree);
3805 #if 0
3806 static const char *decl_start_label (tree);
3807 #endif
3808 static void gen_array_type_die (tree, dw_die_ref);
3809 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3810 #if 0
3811 static void gen_entry_point_die (tree, dw_die_ref);
3812 #endif
3813 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3814 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3815 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3816 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3817 static void gen_formal_types_die (tree, dw_die_ref);
3818 static void gen_subprogram_die (tree, dw_die_ref);
3819 static void gen_variable_die (tree, tree, dw_die_ref);
3820 static void gen_const_die (tree, dw_die_ref);
3821 static void gen_label_die (tree, dw_die_ref);
3822 static void gen_lexical_block_die (tree, dw_die_ref);
3823 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3824 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3825 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3826 static dw_die_ref gen_compile_unit_die (const char *);
3827 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3828 static void gen_member_die (tree, dw_die_ref);
3829 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3830 enum debug_info_usage);
3831 static void gen_subroutine_type_die (tree, dw_die_ref);
3832 static void gen_typedef_die (tree, dw_die_ref);
3833 static void gen_type_die (tree, dw_die_ref);
3834 static void gen_block_die (tree, dw_die_ref);
3835 static void decls_for_scope (tree, dw_die_ref);
3836 static bool is_naming_typedef_decl (const_tree);
3837 static inline dw_die_ref get_context_die (tree);
3838 static void gen_namespace_die (tree, dw_die_ref);
3839 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3840 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3841 static dw_die_ref force_decl_die (tree);
3842 static dw_die_ref force_type_die (tree);
3843 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3844 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3845 static struct dwarf_file_data * lookup_filename (const char *);
3846 static void retry_incomplete_types (void);
3847 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3848 static void gen_generic_params_dies (tree);
3849 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3850 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3851 static void splice_child_die (dw_die_ref, dw_die_ref);
3852 static int file_info_cmp (const void *, const void *);
3853 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3854 const char *, var_loc_view, const char *);
3855 static void output_loc_list (dw_loc_list_ref);
3856 static char *gen_internal_sym (const char *);
3857 static bool want_pubnames (void);
3858
3859 static void prune_unmark_dies (dw_die_ref);
3860 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3861 static void prune_unused_types_mark (dw_die_ref, int);
3862 static void prune_unused_types_walk (dw_die_ref);
3863 static void prune_unused_types_walk_attribs (dw_die_ref);
3864 static void prune_unused_types_prune (dw_die_ref);
3865 static void prune_unused_types (void);
3866 static int maybe_emit_file (struct dwarf_file_data *fd);
3867 static inline const char *AT_vms_delta1 (dw_attr_node *);
3868 static inline const char *AT_vms_delta2 (dw_attr_node *);
3869 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3870 const char *, const char *);
3871 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3872 static void gen_remaining_tmpl_value_param_die_attribute (void);
3873 static bool generic_type_p (tree);
3874 static void schedule_generic_params_dies_gen (tree t);
3875 static void gen_scheduled_generic_parms_dies (void);
3876 static void resolve_variable_values (void);
3877
3878 static const char *comp_dir_string (void);
3879
3880 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3881
3882 /* enum for tracking thread-local variables whose address is really an offset
3883 relative to the TLS pointer, which will need link-time relocation, but will
3884 not need relocation by the DWARF consumer. */
3885
3886 enum dtprel_bool
3887 {
3888 dtprel_false = 0,
3889 dtprel_true = 1
3890 };
3891
3892 /* Return the operator to use for an address of a variable. For dtprel_true, we
3893 use DW_OP_const*. For regular variables, which need both link-time
3894 relocation and consumer-level relocation (e.g., to account for shared objects
3895 loaded at a random address), we use DW_OP_addr*. */
3896
3897 static inline enum dwarf_location_atom
3898 dw_addr_op (enum dtprel_bool dtprel)
3899 {
3900 if (dtprel == dtprel_true)
3901 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3902 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3903 else
3904 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3905 }
3906
3907 /* Return a pointer to a newly allocated address location description. If
3908 dwarf_split_debug_info is true, then record the address with the appropriate
3909 relocation. */
3910 static inline dw_loc_descr_ref
3911 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3912 {
3913 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3914
3915 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3916 ref->dw_loc_oprnd1.v.val_addr = addr;
3917 ref->dtprel = dtprel;
3918 if (dwarf_split_debug_info)
3919 ref->dw_loc_oprnd1.val_entry
3920 = add_addr_table_entry (addr,
3921 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3922 else
3923 ref->dw_loc_oprnd1.val_entry = NULL;
3924
3925 return ref;
3926 }
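
/* A minimal usage sketch (under #if 0, not compiled): choosing between a
   link-time-only TLS offset and an ordinary address.  MEM_RTX stands for
   an address rtx the (hypothetical) caller already has.  */
#if 0
static dw_loc_descr_ref
sketch_addr_descr (rtx mem_rtx, bool is_tls_offset)
{
  /* dtprel_true yields a DW_OP_const* (or GNU const index) operator that
     only needs link-time relocation; dtprel_false yields DW_OP_addr or,
     under dwarf_split_debug_info, DW_OP_GNU_addr_index.  */
  return new_addr_loc_descr (mem_rtx,
                             is_tls_offset ? dtprel_true : dtprel_false);
}
#endif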
3927
3928 /* Section names used to hold DWARF debugging information. */
3929
3930 #ifndef DEBUG_INFO_SECTION
3931 #define DEBUG_INFO_SECTION ".debug_info"
3932 #endif
3933 #ifndef DEBUG_DWO_INFO_SECTION
3934 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3935 #endif
3936 #ifndef DEBUG_LTO_INFO_SECTION
3937 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3938 #endif
3939 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3940 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3941 #endif
3942 #ifndef DEBUG_ABBREV_SECTION
3943 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3944 #endif
3945 #ifndef DEBUG_LTO_ABBREV_SECTION
3946 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3947 #endif
3948 #ifndef DEBUG_DWO_ABBREV_SECTION
3949 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3950 #endif
3951 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3952 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3953 #endif
3954 #ifndef DEBUG_ARANGES_SECTION
3955 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3956 #endif
3957 #ifndef DEBUG_ADDR_SECTION
3958 #define DEBUG_ADDR_SECTION ".debug_addr"
3959 #endif
3960 #ifndef DEBUG_MACINFO_SECTION
3961 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3962 #endif
3963 #ifndef DEBUG_LTO_MACINFO_SECTION
3964 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3965 #endif
3966 #ifndef DEBUG_DWO_MACINFO_SECTION
3967 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3970 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3971 #endif
3972 #ifndef DEBUG_MACRO_SECTION
3973 #define DEBUG_MACRO_SECTION ".debug_macro"
3974 #endif
3975 #ifndef DEBUG_LTO_MACRO_SECTION
3976 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3977 #endif
3978 #ifndef DEBUG_DWO_MACRO_SECTION
3979 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3982 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3983 #endif
3984 #ifndef DEBUG_LINE_SECTION
3985 #define DEBUG_LINE_SECTION ".debug_line"
3986 #endif
3987 #ifndef DEBUG_LTO_LINE_SECTION
3988 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3989 #endif
3990 #ifndef DEBUG_DWO_LINE_SECTION
3991 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3994 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3995 #endif
3996 #ifndef DEBUG_LOC_SECTION
3997 #define DEBUG_LOC_SECTION ".debug_loc"
3998 #endif
3999 #ifndef DEBUG_DWO_LOC_SECTION
4000 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4001 #endif
4002 #ifndef DEBUG_LOCLISTS_SECTION
4003 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4004 #endif
4005 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4006 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4007 #endif
4008 #ifndef DEBUG_PUBNAMES_SECTION
4009 #define DEBUG_PUBNAMES_SECTION \
4010 ((debug_generate_pub_sections == 2) \
4011 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4012 #endif
4013 #ifndef DEBUG_PUBTYPES_SECTION
4014 #define DEBUG_PUBTYPES_SECTION \
4015 ((debug_generate_pub_sections == 2) \
4016 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4017 #endif
4018 #ifndef DEBUG_STR_OFFSETS_SECTION
4019 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4020 #endif
4021 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4022 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4025 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4026 #endif
4027 #ifndef DEBUG_STR_SECTION
4028 #define DEBUG_STR_SECTION ".debug_str"
4029 #endif
4030 #ifndef DEBUG_LTO_STR_SECTION
4031 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4032 #endif
4033 #ifndef DEBUG_STR_DWO_SECTION
4034 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_STR_DWO_SECTION
4037 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4038 #endif
4039 #ifndef DEBUG_RANGES_SECTION
4040 #define DEBUG_RANGES_SECTION ".debug_ranges"
4041 #endif
4042 #ifndef DEBUG_RNGLISTS_SECTION
4043 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4044 #endif
4045 #ifndef DEBUG_LINE_STR_SECTION
4046 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4047 #endif
4048 #ifndef DEBUG_LTO_LINE_STR_SECTION
4049 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4050 #endif
4051
4052 /* Standard ELF section names for compiled code and data. */
4053 #ifndef TEXT_SECTION_NAME
4054 #define TEXT_SECTION_NAME ".text"
4055 #endif
4056
4057 /* Section flags for .debug_str section. */
4058 #define DEBUG_STR_SECTION_FLAGS \
4059 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4060 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4061 : SECTION_DEBUG)
4062
4063 /* Section flags for .debug_str.dwo section. */
4064 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4065
4066 /* Attribute used to refer to the macro section. */
4067 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4068 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
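
/* Concretely: DWARF 5 and later use the standard DW_AT_macros attribute;
   with -gstrict-dwarf on earlier DWARF versions we fall back to
   DW_AT_macro_info; otherwise the GNU extension DW_AT_GNU_macros is
   used.  */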
4069
4070 /* Labels we insert at the beginning of sections, so that we can refer to
4071 them instead of the section names themselves. */
4072
4073 #ifndef TEXT_SECTION_LABEL
4074 #define TEXT_SECTION_LABEL "Ltext"
4075 #endif
4076 #ifndef COLD_TEXT_SECTION_LABEL
4077 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4078 #endif
4079 #ifndef DEBUG_LINE_SECTION_LABEL
4080 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4081 #endif
4082 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4083 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4084 #endif
4085 #ifndef DEBUG_INFO_SECTION_LABEL
4086 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4087 #endif
4088 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4089 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4090 #endif
4091 #ifndef DEBUG_ABBREV_SECTION_LABEL
4092 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4093 #endif
4094 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4095 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4096 #endif
4097 #ifndef DEBUG_ADDR_SECTION_LABEL
4098 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4099 #endif
4100 #ifndef DEBUG_LOC_SECTION_LABEL
4101 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4102 #endif
4103 #ifndef DEBUG_RANGES_SECTION_LABEL
4104 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4105 #endif
4106 #ifndef DEBUG_MACINFO_SECTION_LABEL
4107 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4108 #endif
4109 #ifndef DEBUG_MACRO_SECTION_LABEL
4110 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4111 #endif
4112 #define SKELETON_COMP_DIE_ABBREV 1
4113 #define SKELETON_TYPE_DIE_ABBREV 2
4114
4115 /* Definitions of defaults for formats and names of various special
4116 (artificial) labels which may be generated within this file (when the -g
4117 option is used and DWARF2_DEBUGGING_INFO is in effect).
4118 If necessary, these may be overridden from within the tm.h file, but
4119 typically, overriding these defaults is unnecessary. */
4120
4121 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4122 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4123 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4124 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4125 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4126 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4127 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4128 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4129 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4130 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4131 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4132 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4134 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4135 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4136
4137 #ifndef TEXT_END_LABEL
4138 #define TEXT_END_LABEL "Letext"
4139 #endif
4140 #ifndef COLD_END_LABEL
4141 #define COLD_END_LABEL "Letext_cold"
4142 #endif
4143 #ifndef BLOCK_BEGIN_LABEL
4144 #define BLOCK_BEGIN_LABEL "LBB"
4145 #endif
4146 #ifndef BLOCK_INLINE_ENTRY_LABEL
4147 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4148 #endif
4149 #ifndef BLOCK_END_LABEL
4150 #define BLOCK_END_LABEL "LBE"
4151 #endif
4152 #ifndef LINE_CODE_LABEL
4153 #define LINE_CODE_LABEL "LM"
4154 #endif
4155
4156 \f
4157 /* Return the root of the DIEs built for the current compilation unit. */
4158 static dw_die_ref
4159 comp_unit_die (void)
4160 {
4161 if (!single_comp_unit_die)
4162 single_comp_unit_die = gen_compile_unit_die (NULL);
4163 return single_comp_unit_die;
4164 }
4165
4166 /* We allow a language front-end to designate a function that is to be
4167 called to "demangle" any name before it is put into a DIE. */
4168
4169 static const char *(*demangle_name_func) (const char *);
4170
4171 void
4172 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4173 {
4174 demangle_name_func = func;
4175 }
4176
4177 /* Test if rtl node points to a pseudo register. */
4178
4179 static inline int
4180 is_pseudo_reg (const_rtx rtl)
4181 {
4182 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4183 || (GET_CODE (rtl) == SUBREG
4184 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4185 }
4186
4187 /* Return a reference to a type, with its const and volatile qualifiers
4188 removed. */
4189
4190 static inline tree
4191 type_main_variant (tree type)
4192 {
4193 type = TYPE_MAIN_VARIANT (type);
4194
4195 /* ??? There really should be only one main variant among any group of
4196 variants of a given type (and all of the MAIN_VARIANT values for all
4197 members of the group should point to that one type) but sometimes the C
4198 front-end messes this up for array types, so we work around that bug
4199 here. */
4200 if (TREE_CODE (type) == ARRAY_TYPE)
4201 while (type != TYPE_MAIN_VARIANT (type))
4202 type = TYPE_MAIN_VARIANT (type);
4203
4204 return type;
4205 }
4206
4207 /* Return nonzero if the given type node represents a tagged type. */
4208
4209 static inline int
4210 is_tagged_type (const_tree type)
4211 {
4212 enum tree_code code = TREE_CODE (type);
4213
4214 return (code == RECORD_TYPE || code == UNION_TYPE
4215 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4216 }
4217
4218 /* Set LABEL to debug_info_section_label + die_offset of a DIE reference. */
4219
4220 static void
4221 get_ref_die_offset_label (char *label, dw_die_ref ref)
4222 {
4223 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4224 }
4225
4226 /* Return die_offset of a DIE reference to a base type. */
4227
4228 static unsigned long int
4229 get_base_type_offset (dw_die_ref ref)
4230 {
4231 if (ref->die_offset)
4232 return ref->die_offset;
4233 if (comp_unit_die ()->die_abbrev)
4234 {
4235 calc_base_type_die_sizes ();
4236 gcc_assert (ref->die_offset);
4237 }
4238 return ref->die_offset;
4239 }
4240
4241 /* Return die_offset of a DIE reference other than base type. */
4242
4243 static unsigned long int
4244 get_ref_die_offset (dw_die_ref ref)
4245 {
4246 gcc_assert (ref->die_offset);
4247 return ref->die_offset;
4248 }
4249
4250 /* Convert a DIE tag into its string name. */
4251
4252 static const char *
4253 dwarf_tag_name (unsigned int tag)
4254 {
4255 const char *name = get_DW_TAG_name (tag);
4256
4257 if (name != NULL)
4258 return name;
4259
4260 return "DW_TAG_<unknown>";
4261 }
4262
4263 /* Convert a DWARF attribute code into its string name. */
4264
4265 static const char *
4266 dwarf_attr_name (unsigned int attr)
4267 {
4268 const char *name;
4269
4270 switch (attr)
4271 {
4272 #if VMS_DEBUGGING_INFO
4273 case DW_AT_HP_prologue:
4274 return "DW_AT_HP_prologue";
4275 #else
4276 case DW_AT_MIPS_loop_unroll_factor:
4277 return "DW_AT_MIPS_loop_unroll_factor";
4278 #endif
4279
4280 #if VMS_DEBUGGING_INFO
4281 case DW_AT_HP_epilogue:
4282 return "DW_AT_HP_epilogue";
4283 #else
4284 case DW_AT_MIPS_stride:
4285 return "DW_AT_MIPS_stride";
4286 #endif
4287 }
4288
4289 name = get_DW_AT_name (attr);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_AT_<unknown>";
4295 }
4296
4297 /* Convert a DWARF value form code into its string name. */
4298
4299 static const char *
4300 dwarf_form_name (unsigned int form)
4301 {
4302 const char *name = get_DW_FORM_name (form);
4303
4304 if (name != NULL)
4305 return name;
4306
4307 return "DW_FORM_<unknown>";
4308 }
4309 \f
4310 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4311 instance of an inlined instance of a decl which is local to an inline
4312 function, so we have to trace all of the way back through the origin chain
4313 to find out what sort of node actually served as the original seed for the
4314 given block. */
4315
4316 static tree
4317 decl_ultimate_origin (const_tree decl)
4318 {
4319 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4320 return NULL_TREE;
4321
4322 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4323 we're trying to output the abstract instance of this function. */
4324 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4325 return NULL_TREE;
4326
4327 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4328 most distant ancestor, this should never happen. */
4329 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4330
4331 return DECL_ABSTRACT_ORIGIN (decl);
4332 }
4333
4334 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4335 of a virtual function may refer to a base class, so we check the 'this'
4336 parameter. */
4337
4338 static tree
4339 decl_class_context (tree decl)
4340 {
4341 tree context = NULL_TREE;
4342
4343 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4344 context = DECL_CONTEXT (decl);
4345 else
4346 context = TYPE_MAIN_VARIANT
4347 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4348
4349 if (context && !TYPE_P (context))
4350 context = NULL_TREE;
4351
4352 return context;
4353 }
4354 \f
4355 /* Add an attribute/value pair to a DIE. */
4356
4357 static inline void
4358 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4359 {
4360 /* Maybe this should be an assert? */
4361 if (die == NULL)
4362 return;
4363
4364 if (flag_checking)
4365 {
4366 /* Check we do not add duplicate attrs. Can't use get_AT here
4367 because that recurses to the specification/abstract origin DIE. */
4368 dw_attr_node *a;
4369 unsigned ix;
4370 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4371 gcc_assert (a->dw_attr != attr->dw_attr);
4372 }
4373
4374 vec_safe_reserve (die->die_attr, 1);
4375 vec_safe_push (die->die_attr, *attr);
4376 }
4377
4378 static inline enum dw_val_class
4379 AT_class (dw_attr_node *a)
4380 {
4381 return a->dw_attr_val.val_class;
4382 }
4383
4384 /* Return the index for any attribute that will be referenced with a
4385 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4386 are stored in dw_attr_val.v.val_str for reference counting
4387 pruning. */
4388
4389 static inline unsigned int
4390 AT_index (dw_attr_node *a)
4391 {
4392 if (AT_class (a) == dw_val_class_str)
4393 return a->dw_attr_val.v.val_str->index;
4394 else if (a->dw_attr_val.val_entry != NULL)
4395 return a->dw_attr_val.val_entry->index;
4396 return NOT_INDEXED;
4397 }
4398
4399 /* Add a flag value attribute to a DIE. */
4400
4401 static inline void
4402 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4403 {
4404 dw_attr_node attr;
4405
4406 attr.dw_attr = attr_kind;
4407 attr.dw_attr_val.val_class = dw_val_class_flag;
4408 attr.dw_attr_val.val_entry = NULL;
4409 attr.dw_attr_val.v.val_flag = flag;
4410 add_dwarf_attr (die, &attr);
4411 }
4412
4413 static inline unsigned
4414 AT_flag (dw_attr_node *a)
4415 {
4416 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4417 return a->dw_attr_val.v.val_flag;
4418 }
4419
4420 /* Add a signed integer attribute value to a DIE. */
4421
4422 static inline void
4423 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4424 {
4425 dw_attr_node attr;
4426
4427 attr.dw_attr = attr_kind;
4428 attr.dw_attr_val.val_class = dw_val_class_const;
4429 attr.dw_attr_val.val_entry = NULL;
4430 attr.dw_attr_val.v.val_int = int_val;
4431 add_dwarf_attr (die, &attr);
4432 }
4433
4434 static inline HOST_WIDE_INT
4435 AT_int (dw_attr_node *a)
4436 {
4437 gcc_assert (a && (AT_class (a) == dw_val_class_const
4438 || AT_class (a) == dw_val_class_const_implicit));
4439 return a->dw_attr_val.v.val_int;
4440 }
4441
4442 /* Add an unsigned integer attribute value to a DIE. */
4443
4444 static inline void
4445 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4446 unsigned HOST_WIDE_INT unsigned_val)
4447 {
4448 dw_attr_node attr;
4449
4450 attr.dw_attr = attr_kind;
4451 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4452 attr.dw_attr_val.val_entry = NULL;
4453 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4454 add_dwarf_attr (die, &attr);
4455 }
4456
4457 static inline unsigned HOST_WIDE_INT
4458 AT_unsigned (dw_attr_node *a)
4459 {
4460 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4461 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4462 return a->dw_attr_val.v.val_unsigned;
4463 }
4464
4465 /* Add an unsigned wide integer attribute value to a DIE. */
4466
4467 static inline void
4468 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4469 const wide_int& w)
4470 {
4471 dw_attr_node attr;
4472
4473 attr.dw_attr = attr_kind;
4474 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4475 attr.dw_attr_val.val_entry = NULL;
4476 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4477 *attr.dw_attr_val.v.val_wide = w;
4478 add_dwarf_attr (die, &attr);
4479 }
4480
4481 /* Add an unsigned double integer attribute value to a DIE. */
4482
4483 static inline void
4484 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4485 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4486 {
4487 dw_attr_node attr;
4488
4489 attr.dw_attr = attr_kind;
4490 attr.dw_attr_val.val_class = dw_val_class_const_double;
4491 attr.dw_attr_val.val_entry = NULL;
4492 attr.dw_attr_val.v.val_double.high = high;
4493 attr.dw_attr_val.v.val_double.low = low;
4494 add_dwarf_attr (die, &attr);
4495 }
4496
4497 /* Add a vector attribute value (LENGTH elements of ELT_SIZE bytes each) to a DIE. */
4498
4499 static inline void
4500 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4501 unsigned int length, unsigned int elt_size, unsigned char *array)
4502 {
4503 dw_attr_node attr;
4504
4505 attr.dw_attr = attr_kind;
4506 attr.dw_attr_val.val_class = dw_val_class_vec;
4507 attr.dw_attr_val.val_entry = NULL;
4508 attr.dw_attr_val.v.val_vec.length = length;
4509 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4510 attr.dw_attr_val.v.val_vec.array = array;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an 8-byte data attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 unsigned char data8[8])
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_data8;
4524 attr.dw_attr_val.val_entry = NULL;
4525 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4526 add_dwarf_attr (die, &attr);
4527 }
4528
4529 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4530 dwarf_split_debug_info, address attributes in dies destined for the
4531 final executable have force_direct set to avoid using indexed
4532 references. */
4533
4534 static inline void
4535 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4536 bool force_direct)
4537 {
4538 dw_attr_node attr;
4539 char * lbl_id;
4540
4541 lbl_id = xstrdup (lbl_low);
4542 attr.dw_attr = DW_AT_low_pc;
4543 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4544 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4545 if (dwarf_split_debug_info && !force_direct)
4546 attr.dw_attr_val.val_entry
4547 = add_addr_table_entry (lbl_id, ate_kind_label);
4548 else
4549 attr.dw_attr_val.val_entry = NULL;
4550 add_dwarf_attr (die, &attr);
4551
4552 attr.dw_attr = DW_AT_high_pc;
4553 if (dwarf_version < 4)
4554 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4555 else
4556 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4557 lbl_id = xstrdup (lbl_high);
4558 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4559 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4560 && dwarf_split_debug_info && !force_direct)
4561 attr.dw_attr_val.val_entry
4562 = add_addr_table_entry (lbl_id, ate_kind_label);
4563 else
4564 attr.dw_attr_val.val_entry = NULL;
4565 add_dwarf_attr (die, &attr);
4566 }
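
/* A minimal usage sketch (under #if 0, not compiled): attaching the code
   bounds of the main text section to a DIE using the section labels
   defined later in this file.  Passing force_direct = true keeps both
   labels as direct references instead of routing them through the
   .debug_addr index table; the sketch's function name is hypothetical.  */
#if 0
static void
sketch_add_text_bounds (dw_die_ref die)
{
  add_AT_low_high_pc (die, text_section_label, text_end_label,
                      /*force_direct=*/true);
}
#endif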
4567
4568 /* Hash and equality functions for debug_str_hash. */
4569
4570 hashval_t
4571 indirect_string_hasher::hash (indirect_string_node *x)
4572 {
4573 return htab_hash_string (x->str);
4574 }
4575
4576 bool
4577 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4578 {
4579 return strcmp (x1->str, x2) == 0;
4580 }
4581
4582 /* Add STR to the given string hash table. */
4583
4584 static struct indirect_string_node *
4585 find_AT_string_in_table (const char *str,
4586 hash_table<indirect_string_hasher> *table)
4587 {
4588 struct indirect_string_node *node;
4589
4590 indirect_string_node **slot
4591 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4592 if (*slot == NULL)
4593 {
4594 node = ggc_cleared_alloc<indirect_string_node> ();
4595 node->str = ggc_strdup (str);
4596 *slot = node;
4597 }
4598 else
4599 node = *slot;
4600
4601 node->refcount++;
4602 return node;
4603 }
4604
4605 /* Add STR to the indirect string hash table. */
4606
4607 static struct indirect_string_node *
4608 find_AT_string (const char *str)
4609 {
4610 if (! debug_str_hash)
4611 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4612
4613 return find_AT_string_in_table (str, debug_str_hash);
4614 }
4615
4616 /* Add a string attribute value to a DIE. */
4617
4618 static inline void
4619 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4620 {
4621 dw_attr_node attr;
4622 struct indirect_string_node *node;
4623
4624 node = find_AT_string (str);
4625
4626 attr.dw_attr = attr_kind;
4627 attr.dw_attr_val.val_class = dw_val_class_str;
4628 attr.dw_attr_val.val_entry = NULL;
4629 attr.dw_attr_val.v.val_str = node;
4630 add_dwarf_attr (die, &attr);
4631 }
4632
4633 static inline const char *
4634 AT_string (dw_attr_node *a)
4635 {
4636 gcc_assert (a && AT_class (a) == dw_val_class_str);
4637 return a->dw_attr_val.v.val_str->str;
4638 }
4639
4640 /* Call this function directly to bypass AT_string_form's logic to put
4641 the string inline in the die. */
4642
4643 static void
4644 set_indirect_string (struct indirect_string_node *node)
4645 {
4646 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4647 /* Already indirect is a no op. */
4648 if (node->form == DW_FORM_strp
4649 || node->form == DW_FORM_line_strp
4650 || node->form == DW_FORM_GNU_str_index)
4651 {
4652 gcc_assert (node->label);
4653 return;
4654 }
4655 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4656 ++dw2_string_counter;
4657 node->label = xstrdup (label);
4658
4659 if (!dwarf_split_debug_info)
4660 {
4661 node->form = DW_FORM_strp;
4662 node->index = NOT_INDEXED;
4663 }
4664 else
4665 {
4666 node->form = DW_FORM_GNU_str_index;
4667 node->index = NO_INDEX_ASSIGNED;
4668 }
4669 }
4670
4671 /* A helper function for dwarf2out_finish, called to reset indirect
4672 string decisions done for early LTO dwarf output before fat object
4673 dwarf output. */
4674
4675 int
4676 reset_indirect_string (indirect_string_node **h, void *)
4677 {
4678 struct indirect_string_node *node = *h;
4679 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4680 {
4681 free (node->label);
4682 node->label = NULL;
4683 node->form = (dwarf_form) 0;
4684 node->index = 0;
4685 }
4686 return 1;
4687 }
4688
4689 /* Find out whether a string should be output inline in DIE
4690 or out-of-line in .debug_str section. */
4691
4692 static enum dwarf_form
4693 find_string_form (struct indirect_string_node *node)
4694 {
4695 unsigned int len;
4696
4697 if (node->form)
4698 return node->form;
4699
4700 len = strlen (node->str) + 1;
4701
4702 /* If the string is no longer than the size of the reference, it is
4703 always better to put it inline. */
4704 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4705 return node->form = DW_FORM_string;
4706
4707 /* If we cannot expect the linker to merge strings in the .debug_str
4708 section, only put the string into .debug_str if doing so pays off
4709 even within this single module. */
4710 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4711 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4712 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4713 return node->form = DW_FORM_string;
4714
4715 set_indirect_string (node);
4716
4717 return node->form;
4718 }
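
/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE is 4:
   the string "abc" takes 4 bytes including the terminating NUL, no more
   than the 4-byte offset that would replace it, so it always stays inline
   as DW_FORM_string.  Without mergeable string support, a 20-byte string
   referenced once also stays inline, since (20 - 4) * 1 <= 20; referenced
   twice, (20 - 4) * 2 > 20, so it is moved to .debug_str via
   set_indirect_string.  */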
4719
4720 /* Find out whether the string referenced from the attribute should be
4721 output inline in DIE or out-of-line in .debug_str section. */
4722
4723 static enum dwarf_form
4724 AT_string_form (dw_attr_node *a)
4725 {
4726 gcc_assert (a && AT_class (a) == dw_val_class_str);
4727 return find_string_form (a->dw_attr_val.v.val_str);
4728 }
4729
4730 /* Add a DIE reference attribute value to a DIE. */
4731
4732 static inline void
4733 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4734 {
4735 dw_attr_node attr;
4736 gcc_checking_assert (targ_die != NULL);
4737
4738 /* With LTO we can end up trying to reference something we didn't create
4739 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4740 if (targ_die == NULL)
4741 return;
4742
4743 attr.dw_attr = attr_kind;
4744 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4745 attr.dw_attr_val.val_entry = NULL;
4746 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4747 attr.dw_attr_val.v.val_die_ref.external = 0;
4748 add_dwarf_attr (die, &attr);
4749 }
4750
4751 /* Change DIE reference REF to point to NEW_DIE instead. */
4752
4753 static inline void
4754 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4755 {
4756 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4757 ref->dw_attr_val.v.val_die_ref.die = new_die;
4758 ref->dw_attr_val.v.val_die_ref.external = 0;
4759 }
4760
4761 /* Add an AT_specification attribute to a DIE, and also make the back
4762 pointer from the specification to the definition. */
4763
4764 static inline void
4765 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4766 {
4767 add_AT_die_ref (die, DW_AT_specification, targ_die);
4768 gcc_assert (!targ_die->die_definition);
4769 targ_die->die_definition = die;
4770 }
4771
4772 static inline dw_die_ref
4773 AT_ref (dw_attr_node *a)
4774 {
4775 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4776 return a->dw_attr_val.v.val_die_ref.die;
4777 }
4778
4779 static inline int
4780 AT_ref_external (dw_attr_node *a)
4781 {
4782 if (a && AT_class (a) == dw_val_class_die_ref)
4783 return a->dw_attr_val.v.val_die_ref.external;
4784
4785 return 0;
4786 }
4787
4788 static inline void
4789 set_AT_ref_external (dw_attr_node *a, int i)
4790 {
4791 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4792 a->dw_attr_val.v.val_die_ref.external = i;
4793 }
4794
4795 /* Add an FDE reference attribute value to a DIE. */
4796
4797 static inline void
4798 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4799 {
4800 dw_attr_node attr;
4801
4802 attr.dw_attr = attr_kind;
4803 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4804 attr.dw_attr_val.val_entry = NULL;
4805 attr.dw_attr_val.v.val_fde_index = targ_fde;
4806 add_dwarf_attr (die, &attr);
4807 }
4808
4809 /* Add a location description attribute value to a DIE. */
4810
4811 static inline void
4812 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4813 {
4814 dw_attr_node attr;
4815
4816 attr.dw_attr = attr_kind;
4817 attr.dw_attr_val.val_class = dw_val_class_loc;
4818 attr.dw_attr_val.val_entry = NULL;
4819 attr.dw_attr_val.v.val_loc = loc;
4820 add_dwarf_attr (die, &attr);
4821 }
4822
4823 static inline dw_loc_descr_ref
4824 AT_loc (dw_attr_node *a)
4825 {
4826 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4827 return a->dw_attr_val.v.val_loc;
4828 }
4829
4830 static inline void
4831 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4832 {
4833 dw_attr_node attr;
4834
4835 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4836 return;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc_list = loc_list;
4842 add_dwarf_attr (die, &attr);
4843 have_location_lists = true;
4844 }
4845
4846 static inline dw_loc_list_ref
4847 AT_loc_list (dw_attr_node *a)
4848 {
4849 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4850 return a->dw_attr_val.v.val_loc_list;
4851 }
4852
4853 /* Add a view list attribute to DIE, which must already have a DW_AT_location
4854 attribute, because the view list complements the location list. */
4855
4856 static inline void
4857 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4858 {
4859 dw_attr_node attr;
4860
4861 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4862 return;
4863
4864 attr.dw_attr = attr_kind;
4865 attr.dw_attr_val.val_class = dw_val_class_view_list;
4866 attr.dw_attr_val.val_entry = NULL;
4867 attr.dw_attr_val.v.val_view_list = die;
4868 add_dwarf_attr (die, &attr);
4869 gcc_checking_assert (get_AT (die, DW_AT_location));
4870 gcc_assert (have_location_lists);
4871 }
4872
4873 /* Return a pointer to the location list referenced by the attribute.
4874 If the named attribute is a view list, look up the corresponding
4875 DW_AT_location attribute and return its location list. */
4876
4877 static inline dw_loc_list_ref *
4878 AT_loc_list_ptr (dw_attr_node *a)
4879 {
4880 gcc_assert (a);
4881 switch (AT_class (a))
4882 {
4883 case dw_val_class_loc_list:
4884 return &a->dw_attr_val.v.val_loc_list;
4885 case dw_val_class_view_list:
4886 {
4887 dw_attr_node *l;
4888 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4889 if (!l)
4890 return NULL;
4891 gcc_checking_assert (l + 1 == a);
4892 return AT_loc_list_ptr (l);
4893 }
4894 default:
4895 gcc_unreachable ();
4896 }
4897 }
4898
4899 /* Return the location attribute value associated with a view list
4900 attribute value. */
4901
4902 static inline dw_val_node *
4903 view_list_to_loc_list_val_node (dw_val_node *val)
4904 {
4905 gcc_assert (val->val_class == dw_val_class_view_list);
4906 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4907 if (!loc)
4908 return NULL;
4909 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4910 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4911 return &loc->dw_attr_val;
4912 }
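
/* A minimal sketch (under #if 0, not compiled) of the call order the two
   helpers above rely on: the view list attribute must land immediately
   after DW_AT_location in the DIE's attribute vector, hence the
   "loc + 1" checks.  VIEW_ATTR stands for whatever attribute the caller
   uses for the view list; the function name is hypothetical.  */
#if 0
static void
sketch_add_loc_and_views (dw_die_ref die, dw_loc_list_ref loc_list,
                          enum dwarf_attribute view_attr)
{
  add_AT_loc_list (die, DW_AT_location, loc_list);
  add_AT_view_list (die, view_attr);	/* Must directly follow.  */
}
#endif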
4913
4914 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4915 {
4916 static hashval_t hash (addr_table_entry *);
4917 static bool equal (addr_table_entry *, addr_table_entry *);
4918 };
4919
4920 /* Table of entries into the .debug_addr section. */
4921
4922 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4923
4924 /* Hash an addr_table_entry. */
4925
4926 hashval_t
4927 addr_hasher::hash (addr_table_entry *a)
4928 {
4929 inchash::hash hstate;
4930 switch (a->kind)
4931 {
4932 case ate_kind_rtx:
4933 hstate.add_int (0);
4934 break;
4935 case ate_kind_rtx_dtprel:
4936 hstate.add_int (1);
4937 break;
4938 case ate_kind_label:
4939 return htab_hash_string (a->addr.label);
4940 default:
4941 gcc_unreachable ();
4942 }
4943 inchash::add_rtx (a->addr.rtl, hstate);
4944 return hstate.end ();
4945 }
4946
4947 /* Determine equality for two addr_table_entries. */
4948
4949 bool
4950 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4951 {
4952 if (a1->kind != a2->kind)
4953 return 0;
4954 switch (a1->kind)
4955 {
4956 case ate_kind_rtx:
4957 case ate_kind_rtx_dtprel:
4958 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4959 case ate_kind_label:
4960 return strcmp (a1->addr.label, a2->addr.label) == 0;
4961 default:
4962 gcc_unreachable ();
4963 }
4964 }
4965
4966 /* Initialize an addr_table_entry. */
4967
4968 void
4969 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4970 {
4971 e->kind = kind;
4972 switch (kind)
4973 {
4974 case ate_kind_rtx:
4975 case ate_kind_rtx_dtprel:
4976 e->addr.rtl = (rtx) addr;
4977 break;
4978 case ate_kind_label:
4979 e->addr.label = (char *) addr;
4980 break;
4981 }
4982 e->refcount = 0;
4983 e->index = NO_INDEX_ASSIGNED;
4984 }
4985
4986 /* Add an address table entry for ADDR of kind KIND to the table. Defer
4987 setting an index until output time. */
4988
4989 static addr_table_entry *
4990 add_addr_table_entry (void *addr, enum ate_kind kind)
4991 {
4992 addr_table_entry *node;
4993 addr_table_entry finder;
4994
4995 gcc_assert (dwarf_split_debug_info);
4996 if (! addr_index_table)
4997 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4998 init_addr_table_entry (&finder, kind, addr);
4999 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5000
5001 if (*slot == HTAB_EMPTY_ENTRY)
5002 {
5003 node = ggc_cleared_alloc<addr_table_entry> ();
5004 init_addr_table_entry (node, kind, addr);
5005 *slot = node;
5006 }
5007 else
5008 node = *slot;
5009
5010 node->refcount++;
5011 return node;
5012 }
5013
5014 /* Remove an entry from the addr table by decrementing its refcount.
5015 Strictly, decrementing the refcount would be enough, but the
5016 assertion that the entry is actually in the table has found
5017 bugs. */
5018
5019 static void
5020 remove_addr_table_entry (addr_table_entry *entry)
5021 {
5022 gcc_assert (dwarf_split_debug_info && addr_index_table);
5023 /* After an index is assigned, the table is frozen. */
5024 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5025 entry->refcount--;
5026 }
5027
5028 /* Given a location list, remove all addresses it refers to from the
5029 address_table. */
5030
5031 static void
5032 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5033 {
5034 for (; descr; descr = descr->dw_loc_next)
5035 if (descr->dw_loc_oprnd1.val_entry != NULL)
5036 {
5037 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5038 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5039 }
5040 }
5041
5042 /* A helper function for dwarf2out_finish called through
5043 htab_traverse. Assign an addr_table_entry its index. All entries
5044 must be collected into the table when this function is called,
5045 because the indexing code relies on htab_traverse to traverse nodes
5046 in the same order for each run. */
5047
5048 int
5049 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5050 {
5051 addr_table_entry *node = *h;
5052
5053 /* Don't index unreferenced nodes. */
5054 if (node->refcount == 0)
5055 return 1;
5056
5057 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5058 node->index = *index;
5059 *index += 1;
5060
5061 return 1;
5062 }
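/* Illustrative sketch, not part of the actual GCC sources: the lifetime
   of an address table entry when -gsplit-dwarf is in effect, using only
   the helpers defined above.  SYMBOL is a hypothetical rtx.  */
#if 0
static void
addr_table_lifetime_sketch (rtx symbol)
{
  /* Creation returns the (possibly shared) entry and bumps its refcount.  */
  addr_table_entry *e = add_addr_table_entry (symbol, ate_kind_rtx);

  /* If the referencing attribute is later dropped, so is the refcount;
     unreferenced entries are skipped when indices are assigned.  */
  remove_addr_table_entry (e);

  /* At output time each surviving slot gets a consecutive index via
     index_addr_table_entry, in a stable traversal order.  */
  unsigned int index = 0;
  index_addr_table_entry (&e, &index);
}
#endif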
5063
5064 /* Add an address constant attribute value to a DIE. When using
5065 dwarf_split_debug_info, address attributes in dies destined for the
5066 final executable should be direct references--setting the parameter
5067 force_direct ensures this behavior. */
5068
5069 static inline void
5070 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5071 bool force_direct)
5072 {
5073 dw_attr_node attr;
5074
5075 attr.dw_attr = attr_kind;
5076 attr.dw_attr_val.val_class = dw_val_class_addr;
5077 attr.dw_attr_val.v.val_addr = addr;
5078 if (dwarf_split_debug_info && !force_direct)
5079 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5080 else
5081 attr.dw_attr_val.val_entry = NULL;
5082 add_dwarf_attr (die, &attr);
5083 }
5084
5085 /* Get the RTX from an address DIE attribute. */
5086
5087 static inline rtx
5088 AT_addr (dw_attr_node *a)
5089 {
5090 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5091 return a->dw_attr_val.v.val_addr;
5092 }
5093
5094 /* Add a file attribute value to a DIE. */
5095
5096 static inline void
5097 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5098 struct dwarf_file_data *fd)
5099 {
5100 dw_attr_node attr;
5101
5102 attr.dw_attr = attr_kind;
5103 attr.dw_attr_val.val_class = dw_val_class_file;
5104 attr.dw_attr_val.val_entry = NULL;
5105 attr.dw_attr_val.v.val_file = fd;
5106 add_dwarf_attr (die, &attr);
5107 }
5108
5109 /* Get the dwarf_file_data from a file DIE attribute. */
5110
5111 static inline struct dwarf_file_data *
5112 AT_file (dw_attr_node *a)
5113 {
5114 gcc_assert (a && (AT_class (a) == dw_val_class_file
5115 || AT_class (a) == dw_val_class_file_implicit));
5116 return a->dw_attr_val.v.val_file;
5117 }
5118
5119 /* Add a vms delta attribute value to a DIE. */
5120
5121 static inline void
5122 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5123 const char *lbl1, const char *lbl2)
5124 {
5125 dw_attr_node attr;
5126
5127 attr.dw_attr = attr_kind;
5128 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5129 attr.dw_attr_val.val_entry = NULL;
5130 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5131 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5132 add_dwarf_attr (die, &attr);
5133 }
5134
5135 /* Add a symbolic view identifier attribute value to a DIE. */
5136
5137 static inline void
5138 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5139 const char *view_label)
5140 {
5141 dw_attr_node attr;
5142
5143 attr.dw_attr = attr_kind;
5144 attr.dw_attr_val.val_class = dw_val_class_symview;
5145 attr.dw_attr_val.val_entry = NULL;
5146 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5147 add_dwarf_attr (die, &attr);
5148 }
5149
5150 /* Add a label identifier attribute value to a DIE. */
5151
5152 static inline void
5153 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5154 const char *lbl_id)
5155 {
5156 dw_attr_node attr;
5157
5158 attr.dw_attr = attr_kind;
5159 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5160 attr.dw_attr_val.val_entry = NULL;
5161 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5162 if (dwarf_split_debug_info)
5163 attr.dw_attr_val.val_entry
5164 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5165 ate_kind_label);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a section offset attribute value to a DIE, an offset into the
5170 debug_line section. */
5171
5172 static inline void
5173 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5174 const char *label)
5175 {
5176 dw_attr_node attr;
5177
5178 attr.dw_attr = attr_kind;
5179 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5180 attr.dw_attr_val.val_entry = NULL;
5181 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5182 add_dwarf_attr (die, &attr);
5183 }
5184
5185 /* Add a section offset attribute value to a DIE, an offset into the
5186 debug_loclists section. */
5187
5188 static inline void
5189 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5190 const char *label)
5191 {
5192 dw_attr_node attr;
5193
5194 attr.dw_attr = attr_kind;
5195 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5196 attr.dw_attr_val.val_entry = NULL;
5197 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5198 add_dwarf_attr (die, &attr);
5199 }
5200
5201 /* Add a section offset attribute value to a DIE, an offset into the
5202 debug_macinfo section. */
5203
5204 static inline void
5205 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5206 const char *label)
5207 {
5208 dw_attr_node attr;
5209
5210 attr.dw_attr = attr_kind;
5211 attr.dw_attr_val.val_class = dw_val_class_macptr;
5212 attr.dw_attr_val.val_entry = NULL;
5213 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5214 add_dwarf_attr (die, &attr);
5215 }
5216
5217 /* Add an offset attribute value to a DIE. */
5218
5219 static inline void
5220 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5221 unsigned HOST_WIDE_INT offset)
5222 {
5223 dw_attr_node attr;
5224
5225 attr.dw_attr = attr_kind;
5226 attr.dw_attr_val.val_class = dw_val_class_offset;
5227 attr.dw_attr_val.val_entry = NULL;
5228 attr.dw_attr_val.v.val_offset = offset;
5229 add_dwarf_attr (die, &attr);
5230 }
5231
5232 /* Add a range_list attribute value to a DIE. When using
5233 dwarf_split_debug_info, address attributes in dies destined for the
5234 final executable should be direct references--setting the parameter
5235 force_direct ensures this behavior. */
5236
5237 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5238 #define RELOCATED_OFFSET (NULL)
5239
5240 static void
5241 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 long unsigned int offset, bool force_direct)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_range_list;
5248 /* For the range_list attribute, use val_entry to store whether the
5249 offset should follow split-debug-info or normal semantics. This
5250 value is read in output_range_list_offset. */
5251 if (dwarf_split_debug_info && !force_direct)
5252 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5253 else
5254 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5255 attr.dw_attr_val.v.val_offset = offset;
5256 add_dwarf_attr (die, &attr);
5257 }
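/* Illustrative sketch, not part of the actual GCC sources: how the
   add_AT_* constructors above are typically driven.  DIE, the label
   string and the attribute choices are placeholders only; the common
   pattern is to fill a dw_attr_node and attach it, with val_entry
   selecting between a direct reference and a .debug_addr index.  */
#if 0
static void
add_AT_usage_sketch (dw_die_ref die)
{
  /* A label-valued attribute; under -gsplit-dwarf add_AT_lbl_id also
     registers the label in the address table.  */
  add_AT_lbl_id (die, DW_AT_low_pc, "*.LSKETCH0");

  /* A range-list offset; the final argument chooses between
     split-debug-info and direct (relocated) semantics.  */
  add_AT_range_list (die, DW_AT_ranges, 0, false);
}
#endif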
5258
5259 /* Return the start label of a delta attribute. */
5260
5261 static inline const char *
5262 AT_vms_delta1 (dw_attr_node *a)
5263 {
5264 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5265 return a->dw_attr_val.v.val_vms_delta.lbl1;
5266 }
5267
5268 /* Return the end label of a delta attribute. */
5269
5270 static inline const char *
5271 AT_vms_delta2 (dw_attr_node *a)
5272 {
5273 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5274 return a->dw_attr_val.v.val_vms_delta.lbl2;
5275 }
5276
5277 static inline const char *
5278 AT_lbl (dw_attr_node *a)
5279 {
5280 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5281 || AT_class (a) == dw_val_class_lineptr
5282 || AT_class (a) == dw_val_class_macptr
5283 || AT_class (a) == dw_val_class_loclistsptr
5284 || AT_class (a) == dw_val_class_high_pc));
5285 return a->dw_attr_val.v.val_lbl_id;
5286 }
5287
5288 /* Get the attribute of type attr_kind. */
5289
5290 static dw_attr_node *
5291 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5292 {
5293 dw_attr_node *a;
5294 unsigned ix;
5295 dw_die_ref spec = NULL;
5296
5297 if (! die)
5298 return NULL;
5299
5300 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5301 if (a->dw_attr == attr_kind)
5302 return a;
5303 else if (a->dw_attr == DW_AT_specification
5304 || a->dw_attr == DW_AT_abstract_origin)
5305 spec = AT_ref (a);
5306
5307 if (spec)
5308 return get_AT (spec, attr_kind);
5309
5310 return NULL;
5311 }
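/* Illustrative sketch, not part of the actual GCC sources: get_AT
   transparently follows DW_AT_specification / DW_AT_abstract_origin
   links.  For instance, an out-of-class member function definition DIE
   often carries only DW_AT_specification pointing at the in-class
   declaration, yet querying the definition still finds the name.  */
#if 0
static void
get_AT_indirection_sketch (dw_die_ref definition_die)
{
  /* Found on the referenced declaration DIE, not necessarily on
     DEFINITION_DIE itself.  */
  dw_attr_node *name = get_AT (definition_die, DW_AT_name);
  (void) name;
}
#endif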
5312
5313 /* Returns the parent of the declaration of DIE. */
5314
5315 static dw_die_ref
5316 get_die_parent (dw_die_ref die)
5317 {
5318 dw_die_ref t;
5319
5320 if (!die)
5321 return NULL;
5322
5323 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5324 || (t = get_AT_ref (die, DW_AT_specification)))
5325 die = t;
5326
5327 return die->die_parent;
5328 }
5329
5330 /* Return the "low pc" attribute value, typically associated with a subprogram
5331 DIE. Return null if the "low pc" attribute is either not present, or if it
5332 cannot be represented as an assembler label identifier. */
5333
5334 static inline const char *
5335 get_AT_low_pc (dw_die_ref die)
5336 {
5337 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5338
5339 return a ? AT_lbl (a) : NULL;
5340 }
5341
5342 /* Return the "high pc" attribute value, typically associated with a subprogram
5343 DIE. Return null if the "high pc" attribute is either not present, or if it
5344 cannot be represented as an assembler label identifier. */
5345
5346 static inline const char *
5347 get_AT_hi_pc (dw_die_ref die)
5348 {
5349 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5350
5351 return a ? AT_lbl (a) : NULL;
5352 }
5353
5354 /* Return the value of the string attribute designated by ATTR_KIND, or
5355 NULL if it is not present. */
5356
5357 static inline const char *
5358 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5359 {
5360 dw_attr_node *a = get_AT (die, attr_kind);
5361
5362 return a ? AT_string (a) : NULL;
5363 }
5364
5365 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5366 if it is not present. */
5367
5368 static inline int
5369 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5370 {
5371 dw_attr_node *a = get_AT (die, attr_kind);
5372
5373 return a ? AT_flag (a) : 0;
5374 }
5375
5376 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5377 if it is not present. */
5378
5379 static inline unsigned
5380 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5381 {
5382 dw_attr_node *a = get_AT (die, attr_kind);
5383
5384 return a ? AT_unsigned (a) : 0;
5385 }
5386
5387 static inline dw_die_ref
5388 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5389 {
5390 dw_attr_node *a = get_AT (die, attr_kind);
5391
5392 return a ? AT_ref (a) : NULL;
5393 }
5394
5395 static inline struct dwarf_file_data *
5396 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5397 {
5398 dw_attr_node *a = get_AT (die, attr_kind);
5399
5400 return a ? AT_file (a) : NULL;
5401 }
5402
5403 /* Return TRUE if the language is C++. */
5404
5405 static inline bool
5406 is_cxx (void)
5407 {
5408 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5409
5410 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5411 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5412 }
5413
5414 /* Return TRUE if DECL was created by the C++ frontend. */
5415
5416 static bool
5417 is_cxx (const_tree decl)
5418 {
5419 if (in_lto_p)
5420 {
5421 const_tree context = get_ultimate_context (decl);
5422 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5423 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5424 }
5425 return is_cxx ();
5426 }
5427
5428 /* Return TRUE if the language is Fortran. */
5429
5430 static inline bool
5431 is_fortran (void)
5432 {
5433 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5434
5435 return (lang == DW_LANG_Fortran77
5436 || lang == DW_LANG_Fortran90
5437 || lang == DW_LANG_Fortran95
5438 || lang == DW_LANG_Fortran03
5439 || lang == DW_LANG_Fortran08);
5440 }
5441
5442 static inline bool
5443 is_fortran (const_tree decl)
5444 {
5445 if (in_lto_p)
5446 {
5447 const_tree context = get_ultimate_context (decl);
5448 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5449 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU Fortran", 11) == 0
5451 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5452 "GNU F77") == 0);
5453 }
5454 return is_fortran ();
5455 }
5456
5457 /* Return TRUE if the language is Ada. */
5458
5459 static inline bool
5460 is_ada (void)
5461 {
5462 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5463
5464 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5465 }
5466
5467 /* Remove the specified attribute if present. Return TRUE if removal
5468 was successful. */
5469
5470 static bool
5471 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5472 {
5473 dw_attr_node *a;
5474 unsigned ix;
5475
5476 if (! die)
5477 return false;
5478
5479 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5480 if (a->dw_attr == attr_kind)
5481 {
5482 if (AT_class (a) == dw_val_class_str)
5483 if (a->dw_attr_val.v.val_str->refcount)
5484 a->dw_attr_val.v.val_str->refcount--;
5485
5486 /* vec::ordered_remove should help reduce the number of abbrevs
5487 that are needed. */
5488 die->die_attr->ordered_remove (ix);
5489 return true;
5490 }
5491 return false;
5492 }
5493
5494 /* Remove CHILD from its parent. PREV must have the property that
5495 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5496
5497 static void
5498 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5499 {
5500 gcc_assert (child->die_parent == prev->die_parent);
5501 gcc_assert (prev->die_sib == child);
5502 if (prev == child)
5503 {
5504 gcc_assert (child->die_parent->die_child == child);
5505 prev = NULL;
5506 }
5507 else
5508 prev->die_sib = child->die_sib;
5509 if (child->die_parent->die_child == child)
5510 child->die_parent->die_child = prev;
5511 child->die_sib = NULL;
5512 }
5513
5514 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5515 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5516
5517 static void
5518 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5519 {
5520 dw_die_ref parent = old_child->die_parent;
5521
5522 gcc_assert (parent == prev->die_parent);
5523 gcc_assert (prev->die_sib == old_child);
5524
5525 new_child->die_parent = parent;
5526 if (prev == old_child)
5527 {
5528 gcc_assert (parent->die_child == old_child);
5529 new_child->die_sib = new_child;
5530 }
5531 else
5532 {
5533 prev->die_sib = new_child;
5534 new_child->die_sib = old_child->die_sib;
5535 }
5536 if (old_child->die_parent->die_child == old_child)
5537 old_child->die_parent->die_child = new_child;
5538 old_child->die_sib = NULL;
5539 }
5540
5541 /* Move all children from OLD_PARENT to NEW_PARENT. */
5542
5543 static void
5544 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5545 {
5546 dw_die_ref c;
5547 new_parent->die_child = old_parent->die_child;
5548 old_parent->die_child = NULL;
5549 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5550 }
5551
5552 /* Remove all child DIEs whose die_tag is TAG.  Do nothing if no child
5553    matches TAG. */
5554
5555 static void
5556 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5557 {
5558 dw_die_ref c;
5559
5560 c = die->die_child;
5561 if (c) do {
5562 dw_die_ref prev = c;
5563 c = c->die_sib;
5564 while (c->die_tag == tag)
5565 {
5566 remove_child_with_prev (c, prev);
5567 c->die_parent = NULL;
5568 /* Might have removed every child. */
5569 if (die->die_child == NULL)
5570 return;
5571 c = prev->die_sib;
5572 }
5573 } while (c != die->die_child);
5574 }
5575
5576 /* Add a CHILD_DIE as the last child of DIE. */
5577
5578 static void
5579 add_child_die (dw_die_ref die, dw_die_ref child_die)
5580 {
5581 /* FIXME this should probably be an assert. */
5582 if (! die || ! child_die)
5583 return;
5584 gcc_assert (die != child_die);
5585
5586 child_die->die_parent = die;
5587 if (die->die_child)
5588 {
5589 child_die->die_sib = die->die_child->die_sib;
5590 die->die_child->die_sib = child_die;
5591 }
5592 else
5593 child_die->die_sib = child_die;
5594 die->die_child = child_die;
5595 }
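/* Note (illustrative, not from the original sources): the child list
   built above is a circular singly-linked list in which DIE->die_child
   points at the *last* child and each child's die_sib points at the
   next, wrapping back to the first.  After adding children A, B and C
   in that order:

       die->die_child --> C
       C->die_sib --> A,   A->die_sib --> B,   B->die_sib --> C

   which is why the FOR_EACH_CHILD walks used elsewhere in this file
   start from die_child->die_sib (the first child) and visit children in
   insertion order.  */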
5596
5597 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5598
5599 static void
5600 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5601 dw_die_ref after_die)
5602 {
5603 gcc_assert (die
5604 && child_die
5605 && after_die
5606 && die->die_child
5607 && die != child_die);
5608
5609 child_die->die_parent = die;
5610 child_die->die_sib = after_die->die_sib;
5611 after_die->die_sib = child_die;
5612 if (die->die_child == after_die)
5613 die->die_child = child_die;
5614 }
5615
5616 /* Unassociate CHILD from its parent, and make its parent be
5617 NEW_PARENT. */
5618
5619 static void
5620 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5621 {
5622 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5623 if (p->die_sib == child)
5624 {
5625 remove_child_with_prev (child, p);
5626 break;
5627 }
5628 add_child_die (new_parent, child);
5629 }
5630
5631 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5632 is the specification, to the end of PARENT's list of children.
5633 This is done by removing and re-adding it. */
5634
5635 static void
5636 splice_child_die (dw_die_ref parent, dw_die_ref child)
5637 {
5638 /* We want the declaration DIE from inside the class, not the
5639 specification DIE at toplevel. */
5640 if (child->die_parent != parent)
5641 {
5642 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5643
5644 if (tmp)
5645 child = tmp;
5646 }
5647
5648 gcc_assert (child->die_parent == parent
5649 || (child->die_parent
5650 == get_AT_ref (parent, DW_AT_specification)));
5651
5652 reparent_child (child, parent);
5653 }
5654
5655 /* Create and return a new die with TAG_VALUE as tag. */
5656
5657 static inline dw_die_ref
5658 new_die_raw (enum dwarf_tag tag_value)
5659 {
5660 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5661 die->die_tag = tag_value;
5662 return die;
5663 }
5664
5665 /* Create and return a new die with a parent of PARENT_DIE. If
5666 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5667 associated tree T must be supplied to determine parenthood
5668 later. */
5669
5670 static inline dw_die_ref
5671 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5672 {
5673 dw_die_ref die = new_die_raw (tag_value);
5674
5675 if (parent_die != NULL)
5676 add_child_die (parent_die, die);
5677 else
5678 {
5679 limbo_die_node *limbo_node;
5680
5681 /* No DIEs created after early dwarf should end up in limbo,
5682 because the limbo list should not persist past LTO
5683 streaming. */
5684 if (tag_value != DW_TAG_compile_unit
5685 /* These are allowed because they're generated while
5686 breaking out COMDAT units late. */
5687 && tag_value != DW_TAG_type_unit
5688 && tag_value != DW_TAG_skeleton_unit
5689 && !early_dwarf
5690 /* Allow nested functions to live in limbo because they will
5691 only temporarily live there, as decls_for_scope will fix
5692 them up. */
5693 && (TREE_CODE (t) != FUNCTION_DECL
5694 || !decl_function_context (t))
5695 /* Same as nested functions above but for types. Types that
5696 are local to a function will be fixed in
5697 decls_for_scope. */
5698 && (!RECORD_OR_UNION_TYPE_P (t)
5699 || !TYPE_CONTEXT (t)
5700 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5701 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5702 especially in the ltrans stage, but once we implement LTO
5703 dwarf streaming, we should remove this exception. */
5704 && !in_lto_p)
5705 {
5706 fprintf (stderr, "symbol ended up in limbo too late:");
5707 debug_generic_stmt (t);
5708 gcc_unreachable ();
5709 }
5710
5711 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5712 limbo_node->die = die;
5713 limbo_node->created_for = t;
5714 limbo_node->next = limbo_die_list;
5715 limbo_die_list = limbo_node;
5716 }
5717
5718 return die;
5719 }
5720
5721 /* Return the DIE associated with the given type specifier. */
5722
5723 static inline dw_die_ref
5724 lookup_type_die (tree type)
5725 {
5726 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5727 if (die && die->removed)
5728 {
5729 TYPE_SYMTAB_DIE (type) = NULL;
5730 return NULL;
5731 }
5732 return die;
5733 }
5734
5735 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5736 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5737    anonymous type instead of the one of the naming typedef. */
5738
5739 static inline dw_die_ref
5740 strip_naming_typedef (tree type, dw_die_ref type_die)
5741 {
5742 if (type
5743 && TREE_CODE (type) == RECORD_TYPE
5744 && type_die
5745 && type_die->die_tag == DW_TAG_typedef
5746 && is_naming_typedef_decl (TYPE_NAME (type)))
5747 type_die = get_AT_ref (type_die, DW_AT_type);
5748 return type_die;
5749 }
5750
5751 /* Like lookup_type_die, but if type is an anonymous type named by a
5752    typedef[1], return the DIE of the anonymous type instead of the one of
5753 the naming typedef. This is because in gen_typedef_die, we did
5754 equate the anonymous struct named by the typedef with the DIE of
5755 the naming typedef. So by default, lookup_type_die on an anonymous
5756 struct yields the DIE of the naming typedef.
5757
5758 [1]: Read the comment of is_naming_typedef_decl to learn about what
5759 a naming typedef is. */
5760
5761 static inline dw_die_ref
5762 lookup_type_die_strip_naming_typedef (tree type)
5763 {
5764 dw_die_ref die = lookup_type_die (type);
5765 return strip_naming_typedef (type, die);
5766 }
5767
5768 /* Equate a DIE to a given type specifier. */
5769
5770 static inline void
5771 equate_type_number_to_die (tree type, dw_die_ref type_die)
5772 {
5773 TYPE_SYMTAB_DIE (type) = type_die;
5774 }
5775
5776 /* Returns a hash value for X (which really is a die_struct). */
5777
5778 inline hashval_t
5779 decl_die_hasher::hash (die_node *x)
5780 {
5781 return (hashval_t) x->decl_id;
5782 }
5783
5784 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5785
5786 inline bool
5787 decl_die_hasher::equal (die_node *x, tree y)
5788 {
5789 return (x->decl_id == DECL_UID (y));
5790 }
5791
5792 /* Return the DIE associated with a given declaration. */
5793
5794 static inline dw_die_ref
5795 lookup_decl_die (tree decl)
5796 {
5797 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5798 NO_INSERT);
5799 if (!die)
5800 return NULL;
5801 if ((*die)->removed)
5802 {
5803 decl_die_table->clear_slot (die);
5804 return NULL;
5805 }
5806 return *die;
5807 }
5808
5809
5810 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5811    style reference.  Return true if we found one referring to a DIE for
5812    DECL, otherwise return false. */
5813
5814 static bool
5815 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5816 unsigned HOST_WIDE_INT *off)
5817 {
5818 dw_die_ref die;
5819
5820 if (flag_wpa && !decl_die_table)
5821 return false;
5822
5823 if (TREE_CODE (decl) == BLOCK)
5824 die = BLOCK_DIE (decl);
5825 else
5826 die = lookup_decl_die (decl);
5827 if (!die)
5828 return false;
5829
5830 /* During WPA stage we currently use DIEs to store the
5831 decl <-> label + offset map. That's quite inefficient but it
5832 works for now. */
5833 if (flag_wpa)
5834 {
5835 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5836 if (!ref)
5837 {
5838 gcc_assert (die == comp_unit_die ());
5839 return false;
5840 }
5841 *off = ref->die_offset;
5842 *sym = ref->die_id.die_symbol;
5843 return true;
5844 }
5845
5846 /* Similar to get_ref_die_offset_label, but using the "correct"
5847 label. */
5848 *off = die->die_offset;
5849 while (die->die_parent)
5850 die = die->die_parent;
5851 /* For the containing CU DIE we compute a die_symbol in
5852 compute_comp_unit_symbol. */
5853 gcc_assert (die->die_tag == DW_TAG_compile_unit
5854 && die->die_id.die_symbol != NULL);
5855 *sym = die->die_id.die_symbol;
5856 return true;
5857 }
5858
5859 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5860
5861 static void
5862 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5863 const char *symbol, HOST_WIDE_INT offset)
5864 {
5865 /* Create a fake DIE that contains the reference. Don't use
5866 new_die because we don't want to end up in the limbo list. */
5867 dw_die_ref ref = new_die_raw (die->die_tag);
5868 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5869 ref->die_offset = offset;
5870 ref->with_offset = 1;
5871 add_AT_die_ref (die, attr_kind, ref);
5872 }
5873
5874 /* Create a DIE for DECL if required and add a reference to a DIE
5875 at SYMBOL + OFFSET which contains attributes dumped early. */
5876
5877 static void
5878 dwarf2out_register_external_die (tree decl, const char *sym,
5879 unsigned HOST_WIDE_INT off)
5880 {
5881 if (debug_info_level == DINFO_LEVEL_NONE)
5882 return;
5883
5884 if (flag_wpa && !decl_die_table)
5885 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5886
5887 dw_die_ref die
5888 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5889 gcc_assert (!die);
5890
5891 tree ctx;
5892 dw_die_ref parent = NULL;
5893    /* Need to look up a DIE for the decl's context - the containing
5894 function or translation unit. */
5895 if (TREE_CODE (decl) == BLOCK)
5896 {
5897 ctx = BLOCK_SUPERCONTEXT (decl);
5898 /* ??? We do not output DIEs for all scopes thus skip as
5899 many DIEs as needed. */
5900 while (TREE_CODE (ctx) == BLOCK
5901 && !BLOCK_DIE (ctx))
5902 ctx = BLOCK_SUPERCONTEXT (ctx);
5903 }
5904 else
5905 ctx = DECL_CONTEXT (decl);
5906 while (ctx && TYPE_P (ctx))
5907 ctx = TYPE_CONTEXT (ctx);
5908 if (ctx)
5909 {
5910 if (TREE_CODE (ctx) == BLOCK)
5911 parent = BLOCK_DIE (ctx);
5912 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5913 /* Keep the 1:1 association during WPA. */
5914 && !flag_wpa)
5915 /* Otherwise all late annotations go to the main CU which
5916 imports the original CUs. */
5917 parent = comp_unit_die ();
5918 else if (TREE_CODE (ctx) == FUNCTION_DECL
5919 && TREE_CODE (decl) != PARM_DECL
5920 && TREE_CODE (decl) != BLOCK)
5921 /* Leave function local entities parent determination to when
5922 we process scope vars. */
5923 ;
5924 else
5925 parent = lookup_decl_die (ctx);
5926 }
5927 else
5928 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5929 Handle this case gracefully by globalizing stuff. */
5930 parent = comp_unit_die ();
5931 /* Create a DIE "stub". */
5932 switch (TREE_CODE (decl))
5933 {
5934 case TRANSLATION_UNIT_DECL:
5935 if (! flag_wpa)
5936 {
5937 die = comp_unit_die ();
5938 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5939 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5940 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5941 to create a DIE for the original CUs. */
5942 return;
5943 }
5944 /* Keep the 1:1 association during WPA. */
5945 die = new_die (DW_TAG_compile_unit, NULL, decl);
5946 break;
5947 case NAMESPACE_DECL:
5948 if (is_fortran (decl))
5949 die = new_die (DW_TAG_module, parent, decl);
5950 else
5951 die = new_die (DW_TAG_namespace, parent, decl);
5952 break;
5953 case FUNCTION_DECL:
5954 die = new_die (DW_TAG_subprogram, parent, decl);
5955 break;
5956 case VAR_DECL:
5957 die = new_die (DW_TAG_variable, parent, decl);
5958 break;
5959 case RESULT_DECL:
5960 die = new_die (DW_TAG_variable, parent, decl);
5961 break;
5962 case PARM_DECL:
5963 die = new_die (DW_TAG_formal_parameter, parent, decl);
5964 break;
5965 case CONST_DECL:
5966 die = new_die (DW_TAG_constant, parent, decl);
5967 break;
5968 case LABEL_DECL:
5969 die = new_die (DW_TAG_label, parent, decl);
5970 break;
5971 case BLOCK:
5972 die = new_die (DW_TAG_lexical_block, parent, decl);
5973 break;
5974 default:
5975 gcc_unreachable ();
5976 }
5977 if (TREE_CODE (decl) == BLOCK)
5978 BLOCK_DIE (decl) = die;
5979 else
5980 equate_decl_number_to_die (decl, die);
5981
5982 /* Add a reference to the DIE providing early debug at $sym + off. */
5983 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5984 }
5985
5986 /* Returns a hash value for X (which really is a var_loc_list). */
5987
5988 inline hashval_t
5989 decl_loc_hasher::hash (var_loc_list *x)
5990 {
5991 return (hashval_t) x->decl_id;
5992 }
5993
5994 /* Return nonzero if decl_id of var_loc_list X is the same as
5995 UID of decl *Y. */
5996
5997 inline bool
5998 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5999 {
6000 return (x->decl_id == DECL_UID (y));
6001 }
6002
6003 /* Return the var_loc list associated with a given declaration. */
6004
6005 static inline var_loc_list *
6006 lookup_decl_loc (const_tree decl)
6007 {
6008 if (!decl_loc_table)
6009 return NULL;
6010 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6011 }
6012
6013 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6014
6015 inline hashval_t
6016 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6017 {
6018 return (hashval_t) x->decl_id;
6019 }
6020
6021 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6022 UID of decl *Y. */
6023
6024 inline bool
6025 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6026 {
6027 return (x->decl_id == DECL_UID (y));
6028 }
6029
6030 /* Equate a DIE to a particular declaration. */
6031
6032 static void
6033 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6034 {
6035 unsigned int decl_id = DECL_UID (decl);
6036
6037 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6038 decl_die->decl_id = decl_id;
6039 }
6040
6041 /* Return how many bits the PIECE EXPR_LIST covers. */
6042
6043 static HOST_WIDE_INT
6044 decl_piece_bitsize (rtx piece)
6045 {
6046 int ret = (int) GET_MODE (piece);
6047 if (ret)
6048 return ret;
6049 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6050 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6051 return INTVAL (XEXP (XEXP (piece, 0), 0));
6052 }
6053
6054 /* Return a pointer to the location note stored in PIECE EXPR_LIST. */
6055
6056 static rtx *
6057 decl_piece_varloc_ptr (rtx piece)
6058 {
6059 if ((int) GET_MODE (piece))
6060 return &XEXP (piece, 0);
6061 else
6062 return &XEXP (XEXP (piece, 0), 1);
6063 }
6064
6065 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6066    NEXT is the chain of following piece nodes. */
6067
6068 static rtx_expr_list *
6069 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6070 {
6071 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6072 return alloc_EXPR_LIST (bitsize, loc_note, next);
6073 else
6074 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6075 GEN_INT (bitsize),
6076 loc_note), next);
6077 }
6078
6079 /* Return rtx that should be stored into loc field for
6080 LOC_NOTE and BITPOS/BITSIZE. */
6081
6082 static rtx
6083 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6084 HOST_WIDE_INT bitsize)
6085 {
6086 if (bitsize != -1)
6087 {
6088 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6089 if (bitpos != 0)
6090 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6091 }
6092 return loc_note;
6093 }
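/* Worked example (illustrative, not from the original sources): for a
   variable whose bits [32, 64) are described by LOC_NOTE,
   construct_piece_list (loc_note, 32, 32) yields

       [32-bit padding piece, NULL_RTX] -> [32-bit piece, LOC_NOTE]

   Small bit sizes are stored in the EXPR_LIST's mode field; sizes above
   MAX_MACHINE_MODE fall back to a CONCAT of (bitsize, loc_note), which
   is what decl_piece_bitsize and decl_piece_varloc_ptr decode.  */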
6094
6095 /* This function either modifies the location piece list *DEST in
6096    place (if SRC and INNER are NULL), or copies the location piece list
6097    *SRC to *DEST while modifying it.  The location at BITPOS is changed
6098    to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6099    copying, simply not copied), and padding is added around it if needed.
6100    When modifying in place, DEST should point to the EXPR_LIST where
6101    earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6102    to the start of the whole list and INNER points to the EXPR_LIST
6103    where earlier pieces cover PIECE_BITPOS bits. */
6104
6105 static void
6106 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6107 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6108 HOST_WIDE_INT bitsize, rtx loc_note)
6109 {
6110 HOST_WIDE_INT diff;
6111 bool copy = inner != NULL;
6112
6113 if (copy)
6114 {
6115 /* First copy all nodes preceding the current bitpos. */
6116 while (src != inner)
6117 {
6118 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6119 decl_piece_bitsize (*src), NULL_RTX);
6120 dest = &XEXP (*dest, 1);
6121 src = &XEXP (*src, 1);
6122 }
6123 }
6124 /* Add padding if needed. */
6125 if (bitpos != piece_bitpos)
6126 {
6127 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6128 copy ? NULL_RTX : *dest);
6129 dest = &XEXP (*dest, 1);
6130 }
6131 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6132 {
6133 gcc_assert (!copy);
6134       /* A piece with the correct bitpos and bitsize already exists;
6135 just update the location for it and return. */
6136 *decl_piece_varloc_ptr (*dest) = loc_note;
6137 return;
6138 }
6139 /* Add the piece that changed. */
6140 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6141 dest = &XEXP (*dest, 1);
6142 /* Skip over pieces that overlap it. */
6143 diff = bitpos - piece_bitpos + bitsize;
6144 if (!copy)
6145 src = dest;
6146 while (diff > 0 && *src)
6147 {
6148 rtx piece = *src;
6149 diff -= decl_piece_bitsize (piece);
6150 if (copy)
6151 src = &XEXP (piece, 1);
6152 else
6153 {
6154 *src = XEXP (piece, 1);
6155 free_EXPR_LIST_node (piece);
6156 }
6157 }
6158 /* Add padding if needed. */
6159 if (diff < 0 && *src)
6160 {
6161 if (!copy)
6162 dest = src;
6163 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6164 dest = &XEXP (*dest, 1);
6165 }
6166 if (!copy)
6167 return;
6168 /* Finally copy all nodes following it. */
6169 while (*src)
6170 {
6171 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6172 decl_piece_bitsize (*src), NULL_RTX);
6173 dest = &XEXP (*dest, 1);
6174 src = &XEXP (*src, 1);
6175 }
6176 }
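/* Worked example (illustrative, not from the original sources) of the
   in-place case of adjust_piece_list: suppose the existing list has
   pieces covering [0, 32) and [32, 64) and DEST points at the link
   holding the second piece, so PIECE_BITPOS == 32.  When a new LOC_NOTE
   arrives for bits [32, 64), BITPOS equals PIECE_BITPOS and the piece at
   *DEST already has the requested BITSIZE, so the "already exists"
   branch above simply overwrites that piece's location note and returns
   without changing the list structure.  */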
6177
6178 /* Add a variable location node to the linked list for DECL. */
6179
6180 static struct var_loc_node *
6181 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6182 {
6183 unsigned int decl_id;
6184 var_loc_list *temp;
6185 struct var_loc_node *loc = NULL;
6186 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6187
6188 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6189 {
6190 tree realdecl = DECL_DEBUG_EXPR (decl);
6191 if (handled_component_p (realdecl)
6192 || (TREE_CODE (realdecl) == MEM_REF
6193 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6194 {
6195 bool reverse;
6196 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6197 &bitsize, &reverse);
6198 if (!innerdecl
6199 || !DECL_P (innerdecl)
6200 || DECL_IGNORED_P (innerdecl)
6201 || TREE_STATIC (innerdecl)
6202 || bitsize == 0
6203 || bitpos + bitsize > 256)
6204 return NULL;
6205 decl = innerdecl;
6206 }
6207 }
6208
6209 decl_id = DECL_UID (decl);
6210 var_loc_list **slot
6211 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6212 if (*slot == NULL)
6213 {
6214 temp = ggc_cleared_alloc<var_loc_list> ();
6215 temp->decl_id = decl_id;
6216 *slot = temp;
6217 }
6218 else
6219 temp = *slot;
6220
6221 /* For PARM_DECLs try to keep around the original incoming value,
6222 even if that means we'll emit a zero-range .debug_loc entry. */
6223 if (temp->last
6224 && temp->first == temp->last
6225 && TREE_CODE (decl) == PARM_DECL
6226 && NOTE_P (temp->first->loc)
6227 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6228 && DECL_INCOMING_RTL (decl)
6229 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6230 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6231 == GET_CODE (DECL_INCOMING_RTL (decl))
6232 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6233 && (bitsize != -1
6234 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6235 NOTE_VAR_LOCATION_LOC (loc_note))
6236 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6237 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6238 {
6239 loc = ggc_cleared_alloc<var_loc_node> ();
6240 temp->first->next = loc;
6241 temp->last = loc;
6242 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6243 }
6244 else if (temp->last)
6245 {
6246 struct var_loc_node *last = temp->last, *unused = NULL;
6247 rtx *piece_loc = NULL, last_loc_note;
6248 HOST_WIDE_INT piece_bitpos = 0;
6249 if (last->next)
6250 {
6251 last = last->next;
6252 gcc_assert (last->next == NULL);
6253 }
6254 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6255 {
6256 piece_loc = &last->loc;
6257 do
6258 {
6259 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6260 if (piece_bitpos + cur_bitsize > bitpos)
6261 break;
6262 piece_bitpos += cur_bitsize;
6263 piece_loc = &XEXP (*piece_loc, 1);
6264 }
6265 while (*piece_loc);
6266 }
6267       /* TEMP->LAST here points either to the last-but-one or to the
6268 	 last element in the chained list; LAST points to the
6269 	 last element. */
6270 if (label && strcmp (last->label, label) == 0 && last->view == view)
6271 {
6272 	  /* For SRA-optimized variables, if there weren't any real
6273 	     insns since the last note, just modify the last node. */
6274 if (piece_loc != NULL)
6275 {
6276 adjust_piece_list (piece_loc, NULL, NULL,
6277 bitpos, piece_bitpos, bitsize, loc_note);
6278 return NULL;
6279 }
6280 /* If the last note doesn't cover any instructions, remove it. */
6281 if (temp->last != last)
6282 {
6283 temp->last->next = NULL;
6284 unused = last;
6285 last = temp->last;
6286 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6287 }
6288 else
6289 {
6290 gcc_assert (temp->first == temp->last
6291 || (temp->first->next == temp->last
6292 && TREE_CODE (decl) == PARM_DECL));
6293 memset (temp->last, '\0', sizeof (*temp->last));
6294 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6295 return temp->last;
6296 }
6297 }
6298 if (bitsize == -1 && NOTE_P (last->loc))
6299 last_loc_note = last->loc;
6300 else if (piece_loc != NULL
6301 && *piece_loc != NULL_RTX
6302 && piece_bitpos == bitpos
6303 && decl_piece_bitsize (*piece_loc) == bitsize)
6304 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6305 else
6306 last_loc_note = NULL_RTX;
6307 /* If the current location is the same as the end of the list,
6308 and either both or neither of the locations is uninitialized,
6309 we have nothing to do. */
6310 if (last_loc_note == NULL_RTX
6311 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6312 NOTE_VAR_LOCATION_LOC (loc_note)))
6313 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6314 != NOTE_VAR_LOCATION_STATUS (loc_note))
6315 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6316 == VAR_INIT_STATUS_UNINITIALIZED)
6317 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6318 == VAR_INIT_STATUS_UNINITIALIZED))))
6319 {
6320 /* Add LOC to the end of list and update LAST. If the last
6321 element of the list has been removed above, reuse its
6322 memory for the new node, otherwise allocate a new one. */
6323 if (unused)
6324 {
6325 loc = unused;
6326 memset (loc, '\0', sizeof (*loc));
6327 }
6328 else
6329 loc = ggc_cleared_alloc<var_loc_node> ();
6330 if (bitsize == -1 || piece_loc == NULL)
6331 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6332 else
6333 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6334 bitpos, piece_bitpos, bitsize, loc_note);
6335 last->next = loc;
6336 /* Ensure TEMP->LAST will point either to the new last but one
6337 element of the chain, or to the last element in it. */
6338 if (last != temp->last)
6339 temp->last = last;
6340 }
6341 else if (unused)
6342 ggc_free (unused);
6343 }
6344 else
6345 {
6346 loc = ggc_cleared_alloc<var_loc_node> ();
6347 temp->first = loc;
6348 temp->last = loc;
6349 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6350 }
6351 return loc;
6352 }
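/* Note (illustrative, not from the original sources): the label/view
   comparison in add_var_loc_to_decl means that consecutive var-location
   notes emitted at the same assembler label (i.e. with no real
   instructions in between) collapse into a single node: the later note
   replaces or adjusts the last node instead of growing the list, so the
   eventual location-list ranges do not describe empty address ranges,
   except for the deliberately preserved incoming value of a PARM_DECL
   mentioned above.  */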
6353 \f
6354 /* Keep track of the number of spaces used to indent the
6355 output of the debugging routines that print the structure of
6356 the DIE internal representation. */
6357 static int print_indent;
6358
6359 /* Indent the line the number of spaces given by print_indent. */
6360
6361 static inline void
6362 print_spaces (FILE *outfile)
6363 {
6364 fprintf (outfile, "%*s", print_indent, "");
6365 }
6366
6367 /* Print a type signature in hex. */
6368
6369 static inline void
6370 print_signature (FILE *outfile, char *sig)
6371 {
6372 int i;
6373
6374 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6375 fprintf (outfile, "%02x", sig[i] & 0xff);
6376 }
6377
6378 static inline void
6379 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6380 {
6381 if (discr_value->pos)
6382     fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6383   else
6384     fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6385 }
6386
6387 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6388
6389 /* Print the value associated with the VAL DWARF value node to OUTFILE.  If
6390 RECURSE, output location descriptor operations. */
6391
6392 static void
6393 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6394 {
6395 switch (val->val_class)
6396 {
6397 case dw_val_class_addr:
6398 fprintf (outfile, "address");
6399 break;
6400 case dw_val_class_offset:
6401 fprintf (outfile, "offset");
6402 break;
6403 case dw_val_class_loc:
6404 fprintf (outfile, "location descriptor");
6405 if (val->v.val_loc == NULL)
6406 fprintf (outfile, " -> <null>\n");
6407 else if (recurse)
6408 {
6409 fprintf (outfile, ":\n");
6410 print_indent += 4;
6411 print_loc_descr (val->v.val_loc, outfile);
6412 print_indent -= 4;
6413 }
6414 else
6415 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6416 break;
6417 case dw_val_class_loc_list:
6418 fprintf (outfile, "location list -> label:%s",
6419 val->v.val_loc_list->ll_symbol);
6420 break;
6421 case dw_val_class_view_list:
6422 val = view_list_to_loc_list_val_node (val);
6423 fprintf (outfile, "location list with views -> labels:%s and %s",
6424 val->v.val_loc_list->ll_symbol,
6425 val->v.val_loc_list->vl_symbol);
6426 break;
6427 case dw_val_class_range_list:
6428 fprintf (outfile, "range list");
6429 break;
6430 case dw_val_class_const:
6431 case dw_val_class_const_implicit:
6432 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6433 break;
6434 case dw_val_class_unsigned_const:
6435 case dw_val_class_unsigned_const_implicit:
6436 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6437 break;
6438 case dw_val_class_const_double:
6439 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6440 HOST_WIDE_INT_PRINT_UNSIGNED")",
6441 val->v.val_double.high,
6442 val->v.val_double.low);
6443 break;
6444 case dw_val_class_wide_int:
6445 {
6446 int i = val->v.val_wide->get_len ();
6447 fprintf (outfile, "constant (");
6448 gcc_assert (i > 0);
6449 if (val->v.val_wide->elt (i - 1) == 0)
6450 fprintf (outfile, "0x");
6451 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6452 val->v.val_wide->elt (--i));
6453 while (--i >= 0)
6454 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6455 val->v.val_wide->elt (i));
6456 fprintf (outfile, ")");
6457 break;
6458 }
6459 case dw_val_class_vec:
6460 fprintf (outfile, "floating-point or vector constant");
6461 break;
6462 case dw_val_class_flag:
6463 fprintf (outfile, "%u", val->v.val_flag);
6464 break;
6465 case dw_val_class_die_ref:
6466 if (val->v.val_die_ref.die != NULL)
6467 {
6468 dw_die_ref die = val->v.val_die_ref.die;
6469
6470 if (die->comdat_type_p)
6471 {
6472 fprintf (outfile, "die -> signature: ");
6473 print_signature (outfile,
6474 die->die_id.die_type_node->signature);
6475 }
6476 else if (die->die_id.die_symbol)
6477 {
6478 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6479 if (die->with_offset)
6480 fprintf (outfile, " + %ld", die->die_offset);
6481 }
6482 else
6483 fprintf (outfile, "die -> %ld", die->die_offset);
6484 fprintf (outfile, " (%p)", (void *) die);
6485 }
6486 else
6487 fprintf (outfile, "die -> <null>");
6488 break;
6489 case dw_val_class_vms_delta:
6490 fprintf (outfile, "delta: @slotcount(%s-%s)",
6491 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6492 break;
6493 case dw_val_class_symview:
6494 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6495 break;
6496 case dw_val_class_lbl_id:
6497 case dw_val_class_lineptr:
6498 case dw_val_class_macptr:
6499 case dw_val_class_loclistsptr:
6500 case dw_val_class_high_pc:
6501 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6502 break;
6503 case dw_val_class_str:
6504 if (val->v.val_str->str != NULL)
6505 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6506 else
6507 fprintf (outfile, "<null>");
6508 break;
6509 case dw_val_class_file:
6510 case dw_val_class_file_implicit:
6511 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6512 val->v.val_file->emitted_number);
6513 break;
6514 case dw_val_class_data8:
6515 {
6516 int i;
6517
6518 for (i = 0; i < 8; i++)
6519 fprintf (outfile, "%02x", val->v.val_data8[i]);
6520 break;
6521 }
6522 case dw_val_class_discr_value:
6523 print_discr_value (outfile, &val->v.val_discr_value);
6524 break;
6525 case dw_val_class_discr_list:
6526 for (dw_discr_list_ref node = val->v.val_discr_list;
6527 node != NULL;
6528 node = node->dw_discr_next)
6529 {
6530 if (node->dw_discr_range)
6531 {
6532 fprintf (outfile, " .. ");
6533 print_discr_value (outfile, &node->dw_discr_lower_bound);
6534 print_discr_value (outfile, &node->dw_discr_upper_bound);
6535 }
6536 else
6537 print_discr_value (outfile, &node->dw_discr_lower_bound);
6538
6539 if (node->dw_discr_next != NULL)
6540 fprintf (outfile, " | ");
6541 }
6542 default:
6543 break;
6544 }
6545 }
6546
6547 /* Likewise, for a DIE attribute. */
6548
6549 static void
6550 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6551 {
6552 print_dw_val (&a->dw_attr_val, recurse, outfile);
6553 }
6554
6555
6556 /* Print the list of operands in the LOC location description to OUTFILE. This
6557 routine is a debugging aid only. */
6558
6559 static void
6560 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6561 {
6562 dw_loc_descr_ref l = loc;
6563
6564 if (loc == NULL)
6565 {
6566 print_spaces (outfile);
6567 fprintf (outfile, "<null>\n");
6568 return;
6569 }
6570
6571 for (l = loc; l != NULL; l = l->dw_loc_next)
6572 {
6573 print_spaces (outfile);
6574 fprintf (outfile, "(%p) %s",
6575 (void *) l,
6576 dwarf_stack_op_name (l->dw_loc_opc));
6577 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6578 {
6579 fprintf (outfile, " ");
6580 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6581 }
6582 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6583 {
6584 fprintf (outfile, ", ");
6585 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6586 }
6587 fprintf (outfile, "\n");
6588 }
6589 }
6590
6591 /* Print the information associated with a given DIE, and its children.
6592 This routine is a debugging aid only. */
6593
6594 static void
6595 print_die (dw_die_ref die, FILE *outfile)
6596 {
6597 dw_attr_node *a;
6598 dw_die_ref c;
6599 unsigned ix;
6600
6601 print_spaces (outfile);
6602 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6603 die->die_offset, dwarf_tag_name (die->die_tag),
6604 (void*) die);
6605 print_spaces (outfile);
6606 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6607 fprintf (outfile, " offset: %ld", die->die_offset);
6608 fprintf (outfile, " mark: %d\n", die->die_mark);
6609
6610 if (die->comdat_type_p)
6611 {
6612 print_spaces (outfile);
6613 fprintf (outfile, " signature: ");
6614 print_signature (outfile, die->die_id.die_type_node->signature);
6615 fprintf (outfile, "\n");
6616 }
6617
6618 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6619 {
6620 print_spaces (outfile);
6621 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6622
6623 print_attribute (a, true, outfile);
6624 fprintf (outfile, "\n");
6625 }
6626
6627 if (die->die_child != NULL)
6628 {
6629 print_indent += 4;
6630 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6631 print_indent -= 4;
6632 }
6633 if (print_indent == 0)
6634 fprintf (outfile, "\n");
6635 }
6636
6637 /* Print the list of operations in the LOC location description. */
6638
6639 DEBUG_FUNCTION void
6640 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6641 {
6642 print_loc_descr (loc, stderr);
6643 }
6644
6645 /* Print the information collected for a given DIE. */
6646
6647 DEBUG_FUNCTION void
6648 debug_dwarf_die (dw_die_ref die)
6649 {
6650 print_die (die, stderr);
6651 }
6652
6653 DEBUG_FUNCTION void
6654 debug (die_struct &ref)
6655 {
6656 print_die (&ref, stderr);
6657 }
6658
6659 DEBUG_FUNCTION void
6660 debug (die_struct *ptr)
6661 {
6662 if (ptr)
6663 debug (*ptr);
6664 else
6665 fprintf (stderr, "<nil>\n");
6666 }
6667
6668
6669 /* Print all DWARF information collected for the compilation unit.
6670 This routine is a debugging aid only. */
6671
6672 DEBUG_FUNCTION void
6673 debug_dwarf (void)
6674 {
6675 print_indent = 0;
6676 print_die (comp_unit_die (), stderr);
6677 }
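/* Note (illustrative, not from the original sources): the DEBUG_FUNCTION
   helpers above are meant to be called by hand from a debugger while the
   compiler is stopped at a breakpoint, e.g.

       (gdb) call debug_dwarf ()
       (gdb) call debug_dwarf_die (die)

   rather than from compiler code itself.  */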
6678
6679 /* Verify the DIE tree structure. */
6680
6681 DEBUG_FUNCTION void
6682 verify_die (dw_die_ref die)
6683 {
6684 gcc_assert (!die->die_mark);
6685 if (die->die_parent == NULL
6686 && die->die_sib == NULL)
6687 return;
6688 /* Verify the die_sib list is cyclic. */
6689 dw_die_ref x = die;
6690 do
6691 {
6692 x->die_mark = 1;
6693 x = x->die_sib;
6694 }
6695 while (x && !x->die_mark);
6696 gcc_assert (x == die);
6697 x = die;
6698 do
6699 {
6700 /* Verify all dies have the same parent. */
6701 gcc_assert (x->die_parent == die->die_parent);
6702 if (x->die_child)
6703 {
6704 /* Verify the child has the proper parent and recurse. */
6705 gcc_assert (x->die_child->die_parent == x);
6706 verify_die (x->die_child);
6707 }
6708 x->die_mark = 0;
6709 x = x->die_sib;
6710 }
6711 while (x && x->die_mark);
6712 }
6713
6714 /* Sanity checks on DIEs. */
6715
6716 static void
6717 check_die (dw_die_ref die)
6718 {
6719 unsigned ix;
6720 dw_attr_node *a;
6721 bool inline_found = false;
6722 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6723 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6724 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6725 {
6726 switch (a->dw_attr)
6727 {
6728 case DW_AT_inline:
6729 if (a->dw_attr_val.v.val_unsigned)
6730 inline_found = true;
6731 break;
6732 case DW_AT_location:
6733 ++n_location;
6734 break;
6735 case DW_AT_low_pc:
6736 ++n_low_pc;
6737 break;
6738 case DW_AT_high_pc:
6739 ++n_high_pc;
6740 break;
6741 case DW_AT_artificial:
6742 ++n_artificial;
6743 break;
6744 case DW_AT_decl_column:
6745 ++n_decl_column;
6746 break;
6747 case DW_AT_decl_line:
6748 ++n_decl_line;
6749 break;
6750 case DW_AT_decl_file:
6751 ++n_decl_file;
6752 break;
6753 default:
6754 break;
6755 }
6756 }
6757 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6758 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6759 {
6760 fprintf (stderr, "Duplicate attributes in DIE:\n");
6761 debug_dwarf_die (die);
6762 gcc_unreachable ();
6763 }
6764 if (inline_found)
6765 {
6766 /* A debugging information entry that is a member of an abstract
6767 instance tree [that has DW_AT_inline] should not contain any
6768 attributes which describe aspects of the subroutine which vary
6769 between distinct inlined expansions or distinct out-of-line
6770 expansions. */
6771 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6772 gcc_assert (a->dw_attr != DW_AT_low_pc
6773 && a->dw_attr != DW_AT_high_pc
6774 && a->dw_attr != DW_AT_location
6775 && a->dw_attr != DW_AT_frame_base
6776 && a->dw_attr != DW_AT_call_all_calls
6777 && a->dw_attr != DW_AT_GNU_all_call_sites);
6778 }
6779 }
6780 \f
6781 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6782 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6783 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6784
6785 /* Calculate the checksum of a location expression. */
6786
6787 static inline void
6788 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6789 {
6790 int tem;
6791 inchash::hash hstate;
6792 hashval_t hash;
6793
6794 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6795 CHECKSUM (tem);
6796 hash_loc_operands (loc, hstate);
6797 hash = hstate.end();
6798 CHECKSUM (hash);
6799 }
6800
6801 /* Calculate the checksum of an attribute. */
6802
6803 static void
6804 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6805 {
6806 dw_loc_descr_ref loc;
6807 rtx r;
6808
6809 CHECKSUM (at->dw_attr);
6810
6811 /* We don't care that this was compiled with a different compiler
6812 snapshot; if the output is the same, that's what matters. */
6813 if (at->dw_attr == DW_AT_producer)
6814 return;
6815
6816 switch (AT_class (at))
6817 {
6818 case dw_val_class_const:
6819 case dw_val_class_const_implicit:
6820 CHECKSUM (at->dw_attr_val.v.val_int);
6821 break;
6822 case dw_val_class_unsigned_const:
6823 case dw_val_class_unsigned_const_implicit:
6824 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6825 break;
6826 case dw_val_class_const_double:
6827 CHECKSUM (at->dw_attr_val.v.val_double);
6828 break;
6829 case dw_val_class_wide_int:
6830 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6831 get_full_len (*at->dw_attr_val.v.val_wide)
6832 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6833 break;
6834 case dw_val_class_vec:
6835 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6836 (at->dw_attr_val.v.val_vec.length
6837 * at->dw_attr_val.v.val_vec.elt_size));
6838 break;
6839 case dw_val_class_flag:
6840 CHECKSUM (at->dw_attr_val.v.val_flag);
6841 break;
6842 case dw_val_class_str:
6843 CHECKSUM_STRING (AT_string (at));
6844 break;
6845
6846 case dw_val_class_addr:
6847 r = AT_addr (at);
6848 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6849 CHECKSUM_STRING (XSTR (r, 0));
6850 break;
6851
6852 case dw_val_class_offset:
6853 CHECKSUM (at->dw_attr_val.v.val_offset);
6854 break;
6855
6856 case dw_val_class_loc:
6857 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6858 loc_checksum (loc, ctx);
6859 break;
6860
6861 case dw_val_class_die_ref:
6862 die_checksum (AT_ref (at), ctx, mark);
6863 break;
6864
6865 case dw_val_class_fde_ref:
6866 case dw_val_class_vms_delta:
6867 case dw_val_class_symview:
6868 case dw_val_class_lbl_id:
6869 case dw_val_class_lineptr:
6870 case dw_val_class_macptr:
6871 case dw_val_class_loclistsptr:
6872 case dw_val_class_high_pc:
6873 break;
6874
6875 case dw_val_class_file:
6876 case dw_val_class_file_implicit:
6877 CHECKSUM_STRING (AT_file (at)->filename);
6878 break;
6879
6880 case dw_val_class_data8:
6881 CHECKSUM (at->dw_attr_val.v.val_data8);
6882 break;
6883
6884 default:
6885 break;
6886 }
6887 }
6888
6889 /* Calculate the checksum of a DIE. */
6890
6891 static void
6892 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6893 {
6894 dw_die_ref c;
6895 dw_attr_node *a;
6896 unsigned ix;
6897
6898 /* To avoid infinite recursion. */
6899 if (die->die_mark)
6900 {
6901 CHECKSUM (die->die_mark);
6902 return;
6903 }
6904 die->die_mark = ++(*mark);
6905
6906 CHECKSUM (die->die_tag);
6907
6908 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6909 attr_checksum (a, ctx, mark);
6910
6911 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6912 }
6913
6914 #undef CHECKSUM
6915 #undef CHECKSUM_BLOCK
6916 #undef CHECKSUM_STRING
6917
6918 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6919 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6920 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6921 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6922 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6923 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6924 #define CHECKSUM_ATTR(FOO) \
6925 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6926
6927 /* Calculate the checksum of a number in signed LEB128 format. */
6928
6929 static void
6930 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6931 {
6932 unsigned char byte;
6933 bool more;
6934
6935 while (1)
6936 {
6937 byte = (value & 0x7f);
6938 value >>= 7;
6939 more = !((value == 0 && (byte & 0x40) == 0)
6940 || (value == -1 && (byte & 0x40) != 0));
6941 if (more)
6942 byte |= 0x80;
6943 CHECKSUM (byte);
6944 if (!more)
6945 break;
6946 }
6947 }
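/* Worked examples (illustrative, not from the original sources) of the
   signed LEB128 bytes fed to the checksum above, assuming the usual
   arithmetic right shift on signed HOST_WIDE_INT:

       value    2  ->  0x02
       value   -2  ->  0x7e
       value  127  ->  0xff 0x00   (a continuation byte is needed, since
                                    0x7f alone would decode as -1)

   These agree with the SLEB128 examples in the DWARF standard.  */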
6948
6949 /* Calculate the checksum of a number in unsigned LEB128 format. */
6950
6951 static void
6952 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6953 {
6954 while (1)
6955 {
6956 unsigned char byte = (value & 0x7f);
6957 value >>= 7;
6958 if (value != 0)
6959 /* More bytes to follow. */
6960 byte |= 0x80;
6961 CHECKSUM (byte);
6962 if (value == 0)
6963 break;
6964 }
6965 }
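/* Worked examples (illustrative, not from the original sources) of the
   unsigned LEB128 bytes fed to the checksum above:

       value     127  ->  0x7f
       value     128  ->  0x80 0x01
       value  624485  ->  0xe5 0x8e 0x26

   matching the ULEB128 examples in the DWARF standard.  */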
6966
6967 /* Checksum the context of the DIE. This adds the names of any
6968 surrounding namespaces or structures to the checksum. */
6969
6970 static void
6971 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6972 {
6973 const char *name;
6974 dw_die_ref spec;
6975 int tag = die->die_tag;
6976
6977 if (tag != DW_TAG_namespace
6978 && tag != DW_TAG_structure_type
6979 && tag != DW_TAG_class_type)
6980 return;
6981
6982 name = get_AT_string (die, DW_AT_name);
6983
6984 spec = get_AT_ref (die, DW_AT_specification);
6985 if (spec != NULL)
6986 die = spec;
6987
6988 if (die->die_parent != NULL)
6989 checksum_die_context (die->die_parent, ctx);
6990
6991 CHECKSUM_ULEB128 ('C');
6992 CHECKSUM_ULEB128 (tag);
6993 if (name != NULL)
6994 CHECKSUM_STRING (name);
6995 }
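
/* Worked example (hypothetical DIEs, purely illustrative): for a type
   declared as  namespace A { struct S { ... }; } , checksumming the context
   of a DIE nested inside S adds, for each enclosing scope from the outside
   in, a 'C' marker, the scope's tag and its NUL-terminated name.  With
   DW_TAG_namespace = 0x39 and DW_TAG_structure_type = 0x13, the bytes fed
   to the MD5 context are:  */

static const unsigned char sketch_context_bytes[] =
{
  'C', 0x39, 'A', 0x00,         /* namespace A */
  'C', 0x13, 'S', 0x00          /* struct S */
};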
6996
6997 /* Calculate the checksum of a location expression. */
6998
6999 static inline void
7000 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7001 {
7002 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7003 were emitted as a DW_FORM_sdata instead of a location expression. */
7004 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7005 {
7006 CHECKSUM_ULEB128 (DW_FORM_sdata);
7007 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7008 return;
7009 }
7010
7011 /* Otherwise, just checksum the raw location expression. */
7012 while (loc != NULL)
7013 {
7014 inchash::hash hstate;
7015 hashval_t hash;
7016
7017 CHECKSUM_ULEB128 (loc->dtprel);
7018 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7019 hash_loc_operands (loc, hstate);
7020 hash = hstate.end ();
7021 CHECKSUM (hash);
7022 loc = loc->dw_loc_next;
7023 }
7024 }
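
/* Worked example (illustrative only): a DW_AT_data_member_location of 12
   can be emitted either as a plain constant or as the one-element
   expression DW_OP_plus_uconst 12.  Thanks to the special case above, both
   forms contribute the same two bytes -- DW_FORM_sdata (0x0d) followed by
   SLEB128(12) -- so the choice of representation does not change the type
   signature:  */

static const unsigned char sketch_plus_uconst_bytes[] = { 0x0d, 0x0c };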
7025
7026 /* Calculate the checksum of an attribute. */
7027
7028 static void
7029 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7030 struct md5_ctx *ctx, int *mark)
7031 {
7032 dw_loc_descr_ref loc;
7033 rtx r;
7034
7035 if (AT_class (at) == dw_val_class_die_ref)
7036 {
7037 dw_die_ref target_die = AT_ref (at);
7038
7039 /* For pointer and reference types, we checksum only the (qualified)
7040 name of the target type (if there is a name). For friend entries,
7041 we checksum only the (qualified) name of the target type or function.
7042 This allows the checksum to remain the same whether the target type
7043 is complete or not. */
7044 if ((at->dw_attr == DW_AT_type
7045 && (tag == DW_TAG_pointer_type
7046 || tag == DW_TAG_reference_type
7047 || tag == DW_TAG_rvalue_reference_type
7048 || tag == DW_TAG_ptr_to_member_type))
7049 || (at->dw_attr == DW_AT_friend
7050 && tag == DW_TAG_friend))
7051 {
7052 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7053
7054 if (name_attr != NULL)
7055 {
7056 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7057
7058 if (decl == NULL)
7059 decl = target_die;
7060 CHECKSUM_ULEB128 ('N');
7061 CHECKSUM_ULEB128 (at->dw_attr);
7062 if (decl->die_parent != NULL)
7063 checksum_die_context (decl->die_parent, ctx);
7064 CHECKSUM_ULEB128 ('E');
7065 CHECKSUM_STRING (AT_string (name_attr));
7066 return;
7067 }
7068 }
7069
7070 /* For all other references to another DIE, we check to see if the
7071 target DIE has already been visited. If it has, we emit a
7072 backward reference; if not, we descend recursively. */
7073 if (target_die->die_mark > 0)
7074 {
7075 CHECKSUM_ULEB128 ('R');
7076 CHECKSUM_ULEB128 (at->dw_attr);
7077 CHECKSUM_ULEB128 (target_die->die_mark);
7078 }
7079 else
7080 {
7081 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7082
7083 if (decl == NULL)
7084 decl = target_die;
7085 target_die->die_mark = ++(*mark);
7086 CHECKSUM_ULEB128 ('T');
7087 CHECKSUM_ULEB128 (at->dw_attr);
7088 if (decl->die_parent != NULL)
7089 checksum_die_context (decl->die_parent, ctx);
7090 die_checksum_ordered (target_die, ctx, mark);
7091 }
7092 return;
7093 }
7094
7095 CHECKSUM_ULEB128 ('A');
7096 CHECKSUM_ULEB128 (at->dw_attr);
7097
7098 switch (AT_class (at))
7099 {
7100 case dw_val_class_const:
7101 case dw_val_class_const_implicit:
7102 CHECKSUM_ULEB128 (DW_FORM_sdata);
7103 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7104 break;
7105
7106 case dw_val_class_unsigned_const:
7107 case dw_val_class_unsigned_const_implicit:
7108 CHECKSUM_ULEB128 (DW_FORM_sdata);
7109 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7110 break;
7111
7112 case dw_val_class_const_double:
7113 CHECKSUM_ULEB128 (DW_FORM_block);
7114 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7115 CHECKSUM (at->dw_attr_val.v.val_double);
7116 break;
7117
7118 case dw_val_class_wide_int:
7119 CHECKSUM_ULEB128 (DW_FORM_block);
7120 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7121 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7122 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7123 get_full_len (*at->dw_attr_val.v.val_wide)
7124 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7125 break;
7126
7127 case dw_val_class_vec:
7128 CHECKSUM_ULEB128 (DW_FORM_block);
7129 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7130 * at->dw_attr_val.v.val_vec.elt_size);
7131 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7132 (at->dw_attr_val.v.val_vec.length
7133 * at->dw_attr_val.v.val_vec.elt_size));
7134 break;
7135
7136 case dw_val_class_flag:
7137 CHECKSUM_ULEB128 (DW_FORM_flag);
7138 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7139 break;
7140
7141 case dw_val_class_str:
7142 CHECKSUM_ULEB128 (DW_FORM_string);
7143 CHECKSUM_STRING (AT_string (at));
7144 break;
7145
7146 case dw_val_class_addr:
7147 r = AT_addr (at);
7148 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7149 CHECKSUM_ULEB128 (DW_FORM_string);
7150 CHECKSUM_STRING (XSTR (r, 0));
7151 break;
7152
7153 case dw_val_class_offset:
7154 CHECKSUM_ULEB128 (DW_FORM_sdata);
7155 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7156 break;
7157
7158 case dw_val_class_loc:
7159 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7160 loc_checksum_ordered (loc, ctx);
7161 break;
7162
7163 case dw_val_class_fde_ref:
7164 case dw_val_class_symview:
7165 case dw_val_class_lbl_id:
7166 case dw_val_class_lineptr:
7167 case dw_val_class_macptr:
7168 case dw_val_class_loclistsptr:
7169 case dw_val_class_high_pc:
7170 break;
7171
7172 case dw_val_class_file:
7173 case dw_val_class_file_implicit:
7174 CHECKSUM_ULEB128 (DW_FORM_string);
7175 CHECKSUM_STRING (AT_file (at)->filename);
7176 break;
7177
7178 case dw_val_class_data8:
7179 CHECKSUM (at->dw_attr_val.v.val_data8);
7180 break;
7181
7182 default:
7183 break;
7184 }
7185 }
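
/* Worked example (hypothetical DIEs, illustrative only): for a
   DW_TAG_pointer_type whose DW_AT_type (0x49) refers to  struct S  declared
   in  namespace A , the die_ref handling above checksums only the qualified
   name of the target -- an 'N' marker, the attribute code, the target's
   context, an 'E' marker and the name -- rather than recursing into S, so
   the pointer's signature is the same whether S is complete or only
   declared.  The bytes are:  */

static const unsigned char sketch_named_ref_bytes[] =
{
  'N', 0x49,                    /* reference-by-name marker, DW_AT_type */
  'C', 0x39, 'A', 0x00,         /* context: namespace A */
  'E', 'S', 0x00                /* name of the target type */
};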
7186
7187 struct checksum_attributes
7188 {
7189 dw_attr_node *at_name;
7190 dw_attr_node *at_type;
7191 dw_attr_node *at_friend;
7192 dw_attr_node *at_accessibility;
7193 dw_attr_node *at_address_class;
7194 dw_attr_node *at_alignment;
7195 dw_attr_node *at_allocated;
7196 dw_attr_node *at_artificial;
7197 dw_attr_node *at_associated;
7198 dw_attr_node *at_binary_scale;
7199 dw_attr_node *at_bit_offset;
7200 dw_attr_node *at_bit_size;
7201 dw_attr_node *at_bit_stride;
7202 dw_attr_node *at_byte_size;
7203 dw_attr_node *at_byte_stride;
7204 dw_attr_node *at_const_value;
7205 dw_attr_node *at_containing_type;
7206 dw_attr_node *at_count;
7207 dw_attr_node *at_data_location;
7208 dw_attr_node *at_data_member_location;
7209 dw_attr_node *at_decimal_scale;
7210 dw_attr_node *at_decimal_sign;
7211 dw_attr_node *at_default_value;
7212 dw_attr_node *at_digit_count;
7213 dw_attr_node *at_discr;
7214 dw_attr_node *at_discr_list;
7215 dw_attr_node *at_discr_value;
7216 dw_attr_node *at_encoding;
7217 dw_attr_node *at_endianity;
7218 dw_attr_node *at_explicit;
7219 dw_attr_node *at_is_optional;
7220 dw_attr_node *at_location;
7221 dw_attr_node *at_lower_bound;
7222 dw_attr_node *at_mutable;
7223 dw_attr_node *at_ordering;
7224 dw_attr_node *at_picture_string;
7225 dw_attr_node *at_prototyped;
7226 dw_attr_node *at_small;
7227 dw_attr_node *at_segment;
7228 dw_attr_node *at_string_length;
7229 dw_attr_node *at_string_length_bit_size;
7230 dw_attr_node *at_string_length_byte_size;
7231 dw_attr_node *at_threads_scaled;
7232 dw_attr_node *at_upper_bound;
7233 dw_attr_node *at_use_location;
7234 dw_attr_node *at_use_UTF8;
7235 dw_attr_node *at_variable_parameter;
7236 dw_attr_node *at_virtuality;
7237 dw_attr_node *at_visibility;
7238 dw_attr_node *at_vtable_elem_location;
7239 };
7240
7241 /* Collect the attributes that we will want to use for the checksum. */
7242
7243 static void
7244 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7245 {
7246 dw_attr_node *a;
7247 unsigned ix;
7248
7249 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7250 {
7251 switch (a->dw_attr)
7252 {
7253 case DW_AT_name:
7254 attrs->at_name = a;
7255 break;
7256 case DW_AT_type:
7257 attrs->at_type = a;
7258 break;
7259 case DW_AT_friend:
7260 attrs->at_friend = a;
7261 break;
7262 case DW_AT_accessibility:
7263 attrs->at_accessibility = a;
7264 break;
7265 case DW_AT_address_class:
7266 attrs->at_address_class = a;
7267 break;
7268 case DW_AT_alignment:
7269 attrs->at_alignment = a;
7270 break;
7271 case DW_AT_allocated:
7272 attrs->at_allocated = a;
7273 break;
7274 case DW_AT_artificial:
7275 attrs->at_artificial = a;
7276 break;
7277 case DW_AT_associated:
7278 attrs->at_associated = a;
7279 break;
7280 case DW_AT_binary_scale:
7281 attrs->at_binary_scale = a;
7282 break;
7283 case DW_AT_bit_offset:
7284 attrs->at_bit_offset = a;
7285 break;
7286 case DW_AT_bit_size:
7287 attrs->at_bit_size = a;
7288 break;
7289 case DW_AT_bit_stride:
7290 attrs->at_bit_stride = a;
7291 break;
7292 case DW_AT_byte_size:
7293 attrs->at_byte_size = a;
7294 break;
7295 case DW_AT_byte_stride:
7296 attrs->at_byte_stride = a;
7297 break;
7298 case DW_AT_const_value:
7299 attrs->at_const_value = a;
7300 break;
7301 case DW_AT_containing_type:
7302 attrs->at_containing_type = a;
7303 break;
7304 case DW_AT_count:
7305 attrs->at_count = a;
7306 break;
7307 case DW_AT_data_location:
7308 attrs->at_data_location = a;
7309 break;
7310 case DW_AT_data_member_location:
7311 attrs->at_data_member_location = a;
7312 break;
7313 case DW_AT_decimal_scale:
7314 attrs->at_decimal_scale = a;
7315 break;
7316 case DW_AT_decimal_sign:
7317 attrs->at_decimal_sign = a;
7318 break;
7319 case DW_AT_default_value:
7320 attrs->at_default_value = a;
7321 break;
7322 case DW_AT_digit_count:
7323 attrs->at_digit_count = a;
7324 break;
7325 case DW_AT_discr:
7326 attrs->at_discr = a;
7327 break;
7328 case DW_AT_discr_list:
7329 attrs->at_discr_list = a;
7330 break;
7331 case DW_AT_discr_value:
7332 attrs->at_discr_value = a;
7333 break;
7334 case DW_AT_encoding:
7335 attrs->at_encoding = a;
7336 break;
7337 case DW_AT_endianity:
7338 attrs->at_endianity = a;
7339 break;
7340 case DW_AT_explicit:
7341 attrs->at_explicit = a;
7342 break;
7343 case DW_AT_is_optional:
7344 attrs->at_is_optional = a;
7345 break;
7346 case DW_AT_location:
7347 attrs->at_location = a;
7348 break;
7349 case DW_AT_lower_bound:
7350 attrs->at_lower_bound = a;
7351 break;
7352 case DW_AT_mutable:
7353 attrs->at_mutable = a;
7354 break;
7355 case DW_AT_ordering:
7356 attrs->at_ordering = a;
7357 break;
7358 case DW_AT_picture_string:
7359 attrs->at_picture_string = a;
7360 break;
7361 case DW_AT_prototyped:
7362 attrs->at_prototyped = a;
7363 break;
7364 case DW_AT_small:
7365 attrs->at_small = a;
7366 break;
7367 case DW_AT_segment:
7368 attrs->at_segment = a;
7369 break;
7370 case DW_AT_string_length:
7371 attrs->at_string_length = a;
7372 break;
7373 case DW_AT_string_length_bit_size:
7374 attrs->at_string_length_bit_size = a;
7375 break;
7376 case DW_AT_string_length_byte_size:
7377 attrs->at_string_length_byte_size = a;
7378 break;
7379 case DW_AT_threads_scaled:
7380 attrs->at_threads_scaled = a;
7381 break;
7382 case DW_AT_upper_bound:
7383 attrs->at_upper_bound = a;
7384 break;
7385 case DW_AT_use_location:
7386 attrs->at_use_location = a;
7387 break;
7388 case DW_AT_use_UTF8:
7389 attrs->at_use_UTF8 = a;
7390 break;
7391 case DW_AT_variable_parameter:
7392 attrs->at_variable_parameter = a;
7393 break;
7394 case DW_AT_virtuality:
7395 attrs->at_virtuality = a;
7396 break;
7397 case DW_AT_visibility:
7398 attrs->at_visibility = a;
7399 break;
7400 case DW_AT_vtable_elem_location:
7401 attrs->at_vtable_elem_location = a;
7402 break;
7403 default:
7404 break;
7405 }
7406 }
7407 }
7408
7409 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7410
7411 static void
7412 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7413 {
7414 dw_die_ref c;
7415 dw_die_ref decl;
7416 struct checksum_attributes attrs;
7417
7418 CHECKSUM_ULEB128 ('D');
7419 CHECKSUM_ULEB128 (die->die_tag);
7420
7421 memset (&attrs, 0, sizeof (attrs));
7422
7423 decl = get_AT_ref (die, DW_AT_specification);
7424 if (decl != NULL)
7425 collect_checksum_attributes (&attrs, decl);
7426 collect_checksum_attributes (&attrs, die);
7427
7428 CHECKSUM_ATTR (attrs.at_name);
7429 CHECKSUM_ATTR (attrs.at_accessibility);
7430 CHECKSUM_ATTR (attrs.at_address_class);
7431 CHECKSUM_ATTR (attrs.at_allocated);
7432 CHECKSUM_ATTR (attrs.at_artificial);
7433 CHECKSUM_ATTR (attrs.at_associated);
7434 CHECKSUM_ATTR (attrs.at_binary_scale);
7435 CHECKSUM_ATTR (attrs.at_bit_offset);
7436 CHECKSUM_ATTR (attrs.at_bit_size);
7437 CHECKSUM_ATTR (attrs.at_bit_stride);
7438 CHECKSUM_ATTR (attrs.at_byte_size);
7439 CHECKSUM_ATTR (attrs.at_byte_stride);
7440 CHECKSUM_ATTR (attrs.at_const_value);
7441 CHECKSUM_ATTR (attrs.at_containing_type);
7442 CHECKSUM_ATTR (attrs.at_count);
7443 CHECKSUM_ATTR (attrs.at_data_location);
7444 CHECKSUM_ATTR (attrs.at_data_member_location);
7445 CHECKSUM_ATTR (attrs.at_decimal_scale);
7446 CHECKSUM_ATTR (attrs.at_decimal_sign);
7447 CHECKSUM_ATTR (attrs.at_default_value);
7448 CHECKSUM_ATTR (attrs.at_digit_count);
7449 CHECKSUM_ATTR (attrs.at_discr);
7450 CHECKSUM_ATTR (attrs.at_discr_list);
7451 CHECKSUM_ATTR (attrs.at_discr_value);
7452 CHECKSUM_ATTR (attrs.at_encoding);
7453 CHECKSUM_ATTR (attrs.at_endianity);
7454 CHECKSUM_ATTR (attrs.at_explicit);
7455 CHECKSUM_ATTR (attrs.at_is_optional);
7456 CHECKSUM_ATTR (attrs.at_location);
7457 CHECKSUM_ATTR (attrs.at_lower_bound);
7458 CHECKSUM_ATTR (attrs.at_mutable);
7459 CHECKSUM_ATTR (attrs.at_ordering);
7460 CHECKSUM_ATTR (attrs.at_picture_string);
7461 CHECKSUM_ATTR (attrs.at_prototyped);
7462 CHECKSUM_ATTR (attrs.at_small);
7463 CHECKSUM_ATTR (attrs.at_segment);
7464 CHECKSUM_ATTR (attrs.at_string_length);
7465 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7466 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7467 CHECKSUM_ATTR (attrs.at_threads_scaled);
7468 CHECKSUM_ATTR (attrs.at_upper_bound);
7469 CHECKSUM_ATTR (attrs.at_use_location);
7470 CHECKSUM_ATTR (attrs.at_use_UTF8);
7471 CHECKSUM_ATTR (attrs.at_variable_parameter);
7472 CHECKSUM_ATTR (attrs.at_virtuality);
7473 CHECKSUM_ATTR (attrs.at_visibility);
7474 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7475 CHECKSUM_ATTR (attrs.at_type);
7476 CHECKSUM_ATTR (attrs.at_friend);
7477 CHECKSUM_ATTR (attrs.at_alignment);
7478
7479 /* Checksum the child DIEs. */
7480 c = die->die_child;
7481 if (c) do {
7482 dw_attr_node *name_attr;
7483
7484 c = c->die_sib;
7485 name_attr = get_AT (c, DW_AT_name);
7486 if (is_template_instantiation (c))
7487 {
7488 /* Ignore instantiations of member type and function templates. */
7489 }
7490 else if (name_attr != NULL
7491 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7492 {
7493 /* Use a shallow checksum for named nested types and member
7494 functions. */
7495 CHECKSUM_ULEB128 ('S');
7496 CHECKSUM_ULEB128 (c->die_tag);
7497 CHECKSUM_STRING (AT_string (name_attr));
7498 }
7499 else
7500 {
7501 /* Use a deep checksum for other children. */
7502 /* Mark this DIE so it gets processed when unmarking. */
7503 if (c->die_mark == 0)
7504 c->die_mark = -1;
7505 die_checksum_ordered (c, ctx, mark);
7506 }
7507 } while (c != die->die_child);
7508
7509 CHECKSUM_ULEB128 (0);
7510 }
7511
7512 /* Add a type name and tag to a hash. */
7513 static void
7514 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7515 {
7516 CHECKSUM_ULEB128 (tag);
7517 CHECKSUM_STRING (name);
7518 }
7519
7520 #undef CHECKSUM
7521 #undef CHECKSUM_STRING
7522 #undef CHECKSUM_ATTR
7523 #undef CHECKSUM_SLEB128
7524 #undef CHECKSUM_ULEB128
7525
7526 /* Generate the type signature for DIE. This is computed by generating an
7527 MD5 checksum over the DIE's tag, its relevant attributes, and its
7528 children. Attributes that are references to other DIEs are processed
7529 by recursion, using the MARK field to prevent infinite recursion.
7530 If the DIE is nested inside a namespace or another type, we also
7531 need to include that context in the signature. The lower 64 bits
7532 of the resulting MD5 checksum comprise the signature. */
7533
7534 static void
7535 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7536 {
7537 int mark;
7538 const char *name;
7539 unsigned char checksum[16];
7540 struct md5_ctx ctx;
7541 dw_die_ref decl;
7542 dw_die_ref parent;
7543
7544 name = get_AT_string (die, DW_AT_name);
7545 decl = get_AT_ref (die, DW_AT_specification);
7546 parent = get_die_parent (die);
7547
7548 /* First, compute a signature for just the type name (and its surrounding
7549 context, if any). This is stored in the type unit DIE for link-time
7550 ODR (one-definition rule) checking. */
7551
7552 if (is_cxx () && name != NULL)
7553 {
7554 md5_init_ctx (&ctx);
7555
7556 /* Checksum the names of surrounding namespaces and structures. */
7557 if (parent != NULL)
7558 checksum_die_context (parent, &ctx);
7559
7560 /* Checksum the current DIE. */
7561 die_odr_checksum (die->die_tag, name, &ctx);
7562 md5_finish_ctx (&ctx, checksum);
7563
7564 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7565 }
7566
7567 /* Next, compute the complete type signature. */
7568
7569 md5_init_ctx (&ctx);
7570 mark = 1;
7571 die->die_mark = mark;
7572
7573 /* Checksum the names of surrounding namespaces and structures. */
7574 if (parent != NULL)
7575 checksum_die_context (parent, &ctx);
7576
7577 /* Checksum the DIE and its children. */
7578 die_checksum_ordered (die, &ctx, &mark);
7579 unmark_all_dies (die);
7580 md5_finish_ctx (&ctx, checksum);
7581
7582 /* Store the signature in the type node and link the type DIE and the
7583 type node together. */
7584 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7585 DWARF_TYPE_SIGNATURE_SIZE);
7586 die->comdat_type_p = true;
7587 die->die_id.die_type_node = type_node;
7588 type_node->type_die = die;
7589
7590 /* If the DIE is a specification, link its declaration to the type node
7591 as well. */
7592 if (decl != NULL)
7593 {
7594 decl->comdat_type_p = true;
7595 decl->die_id.die_type_node = type_node;
7596 }
7597 }
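
/* A minimal sketch of the signature extraction above, assuming
   DWARF_TYPE_SIGNATURE_SIZE is 8 (the sig8 size): the signature is simply
   the trailing eight bytes of the 16-byte MD5 digest, the same slice that
   the DW_AT_GNU_odr_signature above takes of its name-only digest.
   Hypothetical helper, not used by this file:  */

static void
sketch_take_signature (const unsigned char digest[16], unsigned char sig[8])
{
  int i;

  /* Copy digest[8] .. digest[15] into the signature buffer.  */
  for (i = 0; i < 8; i++)
    sig[i] = digest[8 + i];
}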
7598
7599 /* Do the location expressions look the same? */
7600 static inline int
7601 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7602 {
7603 return loc1->dw_loc_opc == loc2->dw_loc_opc
7604 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7605 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7606 }
7607
7608 /* Do the values look the same? */
7609 static int
7610 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7611 {
7612 dw_loc_descr_ref loc1, loc2;
7613 rtx r1, r2;
7614
7615 if (v1->val_class != v2->val_class)
7616 return 0;
7617
7618 switch (v1->val_class)
7619 {
7620 case dw_val_class_const:
7621 case dw_val_class_const_implicit:
7622 return v1->v.val_int == v2->v.val_int;
7623 case dw_val_class_unsigned_const:
7624 case dw_val_class_unsigned_const_implicit:
7625 return v1->v.val_unsigned == v2->v.val_unsigned;
7626 case dw_val_class_const_double:
7627 return v1->v.val_double.high == v2->v.val_double.high
7628 && v1->v.val_double.low == v2->v.val_double.low;
7629 case dw_val_class_wide_int:
7630 return *v1->v.val_wide == *v2->v.val_wide;
7631 case dw_val_class_vec:
7632 if (v1->v.val_vec.length != v2->v.val_vec.length
7633 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7634 return 0;
7635 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7636 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7637 return 0;
7638 return 1;
7639 case dw_val_class_flag:
7640 return v1->v.val_flag == v2->v.val_flag;
7641 case dw_val_class_str:
7642 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7643
7644 case dw_val_class_addr:
7645 r1 = v1->v.val_addr;
7646 r2 = v2->v.val_addr;
7647 if (GET_CODE (r1) != GET_CODE (r2))
7648 return 0;
7649 return rtx_equal_p (r1, r2);
7650
7651 case dw_val_class_offset:
7652 return v1->v.val_offset == v2->v.val_offset;
7653
7654 case dw_val_class_loc:
7655 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7656 loc1 && loc2;
7657 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7658 if (!same_loc_p (loc1, loc2, mark))
7659 return 0;
7660 return !loc1 && !loc2;
7661
7662 case dw_val_class_die_ref:
7663 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7664
7665 case dw_val_class_symview:
7666 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7667
7668 case dw_val_class_fde_ref:
7669 case dw_val_class_vms_delta:
7670 case dw_val_class_lbl_id:
7671 case dw_val_class_lineptr:
7672 case dw_val_class_macptr:
7673 case dw_val_class_loclistsptr:
7674 case dw_val_class_high_pc:
7675 return 1;
7676
7677 case dw_val_class_file:
7678 case dw_val_class_file_implicit:
7679 return v1->v.val_file == v2->v.val_file;
7680
7681 case dw_val_class_data8:
7682 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7683
7684 default:
7685 return 1;
7686 }
7687 }
7688
7689 /* Do the attributes look the same? */
7690
7691 static int
7692 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7693 {
7694 if (at1->dw_attr != at2->dw_attr)
7695 return 0;
7696
7697 /* We don't care that this was compiled with a different compiler
7698 snapshot; if the output is the same, that's what matters. */
7699 if (at1->dw_attr == DW_AT_producer)
7700 return 1;
7701
7702 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7703 }
7704
7705 /* Do the DIEs look the same? */
7706
7707 static int
7708 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7709 {
7710 dw_die_ref c1, c2;
7711 dw_attr_node *a1;
7712 unsigned ix;
7713
7714 /* To avoid infinite recursion. */
7715 if (die1->die_mark)
7716 return die1->die_mark == die2->die_mark;
7717 die1->die_mark = die2->die_mark = ++(*mark);
7718
7719 if (die1->die_tag != die2->die_tag)
7720 return 0;
7721
7722 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7723 return 0;
7724
7725 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7726 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7727 return 0;
7728
7729 c1 = die1->die_child;
7730 c2 = die2->die_child;
7731 if (! c1)
7732 {
7733 if (c2)
7734 return 0;
7735 }
7736 else
7737 for (;;)
7738 {
7739 if (!same_die_p (c1, c2, mark))
7740 return 0;
7741 c1 = c1->die_sib;
7742 c2 = c2->die_sib;
7743 if (c1 == die1->die_child)
7744 {
7745 if (c2 == die2->die_child)
7746 break;
7747 else
7748 return 0;
7749 }
7750 }
7751
7752 return 1;
7753 }
7754
7755 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7756 children, and set die_symbol. */
7757
7758 static void
7759 compute_comp_unit_symbol (dw_die_ref unit_die)
7760 {
7761 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7762 const char *base = die_name ? lbasename (die_name) : "anonymous";
7763 char *name = XALLOCAVEC (char, strlen (base) + 64);
7764 char *p;
7765 int i, mark;
7766 unsigned char checksum[16];
7767 struct md5_ctx ctx;
7768
7769 /* Compute the checksum of the DIE, then append part of it as hex digits to
7770 the base filename of the unit. */
7771
7772 md5_init_ctx (&ctx);
7773 mark = 0;
7774 die_checksum (unit_die, &ctx, &mark);
7775 unmark_all_dies (unit_die);
7776 md5_finish_ctx (&ctx, checksum);
7777
7778 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7779 not start with a letter but with anything valid for filenames and
7780 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7781 character is not a letter. */
7782 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7783 clean_symbol_name (name);
7784
7785 p = name + strlen (name);
7786 for (i = 0; i < 4; i++)
7787 {
7788 sprintf (p, "%.2x", checksum[i]);
7789 p += 2;
7790 }
7791
7792 unit_die->die_id.die_symbol = xstrdup (name);
7793 }
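
/* A minimal sketch of the suffix construction above (illustrative only):
   the first four digest bytes become eight lowercase hex digits appended
   to the cleaned-up base name, e.g. { 0x12, 0x34, 0xab, 0xcd } yields the
   suffix "1234abcd".  */

static void
sketch_checksum_suffix (const unsigned char checksum[16], char suffix[9])
{
  int i;

  for (i = 0; i < 4; i++)
    sprintf (suffix + 2 * i, "%.2x", checksum[i]);
}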
7794
7795 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7796
7797 static int
7798 is_type_die (dw_die_ref die)
7799 {
7800 switch (die->die_tag)
7801 {
7802 case DW_TAG_array_type:
7803 case DW_TAG_class_type:
7804 case DW_TAG_interface_type:
7805 case DW_TAG_enumeration_type:
7806 case DW_TAG_pointer_type:
7807 case DW_TAG_reference_type:
7808 case DW_TAG_rvalue_reference_type:
7809 case DW_TAG_string_type:
7810 case DW_TAG_structure_type:
7811 case DW_TAG_subroutine_type:
7812 case DW_TAG_union_type:
7813 case DW_TAG_ptr_to_member_type:
7814 case DW_TAG_set_type:
7815 case DW_TAG_subrange_type:
7816 case DW_TAG_base_type:
7817 case DW_TAG_const_type:
7818 case DW_TAG_file_type:
7819 case DW_TAG_packed_type:
7820 case DW_TAG_volatile_type:
7821 case DW_TAG_typedef:
7822 return 1;
7823 default:
7824 return 0;
7825 }
7826 }
7827
7828 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7829 Basically, we want to choose the bits that are likely to be shared between
7830 compilations (types) and leave out the bits that are specific to individual
7831 compilations (functions). */
7832
7833 static int
7834 is_comdat_die (dw_die_ref c)
7835 {
7836 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7837 we do for stabs. The advantage is a greater likelihood of sharing between
7838 objects that don't include headers in the same order (and therefore would
7839 put the base types in a different comdat). jason 8/28/00 */
7840
7841 if (c->die_tag == DW_TAG_base_type)
7842 return 0;
7843
7844 if (c->die_tag == DW_TAG_pointer_type
7845 || c->die_tag == DW_TAG_reference_type
7846 || c->die_tag == DW_TAG_rvalue_reference_type
7847 || c->die_tag == DW_TAG_const_type
7848 || c->die_tag == DW_TAG_volatile_type)
7849 {
7850 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7851
7852 return t ? is_comdat_die (t) : 0;
7853 }
7854
7855 return is_type_die (c);
7856 }
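
/* For example (hypothetical DIEs): a DIE for "struct S *" recurses through
   the pointer to "struct S" and is judged comdat-worthy, whereas a DIE for
   "const int *" recurses through the const and pointer qualifiers to the
   base type "int" and therefore stays in the main CU.  */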
7857
7858 /* Returns true iff C is a compile-unit DIE. */
7859
7860 static inline bool
7861 is_cu_die (dw_die_ref c)
7862 {
7863 return c && (c->die_tag == DW_TAG_compile_unit
7864 || c->die_tag == DW_TAG_skeleton_unit);
7865 }
7866
7867 /* Returns true iff C is a unit DIE of some sort. */
7868
7869 static inline bool
7870 is_unit_die (dw_die_ref c)
7871 {
7872 return c && (c->die_tag == DW_TAG_compile_unit
7873 || c->die_tag == DW_TAG_partial_unit
7874 || c->die_tag == DW_TAG_type_unit
7875 || c->die_tag == DW_TAG_skeleton_unit);
7876 }
7877
7878 /* Returns true iff C is a namespace DIE. */
7879
7880 static inline bool
7881 is_namespace_die (dw_die_ref c)
7882 {
7883 return c && c->die_tag == DW_TAG_namespace;
7884 }
7885
7886 /* Returns true iff C is a class or structure DIE. */
7887
7888 static inline bool
7889 is_class_die (dw_die_ref c)
7890 {
7891 return c && (c->die_tag == DW_TAG_class_type
7892 || c->die_tag == DW_TAG_structure_type);
7893 }
7894
7895 /* Return non-zero if this DIE is a template parameter. */
7896
7897 static inline bool
7898 is_template_parameter (dw_die_ref die)
7899 {
7900 switch (die->die_tag)
7901 {
7902 case DW_TAG_template_type_param:
7903 case DW_TAG_template_value_param:
7904 case DW_TAG_GNU_template_template_param:
7905 case DW_TAG_GNU_template_parameter_pack:
7906 return true;
7907 default:
7908 return false;
7909 }
7910 }
7911
7912 /* Return non-zero if this DIE represents a template instantiation. */
7913
7914 static inline bool
7915 is_template_instantiation (dw_die_ref die)
7916 {
7917 dw_die_ref c;
7918
7919 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7920 return false;
7921 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7922 return false;
7923 }
7924
7925 static char *
7926 gen_internal_sym (const char *prefix)
7927 {
7928 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7929
7930 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7931 return xstrdup (buf);
7932 }
7933
7934 /* Return non-zero if this DIE is a declaration. */
7935
7936 static int
7937 is_declaration_die (dw_die_ref die)
7938 {
7939 dw_attr_node *a;
7940 unsigned ix;
7941
7942 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7943 if (a->dw_attr == DW_AT_declaration)
7944 return 1;
7945
7946 return 0;
7947 }
7948
7949 /* Return non-zero if this DIE is nested inside a subprogram. */
7950
7951 static int
7952 is_nested_in_subprogram (dw_die_ref die)
7953 {
7954 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7955
7956 if (decl == NULL)
7957 decl = die;
7958 return local_scope_p (decl);
7959 }
7960
7961 /* Return non-zero if this DIE contains a defining declaration of a
7962 subprogram. */
7963
7964 static int
7965 contains_subprogram_definition (dw_die_ref die)
7966 {
7967 dw_die_ref c;
7968
7969 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7970 return 1;
7971 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7972 return 0;
7973 }
7974
7975 /* Return non-zero if this is a type DIE that should be moved to a
7976 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7977 unit type. */
7978
7979 static int
7980 should_move_die_to_comdat (dw_die_ref die)
7981 {
7982 switch (die->die_tag)
7983 {
7984 case DW_TAG_class_type:
7985 case DW_TAG_structure_type:
7986 case DW_TAG_enumeration_type:
7987 case DW_TAG_union_type:
7988 /* Don't move declarations, inlined instances, types nested in a
7989 subprogram, or types that contain subprogram definitions. */
7990 if (is_declaration_die (die)
7991 || get_AT (die, DW_AT_abstract_origin)
7992 || is_nested_in_subprogram (die)
7993 || contains_subprogram_definition (die))
7994 return 0;
7995 return 1;
7996 case DW_TAG_array_type:
7997 case DW_TAG_interface_type:
7998 case DW_TAG_pointer_type:
7999 case DW_TAG_reference_type:
8000 case DW_TAG_rvalue_reference_type:
8001 case DW_TAG_string_type:
8002 case DW_TAG_subroutine_type:
8003 case DW_TAG_ptr_to_member_type:
8004 case DW_TAG_set_type:
8005 case DW_TAG_subrange_type:
8006 case DW_TAG_base_type:
8007 case DW_TAG_const_type:
8008 case DW_TAG_file_type:
8009 case DW_TAG_packed_type:
8010 case DW_TAG_volatile_type:
8011 case DW_TAG_typedef:
8012 default:
8013 return 0;
8014 }
8015 }
8016
8017 /* Make a clone of DIE. */
8018
8019 static dw_die_ref
8020 clone_die (dw_die_ref die)
8021 {
8022 dw_die_ref clone = new_die_raw (die->die_tag);
8023 dw_attr_node *a;
8024 unsigned ix;
8025
8026 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8027 add_dwarf_attr (clone, a);
8028
8029 return clone;
8030 }
8031
8032 /* Make a clone of the tree rooted at DIE. */
8033
8034 static dw_die_ref
8035 clone_tree (dw_die_ref die)
8036 {
8037 dw_die_ref c;
8038 dw_die_ref clone = clone_die (die);
8039
8040 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8041
8042 return clone;
8043 }
8044
8045 /* Make a clone of DIE as a declaration. */
8046
8047 static dw_die_ref
8048 clone_as_declaration (dw_die_ref die)
8049 {
8050 dw_die_ref clone;
8051 dw_die_ref decl;
8052 dw_attr_node *a;
8053 unsigned ix;
8054
8055 /* If the DIE is already a declaration, just clone it. */
8056 if (is_declaration_die (die))
8057 return clone_die (die);
8058
8059 /* If the DIE is a specification, just clone its declaration DIE. */
8060 decl = get_AT_ref (die, DW_AT_specification);
8061 if (decl != NULL)
8062 {
8063 clone = clone_die (decl);
8064 if (die->comdat_type_p)
8065 add_AT_die_ref (clone, DW_AT_signature, die);
8066 return clone;
8067 }
8068
8069 clone = new_die_raw (die->die_tag);
8070
8071 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8072 {
8073 /* We don't want to copy over all attributes.
8074 For example we don't want DW_AT_byte_size because otherwise we will no
8075 longer have a declaration and GDB will treat it as a definition. */
8076
8077 switch (a->dw_attr)
8078 {
8079 case DW_AT_abstract_origin:
8080 case DW_AT_artificial:
8081 case DW_AT_containing_type:
8082 case DW_AT_external:
8083 case DW_AT_name:
8084 case DW_AT_type:
8085 case DW_AT_virtuality:
8086 case DW_AT_linkage_name:
8087 case DW_AT_MIPS_linkage_name:
8088 add_dwarf_attr (clone, a);
8089 break;
8090 case DW_AT_byte_size:
8091 case DW_AT_alignment:
8092 default:
8093 break;
8094 }
8095 }
8096
8097 if (die->comdat_type_p)
8098 add_AT_die_ref (clone, DW_AT_signature, die);
8099
8100 add_AT_flag (clone, DW_AT_declaration, 1);
8101 return clone;
8102 }
8103
8104
8105 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8106
8107 struct decl_table_entry
8108 {
8109 dw_die_ref orig;
8110 dw_die_ref copy;
8111 };
8112
8113 /* Helpers to manipulate hash table of copied declarations. */
8114
8115 /* Hashtable helpers. */
8116
8117 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8118 {
8119 typedef die_struct *compare_type;
8120 static inline hashval_t hash (const decl_table_entry *);
8121 static inline bool equal (const decl_table_entry *, const die_struct *);
8122 };
8123
8124 inline hashval_t
8125 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8126 {
8127 return htab_hash_pointer (entry->orig);
8128 }
8129
8130 inline bool
8131 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8132 const die_struct *entry2)
8133 {
8134 return entry1->orig == entry2;
8135 }
8136
8137 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8138
8139 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8140 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8141 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8142 to check if the ancestor has already been copied into UNIT. */
8143
8144 static dw_die_ref
8145 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8146 decl_hash_type *decl_table)
8147 {
8148 dw_die_ref parent = die->die_parent;
8149 dw_die_ref new_parent = unit;
8150 dw_die_ref copy;
8151 decl_table_entry **slot = NULL;
8152 struct decl_table_entry *entry = NULL;
8153
8154 if (decl_table)
8155 {
8156 /* Check if the entry has already been copied to UNIT. */
8157 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8158 INSERT);
8159 if (*slot != HTAB_EMPTY_ENTRY)
8160 {
8161 entry = *slot;
8162 return entry->copy;
8163 }
8164
8165 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8166 entry = XCNEW (struct decl_table_entry);
8167 entry->orig = die;
8168 entry->copy = NULL;
8169 *slot = entry;
8170 }
8171
8172 if (parent != NULL)
8173 {
8174 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8175 if (spec != NULL)
8176 parent = spec;
8177 if (!is_unit_die (parent))
8178 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8179 }
8180
8181 copy = clone_as_declaration (die);
8182 add_child_die (new_parent, copy);
8183
8184 if (decl_table)
8185 {
8186 /* Record the pointer to the copy. */
8187 entry->copy = copy;
8188 }
8189
8190 return copy;
8191 }
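
/* For example (hypothetical DIEs): if DIE is  struct B  nested in
   namespace A , the call copies A and then B into UNIT as declaration-only
   DIEs, roughly

     DW_TAG_type_unit
       DW_TAG_namespace "A"
         DW_TAG_structure_type "B"   (DW_AT_declaration)

   and returns the copy of B, so the caller can hang further copies or a
   DW_AT_specification off the correct scope.  */
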
8192 /* Copy the declaration context to the new type unit DIE. This includes
8193 any surrounding namespace or type declarations. If the DIE has an
8194 AT_specification attribute, it also includes attributes and children
8195 attached to the specification, and returns a pointer to the original
8196 parent of the declaration DIE. Returns NULL otherwise. */
8197
8198 static dw_die_ref
8199 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8200 {
8201 dw_die_ref decl;
8202 dw_die_ref new_decl;
8203 dw_die_ref orig_parent = NULL;
8204
8205 decl = get_AT_ref (die, DW_AT_specification);
8206 if (decl == NULL)
8207 decl = die;
8208 else
8209 {
8210 unsigned ix;
8211 dw_die_ref c;
8212 dw_attr_node *a;
8213
8214 /* The original DIE will be changed to a declaration, and must
8215 be moved to be a child of the original declaration DIE. */
8216 orig_parent = decl->die_parent;
8217
8218 /* Copy the type node pointer from the new DIE to the original
8219 declaration DIE so we can forward references later. */
8220 decl->comdat_type_p = true;
8221 decl->die_id.die_type_node = die->die_id.die_type_node;
8222
8223 remove_AT (die, DW_AT_specification);
8224
8225 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8226 {
8227 if (a->dw_attr != DW_AT_name
8228 && a->dw_attr != DW_AT_declaration
8229 && a->dw_attr != DW_AT_external)
8230 add_dwarf_attr (die, a);
8231 }
8232
8233 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8234 }
8235
8236 if (decl->die_parent != NULL
8237 && !is_unit_die (decl->die_parent))
8238 {
8239 new_decl = copy_ancestor_tree (unit, decl, NULL);
8240 if (new_decl != NULL)
8241 {
8242 remove_AT (new_decl, DW_AT_signature);
8243 add_AT_specification (die, new_decl);
8244 }
8245 }
8246
8247 return orig_parent;
8248 }
8249
8250 /* Generate the skeleton ancestor tree for the given NODE, then clone
8251 the DIE and add the clone into the tree. */
8252
8253 static void
8254 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8255 {
8256 if (node->new_die != NULL)
8257 return;
8258
8259 node->new_die = clone_as_declaration (node->old_die);
8260
8261 if (node->parent != NULL)
8262 {
8263 generate_skeleton_ancestor_tree (node->parent);
8264 add_child_die (node->parent->new_die, node->new_die);
8265 }
8266 }
8267
8268 /* Generate a skeleton tree of DIEs containing any declarations that are
8269 found in the original tree. We traverse the tree looking for declaration
8270 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8271
8272 static void
8273 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8274 {
8275 skeleton_chain_node node;
8276 dw_die_ref c;
8277 dw_die_ref first;
8278 dw_die_ref prev = NULL;
8279 dw_die_ref next = NULL;
8280
8281 node.parent = parent;
8282
8283 first = c = parent->old_die->die_child;
8284 if (c)
8285 next = c->die_sib;
8286 if (c) do {
8287 if (prev == NULL || prev->die_sib == c)
8288 prev = c;
8289 c = next;
8290 next = (c == first ? NULL : c->die_sib);
8291 node.old_die = c;
8292 node.new_die = NULL;
8293 if (is_declaration_die (c))
8294 {
8295 if (is_template_instantiation (c))
8296 {
8297 /* Instantiated templates do not need to be cloned into the
8298 type unit. Just move the DIE and its children back to
8299 the skeleton tree (in the main CU). */
8300 remove_child_with_prev (c, prev);
8301 add_child_die (parent->new_die, c);
8302 c = prev;
8303 }
8304 else if (c->comdat_type_p)
8305 {
8306 /* This is the skeleton of a type broken out earlier by
8307 break_out_comdat_types. Clone the existing DIE, but keep
8308 the children under the original (which is in the main CU). */
8309 dw_die_ref clone = clone_die (c);
8310
8311 replace_child (c, clone, prev);
8312 generate_skeleton_ancestor_tree (parent);
8313 add_child_die (parent->new_die, c);
8314 c = clone;
8315 continue;
8316 }
8317 else
8318 {
8319 /* Clone the existing DIE, move the original to the skeleton
8320 tree (which is in the main CU), and put the clone, with
8321 all the original's children, where the original came from
8322 (which is about to be moved to the type unit). */
8323 dw_die_ref clone = clone_die (c);
8324 move_all_children (c, clone);
8325
8326 /* If the original has a DW_AT_object_pointer attribute,
8327 it would now point to a child DIE just moved to the
8328 cloned tree, so we need to remove that attribute from
8329 the original. */
8330 remove_AT (c, DW_AT_object_pointer);
8331
8332 replace_child (c, clone, prev);
8333 generate_skeleton_ancestor_tree (parent);
8334 add_child_die (parent->new_die, c);
8335 node.old_die = clone;
8336 node.new_die = c;
8337 c = clone;
8338 }
8339 }
8340 generate_skeleton_bottom_up (&node);
8341 } while (next != NULL);
8342 }
8343
8344 /* Wrapper function for generate_skeleton_bottom_up. */
8345
8346 static dw_die_ref
8347 generate_skeleton (dw_die_ref die)
8348 {
8349 skeleton_chain_node node;
8350
8351 node.old_die = die;
8352 node.new_die = NULL;
8353 node.parent = NULL;
8354
8355 /* If this type definition is nested inside another type,
8356 and is not an instantiation of a template, always leave
8357 at least a declaration in its place. */
8358 if (die->die_parent != NULL
8359 && is_type_die (die->die_parent)
8360 && !is_template_instantiation (die))
8361 node.new_die = clone_as_declaration (die);
8362
8363 generate_skeleton_bottom_up (&node);
8364 return node.new_die;
8365 }
8366
8367 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8368 declaration. The original DIE is moved to a new compile unit so that
8369 existing references to it follow it to the new location. If any of the
8370 original DIE's descendants is a declaration, we need to replace the
8371 original DIE with a skeleton tree and move the declarations back into the
8372 skeleton tree. */
8373
8374 static dw_die_ref
8375 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8376 dw_die_ref prev)
8377 {
8378 dw_die_ref skeleton, orig_parent;
8379
8380 /* Copy the declaration context to the type unit DIE. If the returned
8381 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8382 that DIE. */
8383 orig_parent = copy_declaration_context (unit, child);
8384
8385 skeleton = generate_skeleton (child);
8386 if (skeleton == NULL)
8387 remove_child_with_prev (child, prev);
8388 else
8389 {
8390 skeleton->comdat_type_p = true;
8391 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8392
8393 /* If the original DIE was a specification, we need to put
8394 the skeleton under the parent DIE of the declaration.
8395 This leaves the original declaration in the tree, but
8396 it will be pruned later since there are no longer any
8397 references to it. */
8398 if (orig_parent != NULL)
8399 {
8400 remove_child_with_prev (child, prev);
8401 add_child_die (orig_parent, skeleton);
8402 }
8403 else
8404 replace_child (child, skeleton, prev);
8405 }
8406
8407 return skeleton;
8408 }
8409
8410 static void
8411 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8412 comdat_type_node *type_node,
8413 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8414
8415 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8416 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8417 DWARF procedure references in the DW_AT_location attribute. */
8418
8419 static dw_die_ref
8420 copy_dwarf_procedure (dw_die_ref die,
8421 comdat_type_node *type_node,
8422 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8423 {
8424 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8425
8426 /* DWARF procedures are not supposed to have children... */
8427 gcc_assert (die->die_child == NULL);
8428
8429 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8430 gcc_assert (vec_safe_length (die->die_attr) == 1
8431 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8432
8433 /* Do not copy DWARF procedures more than once. */
8434 bool existed;
8435 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8436 if (existed)
8437 return die_copy;
8438
8439 die_copy = clone_die (die);
8440 add_child_die (type_node->root_die, die_copy);
8441 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8442 return die_copy;
8443 }
8444
8445 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8446 procedures in DIE's attributes. */
8447
8448 static void
8449 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8450 comdat_type_node *type_node,
8451 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8452 {
8453 dw_attr_node *a;
8454 unsigned i;
8455
8456 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8457 {
8458 dw_loc_descr_ref loc;
8459
8460 if (a->dw_attr_val.val_class != dw_val_class_loc)
8461 continue;
8462
8463 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8464 {
8465 switch (loc->dw_loc_opc)
8466 {
8467 case DW_OP_call2:
8468 case DW_OP_call4:
8469 case DW_OP_call_ref:
8470 gcc_assert (loc->dw_loc_oprnd1.val_class
8471 == dw_val_class_die_ref);
8472 loc->dw_loc_oprnd1.v.val_die_ref.die
8473 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8474 type_node,
8475 copied_dwarf_procs);
8476
8477 default:
8478 break;
8479 }
8480 }
8481 }
8482 }
8483
8484 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8485 rewrite references to point to the copies.
8486
8487 References are looked for in DIE's attributes and recursively in all its
8488 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8489 mapping from old DWARF procedures to their copies. It is used to avoid
8490 copying the same DWARF procedure twice under TYPE_NODE. */
8491
8492 static void
8493 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8494 comdat_type_node *type_node,
8495 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8496 {
8497 dw_die_ref c;
8498
8499 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8500 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8501 type_node,
8502 copied_dwarf_procs));
8503 }
8504
8505 /* Traverse the DIE and set up additional .debug_types or .debug_info
8506 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8507 section. */
8508
8509 static void
8510 break_out_comdat_types (dw_die_ref die)
8511 {
8512 dw_die_ref c;
8513 dw_die_ref first;
8514 dw_die_ref prev = NULL;
8515 dw_die_ref next = NULL;
8516 dw_die_ref unit = NULL;
8517
8518 first = c = die->die_child;
8519 if (c)
8520 next = c->die_sib;
8521 if (c) do {
8522 if (prev == NULL || prev->die_sib == c)
8523 prev = c;
8524 c = next;
8525 next = (c == first ? NULL : c->die_sib);
8526 if (should_move_die_to_comdat (c))
8527 {
8528 dw_die_ref replacement;
8529 comdat_type_node *type_node;
8530
8531 /* Break out nested types into their own type units. */
8532 break_out_comdat_types (c);
8533
8534 /* Create a new type unit DIE as the root for the new tree, and
8535 add it to the list of comdat types. */
8536 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8537 add_AT_unsigned (unit, DW_AT_language,
8538 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8539 type_node = ggc_cleared_alloc<comdat_type_node> ();
8540 type_node->root_die = unit;
8541 type_node->next = comdat_type_list;
8542 comdat_type_list = type_node;
8543
8544 /* Generate the type signature. */
8545 generate_type_signature (c, type_node);
8546
8547 /* Copy the declaration context, attributes, and children of the
8548 declaration into the new type unit DIE, then remove this DIE
8549 from the main CU (or replace it with a skeleton if necessary). */
8550 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8551 type_node->skeleton_die = replacement;
8552
8553 /* Add the DIE to the new compunit. */
8554 add_child_die (unit, c);
8555
8556 /* Types can reference DWARF procedures for type size or data location
8557 expressions. Calls in DWARF expressions cannot target procedures
8558 that are not in the same section. So we must copy DWARF procedures
8559 along with this type and then rewrite references to them. */
8560 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8561 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8562
8563 if (replacement != NULL)
8564 c = replacement;
8565 }
8566 else if (c->die_tag == DW_TAG_namespace
8567 || c->die_tag == DW_TAG_class_type
8568 || c->die_tag == DW_TAG_structure_type
8569 || c->die_tag == DW_TAG_union_type)
8570 {
8571 /* Look for nested types that can be broken out. */
8572 break_out_comdat_types (c);
8573 }
8574 } while (next != NULL);
8575 }
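
/* As a rough illustration (hypothetical input): given a compile unit

     DW_TAG_compile_unit
       DW_TAG_structure_type "S"   (full definition)
       DW_TAG_subprogram "f"       (uses S)

   this pass moves the definition of S into its own DW_TAG_type_unit, keyed
   by the signature computed in generate_type_signature, and leaves either
   nothing or a skeleton/declaration for S in the main CU, so this and any
   other CU can refer to S through its eight-byte signature.  */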
8576
8577 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8578 Enter all the cloned children into the hash table decl_table. */
8579
8580 static dw_die_ref
8581 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8582 {
8583 dw_die_ref c;
8584 dw_die_ref clone;
8585 struct decl_table_entry *entry;
8586 decl_table_entry **slot;
8587
8588 if (die->die_tag == DW_TAG_subprogram)
8589 clone = clone_as_declaration (die);
8590 else
8591 clone = clone_die (die);
8592
8593 slot = decl_table->find_slot_with_hash (die,
8594 htab_hash_pointer (die), INSERT);
8595
8596 /* Assert that DIE isn't in the hash table yet. If it were already
8597 there, its ancestors would necessarily be there as well, and
8598 clone_tree_partial wouldn't have been called. */
8599 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8600
8601 entry = XCNEW (struct decl_table_entry);
8602 entry->orig = die;
8603 entry->copy = clone;
8604 *slot = entry;
8605
8606 if (die->die_tag != DW_TAG_subprogram)
8607 FOR_EACH_CHILD (die, c,
8608 add_child_die (clone, clone_tree_partial (c, decl_table)));
8609
8610 return clone;
8611 }
8612
8613 /* Walk the DIE and its children, looking for references to incomplete
8614 or trivial types that are unmarked (i.e., that are not in the current
8615 type_unit). */
8616
8617 static void
8618 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8619 {
8620 dw_die_ref c;
8621 dw_attr_node *a;
8622 unsigned ix;
8623
8624 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8625 {
8626 if (AT_class (a) == dw_val_class_die_ref)
8627 {
8628 dw_die_ref targ = AT_ref (a);
8629 decl_table_entry **slot;
8630 struct decl_table_entry *entry;
8631
8632 if (targ->die_mark != 0 || targ->comdat_type_p)
8633 continue;
8634
8635 slot = decl_table->find_slot_with_hash (targ,
8636 htab_hash_pointer (targ),
8637 INSERT);
8638
8639 if (*slot != HTAB_EMPTY_ENTRY)
8640 {
8641 /* TARG has already been copied, so we just need to
8642 modify the reference to point to the copy. */
8643 entry = *slot;
8644 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8645 }
8646 else
8647 {
8648 dw_die_ref parent = unit;
8649 dw_die_ref copy = clone_die (targ);
8650
8651 /* Record in DECL_TABLE that TARG has been copied.
8652 Need to do this now, before the recursive call,
8653 because DECL_TABLE may be expanded and SLOT
8654 would no longer be a valid pointer. */
8655 entry = XCNEW (struct decl_table_entry);
8656 entry->orig = targ;
8657 entry->copy = copy;
8658 *slot = entry;
8659
8660 /* If TARG is not a declaration DIE, we need to copy its
8661 children. */
8662 if (!is_declaration_die (targ))
8663 {
8664 FOR_EACH_CHILD (
8665 targ, c,
8666 add_child_die (copy,
8667 clone_tree_partial (c, decl_table)));
8668 }
8669
8670 /* Make sure the cloned tree is marked as part of the
8671 type unit. */
8672 mark_dies (copy);
8673
8674 /* If TARG has surrounding context, copy its ancestor tree
8675 into the new type unit. */
8676 if (targ->die_parent != NULL
8677 && !is_unit_die (targ->die_parent))
8678 parent = copy_ancestor_tree (unit, targ->die_parent,
8679 decl_table);
8680
8681 add_child_die (parent, copy);
8682 a->dw_attr_val.v.val_die_ref.die = copy;
8683
8684 /* Make sure the newly-copied DIE is walked. If it was
8685 installed in a previously-added context, it won't
8686 get visited otherwise. */
8687 if (parent != unit)
8688 {
8689 /* Find the highest point of the newly-added tree,
8690 mark each node along the way, and walk from there. */
8691 parent->die_mark = 1;
8692 while (parent->die_parent
8693 && parent->die_parent->die_mark == 0)
8694 {
8695 parent = parent->die_parent;
8696 parent->die_mark = 1;
8697 }
8698 copy_decls_walk (unit, parent, decl_table);
8699 }
8700 }
8701 }
8702 }
8703
8704 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8705 }
8706
8707 /* Copy declarations for "unworthy" types into the new comdat section.
8708 Incomplete types, modified types, and certain other types aren't broken
8709 out into comdat sections of their own, so they don't have a signature,
8710 and we need to copy the declaration into the same section so that we
8711 don't have an external reference. */
8712
8713 static void
8714 copy_decls_for_unworthy_types (dw_die_ref unit)
8715 {
8716 mark_dies (unit);
8717 decl_hash_type decl_table (10);
8718 copy_decls_walk (unit, unit, &decl_table);
8719 unmark_dies (unit);
8720 }
8721
8722 /* Traverse the DIE and add a sibling attribute if it may have the
8723 effect of speeding up access to siblings. To save some space,
8724 avoid generating sibling attributes for DIEs without children.
8725
8726 static void
8727 add_sibling_attributes (dw_die_ref die)
8728 {
8729 dw_die_ref c;
8730
8731 if (! die->die_child)
8732 return;
8733
8734 if (die->die_parent && die != die->die_parent->die_child)
8735 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8736
8737 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8738 }
8739
8740 /* Output all location lists for the DIE and its children. */
8741
8742 static void
8743 output_location_lists (dw_die_ref die)
8744 {
8745 dw_die_ref c;
8746 dw_attr_node *a;
8747 unsigned ix;
8748
8749 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8750 if (AT_class (a) == dw_val_class_loc_list)
8751 output_loc_list (AT_loc_list (a));
8752
8753 FOR_EACH_CHILD (die, c, output_location_lists (c));
8754 }
8755
8756 /* During assign_location_list_indexes and output_loclists_offsets this is
8757 the current index; afterwards it is the number of assigned indexes (i.e.
8758 how large the .debug_loclists* offset table should be). */
8759 static unsigned int loc_list_idx;
8760
8761 /* Output all location list offsets for the DIE and its children. */
8762
8763 static void
8764 output_loclists_offsets (dw_die_ref die)
8765 {
8766 dw_die_ref c;
8767 dw_attr_node *a;
8768 unsigned ix;
8769
8770 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8771 if (AT_class (a) == dw_val_class_loc_list)
8772 {
8773 dw_loc_list_ref l = AT_loc_list (a);
8774 if (l->offset_emitted)
8775 continue;
8776 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8777 loc_section_label, NULL);
8778 gcc_assert (l->hash == loc_list_idx);
8779 loc_list_idx++;
8780 l->offset_emitted = true;
8781 }
8782
8783 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8784 }
8785
8786 /* Recursively set indexes of location lists. */
8787
8788 static void
8789 assign_location_list_indexes (dw_die_ref die)
8790 {
8791 dw_die_ref c;
8792 dw_attr_node *a;
8793 unsigned ix;
8794
8795 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8796 if (AT_class (a) == dw_val_class_loc_list)
8797 {
8798 dw_loc_list_ref list = AT_loc_list (a);
8799 if (!list->num_assigned)
8800 {
8801 list->num_assigned = true;
8802 list->hash = loc_list_idx++;
8803 }
8804 }
8805
8806 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8807 }
8808
8809 /* We want to limit the number of external references, because they are
8810 larger than local references: a relocation takes multiple words, and
8811 even a sig8 reference is always eight bytes, whereas a local reference
8812 can be as small as one byte (though GCC usually emits 4-byte DW_FORM_ref4).
8813 So if we encounter multiple external references to the same type DIE, we
8814 make a local typedef stub for it and redirect all references there.
8815
8816 This is the element of the hash table for keeping track of these
8817 references. */
8818
8819 struct external_ref
8820 {
8821 dw_die_ref type;
8822 dw_die_ref stub;
8823 unsigned n_refs;
8824 };
8825
8826 /* Hashtable helpers. */
8827
8828 struct external_ref_hasher : free_ptr_hash <external_ref>
8829 {
8830 static inline hashval_t hash (const external_ref *);
8831 static inline bool equal (const external_ref *, const external_ref *);
8832 };
8833
8834 inline hashval_t
8835 external_ref_hasher::hash (const external_ref *r)
8836 {
8837 dw_die_ref die = r->type;
8838 hashval_t h = 0;
8839
8840 /* We can't use the address of the DIE for hashing, because
8841 that will make the order of the stub DIEs non-deterministic. */
8842 if (! die->comdat_type_p)
8843 /* We have a symbol; use it to compute a hash. */
8844 h = htab_hash_string (die->die_id.die_symbol);
8845 else
8846 {
8847 /* We have a type signature; use a subset of the bits as the hash.
8848 The 8-byte signature is at least as large as hashval_t. */
8849 comdat_type_node *type_node = die->die_id.die_type_node;
8850 memcpy (&h, type_node->signature, sizeof (h));
8851 }
8852 return h;
8853 }
8854
8855 inline bool
8856 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8857 {
8858 return r1->type == r2->type;
8859 }
8860
8861 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8862
8863 /* Return a pointer to the external_ref for references to DIE. */
8864
8865 static struct external_ref *
8866 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8867 {
8868 struct external_ref ref, *ref_p;
8869 external_ref **slot;
8870
8871 ref.type = die;
8872 slot = map->find_slot (&ref, INSERT);
8873 if (*slot != HTAB_EMPTY_ENTRY)
8874 return *slot;
8875
8876 ref_p = XCNEW (struct external_ref);
8877 ref_p->type = die;
8878 *slot = ref_p;
8879 return ref_p;
8880 }
8881
8882 /* Subroutine of optimize_external_refs, below.
8883
8884 If we see a type skeleton, record it as our stub. If we see external
8885 references, remember how many we've seen. */
8886
8887 static void
8888 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8889 {
8890 dw_die_ref c;
8891 dw_attr_node *a;
8892 unsigned ix;
8893 struct external_ref *ref_p;
8894
8895 if (is_type_die (die)
8896 && (c = get_AT_ref (die, DW_AT_signature)))
8897 {
8898 /* This is a local skeleton; use it for local references. */
8899 ref_p = lookup_external_ref (map, c);
8900 ref_p->stub = die;
8901 }
8902
8903 /* Scan the DIE references, and remember any that refer to DIEs from
8904 other CUs (i.e. those which are not marked). */
8905 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8906 if (AT_class (a) == dw_val_class_die_ref
8907 && (c = AT_ref (a))->die_mark == 0
8908 && is_type_die (c))
8909 {
8910 ref_p = lookup_external_ref (map, c);
8911 ref_p->n_refs++;
8912 }
8913
8914 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8915 }
8916
8917 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8918 points to an external_ref, DATA is the CU we're processing. If we don't
8919 already have a local stub, and we have multiple refs, build a stub. */
8920
8921 int
8922 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8923 {
8924 struct external_ref *ref_p = *slot;
8925
8926 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8927 {
8928 /* We have multiple references to this type, so build a small stub.
8929 Both of these forms are a bit dodgy from the perspective of the
8930 DWARF standard, since technically they should have names. */
8931 dw_die_ref cu = data;
8932 dw_die_ref type = ref_p->type;
8933 dw_die_ref stub = NULL;
8934
8935 if (type->comdat_type_p)
8936 {
8937 /* If we refer to this type via sig8, use AT_signature. */
8938 stub = new_die (type->die_tag, cu, NULL_TREE);
8939 add_AT_die_ref (stub, DW_AT_signature, type);
8940 }
8941 else
8942 {
8943 /* Otherwise, use a typedef with no name. */
8944 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8945 add_AT_die_ref (stub, DW_AT_type, type);
8946 }
8947
8948 stub->die_mark++;
8949 ref_p->stub = stub;
8950 }
8951 return 1;
8952 }
8953
8954 /* DIE is a unit; look through all the DIE references to see if there are
8955 any external references to types, and if so, create local stubs for
8956 them which will be applied in build_abbrev_table. This is useful because
8957 references to local DIEs are smaller. */
8958
8959 static external_ref_hash_type *
8960 optimize_external_refs (dw_die_ref die)
8961 {
8962 external_ref_hash_type *map = new external_ref_hash_type (10);
8963 optimize_external_refs_1 (die, map);
8964 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8965 return map;
8966 }
8967
8968 /* The following 3 variables are temporaries that are computed only during the
8969 build_abbrev_table call and used and released during the following
8970 optimize_abbrev_table call. */
8971
8972 /* First abbrev_id that can be optimized based on usage. */
8973 static unsigned int abbrev_opt_start;
8974
8975 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8976 abbrev_id smaller than this, because they must be already sized
8977 during build_abbrev_table). */
8978 static unsigned int abbrev_opt_base_type_end;
8979
8980 /* Vector of usage counts during build_abbrev_table. Indexed by
8981 abbrev_id - abbrev_opt_start. */
8982 static vec<unsigned int> abbrev_usage_count;
8983
8984 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8985 static vec<dw_die_ref> sorted_abbrev_dies;
8986
8987 /* The format of each DIE (and its attribute value pairs) is encoded in an
8988 abbreviation table. This routine builds the abbreviation table and assigns
8989 a unique abbreviation id for each abbreviation entry. The children of each
8990 die are visited recursively. */
8991
8992 static void
8993 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8994 {
8995 unsigned int abbrev_id = 0;
8996 dw_die_ref c;
8997 dw_attr_node *a;
8998 unsigned ix;
8999 dw_die_ref abbrev;
9000
9001 /* Scan the DIE references, and replace any that refer to
9002 DIEs from other CUs (i.e. those which are not marked) with
9003 the local stubs we built in optimize_external_refs. */
9004 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9005 if (AT_class (a) == dw_val_class_die_ref
9006 && (c = AT_ref (a))->die_mark == 0)
9007 {
9008 struct external_ref *ref_p;
9009 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9010
9011 ref_p = lookup_external_ref (extern_map, c);
9012 if (ref_p->stub && ref_p->stub != die)
9013 change_AT_die_ref (a, ref_p->stub);
9014 else
9015 /* We aren't changing this reference, so mark it external. */
9016 set_AT_ref_external (a, 1);
9017 }
9018
9019 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9020 {
9021 dw_attr_node *die_a, *abbrev_a;
9022 unsigned ix;
9023 bool ok = true;
9024
9025 if (abbrev_id == 0)
9026 continue;
9027 if (abbrev->die_tag != die->die_tag)
9028 continue;
9029 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9030 continue;
9031
9032 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9033 continue;
9034
9035 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9036 {
9037 abbrev_a = &(*abbrev->die_attr)[ix];
9038 if ((abbrev_a->dw_attr != die_a->dw_attr)
9039 || (value_format (abbrev_a) != value_format (die_a)))
9040 {
9041 ok = false;
9042 break;
9043 }
9044 }
9045 if (ok)
9046 break;
9047 }
9048
9049 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9050 {
9051 vec_safe_push (abbrev_die_table, die);
9052 if (abbrev_opt_start)
9053 abbrev_usage_count.safe_push (0);
9054 }
9055 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9056 {
9057 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9058 sorted_abbrev_dies.safe_push (die);
9059 }
9060
9061 die->die_abbrev = abbrev_id;
9062 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9063 }
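
/* For instance, two DW_TAG_variable DIEs that each have DW_AT_name,
DW_AT_type and DW_AT_location with identical forms (say DW_FORM_strp,
DW_FORM_ref and DW_FORM_exprloc) match in the loop above and share one
abbreviation code, so the attribute/form list is emitted only once in
.debug_abbrev while each DIE pays just its uleb128 code plus the
attribute values.  */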
9064
9065 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9066 by die_abbrev's usage count, from the most commonly used
9067 abbreviation to the least. */
9068
9069 static int
9070 die_abbrev_cmp (const void *p1, const void *p2)
9071 {
9072 dw_die_ref die1 = *(const dw_die_ref *) p1;
9073 dw_die_ref die2 = *(const dw_die_ref *) p2;
9074
9075 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9076 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9077
9078 if (die1->die_abbrev >= abbrev_opt_base_type_end
9079 && die2->die_abbrev >= abbrev_opt_base_type_end)
9080 {
9081 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9082 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9083 return -1;
9084 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9085 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9086 return 1;
9087 }
9088
9089 /* Stabilize the sort. */
9090 if (die1->die_abbrev < die2->die_abbrev)
9091 return -1;
9092 if (die1->die_abbrev > die2->die_abbrev)
9093 return 1;
9094
9095 return 0;
9096 }
9097
9098 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9099 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9100 into dw_val_class_const_implicit or
9101 dw_val_class_unsigned_const_implicit. */
9102
9103 static void
9104 optimize_implicit_const (unsigned int first_id, unsigned int end,
9105 vec<bool> &implicit_consts)
9106 {
9107 /* It never makes sense if there is just one DIE using the abbreviation. */
9108 if (end < first_id + 2)
9109 return;
9110
9111 dw_attr_node *a;
9112 unsigned ix, i;
9113 dw_die_ref die = sorted_abbrev_dies[first_id];
9114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9115 if (implicit_consts[ix])
9116 {
9117 enum dw_val_class new_class = dw_val_class_none;
9118 switch (AT_class (a))
9119 {
9120 case dw_val_class_unsigned_const:
9121 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9122 continue;
9123
9124 /* The .debug_abbrev section will grow by
9125 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9126 in all the DIEs using that abbreviation. */
9127 if (constant_size (AT_unsigned (a)) * (end - first_id)
9128 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9129 continue;
9130
9131 new_class = dw_val_class_unsigned_const_implicit;
9132 break;
9133
9134 case dw_val_class_const:
9135 new_class = dw_val_class_const_implicit;
9136 break;
9137
9138 case dw_val_class_file:
9139 new_class = dw_val_class_file_implicit;
9140 break;
9141
9142 default:
9143 continue;
9144 }
9145 for (i = first_id; i < end; i++)
9146 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9147 = new_class;
9148 }
9149 }
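
/* For instance, if fifty DIEs share an abbreviation whose DW_AT_decl_file
value is always 1, turning that attribute into dw_val_class_file_implicit
drops one data byte from each of the fifty DIEs in .debug_info at the
cost of a single extra sleb128 byte in the abbreviation entry.  */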
9150
9151 /* Attempt to optimize the abbreviation table for abbreviations from
9152 abbrev_opt_start onwards. */
9153
9154 static void
9155 optimize_abbrev_table (void)
9156 {
9157 if (abbrev_opt_start
9158 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9159 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9160 {
9161 auto_vec<bool, 32> implicit_consts;
9162 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9163
9164 unsigned int abbrev_id = abbrev_opt_start - 1;
9165 unsigned int first_id = ~0U;
9166 unsigned int last_abbrev_id = 0;
9167 unsigned int i;
9168 dw_die_ref die;
9169 if (abbrev_opt_base_type_end > abbrev_opt_start)
9170 abbrev_id = abbrev_opt_base_type_end - 1;
9171 /* Reassign abbreviation ids from abbrev_opt_start onwards, so that the
9172 most commonly used abbreviations come first. */
9173 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9174 {
9175 dw_attr_node *a;
9176 unsigned ix;
9177
9178 /* If calc_base_type_die_sizes has been called, the CU and
9179 base types after it can't be optimized, because we've already
9180 calculated their DIE offsets. We've sorted them first. */
9181 if (die->die_abbrev < abbrev_opt_base_type_end)
9182 continue;
9183 if (die->die_abbrev != last_abbrev_id)
9184 {
9185 last_abbrev_id = die->die_abbrev;
9186 if (dwarf_version >= 5 && first_id != ~0U)
9187 optimize_implicit_const (first_id, i, implicit_consts);
9188 abbrev_id++;
9189 (*abbrev_die_table)[abbrev_id] = die;
9190 if (dwarf_version >= 5)
9191 {
9192 first_id = i;
9193 implicit_consts.truncate (0);
9194
9195 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9196 switch (AT_class (a))
9197 {
9198 case dw_val_class_const:
9199 case dw_val_class_unsigned_const:
9200 case dw_val_class_file:
9201 implicit_consts.safe_push (true);
9202 break;
9203 default:
9204 implicit_consts.safe_push (false);
9205 break;
9206 }
9207 }
9208 }
9209 else if (dwarf_version >= 5)
9210 {
9211 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9212 if (!implicit_consts[ix])
9213 continue;
9214 else
9215 {
9216 dw_attr_node *other_a
9217 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9218 if (!dw_val_equal_p (&a->dw_attr_val,
9219 &other_a->dw_attr_val))
9220 implicit_consts[ix] = false;
9221 }
9222 }
9223 die->die_abbrev = abbrev_id;
9224 }
9225 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9226 if (dwarf_version >= 5 && first_id != ~0U)
9227 optimize_implicit_const (first_id, i, implicit_consts);
9228 }
9229
9230 abbrev_opt_start = 0;
9231 abbrev_opt_base_type_end = 0;
9232 abbrev_usage_count.release ();
9233 sorted_abbrev_dies.release ();
9234 }
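
/* Much of the payoff above comes from the uleb128 encoding of the abbrev
code in each DIE: codes 1 to 127 take one byte and codes from 128 up take
two, which is why the optimization is only attempted once the table grows
past 127 entries (or for DWARF 5, where implicit constants add further
savings); handing the smallest codes to the most frequently used
abbreviations saves a byte for every DIE that would otherwise need a
two-byte code.  */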
9235 \f
9236 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9237
9238 static int
9239 constant_size (unsigned HOST_WIDE_INT value)
9240 {
9241 int log;
9242
9243 if (value == 0)
9244 log = 0;
9245 else
9246 log = floor_log2 (value);
9247
9248 log = log / 8;
9249 log = 1 << (floor_log2 (log) + 1);
9250
9251 return log;
9252 }
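
/* For example: constant_size (0) and constant_size (0xff) are 1,
constant_size (0x100) and constant_size (0xffff) are 2,
constant_size (0x10000) is 4, and anything needing more than 32 bits
is 8; the result is always rounded up to a power of two.  */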
9253
9254 /* Return the size of a DIE as it is represented in the
9255 .debug_info section. */
9256
9257 static unsigned long
9258 size_of_die (dw_die_ref die)
9259 {
9260 unsigned long size = 0;
9261 dw_attr_node *a;
9262 unsigned ix;
9263 enum dwarf_form form;
9264
9265 size += size_of_uleb128 (die->die_abbrev);
9266 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9267 {
9268 switch (AT_class (a))
9269 {
9270 case dw_val_class_addr:
9271 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9272 {
9273 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9274 size += size_of_uleb128 (AT_index (a));
9275 }
9276 else
9277 size += DWARF2_ADDR_SIZE;
9278 break;
9279 case dw_val_class_offset:
9280 size += DWARF_OFFSET_SIZE;
9281 break;
9282 case dw_val_class_loc:
9283 {
9284 unsigned long lsize = size_of_locs (AT_loc (a));
9285
9286 /* Block length. */
9287 if (dwarf_version >= 4)
9288 size += size_of_uleb128 (lsize);
9289 else
9290 size += constant_size (lsize);
9291 size += lsize;
9292 }
9293 break;
9294 case dw_val_class_loc_list:
9295 case dw_val_class_view_list:
9296 if (dwarf_split_debug_info && dwarf_version >= 5)
9297 {
9298 gcc_assert (AT_loc_list (a)->num_assigned);
9299 size += size_of_uleb128 (AT_loc_list (a)->hash);
9300 }
9301 else
9302 size += DWARF_OFFSET_SIZE;
9303 break;
9304 case dw_val_class_range_list:
9305 if (value_format (a) == DW_FORM_rnglistx)
9306 {
9307 gcc_assert (rnglist_idx);
9308 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9309 size += size_of_uleb128 (r->idx);
9310 }
9311 else
9312 size += DWARF_OFFSET_SIZE;
9313 break;
9314 case dw_val_class_const:
9315 size += size_of_sleb128 (AT_int (a));
9316 break;
9317 case dw_val_class_unsigned_const:
9318 {
9319 int csize = constant_size (AT_unsigned (a));
9320 if (dwarf_version == 3
9321 && a->dw_attr == DW_AT_data_member_location
9322 && csize >= 4)
9323 size += size_of_uleb128 (AT_unsigned (a));
9324 else
9325 size += csize;
9326 }
9327 break;
9328 case dw_val_class_symview:
9329 if (symview_upper_bound <= 0xff)
9330 size += 1;
9331 else if (symview_upper_bound <= 0xffff)
9332 size += 2;
9333 else if (symview_upper_bound <= 0xffffffff)
9334 size += 4;
9335 else
9336 size += 8;
9337 break;
9338 case dw_val_class_const_implicit:
9339 case dw_val_class_unsigned_const_implicit:
9340 case dw_val_class_file_implicit:
9341 /* These occupy no size in the DIE, just an extra sleb128 in
9342 .debug_abbrev. */
9343 break;
9344 case dw_val_class_const_double:
9345 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9346 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9347 size++; /* block */
9348 break;
9349 case dw_val_class_wide_int:
9350 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9351 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9352 if (get_full_len (*a->dw_attr_val.v.val_wide)
9353 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9354 size++; /* block */
9355 break;
9356 case dw_val_class_vec:
9357 size += constant_size (a->dw_attr_val.v.val_vec.length
9358 * a->dw_attr_val.v.val_vec.elt_size)
9359 + a->dw_attr_val.v.val_vec.length
9360 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9361 break;
9362 case dw_val_class_flag:
9363 if (dwarf_version >= 4)
9364 /* Currently all add_AT_flag calls pass in 1 as last argument,
9365 so DW_FORM_flag_present can be used. If that ever changes,
9366 we'll need to use DW_FORM_flag and have some optimization
9367 in build_abbrev_table that will change those to
9368 DW_FORM_flag_present if it is set to 1 in all DIEs using
9369 the same abbrev entry. */
9370 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9371 else
9372 size += 1;
9373 break;
9374 case dw_val_class_die_ref:
9375 if (AT_ref_external (a))
9376 {
9377 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9378 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9379 is sized by target address length, whereas in DWARF3
9380 it's always sized as an offset. */
9381 if (use_debug_types)
9382 size += DWARF_TYPE_SIGNATURE_SIZE;
9383 else if (dwarf_version == 2)
9384 size += DWARF2_ADDR_SIZE;
9385 else
9386 size += DWARF_OFFSET_SIZE;
9387 }
9388 else
9389 size += DWARF_OFFSET_SIZE;
9390 break;
9391 case dw_val_class_fde_ref:
9392 size += DWARF_OFFSET_SIZE;
9393 break;
9394 case dw_val_class_lbl_id:
9395 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9396 {
9397 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9398 size += size_of_uleb128 (AT_index (a));
9399 }
9400 else
9401 size += DWARF2_ADDR_SIZE;
9402 break;
9403 case dw_val_class_lineptr:
9404 case dw_val_class_macptr:
9405 case dw_val_class_loclistsptr:
9406 size += DWARF_OFFSET_SIZE;
9407 break;
9408 case dw_val_class_str:
9409 form = AT_string_form (a);
9410 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9411 size += DWARF_OFFSET_SIZE;
9412 else if (form == DW_FORM_GNU_str_index)
9413 size += size_of_uleb128 (AT_index (a));
9414 else
9415 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9416 break;
9417 case dw_val_class_file:
9418 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9419 break;
9420 case dw_val_class_data8:
9421 size += 8;
9422 break;
9423 case dw_val_class_vms_delta:
9424 size += DWARF_OFFSET_SIZE;
9425 break;
9426 case dw_val_class_high_pc:
9427 size += DWARF2_ADDR_SIZE;
9428 break;
9429 case dw_val_class_discr_value:
9430 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9431 break;
9432 case dw_val_class_discr_list:
9433 {
9434 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9435
9436 /* This is a block, so we have the block length and then its
9437 data. */
9438 size += constant_size (block_size) + block_size;
9439 }
9440 break;
9441 default:
9442 gcc_unreachable ();
9443 }
9444 }
9445
9446 return size;
9447 }
9448
9449 /* Size the debugging information associated with a given DIE. Visits the
9450 DIE's children recursively. Updates the global variable next_die_offset
9451 each time through. Uses the current value of next_die_offset to update the
9452 die_offset field in each DIE. */
9453
9454 static void
9455 calc_die_sizes (dw_die_ref die)
9456 {
9457 dw_die_ref c;
9458
9459 gcc_assert (die->die_offset == 0
9460 || (unsigned long int) die->die_offset == next_die_offset);
9461 die->die_offset = next_die_offset;
9462 next_die_offset += size_of_die (die);
9463
9464 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9465
9466 if (die->die_child != NULL)
9467 /* Count the null byte used to terminate sibling lists. */
9468 next_die_offset += 1;
9469 }
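
/* For example, with 32-bit DWARF before version 5 the compilation unit
header is 11 bytes, so the first DIE normally ends up with die_offset 11;
each DIE then advances next_die_offset by its size_of_die result, and
every non-empty sibling chain adds the one terminating zero byte counted
above.  */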
9470
9471 /* Size just the base type children at the start of the CU.
9472 This is needed because build_abbrev_table needs to size location
9473 expressions, and sizing of type-based stack ops needs to know die_offset
9474 values for the base types. */
9475
9476 static void
9477 calc_base_type_die_sizes (void)
9478 {
9479 unsigned long die_offset = (dwarf_split_debug_info
9480 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9481 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9482 unsigned int i;
9483 dw_die_ref base_type;
9484 #if ENABLE_ASSERT_CHECKING
9485 dw_die_ref prev = comp_unit_die ()->die_child;
9486 #endif
9487
9488 die_offset += size_of_die (comp_unit_die ());
9489 for (i = 0; base_types.iterate (i, &base_type); i++)
9490 {
9491 #if ENABLE_ASSERT_CHECKING
9492 gcc_assert (base_type->die_offset == 0
9493 && prev->die_sib == base_type
9494 && base_type->die_child == NULL
9495 && base_type->die_abbrev);
9496 prev = base_type;
9497 #endif
9498 if (abbrev_opt_start
9499 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9500 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9501 base_type->die_offset = die_offset;
9502 die_offset += size_of_die (base_type);
9503 }
9504 }
9505
9506 /* Set the marks for a die and its children. We do this so
9507 that we know whether or not a reference needs to use FORM_ref_addr; only
9508 DIEs in the same CU will be marked. We used to clear out the offset
9509 and use that as the flag, but ran into ordering problems. */
9510
9511 static void
9512 mark_dies (dw_die_ref die)
9513 {
9514 dw_die_ref c;
9515
9516 gcc_assert (!die->die_mark);
9517
9518 die->die_mark = 1;
9519 FOR_EACH_CHILD (die, c, mark_dies (c));
9520 }
9521
9522 /* Clear the marks for a die and its children. */
9523
9524 static void
9525 unmark_dies (dw_die_ref die)
9526 {
9527 dw_die_ref c;
9528
9529 if (! use_debug_types)
9530 gcc_assert (die->die_mark);
9531
9532 die->die_mark = 0;
9533 FOR_EACH_CHILD (die, c, unmark_dies (c));
9534 }
9535
9536 /* Clear the marks for a die, its children and referred dies. */
9537
9538 static void
9539 unmark_all_dies (dw_die_ref die)
9540 {
9541 dw_die_ref c;
9542 dw_attr_node *a;
9543 unsigned ix;
9544
9545 if (!die->die_mark)
9546 return;
9547 die->die_mark = 0;
9548
9549 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9550
9551 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9552 if (AT_class (a) == dw_val_class_die_ref)
9553 unmark_all_dies (AT_ref (a));
9554 }
9555
9556 /* Calculate if the entry should appear in the final output file. It may be
9557 from a pruned type. */
9558
9559 static bool
9560 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9561 {
9562 /* By limiting gnu pubnames to definitions only, gold can generate a
9563 gdb index without entries for declarations, which don't include
9564 enough information to be useful. */
9565 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9566 return false;
9567
9568 if (table == pubname_table)
9569 {
9570 /* Enumerator names are part of the pubname table, but the
9571 parent DW_TAG_enumeration_type die may have been pruned.
9572 Don't output them if that is the case. */
9573 if (p->die->die_tag == DW_TAG_enumerator &&
9574 (p->die->die_parent == NULL
9575 || !p->die->die_parent->die_perennial_p))
9576 return false;
9577
9578 /* Everything else in the pubname table is included. */
9579 return true;
9580 }
9581
9582 /* The pubtypes table shouldn't include types that have been
9583 pruned. */
9584 return (p->die->die_offset != 0
9585 || !flag_eliminate_unused_debug_types);
9586 }
9587
9588 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9589 generated for the compilation unit. */
9590
9591 static unsigned long
9592 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9593 {
9594 unsigned long size;
9595 unsigned i;
9596 pubname_entry *p;
9597 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9598
9599 size = DWARF_PUBNAMES_HEADER_SIZE;
9600 FOR_EACH_VEC_ELT (*names, i, p)
9601 if (include_pubname_in_output (names, p))
9602 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9603
9604 size += DWARF_OFFSET_SIZE;
9605 return size;
9606 }
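
/* Per entry this counts the DIE's DWARF_OFFSET_SIZE-byte offset, the
NUL-terminated name (the strlen + 1 above) and, for GNU pubnames
(debug_generate_pub_sections == 2), one extra flags byte; the trailing
DWARF_OFFSET_SIZE accounts for the zero offset terminating the table.  */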
9607
9608 /* Return the size of the information in the .debug_aranges section. */
9609
9610 static unsigned long
9611 size_of_aranges (void)
9612 {
9613 unsigned long size;
9614
9615 size = DWARF_ARANGES_HEADER_SIZE;
9616
9617 /* Count the address/length pair for this compilation unit. */
9618 if (text_section_used)
9619 size += 2 * DWARF2_ADDR_SIZE;
9620 if (cold_text_section_used)
9621 size += 2 * DWARF2_ADDR_SIZE;
9622 if (have_multiple_function_sections)
9623 {
9624 unsigned fde_idx;
9625 dw_fde_ref fde;
9626
9627 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9628 {
9629 if (DECL_IGNORED_P (fde->decl))
9630 continue;
9631 if (!fde->in_std_section)
9632 size += 2 * DWARF2_ADDR_SIZE;
9633 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9634 size += 2 * DWARF2_ADDR_SIZE;
9635 }
9636 }
9637
9638 /* Count the two zero words used to terminate the address range table. */
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 return size;
9641 }
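
/* So, for example, a unit whose code all lands in .text with
DWARF2_ADDR_SIZE == 8 contributes one 16-byte (address, length) pair
plus the 16-byte zero terminator on top of the header.  */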
9642 \f
9643 /* Select the encoding of an attribute value. */
9644
9645 static enum dwarf_form
9646 value_format (dw_attr_node *a)
9647 {
9648 switch (AT_class (a))
9649 {
9650 case dw_val_class_addr:
9651 /* Only very few attributes allow DW_FORM_addr. */
9652 switch (a->dw_attr)
9653 {
9654 case DW_AT_low_pc:
9655 case DW_AT_high_pc:
9656 case DW_AT_entry_pc:
9657 case DW_AT_trampoline:
9658 return (AT_index (a) == NOT_INDEXED
9659 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9660 default:
9661 break;
9662 }
9663 switch (DWARF2_ADDR_SIZE)
9664 {
9665 case 1:
9666 return DW_FORM_data1;
9667 case 2:
9668 return DW_FORM_data2;
9669 case 4:
9670 return DW_FORM_data4;
9671 case 8:
9672 return DW_FORM_data8;
9673 default:
9674 gcc_unreachable ();
9675 }
9676 case dw_val_class_loc_list:
9677 case dw_val_class_view_list:
9678 if (dwarf_split_debug_info
9679 && dwarf_version >= 5
9680 && AT_loc_list (a)->num_assigned)
9681 return DW_FORM_loclistx;
9682 /* FALLTHRU */
9683 case dw_val_class_range_list:
9684 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9685 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9686 care about sizes of .debug* sections in shared libraries and
9687 executables and don't take into account relocations that affect just
9688 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9689 table in the .debug_rnglists section. */
9690 if (dwarf_split_debug_info
9691 && dwarf_version >= 5
9692 && AT_class (a) == dw_val_class_range_list
9693 && rnglist_idx
9694 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9695 return DW_FORM_rnglistx;
9696 if (dwarf_version >= 4)
9697 return DW_FORM_sec_offset;
9698 /* FALLTHRU */
9699 case dw_val_class_vms_delta:
9700 case dw_val_class_offset:
9701 switch (DWARF_OFFSET_SIZE)
9702 {
9703 case 4:
9704 return DW_FORM_data4;
9705 case 8:
9706 return DW_FORM_data8;
9707 default:
9708 gcc_unreachable ();
9709 }
9710 case dw_val_class_loc:
9711 if (dwarf_version >= 4)
9712 return DW_FORM_exprloc;
9713 switch (constant_size (size_of_locs (AT_loc (a))))
9714 {
9715 case 1:
9716 return DW_FORM_block1;
9717 case 2:
9718 return DW_FORM_block2;
9719 case 4:
9720 return DW_FORM_block4;
9721 default:
9722 gcc_unreachable ();
9723 }
9724 case dw_val_class_const:
9725 return DW_FORM_sdata;
9726 case dw_val_class_unsigned_const:
9727 switch (constant_size (AT_unsigned (a)))
9728 {
9729 case 1:
9730 return DW_FORM_data1;
9731 case 2:
9732 return DW_FORM_data2;
9733 case 4:
9734 /* In DWARF3 DW_AT_data_member_location with
9735 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9736 constant, so we need to use DW_FORM_udata if we need
9737 a large constant. */
9738 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9739 return DW_FORM_udata;
9740 return DW_FORM_data4;
9741 case 8:
9742 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9743 return DW_FORM_udata;
9744 return DW_FORM_data8;
9745 default:
9746 gcc_unreachable ();
9747 }
9748 case dw_val_class_const_implicit:
9749 case dw_val_class_unsigned_const_implicit:
9750 case dw_val_class_file_implicit:
9751 return DW_FORM_implicit_const;
9752 case dw_val_class_const_double:
9753 switch (HOST_BITS_PER_WIDE_INT)
9754 {
9755 case 8:
9756 return DW_FORM_data2;
9757 case 16:
9758 return DW_FORM_data4;
9759 case 32:
9760 return DW_FORM_data8;
9761 case 64:
9762 if (dwarf_version >= 5)
9763 return DW_FORM_data16;
9764 /* FALLTHRU */
9765 default:
9766 return DW_FORM_block1;
9767 }
9768 case dw_val_class_wide_int:
9769 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9770 {
9771 case 8:
9772 return DW_FORM_data1;
9773 case 16:
9774 return DW_FORM_data2;
9775 case 32:
9776 return DW_FORM_data4;
9777 case 64:
9778 return DW_FORM_data8;
9779 case 128:
9780 if (dwarf_version >= 5)
9781 return DW_FORM_data16;
9782 /* FALLTHRU */
9783 default:
9784 return DW_FORM_block1;
9785 }
9786 case dw_val_class_symview:
9787 /* ??? We might use uleb128, but then we'd have to compute
9788 .debug_info offsets in the assembler. */
9789 if (symview_upper_bound <= 0xff)
9790 return DW_FORM_data1;
9791 else if (symview_upper_bound <= 0xffff)
9792 return DW_FORM_data2;
9793 else if (symview_upper_bound <= 0xffffffff)
9794 return DW_FORM_data4;
9795 else
9796 return DW_FORM_data8;
9797 case dw_val_class_vec:
9798 switch (constant_size (a->dw_attr_val.v.val_vec.length
9799 * a->dw_attr_val.v.val_vec.elt_size))
9800 {
9801 case 1:
9802 return DW_FORM_block1;
9803 case 2:
9804 return DW_FORM_block2;
9805 case 4:
9806 return DW_FORM_block4;
9807 default:
9808 gcc_unreachable ();
9809 }
9810 case dw_val_class_flag:
9811 if (dwarf_version >= 4)
9812 {
9813 /* Currently all add_AT_flag calls pass in 1 as last argument,
9814 so DW_FORM_flag_present can be used. If that ever changes,
9815 we'll need to use DW_FORM_flag and have some optimization
9816 in build_abbrev_table that will change those to
9817 DW_FORM_flag_present if it is set to 1 in all DIEs using
9818 the same abbrev entry. */
9819 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9820 return DW_FORM_flag_present;
9821 }
9822 return DW_FORM_flag;
9823 case dw_val_class_die_ref:
9824 if (AT_ref_external (a))
9825 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9826 else
9827 return DW_FORM_ref;
9828 case dw_val_class_fde_ref:
9829 return DW_FORM_data;
9830 case dw_val_class_lbl_id:
9831 return (AT_index (a) == NOT_INDEXED
9832 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9833 case dw_val_class_lineptr:
9834 case dw_val_class_macptr:
9835 case dw_val_class_loclistsptr:
9836 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9837 case dw_val_class_str:
9838 return AT_string_form (a);
9839 case dw_val_class_file:
9840 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9841 {
9842 case 1:
9843 return DW_FORM_data1;
9844 case 2:
9845 return DW_FORM_data2;
9846 case 4:
9847 return DW_FORM_data4;
9848 default:
9849 gcc_unreachable ();
9850 }
9851
9852 case dw_val_class_data8:
9853 return DW_FORM_data8;
9854
9855 case dw_val_class_high_pc:
9856 switch (DWARF2_ADDR_SIZE)
9857 {
9858 case 1:
9859 return DW_FORM_data1;
9860 case 2:
9861 return DW_FORM_data2;
9862 case 4:
9863 return DW_FORM_data4;
9864 case 8:
9865 return DW_FORM_data8;
9866 default:
9867 gcc_unreachable ();
9868 }
9869
9870 case dw_val_class_discr_value:
9871 return (a->dw_attr_val.v.val_discr_value.pos
9872 ? DW_FORM_udata
9873 : DW_FORM_sdata);
9874 case dw_val_class_discr_list:
9875 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9876 {
9877 case 1:
9878 return DW_FORM_block1;
9879 case 2:
9880 return DW_FORM_block2;
9881 case 4:
9882 return DW_FORM_block4;
9883 default:
9884 gcc_unreachable ();
9885 }
9886
9887 default:
9888 gcc_unreachable ();
9889 }
9890 }
9891
9892 /* Output the encoding of an attribute value. */
9893
9894 static void
9895 output_value_format (dw_attr_node *a)
9896 {
9897 enum dwarf_form form = value_format (a);
9898
9899 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9900 }
9901
9902 /* Given a die and id, produce the appropriate abbreviations. */
9903
9904 static void
9905 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9906 {
9907 unsigned ix;
9908 dw_attr_node *a_attr;
9909
9910 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9911 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9912 dwarf_tag_name (abbrev->die_tag));
9913
9914 if (abbrev->die_child != NULL)
9915 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9916 else
9917 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9918
9919 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9920 {
9921 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9922 dwarf_attr_name (a_attr->dw_attr));
9923 output_value_format (a_attr);
9924 if (value_format (a_attr) == DW_FORM_implicit_const)
9925 {
9926 if (AT_class (a_attr) == dw_val_class_file_implicit)
9927 {
9928 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9929 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9930 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9931 }
9932 else
9933 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9934 }
9935 }
9936
9937 dw2_asm_output_data (1, 0, NULL);
9938 dw2_asm_output_data (1, 0, NULL);
9939 }
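
/* As a concrete example, assuming an abbreviation for a childless
DW_TAG_base_type with DW_AT_byte_size (DW_FORM_data1), DW_AT_encoding
(DW_FORM_data1) and DW_AT_name (DW_FORM_strp), the bytes emitted above
are roughly
<code> 0x24 0x00 0x0b 0x0b 0x3e 0x0b 0x03 0x0e 0x00 0x00
i.e. the abbrev code, the tag, DW_children_no, the attribute/form pairs
and the two terminating zeros.  */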
9940
9941
9942 /* Output the .debug_abbrev section which defines the DIE abbreviation
9943 table. */
9944
9945 static void
9946 output_abbrev_section (void)
9947 {
9948 unsigned int abbrev_id;
9949 dw_die_ref abbrev;
9950
9951 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9952 if (abbrev_id != 0)
9953 output_die_abbrevs (abbrev_id, abbrev);
9954
9955 /* Terminate the table. */
9956 dw2_asm_output_data (1, 0, NULL);
9957 }
9958
9959 /* Return a new location list, given the begin and end range, and the
9960 expression. */
9961
9962 static inline dw_loc_list_ref
9963 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9964 const char *end, var_loc_view vend,
9965 const char *section)
9966 {
9967 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9968
9969 retlist->begin = begin;
9970 retlist->begin_entry = NULL;
9971 retlist->end = end;
9972 retlist->expr = expr;
9973 retlist->section = section;
9974 retlist->vbegin = vbegin;
9975 retlist->vend = vend;
9976
9977 return retlist;
9978 }
9979
9980 /* Return true iff there's any nonzero view number in the loc list. */
9981
9982 static bool
9983 loc_list_has_views (dw_loc_list_ref list)
9984 {
9985 if (!debug_variable_location_views)
9986 return false;
9987
9988 for (dw_loc_list_ref loc = list;
9989 loc != NULL; loc = loc->dw_loc_next)
9990 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9991 return true;
9992
9993 return false;
9994 }
9995
9996 /* Generate a new internal symbol for this location list node, if it
9997 hasn't got one yet. */
9998
9999 static inline void
10000 gen_llsym (dw_loc_list_ref list)
10001 {
10002 gcc_assert (!list->ll_symbol);
10003 list->ll_symbol = gen_internal_sym ("LLST");
10004
10005 if (!loc_list_has_views (list))
10006 return;
10007
10008 if (dwarf2out_locviews_in_attribute ())
10009 {
10010 /* Use the same label_num for the view list. */
10011 label_num--;
10012 list->vl_symbol = gen_internal_sym ("LVUS");
10013 }
10014 else
10015 list->vl_symbol = list->ll_symbol;
10016 }
10017
10018 /* Generate a symbol for the list, but only if we really want to emit
10019 it as a list. */
10020
10021 static inline void
10022 maybe_gen_llsym (dw_loc_list_ref list)
10023 {
10024 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10025 return;
10026
10027 gen_llsym (list);
10028 }
10029
10030 /* Determine whether or not to skip loc_list entry CURR. If we're not
10031 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10032 representation in *SIZEP. */
10033
10034 static bool
10035 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = 0)
10036 {
10037 /* Don't output an entry that starts and ends at the same address. */
10038 if (strcmp (curr->begin, curr->end) == 0
10039 && curr->vbegin == curr->vend && !curr->force)
10040 return true;
10041
10042 unsigned long size = size_of_locs (curr->expr);
10043
10044 /* If the expression is too large, drop it on the floor. We could
10045 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10046 in the expression, but >= 64KB expressions for a single value
10047 in a single range are unlikely to be very useful. */
10048 if (dwarf_version < 5 && size > 0xffff)
10049 return true;
10050
10051 if (sizep)
10052 *sizep = size;
10053
10054 return false;
10055 }
10056
10057 /* Output a view pair loclist entry for CURR, if it requires one. */
10058
10059 static void
10060 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10061 {
10062 if (!dwarf2out_locviews_in_loclist ())
10063 return;
10064
10065 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10066 return;
10067
10068 #ifdef DW_LLE_view_pair
10069 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10070
10071 if (dwarf2out_as_locview_support)
10072 {
10073 if (ZERO_VIEW_P (curr->vbegin))
10074 dw2_asm_output_data_uleb128 (0, "Location view begin");
10075 else
10076 {
10077 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10078 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10079 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10080 }
10081
10082 if (ZERO_VIEW_P (curr->vend))
10083 dw2_asm_output_data_uleb128 (0, "Location view end");
10084 else
10085 {
10086 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10087 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10088 dw2_asm_output_symname_uleb128 (label, "Location view end");
10089 }
10090 }
10091 else
10092 {
10093 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10094 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10095 }
10096 #endif /* DW_LLE_view_pair */
10097
10098 return;
10099 }
10100
10101 /* Output the location list given to us. */
10102
10103 static void
10104 output_loc_list (dw_loc_list_ref list_head)
10105 {
10106 int vcount = 0, lcount = 0;
10107
10108 if (list_head->emitted)
10109 return;
10110 list_head->emitted = true;
10111
10112 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10113 {
10114 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10115
10116 for (dw_loc_list_ref curr = list_head; curr != NULL;
10117 curr = curr->dw_loc_next)
10118 {
10119 if (skip_loc_list_entry (curr))
10120 continue;
10121
10122 vcount++;
10123
10124 /* ?? dwarf_split_debug_info? */
10125 if (dwarf2out_as_locview_support)
10126 {
10127 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10128
10129 if (!ZERO_VIEW_P (curr->vbegin))
10130 {
10131 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10132 dw2_asm_output_symname_uleb128 (label,
10133 "View list begin (%s)",
10134 list_head->vl_symbol);
10135 }
10136 else
10137 dw2_asm_output_data_uleb128 (0,
10138 "View list begin (%s)",
10139 list_head->vl_symbol);
10140
10141 if (!ZERO_VIEW_P (curr->vend))
10142 {
10143 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10144 dw2_asm_output_symname_uleb128 (label,
10145 "View list end (%s)",
10146 list_head->vl_symbol);
10147 }
10148 else
10149 dw2_asm_output_data_uleb128 (0,
10150 "View list end (%s)",
10151 list_head->vl_symbol);
10152 }
10153 else
10154 {
10155 dw2_asm_output_data_uleb128 (curr->vbegin,
10156 "View list begin (%s)",
10157 list_head->vl_symbol);
10158 dw2_asm_output_data_uleb128 (curr->vend,
10159 "View list end (%s)",
10160 list_head->vl_symbol);
10161 }
10162 }
10163 }
10164
10165 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10166
10167 const char *last_section = NULL;
10168 const char *base_label = NULL;
10169
10170 /* Walk the location list, and output each range + expression. */
10171 for (dw_loc_list_ref curr = list_head; curr != NULL;
10172 curr = curr->dw_loc_next)
10173 {
10174 unsigned long size;
10175
10176 /* Skip this entry? If we skip it here, we must skip it in the
10177 view list above as well. */
10178 if (skip_loc_list_entry (curr, &size))
10179 continue;
10180
10181 lcount++;
10182
10183 if (dwarf_version >= 5)
10184 {
10185 if (dwarf_split_debug_info)
10186 {
10187 dwarf2out_maybe_output_loclist_view_pair (curr);
10188 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10189 uleb128 index into .debug_addr and uleb128 length. */
10190 dw2_asm_output_data (1, DW_LLE_startx_length,
10191 "DW_LLE_startx_length (%s)",
10192 list_head->ll_symbol);
10193 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10194 "Location list range start index "
10195 "(%s)", curr->begin);
10196 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10197 For that case we probably need to emit DW_LLE_startx_endx,
10198 but we'd need 2 .debug_addr entries rather than just one. */
10199 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10200 "Location list length (%s)",
10201 list_head->ll_symbol);
10202 }
10203 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10204 {
10205 dwarf2out_maybe_output_loclist_view_pair (curr);
10206 /* If all code is in .text section, the base address is
10207 already provided by the CU attributes. Use
10208 DW_LLE_offset_pair where both addresses are uleb128 encoded
10209 offsets against that base. */
10210 dw2_asm_output_data (1, DW_LLE_offset_pair,
10211 "DW_LLE_offset_pair (%s)",
10212 list_head->ll_symbol);
10213 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10214 "Location list begin address (%s)",
10215 list_head->ll_symbol);
10216 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10217 "Location list end address (%s)",
10218 list_head->ll_symbol);
10219 }
10220 else if (HAVE_AS_LEB128)
10221 {
10222 /* Otherwise, find out how many consecutive entries could share
10223 the same base entry. If just one, emit DW_LLE_start_length,
10224 otherwise emit DW_LLE_base_address for the base address
10225 followed by a series of DW_LLE_offset_pair. */
10226 if (last_section == NULL || curr->section != last_section)
10227 {
10228 dw_loc_list_ref curr2;
10229 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10230 curr2 = curr2->dw_loc_next)
10231 {
10232 if (strcmp (curr2->begin, curr2->end) == 0
10233 && !curr2->force)
10234 continue;
10235 break;
10236 }
10237 if (curr2 == NULL || curr->section != curr2->section)
10238 last_section = NULL;
10239 else
10240 {
10241 last_section = curr->section;
10242 base_label = curr->begin;
10243 dw2_asm_output_data (1, DW_LLE_base_address,
10244 "DW_LLE_base_address (%s)",
10245 list_head->ll_symbol);
10246 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10247 "Base address (%s)",
10248 list_head->ll_symbol);
10249 }
10250 }
10251 /* Only one entry with the same base address. Use
10252 DW_LLE_start_length with absolute address and uleb128
10253 length. */
10254 if (last_section == NULL)
10255 {
10256 dwarf2out_maybe_output_loclist_view_pair (curr);
10257 dw2_asm_output_data (1, DW_LLE_start_length,
10258 "DW_LLE_start_length (%s)",
10259 list_head->ll_symbol);
10260 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10261 "Location list begin address (%s)",
10262 list_head->ll_symbol);
10263 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10264 "Location list length "
10265 "(%s)", list_head->ll_symbol);
10266 }
10267 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10268 DW_LLE_base_address. */
10269 else
10270 {
10271 dwarf2out_maybe_output_loclist_view_pair (curr);
10272 dw2_asm_output_data (1, DW_LLE_offset_pair,
10273 "DW_LLE_offset_pair (%s)",
10274 list_head->ll_symbol);
10275 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10276 "Location list begin address "
10277 "(%s)", list_head->ll_symbol);
10278 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10279 "Location list end address "
10280 "(%s)", list_head->ll_symbol);
10281 }
10282 }
10283 /* The assembler does not support the .uleb128 directive. Emit
10284 DW_LLE_start_end with a pair of absolute addresses. */
10285 else
10286 {
10287 dwarf2out_maybe_output_loclist_view_pair (curr);
10288 dw2_asm_output_data (1, DW_LLE_start_end,
10289 "DW_LLE_start_end (%s)",
10290 list_head->ll_symbol);
10291 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10292 "Location list begin address (%s)",
10293 list_head->ll_symbol);
10294 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10295 "Location list end address (%s)",
10296 list_head->ll_symbol);
10297 }
10298 }
10299 else if (dwarf_split_debug_info)
10300 {
10301 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10302 and 4 byte length. */
10303 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10304 "Location list start/length entry (%s)",
10305 list_head->ll_symbol);
10306 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10307 "Location list range start index (%s)",
10308 curr->begin);
10309 /* The length field is 4 bytes. If we ever need to support
10310 an 8-byte length, we can add a new DW_LLE code or fall back
10311 to DW_LLE_GNU_start_end_entry. */
10312 dw2_asm_output_delta (4, curr->end, curr->begin,
10313 "Location list range length (%s)",
10314 list_head->ll_symbol);
10315 }
10316 else if (!have_multiple_function_sections)
10317 {
10318 /* Pair of relative addresses against start of text section. */
10319 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10320 "Location list begin address (%s)",
10321 list_head->ll_symbol);
10322 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10323 "Location list end address (%s)",
10324 list_head->ll_symbol);
10325 }
10326 else
10327 {
10328 /* Pair of absolute addresses. */
10329 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10330 "Location list begin address (%s)",
10331 list_head->ll_symbol);
10332 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10333 "Location list end address (%s)",
10334 list_head->ll_symbol);
10335 }
10336
10337 /* Output the block length for this list of location operations. */
10338 if (dwarf_version >= 5)
10339 dw2_asm_output_data_uleb128 (size, "Location expression size");
10340 else
10341 {
10342 gcc_assert (size <= 0xffff);
10343 dw2_asm_output_data (2, size, "Location expression size");
10344 }
10345
10346 output_loc_sequence (curr->expr, -1);
10347 }
10348
10349 /* And finally list termination. */
10350 if (dwarf_version >= 5)
10351 dw2_asm_output_data (1, DW_LLE_end_of_list,
10352 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10353 else if (dwarf_split_debug_info)
10354 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10355 "Location list terminator (%s)",
10356 list_head->ll_symbol);
10357 else
10358 {
10359 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10360 "Location list terminator begin (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10363 "Location list terminator end (%s)",
10364 list_head->ll_symbol);
10365 }
10366
10367 gcc_assert (!list_head->vl_symbol
10368 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10369 }
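
/* As a rough example, for a non-split DWARF 5 compile with all code in
one text section, a one-range list typically comes out above as
DW_LLE_offset_pair, two uleb128 offsets for the range's begin and end
relative to the section, a uleb128 expression size, the location
expression bytes, and a final DW_LLE_end_of_list; the referring DIE
points at it via output_loc_list_offset below.  */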
10370
10371 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10372 section. Emit a relocated reference if val_entry is NULL, otherwise,
10373 emit an indirect reference. */
10374
10375 static void
10376 output_range_list_offset (dw_attr_node *a)
10377 {
10378 const char *name = dwarf_attr_name (a->dw_attr);
10379
10380 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10381 {
10382 if (dwarf_version >= 5)
10383 {
10384 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10385 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10386 debug_ranges_section, "%s", name);
10387 }
10388 else
10389 {
10390 char *p = strchr (ranges_section_label, '\0');
10391 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10392 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10393 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10394 debug_ranges_section, "%s", name);
10395 *p = '\0';
10396 }
10397 }
10398 else if (dwarf_version >= 5)
10399 {
10400 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10401 gcc_assert (rnglist_idx);
10402 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10403 }
10404 else
10405 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10406 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10407 "%s (offset from %s)", name, ranges_section_label);
10408 }
10409
10410 /* Output the offset into the debug_loc section. */
10411
10412 static void
10413 output_loc_list_offset (dw_attr_node *a)
10414 {
10415 char *sym = AT_loc_list (a)->ll_symbol;
10416
10417 gcc_assert (sym);
10418 if (!dwarf_split_debug_info)
10419 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10420 "%s", dwarf_attr_name (a->dw_attr));
10421 else if (dwarf_version >= 5)
10422 {
10423 gcc_assert (AT_loc_list (a)->num_assigned);
10424 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10425 dwarf_attr_name (a->dw_attr),
10426 sym);
10427 }
10428 else
10429 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10430 "%s", dwarf_attr_name (a->dw_attr));
10431 }
10432
10433 /* Output the view list offset into the debug_loc section. */
10434
10435 static void
10436 output_view_list_offset (dw_attr_node *a)
10437 {
10438 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10439
10440 gcc_assert (sym);
10441 if (dwarf_split_debug_info)
10442 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10443 "%s", dwarf_attr_name (a->dw_attr));
10444 else
10445 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10446 "%s", dwarf_attr_name (a->dw_attr));
10447 }
10448
10449 /* Output an attribute's index or value appropriately. */
10450
10451 static void
10452 output_attr_index_or_value (dw_attr_node *a)
10453 {
10454 const char *name = dwarf_attr_name (a->dw_attr);
10455
10456 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10457 {
10458 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10459 return;
10460 }
10461 switch (AT_class (a))
10462 {
10463 case dw_val_class_addr:
10464 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10465 break;
10466 case dw_val_class_high_pc:
10467 case dw_val_class_lbl_id:
10468 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10469 break;
10470 default:
10471 gcc_unreachable ();
10472 }
10473 }
10474
10475 /* Output a type signature. */
10476
10477 static inline void
10478 output_signature (const char *sig, const char *name)
10479 {
10480 int i;
10481
10482 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10483 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10484 }
10485
10486 /* Output a discriminant value. */
10487
10488 static inline void
10489 output_discr_value (dw_discr_value *discr_value, const char *name)
10490 {
10491 if (discr_value->pos)
10492 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10493 else
10494 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10495 }
10496
10497 /* Output the DIE and its attributes. Called recursively to generate
10498 the definitions of each child DIE. */
10499
10500 static void
10501 output_die (dw_die_ref die)
10502 {
10503 dw_attr_node *a;
10504 dw_die_ref c;
10505 unsigned long size;
10506 unsigned ix;
10507
10508 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10509 (unsigned long)die->die_offset,
10510 dwarf_tag_name (die->die_tag));
10511
10512 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10513 {
10514 const char *name = dwarf_attr_name (a->dw_attr);
10515
10516 switch (AT_class (a))
10517 {
10518 case dw_val_class_addr:
10519 output_attr_index_or_value (a);
10520 break;
10521
10522 case dw_val_class_offset:
10523 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10524 "%s", name);
10525 break;
10526
10527 case dw_val_class_range_list:
10528 output_range_list_offset (a);
10529 break;
10530
10531 case dw_val_class_loc:
10532 size = size_of_locs (AT_loc (a));
10533
10534 /* Output the block length for this list of location operations. */
10535 if (dwarf_version >= 4)
10536 dw2_asm_output_data_uleb128 (size, "%s", name);
10537 else
10538 dw2_asm_output_data (constant_size (size), size, "%s", name);
10539
10540 output_loc_sequence (AT_loc (a), -1);
10541 break;
10542
10543 case dw_val_class_const:
10544 /* ??? It would be slightly more efficient to use a scheme like the one
10545 used for unsigned constants below, but gdb 4.x does not sign
10546 extend. Gdb 5.x does sign extend. */
10547 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10548 break;
10549
10550 case dw_val_class_unsigned_const:
10551 {
10552 int csize = constant_size (AT_unsigned (a));
10553 if (dwarf_version == 3
10554 && a->dw_attr == DW_AT_data_member_location
10555 && csize >= 4)
10556 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10557 else
10558 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10559 }
10560 break;
10561
10562 case dw_val_class_symview:
10563 {
10564 int vsize;
10565 if (symview_upper_bound <= 0xff)
10566 vsize = 1;
10567 else if (symview_upper_bound <= 0xffff)
10568 vsize = 2;
10569 else if (symview_upper_bound <= 0xffffffff)
10570 vsize = 4;
10571 else
10572 vsize = 8;
10573 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10574 "%s", name);
10575 }
10576 break;
10577
10578 case dw_val_class_const_implicit:
10579 if (flag_debug_asm)
10580 fprintf (asm_out_file, "\t\t\t%s %s ("
10581 HOST_WIDE_INT_PRINT_DEC ")\n",
10582 ASM_COMMENT_START, name, AT_int (a));
10583 break;
10584
10585 case dw_val_class_unsigned_const_implicit:
10586 if (flag_debug_asm)
10587 fprintf (asm_out_file, "\t\t\t%s %s ("
10588 HOST_WIDE_INT_PRINT_HEX ")\n",
10589 ASM_COMMENT_START, name, AT_unsigned (a));
10590 break;
10591
10592 case dw_val_class_const_double:
10593 {
10594 unsigned HOST_WIDE_INT first, second;
10595
10596 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10597 dw2_asm_output_data (1,
10598 HOST_BITS_PER_DOUBLE_INT
10599 / HOST_BITS_PER_CHAR,
10600 NULL);
10601
10602 if (WORDS_BIG_ENDIAN)
10603 {
10604 first = a->dw_attr_val.v.val_double.high;
10605 second = a->dw_attr_val.v.val_double.low;
10606 }
10607 else
10608 {
10609 first = a->dw_attr_val.v.val_double.low;
10610 second = a->dw_attr_val.v.val_double.high;
10611 }
10612
10613 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10614 first, "%s", name);
10615 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10616 second, NULL);
10617 }
10618 break;
10619
10620 case dw_val_class_wide_int:
10621 {
10622 int i;
10623 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10624 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10625 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10626 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10627 * l, NULL);
10628
10629 if (WORDS_BIG_ENDIAN)
10630 for (i = len - 1; i >= 0; --i)
10631 {
10632 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10633 "%s", name);
10634 name = "";
10635 }
10636 else
10637 for (i = 0; i < len; ++i)
10638 {
10639 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10640 "%s", name);
10641 name = "";
10642 }
10643 }
10644 break;
10645
10646 case dw_val_class_vec:
10647 {
10648 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10649 unsigned int len = a->dw_attr_val.v.val_vec.length;
10650 unsigned int i;
10651 unsigned char *p;
10652
10653 dw2_asm_output_data (constant_size (len * elt_size),
10654 len * elt_size, "%s", name);
10655 if (elt_size > sizeof (HOST_WIDE_INT))
10656 {
10657 elt_size /= 2;
10658 len *= 2;
10659 }
10660 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10661 i < len;
10662 i++, p += elt_size)
10663 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10664 "fp or vector constant word %u", i);
10665 break;
10666 }
10667
10668 case dw_val_class_flag:
10669 if (dwarf_version >= 4)
10670 {
10671 /* Currently all add_AT_flag calls pass in 1 as last argument,
10672 so DW_FORM_flag_present can be used. If that ever changes,
10673 we'll need to use DW_FORM_flag and have some optimization
10674 in build_abbrev_table that will change those to
10675 DW_FORM_flag_present if it is set to 1 in all DIEs using
10676 the same abbrev entry. */
10677 gcc_assert (AT_flag (a) == 1);
10678 if (flag_debug_asm)
10679 fprintf (asm_out_file, "\t\t\t%s %s\n",
10680 ASM_COMMENT_START, name);
10681 break;
10682 }
10683 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10684 break;
10685
10686 case dw_val_class_loc_list:
10687 output_loc_list_offset (a);
10688 break;
10689
10690 case dw_val_class_view_list:
10691 output_view_list_offset (a);
10692 break;
10693
10694 case dw_val_class_die_ref:
10695 if (AT_ref_external (a))
10696 {
10697 if (AT_ref (a)->comdat_type_p)
10698 {
10699 comdat_type_node *type_node
10700 = AT_ref (a)->die_id.die_type_node;
10701
10702 gcc_assert (type_node);
10703 output_signature (type_node->signature, name);
10704 }
10705 else
10706 {
10707 const char *sym = AT_ref (a)->die_id.die_symbol;
10708 int size;
10709
10710 gcc_assert (sym);
10711 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10712 length, whereas in DWARF3 it's always sized as an
10713 offset. */
10714 if (dwarf_version == 2)
10715 size = DWARF2_ADDR_SIZE;
10716 else
10717 size = DWARF_OFFSET_SIZE;
10718 /* ??? We cannot unconditionally output die_offset if
10719 non-zero - others might create references to those
10720 DIEs via symbols.
10721 And we do not clear its DIE offset after outputting it
10722 (and the label refers to the actual DIE, not to the
10723 DWARF CU header, which is what label + offset would have
10724 to refer to for that to be correct).
10725 ??? This is the reason for the with_offset flag. */
10726 if (AT_ref (a)->with_offset)
10727 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10728 debug_info_section, "%s", name);
10729 else
10730 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10731 name);
10732 }
10733 }
10734 else
10735 {
10736 gcc_assert (AT_ref (a)->die_offset);
10737 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10738 "%s", name);
10739 }
10740 break;
10741
10742 case dw_val_class_fde_ref:
10743 {
10744 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10745
10746 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10747 a->dw_attr_val.v.val_fde_index * 2);
10748 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10749 "%s", name);
10750 }
10751 break;
10752
10753 case dw_val_class_vms_delta:
10754 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10755 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10756 AT_vms_delta2 (a), AT_vms_delta1 (a),
10757 "%s", name);
10758 #else
10759 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10760 AT_vms_delta2 (a), AT_vms_delta1 (a),
10761 "%s", name);
10762 #endif
10763 break;
10764
10765 case dw_val_class_lbl_id:
10766 output_attr_index_or_value (a);
10767 break;
10768
10769 case dw_val_class_lineptr:
10770 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10771 debug_line_section, "%s", name);
10772 break;
10773
10774 case dw_val_class_macptr:
10775 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10776 debug_macinfo_section, "%s", name);
10777 break;
10778
10779 case dw_val_class_loclistsptr:
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10781 debug_loc_section, "%s", name);
10782 break;
10783
10784 case dw_val_class_str:
10785 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10786 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10787 a->dw_attr_val.v.val_str->label,
10788 debug_str_section,
10789 "%s: \"%s\"", name, AT_string (a));
10790 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10791 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10792 a->dw_attr_val.v.val_str->label,
10793 debug_line_str_section,
10794 "%s: \"%s\"", name, AT_string (a));
10795 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10796 dw2_asm_output_data_uleb128 (AT_index (a),
10797 "%s: \"%s\"", name, AT_string (a));
10798 else
10799 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10800 break;
10801
10802 case dw_val_class_file:
10803 {
10804 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10805
10806 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10807 a->dw_attr_val.v.val_file->filename);
10808 break;
10809 }
10810
10811 case dw_val_class_file_implicit:
10812 if (flag_debug_asm)
10813 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10814 ASM_COMMENT_START, name,
10815 maybe_emit_file (a->dw_attr_val.v.val_file),
10816 a->dw_attr_val.v.val_file->filename);
10817 break;
10818
10819 case dw_val_class_data8:
10820 {
10821 int i;
10822
10823 for (i = 0; i < 8; i++)
10824 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10825 i == 0 ? "%s" : NULL, name);
10826 break;
10827 }
10828
10829 case dw_val_class_high_pc:
10830 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10831 get_AT_low_pc (die), "DW_AT_high_pc");
10832 break;
10833
10834 case dw_val_class_discr_value:
10835 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10836 break;
10837
10838 case dw_val_class_discr_list:
10839 {
10840 dw_discr_list_ref list = AT_discr_list (a);
10841 const int size = size_of_discr_list (list);
10842
10843 /* This is a block, so output its length first. */
10844 dw2_asm_output_data (constant_size (size), size,
10845 "%s: block size", name);
10846
10847 for (; list != NULL; list = list->dw_discr_next)
10848 {
10849 /* One byte for the discriminant value descriptor, and then as
10850 many LEB128 numbers as required. */
10851 if (list->dw_discr_range)
10852 dw2_asm_output_data (1, DW_DSC_range,
10853 "%s: DW_DSC_range", name);
10854 else
10855 dw2_asm_output_data (1, DW_DSC_label,
10856 "%s: DW_DSC_label", name);
10857
10858 output_discr_value (&list->dw_discr_lower_bound, name);
10859 if (list->dw_discr_range)
10860 output_discr_value (&list->dw_discr_upper_bound, name);
10861 }
10862 break;
10863 }
10864
10865 default:
10866 gcc_unreachable ();
10867 }
10868 }
10869
10870 FOR_EACH_CHILD (die, c, output_die (c));
10871
10872 /* Add null byte to terminate sibling list. */
10873 if (die->die_child != NULL)
10874 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10875 (unsigned long) die->die_offset);
10876 }
10877
10878 /* Output the dwarf version number. */
10879
10880 static void
10881 output_dwarf_version ()
10882 {
10883 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10884 views in loclist. That will change eventually. */
10885 if (dwarf_version == 6)
10886 {
10887 static bool once;
10888 if (!once)
10889 {
10890 warning (0,
10891 "-gdwarf-6 is output as version 5 with incompatibilities");
10892 once = true;
10893 }
10894 dw2_asm_output_data (2, 5, "DWARF version number");
10895 }
10896 else
10897 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10898 }
10899
10900 /* Output the compilation unit that appears at the beginning of the
10901 .debug_info section, and precedes the DIE descriptions. */
10902
10903 static void
10904 output_compilation_unit_header (enum dwarf_unit_type ut)
10905 {
10906 if (!XCOFF_DEBUGGING_INFO)
10907 {
10908 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10909 dw2_asm_output_data (4, 0xffffffff,
10910 "Initial length escape value indicating 64-bit DWARF extension");
10911 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10912 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10913 "Length of Compilation Unit Info");
10914 }
10915
10916 output_dwarf_version ();
10917 if (dwarf_version >= 5)
10918 {
10919 const char *name;
10920 switch (ut)
10921 {
10922 case DW_UT_compile: name = "DW_UT_compile"; break;
10923 case DW_UT_type: name = "DW_UT_type"; break;
10924 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10925 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10926 default: gcc_unreachable ();
10927 }
10928 dw2_asm_output_data (1, ut, "%s", name);
10929 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10930 }
10931 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10932 debug_abbrev_section,
10933 "Offset Into Abbrev. Section");
10934 if (dwarf_version < 5)
10935 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10936 }
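
/* Illustrative sketch added for exposition; it is not part of GCC and the
   example_* name is made up.  It spells out, byte by byte, the 32-bit
   (DWARF_OFFSET_SIZE == 4) compilation unit header that
   output_compilation_unit_header emits above, without the optional 64-bit
   initial length escape.  */

static unsigned int
example_cu_header_size (int version)
{
  unsigned int size = 4		/* unit_length */
		      + 2;	/* version */
  if (version >= 5)
    size += 1			/* unit_type (DW_UT_compile etc.)  */
	    + 1			/* address_size */
	    + 4;		/* debug_abbrev_offset */
  else
    size += 4			/* debug_abbrev_offset */
	    + 1;		/* address_size */
  return size;			/* 11 bytes for DWARF 2-4, 12 for DWARF 5.  */
}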
10937
10938 /* Output the compilation unit DIE and its children. */
10939
10940 static void
10941 output_comp_unit (dw_die_ref die, int output_if_empty,
10942 const unsigned char *dwo_id)
10943 {
10944 const char *secname, *oldsym;
10945 char *tmp;
10946
10947   /* Unless we are outputting the main CU, we may throw away empty ones.  */
10948 if (!output_if_empty && die->die_child == NULL)
10949 return;
10950
10951 /* Even if there are no children of this DIE, we must output the information
10952 about the compilation unit. Otherwise, on an empty translation unit, we
10953 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10954 will then complain when examining the file. First mark all the DIEs in
10955 this CU so we know which get local refs. */
10956 mark_dies (die);
10957
10958 external_ref_hash_type *extern_map = optimize_external_refs (die);
10959
10960   /* For now, optimize only the main CU; in order to optimize the rest
10961      we'd need to see all of them earlier.  Leave the rest for post-linking
10962      tools like DWZ.  */
10963 if (die == comp_unit_die ())
10964 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10965
10966 build_abbrev_table (die, extern_map);
10967
10968 optimize_abbrev_table ();
10969
10970 delete extern_map;
10971
10972 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10973 next_die_offset = (dwo_id
10974 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10975 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10976 calc_die_sizes (die);
10977
10978 oldsym = die->die_id.die_symbol;
10979 if (oldsym && die->comdat_type_p)
10980 {
10981 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10982
10983 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10984 secname = tmp;
10985 die->die_id.die_symbol = NULL;
10986 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10987 }
10988 else
10989 {
10990 switch_to_section (debug_info_section);
10991 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10992 info_section_emitted = true;
10993 }
10994
10995   /* For LTO cross-unit DIE refs we want a symbol at the start of the
10996      debuginfo section, not on the CU DIE.  */
10997 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10998 {
10999 /* ??? No way to get visibility assembled without a decl. */
11000 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11001 get_identifier (oldsym), char_type_node);
11002 TREE_PUBLIC (decl) = true;
11003 TREE_STATIC (decl) = true;
11004 DECL_ARTIFICIAL (decl) = true;
11005 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11006 DECL_VISIBILITY_SPECIFIED (decl) = true;
11007 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11008 #ifdef ASM_WEAKEN_LABEL
11009 /* We prefer a .weak because that handles duplicates from duplicate
11010 archive members in a graceful way. */
11011 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11012 #else
11013 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11014 #endif
11015 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11016 }
11017
11018 /* Output debugging information. */
11019 output_compilation_unit_header (dwo_id
11020 ? DW_UT_split_compile : DW_UT_compile);
11021 if (dwarf_version >= 5)
11022 {
11023 if (dwo_id != NULL)
11024 for (int i = 0; i < 8; i++)
11025 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11026 }
11027 output_die (die);
11028
11029 /* Leave the marks on the main CU, so we can check them in
11030 output_pubnames. */
11031 if (oldsym)
11032 {
11033 unmark_dies (die);
11034 die->die_id.die_symbol = oldsym;
11035 }
11036 }
11037
11038 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11039 and .debug_pubtypes. This is configured per-target, but can be
11040 overridden by the -gpubnames or -gno-pubnames options. */
11041
11042 static inline bool
11043 want_pubnames (void)
11044 {
11045 if (debug_info_level <= DINFO_LEVEL_TERSE)
11046 return false;
11047 if (debug_generate_pub_sections != -1)
11048 return debug_generate_pub_sections;
11049 return targetm.want_debug_pub_sections;
11050 }
11051
11052 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11053
11054 static void
11055 add_AT_pubnames (dw_die_ref die)
11056 {
11057 if (want_pubnames ())
11058 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11059 }
11060
11061 /* Add a string attribute value to a skeleton DIE. */
11062
11063 static inline void
11064 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11065 const char *str)
11066 {
11067 dw_attr_node attr;
11068 struct indirect_string_node *node;
11069
11070 if (! skeleton_debug_str_hash)
11071 skeleton_debug_str_hash
11072 = hash_table<indirect_string_hasher>::create_ggc (10);
11073
11074 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11075 find_string_form (node);
11076 if (node->form == DW_FORM_GNU_str_index)
11077 node->form = DW_FORM_strp;
11078
11079 attr.dw_attr = attr_kind;
11080 attr.dw_attr_val.val_class = dw_val_class_str;
11081 attr.dw_attr_val.val_entry = NULL;
11082 attr.dw_attr_val.v.val_str = node;
11083 add_dwarf_attr (die, &attr);
11084 }
11085
11086 /* Helper function to generate top-level dies for skeleton debug_info and
11087 debug_types. */
11088
11089 static void
11090 add_top_level_skeleton_die_attrs (dw_die_ref die)
11091 {
11092 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11093 const char *comp_dir = comp_dir_string ();
11094
11095 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11096 if (comp_dir != NULL)
11097 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11098 add_AT_pubnames (die);
11099 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11100 }
11101
11102 /* Output skeleton debug sections that point to the dwo file. */
11103
11104 static void
11105 output_skeleton_debug_sections (dw_die_ref comp_unit,
11106 const unsigned char *dwo_id)
11107 {
11108 /* These attributes will be found in the full debug_info section. */
11109 remove_AT (comp_unit, DW_AT_producer);
11110 remove_AT (comp_unit, DW_AT_language);
11111
11112 switch_to_section (debug_skeleton_info_section);
11113 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11114
11115   /* Produce the skeleton compilation-unit header.  This one differs enough
11116      from a normal CU header that it's better not to call
11117      output_compilation_unit_header.  */
11118 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11119 dw2_asm_output_data (4, 0xffffffff,
11120 "Initial length escape value indicating 64-bit "
11121 "DWARF extension");
11122
11123 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11124 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11125 - DWARF_INITIAL_LENGTH_SIZE
11126 + size_of_die (comp_unit),
11127 "Length of Compilation Unit Info");
11128 output_dwarf_version ();
11129 if (dwarf_version >= 5)
11130 {
11131 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11132 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11133 }
11134 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11135 debug_skeleton_abbrev_section,
11136 "Offset Into Abbrev. Section");
11137 if (dwarf_version < 5)
11138 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11139 else
11140 for (int i = 0; i < 8; i++)
11141 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11142
11143 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11144 output_die (comp_unit);
11145
11146 /* Build the skeleton debug_abbrev section. */
11147 switch_to_section (debug_skeleton_abbrev_section);
11148 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11149
11150 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11151
11152 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11153 }
11154
11155 /* Output a comdat type unit DIE and its children. */
11156
11157 static void
11158 output_comdat_type_unit (comdat_type_node *node)
11159 {
11160 const char *secname;
11161 char *tmp;
11162 int i;
11163 #if defined (OBJECT_FORMAT_ELF)
11164 tree comdat_key;
11165 #endif
11166
11167 /* First mark all the DIEs in this CU so we know which get local refs. */
11168 mark_dies (node->root_die);
11169
11170 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11171
11172 build_abbrev_table (node->root_die, extern_map);
11173
11174 delete extern_map;
11175 extern_map = NULL;
11176
11177 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11178 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11179 calc_die_sizes (node->root_die);
11180
11181 #if defined (OBJECT_FORMAT_ELF)
11182 if (dwarf_version >= 5)
11183 {
11184 if (!dwarf_split_debug_info)
11185 secname = ".debug_info";
11186 else
11187 secname = ".debug_info.dwo";
11188 }
11189 else if (!dwarf_split_debug_info)
11190 secname = ".debug_types";
11191 else
11192 secname = ".debug_types.dwo";
11193
11194 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11195 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11196 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11197 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11198 comdat_key = get_identifier (tmp);
11199 targetm.asm_out.named_section (secname,
11200 SECTION_DEBUG | SECTION_LINKONCE,
11201 comdat_key);
11202 #else
11203 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11204 sprintf (tmp, (dwarf_version >= 5
11205 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11206 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11207 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11208 secname = tmp;
11209 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11210 #endif
11211
11212 /* Output debugging information. */
11213 output_compilation_unit_header (dwarf_split_debug_info
11214 ? DW_UT_split_type : DW_UT_type);
11215 output_signature (node->signature, "Type Signature");
11216 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11217 "Offset to Type DIE");
11218 output_die (node->root_die);
11219
11220 unmark_dies (node->root_die);
11221 }
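
/* Illustrative sketch, not part of GCC: the 32-bit type unit header written
   by output_comdat_type_unit above is the common unit header followed by the
   8-byte type signature and the offset of the type DIE within the unit.
   example_cu_header_size is the sketch defined earlier in this file.  */

static unsigned int
example_type_unit_header_size (int version)
{
  return example_cu_header_size (version)	/* 11 or 12 bytes.  */
	 + 8					/* type signature */
	 + 4;					/* offset to the type DIE */
}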
11222
11223 /* Return the DWARF2/3 pubname associated with a decl. */
11224
11225 static const char *
11226 dwarf2_name (tree decl, int scope)
11227 {
11228 if (DECL_NAMELESS (decl))
11229 return NULL;
11230 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11231 }
11232
11233 /* Add a new entry to .debug_pubnames if appropriate. */
11234
11235 static void
11236 add_pubname_string (const char *str, dw_die_ref die)
11237 {
11238 pubname_entry e;
11239
11240 e.die = die;
11241 e.name = xstrdup (str);
11242 vec_safe_push (pubname_table, e);
11243 }
11244
11245 static void
11246 add_pubname (tree decl, dw_die_ref die)
11247 {
11248 if (!want_pubnames ())
11249 return;
11250
11251 /* Don't add items to the table when we expect that the consumer will have
11252 just read the enclosing die. For example, if the consumer is looking at a
11253 class_member, it will either be inside the class already, or will have just
11254 looked up the class to find the member. Either way, searching the class is
11255 faster than searching the index. */
11256 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11257 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11258 {
11259 const char *name = dwarf2_name (decl, 1);
11260
11261 if (name)
11262 add_pubname_string (name, die);
11263 }
11264 }
11265
11266 /* Add an enumerator to the pubnames section. */
11267
11268 static void
11269 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11270 {
11271 pubname_entry e;
11272
11273 gcc_assert (scope_name);
11274 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11275 e.die = die;
11276 vec_safe_push (pubname_table, e);
11277 }
11278
11279 /* Add a new entry to .debug_pubtypes if appropriate. */
11280
11281 static void
11282 add_pubtype (tree decl, dw_die_ref die)
11283 {
11284 pubname_entry e;
11285
11286 if (!want_pubnames ())
11287 return;
11288
11289 if ((TREE_PUBLIC (decl)
11290 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11291 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11292 {
11293 tree scope = NULL;
11294 const char *scope_name = "";
11295 const char *sep = is_cxx () ? "::" : ".";
11296 const char *name;
11297
11298 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11299 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11300 {
11301 scope_name = lang_hooks.dwarf_name (scope, 1);
11302 if (scope_name != NULL && scope_name[0] != '\0')
11303 scope_name = concat (scope_name, sep, NULL);
11304 else
11305 scope_name = "";
11306 }
11307
11308 if (TYPE_P (decl))
11309 name = type_tag (decl);
11310 else
11311 name = lang_hooks.dwarf_name (decl, 1);
11312
11313 /* If we don't have a name for the type, there's no point in adding
11314 it to the table. */
11315 if (name != NULL && name[0] != '\0')
11316 {
11317 e.die = die;
11318 e.name = concat (scope_name, name, NULL);
11319 vec_safe_push (pubtype_table, e);
11320 }
11321
11322 /* Although it might be more consistent to add the pubinfo for the
11323 enumerators as their dies are created, they should only be added if the
11324 enum type meets the criteria above. So rather than re-check the parent
11325 enum type whenever an enumerator die is created, just output them all
11326 here. This isn't protected by the name conditional because anonymous
11327 enums don't have names. */
11328 if (die->die_tag == DW_TAG_enumeration_type)
11329 {
11330 dw_die_ref c;
11331
11332 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11333 }
11334 }
11335 }
11336
11337 /* Output a single entry in the pubnames table. */
11338
11339 static void
11340 output_pubname (dw_offset die_offset, pubname_entry *entry)
11341 {
11342 dw_die_ref die = entry->die;
11343 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11344
11345 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11346
11347 if (debug_generate_pub_sections == 2)
11348 {
11349 /* This logic follows gdb's method for determining the value of the flag
11350 byte. */
11351 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11352 switch (die->die_tag)
11353 {
11354 case DW_TAG_typedef:
11355 case DW_TAG_base_type:
11356 case DW_TAG_subrange_type:
11357 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11358 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11359 break;
11360 case DW_TAG_enumerator:
11361 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11362 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11363 if (!is_cxx ())
11364 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11365 break;
11366 case DW_TAG_subprogram:
11367 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11368 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11369 if (!is_ada ())
11370 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11371 break;
11372 case DW_TAG_constant:
11373 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11374 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11375 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11376 break;
11377 case DW_TAG_variable:
11378 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11379 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11380 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11381 break;
11382 case DW_TAG_namespace:
11383 case DW_TAG_imported_declaration:
11384 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11385 break;
11386 case DW_TAG_class_type:
11387 case DW_TAG_interface_type:
11388 case DW_TAG_structure_type:
11389 case DW_TAG_union_type:
11390 case DW_TAG_enumeration_type:
11391 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11392 if (!is_cxx ())
11393 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11394 break;
11395 default:
11396 /* An unusual tag. Leave the flag-byte empty. */
11397 break;
11398 }
11399 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11400 "GDB-index flags");
11401 }
11402
11403 dw2_asm_output_nstring (entry->name, -1, "external name");
11404 }
11405
11406
11407 /* Output the public names table used to speed up access to externally
11408 visible names; or the public types table used to find type definitions. */
11409
11410 static void
11411 output_pubnames (vec<pubname_entry, va_gc> *names)
11412 {
11413 unsigned i;
11414 unsigned long pubnames_length = size_of_pubnames (names);
11415 pubname_entry *pub;
11416
11417 if (!XCOFF_DEBUGGING_INFO)
11418 {
11419 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11420 dw2_asm_output_data (4, 0xffffffff,
11421 "Initial length escape value indicating 64-bit DWARF extension");
11422 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11423 "Pub Info Length");
11424 }
11425
11426 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11427 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11428
11429 if (dwarf_split_debug_info)
11430 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11431 debug_skeleton_info_section,
11432 "Offset of Compilation Unit Info");
11433 else
11434 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11435 debug_info_section,
11436 "Offset of Compilation Unit Info");
11437 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11438 "Compilation Unit Length");
11439
11440 FOR_EACH_VEC_ELT (*names, i, pub)
11441 {
11442 if (include_pubname_in_output (names, pub))
11443 {
11444 dw_offset die_offset = pub->die->die_offset;
11445
11446 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11447 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11448 gcc_assert (pub->die->die_mark);
11449
11450 /* If we're putting types in their own .debug_types sections,
11451 the .debug_pubtypes table will still point to the compile
11452 unit (not the type unit), so we want to use the offset of
11453 the skeleton DIE (if there is one). */
11454 if (pub->die->comdat_type_p && names == pubtype_table)
11455 {
11456 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11457
11458 if (type_node != NULL)
11459 die_offset = (type_node->skeleton_die != NULL
11460 ? type_node->skeleton_die->die_offset
11461 : comp_unit_die ()->die_offset);
11462 }
11463
11464 output_pubname (die_offset, pub);
11465 }
11466 }
11467
11468 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11469 }
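
/* Illustrative sketch, not part of GCC (the example_* name is made up and
   <string.h> is assumed): the byte size of one 32-bit .debug_pubnames or
   .debug_pubtypes set shaped like the output above, ignoring the optional
   one-byte GDB-index flag emitted when debug_generate_pub_sections == 2.
   The returned value is what the unit length field would hold, i.e. it
   excludes the initial 4-byte length itself.  */

static unsigned long
example_pubnames_set_size (const char *const *names, size_t n)
{
  unsigned long size = 2	/* version, always 2 for pubnames/pubtypes */
		       + 4	/* offset of the CU in .debug_info */
		       + 4;	/* length of that CU */
  for (size_t i = 0; i < n; i++)
    size += 4				/* DIE offset within the CU */
	    + strlen (names[i]) + 1;	/* NUL-terminated name */
  return size + 4;		/* terminating zero DIE offset */
}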
11470
11471 /* Output public names and types tables if necessary. */
11472
11473 static void
11474 output_pubtables (void)
11475 {
11476 if (!want_pubnames () || !info_section_emitted)
11477 return;
11478
11479 switch_to_section (debug_pubnames_section);
11480 output_pubnames (pubname_table);
11481 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11482 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11483 simply won't look for the section. */
11484 switch_to_section (debug_pubtypes_section);
11485 output_pubnames (pubtype_table);
11486 }
11487
11488
11489 /* Output the information that goes into the .debug_aranges table.
11490 Namely, define the beginning and ending address range of the
11491 text section generated for this compilation unit. */
11492
11493 static void
11494 output_aranges (void)
11495 {
11496 unsigned i;
11497 unsigned long aranges_length = size_of_aranges ();
11498
11499 if (!XCOFF_DEBUGGING_INFO)
11500 {
11501 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11502 dw2_asm_output_data (4, 0xffffffff,
11503 "Initial length escape value indicating 64-bit DWARF extension");
11504 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11505 "Length of Address Ranges Info");
11506 }
11507
11508 /* Version number for aranges is still 2, even up to DWARF5. */
11509 dw2_asm_output_data (2, 2, "DWARF aranges version");
11510 if (dwarf_split_debug_info)
11511 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11512 debug_skeleton_info_section,
11513 "Offset of Compilation Unit Info");
11514 else
11515 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11516 debug_info_section,
11517 "Offset of Compilation Unit Info");
11518 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11519 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11520
11521 /* We need to align to twice the pointer size here. */
11522 if (DWARF_ARANGES_PAD_SIZE)
11523 {
11524       /* Pad using 2-byte words so that padding is correct for any
11525 	 pointer size.  */
11526 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11527 2 * DWARF2_ADDR_SIZE);
11528 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11529 dw2_asm_output_data (2, 0, NULL);
11530 }
11531
11532   /* It is necessary not to output these entries if the sections were
11533      not used; in that case the length will be 0 and
11534      the address may end up as 0 if the section is discarded by ld
11535      --gc-sections, leaving an invalid (0, 0) entry that can be
11536      confused with the terminator.  */
11537 if (text_section_used)
11538 {
11539 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11540 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11541 text_section_label, "Length");
11542 }
11543 if (cold_text_section_used)
11544 {
11545 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11546 "Address");
11547 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11548 cold_text_section_label, "Length");
11549 }
11550
11551 if (have_multiple_function_sections)
11552 {
11553 unsigned fde_idx;
11554 dw_fde_ref fde;
11555
11556 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11557 {
11558 if (DECL_IGNORED_P (fde->decl))
11559 continue;
11560 if (!fde->in_std_section)
11561 {
11562 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11563 "Address");
11564 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11565 fde->dw_fde_begin, "Length");
11566 }
11567 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11568 {
11569 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11570 "Address");
11571 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11572 fde->dw_fde_second_begin, "Length");
11573 }
11574 }
11575 }
11576
11577 /* Output the terminator words. */
11578 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11579 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11580 }
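
/* Illustrative sketch, not part of GCC: the address/length tuples in a
   .debug_aranges set must start at an offset aligned to twice the address
   size, which is what the padding emitted above achieves.  HEADER_SIZE is
   the number of bytes already written for the set header; both parameter
   names are made up for this sketch.  */

static unsigned int
example_aranges_pad (unsigned int header_size, unsigned int addr_size)
{
  unsigned int align = 2 * addr_size;
  return (align - header_size % align) % align;
}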
11581
11582 /* Add a new entry to .debug_ranges. Return its index into
11583 ranges_table vector. */
11584
11585 static unsigned int
11586 add_ranges_num (int num, bool maybe_new_sec)
11587 {
11588 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11589 vec_safe_push (ranges_table, r);
11590 return vec_safe_length (ranges_table) - 1;
11591 }
11592
11593 /* Add a new entry to .debug_ranges corresponding to a block, or a
11594 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11595    this entry might be in a different section from the previous range.  */
11596
11597 static unsigned int
11598 add_ranges (const_tree block, bool maybe_new_sec)
11599 {
11600 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11601 }
11602
11603 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11604    chain, or a middle entry of a chain that will be directly referred to.  */
11605
11606 static void
11607 note_rnglist_head (unsigned int offset)
11608 {
11609 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11610 return;
11611 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11612 }
11613
11614 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11615 When using dwarf_split_debug_info, address attributes in dies destined
11616 for the final executable should be direct references--setting the
11617 parameter force_direct ensures this behavior. */
11618
11619 static void
11620 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11621 bool *added, bool force_direct)
11622 {
11623 unsigned int in_use = vec_safe_length (ranges_by_label);
11624 unsigned int offset;
11625 dw_ranges_by_label rbl = { begin, end };
11626 vec_safe_push (ranges_by_label, rbl);
11627 offset = add_ranges_num (-(int)in_use - 1, true);
11628 if (!*added)
11629 {
11630 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11631 *added = true;
11632 note_rnglist_head (offset);
11633 }
11634 }
11635
11636 /* Emit .debug_ranges section. */
11637
11638 static void
11639 output_ranges (void)
11640 {
11641 unsigned i;
11642 static const char *const start_fmt = "Offset %#x";
11643 const char *fmt = start_fmt;
11644 dw_ranges *r;
11645
11646 switch_to_section (debug_ranges_section);
11647 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11648 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11649 {
11650 int block_num = r->num;
11651
11652 if (block_num > 0)
11653 {
11654 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11655 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11656
11657 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11658 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11659
11660 /* If all code is in the text section, then the compilation
11661 unit base address defaults to DW_AT_low_pc, which is the
11662 base of the text section. */
11663 if (!have_multiple_function_sections)
11664 {
11665 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11666 text_section_label,
11667 fmt, i * 2 * DWARF2_ADDR_SIZE);
11668 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11669 text_section_label, NULL);
11670 }
11671
11672 /* Otherwise, the compilation unit base address is zero,
11673 which allows us to use absolute addresses, and not worry
11674 about whether the target supports cross-section
11675 arithmetic. */
11676 else
11677 {
11678 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11679 fmt, i * 2 * DWARF2_ADDR_SIZE);
11680 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11681 }
11682
11683 fmt = NULL;
11684 }
11685
11686 /* Negative block_num stands for an index into ranges_by_label. */
11687 else if (block_num < 0)
11688 {
11689 int lab_idx = - block_num - 1;
11690
11691 if (!have_multiple_function_sections)
11692 {
11693 gcc_unreachable ();
11694 #if 0
11695 /* If we ever use add_ranges_by_labels () for a single
11696 function section, all we have to do is to take out
11697 the #if 0 above. */
11698 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11699 (*ranges_by_label)[lab_idx].begin,
11700 text_section_label,
11701 fmt, i * 2 * DWARF2_ADDR_SIZE);
11702 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11703 (*ranges_by_label)[lab_idx].end,
11704 text_section_label, NULL);
11705 #endif
11706 }
11707 else
11708 {
11709 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11710 (*ranges_by_label)[lab_idx].begin,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11713 (*ranges_by_label)[lab_idx].end,
11714 NULL);
11715 }
11716 }
11717 else
11718 {
11719 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11720 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11721 fmt = start_fmt;
11722 }
11723 }
11724 }
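
/* Illustrative sketch, not part of GCC, with made-up addresses: one
   pre-DWARF5 .debug_ranges list for a 64-bit target is just a flat sequence
   of (begin, end) address pairs.  When all code sits in the text section the
   pairs are offsets from the CU base address (DW_AT_low_pc), as emitted
   above; a (0, 0) pair terminates the list.  */

static const unsigned long long example_range_list[] =
{
  0x0000, 0x0040,	/* First range: [base + 0x0, base + 0x40).  */
  0x0100, 0x0180,	/* Second range.  */
  0x0000, 0x0000	/* Terminator.  */
};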
11725
11726 /* Non-zero if .debug_line_str should be used for .debug_line section
11727 strings or strings that are likely shareable with those. */
11728 #define DWARF5_USE_DEBUG_LINE_STR \
11729 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11730 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11731 /* FIXME: there is no .debug_line_str.dwo section, \
11732 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11733 && !dwarf_split_debug_info)
11734
11735 /* Assign .debug_rnglists indexes. */
11736
11737 static void
11738 index_rnglists (void)
11739 {
11740 unsigned i;
11741 dw_ranges *r;
11742
11743 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11744 if (r->label)
11745 r->idx = rnglist_idx++;
11746 }
11747
11748 /* Emit .debug_rnglists section. */
11749
11750 static void
11751 output_rnglists (unsigned generation)
11752 {
11753 unsigned i;
11754 dw_ranges *r;
11755 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11756 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11757 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11758
11759 switch_to_section (debug_ranges_section);
11760 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11761 /* There are up to 4 unique ranges labels per generation.
11762 See also init_sections_and_labels. */
11763 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11764 2 + generation * 4);
11765 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11766 3 + generation * 4);
11767 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11768 dw2_asm_output_data (4, 0xffffffff,
11769 "Initial length escape value indicating "
11770 "64-bit DWARF extension");
11771 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11772 "Length of Range Lists");
11773 ASM_OUTPUT_LABEL (asm_out_file, l1);
11774 output_dwarf_version ();
11775 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11776 dw2_asm_output_data (1, 0, "Segment Size");
11777 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11778 about relocation sizes and primarily care about the size of .debug*
11779 sections in linked shared libraries and executables, then
11780 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11781 into it are usually larger than just DW_FORM_sec_offset offsets
11782 into the .debug_rnglists section. */
11783 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11784 "Offset Entry Count");
11785 if (dwarf_split_debug_info)
11786 {
11787 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11788 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11789 if (r->label)
11790 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11791 ranges_base_label, NULL);
11792 }
11793
11794 const char *lab = "";
11795 unsigned int len = vec_safe_length (ranges_table);
11796 const char *base = NULL;
11797 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11798 {
11799 int block_num = r->num;
11800
11801 if (r->label)
11802 {
11803 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11804 lab = r->label;
11805 }
11806 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11807 base = NULL;
11808 if (block_num > 0)
11809 {
11810 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11811 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11812
11813 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11814 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11815
11816 if (HAVE_AS_LEB128)
11817 {
11818 /* If all code is in the text section, then the compilation
11819 unit base address defaults to DW_AT_low_pc, which is the
11820 base of the text section. */
11821 if (!have_multiple_function_sections)
11822 {
11823 dw2_asm_output_data (1, DW_RLE_offset_pair,
11824 "DW_RLE_offset_pair (%s)", lab);
11825 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11826 "Range begin address (%s)", lab);
11827 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11828 "Range end address (%s)", lab);
11829 continue;
11830 }
11831 if (base == NULL)
11832 {
11833 dw_ranges *r2 = NULL;
11834 if (i < len - 1)
11835 r2 = &(*ranges_table)[i + 1];
11836 if (r2
11837 && r2->num != 0
11838 && r2->label == NULL
11839 && !r2->maybe_new_sec)
11840 {
11841 dw2_asm_output_data (1, DW_RLE_base_address,
11842 "DW_RLE_base_address (%s)", lab);
11843 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11844 "Base address (%s)", lab);
11845 strcpy (basebuf, blabel);
11846 base = basebuf;
11847 }
11848 }
11849 if (base)
11850 {
11851 dw2_asm_output_data (1, DW_RLE_offset_pair,
11852 "DW_RLE_offset_pair (%s)", lab);
11853 dw2_asm_output_delta_uleb128 (blabel, base,
11854 "Range begin address (%s)", lab);
11855 dw2_asm_output_delta_uleb128 (elabel, base,
11856 "Range end address (%s)", lab);
11857 continue;
11858 }
11859 dw2_asm_output_data (1, DW_RLE_start_length,
11860 "DW_RLE_start_length (%s)", lab);
11861 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11862 "Range begin address (%s)", lab);
11863 dw2_asm_output_delta_uleb128 (elabel, blabel,
11864 "Range length (%s)", lab);
11865 }
11866 else
11867 {
11868 dw2_asm_output_data (1, DW_RLE_start_end,
11869 "DW_RLE_start_end (%s)", lab);
11870 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11871 "Range begin address (%s)", lab);
11872 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11873 "Range end address (%s)", lab);
11874 }
11875 }
11876
11877 /* Negative block_num stands for an index into ranges_by_label. */
11878 else if (block_num < 0)
11879 {
11880 int lab_idx = - block_num - 1;
11881 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11882 const char *elabel = (*ranges_by_label)[lab_idx].end;
11883
11884 if (!have_multiple_function_sections)
11885 gcc_unreachable ();
11886 if (HAVE_AS_LEB128)
11887 {
11888 dw2_asm_output_data (1, DW_RLE_start_length,
11889 "DW_RLE_start_length (%s)", lab);
11890 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11891 "Range begin address (%s)", lab);
11892 dw2_asm_output_delta_uleb128 (elabel, blabel,
11893 "Range length (%s)", lab);
11894 }
11895 else
11896 {
11897 dw2_asm_output_data (1, DW_RLE_start_end,
11898 "DW_RLE_start_end (%s)", lab);
11899 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11900 "Range begin address (%s)", lab);
11901 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11902 "Range end address (%s)", lab);
11903 }
11904 }
11905 else
11906 dw2_asm_output_data (1, DW_RLE_end_of_list,
11907 "DW_RLE_end_of_list (%s)", lab);
11908 }
11909 ASM_OUTPUT_LABEL (asm_out_file, l2);
11910 }
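
/* Illustrative sketch, not part of GCC: the unsigned LEB128 encoding used
   for the DW_RLE_offset_pair and DW_RLE_start_length operands emitted above
   (dw2_asm_output_delta_uleb128 leaves the work to the assembler when it
   supports .uleb128).  Writes to BUF, which needs room for at most 10 bytes
   for a 64-bit value, and returns the number of bytes produced.  */

static unsigned int
example_uleb128_encode (unsigned long long value, unsigned char *buf)
{
  unsigned int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;	/* Low 7 bits.  */
      value >>= 7;
      if (value != 0)
	byte |= 0x80;				/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;
}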
11911
11912 /* Data structure containing information about input files. */
11913 struct file_info
11914 {
11915 const char *path; /* Complete file name. */
11916 const char *fname; /* File name part. */
11917 int length; /* Length of entire string. */
11918 struct dwarf_file_data * file_idx; /* Index in input file table. */
11919 int dir_idx; /* Index in directory table. */
11920 };
11921
11922 /* Data structure containing information about directories with source
11923 files. */
11924 struct dir_info
11925 {
11926 const char *path; /* Path including directory name. */
11927 int length; /* Path length. */
11928 int prefix; /* Index of directory entry which is a prefix. */
11929 int count; /* Number of files in this directory. */
11930 int dir_idx; /* Index of directory used as base. */
11931 };
11932
11933 /* Callback function for file_info comparison. We sort by looking at
11934 the directories in the path. */
11935
11936 static int
11937 file_info_cmp (const void *p1, const void *p2)
11938 {
11939 const struct file_info *const s1 = (const struct file_info *) p1;
11940 const struct file_info *const s2 = (const struct file_info *) p2;
11941 const unsigned char *cp1;
11942 const unsigned char *cp2;
11943
11944   /* Take care of file names without directories.  We need to make sure that
11945      we return consistent values to qsort, since some implementations will get
11946      confused if we return the same nonzero value when identical operands are
11947      passed in opposite orders.  So if neither has a directory, return 0, and
11948      otherwise return 1 or -1 depending on which one has the directory.  */
11949 if ((s1->path == s1->fname || s2->path == s2->fname))
11950 return (s2->path == s2->fname) - (s1->path == s1->fname);
11951
11952 cp1 = (const unsigned char *) s1->path;
11953 cp2 = (const unsigned char *) s2->path;
11954
11955 while (1)
11956 {
11957 ++cp1;
11958 ++cp2;
11959 /* Reached the end of the first path? If so, handle like above. */
11960 if ((cp1 == (const unsigned char *) s1->fname)
11961 || (cp2 == (const unsigned char *) s2->fname))
11962 return ((cp2 == (const unsigned char *) s2->fname)
11963 - (cp1 == (const unsigned char *) s1->fname));
11964
11965 /* Character of current path component the same? */
11966 else if (*cp1 != *cp2)
11967 return *cp1 - *cp2;
11968 }
11969 }
11970
11971 struct file_name_acquire_data
11972 {
11973 struct file_info *files;
11974 int used_files;
11975 int max_files;
11976 };
11977
11978 /* Traversal function for the hash table. */
11979
11980 int
11981 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11982 {
11983 struct dwarf_file_data *d = *slot;
11984 struct file_info *fi;
11985 const char *f;
11986
11987 gcc_assert (fnad->max_files >= d->emitted_number);
11988
11989 if (! d->emitted_number)
11990 return 1;
11991
11992 gcc_assert (fnad->max_files != fnad->used_files);
11993
11994 fi = fnad->files + fnad->used_files++;
11995
11996 /* Skip all leading "./". */
11997 f = d->filename;
11998 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11999 f += 2;
12000
12001 /* Create a new array entry. */
12002 fi->path = f;
12003 fi->length = strlen (f);
12004 fi->file_idx = d;
12005
12006 /* Search for the file name part. */
12007 f = strrchr (f, DIR_SEPARATOR);
12008 #if defined (DIR_SEPARATOR_2)
12009 {
12010 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12011
12012 if (g != NULL)
12013 {
12014 if (f == NULL || f < g)
12015 f = g;
12016 }
12017 }
12018 #endif
12019
12020 fi->fname = f == NULL ? fi->path : f + 1;
12021 return 1;
12022 }
12023
12024 /* Helper function for output_file_names.  Emit a FORM-encoded
12025    string STR, with assembly comment start ENTRY_KIND and
12026    index IDX.  */
12027
12028 static void
12029 output_line_string (enum dwarf_form form, const char *str,
12030 const char *entry_kind, unsigned int idx)
12031 {
12032 switch (form)
12033 {
12034 case DW_FORM_string:
12035 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12036 break;
12037 case DW_FORM_line_strp:
12038 if (!debug_line_str_hash)
12039 debug_line_str_hash
12040 = hash_table<indirect_string_hasher>::create_ggc (10);
12041
12042 struct indirect_string_node *node;
12043 node = find_AT_string_in_table (str, debug_line_str_hash);
12044 set_indirect_string (node);
12045 node->form = form;
12046 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12047 debug_line_str_section, "%s: %#x: \"%s\"",
12048 entry_kind, 0, node->str);
12049 break;
12050 default:
12051 gcc_unreachable ();
12052 }
12053 }
12054
12055 /* Output the directory table and the file name table. We try to minimize
12056 the total amount of memory needed. A heuristic is used to avoid large
12057 slowdowns with many input files. */
12058
12059 static void
12060 output_file_names (void)
12061 {
12062 struct file_name_acquire_data fnad;
12063 int numfiles;
12064 struct file_info *files;
12065 struct dir_info *dirs;
12066 int *saved;
12067 int *savehere;
12068 int *backmap;
12069 int ndirs;
12070 int idx_offset;
12071 int i;
12072
12073 if (!last_emitted_file)
12074 {
12075 if (dwarf_version >= 5)
12076 {
12077 dw2_asm_output_data (1, 0, "Directory entry format count");
12078 dw2_asm_output_data_uleb128 (0, "Directories count");
12079 dw2_asm_output_data (1, 0, "File name entry format count");
12080 dw2_asm_output_data_uleb128 (0, "File names count");
12081 }
12082 else
12083 {
12084 dw2_asm_output_data (1, 0, "End directory table");
12085 dw2_asm_output_data (1, 0, "End file name table");
12086 }
12087 return;
12088 }
12089
12090 numfiles = last_emitted_file->emitted_number;
12091
12092 /* Allocate the various arrays we need. */
12093 files = XALLOCAVEC (struct file_info, numfiles);
12094 dirs = XALLOCAVEC (struct dir_info, numfiles);
12095
12096 fnad.files = files;
12097 fnad.used_files = 0;
12098 fnad.max_files = numfiles;
12099 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12100 gcc_assert (fnad.used_files == fnad.max_files);
12101
12102 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12103
12104 /* Find all the different directories used. */
12105 dirs[0].path = files[0].path;
12106 dirs[0].length = files[0].fname - files[0].path;
12107 dirs[0].prefix = -1;
12108 dirs[0].count = 1;
12109 dirs[0].dir_idx = 0;
12110 files[0].dir_idx = 0;
12111 ndirs = 1;
12112
12113 for (i = 1; i < numfiles; i++)
12114 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12115 && memcmp (dirs[ndirs - 1].path, files[i].path,
12116 dirs[ndirs - 1].length) == 0)
12117 {
12118 /* Same directory as last entry. */
12119 files[i].dir_idx = ndirs - 1;
12120 ++dirs[ndirs - 1].count;
12121 }
12122 else
12123 {
12124 int j;
12125
12126 /* This is a new directory. */
12127 dirs[ndirs].path = files[i].path;
12128 dirs[ndirs].length = files[i].fname - files[i].path;
12129 dirs[ndirs].count = 1;
12130 dirs[ndirs].dir_idx = ndirs;
12131 files[i].dir_idx = ndirs;
12132
12133 /* Search for a prefix. */
12134 dirs[ndirs].prefix = -1;
12135 for (j = 0; j < ndirs; j++)
12136 if (dirs[j].length < dirs[ndirs].length
12137 && dirs[j].length > 1
12138 && (dirs[ndirs].prefix == -1
12139 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12140 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12141 dirs[ndirs].prefix = j;
12142
12143 ++ndirs;
12144 }
12145
12146   /* Now to the actual work.  We have to find a subset of the directories
12147      which allows expressing each file name using references to the directory
12148      table with the fewest characters.  We do not do an exhaustive search
12149      where we would have to check every combination of every single
12150      possible prefix.  Instead we use a heuristic which provides nearly
12151      optimal results in most cases and is never far off.  */
12152 saved = XALLOCAVEC (int, ndirs);
12153 savehere = XALLOCAVEC (int, ndirs);
12154
12155 memset (saved, '\0', ndirs * sizeof (saved[0]));
12156 for (i = 0; i < ndirs; i++)
12157 {
12158 int j;
12159 int total;
12160
12161 /* We can always save some space for the current directory. But this
12162 does not mean it will be enough to justify adding the directory. */
12163 savehere[i] = dirs[i].length;
12164 total = (savehere[i] - saved[i]) * dirs[i].count;
12165
12166 for (j = i + 1; j < ndirs; j++)
12167 {
12168 savehere[j] = 0;
12169 if (saved[j] < dirs[i].length)
12170 {
12171 /* Determine whether the dirs[i] path is a prefix of the
12172 dirs[j] path. */
12173 int k;
12174
12175 k = dirs[j].prefix;
12176 while (k != -1 && k != (int) i)
12177 k = dirs[k].prefix;
12178
12179 if (k == (int) i)
12180 {
12181 /* Yes it is. We can possibly save some memory by
12182 writing the filenames in dirs[j] relative to
12183 dirs[i]. */
12184 savehere[j] = dirs[i].length;
12185 total += (savehere[j] - saved[j]) * dirs[j].count;
12186 }
12187 }
12188 }
12189
12190 /* Check whether we can save enough to justify adding the dirs[i]
12191 directory. */
12192 if (total > dirs[i].length + 1)
12193 {
12194 /* It's worthwhile adding. */
12195 for (j = i; j < ndirs; j++)
12196 if (savehere[j] > 0)
12197 {
12198 /* Remember how much we saved for this directory so far. */
12199 saved[j] = savehere[j];
12200
12201 /* Remember the prefix directory. */
12202 dirs[j].dir_idx = i;
12203 }
12204 }
12205 }
12206
12207 /* Emit the directory name table. */
12208 idx_offset = dirs[0].length > 0 ? 1 : 0;
12209 enum dwarf_form str_form = DW_FORM_string;
12210 enum dwarf_form idx_form = DW_FORM_udata;
12211 if (dwarf_version >= 5)
12212 {
12213 const char *comp_dir = comp_dir_string ();
12214 if (comp_dir == NULL)
12215 comp_dir = "";
12216 dw2_asm_output_data (1, 1, "Directory entry format count");
12217 if (DWARF5_USE_DEBUG_LINE_STR)
12218 str_form = DW_FORM_line_strp;
12219 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12220 dw2_asm_output_data_uleb128 (str_form, "%s",
12221 get_DW_FORM_name (str_form));
12222 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12223 if (str_form == DW_FORM_string)
12224 {
12225 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12226 for (i = 1 - idx_offset; i < ndirs; i++)
12227 dw2_asm_output_nstring (dirs[i].path,
12228 dirs[i].length
12229 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12230 "Directory Entry: %#x", i + idx_offset);
12231 }
12232 else
12233 {
12234 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12235 for (i = 1 - idx_offset; i < ndirs; i++)
12236 {
12237 const char *str
12238 = ggc_alloc_string (dirs[i].path,
12239 dirs[i].length
12240 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12241 output_line_string (str_form, str, "Directory Entry",
12242 (unsigned) i + idx_offset);
12243 }
12244 }
12245 }
12246 else
12247 {
12248 for (i = 1 - idx_offset; i < ndirs; i++)
12249 dw2_asm_output_nstring (dirs[i].path,
12250 dirs[i].length
12251 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12252 "Directory Entry: %#x", i + idx_offset);
12253
12254 dw2_asm_output_data (1, 0, "End directory table");
12255 }
12256
12257 /* We have to emit them in the order of emitted_number since that's
12258 used in the debug info generation. To do this efficiently we
12259 generate a back-mapping of the indices first. */
12260 backmap = XALLOCAVEC (int, numfiles);
12261 for (i = 0; i < numfiles; i++)
12262 backmap[files[i].file_idx->emitted_number - 1] = i;
12263
12264 if (dwarf_version >= 5)
12265 {
12266 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12267 if (filename0 == NULL)
12268 filename0 = "";
12269       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12270 	 DW_FORM_data2.  Choose one based on the number of directories
12271 	 and how much space they would occupy in each encoding.
12272 	 If we have at most 256 directories, all indexes fit into
12273 	 a single byte, so DW_FORM_data1 is most compact (if there
12274 	 are at most 128 directories, DW_FORM_udata would be just as
12275 	 compact, but no shorter and slower to decode).  */
12276 if (ndirs + idx_offset <= 256)
12277 idx_form = DW_FORM_data1;
12278       /* If there are more than 65536 directories, we have to use
12279 	 DW_FORM_udata, as DW_FORM_data2 can't refer to them all.
12280 	 Otherwise, compute what space the indexes would occupy if they
12281 	 all used DW_FORM_udata - sum - compare that to how large the
12282 	 DW_FORM_data2 encoding would be, and pick the more efficient one.  */
12283 else if (ndirs + idx_offset <= 65536)
12284 {
12285 unsigned HOST_WIDE_INT sum = 1;
12286 for (i = 0; i < numfiles; i++)
12287 {
12288 int file_idx = backmap[i];
12289 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12290 sum += size_of_uleb128 (dir_idx);
12291 }
12292 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12293 idx_form = DW_FORM_data2;
12294 }
12295 #ifdef VMS_DEBUGGING_INFO
12296 dw2_asm_output_data (1, 4, "File name entry format count");
12297 #else
12298 dw2_asm_output_data (1, 2, "File name entry format count");
12299 #endif
12300 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12301 dw2_asm_output_data_uleb128 (str_form, "%s",
12302 get_DW_FORM_name (str_form));
12303 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12304 "DW_LNCT_directory_index");
12305 dw2_asm_output_data_uleb128 (idx_form, "%s",
12306 get_DW_FORM_name (idx_form));
12307 #ifdef VMS_DEBUGGING_INFO
12308 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12309 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12310 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12311 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12312 #endif
12313 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12314
12315 output_line_string (str_form, filename0, "File Entry", 0);
12316
12317 /* Include directory index. */
12318 if (idx_form != DW_FORM_udata)
12319 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12320 0, NULL);
12321 else
12322 dw2_asm_output_data_uleb128 (0, NULL);
12323
12324 #ifdef VMS_DEBUGGING_INFO
12325 dw2_asm_output_data_uleb128 (0, NULL);
12326 dw2_asm_output_data_uleb128 (0, NULL);
12327 #endif
12328 }
12329
12330 /* Now write all the file names. */
12331 for (i = 0; i < numfiles; i++)
12332 {
12333 int file_idx = backmap[i];
12334 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12335
12336 #ifdef VMS_DEBUGGING_INFO
12337 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12338
12339 /* Setting these fields can lead to debugger miscomparisons,
12340 but VMS Debug requires them to be set correctly. */
12341
12342 int ver;
12343 long long cdt;
12344 long siz;
12345 int maxfilelen = (strlen (files[file_idx].path)
12346 + dirs[dir_idx].length
12347 + MAX_VMS_VERSION_LEN + 1);
12348 char *filebuf = XALLOCAVEC (char, maxfilelen);
12349
12350 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12351 snprintf (filebuf, maxfilelen, "%s;%d",
12352 files[file_idx].path + dirs[dir_idx].length, ver);
12353
12354 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12355
12356 /* Include directory index. */
12357 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12358 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12359 dir_idx + idx_offset, NULL);
12360 else
12361 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12362
12363 /* Modification time. */
12364 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12365 &cdt, 0, 0, 0) == 0)
12366 ? cdt : 0, NULL);
12367
12368 /* File length in bytes. */
12369 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12370 0, &siz, 0, 0) == 0)
12371 ? siz : 0, NULL);
12372 #else
12373 output_line_string (str_form,
12374 files[file_idx].path + dirs[dir_idx].length,
12375 "File Entry", (unsigned) i + 1);
12376
12377 /* Include directory index. */
12378 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12379 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12380 dir_idx + idx_offset, NULL);
12381 else
12382 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12383
12384 if (dwarf_version >= 5)
12385 continue;
12386
12387 /* Modification time. */
12388 dw2_asm_output_data_uleb128 (0, NULL);
12389
12390 /* File length in bytes. */
12391 dw2_asm_output_data_uleb128 (0, NULL);
12392 #endif /* VMS_DEBUGGING_INFO */
12393 }
12394
12395 if (dwarf_version < 5)
12396 dw2_asm_output_data (1, 0, "End file name table");
12397 }
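
/* Illustrative sketch, not part of GCC, with made-up names and a simplified
   interface: the form choice for DW_LNCT_directory_index made inside
   output_file_names above.  NDIRS counts every directory entry including
   entry 0; DIR_INDEXES holds one directory index per file.  Returns the
   fixed size in bytes of the chosen form, or 0 to stand for DW_FORM_udata.  */

static unsigned int
example_pick_dir_index_form (unsigned int ndirs,
			     const unsigned int *dir_indexes, size_t nfiles)
{
  if (ndirs <= 256)
    return 1;		/* DW_FORM_data1: every index fits in one byte.  */
  if (ndirs > 65536)
    return 0;		/* DW_FORM_udata: DW_FORM_data2 cannot reach them all.  */

  /* Otherwise compare the total uleb128 size of the indexes against the
     two bytes per entry that DW_FORM_data2 would take.  */
  unsigned long long uleb_total = 0;
  for (size_t i = 0; i < nfiles; i++)
    {
      unsigned int v = dir_indexes[i];
      do
	{
	  v >>= 7;
	  uleb_total++;		/* One LEB128 byte per started 7 bits.  */
	}
      while (v != 0);
    }
  return uleb_total >= 2ULL * nfiles ? 2 : 0;
}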
12398
12399
12400 /* Output one line number table into the .debug_line section. */
12401
12402 static void
12403 output_one_line_info_table (dw_line_info_table *table)
12404 {
12405 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12406 unsigned int current_line = 1;
12407 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12408 dw_line_info_entry *ent, *prev_addr;
12409 size_t i;
12410 unsigned int view;
12411
12412 view = 0;
12413
12414 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12415 {
12416 switch (ent->opcode)
12417 {
12418 case LI_set_address:
12419 /* ??? Unfortunately, we have little choice here currently, and
12420 must always use the most general form. GCC does not know the
12421 address delta itself, so we can't use DW_LNS_advance_pc. Many
12422 ports do have length attributes which will give an upper bound
12423 on the address range. We could perhaps use length attributes
12424 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12425 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12426
12427 view = 0;
12428
12429 /* This can handle any delta. This takes
12430 4+DWARF2_ADDR_SIZE bytes. */
12431 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12432 debug_variable_location_views
12433 ? ", reset view to 0" : "");
12434 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12435 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12436 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12437
12438 prev_addr = ent;
12439 break;
12440
12441 case LI_adv_address:
12442 {
12443 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12444 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12445 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12446
12447 view++;
12448
12449 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12450 dw2_asm_output_delta (2, line_label, prev_label,
12451 "from %s to %s", prev_label, line_label);
12452
12453 prev_addr = ent;
12454 break;
12455 }
12456
12457 case LI_set_line:
12458 if (ent->val == current_line)
12459 {
12460 /* We still need to start a new row, so output a copy insn. */
12461 dw2_asm_output_data (1, DW_LNS_copy,
12462 "copy line %u", current_line);
12463 }
12464 else
12465 {
12466 int line_offset = ent->val - current_line;
12467 int line_delta = line_offset - DWARF_LINE_BASE;
12468
12469 current_line = ent->val;
12470 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12471 {
12472 /* This can handle deltas from -10 to 234, using the current
12473 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12474 This takes 1 byte. */
12475 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12476 "line %u", current_line);
12477 }
12478 else
12479 {
12480 /* This can handle any delta. This takes at least 4 bytes,
12481 depending on the value being encoded. */
12482 dw2_asm_output_data (1, DW_LNS_advance_line,
12483 "advance to line %u", current_line);
12484 dw2_asm_output_data_sleb128 (line_offset, NULL);
12485 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12486 }
12487 }
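/* Worked example (illustrative, using the delta range quoted above,
i.e. DWARF_LINE_BASE == -10): advancing from line 7 to line 10 gives
line_offset == 3 and is emitted as the single special opcode
DWARF_LINE_OPCODE_BASE + (3 - (-10)), i.e. one byte; advancing to
line 300 instead would emit DW_LNS_advance_line, SLEB128 (293) and
then DW_LNS_copy. */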
12488 break;
12489
12490 case LI_set_file:
12491 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12492 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12493 break;
12494
12495 case LI_set_column:
12496 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12497 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12498 break;
12499
12500 case LI_negate_stmt:
12501 current_is_stmt = !current_is_stmt;
12502 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12503 "is_stmt %d", current_is_stmt);
12504 break;
12505
12506 case LI_set_prologue_end:
12507 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12508 "set prologue end");
12509 break;
12510
12511 case LI_set_epilogue_begin:
12512 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12513 "set epilogue begin");
12514 break;
12515
12516 case LI_set_discriminator:
12517 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12518 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12519 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12520 dw2_asm_output_data_uleb128 (ent->val, NULL);
12521 break;
12522 }
12523 }
12524
12525 /* Emit debug info for the address of the end of the table. */
12526 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12527 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12528 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12529 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12530
12531 dw2_asm_output_data (1, 0, "end sequence");
12532 dw2_asm_output_data_uleb128 (1, NULL);
12533 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12534 }
12535
12536 /* Output the source line number correspondence information. This
12537 information goes into the .debug_line section. */
12538
12539 static void
12540 output_line_info (bool prologue_only)
12541 {
12542 static unsigned int generation;
12543 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12544 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12545 bool saw_one = false;
12546 int opc;
12547
12548 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12549 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12550 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12551 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12552
12553 if (!XCOFF_DEBUGGING_INFO)
12554 {
12555 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12556 dw2_asm_output_data (4, 0xffffffff,
12557 "Initial length escape value indicating 64-bit DWARF extension");
12558 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12559 "Length of Source Line Info");
12560 }
12561
12562 ASM_OUTPUT_LABEL (asm_out_file, l1);
12563
12564 output_dwarf_version ();
12565 if (dwarf_version >= 5)
12566 {
12567 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12568 dw2_asm_output_data (1, 0, "Segment Size");
12569 }
12570 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12571 ASM_OUTPUT_LABEL (asm_out_file, p1);
12572
12573 /* Define the architecture-dependent minimum instruction length (in bytes).
12574 In this implementation of DWARF, this field is used for information
12575 purposes only. Since GCC generates assembly language, we have no
12576 a priori knowledge of how many instruction bytes are generated for each
12577 source line, and therefore can use only the DW_LNE_set_address and
12578 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12579 this as '1', which is "correct enough" for all architectures,
12580 and don't let the target override. */
12581 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12582
12583 if (dwarf_version >= 4)
12584 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12585 "Maximum Operations Per Instruction");
12586 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12587 "Default is_stmt_start flag");
12588 dw2_asm_output_data (1, DWARF_LINE_BASE,
12589 "Line Base Value (Special Opcodes)");
12590 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12591 "Line Range Value (Special Opcodes)");
12592 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12593 "Special Opcode Base");
12594
12595 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12596 {
12597 int n_op_args;
12598 switch (opc)
12599 {
12600 case DW_LNS_advance_pc:
12601 case DW_LNS_advance_line:
12602 case DW_LNS_set_file:
12603 case DW_LNS_set_column:
12604 case DW_LNS_fixed_advance_pc:
12605 case DW_LNS_set_isa:
12606 n_op_args = 1;
12607 break;
12608 default:
12609 n_op_args = 0;
12610 break;
12611 }
12612
12613 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12614 opc, n_op_args);
12615 }
12616
12617 /* Write out the information about the files we use. */
12618 output_file_names ();
12619 ASM_OUTPUT_LABEL (asm_out_file, p2);
12620 if (prologue_only)
12621 {
12622 /* Output the marker for the end of the line number info. */
12623 ASM_OUTPUT_LABEL (asm_out_file, l2);
12624 return;
12625 }
12626
12627 if (separate_line_info)
12628 {
12629 dw_line_info_table *table;
12630 size_t i;
12631
12632 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12633 if (table->in_use)
12634 {
12635 output_one_line_info_table (table);
12636 saw_one = true;
12637 }
12638 }
12639 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12640 {
12641 output_one_line_info_table (cold_text_section_line_info);
12642 saw_one = true;
12643 }
12644
12645 /* ??? Some Darwin linkers crash on a .debug_line section with no
12646 sequences. Further, merely a DW_LNE_end_sequence entry is not
12647 sufficient -- the address column must also be initialized.
12648 Make sure to output at least one set_address/end_sequence pair,
12649 choosing .text since that section is always present. */
12650 if (text_section_line_info->in_use || !saw_one)
12651 output_one_line_info_table (text_section_line_info);
12652
12653 /* Output the marker for the end of the line number info. */
12654 ASM_OUTPUT_LABEL (asm_out_file, l2);
12655 }
12656 \f
12657 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12658
12659 static inline bool
12660 need_endianity_attribute_p (bool reverse)
12661 {
12662 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12663 }
12664
12665 /* Given a pointer to a tree node for some base type, return a pointer to
12666 a DIE that describes the given type. REVERSE is true if the type is
12667 to be interpreted in the reverse storage order wrt the target order.
12668
12669 This routine must only be called for GCC type nodes that correspond to
12670 Dwarf base (fundamental) types. */
12671
12672 static dw_die_ref
12673 base_type_die (tree type, bool reverse)
12674 {
12675 dw_die_ref base_type_result;
12676 enum dwarf_type encoding;
12677 bool fpt_used = false;
12678 struct fixed_point_type_info fpt_info;
12679 tree type_bias = NULL_TREE;
12680
12681 /* If this is a subtype that should not be emitted as a subrange type,
12682 use the base type. See subrange_type_for_debug_p. */
12683 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12684 type = TREE_TYPE (type);
12685
12686 switch (TREE_CODE (type))
12687 {
12688 case INTEGER_TYPE:
12689 if ((dwarf_version >= 4 || !dwarf_strict)
12690 && TYPE_NAME (type)
12691 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12692 && DECL_IS_BUILTIN (TYPE_NAME (type))
12693 && DECL_NAME (TYPE_NAME (type)))
12694 {
12695 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12696 if (strcmp (name, "char16_t") == 0
12697 || strcmp (name, "char32_t") == 0)
12698 {
12699 encoding = DW_ATE_UTF;
12700 break;
12701 }
12702 }
12703 if ((dwarf_version >= 3 || !dwarf_strict)
12704 && lang_hooks.types.get_fixed_point_type_info)
12705 {
12706 memset (&fpt_info, 0, sizeof (fpt_info));
12707 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12708 {
12709 fpt_used = true;
12710 encoding = ((TYPE_UNSIGNED (type))
12711 ? DW_ATE_unsigned_fixed
12712 : DW_ATE_signed_fixed);
12713 break;
12714 }
12715 }
12716 if (TYPE_STRING_FLAG (type))
12717 {
12718 if (TYPE_UNSIGNED (type))
12719 encoding = DW_ATE_unsigned_char;
12720 else
12721 encoding = DW_ATE_signed_char;
12722 }
12723 else if (TYPE_UNSIGNED (type))
12724 encoding = DW_ATE_unsigned;
12725 else
12726 encoding = DW_ATE_signed;
12727
12728 if (!dwarf_strict
12729 && lang_hooks.types.get_type_bias)
12730 type_bias = lang_hooks.types.get_type_bias (type);
12731 break;
12732
12733 case REAL_TYPE:
12734 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12735 {
12736 if (dwarf_version >= 3 || !dwarf_strict)
12737 encoding = DW_ATE_decimal_float;
12738 else
12739 encoding = DW_ATE_lo_user;
12740 }
12741 else
12742 encoding = DW_ATE_float;
12743 break;
12744
12745 case FIXED_POINT_TYPE:
12746 if (!(dwarf_version >= 3 || !dwarf_strict))
12747 encoding = DW_ATE_lo_user;
12748 else if (TYPE_UNSIGNED (type))
12749 encoding = DW_ATE_unsigned_fixed;
12750 else
12751 encoding = DW_ATE_signed_fixed;
12752 break;
12753
12754 /* Dwarf2 doesn't know anything about complex ints, so use
12755 a user-defined type for them. */
12756 case COMPLEX_TYPE:
12757 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12758 encoding = DW_ATE_complex_float;
12759 else
12760 encoding = DW_ATE_lo_user;
12761 break;
12762
12763 case BOOLEAN_TYPE:
12764 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12765 encoding = DW_ATE_boolean;
12766 break;
12767
12768 default:
12769 /* No other TREE_CODEs are Dwarf fundamental types. */
12770 gcc_unreachable ();
12771 }
12772
12773 base_type_result = new_die_raw (DW_TAG_base_type);
12774
12775 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12776 int_size_in_bytes (type));
12777 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12778
12779 if (need_endianity_attribute_p (reverse))
12780 add_AT_unsigned (base_type_result, DW_AT_endianity,
12781 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12782
12783 add_alignment_attribute (base_type_result, type);
12784
12785 if (fpt_used)
12786 {
12787 switch (fpt_info.scale_factor_kind)
12788 {
12789 case fixed_point_scale_factor_binary:
12790 add_AT_int (base_type_result, DW_AT_binary_scale,
12791 fpt_info.scale_factor.binary);
12792 break;
12793
12794 case fixed_point_scale_factor_decimal:
12795 add_AT_int (base_type_result, DW_AT_decimal_scale,
12796 fpt_info.scale_factor.decimal);
12797 break;
12798
12799 case fixed_point_scale_factor_arbitrary:
12800 /* Arbitrary scale factors cannot be described in standard DWARF,
12801 yet. */
12802 if (!dwarf_strict)
12803 {
12804 /* Describe the scale factor as a rational constant. */
12805 const dw_die_ref scale_factor
12806 = new_die (DW_TAG_constant, comp_unit_die (), type);
12807
12808 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12809 fpt_info.scale_factor.arbitrary.numerator);
12810 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12811 fpt_info.scale_factor.arbitrary.denominator);
12812
12813 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12814 }
12815 break;
12816
12817 default:
12818 gcc_unreachable ();
12819 }
12820 }
12821
12822 if (type_bias)
12823 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12824 dw_scalar_form_constant
12825 | dw_scalar_form_exprloc
12826 | dw_scalar_form_reference,
12827 NULL);
12828
12829 return base_type_result;
12830 }
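/* Illustration (a sketch, assuming a 32-bit "int" on the target): for the
C type "int" the code above produces a DW_TAG_base_type DIE carrying
DW_AT_byte_size == 4 and DW_AT_encoding == DW_ATE_signed, plus possibly an
alignment attribute; "unsigned char" would instead get
DW_ATE_unsigned_char via the TYPE_STRING_FLAG path. */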
12831
12832 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12833 named 'auto' in its type: return true for it, false otherwise. */
12834
12835 static inline bool
12836 is_cxx_auto (tree type)
12837 {
12838 if (is_cxx ())
12839 {
12840 tree name = TYPE_IDENTIFIER (type);
12841 if (name == get_identifier ("auto")
12842 || name == get_identifier ("decltype(auto)"))
12843 return true;
12844 }
12845 return false;
12846 }
12847
12848 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12849 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12850
12851 static inline int
12852 is_base_type (tree type)
12853 {
12854 switch (TREE_CODE (type))
12855 {
12856 case INTEGER_TYPE:
12857 case REAL_TYPE:
12858 case FIXED_POINT_TYPE:
12859 case COMPLEX_TYPE:
12860 case BOOLEAN_TYPE:
12861 case POINTER_BOUNDS_TYPE:
12862 return 1;
12863
12864 case VOID_TYPE:
12865 case ARRAY_TYPE:
12866 case RECORD_TYPE:
12867 case UNION_TYPE:
12868 case QUAL_UNION_TYPE:
12869 case ENUMERAL_TYPE:
12870 case FUNCTION_TYPE:
12871 case METHOD_TYPE:
12872 case POINTER_TYPE:
12873 case REFERENCE_TYPE:
12874 case NULLPTR_TYPE:
12875 case OFFSET_TYPE:
12876 case LANG_TYPE:
12877 case VECTOR_TYPE:
12878 return 0;
12879
12880 default:
12881 if (is_cxx_auto (type))
12882 return 0;
12883 gcc_unreachable ();
12884 }
12885
12886 return 0;
12887 }
12888
12889 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12890 node, return the size in bits for the type if it is a constant, or else
12891 return the alignment for the type if the type's size is not constant, or
12892 else return BITS_PER_WORD if the type actually turns out to be an
12893 ERROR_MARK node. */
12894
12895 static inline unsigned HOST_WIDE_INT
12896 simple_type_size_in_bits (const_tree type)
12897 {
12898 if (TREE_CODE (type) == ERROR_MARK)
12899 return BITS_PER_WORD;
12900 else if (TYPE_SIZE (type) == NULL_TREE)
12901 return 0;
12902 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12903 return tree_to_uhwi (TYPE_SIZE (type));
12904 else
12905 return TYPE_ALIGN (type);
12906 }
12907
12908 /* Similarly, but return an offset_int instead of UHWI. */
12909
12910 static inline offset_int
12911 offset_int_type_size_in_bits (const_tree type)
12912 {
12913 if (TREE_CODE (type) == ERROR_MARK)
12914 return BITS_PER_WORD;
12915 else if (TYPE_SIZE (type) == NULL_TREE)
12916 return 0;
12917 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12918 return wi::to_offset (TYPE_SIZE (type));
12919 else
12920 return TYPE_ALIGN (type);
12921 }
12922
12923 /* Given a pointer to a tree node for a subrange type, return a pointer
12924 to a DIE that describes the given type. */
12925
12926 static dw_die_ref
12927 subrange_type_die (tree type, tree low, tree high, tree bias,
12928 dw_die_ref context_die)
12929 {
12930 dw_die_ref subrange_die;
12931 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12932
12933 if (context_die == NULL)
12934 context_die = comp_unit_die ();
12935
12936 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12937
12938 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12939 {
12940 /* The size of the subrange type and its base type do not match,
12941 so we need to generate a size attribute for the subrange type. */
12942 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12943 }
12944
12945 add_alignment_attribute (subrange_die, type);
12946
12947 if (low)
12948 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12949 if (high)
12950 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12951 if (bias && !dwarf_strict)
12952 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12953 dw_scalar_form_constant
12954 | dw_scalar_form_exprloc
12955 | dw_scalar_form_reference,
12956 NULL);
12957
12958 return subrange_die;
12959 }
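/* For instance (illustrative), an Ada declaration such as
"type Small is range 1 .. 10" would typically be described by the code
above as a DW_TAG_subrange_type DIE with DW_AT_lower_bound 1 and
DW_AT_upper_bound 10; DW_AT_byte_size is added only when the subrange's
size differs from that of its base type. */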
12960
12961 /* Returns the (const and/or volatile) cv_qualifiers associated with
12962 the decl node. This will normally be augmented with the
12963 cv_qualifiers of the underlying type in add_type_attribute. */
12964
12965 static int
12966 decl_quals (const_tree decl)
12967 {
12968 return ((TREE_READONLY (decl)
12969 /* The C++ front-end correctly marks reference-typed
12970 variables as readonly, but from a language (and debug
12971 info) standpoint they are not const-qualified. */
12972 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12973 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12974 | (TREE_THIS_VOLATILE (decl)
12975 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12976 }
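/* E.g. (illustrative): a C "const int x" yields TYPE_QUAL_CONST here,
while a C++ reference variable -- which the front end also marks
TREE_READONLY -- deliberately yields no const qualifier, as per the
comment above. */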
12977
12978 /* Determine the TYPE whose qualifiers match the largest strict subset
12979 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12980 qualifiers outside QUAL_MASK. */
12981
12982 static int
12983 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12984 {
12985 tree t;
12986 int best_rank = 0, best_qual = 0, max_rank;
12987
12988 type_quals &= qual_mask;
12989 max_rank = popcount_hwi (type_quals) - 1;
12990
12991 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12992 t = TYPE_NEXT_VARIANT (t))
12993 {
12994 int q = TYPE_QUALS (t) & qual_mask;
12995
12996 if ((q & type_quals) == q && q != type_quals
12997 && check_base_type (t, type))
12998 {
12999 int rank = popcount_hwi (q);
13000
13001 if (rank > best_rank)
13002 {
13003 best_rank = rank;
13004 best_qual = q;
13005 }
13006 }
13007 }
13008
13009 return best_qual;
13010 }
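/* Example (a sketch): for a "const volatile int" with QUAL_MASK covering
const and volatile, if a "const int" variant of the type exists, the loop
above finds it and returns TYPE_QUAL_CONST, so the caller only needs to
wrap one additional DW_TAG_volatile_type around that variant's DIE. */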
13011
13012 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13013 static const dwarf_qual_info_t dwarf_qual_info[] =
13014 {
13015 { TYPE_QUAL_CONST, DW_TAG_const_type },
13016 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13017 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13018 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13019 };
13020 static const unsigned int dwarf_qual_info_size
13021 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13022
13023 /* If DIE is a qualified DIE of some base DIE with the same parent,
13024 return the base DIE, otherwise return NULL. Set MASK to the
13025 qualifiers added compared to the returned DIE. */
13026
13027 static dw_die_ref
13028 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13029 {
13030 unsigned int i;
13031 for (i = 0; i < dwarf_qual_info_size; i++)
13032 if (die->die_tag == dwarf_qual_info[i].t)
13033 break;
13034 if (i == dwarf_qual_info_size)
13035 return NULL;
13036 if (vec_safe_length (die->die_attr) != 1)
13037 return NULL;
13038 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13039 if (type == NULL || type->die_parent != die->die_parent)
13040 return NULL;
13041 *mask |= dwarf_qual_info[i].q;
13042 if (depth)
13043 {
13044 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13045 if (ret)
13046 return ret;
13047 }
13048 return type;
13049 }
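/* Illustration: given a DW_TAG_const_type DIE whose DW_AT_type points to a
DW_TAG_volatile_type sibling, which in turn points to the base DIE, a call
with sufficient DEPTH returns the base DIE and sets *MASK to
TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE. */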
13050
13051 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13052 entry that chains the modifiers specified by CV_QUALS in front of the
13053 given type. REVERSE is true if the type is to be interpreted in the
13054 reverse storage order wrt the target order. */
13055
13056 static dw_die_ref
13057 modified_type_die (tree type, int cv_quals, bool reverse,
13058 dw_die_ref context_die)
13059 {
13060 enum tree_code code = TREE_CODE (type);
13061 dw_die_ref mod_type_die;
13062 dw_die_ref sub_die = NULL;
13063 tree item_type = NULL;
13064 tree qualified_type;
13065 tree name, low, high;
13066 dw_die_ref mod_scope;
13067 /* Only these cv-qualifiers are currently handled. */
13068 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13069 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13070 ENCODE_QUAL_ADDR_SPACE(~0U));
13071 const bool reverse_base_type
13072 = need_endianity_attribute_p (reverse) && is_base_type (type);
13073
13074 if (code == ERROR_MARK)
13075 return NULL;
13076
13077 if (lang_hooks.types.get_debug_type)
13078 {
13079 tree debug_type = lang_hooks.types.get_debug_type (type);
13080
13081 if (debug_type != NULL_TREE && debug_type != type)
13082 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13083 }
13084
13085 cv_quals &= cv_qual_mask;
13086
13087 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13088 tag modifier (and not an attribute) that old consumers won't be
13089 able to handle. */
13090 if (dwarf_version < 3)
13091 cv_quals &= ~TYPE_QUAL_RESTRICT;
13092
13093 /* Likewise, DW_TAG_atomic_type is only available starting with DWARFv5. */
13094 if (dwarf_version < 5)
13095 cv_quals &= ~TYPE_QUAL_ATOMIC;
13096
13097 /* See if we already have the appropriately qualified variant of
13098 this type. */
13099 qualified_type = get_qualified_type (type, cv_quals);
13100
13101 if (qualified_type == sizetype)
13102 {
13103 /* Try not to expose the internal sizetype type's name. */
13104 if (TYPE_NAME (qualified_type)
13105 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13106 {
13107 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13108
13109 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13110 && (TYPE_PRECISION (t)
13111 == TYPE_PRECISION (qualified_type))
13112 && (TYPE_UNSIGNED (t)
13113 == TYPE_UNSIGNED (qualified_type)));
13114 qualified_type = t;
13115 }
13116 else if (qualified_type == sizetype
13117 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13118 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13119 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13120 qualified_type = size_type_node;
13121 }
13122
13123 /* If we do, then we can just use its DIE, if it exists. */
13124 if (qualified_type)
13125 {
13126 mod_type_die = lookup_type_die (qualified_type);
13127
13128 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13129 dealt with specially: the DIE with the attribute, if it exists, is
13130 placed immediately after the regular DIE for the same base type. */
13131 if (mod_type_die
13132 && (!reverse_base_type
13133 || ((mod_type_die = mod_type_die->die_sib) != NULL
13134 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13135 return mod_type_die;
13136 }
13137
13138 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13139
13140 /* Handle C typedef types. */
13141 if (name
13142 && TREE_CODE (name) == TYPE_DECL
13143 && DECL_ORIGINAL_TYPE (name)
13144 && !DECL_ARTIFICIAL (name))
13145 {
13146 tree dtype = TREE_TYPE (name);
13147
13148 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13149 if (qualified_type == dtype && !reverse_base_type)
13150 {
13151 tree origin = decl_ultimate_origin (name);
13152
13153 /* Typedef variants that have an abstract origin don't get their own
13154 type DIE (see gen_typedef_die), so fall back on the ultimate
13155 abstract origin instead. */
13156 if (origin != NULL && origin != name)
13157 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13158 context_die);
13159
13160 /* For a named type, use the typedef. */
13161 gen_type_die (qualified_type, context_die);
13162 return lookup_type_die (qualified_type);
13163 }
13164 else
13165 {
13166 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13167 dquals &= cv_qual_mask;
13168 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13169 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13170 /* cv-unqualified version of named type. Just use
13171 the unnamed type to which it refers. */
13172 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13173 reverse, context_die);
13174 /* Else cv-qualified version of named type; fall through. */
13175 }
13176 }
13177
13178 mod_scope = scope_die_for (type, context_die);
13179
13180 if (cv_quals)
13181 {
13182 int sub_quals = 0, first_quals = 0;
13183 unsigned i;
13184 dw_die_ref first = NULL, last = NULL;
13185
13186 /* Determine a lesser qualified type that most closely matches
13187 this one. Then generate DW_TAG_* entries for the remaining
13188 qualifiers. */
13189 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13190 cv_qual_mask);
13191 if (sub_quals && use_debug_types)
13192 {
13193 bool needed = false;
13194 /* If emitting type units, make sure the order of qualifiers
13195 is canonical. Thus, start from unqualified type if
13196 an earlier qualifier is missing in sub_quals, but some later
13197 one is present there. */
13198 for (i = 0; i < dwarf_qual_info_size; i++)
13199 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13200 needed = true;
13201 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13202 {
13203 sub_quals = 0;
13204 break;
13205 }
13206 }
13207 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13208 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13209 {
13210 /* As not all intermediate qualified DIEs have corresponding
13211 tree types, ensure that qualified DIEs in the same scope
13212 as their DW_AT_type are emitted after their DW_AT_type,
13213 only with other qualified DIEs for the same type possibly
13214 in between them. Determine the range of such qualified
13215 DIEs now (first being the base type, last being corresponding
13216 last qualified DIE for it). */
13217 unsigned int count = 0;
13218 first = qualified_die_p (mod_type_die, &first_quals,
13219 dwarf_qual_info_size);
13220 if (first == NULL)
13221 first = mod_type_die;
13222 gcc_assert ((first_quals & ~sub_quals) == 0);
13223 for (count = 0, last = first;
13224 count < (1U << dwarf_qual_info_size);
13225 count++, last = last->die_sib)
13226 {
13227 int quals = 0;
13228 if (last == mod_scope->die_child)
13229 break;
13230 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13231 != first)
13232 break;
13233 }
13234 }
13235
13236 for (i = 0; i < dwarf_qual_info_size; i++)
13237 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13238 {
13239 dw_die_ref d;
13240 if (first && first != last)
13241 {
13242 for (d = first->die_sib; ; d = d->die_sib)
13243 {
13244 int quals = 0;
13245 qualified_die_p (d, &quals, dwarf_qual_info_size);
13246 if (quals == (first_quals | dwarf_qual_info[i].q))
13247 break;
13248 if (d == last)
13249 {
13250 d = NULL;
13251 break;
13252 }
13253 }
13254 if (d)
13255 {
13256 mod_type_die = d;
13257 continue;
13258 }
13259 }
13260 if (first)
13261 {
13262 d = new_die_raw (dwarf_qual_info[i].t);
13263 add_child_die_after (mod_scope, d, last);
13264 last = d;
13265 }
13266 else
13267 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13268 if (mod_type_die)
13269 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13270 mod_type_die = d;
13271 first_quals |= dwarf_qual_info[i].q;
13272 }
13273 }
13274 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13275 {
13276 dwarf_tag tag = DW_TAG_pointer_type;
13277 if (code == REFERENCE_TYPE)
13278 {
13279 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13280 tag = DW_TAG_rvalue_reference_type;
13281 else
13282 tag = DW_TAG_reference_type;
13283 }
13284 mod_type_die = new_die (tag, mod_scope, type);
13285
13286 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13287 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13288 add_alignment_attribute (mod_type_die, type);
13289 item_type = TREE_TYPE (type);
13290
13291 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13292 if (!ADDR_SPACE_GENERIC_P (as))
13293 {
13294 int action = targetm.addr_space.debug (as);
13295 if (action >= 0)
13296 {
13297 /* Positive values indicate an address_class. */
13298 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13299 }
13300 else
13301 {
13302 /* Negative values indicate an (inverted) segment base reg. */
13303 dw_loc_descr_ref d
13304 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13305 add_AT_loc (mod_type_die, DW_AT_segment, d);
13306 }
13307 }
13308 }
13309 else if (code == INTEGER_TYPE
13310 && TREE_TYPE (type) != NULL_TREE
13311 && subrange_type_for_debug_p (type, &low, &high))
13312 {
13313 tree bias = NULL_TREE;
13314 if (lang_hooks.types.get_type_bias)
13315 bias = lang_hooks.types.get_type_bias (type);
13316 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13317 item_type = TREE_TYPE (type);
13318 }
13319 else if (is_base_type (type))
13320 {
13321 mod_type_die = base_type_die (type, reverse);
13322
13323 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13324 if (reverse_base_type)
13325 {
13326 dw_die_ref after_die
13327 = modified_type_die (type, cv_quals, false, context_die);
13328 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13329 }
13330 else
13331 add_child_die (comp_unit_die (), mod_type_die);
13332
13333 add_pubtype (type, mod_type_die);
13334 }
13335 else
13336 {
13337 gen_type_die (type, context_die);
13338
13339 /* We have to get the type_main_variant here (and pass that to the
13340 `lookup_type_die' routine) because the ..._TYPE node we have
13341 might simply be a *copy* of some original type node (where the
13342 copy was created to help us keep track of typedef names) and
13343 that copy might have a different TYPE_UID from the original
13344 ..._TYPE node. */
13345 if (TREE_CODE (type) == FUNCTION_TYPE
13346 || TREE_CODE (type) == METHOD_TYPE)
13347 {
13348 /* For function/method types, can't just use type_main_variant here,
13349 because that can have different ref-qualifiers for C++,
13350 but try to canonicalize. */
13351 tree main = TYPE_MAIN_VARIANT (type);
13352 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13353 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13354 && check_base_type (t, main)
13355 && check_lang_type (t, type))
13356 return lookup_type_die (t);
13357 return lookup_type_die (type);
13358 }
13359 else if (TREE_CODE (type) != VECTOR_TYPE
13360 && TREE_CODE (type) != ARRAY_TYPE)
13361 return lookup_type_die (type_main_variant (type));
13362 else
13363 /* Vectors have the debugging information in the type,
13364 not the main variant. */
13365 return lookup_type_die (type);
13366 }
13367
13368 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13369 don't output a DW_TAG_typedef, since there isn't one in the
13370 user's program; just attach a DW_AT_name to the type.
13371 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13372 if the base type already has the same name. */
13373 if (name
13374 && ((TREE_CODE (name) != TYPE_DECL
13375 && (qualified_type == TYPE_MAIN_VARIANT (type)
13376 || (cv_quals == TYPE_UNQUALIFIED)))
13377 || (TREE_CODE (name) == TYPE_DECL
13378 && TREE_TYPE (name) == qualified_type
13379 && DECL_NAME (name))))
13380 {
13381 if (TREE_CODE (name) == TYPE_DECL)
13382 /* Could just call add_name_and_src_coords_attributes here,
13383 but since this is a builtin type it doesn't have any
13384 useful source coordinates anyway. */
13385 name = DECL_NAME (name);
13386 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13387 }
13388 /* This probably indicates a bug. */
13389 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13390 {
13391 name = TYPE_IDENTIFIER (type);
13392 add_name_attribute (mod_type_die,
13393 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13394 }
13395
13396 if (qualified_type && !reverse_base_type)
13397 equate_type_number_to_die (qualified_type, mod_type_die);
13398
13399 if (item_type)
13400 /* We must do this after the equate_type_number_to_die call, in case
13401 this is a recursive type. This ensures that the modified_type_die
13402 recursion will terminate even if the type is recursive. Recursive
13403 types are possible in Ada. */
13404 sub_die = modified_type_die (item_type,
13405 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13406 reverse,
13407 context_die);
13408
13409 if (sub_die != NULL)
13410 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13411
13412 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13413 if (TYPE_ARTIFICIAL (type))
13414 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13415
13416 return mod_type_die;
13417 }
13418
13419 /* Generate DIEs for the generic parameters of T.
13420 T must be either a generic type or a generic function.
13421 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13422
13423 static void
13424 gen_generic_params_dies (tree t)
13425 {
13426 tree parms, args;
13427 int parms_num, i;
13428 dw_die_ref die = NULL;
13429 int non_default;
13430
13431 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13432 return;
13433
13434 if (TYPE_P (t))
13435 die = lookup_type_die (t);
13436 else if (DECL_P (t))
13437 die = lookup_decl_die (t);
13438
13439 gcc_assert (die);
13440
13441 parms = lang_hooks.get_innermost_generic_parms (t);
13442 if (!parms)
13443 /* T has no generic parameters. That means T is neither a generic
13444 type nor a generic function. End of story. */
13445 return;
13446
13447 parms_num = TREE_VEC_LENGTH (parms);
13448 args = lang_hooks.get_innermost_generic_args (t);
13449 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13450 non_default = int_cst_value (TREE_CHAIN (args));
13451 else
13452 non_default = TREE_VEC_LENGTH (args);
13453 for (i = 0; i < parms_num; i++)
13454 {
13455 tree parm, arg, arg_pack_elems;
13456 dw_die_ref parm_die;
13457
13458 parm = TREE_VEC_ELT (parms, i);
13459 arg = TREE_VEC_ELT (args, i);
13460 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13461 gcc_assert (parm && TREE_VALUE (parm) && arg);
13462
13463 if (parm && TREE_VALUE (parm) && arg)
13464 {
13465 /* If PARM represents a template parameter pack,
13466 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13467 by DW_TAG_template_*_parameter DIEs for the argument
13468 pack elements of ARG. Note that ARG would then be
13469 an argument pack. */
13470 if (arg_pack_elems)
13471 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13472 arg_pack_elems,
13473 die);
13474 else
13475 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13476 true /* emit name */, die);
13477 if (i >= non_default)
13478 add_AT_flag (parm_die, DW_AT_default_value, 1);
13479 }
13480 }
13481 }
13482
13483 /* Create and return a DIE for PARM which should be
13484 the representation of a generic type parameter.
13485 For instance, in the C++ front end, PARM would be a template parameter.
13486 ARG is the argument to PARM.
13487 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13488 the name of PARM.
13489 PARENT_DIE is the parent DIE to which the newly created DIE is added
13490 as a child node. */
13491
13492 static dw_die_ref
13493 generic_parameter_die (tree parm, tree arg,
13494 bool emit_name_p,
13495 dw_die_ref parent_die)
13496 {
13497 dw_die_ref tmpl_die = NULL;
13498 const char *name = NULL;
13499
13500 if (!parm || !DECL_NAME (parm) || !arg)
13501 return NULL;
13502
13503 /* We support non-type generic parameters and arguments,
13504 type generic parameters and arguments, as well as
13505 generic generic parameters (a.k.a. template template parameters in C++)
13506 and arguments. */
13507 if (TREE_CODE (parm) == PARM_DECL)
13508 /* PARM is a nontype generic parameter */
13509 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13510 else if (TREE_CODE (parm) == TYPE_DECL)
13511 /* PARM is a type generic parameter. */
13512 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13513 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13514 /* PARM is a generic generic parameter.
13515 Its DIE is a GNU extension. It shall have a
13516 DW_AT_name attribute to represent the name of the template template
13517 parameter, and a DW_AT_GNU_template_name attribute to represent the
13518 name of the template template argument. */
13519 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13520 parent_die, parm);
13521 else
13522 gcc_unreachable ();
13523
13524 if (tmpl_die)
13525 {
13526 tree tmpl_type;
13527
13528 /* If PARM is a generic parameter pack, it means we are
13529 emitting debug info for a template argument pack element.
13530 In other terms, ARG is a template argument pack element.
13531 In that case, we don't emit any DW_AT_name attribute for
13532 the die. */
13533 if (emit_name_p)
13534 {
13535 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13536 gcc_assert (name);
13537 add_AT_string (tmpl_die, DW_AT_name, name);
13538 }
13539
13540 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13541 {
13542 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13543 TMPL_DIE should have a child DW_AT_type attribute that is set
13544 to the type of the argument to PARM, which is ARG.
13545 If PARM is a type generic parameter, TMPL_DIE should have a
13546 child DW_AT_type that is set to ARG. */
13547 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13548 add_type_attribute (tmpl_die, tmpl_type,
13549 (TREE_THIS_VOLATILE (tmpl_type)
13550 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13551 false, parent_die);
13552 }
13553 else
13554 {
13555 /* So TMPL_DIE is a DIE representing a generic generic
13556 parameter, a.k.a. a template template parameter in C++,
13557 and ARG is a template. */
13558
13559 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13560 to the name of the argument. */
13561 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13562 if (name)
13563 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13564 }
13565
13566 if (TREE_CODE (parm) == PARM_DECL)
13567 /* So PARM is a non-type generic parameter.
13568 DWARF3 5.6.8 says we must set a DW_AT_const_value
13569 attribute on TMPL_DIE whose value represents the value
13570 of ARG.
13571 We must be careful here:
13572 the value of ARG might reference some function decls.
13573 We might currently be emitting debug info for a generic
13574 type, and types are emitted before function decls, so we don't
13575 know whether the function decls referenced by ARG will actually
13576 be emitted after the cgraph computations.
13577 Therefore we must defer the generation of the DW_AT_const_value
13578 until cgraph is ready. */
13579 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13580 }
13581
13582 return tmpl_die;
13583 }
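/* As an illustration (sketch), for the C++ template
"template <typename T, int N> struct S;" instantiated as S<int, 3>, the
code above yields a DW_TAG_template_type_param DIE named "T" whose
DW_AT_type refers to "int", and a DW_TAG_template_value_param DIE named
"N" whose DW_AT_type is "int" and whose DW_AT_const_value of 3 is filled
in once cgraph is ready. */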
13584
13585 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13586 PARM_PACK, which must be a template parameter pack. The returned DIE
13587 will be a child DIE of PARENT_DIE. */
13588
13589 static dw_die_ref
13590 template_parameter_pack_die (tree parm_pack,
13591 tree parm_pack_args,
13592 dw_die_ref parent_die)
13593 {
13594 dw_die_ref die;
13595 int j;
13596
13597 gcc_assert (parent_die && parm_pack);
13598
13599 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13600 add_name_and_src_coords_attributes (die, parm_pack);
13601 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13602 generic_parameter_die (parm_pack,
13603 TREE_VEC_ELT (parm_pack_args, j),
13604 false /* Don't emit DW_AT_name */,
13605 die);
13606 return die;
13607 }
13608
13609 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13610 an enumerated type. */
13611
13612 static inline int
13613 type_is_enum (const_tree type)
13614 {
13615 return TREE_CODE (type) == ENUMERAL_TYPE;
13616 }
13617
13618 /* Return the DBX register number described by a given RTL node. */
13619
13620 static unsigned int
13621 dbx_reg_number (const_rtx rtl)
13622 {
13623 unsigned regno = REGNO (rtl);
13624
13625 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13626
13627 #ifdef LEAF_REG_REMAP
13628 if (crtl->uses_only_leaf_regs)
13629 {
13630 int leaf_reg = LEAF_REG_REMAP (regno);
13631 if (leaf_reg != -1)
13632 regno = (unsigned) leaf_reg;
13633 }
13634 #endif
13635
13636 regno = DBX_REGISTER_NUMBER (regno);
13637 gcc_assert (regno != INVALID_REGNUM);
13638 return regno;
13639 }
13640
13641 /* Optionally add a DW_OP_piece term to a location description expression.
13642 DW_OP_piece is only added if the location description expression does
13643 not already end with DW_OP_piece. */
13644
13645 static void
13646 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13647 {
13648 dw_loc_descr_ref loc;
13649
13650 if (*list_head != NULL)
13651 {
13652 /* Find the end of the chain. */
13653 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13654 ;
13655
13656 if (loc->dw_loc_opc != DW_OP_piece)
13657 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13658 }
13659 }
13660
13661 /* Return a location descriptor that designates a machine register or
13662 zero if there is none. */
13663
13664 static dw_loc_descr_ref
13665 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13666 {
13667 rtx regs;
13668
13669 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13670 return 0;
13671
13672 /* We only use "frame base" when we're sure we're talking about the
13673 post-prologue local stack frame. We do this by *not* running
13674 register elimination until this point, and recognizing the special
13675 argument pointer and soft frame pointer rtx's.
13676 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13677 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13678 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13679 {
13680 dw_loc_descr_ref result = NULL;
13681
13682 if (dwarf_version >= 4 || !dwarf_strict)
13683 {
13684 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13685 initialized);
13686 if (result)
13687 add_loc_descr (&result,
13688 new_loc_descr (DW_OP_stack_value, 0, 0));
13689 }
13690 return result;
13691 }
13692
13693 regs = targetm.dwarf_register_span (rtl);
13694
13695 if (REG_NREGS (rtl) > 1 || regs)
13696 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13697 else
13698 {
13699 unsigned int dbx_regnum = dbx_reg_number (rtl);
13700 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13701 return 0;
13702 return one_reg_loc_descriptor (dbx_regnum, initialized);
13703 }
13704 }
13705
13706 /* Return a location descriptor that designates a machine register for
13707 a given hard register number. */
13708
13709 static dw_loc_descr_ref
13710 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13711 {
13712 dw_loc_descr_ref reg_loc_descr;
13713
13714 if (regno <= 31)
13715 reg_loc_descr
13716 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13717 else
13718 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13719
13720 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13721 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13722
13723 return reg_loc_descr;
13724 }
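/* E.g. (illustrative): DWARF register 3 is emitted as the one-byte
DW_OP_reg3, register 40 as DW_OP_regx with ULEB128 operand 40, and an
uninitialized variable additionally gets DW_OP_GNU_uninit appended. */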
13725
13726 /* Given an RTL of a register, return a location descriptor that
13727 designates a value that spans more than one register. */
13728
13729 static dw_loc_descr_ref
13730 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13731 enum var_init_status initialized)
13732 {
13733 int size, i;
13734 dw_loc_descr_ref loc_result = NULL;
13735
13736 /* Simple, contiguous registers. */
13737 if (regs == NULL_RTX)
13738 {
13739 unsigned reg = REGNO (rtl);
13740 int nregs;
13741
13742 #ifdef LEAF_REG_REMAP
13743 if (crtl->uses_only_leaf_regs)
13744 {
13745 int leaf_reg = LEAF_REG_REMAP (reg);
13746 if (leaf_reg != -1)
13747 reg = (unsigned) leaf_reg;
13748 }
13749 #endif
13750
13751 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13752 nregs = REG_NREGS (rtl);
13753
13754 /* At present we only track constant-sized pieces. */
13755 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13756 return NULL;
13757 size /= nregs;
13758
13759 loc_result = NULL;
13760 while (nregs--)
13761 {
13762 dw_loc_descr_ref t;
13763
13764 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13765 VAR_INIT_STATUS_INITIALIZED);
13766 add_loc_descr (&loc_result, t);
13767 add_loc_descr_op_piece (&loc_result, size);
13768 ++reg;
13769 }
13770 return loc_result;
13771 }
13772
13773 /* Now onto stupid register sets in non-contiguous locations. */
13774
13775 gcc_assert (GET_CODE (regs) == PARALLEL);
13776
13777 /* At present we only track constant-sized pieces. */
13778 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13779 return NULL;
13780 loc_result = NULL;
13781
13782 for (i = 0; i < XVECLEN (regs, 0); ++i)
13783 {
13784 dw_loc_descr_ref t;
13785
13786 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13787 VAR_INIT_STATUS_INITIALIZED);
13788 add_loc_descr (&loc_result, t);
13789 add_loc_descr_op_piece (&loc_result, size);
13790 }
13791
13792 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13793 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13794 return loc_result;
13795 }
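/* Example (illustrative): a 16-byte value living in two consecutive 8-byte
hard registers r and r+1 is described by the contiguous case above as
DW_OP_regx r, DW_OP_piece 8, DW_OP_regx r+1, DW_OP_piece 8 (or with the
short DW_OP_regN forms when the DWARF register numbers are below 32). */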
13796
13797 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13798
13799 /* Return a location descriptor that designates a constant i,
13800 as a compound operation from constant (i >> shift), constant shift
13801 and DW_OP_shl. */
13802
13803 static dw_loc_descr_ref
13804 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13805 {
13806 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13807 add_loc_descr (&ret, int_loc_descriptor (shift));
13808 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13809 return ret;
13810 }
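/* For instance (a sketch), calling this helper with I == 0x30000000 and
SHIFT == 28 yields DW_OP_lit3, DW_OP_lit28, DW_OP_shl -- three bytes --
whereas a plain DW_OP_const4u encoding of the same constant takes five. */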
13811
13812 /* Return a location descriptor that designates constant POLY_I. */
13813
13814 static dw_loc_descr_ref
13815 int_loc_descriptor (poly_int64 poly_i)
13816 {
13817 enum dwarf_location_atom op;
13818
13819 HOST_WIDE_INT i;
13820 if (!poly_i.is_constant (&i))
13821 {
13822 /* Create location descriptions for the non-constant part and
13823 add any constant offset at the end. */
13824 dw_loc_descr_ref ret = NULL;
13825 HOST_WIDE_INT constant = poly_i.coeffs[0];
13826 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13827 {
13828 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13829 if (coeff != 0)
13830 {
13831 dw_loc_descr_ref start = ret;
13832 unsigned int factor;
13833 int bias;
13834 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13835 (j, &factor, &bias);
13836
13837 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13838 add COEFF * (REGNO / FACTOR) now and subtract
13839 COEFF * BIAS from the final constant part. */
13840 constant -= coeff * bias;
13841 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13842 if (coeff % factor == 0)
13843 coeff /= factor;
13844 else
13845 {
13846 int amount = exact_log2 (factor);
13847 gcc_assert (amount >= 0);
13848 add_loc_descr (&ret, int_loc_descriptor (amount));
13849 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13850 }
13851 if (coeff != 1)
13852 {
13853 add_loc_descr (&ret, int_loc_descriptor (coeff));
13854 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13855 }
13856 if (start)
13857 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13858 }
13859 }
13860 loc_descr_plus_const (&ret, constant);
13861 return ret;
13862 }
13863
13864 /* Pick the smallest representation of a constant, rather than just
13865 defaulting to the LEB encoding. */
13866 if (i >= 0)
13867 {
13868 int clz = clz_hwi (i);
13869 int ctz = ctz_hwi (i);
13870 if (i <= 31)
13871 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13872 else if (i <= 0xff)
13873 op = DW_OP_const1u;
13874 else if (i <= 0xffff)
13875 op = DW_OP_const2u;
13876 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13877 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13878 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13879 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13880 while DW_OP_const4u is 5 bytes. */
13881 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13882 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13883 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13884 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13885 while DW_OP_const4u is 5 bytes. */
13886 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13887
13888 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13889 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13890 <= 4)
13891 {
13892 /* As i >= 2**31, the double cast above will yield a negative number.
13893 Since wrapping is defined in DWARF expressions we can output big
13894 positive integers as small negative ones, regardless of the size
13895 of host wide ints.
13896
13897 Here, since the evaluator will handle 32-bit values and since i >=
13898 2**31, we know it's going to be interpreted as a negative literal:
13899 store it this way if we can do better than 5 bytes this way. */
13900 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13901 }
13902 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13903 op = DW_OP_const4u;
13904
13905 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13906 least 6 bytes: see if we can do better before falling back to it. */
13907 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13908 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13909 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13910 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13911 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13912 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13913 >= HOST_BITS_PER_WIDE_INT)
13914 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13915 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13916 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13917 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13918 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13919 && size_of_uleb128 (i) > 6)
13920 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13921 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13922 else
13923 op = DW_OP_constu;
13924 }
13925 else
13926 {
13927 if (i >= -0x80)
13928 op = DW_OP_const1s;
13929 else if (i >= -0x8000)
13930 op = DW_OP_const2s;
13931 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13932 {
13933 if (size_of_int_loc_descriptor (i) < 5)
13934 {
13935 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13936 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13937 return ret;
13938 }
13939 op = DW_OP_const4s;
13940 }
13941 else
13942 {
13943 if (size_of_int_loc_descriptor (i)
13944 < (unsigned long) 1 + size_of_sleb128 (i))
13945 {
13946 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13947 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13948 return ret;
13949 }
13950 op = DW_OP_consts;
13951 }
13952 }
13953
13954 return new_loc_descr (op, i, 0);
13955 }
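/* A few worked encodings for the selection logic above (illustrative):
17 fits in the single byte DW_OP_lit17; 300 becomes DW_OP_const2u 300
(three bytes); -70000 becomes DW_OP_const4s -70000 (five bytes); and with
a 4-byte address size, 0xffffffff is re-expressed via the (int32_t) cast
as int_loc_descriptor (-1), i.e. DW_OP_const1s -1 (two bytes), instead of
a five-byte DW_OP_const4u. */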
13956
13957 /* Likewise, for unsigned constants. */
13958
13959 static dw_loc_descr_ref
13960 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13961 {
13962 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13963 const unsigned HOST_WIDE_INT max_uint
13964 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13965
13966 /* If possible, use the clever signed constants handling. */
13967 if (i <= max_int)
13968 return int_loc_descriptor ((HOST_WIDE_INT) i);
13969
13970 /* Here, we are left with positive numbers that cannot be represented as
13971 HOST_WIDE_INT, i.e.:
13972 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13973
13974 Using a DW_OP_const4u/const8u operation to encode them consumes a lot
13975 of bytes, whereas it may be better to output a negative integer: thanks
13976 to modular wrapping in DWARF expressions, we know that:
13977 x is interpreted as x - 2 ** (8 * DWARF2_ADDR_SIZE)
13978 = x - 2 * (max (HOST_WIDE_INT) + 1)
13979 (given the size check below). So numbers close to
13980 max (unsigned HOST_WIDE_INT) can be represented as small negative
13981 integers. Let's try that in cases where it will clearly improve the
13982 encoding: there is no gain turning DW_OP_const4u into DW_OP_const4s. */
13983 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13984 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13985 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13986 {
13987 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13988
13989 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13990 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13991 const HOST_WIDE_INT second_shift
13992 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13993
13994 /* So we finally have:
13995 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13996 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13997 return int_loc_descriptor (second_shift);
13998 }
13999
14000 /* Last chance: fall back to a simple constant operation. */
14001 return new_loc_descr
14002 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14003 ? DW_OP_const4u
14004 : DW_OP_const8u,
14005 i, 0);
14006 }
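/* Worked example (illustrative, assuming DWARF2_ADDR_SIZE == 8 and a
64-bit HOST_WIDE_INT): for i == 0xfffffffffffffff0, first_shift is
0x7ffffffffffffff0 and second_shift is -16, so the value is emitted as
DW_OP_const1s -16 -- two bytes that wrap to the same 64-bit pattern --
instead of a nine-byte DW_OP_const8u. */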
14007
14008 /* Generate and return a location description that computes the unsigned
14009 comparison of the two stack top entries (a OP b where b is the top-most
14010 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14011 LE_EXPR, GT_EXPR or GE_EXPR. */
14012
14013 static dw_loc_descr_ref
14014 uint_comparison_loc_list (enum tree_code kind)
14015 {
14016 enum dwarf_location_atom op, flip_op;
14017 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14018
14019 switch (kind)
14020 {
14021 case LT_EXPR:
14022 op = DW_OP_lt;
14023 break;
14024 case LE_EXPR:
14025 op = DW_OP_le;
14026 break;
14027 case GT_EXPR:
14028 op = DW_OP_gt;
14029 break;
14030 case GE_EXPR:
14031 op = DW_OP_ge;
14032 break;
14033 default:
14034 gcc_unreachable ();
14035 }
14036
14037 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14038 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14039
14040 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14041 possible to perform unsigned comparisons: we just have to distinguish
14042 three cases:
14043
14044 1. when a and b have the same sign (as signed integers), we can simply
14045 return a OP(signed) b;
14046 2. when a is a negative signed integer while b is a positive one, then a
14047 is the greater unsigned integer;
14048 3. likewise, when a is positive and b is negative, then a is the smaller
14049 unsigned integer.
14050
14051 So first, compare the signs of the two operands. */
14052 ret = new_loc_descr (DW_OP_over, 0, 0);
14053 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14054 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14055 /* If they have different signs (i.e. they have different sign bits), then
14056 the stack top value has now the sign bit set and thus it's smaller than
14057 zero. */
14058 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14059 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14060 add_loc_descr (&ret, bra_node);
14061
14062 /* We are in case 1. At this point, we know both operands have the same
14063 sign, so it's safe to use the built-in signed comparison. */
14064 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14065 add_loc_descr (&ret, jmp_node);
14066
14067 /* We are in case 2 or 3. Here, we know the operands have different signs,
14068 so we have to flip the signed comparison. */
14069 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14070 tmp = new_loc_descr (flip_op, 0, 0);
14071 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14072 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14073 add_loc_descr (&ret, tmp);
14074
14075 /* This dummy operation is necessary to make the two branches join. */
14076 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14077 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14078 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14079 add_loc_descr (&ret, tmp);
14080
14081 return ret;
14082 }
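/* For KIND == LT_EXPR the routine above therefore builds (sketch):
DW_OP_over, DW_OP_over, DW_OP_xor, DW_OP_lit0, DW_OP_lt, DW_OP_bra L1,
DW_OP_lt, DW_OP_skip L2, L1: DW_OP_gt, L2: DW_OP_nop
i.e. same-sign operands take the signed DW_OP_lt, different-sign operands
branch to the flipped DW_OP_gt, and both paths rejoin at the DW_OP_nop;
L1 and L2 stand for the branch targets wired up through dw_loc_oprnd1. */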
14083
14084 /* Likewise, but takes the location description lists (might be destructive on
14085 them). Return NULL if either is NULL or if concatenation fails. */
14086
14087 static dw_loc_list_ref
14088 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14089 enum tree_code kind)
14090 {
14091 if (left == NULL || right == NULL)
14092 return NULL;
14093
14094 add_loc_list (&left, right);
14095 if (left == NULL)
14096 return NULL;
14097
14098 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14099 return left;
14100 }
14101
14102 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14103 without actually allocating it. */
14104
14105 static unsigned long
14106 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14107 {
14108 return size_of_int_loc_descriptor (i >> shift)
14109 + size_of_int_loc_descriptor (shift)
14110 + 1;
14111 }
14112
14113 /* Return size_of_locs (int_loc_descriptor (i)) without
14114 actually allocating it. */
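/* For instance, with the encoding choices handled below: i = 17 costs 1 byte
   (DW_OP_lit17), i = 300 costs 3 bytes (DW_OP_const2u), i = -100 costs
   2 bytes (DW_OP_const1s) and a dense 32-bit value such as 0x12345678 costs
   5 bytes (DW_OP_const4u), while a sparse value like (HOST_WIDE_INT) 1 << 40
   is cheaper via the shift form (4 bytes, e.g. DW_OP_lit16 DW_OP_const1u 36
   DW_OP_shl) rather than the 9 bytes of DW_OP_const8u.  */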
14115
14116 static unsigned long
14117 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14118 {
14119 unsigned long s;
14120
14121 if (i >= 0)
14122 {
14123 int clz, ctz;
14124 if (i <= 31)
14125 return 1;
14126 else if (i <= 0xff)
14127 return 2;
14128 else if (i <= 0xffff)
14129 return 3;
14130 clz = clz_hwi (i);
14131 ctz = ctz_hwi (i);
14132 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14133 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14134 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14135 - clz - 5);
14136 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14137 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14138 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14139 - clz - 8);
14140 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14141 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14142 <= 4)
14143 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14144 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14145 return 5;
14146 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14147 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14148 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14149 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14150 - clz - 8);
14151 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14152 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14153 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14154 - clz - 16);
14155 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14156 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14157 && s > 6)
14158 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14159 - clz - 32);
14160 else
14161 return 1 + s;
14162 }
14163 else
14164 {
14165 if (i >= -0x80)
14166 return 2;
14167 else if (i >= -0x8000)
14168 return 3;
14169 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14170 {
14171 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14172 {
14173 s = size_of_int_loc_descriptor (-i) + 1;
14174 if (s < 5)
14175 return s;
14176 }
14177 return 5;
14178 }
14179 else
14180 {
14181 unsigned long r = 1 + size_of_sleb128 (i);
14182 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14183 {
14184 s = size_of_int_loc_descriptor (-i) + 1;
14185 if (s < r)
14186 return s;
14187 }
14188 return r;
14189 }
14190 }
14191 }
14192
14193 /* Return a location description representing the "address" of an integer
14194 value. This can appear only as a top-level expression. */
14195
14196 static dw_loc_descr_ref
14197 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14198 {
14199 int litsize;
14200 dw_loc_descr_ref loc_result = NULL;
14201
14202 if (!(dwarf_version >= 4 || !dwarf_strict))
14203 return NULL;
14204
14205 litsize = size_of_int_loc_descriptor (i);
14206 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14207 is more compact. For DW_OP_stack_value we need:
14208 litsize + 1 (DW_OP_stack_value)
14209 and for DW_OP_implicit_value:
14210 1 (DW_OP_implicit_value) + 1 (length) + size. */
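/* For example, with size == 4 and i == 7 (and assuming DWARF2_ADDR_SIZE is
   at least 4): litsize is 1, so the DW_OP_stack_value form costs 1 + 1 = 2
   bytes versus 1 + 1 + 4 = 6 bytes for DW_OP_implicit_value, and
   DW_OP_lit7 DW_OP_stack_value is emitted.  */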
14211 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14212 {
14213 loc_result = int_loc_descriptor (i);
14214 add_loc_descr (&loc_result,
14215 new_loc_descr (DW_OP_stack_value, 0, 0));
14216 return loc_result;
14217 }
14218
14219 loc_result = new_loc_descr (DW_OP_implicit_value,
14220 size, 0);
14221 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14222 loc_result->dw_loc_oprnd2.v.val_int = i;
14223 return loc_result;
14224 }
14225
14226 /* Return a location descriptor that designates a base+offset location. */
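/* For example, a local at offset -16 from the frame base is typically
   described as DW_OP_fbreg -16, while an offset of 8 from a hard register
   that is not the frame base becomes a DW_OP_breg<n> 8 style descriptor via
   new_reg_loc_descr; which form is used depends on the cases below.  */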
14227
14228 static dw_loc_descr_ref
14229 based_loc_descr (rtx reg, poly_int64 offset,
14230 enum var_init_status initialized)
14231 {
14232 unsigned int regno;
14233 dw_loc_descr_ref result;
14234 dw_fde_ref fde = cfun->fde;
14235
14236 /* We only use "frame base" when we're sure we're talking about the
14237 post-prologue local stack frame. We do this by *not* running
14238 register elimination until this point, and recognizing the special
14239 argument pointer and soft frame pointer rtx's. */
14240 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14241 {
14242 rtx elim = (ira_use_lra_p
14243 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14244 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14245
14246 if (elim != reg)
14247 {
14248 elim = strip_offset_and_add (elim, &offset);
14249 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14250 && (elim == hard_frame_pointer_rtx
14251 || elim == stack_pointer_rtx))
14252 || elim == (frame_pointer_needed
14253 ? hard_frame_pointer_rtx
14254 : stack_pointer_rtx));
14255
14256 /* If drap register is used to align stack, use frame
14257 pointer + offset to access stack variables. If stack
14258 is aligned without drap, use stack pointer + offset to
14259 access stack variables. */
14260 if (crtl->stack_realign_tried
14261 && reg == frame_pointer_rtx)
14262 {
14263 int base_reg
14264 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14265 ? HARD_FRAME_POINTER_REGNUM
14266 : REGNO (elim));
14267 return new_reg_loc_descr (base_reg, offset);
14268 }
14269
14270 gcc_assert (frame_pointer_fb_offset_valid);
14271 offset += frame_pointer_fb_offset;
14272 HOST_WIDE_INT const_offset;
14273 if (offset.is_constant (&const_offset))
14274 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14275 else
14276 {
14277 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14278 loc_descr_plus_const (&ret, offset);
14279 return ret;
14280 }
14281 }
14282 }
14283
14284 regno = REGNO (reg);
14285 #ifdef LEAF_REG_REMAP
14286 if (crtl->uses_only_leaf_regs)
14287 {
14288 int leaf_reg = LEAF_REG_REMAP (regno);
14289 if (leaf_reg != -1)
14290 regno = (unsigned) leaf_reg;
14291 }
14292 #endif
14293 regno = DWARF_FRAME_REGNUM (regno);
14294
14295 HOST_WIDE_INT const_offset;
14296 if (!optimize && fde
14297 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14298 && offset.is_constant (&const_offset))
14299 {
14300 /* Use cfa+offset to represent the location of arguments passed
14301 on the stack when drap is used to align stack.
14302 Only do this when not optimizing, for optimized code var-tracking
14303 is supposed to track where the arguments live and the register
14304 used as vdrap or drap in some spot might be used for something
14305 else in other parts of the routine. */
14306 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14307 }
14308
14309 result = new_reg_loc_descr (regno, offset);
14310
14311 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14312 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14313
14314 return result;
14315 }
14316
14317 /* Return true if this RTL expression describes a base+offset calculation. */
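/* For instance, (plus (reg:DI 6) (const_int -24)) qualifies when register 6
   is a hard register, whereas a PLUS involving a pseudo register or a
   non-constant offset does not.  (The register number is only illustrative;
   hard register numbering is target dependent.)  */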
14318
14319 static inline int
14320 is_based_loc (const_rtx rtl)
14321 {
14322 return (GET_CODE (rtl) == PLUS
14323 && ((REG_P (XEXP (rtl, 0))
14324 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14325 && CONST_INT_P (XEXP (rtl, 1)))));
14326 }
14327
14328 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14329 failed. */
14330
14331 static dw_loc_descr_ref
14332 tls_mem_loc_descriptor (rtx mem)
14333 {
14334 tree base;
14335 dw_loc_descr_ref loc_result;
14336
14337 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14338 return NULL;
14339
14340 base = get_base_address (MEM_EXPR (mem));
14341 if (base == NULL
14342 || !VAR_P (base)
14343 || !DECL_THREAD_LOCAL_P (base))
14344 return NULL;
14345
14346 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14347 if (loc_result == NULL)
14348 return NULL;
14349
14350 if (maybe_ne (MEM_OFFSET (mem), 0))
14351 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14352
14353 return loc_result;
14354 }
14355
14356 /* Output debug info about the reason why we failed to expand an expression
14357 as a DWARF expression. */
14358
14359 static void
14360 expansion_failed (tree expr, rtx rtl, char const *reason)
14361 {
14362 if (dump_file && (dump_flags & TDF_DETAILS))
14363 {
14364 fprintf (dump_file, "Failed to expand as dwarf: ");
14365 if (expr)
14366 print_generic_expr (dump_file, expr, dump_flags);
14367 if (rtl)
14368 {
14369 fprintf (dump_file, "\n");
14370 print_rtl (dump_file, rtl);
14371 }
14372 fprintf (dump_file, "\nReason: %s\n", reason);
14373 }
14374 }
14375
14376 /* Helper function for const_ok_for_output. */
14377
14378 static bool
14379 const_ok_for_output_1 (rtx rtl)
14380 {
14381 if (targetm.const_not_ok_for_debug_p (rtl))
14382 {
14383 if (GET_CODE (rtl) != UNSPEC)
14384 {
14385 expansion_failed (NULL_TREE, rtl,
14386 "Expression rejected for debug by the backend.\n");
14387 return false;
14388 }
14389
14390 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14391 the target hook doesn't explicitly allow it in debug info, assume
14392 we can't express it in the debug info. */
14393 /* Don't complain about TLS UNSPECs, those are just too hard to
14394 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14395 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14396 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14397 if (flag_checking
14398 && (XVECLEN (rtl, 0) == 0
14399 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14400 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14401 inform (current_function_decl
14402 ? DECL_SOURCE_LOCATION (current_function_decl)
14403 : UNKNOWN_LOCATION,
14404 #if NUM_UNSPEC_VALUES > 0
14405 "non-delegitimized UNSPEC %s (%d) found in variable location",
14406 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14407 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14408 XINT (rtl, 1));
14409 #else
14410 "non-delegitimized UNSPEC %d found in variable location",
14411 XINT (rtl, 1));
14412 #endif
14413 expansion_failed (NULL_TREE, rtl,
14414 "UNSPEC hasn't been delegitimized.\n");
14415 return false;
14416 }
14417
14418 if (CONST_POLY_INT_P (rtl))
14419 return false;
14420
14421 if (targetm.const_not_ok_for_debug_p (rtl))
14422 {
14423 expansion_failed (NULL_TREE, rtl,
14424 "Expression rejected for debug by the backend.\n");
14425 return false;
14426 }
14427
14428 /* FIXME: Refer to PR60655. It is possible for simplification
14429 of rtl expressions in var tracking to produce such expressions.
14430 We should really identify / validate expressions
14431 enclosed in CONST that can be handled by assemblers on various
14432 targets and only handle legitimate cases here. */
14433 switch (GET_CODE (rtl))
14434 {
14435 case SYMBOL_REF:
14436 break;
14437 case NOT:
14438 case NEG:
14439 return false;
14440 default:
14441 return true;
14442 }
14443
14444 if (CONSTANT_POOL_ADDRESS_P (rtl))
14445 {
14446 bool marked;
14447 get_pool_constant_mark (rtl, &marked);
14448 /* If all references to this pool constant were optimized away,
14449 it was not output and thus we can't represent it. */
14450 if (!marked)
14451 {
14452 expansion_failed (NULL_TREE, rtl,
14453 "Constant was removed from constant pool.\n");
14454 return false;
14455 }
14456 }
14457
14458 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14459 return false;
14460
14461 /* Avoid references to external symbols in debug info, on several targets
14462 the linker might even refuse to link when linking a shared library,
14463 and in many other cases the relocations for .debug_info/.debug_loc are
14464 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14465 to be defined within the same shared library or executable, are fine. */
14466 if (SYMBOL_REF_EXTERNAL_P (rtl))
14467 {
14468 tree decl = SYMBOL_REF_DECL (rtl);
14469
14470 if (decl == NULL || !targetm.binds_local_p (decl))
14471 {
14472 expansion_failed (NULL_TREE, rtl,
14473 "Symbol not defined in current TU.\n");
14474 return false;
14475 }
14476 }
14477
14478 return true;
14479 }
14480
14481 /* Return true if constant RTL can be emitted in DW_OP_addr or
14482 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14483 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14484
14485 static bool
14486 const_ok_for_output (rtx rtl)
14487 {
14488 if (GET_CODE (rtl) == SYMBOL_REF)
14489 return const_ok_for_output_1 (rtl);
14490
14491 if (GET_CODE (rtl) == CONST)
14492 {
14493 subrtx_var_iterator::array_type array;
14494 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14495 if (!const_ok_for_output_1 (*iter))
14496 return false;
14497 return true;
14498 }
14499
14500 return true;
14501 }
14502
14503 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14504 if possible, NULL otherwise. */
14505
14506 static dw_die_ref
14507 base_type_for_mode (machine_mode mode, bool unsignedp)
14508 {
14509 dw_die_ref type_die;
14510 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14511
14512 if (type == NULL)
14513 return NULL;
14514 switch (TREE_CODE (type))
14515 {
14516 case INTEGER_TYPE:
14517 case REAL_TYPE:
14518 break;
14519 default:
14520 return NULL;
14521 }
14522 type_die = lookup_type_die (type);
14523 if (!type_die)
14524 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14525 comp_unit_die ());
14526 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14527 return NULL;
14528 return type_die;
14529 }
14530
14531 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14532 type matching MODE, or, if MODE is narrower than or as wide as
14533 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14534 possible. */
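/* In other words: for a mode no wider than DWARF2_ADDR_SIZE this just
   appends an untyped DW_OP_convert (operand 0); for wider modes it appends
   a DW_OP_convert that references the unsigned base type DIE for MODE.  */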
14535
14536 static dw_loc_descr_ref
14537 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14538 {
14539 machine_mode outer_mode = mode;
14540 dw_die_ref type_die;
14541 dw_loc_descr_ref cvt;
14542
14543 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14544 {
14545 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14546 return op;
14547 }
14548 type_die = base_type_for_mode (outer_mode, 1);
14549 if (type_die == NULL)
14550 return NULL;
14551 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14552 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14553 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14554 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14555 add_loc_descr (&op, cvt);
14556 return op;
14557 }
14558
14559 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14560
14561 static dw_loc_descr_ref
14562 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14563 dw_loc_descr_ref op1)
14564 {
14565 dw_loc_descr_ref ret = op0;
14566 add_loc_descr (&ret, op1);
14567 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14568 if (STORE_FLAG_VALUE != 1)
14569 {
14570 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14571 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14572 }
14573 return ret;
14574 }
14575
14576 /* Subroutine of scompare_loc_descriptor for the case in which we're
14577 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14578 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14579
14580 static dw_loc_descr_ref
14581 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14582 scalar_int_mode op_mode,
14583 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14584 {
14585 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14586 dw_loc_descr_ref cvt;
14587
14588 if (type_die == NULL)
14589 return NULL;
14590 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14591 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14592 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14593 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14594 add_loc_descr (&op0, cvt);
14595 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14596 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14597 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14598 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14599 add_loc_descr (&op1, cvt);
14600 return compare_loc_descriptor (op, op0, op1);
14601 }
14602
14603 /* Subroutine of scompare_loc_descriptor for the case in which we're
14604 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14605 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14606
14607 static dw_loc_descr_ref
14608 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14609 scalar_int_mode op_mode,
14610 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14611 {
14612 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14613 /* For eq/ne, if the operands are known to be zero-extended,
14614 there is no need to do the fancy shifting up. */
14615 if (op == DW_OP_eq || op == DW_OP_ne)
14616 {
14617 dw_loc_descr_ref last0, last1;
14618 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14619 ;
14620 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14621 ;
14622 /* deref_size zero extends, and for constants we can check
14623 whether they are zero extended or not. */
14624 if (((last0->dw_loc_opc == DW_OP_deref_size
14625 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14626 || (CONST_INT_P (XEXP (rtl, 0))
14627 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14628 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14629 && ((last1->dw_loc_opc == DW_OP_deref_size
14630 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14631 || (CONST_INT_P (XEXP (rtl, 1))
14632 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14633 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14634 return compare_loc_descriptor (op, op0, op1);
14635
14636 /* EQ/NE comparison against constant in narrower type than
14637 DWARF2_ADDR_SIZE can be performed either as
14638 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14639 DW_OP_{eq,ne}
14640 or
14641 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14642 DW_OP_{eq,ne}. Pick whatever is shorter. */
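/* For instance, comparing a QImode value against 5 on a 64-bit target:
   the masking form needs DW_OP_const1u 0xff DW_OP_and plus DW_OP_lit5,
   whereas the shift form would need the constant 5 << 56, which is larger
   to encode, so the size comparison below picks the masking form.  */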
14643 if (CONST_INT_P (XEXP (rtl, 1))
14644 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14645 && (size_of_int_loc_descriptor (shift) + 1
14646 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14647 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14648 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14649 & GET_MODE_MASK (op_mode))))
14650 {
14651 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14652 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14653 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14654 & GET_MODE_MASK (op_mode));
14655 return compare_loc_descriptor (op, op0, op1);
14656 }
14657 }
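/* Otherwise shift both operands left so that the sign bit of OP_MODE lands
   in the sign bit of a DWARF2_ADDR_SIZE-wide value; the signed comparison
   on the shifted values then gives the desired result for the narrow mode,
   regardless of what the upper bits contained.  */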
14658 add_loc_descr (&op0, int_loc_descriptor (shift));
14659 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14660 if (CONST_INT_P (XEXP (rtl, 1)))
14661 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14662 else
14663 {
14664 add_loc_descr (&op1, int_loc_descriptor (shift));
14665 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14666 }
14667 return compare_loc_descriptor (op, op0, op1);
14668 }
14669
14670 /* Return location descriptor for signed comparison OP RTL. */
14671
14672 static dw_loc_descr_ref
14673 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14674 machine_mode mem_mode)
14675 {
14676 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14677 dw_loc_descr_ref op0, op1;
14678
14679 if (op_mode == VOIDmode)
14680 op_mode = GET_MODE (XEXP (rtl, 1));
14681 if (op_mode == VOIDmode)
14682 return NULL;
14683
14684 scalar_int_mode int_op_mode;
14685 if (dwarf_strict
14686 && dwarf_version < 5
14687 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14688 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14689 return NULL;
14690
14691 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14692 VAR_INIT_STATUS_INITIALIZED);
14693 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14694 VAR_INIT_STATUS_INITIALIZED);
14695
14696 if (op0 == NULL || op1 == NULL)
14697 return NULL;
14698
14699 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14700 {
14701 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14702 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14703
14704 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14705 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14706 }
14707 return compare_loc_descriptor (op, op0, op1);
14708 }
14709
14710 /* Return location descriptor for unsigned comparison OP RTL. */
14711
14712 static dw_loc_descr_ref
14713 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14714 machine_mode mem_mode)
14715 {
14716 dw_loc_descr_ref op0, op1;
14717
14718 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14719 if (test_op_mode == VOIDmode)
14720 test_op_mode = GET_MODE (XEXP (rtl, 1));
14721
14722 scalar_int_mode op_mode;
14723 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14724 return NULL;
14725
14726 if (dwarf_strict
14727 && dwarf_version < 5
14728 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14729 return NULL;
14730
14731 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14732 VAR_INIT_STATUS_INITIALIZED);
14733 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14734 VAR_INIT_STATUS_INITIALIZED);
14735
14736 if (op0 == NULL || op1 == NULL)
14737 return NULL;
14738
14739 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14740 {
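/* Zero-extend both operands by masking them to OP_MODE; once both values
   are known to be non-negative, the signed DWARF comparison below yields
   the unsigned result.  */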
14741 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14742 dw_loc_descr_ref last0, last1;
14743 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14744 ;
14745 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14746 ;
14747 if (CONST_INT_P (XEXP (rtl, 0)))
14748 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14749 /* deref_size zero extends, so no need to mask it again. */
14750 else if (last0->dw_loc_opc != DW_OP_deref_size
14751 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14752 {
14753 add_loc_descr (&op0, int_loc_descriptor (mask));
14754 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14755 }
14756 if (CONST_INT_P (XEXP (rtl, 1)))
14757 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14758 /* deref_size zero extends, so no need to mask it again. */
14759 else if (last1->dw_loc_opc != DW_OP_deref_size
14760 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14761 {
14762 add_loc_descr (&op1, int_loc_descriptor (mask));
14763 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14764 }
14765 }
14766 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14767 {
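/* Flip the sign bit of both operands by adding 2**(N-1) modulo 2**N
   (N being the address size in bits).  This maps unsigned order onto
   signed order, so the signed DWARF comparison below computes the
   unsigned comparison.  */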
14768 HOST_WIDE_INT bias = 1;
14769 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14770 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14771 if (CONST_INT_P (XEXP (rtl, 1)))
14772 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14773 + INTVAL (XEXP (rtl, 1)));
14774 else
14775 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14776 bias, 0));
14777 }
14778 return compare_loc_descriptor (op, op0, op1);
14779 }
14780
14781 /* Return location descriptor for {U,S}{MIN,MAX}. */
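/* The descriptor built here keeps both values on the stack, compares
   (suitably adjusted) copies of them with DW_OP_lt or DW_OP_gt, and then
   uses DW_OP_bra either to drop the top value directly or to swap first,
   so that the selected minimum or maximum is what remains.  */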
14782
14783 static dw_loc_descr_ref
14784 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14785 machine_mode mem_mode)
14786 {
14787 enum dwarf_location_atom op;
14788 dw_loc_descr_ref op0, op1, ret;
14789 dw_loc_descr_ref bra_node, drop_node;
14790
14791 scalar_int_mode int_mode;
14792 if (dwarf_strict
14793 && dwarf_version < 5
14794 && (!is_a <scalar_int_mode> (mode, &int_mode)
14795 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14796 return NULL;
14797
14798 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14799 VAR_INIT_STATUS_INITIALIZED);
14800 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14801 VAR_INIT_STATUS_INITIALIZED);
14802
14803 if (op0 == NULL || op1 == NULL)
14804 return NULL;
14805
14806 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14807 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14808 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14809 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14810 {
14811 /* Checked by the caller. */
14812 int_mode = as_a <scalar_int_mode> (mode);
14813 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14814 {
14815 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14816 add_loc_descr (&op0, int_loc_descriptor (mask));
14817 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14818 add_loc_descr (&op1, int_loc_descriptor (mask));
14819 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14820 }
14821 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14822 {
14823 HOST_WIDE_INT bias = 1;
14824 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14825 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14826 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14827 }
14828 }
14829 else if (is_a <scalar_int_mode> (mode, &int_mode)
14830 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14831 {
14832 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14833 add_loc_descr (&op0, int_loc_descriptor (shift));
14834 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14835 add_loc_descr (&op1, int_loc_descriptor (shift));
14836 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14837 }
14838 else if (is_a <scalar_int_mode> (mode, &int_mode)
14839 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14840 {
14841 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14842 dw_loc_descr_ref cvt;
14843 if (type_die == NULL)
14844 return NULL;
14845 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14846 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14847 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14848 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14849 add_loc_descr (&op0, cvt);
14850 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14851 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14852 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14853 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14854 add_loc_descr (&op1, cvt);
14855 }
14856
14857 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14858 op = DW_OP_lt;
14859 else
14860 op = DW_OP_gt;
14861 ret = op0;
14862 add_loc_descr (&ret, op1);
14863 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14864 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14865 add_loc_descr (&ret, bra_node);
14866 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14867 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14868 add_loc_descr (&ret, drop_node);
14869 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14870 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14871 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14872 && is_a <scalar_int_mode> (mode, &int_mode)
14873 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14874 ret = convert_descriptor_to_mode (int_mode, ret);
14875 return ret;
14876 }
14877
14878 /* Helper function for mem_loc_descriptor. Perform the OP binary op, but
14879 only after converting both arguments to TYPE_DIE; afterwards convert
14880 the result back to unsigned. */
14881
14882 static dw_loc_descr_ref
14883 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14884 scalar_int_mode mode, machine_mode mem_mode)
14885 {
14886 dw_loc_descr_ref cvt, op0, op1;
14887
14888 if (type_die == NULL)
14889 return NULL;
14890 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14891 VAR_INIT_STATUS_INITIALIZED);
14892 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14893 VAR_INIT_STATUS_INITIALIZED);
14894 if (op0 == NULL || op1 == NULL)
14895 return NULL;
14896 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14897 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14898 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14899 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14900 add_loc_descr (&op0, cvt);
14901 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14902 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14903 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14904 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14905 add_loc_descr (&op1, cvt);
14906 add_loc_descr (&op0, op1);
14907 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14908 return convert_descriptor_to_mode (mode, op0);
14909 }
14910
14911 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14912 const0 is DW_OP_lit0 or corresponding typed constant,
14913 const1 is DW_OP_lit1 or corresponding typed constant
14914 and constMSB is constant with just the MSB bit set
14915 for the mode):
14916 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14917 L1: const0 DW_OP_swap
14918 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14919 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14920 L3: DW_OP_drop
14921 L4: DW_OP_nop
14922
14923 CTZ is similar:
14924 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14925 L1: const0 DW_OP_swap
14926 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14927 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14928 L3: DW_OP_drop
14929 L4: DW_OP_nop
14930
14931 FFS is similar:
14932 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14933 L1: const1 DW_OP_swap
14934 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14935 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14936 L3: DW_OP_drop
14937 L4: DW_OP_nop */
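/* In other words: the initial DW_OP_bra handles a zero input by pushing
   constV directly; otherwise a counter starting at const0 (const1 for FFS)
   is kept below the value, and the L2 loop shifts the value by one bit per
   iteration (left for CLZ, right for CTZ/FFS), bumping the counter, until
   the tested bit (MSB for CLZ, LSB otherwise) is set.  */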
14938
14939 static dw_loc_descr_ref
14940 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14941 machine_mode mem_mode)
14942 {
14943 dw_loc_descr_ref op0, ret, tmp;
14944 HOST_WIDE_INT valv;
14945 dw_loc_descr_ref l1jump, l1label;
14946 dw_loc_descr_ref l2jump, l2label;
14947 dw_loc_descr_ref l3jump, l3label;
14948 dw_loc_descr_ref l4jump, l4label;
14949 rtx msb;
14950
14951 if (GET_MODE (XEXP (rtl, 0)) != mode)
14952 return NULL;
14953
14954 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14955 VAR_INIT_STATUS_INITIALIZED);
14956 if (op0 == NULL)
14957 return NULL;
14958 ret = op0;
14959 if (GET_CODE (rtl) == CLZ)
14960 {
14961 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14962 valv = GET_MODE_BITSIZE (mode);
14963 }
14964 else if (GET_CODE (rtl) == FFS)
14965 valv = 0;
14966 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14967 valv = GET_MODE_BITSIZE (mode);
14968 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14969 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14970 add_loc_descr (&ret, l1jump);
14971 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14972 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14973 VAR_INIT_STATUS_INITIALIZED);
14974 if (tmp == NULL)
14975 return NULL;
14976 add_loc_descr (&ret, tmp);
14977 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14978 add_loc_descr (&ret, l4jump);
14979 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14980 ? const1_rtx : const0_rtx,
14981 mode, mem_mode,
14982 VAR_INIT_STATUS_INITIALIZED);
14983 if (l1label == NULL)
14984 return NULL;
14985 add_loc_descr (&ret, l1label);
14986 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14987 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14988 add_loc_descr (&ret, l2label);
14989 if (GET_CODE (rtl) != CLZ)
14990 msb = const1_rtx;
14991 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14992 msb = GEN_INT (HOST_WIDE_INT_1U
14993 << (GET_MODE_BITSIZE (mode) - 1));
14994 else
14995 msb = immed_wide_int_const
14996 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14997 GET_MODE_PRECISION (mode)), mode);
14998 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14999 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15000 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15001 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15002 else
15003 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15004 VAR_INIT_STATUS_INITIALIZED);
15005 if (tmp == NULL)
15006 return NULL;
15007 add_loc_descr (&ret, tmp);
15008 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15009 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15010 add_loc_descr (&ret, l3jump);
15011 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15012 VAR_INIT_STATUS_INITIALIZED);
15013 if (tmp == NULL)
15014 return NULL;
15015 add_loc_descr (&ret, tmp);
15016 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15017 ? DW_OP_shl : DW_OP_shr, 0, 0));
15018 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15019 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15020 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15021 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15022 add_loc_descr (&ret, l2jump);
15023 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15024 add_loc_descr (&ret, l3label);
15025 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15026 add_loc_descr (&ret, l4label);
15027 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15028 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15029 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15030 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15031 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15032 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15033 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15034 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15035 return ret;
15036 }
15037
15038 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15039 const1 is DW_OP_lit1 or corresponding typed constant):
15040 const0 DW_OP_swap
15041 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15042 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15043 L2: DW_OP_drop
15044
15045 PARITY is similar:
15046 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15047 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15048 L2: DW_OP_drop */
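/* The intent of the loop: keep a counter below the value, and on each
   iteration fold the low bit of the value into the counter (DW_OP_plus for
   POPCOUNT, DW_OP_xor for PARITY) and shift the value right by one, until
   the value reaches zero and only the counter is left.  */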
15049
15050 static dw_loc_descr_ref
15051 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15052 machine_mode mem_mode)
15053 {
15054 dw_loc_descr_ref op0, ret, tmp;
15055 dw_loc_descr_ref l1jump, l1label;
15056 dw_loc_descr_ref l2jump, l2label;
15057
15058 if (GET_MODE (XEXP (rtl, 0)) != mode)
15059 return NULL;
15060
15061 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15062 VAR_INIT_STATUS_INITIALIZED);
15063 if (op0 == NULL)
15064 return NULL;
15065 ret = op0;
15066 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15067 VAR_INIT_STATUS_INITIALIZED);
15068 if (tmp == NULL)
15069 return NULL;
15070 add_loc_descr (&ret, tmp);
15071 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15072 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15073 add_loc_descr (&ret, l1label);
15074 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15075 add_loc_descr (&ret, l2jump);
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15077 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15078 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15079 VAR_INIT_STATUS_INITIALIZED);
15080 if (tmp == NULL)
15081 return NULL;
15082 add_loc_descr (&ret, tmp);
15083 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15084 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15085 ? DW_OP_plus : DW_OP_xor, 0, 0));
15086 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15087 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15088 VAR_INIT_STATUS_INITIALIZED);
15089 add_loc_descr (&ret, tmp);
15090 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15091 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15092 add_loc_descr (&ret, l1jump);
15093 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15094 add_loc_descr (&ret, l2label);
15095 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15096 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15097 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15098 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15099 return ret;
15100 }
15101
15102 /* BSWAP (constS is initial shift count, either 56 or 24):
15103 constS const0
15104 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15105 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15106 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15107 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15108 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
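/* Each trip around L1 extracts one byte of the operand, starting with the
   least significant one, and ORs it into the result at the mirrored
   position; S counts down from GET_MODE_BITSIZE - 8 to 0 in steps of 8 and
   controls both which byte is extracted and where it lands.  */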
15109
15110 static dw_loc_descr_ref
15111 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15112 machine_mode mem_mode)
15113 {
15114 dw_loc_descr_ref op0, ret, tmp;
15115 dw_loc_descr_ref l1jump, l1label;
15116 dw_loc_descr_ref l2jump, l2label;
15117
15118 if (BITS_PER_UNIT != 8
15119 || (GET_MODE_BITSIZE (mode) != 32
15120 && GET_MODE_BITSIZE (mode) != 64))
15121 return NULL;
15122
15123 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15124 VAR_INIT_STATUS_INITIALIZED);
15125 if (op0 == NULL)
15126 return NULL;
15127
15128 ret = op0;
15129 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15130 mode, mem_mode,
15131 VAR_INIT_STATUS_INITIALIZED);
15132 if (tmp == NULL)
15133 return NULL;
15134 add_loc_descr (&ret, tmp);
15135 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15136 VAR_INIT_STATUS_INITIALIZED);
15137 if (tmp == NULL)
15138 return NULL;
15139 add_loc_descr (&ret, tmp);
15140 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15141 add_loc_descr (&ret, l1label);
15142 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15143 mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 add_loc_descr (&ret, tmp);
15146 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15147 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15148 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15149 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15150 VAR_INIT_STATUS_INITIALIZED);
15151 if (tmp == NULL)
15152 return NULL;
15153 add_loc_descr (&ret, tmp);
15154 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15155 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15156 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15158 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15160 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15161 VAR_INIT_STATUS_INITIALIZED);
15162 add_loc_descr (&ret, tmp);
15163 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15164 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15165 add_loc_descr (&ret, l2jump);
15166 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15167 VAR_INIT_STATUS_INITIALIZED);
15168 add_loc_descr (&ret, tmp);
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15171 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15172 add_loc_descr (&ret, l1jump);
15173 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15174 add_loc_descr (&ret, l2label);
15175 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15176 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15177 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15178 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15179 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15180 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15181 return ret;
15182 }
15183
15184 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15185 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15186 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15187 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15188
15189 ROTATERT is similar:
15190 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15191 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15192 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
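/* I.e. a rotate left by N bits is computed as
     (X << N) | (X >> (BITSIZE - N))
   where DW_OP_neg DW_OP_plus_uconst <BITSIZE> produces BITSIZE - N, and the
   optional constMASK DW_OP_and steps truncate the intermediate values when
   the mode is narrower than the DWARF address size.  */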
15193
15194 static dw_loc_descr_ref
15195 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15196 machine_mode mem_mode)
15197 {
15198 rtx rtlop1 = XEXP (rtl, 1);
15199 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15200 int i;
15201
15202 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15203 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15204 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15205 VAR_INIT_STATUS_INITIALIZED);
15206 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15207 VAR_INIT_STATUS_INITIALIZED);
15208 if (op0 == NULL || op1 == NULL)
15209 return NULL;
15210 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15211 for (i = 0; i < 2; i++)
15212 {
15213 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15214 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15215 mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15218 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15219 ? DW_OP_const4u
15220 : HOST_BITS_PER_WIDE_INT == 64
15221 ? DW_OP_const8u : DW_OP_constu,
15222 GET_MODE_MASK (mode), 0);
15223 else
15224 mask[i] = NULL;
15225 if (mask[i] == NULL)
15226 return NULL;
15227 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15228 }
15229 ret = op0;
15230 add_loc_descr (&ret, op1);
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15232 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15233 if (GET_CODE (rtl) == ROTATERT)
15234 {
15235 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15237 GET_MODE_BITSIZE (mode), 0));
15238 }
15239 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15240 if (mask[0] != NULL)
15241 add_loc_descr (&ret, mask[0]);
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15243 if (mask[1] != NULL)
15244 {
15245 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15246 add_loc_descr (&ret, mask[1]);
15247 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15248 }
15249 if (GET_CODE (rtl) == ROTATE)
15250 {
15251 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15253 GET_MODE_BITSIZE (mode), 0));
15254 }
15255 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15256 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15257 return ret;
15258 }
15259
15260 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15261 for DEBUG_PARAMETER_REF RTL. */
15262
15263 static dw_loc_descr_ref
15264 parameter_ref_descriptor (rtx rtl)
15265 {
15266 dw_loc_descr_ref ret;
15267 dw_die_ref ref;
15268
15269 if (dwarf_strict)
15270 return NULL;
15271 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15272 /* With LTO during LTRANS we get the late DIE that refers to the early
15273 DIE, thus we add another indirection here. This seems to confuse
15274 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15275 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15276 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15277 if (ref)
15278 {
15279 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15280 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15281 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15282 }
15283 else
15284 {
15285 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15286 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15287 }
15288 return ret;
15289 }
15290
15291 /* The following routine converts the RTL for a variable or parameter
15292 (resident in memory) into an equivalent Dwarf representation of a
15293 mechanism for getting the address of that same variable onto the top of a
15294 hypothetical "address evaluation" stack.
15295
15296 When creating memory location descriptors, we are effectively transforming
15297 the RTL for a memory-resident object into its Dwarf postfix expression
15298 equivalent. This routine recursively descends an RTL tree, turning
15299 it into Dwarf postfix code as it goes.
15300
15301 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15302
15303 MEM_MODE is the mode of the memory reference, needed to handle some
15304 autoincrement addressing modes.
15305
15306 Return 0 if we can't represent the location. */
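/* For instance, (plus (reg fp) (const_int -4)) typically becomes
   DW_OP_fbreg -4 via based_loc_descr, and wrapping that address in a MEM of
   address-sized mode appends a DW_OP_deref (see the PLUS and MEM cases
   below).  */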
15307
15308 dw_loc_descr_ref
15309 mem_loc_descriptor (rtx rtl, machine_mode mode,
15310 machine_mode mem_mode,
15311 enum var_init_status initialized)
15312 {
15313 dw_loc_descr_ref mem_loc_result = NULL;
15314 enum dwarf_location_atom op;
15315 dw_loc_descr_ref op0, op1;
15316 rtx inner = NULL_RTX;
15317 poly_int64 offset;
15318
15319 if (mode == VOIDmode)
15320 mode = GET_MODE (rtl);
15321
15322 /* Note that for a dynamically sized array, the location we will generate a
15323 description of here will be the lowest numbered location which is
15324 actually within the array. That's *not* necessarily the same as the
15325 zeroth element of the array. */
15326
15327 rtl = targetm.delegitimize_address (rtl);
15328
15329 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15330 return NULL;
15331
15332 scalar_int_mode int_mode, inner_mode, op1_mode;
15333 switch (GET_CODE (rtl))
15334 {
15335 case POST_INC:
15336 case POST_DEC:
15337 case POST_MODIFY:
15338 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15339
15340 case SUBREG:
15341 /* The case of a subreg may arise when we have a local (register)
15342 variable or a formal (register) parameter which doesn't quite fill
15343 up an entire register. For now, just assume that it is
15344 legitimate to make the Dwarf info refer to the whole register which
15345 contains the given subreg. */
15346 if (!subreg_lowpart_p (rtl))
15347 break;
15348 inner = SUBREG_REG (rtl);
15349 /* FALLTHRU */
15350 case TRUNCATE:
15351 if (inner == NULL_RTX)
15352 inner = XEXP (rtl, 0);
15353 if (is_a <scalar_int_mode> (mode, &int_mode)
15354 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15355 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15356 #ifdef POINTERS_EXTEND_UNSIGNED
15357 || (int_mode == Pmode && mem_mode != VOIDmode)
15358 #endif
15359 )
15360 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15361 {
15362 mem_loc_result = mem_loc_descriptor (inner,
15363 inner_mode,
15364 mem_mode, initialized);
15365 break;
15366 }
15367 if (dwarf_strict && dwarf_version < 5)
15368 break;
15369 if (is_a <scalar_int_mode> (mode, &int_mode)
15370 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15371 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15372 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15373 {
15374 dw_die_ref type_die;
15375 dw_loc_descr_ref cvt;
15376
15377 mem_loc_result = mem_loc_descriptor (inner,
15378 GET_MODE (inner),
15379 mem_mode, initialized);
15380 if (mem_loc_result == NULL)
15381 break;
15382 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15383 if (type_die == NULL)
15384 {
15385 mem_loc_result = NULL;
15386 break;
15387 }
15388 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15389 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15390 else
15391 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15392 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15393 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15394 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15395 add_loc_descr (&mem_loc_result, cvt);
15396 if (is_a <scalar_int_mode> (mode, &int_mode)
15397 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15398 {
15399 /* Convert it to untyped afterwards. */
15400 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15401 add_loc_descr (&mem_loc_result, cvt);
15402 }
15403 }
15404 break;
15405
15406 case REG:
15407 if (!is_a <scalar_int_mode> (mode, &int_mode)
15408 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15409 && rtl != arg_pointer_rtx
15410 && rtl != frame_pointer_rtx
15411 #ifdef POINTERS_EXTEND_UNSIGNED
15412 && (int_mode != Pmode || mem_mode == VOIDmode)
15413 #endif
15414 ))
15415 {
15416 dw_die_ref type_die;
15417 unsigned int dbx_regnum;
15418
15419 if (dwarf_strict && dwarf_version < 5)
15420 break;
15421 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15422 break;
15423 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15424 if (type_die == NULL)
15425 break;
15426
15427 dbx_regnum = dbx_reg_number (rtl);
15428 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15429 break;
15430 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15431 dbx_regnum, 0);
15432 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15433 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15434 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15435 break;
15436 }
15437 /* Whenever a register number forms a part of the description of the
15438 method for calculating the (dynamic) address of a memory resident
15439 object, DWARF rules require the register number be referred to as
15440 a "base register". This distinction is not based in any way upon
15441 what category of register the hardware believes the given register
15442 belongs to. This is strictly DWARF terminology we're dealing with
15443 here. Note that in cases where the location of a memory-resident
15444 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15445 OP_CONST (0)) the actual DWARF location descriptor that we generate
15446 may just be OP_BASEREG (basereg). This may look deceptively like
15447 the object in question was allocated to a register (rather than in
15448 memory) so DWARF consumers need to be aware of the subtle
15449 distinction between OP_REG and OP_BASEREG. */
15450 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15451 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15452 else if (stack_realign_drap
15453 && crtl->drap_reg
15454 && crtl->args.internal_arg_pointer == rtl
15455 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15456 {
15457 /* If RTL is internal_arg_pointer, which has been optimized
15458 out, use DRAP instead. */
15459 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15460 VAR_INIT_STATUS_INITIALIZED);
15461 }
15462 break;
15463
15464 case SIGN_EXTEND:
15465 case ZERO_EXTEND:
15466 if (!is_a <scalar_int_mode> (mode, &int_mode)
15467 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15468 break;
15469 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15470 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15471 if (op0 == 0)
15472 break;
15473 else if (GET_CODE (rtl) == ZERO_EXTEND
15474 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15475 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15476 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15477 to expand zero extend as two shifts instead of
15478 masking. */
15479 && GET_MODE_SIZE (inner_mode) <= 4)
15480 {
15481 mem_loc_result = op0;
15482 add_loc_descr (&mem_loc_result,
15483 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15484 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15485 }
15486 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15487 {
15488 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15489 shift *= BITS_PER_UNIT;
15490 if (GET_CODE (rtl) == SIGN_EXTEND)
15491 op = DW_OP_shra;
15492 else
15493 op = DW_OP_shr;
15494 mem_loc_result = op0;
15495 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15496 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15497 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15498 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15499 }
15500 else if (!dwarf_strict || dwarf_version >= 5)
15501 {
15502 dw_die_ref type_die1, type_die2;
15503 dw_loc_descr_ref cvt;
15504
15505 type_die1 = base_type_for_mode (inner_mode,
15506 GET_CODE (rtl) == ZERO_EXTEND);
15507 if (type_die1 == NULL)
15508 break;
15509 type_die2 = base_type_for_mode (int_mode, 1);
15510 if (type_die2 == NULL)
15511 break;
15512 mem_loc_result = op0;
15513 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15514 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15515 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15516 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15517 add_loc_descr (&mem_loc_result, cvt);
15518 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15519 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15520 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15521 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15522 add_loc_descr (&mem_loc_result, cvt);
15523 }
15524 break;
15525
15526 case MEM:
15527 {
15528 rtx new_rtl = avoid_constant_pool_reference (rtl);
15529 if (new_rtl != rtl)
15530 {
15531 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15532 initialized);
15533 if (mem_loc_result != NULL)
15534 return mem_loc_result;
15535 }
15536 }
15537 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15538 get_address_mode (rtl), mode,
15539 VAR_INIT_STATUS_INITIALIZED);
15540 if (mem_loc_result == NULL)
15541 mem_loc_result = tls_mem_loc_descriptor (rtl);
15542 if (mem_loc_result != NULL)
15543 {
15544 if (!is_a <scalar_int_mode> (mode, &int_mode)
15545 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15546 {
15547 dw_die_ref type_die;
15548 dw_loc_descr_ref deref;
15549 HOST_WIDE_INT size;
15550
15551 if (dwarf_strict && dwarf_version < 5)
15552 return NULL;
15553 if (!GET_MODE_SIZE (mode).is_constant (&size))
15554 return NULL;
15555 type_die
15556 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15557 if (type_die == NULL)
15558 return NULL;
15559 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15560 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15561 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15562 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15563 add_loc_descr (&mem_loc_result, deref);
15564 }
15565 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15566 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15567 else
15568 add_loc_descr (&mem_loc_result,
15569 new_loc_descr (DW_OP_deref_size,
15570 GET_MODE_SIZE (int_mode), 0));
15571 }
15572 break;
15573
15574 case LO_SUM:
15575 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15576
15577 case LABEL_REF:
15578 /* Some ports can transform a symbol ref into a label ref, because
15579 the symbol ref is too far away and has to be dumped into a constant
15580 pool. */
15581 case CONST:
15582 case SYMBOL_REF:
15583 if (!is_a <scalar_int_mode> (mode, &int_mode)
15584 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15585 #ifdef POINTERS_EXTEND_UNSIGNED
15586 && (int_mode != Pmode || mem_mode == VOIDmode)
15587 #endif
15588 ))
15589 break;
15590 if (GET_CODE (rtl) == SYMBOL_REF
15591 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15592 {
15593 dw_loc_descr_ref temp;
15594
15595 /* If this is not defined, we have no way to emit the data. */
15596 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15597 break;
15598
15599 temp = new_addr_loc_descr (rtl, dtprel_true);
15600
15601 /* We check for DWARF 5 here because gdb did not implement
15602 DW_OP_form_tls_address until after 7.12. */
15603 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15604 ? DW_OP_form_tls_address
15605 : DW_OP_GNU_push_tls_address),
15606 0, 0);
15607 add_loc_descr (&mem_loc_result, temp);
15608
15609 break;
15610 }
15611
15612 if (!const_ok_for_output (rtl))
15613 {
15614 if (GET_CODE (rtl) == CONST)
15615 switch (GET_CODE (XEXP (rtl, 0)))
15616 {
15617 case NOT:
15618 op = DW_OP_not;
15619 goto try_const_unop;
15620 case NEG:
15621 op = DW_OP_neg;
15622 goto try_const_unop;
15623 try_const_unop:
15624 rtx arg;
15625 arg = XEXP (XEXP (rtl, 0), 0);
15626 if (!CONSTANT_P (arg))
15627 arg = gen_rtx_CONST (int_mode, arg);
15628 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15629 initialized);
15630 if (op0)
15631 {
15632 mem_loc_result = op0;
15633 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15634 }
15635 break;
15636 default:
15637 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15638 mem_mode, initialized);
15639 break;
15640 }
15641 break;
15642 }
15643
15644 symref:
15645 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15646 vec_safe_push (used_rtx_array, rtl);
15647 break;
15648
15649 case CONCAT:
15650 case CONCATN:
15651 case VAR_LOCATION:
15652 case DEBUG_IMPLICIT_PTR:
15653 expansion_failed (NULL_TREE, rtl,
15654 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15655 return 0;
15656
15657 case ENTRY_VALUE:
15658 if (dwarf_strict && dwarf_version < 5)
15659 return NULL;
15660 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15661 {
15662 if (!is_a <scalar_int_mode> (mode, &int_mode)
15663 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15664 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15665 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15666 else
15667 {
15668 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15669 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15670 return NULL;
15671 op0 = one_reg_loc_descriptor (dbx_regnum,
15672 VAR_INIT_STATUS_INITIALIZED);
15673 }
15674 }
15675 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15676 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15677 {
15678 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15679 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15680 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15681 return NULL;
15682 }
15683 else
15684 gcc_unreachable ();
15685 if (op0 == NULL)
15686 return NULL;
15687 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15688 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15689 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15690 break;
15691
15692 case DEBUG_PARAMETER_REF:
15693 mem_loc_result = parameter_ref_descriptor (rtl);
15694 break;
15695
15696 case PRE_MODIFY:
15697 /* Extract the PLUS expression nested inside and fall into
15698 PLUS code below. */
15699 rtl = XEXP (rtl, 1);
15700 goto plus;
15701
15702 case PRE_INC:
15703 case PRE_DEC:
15704 /* Turn these into a PLUS expression and fall into the PLUS code
15705 below. */
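/* For instance, with a 4-byte MEM_MODE, (pre_inc (reg R)) is rewritten
   here as (plus (reg R) (const_int 4)) and (pre_dec (reg R)) as
   (plus (reg R) (const_int -4)).  */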
15706 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15707 gen_int_mode (GET_CODE (rtl) == PRE_INC
15708 ? GET_MODE_UNIT_SIZE (mem_mode)
15709 : -GET_MODE_UNIT_SIZE (mem_mode),
15710 mode));
15711
15712 /* fall through */
15713
15714 case PLUS:
15715 plus:
15716 if (is_based_loc (rtl)
15717 && is_a <scalar_int_mode> (mode, &int_mode)
15718 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15719 || XEXP (rtl, 0) == arg_pointer_rtx
15720 || XEXP (rtl, 0) == frame_pointer_rtx))
15721 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15722 INTVAL (XEXP (rtl, 1)),
15723 VAR_INIT_STATUS_INITIALIZED);
15724 else
15725 {
15726 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15727 VAR_INIT_STATUS_INITIALIZED);
15728 if (mem_loc_result == 0)
15729 break;
15730
15731 if (CONST_INT_P (XEXP (rtl, 1))
15732 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15733 <= DWARF2_ADDR_SIZE))
15734 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15735 else
15736 {
15737 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15738 VAR_INIT_STATUS_INITIALIZED);
15739 if (op1 == 0)
15740 return NULL;
15741 add_loc_descr (&mem_loc_result, op1);
15742 add_loc_descr (&mem_loc_result,
15743 new_loc_descr (DW_OP_plus, 0, 0));
15744 }
15745 }
15746 break;
15747
15748 /* If a pseudo-reg is optimized away, it is possible for it to
15749 be replaced with a MEM containing a multiply or shift. */
15750 case MINUS:
15751 op = DW_OP_minus;
15752 goto do_binop;
15753
15754 case MULT:
15755 op = DW_OP_mul;
15756 goto do_binop;
15757
15758 case DIV:
15759 if ((!dwarf_strict || dwarf_version >= 5)
15760 && is_a <scalar_int_mode> (mode, &int_mode)
15761 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15762 {
15763 mem_loc_result = typed_binop (DW_OP_div, rtl,
15764 base_type_for_mode (mode, 0),
15765 int_mode, mem_mode);
15766 break;
15767 }
15768 op = DW_OP_div;
15769 goto do_binop;
15770
15771 case UMOD:
15772 op = DW_OP_mod;
15773 goto do_binop;
15774
15775 case ASHIFT:
15776 op = DW_OP_shl;
15777 goto do_shift;
15778
15779 case ASHIFTRT:
15780 op = DW_OP_shra;
15781 goto do_shift;
15782
15783 case LSHIFTRT:
15784 op = DW_OP_shr;
15785 goto do_shift;
15786
15787 do_shift:
15788 if (!is_a <scalar_int_mode> (mode, &int_mode))
15789 break;
15790 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15791 VAR_INIT_STATUS_INITIALIZED);
15792 {
15793 rtx rtlop1 = XEXP (rtl, 1);
15794 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15795 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15796 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15797 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15798 VAR_INIT_STATUS_INITIALIZED);
15799 }
15800
15801 if (op0 == 0 || op1 == 0)
15802 break;
15803
15804 mem_loc_result = op0;
15805 add_loc_descr (&mem_loc_result, op1);
15806 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15807 break;
15808
15809 case AND:
15810 op = DW_OP_and;
15811 goto do_binop;
15812
15813 case IOR:
15814 op = DW_OP_or;
15815 goto do_binop;
15816
15817 case XOR:
15818 op = DW_OP_xor;
15819 goto do_binop;
15820
15821 do_binop:
15822 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15823 VAR_INIT_STATUS_INITIALIZED);
15824 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15825 VAR_INIT_STATUS_INITIALIZED);
15826
15827 if (op0 == 0 || op1 == 0)
15828 break;
15829
15830 mem_loc_result = op0;
15831 add_loc_descr (&mem_loc_result, op1);
15832 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15833 break;
15834
15835 case MOD:
15836 if ((!dwarf_strict || dwarf_version >= 5)
15837 && is_a <scalar_int_mode> (mode, &int_mode)
15838 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15839 {
15840 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15841 base_type_for_mode (mode, 0),
15842 int_mode, mem_mode);
15843 break;
15844 }
15845
15846 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15847 VAR_INIT_STATUS_INITIALIZED);
15848 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15849 VAR_INIT_STATUS_INITIALIZED);
15850
15851 if (op0 == 0 || op1 == 0)
15852 break;
15853
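/* Compute the remainder on the DWARF stack as a - (a / b) * b.
   Illustrative trace (stack listed bottom to top; a is XEXP (rtl, 0),
   b is XEXP (rtl, 1)):
     start        -> a b
     DW_OP_over   -> a b a
     DW_OP_over   -> a b a b
     DW_OP_div    -> a b (a/b)
     DW_OP_mul    -> a ((a/b)*b)
     DW_OP_minus  -> a - (a/b)*b  */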
15854 mem_loc_result = op0;
15855 add_loc_descr (&mem_loc_result, op1);
15856 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15857 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15858 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15859 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15860 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15861 break;
15862
15863 case UDIV:
15864 if ((!dwarf_strict || dwarf_version >= 5)
15865 && is_a <scalar_int_mode> (mode, &int_mode))
15866 {
15867 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15868 {
15869 op = DW_OP_div;
15870 goto do_binop;
15871 }
15872 mem_loc_result = typed_binop (DW_OP_div, rtl,
15873 base_type_for_mode (int_mode, 1),
15874 int_mode, mem_mode);
15875 }
15876 break;
15877
15878 case NOT:
15879 op = DW_OP_not;
15880 goto do_unop;
15881
15882 case ABS:
15883 op = DW_OP_abs;
15884 goto do_unop;
15885
15886 case NEG:
15887 op = DW_OP_neg;
15888 goto do_unop;
15889
15890 do_unop:
15891 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15892 VAR_INIT_STATUS_INITIALIZED);
15893
15894 if (op0 == 0)
15895 break;
15896
15897 mem_loc_result = op0;
15898 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15899 break;
15900
15901 case CONST_INT:
15902 if (!is_a <scalar_int_mode> (mode, &int_mode)
15903 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15904 #ifdef POINTERS_EXTEND_UNSIGNED
15905 || (int_mode == Pmode
15906 && mem_mode != VOIDmode
15907 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15908 #endif
15909 )
15910 {
15911 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15912 break;
15913 }
15914 if ((!dwarf_strict || dwarf_version >= 5)
15915 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15916 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15917 {
15918 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15919 scalar_int_mode amode;
15920 if (type_die == NULL)
15921 return NULL;
15922 if (INTVAL (rtl) >= 0
15923 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15924 .exists (&amode))
15925 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15926 /* const DW_OP_convert <XXX> vs.
15927 DW_OP_const_type <XXX, 1, const>. */
15928 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15929 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15930 {
15931 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15932 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15933 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15934 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15935 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15936 add_loc_descr (&mem_loc_result, op0);
15937 return mem_loc_result;
15938 }
15939 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15940 INTVAL (rtl));
15941 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15942 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15943 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15944 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15945 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15946 else
15947 {
15948 mem_loc_result->dw_loc_oprnd2.val_class
15949 = dw_val_class_const_double;
15950 mem_loc_result->dw_loc_oprnd2.v.val_double
15951 = double_int::from_shwi (INTVAL (rtl));
15952 }
15953 }
15954 break;
15955
15956 case CONST_DOUBLE:
15957 if (!dwarf_strict || dwarf_version >= 5)
15958 {
15959 dw_die_ref type_die;
15960
15961 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15962 CONST_DOUBLE rtx could represent either a large integer
15963 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15964 the value is always a floating point constant.
15965
15966 When it is an integer, a CONST_DOUBLE is used whenever
15967 the constant requires 2 HWIs to be adequately represented.
15968 We output CONST_DOUBLEs as blocks. */
15969 if (mode == VOIDmode
15970 || (GET_MODE (rtl) == VOIDmode
15971 && maybe_ne (GET_MODE_BITSIZE (mode),
15972 HOST_BITS_PER_DOUBLE_INT)))
15973 break;
15974 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15975 if (type_die == NULL)
15976 return NULL;
15977 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15978 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15979 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15980 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15981 #if TARGET_SUPPORTS_WIDE_INT == 0
15982 if (!SCALAR_FLOAT_MODE_P (mode))
15983 {
15984 mem_loc_result->dw_loc_oprnd2.val_class
15985 = dw_val_class_const_double;
15986 mem_loc_result->dw_loc_oprnd2.v.val_double
15987 = rtx_to_double_int (rtl);
15988 }
15989 else
15990 #endif
15991 {
15992 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15993 unsigned int length = GET_MODE_SIZE (float_mode);
15994 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15995
15996 insert_float (rtl, array);
15997 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15998 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15999 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16000 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16001 }
16002 }
16003 break;
16004
16005 case CONST_WIDE_INT:
16006 if (!dwarf_strict || dwarf_version >= 5)
16007 {
16008 dw_die_ref type_die;
16009
16010 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16011 if (type_die == NULL)
16012 return NULL;
16013 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16014 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16015 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16016 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16017 mem_loc_result->dw_loc_oprnd2.val_class
16018 = dw_val_class_wide_int;
16019 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16020 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16021 }
16022 break;
16023
16024 case CONST_POLY_INT:
16025 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16026 break;
16027
16028 case EQ:
16029 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16030 break;
16031
16032 case GE:
16033 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16034 break;
16035
16036 case GT:
16037 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16038 break;
16039
16040 case LE:
16041 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16042 break;
16043
16044 case LT:
16045 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16046 break;
16047
16048 case NE:
16049 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16050 break;
16051
16052 case GEU:
16053 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16054 break;
16055
16056 case GTU:
16057 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16058 break;
16059
16060 case LEU:
16061 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16062 break;
16063
16064 case LTU:
16065 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16066 break;
16067
16068 case UMIN:
16069 case UMAX:
16070 if (!SCALAR_INT_MODE_P (mode))
16071 break;
16072 /* FALLTHRU */
16073 case SMIN:
16074 case SMAX:
16075 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16076 break;
16077
16078 case ZERO_EXTRACT:
16079 case SIGN_EXTRACT:
16080 if (CONST_INT_P (XEXP (rtl, 1))
16081 && CONST_INT_P (XEXP (rtl, 2))
16082 && is_a <scalar_int_mode> (mode, &int_mode)
16083 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16084 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16085 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16086 && ((unsigned) INTVAL (XEXP (rtl, 1))
16087 + (unsigned) INTVAL (XEXP (rtl, 2))
16088 <= GET_MODE_BITSIZE (int_mode)))
16089 {
16090 int shift, size;
16091 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16092 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16093 if (op0 == 0)
16094 break;
16095 if (GET_CODE (rtl) == SIGN_EXTRACT)
16096 op = DW_OP_shra;
16097 else
16098 op = DW_OP_shr;
16099 mem_loc_result = op0;
16100 size = INTVAL (XEXP (rtl, 1));
16101 shift = INTVAL (XEXP (rtl, 2));
16102 if (BITS_BIG_ENDIAN)
16103 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16104 if (shift + size != (int) DWARF2_ADDR_SIZE)
16105 {
16106 add_loc_descr (&mem_loc_result,
16107 int_loc_descriptor (DWARF2_ADDR_SIZE
16108 - shift - size));
16109 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16110 }
16111 if (size != (int) DWARF2_ADDR_SIZE)
16112 {
16113 add_loc_descr (&mem_loc_result,
16114 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16115 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16116 }
16117 }
16118 break;
16119
16120 case IF_THEN_ELSE:
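/* Sketch of the expression built below: push op1 (the "then" value),
   op2 (the "else" value) and the condition, then
     DW_OP_bra .Ldrop; DW_OP_swap; .Ldrop: DW_OP_drop
   (the label is only for illustration).  If the condition is nonzero
   we branch straight to the drop and are left with op1; otherwise the
   swap/drop pair leaves op2.  */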
16121 {
16122 dw_loc_descr_ref op2, bra_node, drop_node;
16123 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16124 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16125 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16126 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16127 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16128 VAR_INIT_STATUS_INITIALIZED);
16129 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16130 VAR_INIT_STATUS_INITIALIZED);
16131 if (op0 == NULL || op1 == NULL || op2 == NULL)
16132 break;
16133
16134 mem_loc_result = op1;
16135 add_loc_descr (&mem_loc_result, op2);
16136 add_loc_descr (&mem_loc_result, op0);
16137 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16138 add_loc_descr (&mem_loc_result, bra_node);
16139 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16140 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16141 add_loc_descr (&mem_loc_result, drop_node);
16142 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16143 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16144 }
16145 break;
16146
16147 case FLOAT_EXTEND:
16148 case FLOAT_TRUNCATE:
16149 case FLOAT:
16150 case UNSIGNED_FLOAT:
16151 case FIX:
16152 case UNSIGNED_FIX:
16153 if (!dwarf_strict || dwarf_version >= 5)
16154 {
16155 dw_die_ref type_die;
16156 dw_loc_descr_ref cvt;
16157
16158 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16159 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16160 if (op0 == NULL)
16161 break;
16162 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16163 && (GET_CODE (rtl) == FLOAT
16164 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16165 {
16166 type_die = base_type_for_mode (int_mode,
16167 GET_CODE (rtl) == UNSIGNED_FLOAT);
16168 if (type_die == NULL)
16169 break;
16170 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16171 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16172 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16173 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16174 add_loc_descr (&op0, cvt);
16175 }
16176 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16177 if (type_die == NULL)
16178 break;
16179 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16180 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16181 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16182 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16183 add_loc_descr (&op0, cvt);
16184 if (is_a <scalar_int_mode> (mode, &int_mode)
16185 && (GET_CODE (rtl) == FIX
16186 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16187 {
16188 op0 = convert_descriptor_to_mode (int_mode, op0);
16189 if (op0 == NULL)
16190 break;
16191 }
16192 mem_loc_result = op0;
16193 }
16194 break;
16195
16196 case CLZ:
16197 case CTZ:
16198 case FFS:
16199 if (is_a <scalar_int_mode> (mode, &int_mode))
16200 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16201 break;
16202
16203 case POPCOUNT:
16204 case PARITY:
16205 if (is_a <scalar_int_mode> (mode, &int_mode))
16206 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16207 break;
16208
16209 case BSWAP:
16210 if (is_a <scalar_int_mode> (mode, &int_mode))
16211 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16212 break;
16213
16214 case ROTATE:
16215 case ROTATERT:
16216 if (is_a <scalar_int_mode> (mode, &int_mode))
16217 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16218 break;
16219
16220 case COMPARE:
16221 /* In theory, we could implement the above. */
16222 /* DWARF cannot represent the unsigned compare operations
16223 natively. */
16224 case SS_MULT:
16225 case US_MULT:
16226 case SS_DIV:
16227 case US_DIV:
16228 case SS_PLUS:
16229 case US_PLUS:
16230 case SS_MINUS:
16231 case US_MINUS:
16232 case SS_NEG:
16233 case US_NEG:
16234 case SS_ABS:
16235 case SS_ASHIFT:
16236 case US_ASHIFT:
16237 case SS_TRUNCATE:
16238 case US_TRUNCATE:
16239 case UNORDERED:
16240 case ORDERED:
16241 case UNEQ:
16242 case UNGE:
16243 case UNGT:
16244 case UNLE:
16245 case UNLT:
16246 case LTGT:
16247 case FRACT_CONVERT:
16248 case UNSIGNED_FRACT_CONVERT:
16249 case SAT_FRACT:
16250 case UNSIGNED_SAT_FRACT:
16251 case SQRT:
16252 case ASM_OPERANDS:
16253 case VEC_MERGE:
16254 case VEC_SELECT:
16255 case VEC_CONCAT:
16256 case VEC_DUPLICATE:
16257 case VEC_SERIES:
16258 case UNSPEC:
16259 case HIGH:
16260 case FMA:
16261 case STRICT_LOW_PART:
16262 case CONST_VECTOR:
16263 case CONST_FIXED:
16264 case CLRSB:
16265 case CLOBBER:
16266 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16267 can't express it in the debug info. This can happen e.g. with some
16268 TLS UNSPECs. */
16269 break;
16270
16271 case CONST_STRING:
16272 resolve_one_addr (&rtl);
16273 goto symref;
16274
16275 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16276 the expression. An UNSPEC rtx represents a raw DWARF operation;
16277 new_loc_descr is called for it to build the operation directly.
16278 Otherwise mem_loc_descriptor is called recursively. */
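/* As a purely hypothetical example of the two-operand encoding, an
   element (unspec [(const_int 8) (const_int 0)] DW_OP_plus_uconst),
   i.e. an UNSPEC whose number is the DWARF opcode, is translated
   directly into DW_OP_plus_uconst 8, with the unused second operand
   passed as const0_rtx; any non-UNSPEC element goes through the
   normal recursion.  */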
16279 case PARALLEL:
16280 {
16281 int index = 0;
16282 dw_loc_descr_ref exp_result = NULL;
16283
16284 for (; index < XVECLEN (rtl, 0); index++)
16285 {
16286 rtx elem = XVECEXP (rtl, 0, index);
16287 if (GET_CODE (elem) == UNSPEC)
16288 {
16289 /* Each DWARF operation UNSPEC contains two operands; if an
16290 operand is not used for the operation, const0_rtx is
16291 passed. */
16292 gcc_assert (XVECLEN (elem, 0) == 2);
16293
16294 HOST_WIDE_INT dw_op = XINT (elem, 1);
16295 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16296 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16297 exp_result
16298 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16299 oprnd2);
16300 }
16301 else
16302 exp_result
16303 = mem_loc_descriptor (elem, mode, mem_mode,
16304 VAR_INIT_STATUS_INITIALIZED);
16305
16306 if (!mem_loc_result)
16307 mem_loc_result = exp_result;
16308 else
16309 add_loc_descr (&mem_loc_result, exp_result);
16310 }
16311
16312 break;
16313 }
16314
16315 default:
16316 if (flag_checking)
16317 {
16318 print_rtl (stderr, rtl);
16319 gcc_unreachable ();
16320 }
16321 break;
16322 }
16323
16324 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16325 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16326
16327 return mem_loc_result;
16328 }
16329
16330 /* Return a descriptor that describes the concatenation of two locations.
16331 This is typically a complex variable. */
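/* For example, a complex value split across two registers would
   typically yield something like
     DW_OP_reg<N>; DW_OP_piece <size0>; DW_OP_reg<M>; DW_OP_piece <size1>
   where the piece sizes come from the modes of X0 and X1.  */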
16332
16333 static dw_loc_descr_ref
16334 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16335 {
16336 /* At present we only track constant-sized pieces. */
16337 unsigned int size0, size1;
16338 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16339 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16340 return 0;
16341
16342 dw_loc_descr_ref cc_loc_result = NULL;
16343 dw_loc_descr_ref x0_ref
16344 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16345 dw_loc_descr_ref x1_ref
16346 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16347
16348 if (x0_ref == 0 || x1_ref == 0)
16349 return 0;
16350
16351 cc_loc_result = x0_ref;
16352 add_loc_descr_op_piece (&cc_loc_result, size0);
16353
16354 add_loc_descr (&cc_loc_result, x1_ref);
16355 add_loc_descr_op_piece (&cc_loc_result, size1);
16356
16357 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16358 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16359
16360 return cc_loc_result;
16361 }
16362
16363 /* Return a descriptor that describes the concatenation of N
16364 locations. */
16365
16366 static dw_loc_descr_ref
16367 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16368 {
16369 unsigned int i;
16370 dw_loc_descr_ref cc_loc_result = NULL;
16371 unsigned int n = XVECLEN (concatn, 0);
16372 unsigned int size;
16373
16374 for (i = 0; i < n; ++i)
16375 {
16376 dw_loc_descr_ref ref;
16377 rtx x = XVECEXP (concatn, 0, i);
16378
16379 /* At present we only track constant-sized pieces. */
16380 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16381 return NULL;
16382
16383 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16384 if (ref == NULL)
16385 return NULL;
16386
16387 add_loc_descr (&cc_loc_result, ref);
16388 add_loc_descr_op_piece (&cc_loc_result, size);
16389 }
16390
16391 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16392 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16393
16394 return cc_loc_result;
16395 }
16396
16397 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16398 for DEBUG_IMPLICIT_PTR RTL. */
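/* DW_OP_implicit_pointer (DW_OP_GNU_implicit_pointer before DWARF 5)
   takes a reference to the DIE of the pointed-to variable and a byte
   offset: the pointer itself has no runtime location, but it would
   point OFFSET bytes into the object described by that DIE.  */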
16399
16400 static dw_loc_descr_ref
16401 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16402 {
16403 dw_loc_descr_ref ret;
16404 dw_die_ref ref;
16405
16406 if (dwarf_strict && dwarf_version < 5)
16407 return NULL;
16408 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16409 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16410 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16411 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16412 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16413 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16414 if (ref)
16415 {
16416 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16417 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16418 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16419 }
16420 else
16421 {
16422 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16423 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16424 }
16425 return ret;
16426 }
16427
16428 /* Output a proper Dwarf location descriptor for a variable or parameter
16429 which is either allocated in a register or in a memory location. For a
16430 register, we just generate an OP_REG and the register number. For a
16431 memory location we provide a Dwarf postfix expression describing how to
16432 generate the (dynamic) address of the object onto the address stack.
16433
16434 MODE is mode of the decl if this loc_descriptor is going to be used in
16435 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16436 allowed, VOIDmode otherwise.
16437
16438 If we don't know how to describe it, return 0. */
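/* Typical results, for illustration only: a value living in a hard
   register becomes a single DW_OP_reg<N>/DW_OP_regx operation, while a
   stack slot becomes an address computation such as DW_OP_fbreg <offset>
   or DW_OP_breg<N> <offset>.  */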
16439
16440 static dw_loc_descr_ref
16441 loc_descriptor (rtx rtl, machine_mode mode,
16442 enum var_init_status initialized)
16443 {
16444 dw_loc_descr_ref loc_result = NULL;
16445 scalar_int_mode int_mode;
16446
16447 switch (GET_CODE (rtl))
16448 {
16449 case SUBREG:
16450 /* The case of a subreg may arise when we have a local (register)
16451 variable or a formal (register) parameter which doesn't quite fill
16452 up an entire register. For now, just assume that it is
16453 legitimate to make the Dwarf info refer to the whole register which
16454 contains the given subreg. */
16455 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16456 loc_result = loc_descriptor (SUBREG_REG (rtl),
16457 GET_MODE (SUBREG_REG (rtl)), initialized);
16458 else
16459 goto do_default;
16460 break;
16461
16462 case REG:
16463 loc_result = reg_loc_descriptor (rtl, initialized);
16464 break;
16465
16466 case MEM:
16467 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16468 GET_MODE (rtl), initialized);
16469 if (loc_result == NULL)
16470 loc_result = tls_mem_loc_descriptor (rtl);
16471 if (loc_result == NULL)
16472 {
16473 rtx new_rtl = avoid_constant_pool_reference (rtl);
16474 if (new_rtl != rtl)
16475 loc_result = loc_descriptor (new_rtl, mode, initialized);
16476 }
16477 break;
16478
16479 case CONCAT:
16480 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16481 initialized);
16482 break;
16483
16484 case CONCATN:
16485 loc_result = concatn_loc_descriptor (rtl, initialized);
16486 break;
16487
16488 case VAR_LOCATION:
16489 /* Single part. */
16490 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16491 {
16492 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16493 if (GET_CODE (loc) == EXPR_LIST)
16494 loc = XEXP (loc, 0);
16495 loc_result = loc_descriptor (loc, mode, initialized);
16496 break;
16497 }
16498
16499 rtl = XEXP (rtl, 1);
16500 /* FALLTHRU */
16501
16502 case PARALLEL:
16503 {
16504 rtvec par_elems = XVEC (rtl, 0);
16505 int num_elem = GET_NUM_ELEM (par_elems);
16506 machine_mode mode;
16507 int i, size;
16508
16509 /* Create the first one, so we have something to add to. */
16510 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16511 VOIDmode, initialized);
16512 if (loc_result == NULL)
16513 return NULL;
16514 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16515 /* At present we only track constant-sized pieces. */
16516 if (!GET_MODE_SIZE (mode).is_constant (&size))
16517 return NULL;
16518 add_loc_descr_op_piece (&loc_result, size);
16519 for (i = 1; i < num_elem; i++)
16520 {
16521 dw_loc_descr_ref temp;
16522
16523 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16524 VOIDmode, initialized);
16525 if (temp == NULL)
16526 return NULL;
16527 add_loc_descr (&loc_result, temp);
16528 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16529 /* At present we only track constant-sized pieces. */
16530 if (!GET_MODE_SIZE (mode).is_constant (&size))
16531 return NULL;
16532 add_loc_descr_op_piece (&loc_result, size);
16533 }
16534 }
16535 break;
16536
16537 case CONST_INT:
16538 if (mode != VOIDmode && mode != BLKmode)
16539 {
16540 int_mode = as_a <scalar_int_mode> (mode);
16541 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16542 INTVAL (rtl));
16543 }
16544 break;
16545
16546 case CONST_DOUBLE:
16547 if (mode == VOIDmode)
16548 mode = GET_MODE (rtl);
16549
16550 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16551 {
16552 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16553
16554 /* Note that a CONST_DOUBLE rtx could represent either an integer
16555 or a floating-point constant. A CONST_DOUBLE is used whenever
16556 the constant requires more than one word in order to be
16557 adequately represented. We output CONST_DOUBLEs as blocks. */
16558 scalar_mode smode = as_a <scalar_mode> (mode);
16559 loc_result = new_loc_descr (DW_OP_implicit_value,
16560 GET_MODE_SIZE (smode), 0);
16561 #if TARGET_SUPPORTS_WIDE_INT == 0
16562 if (!SCALAR_FLOAT_MODE_P (smode))
16563 {
16564 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16565 loc_result->dw_loc_oprnd2.v.val_double
16566 = rtx_to_double_int (rtl);
16567 }
16568 else
16569 #endif
16570 {
16571 unsigned int length = GET_MODE_SIZE (smode);
16572 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16573
16574 insert_float (rtl, array);
16575 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16576 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16577 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16578 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16579 }
16580 }
16581 break;
16582
16583 case CONST_WIDE_INT:
16584 if (mode == VOIDmode)
16585 mode = GET_MODE (rtl);
16586
16587 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16588 {
16589 int_mode = as_a <scalar_int_mode> (mode);
16590 loc_result = new_loc_descr (DW_OP_implicit_value,
16591 GET_MODE_SIZE (int_mode), 0);
16592 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16593 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16594 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16595 }
16596 break;
16597
16598 case CONST_VECTOR:
16599 if (mode == VOIDmode)
16600 mode = GET_MODE (rtl);
16601
16602 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16603 {
16604 unsigned int length;
16605 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16606 return NULL;
16607
16608 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16609 unsigned char *array
16610 = ggc_vec_alloc<unsigned char> (length * elt_size);
16611 unsigned int i;
16612 unsigned char *p;
16613 machine_mode imode = GET_MODE_INNER (mode);
16614
16615 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16616 switch (GET_MODE_CLASS (mode))
16617 {
16618 case MODE_VECTOR_INT:
16619 for (i = 0, p = array; i < length; i++, p += elt_size)
16620 {
16621 rtx elt = CONST_VECTOR_ELT (rtl, i);
16622 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16623 }
16624 break;
16625
16626 case MODE_VECTOR_FLOAT:
16627 for (i = 0, p = array; i < length; i++, p += elt_size)
16628 {
16629 rtx elt = CONST_VECTOR_ELT (rtl, i);
16630 insert_float (elt, p);
16631 }
16632 break;
16633
16634 default:
16635 gcc_unreachable ();
16636 }
16637
16638 loc_result = new_loc_descr (DW_OP_implicit_value,
16639 length * elt_size, 0);
16640 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16641 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16642 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16643 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16644 }
16645 break;
16646
16647 case CONST:
16648 if (mode == VOIDmode
16649 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16650 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16651 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16652 {
16653 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16654 break;
16655 }
16656 /* FALLTHROUGH */
16657 case SYMBOL_REF:
16658 if (!const_ok_for_output (rtl))
16659 break;
16660 /* FALLTHROUGH */
16661 case LABEL_REF:
16662 if (is_a <scalar_int_mode> (mode, &int_mode)
16663 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16664 && (dwarf_version >= 4 || !dwarf_strict))
16665 {
16666 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16667 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16668 vec_safe_push (used_rtx_array, rtl);
16669 }
16670 break;
16671
16672 case DEBUG_IMPLICIT_PTR:
16673 loc_result = implicit_ptr_descriptor (rtl, 0);
16674 break;
16675
16676 case PLUS:
16677 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16678 && CONST_INT_P (XEXP (rtl, 1)))
16679 {
16680 loc_result
16681 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16682 break;
16683 }
16684 /* FALLTHRU */
16685 do_default:
16686 default:
16687 if ((is_a <scalar_int_mode> (mode, &int_mode)
16688 && GET_MODE (rtl) == int_mode
16689 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16690 && dwarf_version >= 4)
16691 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16692 {
16693 /* Value expression. */
16694 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16695 if (loc_result)
16696 add_loc_descr (&loc_result,
16697 new_loc_descr (DW_OP_stack_value, 0, 0));
16698 }
16699 break;
16700 }
16701
16702 return loc_result;
16703 }
16704
16705 /* We need to figure out what section we should use as the base for the
16706 address ranges where a given location is valid.
16707 1. If this particular DECL has a section associated with it, use that.
16708 2. If this function has a section associated with it, use that.
16709 3. Otherwise, use the text section.
16710 XXX: If you split a variable across multiple sections, we won't notice. */
16711
16712 static const char *
16713 secname_for_decl (const_tree decl)
16714 {
16715 const char *secname;
16716
16717 if (VAR_OR_FUNCTION_DECL_P (decl)
16718 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16719 && DECL_SECTION_NAME (decl))
16720 secname = DECL_SECTION_NAME (decl);
16721 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16722 secname = DECL_SECTION_NAME (current_function_decl);
16723 else if (cfun && in_cold_section_p)
16724 secname = crtl->subsections.cold_section_label;
16725 else
16726 secname = text_section_label;
16727
16728 return secname;
16729 }
16730
16731 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16732
16733 static bool
16734 decl_by_reference_p (tree decl)
16735 {
16736 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16737 || VAR_P (decl))
16738 && DECL_BY_REFERENCE (decl));
16739 }
16740
16741 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16742 for VARLOC. */
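/* As the code below implies, WANT_ADDRESS == 0 asks for the value of
   the object, a nonzero value asks for its location, and 2 in addition
   allows falling back to an implicit location (DW_OP_stack_value) when
   no address is available.  */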
16743
16744 static dw_loc_descr_ref
16745 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16746 enum var_init_status initialized)
16747 {
16748 int have_address = 0;
16749 dw_loc_descr_ref descr;
16750 machine_mode mode;
16751
16752 if (want_address != 2)
16753 {
16754 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16755 /* Single part. */
16756 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16757 {
16758 varloc = PAT_VAR_LOCATION_LOC (varloc);
16759 if (GET_CODE (varloc) == EXPR_LIST)
16760 varloc = XEXP (varloc, 0);
16761 mode = GET_MODE (varloc);
16762 if (MEM_P (varloc))
16763 {
16764 rtx addr = XEXP (varloc, 0);
16765 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16766 mode, initialized);
16767 if (descr)
16768 have_address = 1;
16769 else
16770 {
16771 rtx x = avoid_constant_pool_reference (varloc);
16772 if (x != varloc)
16773 descr = mem_loc_descriptor (x, mode, VOIDmode,
16774 initialized);
16775 }
16776 }
16777 else
16778 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16779 }
16780 else
16781 return 0;
16782 }
16783 else
16784 {
16785 if (GET_CODE (varloc) == VAR_LOCATION)
16786 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16787 else
16788 mode = DECL_MODE (loc);
16789 descr = loc_descriptor (varloc, mode, initialized);
16790 have_address = 1;
16791 }
16792
16793 if (!descr)
16794 return 0;
16795
16796 if (want_address == 2 && !have_address
16797 && (dwarf_version >= 4 || !dwarf_strict))
16798 {
16799 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16800 {
16801 expansion_failed (loc, NULL_RTX,
16802 "DWARF address size mismatch");
16803 return 0;
16804 }
16805 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16806 have_address = 1;
16807 }
16808 /* Show if we can't fill the request for an address. */
16809 if (want_address && !have_address)
16810 {
16811 expansion_failed (loc, NULL_RTX,
16812 "Want address and only have value");
16813 return 0;
16814 }
16815
16816 /* If we've got an address and don't want one, dereference. */
16817 if (!want_address && have_address)
16818 {
16819 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16820 enum dwarf_location_atom op;
16821
16822 if (size > DWARF2_ADDR_SIZE || size == -1)
16823 {
16824 expansion_failed (loc, NULL_RTX,
16825 "DWARF address size mismatch");
16826 return 0;
16827 }
16828 else if (size == DWARF2_ADDR_SIZE)
16829 op = DW_OP_deref;
16830 else
16831 op = DW_OP_deref_size;
16832
16833 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16834 }
16835
16836 return descr;
16837 }
16838
16839 /* Create a DW_OP_piece or DW_OP_bit_piece for BITSIZE bits at bit offset
16840 OFFSET, or return NULL if it is not possible. */
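/* E.g. a byte-aligned 24-bit piece at bit offset 0 becomes DW_OP_piece 3,
   while an 18-bit piece needs DW_OP_bit_piece 18, <offset>, which is only
   emitted for DWARF 3 and later or when not restricted by dwarf_strict.  */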
16841
16842 static dw_loc_descr_ref
16843 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16844 {
16845 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16846 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16847 else if (dwarf_version >= 3 || !dwarf_strict)
16848 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16849 else
16850 return NULL;
16851 }
16852
16853 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16854 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
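/* Illustrative example: if an 8-byte DECL was scalarized into two 4-byte
   halves and only the first half has a location (say a register), the
   resulting expression is along the lines of
     DW_OP_reg<N>; DW_OP_piece 4; DW_OP_piece 4
   where the trailing piece, preceded by an empty expression, marks the
   optimized-out half.  */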
16855
16856 static dw_loc_descr_ref
16857 dw_sra_loc_expr (tree decl, rtx loc)
16858 {
16859 rtx p;
16860 unsigned HOST_WIDE_INT padsize = 0;
16861 dw_loc_descr_ref descr, *descr_tail;
16862 unsigned HOST_WIDE_INT decl_size;
16863 rtx varloc;
16864 enum var_init_status initialized;
16865
16866 if (DECL_SIZE (decl) == NULL
16867 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16868 return NULL;
16869
16870 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16871 descr = NULL;
16872 descr_tail = &descr;
16873
16874 for (p = loc; p; p = XEXP (p, 1))
16875 {
16876 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16877 rtx loc_note = *decl_piece_varloc_ptr (p);
16878 dw_loc_descr_ref cur_descr;
16879 dw_loc_descr_ref *tail, last = NULL;
16880 unsigned HOST_WIDE_INT opsize = 0;
16881
16882 if (loc_note == NULL_RTX
16883 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16884 {
16885 padsize += bitsize;
16886 continue;
16887 }
16888 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16889 varloc = NOTE_VAR_LOCATION (loc_note);
16890 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16891 if (cur_descr == NULL)
16892 {
16893 padsize += bitsize;
16894 continue;
16895 }
16896
16897 /* Check that cur_descr either doesn't use
16898 DW_OP_*piece operations, or their sum is equal
16899 to bitsize. Otherwise we can't embed it. */
16900 for (tail = &cur_descr; *tail != NULL;
16901 tail = &(*tail)->dw_loc_next)
16902 if ((*tail)->dw_loc_opc == DW_OP_piece)
16903 {
16904 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16905 * BITS_PER_UNIT;
16906 last = *tail;
16907 }
16908 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16909 {
16910 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16911 last = *tail;
16912 }
16913
16914 if (last != NULL && opsize != bitsize)
16915 {
16916 padsize += bitsize;
16917 /* Discard the current piece of the descriptor and release any
16918 addr_table entries it uses. */
16919 remove_loc_list_addr_table_entries (cur_descr);
16920 continue;
16921 }
16922
16923 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16924 expression, which means that those bits are optimized out. */
16925 if (padsize)
16926 {
16927 if (padsize > decl_size)
16928 {
16929 remove_loc_list_addr_table_entries (cur_descr);
16930 goto discard_descr;
16931 }
16932 decl_size -= padsize;
16933 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16934 if (*descr_tail == NULL)
16935 {
16936 remove_loc_list_addr_table_entries (cur_descr);
16937 goto discard_descr;
16938 }
16939 descr_tail = &(*descr_tail)->dw_loc_next;
16940 padsize = 0;
16941 }
16942 *descr_tail = cur_descr;
16943 descr_tail = tail;
16944 if (bitsize > decl_size)
16945 goto discard_descr;
16946 decl_size -= bitsize;
16947 if (last == NULL)
16948 {
16949 HOST_WIDE_INT offset = 0;
16950 if (GET_CODE (varloc) == VAR_LOCATION
16951 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16952 {
16953 varloc = PAT_VAR_LOCATION_LOC (varloc);
16954 if (GET_CODE (varloc) == EXPR_LIST)
16955 varloc = XEXP (varloc, 0);
16956 }
16957 do
16958 {
16959 if (GET_CODE (varloc) == CONST
16960 || GET_CODE (varloc) == SIGN_EXTEND
16961 || GET_CODE (varloc) == ZERO_EXTEND)
16962 varloc = XEXP (varloc, 0);
16963 else if (GET_CODE (varloc) == SUBREG)
16964 varloc = SUBREG_REG (varloc);
16965 else
16966 break;
16967 }
16968 while (1);
16969 /* The DW_OP_bit_piece offset should be zero for register
16970 or implicit location descriptions and empty location
16971 descriptions, but for memory addresses it needs big-endian
16972 adjustment. */
16973 if (MEM_P (varloc))
16974 {
16975 unsigned HOST_WIDE_INT memsize;
16976 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16977 goto discard_descr;
16978 memsize *= BITS_PER_UNIT;
16979 if (memsize != bitsize)
16980 {
16981 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16982 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16983 goto discard_descr;
16984 if (memsize < bitsize)
16985 goto discard_descr;
16986 if (BITS_BIG_ENDIAN)
16987 offset = memsize - bitsize;
16988 }
16989 }
16990
16991 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16992 if (*descr_tail == NULL)
16993 goto discard_descr;
16994 descr_tail = &(*descr_tail)->dw_loc_next;
16995 }
16996 }
16997
16998 /* If there were any non-empty expressions, add padding till the end of
16999 the decl. */
17000 if (descr != NULL && decl_size != 0)
17001 {
17002 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17003 if (*descr_tail == NULL)
17004 goto discard_descr;
17005 }
17006 return descr;
17007
17008 discard_descr:
17009 /* Discard the descriptor and release any addr_table entries it uses. */
17010 remove_loc_list_addr_table_entries (descr);
17011 return NULL;
17012 }
17013
17014 /* Return the DWARF representation of the location list LOC_LIST of
17015 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17016 function. */
17017
17018 static dw_loc_list_ref
17019 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17020 {
17021 const char *endname, *secname;
17022 var_loc_view endview;
17023 rtx varloc;
17024 enum var_init_status initialized;
17025 struct var_loc_node *node;
17026 dw_loc_descr_ref descr;
17027 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17028 dw_loc_list_ref list = NULL;
17029 dw_loc_list_ref *listp = &list;
17030
17031 /* Now that we know what section we are using for a base,
17032 actually construct the list of locations.
17033 The first location information is what is passed to the
17034 function that creates the location list, and the remaining
17035 locations just get added on to that list.
17036 Note that we only know the start address for a location
17037 (i.e. location changes), so to build the range, we use
17038 the range [current location start, next location start].
17039 This means we have to special case the last node, and generate
17040 a range of [last location start, end of function label]. */
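/* For instance (purely illustrative), a variable that is in a register
   from .LVL1 to .LVL2 and in memory from .LVL2 onwards produces two
   entries: [.LVL1, .LVL2) with a register expression and
   [.LVL2, <end-of-function label>) with the memory expression.  */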
17041
17042 if (cfun && crtl->has_bb_partition)
17043 {
17044 bool save_in_cold_section_p = in_cold_section_p;
17045 in_cold_section_p = first_function_block_is_cold;
17046 if (loc_list->last_before_switch == NULL)
17047 in_cold_section_p = !in_cold_section_p;
17048 secname = secname_for_decl (decl);
17049 in_cold_section_p = save_in_cold_section_p;
17050 }
17051 else
17052 secname = secname_for_decl (decl);
17053
17054 for (node = loc_list->first; node; node = node->next)
17055 {
17056 bool range_across_switch = false;
17057 if (GET_CODE (node->loc) == EXPR_LIST
17058 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17059 {
17060 if (GET_CODE (node->loc) == EXPR_LIST)
17061 {
17062 descr = NULL;
17063 /* This requires DW_OP_{,bit_}piece, which is not usable
17064 inside DWARF expressions. */
17065 if (want_address == 2)
17066 descr = dw_sra_loc_expr (decl, node->loc);
17067 }
17068 else
17069 {
17070 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17071 varloc = NOTE_VAR_LOCATION (node->loc);
17072 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17073 }
17074 if (descr)
17075 {
17076 /* If section switch happens in between node->label
17077 and node->next->label (or end of function) and
17078 we can't emit it as a single entry list,
17079 emit two ranges, first one ending at the end
17080 of first partition and second one starting at the
17081 beginning of second partition. */
17082 if (node == loc_list->last_before_switch
17083 && (node != loc_list->first || loc_list->first->next)
17084 && current_function_decl)
17085 {
17086 endname = cfun->fde->dw_fde_end;
17087 endview = 0;
17088 range_across_switch = true;
17089 }
17090 /* The variable has a location between NODE->LABEL and
17091 NODE->NEXT->LABEL. */
17092 else if (node->next)
17093 endname = node->next->label, endview = node->next->view;
17094 /* If the variable has a location at the last label
17095 it keeps its location until the end of function. */
17096 else if (!current_function_decl)
17097 endname = text_end_label, endview = 0;
17098 else
17099 {
17100 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17101 current_function_funcdef_no);
17102 endname = ggc_strdup (label_id);
17103 endview = 0;
17104 }
17105
17106 *listp = new_loc_list (descr, node->label, node->view,
17107 endname, endview, secname);
17108 if (TREE_CODE (decl) == PARM_DECL
17109 && node == loc_list->first
17110 && NOTE_P (node->loc)
17111 && strcmp (node->label, endname) == 0)
17112 (*listp)->force = true;
17113 listp = &(*listp)->dw_loc_next;
17114 }
17115 }
17116
17117 if (cfun
17118 && crtl->has_bb_partition
17119 && node == loc_list->last_before_switch)
17120 {
17121 bool save_in_cold_section_p = in_cold_section_p;
17122 in_cold_section_p = !first_function_block_is_cold;
17123 secname = secname_for_decl (decl);
17124 in_cold_section_p = save_in_cold_section_p;
17125 }
17126
17127 if (range_across_switch)
17128 {
17129 if (GET_CODE (node->loc) == EXPR_LIST)
17130 descr = dw_sra_loc_expr (decl, node->loc);
17131 else
17132 {
17133 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17134 varloc = NOTE_VAR_LOCATION (node->loc);
17135 descr = dw_loc_list_1 (decl, varloc, want_address,
17136 initialized);
17137 }
17138 gcc_assert (descr);
17139 /* The variable has a location between NODE->LABEL and
17140 NODE->NEXT->LABEL. */
17141 if (node->next)
17142 endname = node->next->label, endview = node->next->view;
17143 else
17144 endname = cfun->fde->dw_fde_second_end, endview = 0;
17145 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17146 endname, endview, secname);
17147 listp = &(*listp)->dw_loc_next;
17148 }
17149 }
17150
17151 /* Try to avoid the overhead of a location list by emitting a location
17152 expression instead, but only if we didn't have more than one
17153 location entry in the first place. If some entries were not
17154 representable, we don't want to pretend that a single entry that was
17155 representable applies to the entire scope in which the variable is
17156 available. */
17157 if (list && loc_list->first->next)
17158 gen_llsym (list);
17159 else
17160 maybe_gen_llsym (list);
17161
17162 return list;
17163 }
17164
17165 /* Return true if the loc_list has only a single element and thus can be
17166 represented as a location description. */
17167
17168 static bool
17169 single_element_loc_list_p (dw_loc_list_ref list)
17170 {
17171 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17172 return !list->ll_symbol;
17173 }
17174
17175 /* Duplicate a single element of location list. */
17176
17177 static inline dw_loc_descr_ref
17178 copy_loc_descr (dw_loc_descr_ref ref)
17179 {
17180 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17181 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17182 return copy;
17183 }
17184
17185 /* To each location in list LIST append loc descr REF. */
17186
17187 static void
17188 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17189 {
17190 dw_loc_descr_ref copy;
17191 add_loc_descr (&list->expr, ref);
17192 list = list->dw_loc_next;
17193 while (list)
17194 {
17195 copy = copy_loc_descr (ref);
17196 add_loc_descr (&list->expr, copy);
17197 while (copy->dw_loc_next)
17198 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17199 list = list->dw_loc_next;
17200 }
17201 }
17202
17203 /* To each location in list LIST prepend loc descr REF. */
17204
17205 static void
17206 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17207 {
17208 dw_loc_descr_ref copy;
17209 dw_loc_descr_ref ref_end = list->expr;
17210 add_loc_descr (&ref, list->expr);
17211 list->expr = ref;
17212 list = list->dw_loc_next;
17213 while (list)
17214 {
17215 dw_loc_descr_ref end = list->expr;
17216 list->expr = copy = copy_loc_descr (ref);
17217 while (copy->dw_loc_next != ref_end)
17218 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17219 copy->dw_loc_next = end;
17220 list = list->dw_loc_next;
17221 }
17222 }
17223
17224 /* Given two lists RET and LIST,
17225 produce a location list that is the result of adding the expression in LIST
17226 to the expression in RET at each position in the program.
17227 This might be destructive on both RET and LIST.
17228
17229 TODO: We handle only the simple cases of RET or LIST having at most one
17230 element. The general case would involve sorting the lists in program order
17231 and merging them, which will need some additional work.
17232 Adding that will improve the quality of debug info, especially for SRA-ed
17233 structures. */
17234
17235 static void
17236 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17237 {
17238 if (!list)
17239 return;
17240 if (!*ret)
17241 {
17242 *ret = list;
17243 return;
17244 }
17245 if (!list->dw_loc_next)
17246 {
17247 add_loc_descr_to_each (*ret, list->expr);
17248 return;
17249 }
17250 if (!(*ret)->dw_loc_next)
17251 {
17252 prepend_loc_descr_to_each (list, (*ret)->expr);
17253 *ret = list;
17254 return;
17255 }
17256 expansion_failed (NULL_TREE, NULL_RTX,
17257 "Don't know how to merge two non-trivial"
17258 " location lists.\n");
17259 *ret = NULL;
17260 return;
17261 }
17262
17263 /* LOC is a constant expression. Try our luck: look it up in the constant
17264 pool and return a loc_descr for its address. */
17265
17266 static dw_loc_descr_ref
17267 cst_pool_loc_descr (tree loc)
17268 {
17269 /* Get an RTL for this, if something has been emitted. */
17270 rtx rtl = lookup_constant_def (loc);
17271
17272 if (!rtl || !MEM_P (rtl))
17273 {
17274 gcc_assert (!rtl);
17275 return 0;
17276 }
17277 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17278
17279 /* TODO: We might get more coverage if we were actually delaying expansion
17280 of all expressions until the end of compilation, when constant pools are
17281 fully populated. */
17282 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17283 {
17284 expansion_failed (loc, NULL_RTX,
17285 "CST value in contant pool but not marked.");
17286 return 0;
17287 }
17288 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17289 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17290 }
17291
17292 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17293 by looking for an inner INDIRECT_REF expression and turning
17294 it into simple arithmetic.
17295
17296 See loc_list_from_tree for the meaning of CONTEXT. */
17297
17298 static dw_loc_list_ref
17299 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17300 loc_descr_context *context)
17301 {
17302 tree obj, offset;
17303 poly_int64 bitsize, bitpos, bytepos;
17304 machine_mode mode;
17305 int unsignedp, reversep, volatilep = 0;
17306 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17307
17308 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17309 &bitsize, &bitpos, &offset, &mode,
17310 &unsignedp, &reversep, &volatilep);
17311 STRIP_NOPS (obj);
17312 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17313 {
17314 expansion_failed (loc, NULL_RTX, "bitfield access");
17315 return 0;
17316 }
17317 if (!INDIRECT_REF_P (obj))
17318 {
17319 expansion_failed (obj,
17320 NULL_RTX, "no indirect ref in inner reference");
17321 return 0;
17322 }
17323 if (!offset && known_eq (bitpos, 0))
17324 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17325 context);
17326 else if (toplev
17327 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17328 && (dwarf_version >= 4 || !dwarf_strict))
17329 {
17330 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17331 if (!list_ret)
17332 return 0;
17333 if (offset)
17334 {
17335 /* Variable offset. */
17336 list_ret1 = loc_list_from_tree (offset, 0, context);
17337 if (list_ret1 == 0)
17338 return 0;
17339 add_loc_list (&list_ret, list_ret1);
17340 if (!list_ret)
17341 return 0;
17342 add_loc_descr_to_each (list_ret,
17343 new_loc_descr (DW_OP_plus, 0, 0));
17344 }
17345 HOST_WIDE_INT value;
17346 if (bytepos.is_constant (&value) && value > 0)
17347 add_loc_descr_to_each (list_ret,
17348 new_loc_descr (DW_OP_plus_uconst, value, 0));
17349 else if (maybe_ne (bytepos, 0))
17350 loc_list_plus_const (list_ret, bytepos);
17351 add_loc_descr_to_each (list_ret,
17352 new_loc_descr (DW_OP_stack_value, 0, 0));
17353 }
17354 return list_ret;
17355 }
17356
17357 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17358 all operations from LOC onward are nops, move to the last one. Insert into
17359 NOPS all operations that are skipped. */
17360
17361 static void
17362 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17363 hash_set<dw_loc_descr_ref> &nops)
17364 {
17365 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17366 {
17367 nops.add (loc);
17368 loc = loc->dw_loc_next;
17369 }
17370 }
17371
17372 /* Helper for loc_descr_without_nops: free the location description operation
17373 P. */
17374
17375 bool
17376 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17377 {
17378 ggc_free (loc);
17379 return true;
17380 }
17381
17382 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17383 finishes LOC. */
17384
17385 static void
17386 loc_descr_without_nops (dw_loc_descr_ref &loc)
17387 {
17388 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17389 return;
17390
17391 /* Set of all DW_OP_nop operations we remove. */
17392 hash_set<dw_loc_descr_ref> nops;
17393
17394 /* First, strip all prefix NOP operations in order to keep the head of the
17395 operations list. */
17396 loc_descr_to_next_no_nop (loc, nops);
17397
17398 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17399 {
17400 /* For control flow operations: strip "prefix" nops in destination
17401 labels. */
17402 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17403 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17404 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17405 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17406
17407 /* Do the same for the operations that follow, then move to the next
17408 iteration. */
17409 if (cur->dw_loc_next != NULL)
17410 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17411 cur = cur->dw_loc_next;
17412 }
17413
17414 nops.traverse<void *, free_loc_descr> (NULL);
17415 }
17416
17417
17418 struct dwarf_procedure_info;
17419
17420 /* Helper structure for location descriptions generation. */
17421 struct loc_descr_context
17422 {
17423 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17424 NULL_TREE if DW_OP_push_object_address is invalid for this location
17425 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17426 tree context_type;
17427 /* The ..._DECL node that should be translated as a
17428 DW_OP_push_object_address operation. */
17429 tree base_decl;
17430 /* Information about the DWARF procedure we are currently generating. NULL if
17431 we are not generating a DWARF procedure. */
17432 struct dwarf_procedure_info *dpi;
17433 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17434 by consumer. Used for DW_TAG_generic_subrange attributes. */
17435 bool placeholder_arg;
17436 /* True if PLACEHOLDER_EXPR has been seen. */
17437 bool placeholder_seen;
17438 };
17439
17440 /* DWARF procedures generation
17441
17442 DWARF expressions (aka location descriptions) are used to encode things
17443 that vary, such as sizes or offsets. Such computations can have redundant parts
17444 that can be factorized in order to reduce the size of the output debug
17445 information. This is the whole point of DWARF procedures.
17446
17447 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17448 already factorized into functions ("size functions") in order to handle very
17449 big and complex types. Such functions are quite simple: they have integral
17450 arguments, they return an integral result and their body contains only a
17451 return statement with arithmetic expressions. This is the only kind of
17452 function we are interested in translating into DWARF procedures, here.
17453
17454    DWARF expressions and DWARF procedures are executed using a stack, so we have
17455 to define some calling convention for them to interact. Let's say that:
17456
17457 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17458 all arguments in reverse order (right-to-left) so that when the DWARF
17459 procedure execution starts, the first argument is the top of the stack.
17460
17461 - Then, when returning, the DWARF procedure must have consumed all arguments
17462 on the stack, must have pushed the result and touched nothing else.
17463
17464    - Each integral argument and the result have integral types that can be held
17465      in a single stack slot.
17466
17467 - We call "frame offset" the number of stack slots that are "under DWARF
17468      procedure control": it includes the argument slots, the temporaries and
17469 the result slot. Thus, it is equal to the number of arguments when the
17470 procedure execution starts and must be equal to one (the result) when it
17471 returns. */
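/* As an illustrative sketch of this convention (not actual compiler output),
   consider a hypothetical DWARF procedure that computes ARG0 + ARG1.  The
   caller pushes ARG1 and then ARG0 before emitting the call:

       DW_OP_lit2 (ARG1); DW_OP_lit1 (ARG0); DW_OP_call4 <procedure DIE>

   while the procedure body is a single DW_OP_plus.  Its frame offset starts
   at 2 (the two argument slots) and ends at 1 (the result slot), so after the
   call the two arguments have been replaced by the result on the stack.  */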
17472
17473 /* Helper structure used when generating operations for a DWARF procedure. */
17474 struct dwarf_procedure_info
17475 {
17476 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17477 currently translated. */
17478 tree fndecl;
17479 /* The number of arguments FNDECL takes. */
17480 unsigned args_count;
17481 };
17482
17483 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17484 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17485 equate it to this DIE. */
17486
17487 static dw_die_ref
17488 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17489 dw_die_ref parent_die)
17490 {
17491 dw_die_ref dwarf_proc_die;
17492
17493 if ((dwarf_version < 3 && dwarf_strict)
17494 || location == NULL)
17495 return NULL;
17496
17497 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17498 if (fndecl)
17499 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17500 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17501 return dwarf_proc_die;
17502 }
17503
17504 /* Return whether TYPE is a supported type as a DWARF procedure argument
17505 type or return type (we handle only scalar types and pointer types that
17506    aren't wider than the DWARF expression evaluation stack).  */
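/* For instance, assuming a target where DWARF2_ADDR_SIZE is 8, a 4-byte "int"
   or an 8-byte pointer type is accepted, whereas a 16-byte "__int128" or any
   aggregate type is not.  */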
17507
17508 static bool
17509 is_handled_procedure_type (tree type)
17510 {
17511 return ((INTEGRAL_TYPE_P (type)
17512 || TREE_CODE (type) == OFFSET_TYPE
17513 || TREE_CODE (type) == POINTER_TYPE)
17514 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17515 }
17516
17517 /* Helper for resolve_args_picking: do the same but stop when coming across
17518 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17519 offset *before* evaluating the corresponding operation. */
17520
17521 static bool
17522 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17523 struct dwarf_procedure_info *dpi,
17524 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17525 {
17526 /* The "frame_offset" identifier is already used to name a macro... */
17527 unsigned frame_offset_ = initial_frame_offset;
17528 dw_loc_descr_ref l;
17529
17530 for (l = loc; l != NULL;)
17531 {
17532 bool existed;
17533 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17534
17535       /* If we have already met this node, there is nothing more to compute.  */
17536 if (existed)
17537 {
17538 /* Make sure that the stack size is consistent wherever the execution
17539 flow comes from. */
17540 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17541 break;
17542 }
17543 l_frame_offset = frame_offset_;
17544
17545 /* If needed, relocate the picking offset with respect to the frame
17546 offset. */
17547 if (l->frame_offset_rel)
17548 {
17549 unsigned HOST_WIDE_INT off;
17550 switch (l->dw_loc_opc)
17551 {
17552 case DW_OP_pick:
17553 off = l->dw_loc_oprnd1.v.val_unsigned;
17554 break;
17555 case DW_OP_dup:
17556 off = 0;
17557 break;
17558 case DW_OP_over:
17559 off = 1;
17560 break;
17561 default:
17562 gcc_unreachable ();
17563 }
17564 /* frame_offset_ is the size of the current stack frame, including
17565 incoming arguments. Besides, the arguments are pushed
17566 right-to-left. Thus, in order to access the Nth argument from
17567 this operation node, the picking has to skip temporaries *plus*
17568 one stack slot per argument (0 for the first one, 1 for the second
17569 one, etc.).
17570
17571 	     The targeted argument number (N) is already set as the operand,
17572 	     and the number of temporaries can be computed with:
17573 	       frame_offset_ - dpi->args_count */
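	  /* For example (purely illustrative): with dpi->args_count == 2 and
	     frame_offset_ == 3 (one temporary above the arguments), a
	     reference to argument N == 0 gives off = 0 + 3 - 2 = 1, which is
	     turned into DW_OP_over just below, correctly skipping the
	     temporary.  */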
17574 off += frame_offset_ - dpi->args_count;
17575
17576 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17577 if (off > 255)
17578 return false;
17579
17580 if (off == 0)
17581 {
17582 l->dw_loc_opc = DW_OP_dup;
17583 l->dw_loc_oprnd1.v.val_unsigned = 0;
17584 }
17585 else if (off == 1)
17586 {
17587 l->dw_loc_opc = DW_OP_over;
17588 l->dw_loc_oprnd1.v.val_unsigned = 0;
17589 }
17590 else
17591 {
17592 l->dw_loc_opc = DW_OP_pick;
17593 l->dw_loc_oprnd1.v.val_unsigned = off;
17594 }
17595 }
17596
17597 /* Update frame_offset according to the effect the current operation has
17598 on the stack. */
17599 switch (l->dw_loc_opc)
17600 {
17601 case DW_OP_deref:
17602 case DW_OP_swap:
17603 case DW_OP_rot:
17604 case DW_OP_abs:
17605 case DW_OP_neg:
17606 case DW_OP_not:
17607 case DW_OP_plus_uconst:
17608 case DW_OP_skip:
17609 case DW_OP_reg0:
17610 case DW_OP_reg1:
17611 case DW_OP_reg2:
17612 case DW_OP_reg3:
17613 case DW_OP_reg4:
17614 case DW_OP_reg5:
17615 case DW_OP_reg6:
17616 case DW_OP_reg7:
17617 case DW_OP_reg8:
17618 case DW_OP_reg9:
17619 case DW_OP_reg10:
17620 case DW_OP_reg11:
17621 case DW_OP_reg12:
17622 case DW_OP_reg13:
17623 case DW_OP_reg14:
17624 case DW_OP_reg15:
17625 case DW_OP_reg16:
17626 case DW_OP_reg17:
17627 case DW_OP_reg18:
17628 case DW_OP_reg19:
17629 case DW_OP_reg20:
17630 case DW_OP_reg21:
17631 case DW_OP_reg22:
17632 case DW_OP_reg23:
17633 case DW_OP_reg24:
17634 case DW_OP_reg25:
17635 case DW_OP_reg26:
17636 case DW_OP_reg27:
17637 case DW_OP_reg28:
17638 case DW_OP_reg29:
17639 case DW_OP_reg30:
17640 case DW_OP_reg31:
17641 case DW_OP_bregx:
17642 case DW_OP_piece:
17643 case DW_OP_deref_size:
17644 case DW_OP_nop:
17645 case DW_OP_bit_piece:
17646 case DW_OP_implicit_value:
17647 case DW_OP_stack_value:
17648 break;
17649
17650 case DW_OP_addr:
17651 case DW_OP_const1u:
17652 case DW_OP_const1s:
17653 case DW_OP_const2u:
17654 case DW_OP_const2s:
17655 case DW_OP_const4u:
17656 case DW_OP_const4s:
17657 case DW_OP_const8u:
17658 case DW_OP_const8s:
17659 case DW_OP_constu:
17660 case DW_OP_consts:
17661 case DW_OP_dup:
17662 case DW_OP_over:
17663 case DW_OP_pick:
17664 case DW_OP_lit0:
17665 case DW_OP_lit1:
17666 case DW_OP_lit2:
17667 case DW_OP_lit3:
17668 case DW_OP_lit4:
17669 case DW_OP_lit5:
17670 case DW_OP_lit6:
17671 case DW_OP_lit7:
17672 case DW_OP_lit8:
17673 case DW_OP_lit9:
17674 case DW_OP_lit10:
17675 case DW_OP_lit11:
17676 case DW_OP_lit12:
17677 case DW_OP_lit13:
17678 case DW_OP_lit14:
17679 case DW_OP_lit15:
17680 case DW_OP_lit16:
17681 case DW_OP_lit17:
17682 case DW_OP_lit18:
17683 case DW_OP_lit19:
17684 case DW_OP_lit20:
17685 case DW_OP_lit21:
17686 case DW_OP_lit22:
17687 case DW_OP_lit23:
17688 case DW_OP_lit24:
17689 case DW_OP_lit25:
17690 case DW_OP_lit26:
17691 case DW_OP_lit27:
17692 case DW_OP_lit28:
17693 case DW_OP_lit29:
17694 case DW_OP_lit30:
17695 case DW_OP_lit31:
17696 case DW_OP_breg0:
17697 case DW_OP_breg1:
17698 case DW_OP_breg2:
17699 case DW_OP_breg3:
17700 case DW_OP_breg4:
17701 case DW_OP_breg5:
17702 case DW_OP_breg6:
17703 case DW_OP_breg7:
17704 case DW_OP_breg8:
17705 case DW_OP_breg9:
17706 case DW_OP_breg10:
17707 case DW_OP_breg11:
17708 case DW_OP_breg12:
17709 case DW_OP_breg13:
17710 case DW_OP_breg14:
17711 case DW_OP_breg15:
17712 case DW_OP_breg16:
17713 case DW_OP_breg17:
17714 case DW_OP_breg18:
17715 case DW_OP_breg19:
17716 case DW_OP_breg20:
17717 case DW_OP_breg21:
17718 case DW_OP_breg22:
17719 case DW_OP_breg23:
17720 case DW_OP_breg24:
17721 case DW_OP_breg25:
17722 case DW_OP_breg26:
17723 case DW_OP_breg27:
17724 case DW_OP_breg28:
17725 case DW_OP_breg29:
17726 case DW_OP_breg30:
17727 case DW_OP_breg31:
17728 case DW_OP_fbreg:
17729 case DW_OP_push_object_address:
17730 case DW_OP_call_frame_cfa:
17731 case DW_OP_GNU_variable_value:
17732 ++frame_offset_;
17733 break;
17734
17735 case DW_OP_drop:
17736 case DW_OP_xderef:
17737 case DW_OP_and:
17738 case DW_OP_div:
17739 case DW_OP_minus:
17740 case DW_OP_mod:
17741 case DW_OP_mul:
17742 case DW_OP_or:
17743 case DW_OP_plus:
17744 case DW_OP_shl:
17745 case DW_OP_shr:
17746 case DW_OP_shra:
17747 case DW_OP_xor:
17748 case DW_OP_bra:
17749 case DW_OP_eq:
17750 case DW_OP_ge:
17751 case DW_OP_gt:
17752 case DW_OP_le:
17753 case DW_OP_lt:
17754 case DW_OP_ne:
17755 case DW_OP_regx:
17756 case DW_OP_xderef_size:
17757 --frame_offset_;
17758 break;
17759
17760 case DW_OP_call2:
17761 case DW_OP_call4:
17762 case DW_OP_call_ref:
17763 {
17764 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17765 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17766
17767 if (stack_usage == NULL)
17768 return false;
17769 frame_offset_ += *stack_usage;
17770 break;
17771 }
17772
17773 case DW_OP_implicit_pointer:
17774 case DW_OP_entry_value:
17775 case DW_OP_const_type:
17776 case DW_OP_regval_type:
17777 case DW_OP_deref_type:
17778 case DW_OP_convert:
17779 case DW_OP_reinterpret:
17780 case DW_OP_form_tls_address:
17781 case DW_OP_GNU_push_tls_address:
17782 case DW_OP_GNU_uninit:
17783 case DW_OP_GNU_encoded_addr:
17784 case DW_OP_GNU_implicit_pointer:
17785 case DW_OP_GNU_entry_value:
17786 case DW_OP_GNU_const_type:
17787 case DW_OP_GNU_regval_type:
17788 case DW_OP_GNU_deref_type:
17789 case DW_OP_GNU_convert:
17790 case DW_OP_GNU_reinterpret:
17791 case DW_OP_GNU_parameter_ref:
17792 /* loc_list_from_tree will probably not output these operations for
17793 size functions, so assume they will not appear here. */
17794 /* Fall through... */
17795
17796 default:
17797 gcc_unreachable ();
17798 }
17799
17800 /* Now, follow the control flow (except subroutine calls). */
17801 switch (l->dw_loc_opc)
17802 {
17803 case DW_OP_bra:
17804 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17805 frame_offsets))
17806 return false;
17807 /* Fall through. */
17808
17809 case DW_OP_skip:
17810 l = l->dw_loc_oprnd1.v.val_loc;
17811 break;
17812
17813 case DW_OP_stack_value:
17814 return true;
17815
17816 default:
17817 l = l->dw_loc_next;
17818 break;
17819 }
17820 }
17821
17822 return true;
17823 }
17824
17825 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17826 operations) in order to resolve the operand of DW_OP_pick operations that
17827 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17828    offset *before* LOC is executed.  Return whether all relocations were
17829    successful.  */
17830
17831 static bool
17832 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17833 struct dwarf_procedure_info *dpi)
17834 {
17835 /* Associate to all visited operations the frame offset *before* evaluating
17836 this operation. */
17837 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17838
17839 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17840 frame_offsets);
17841 }
17842
17843 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17844 Return NULL if it is not possible. */
17845
17846 static dw_die_ref
17847 function_to_dwarf_procedure (tree fndecl)
17848 {
17849 struct loc_descr_context ctx;
17850 struct dwarf_procedure_info dpi;
17851 dw_die_ref dwarf_proc_die;
17852 tree tree_body = DECL_SAVED_TREE (fndecl);
17853 dw_loc_descr_ref loc_body, epilogue;
17854
17855 tree cursor;
17856 unsigned i;
17857
17858 /* Do not generate multiple DWARF procedures for the same function
17859 declaration. */
17860 dwarf_proc_die = lookup_decl_die (fndecl);
17861 if (dwarf_proc_die != NULL)
17862 return dwarf_proc_die;
17863
17864 /* DWARF procedures are available starting with the DWARFv3 standard. */
17865 if (dwarf_version < 3 && dwarf_strict)
17866 return NULL;
17867
17868 /* We handle only functions for which we still have a body, that return a
17869    supported type and that take arguments with supported types.  Note that
17870 there is no point translating functions that return nothing. */
17871 if (tree_body == NULL_TREE
17872 || DECL_RESULT (fndecl) == NULL_TREE
17873 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17874 return NULL;
17875
17876 for (cursor = DECL_ARGUMENTS (fndecl);
17877 cursor != NULL_TREE;
17878 cursor = TREE_CHAIN (cursor))
17879 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17880 return NULL;
17881
17882 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17883 if (TREE_CODE (tree_body) != RETURN_EXPR)
17884 return NULL;
17885 tree_body = TREE_OPERAND (tree_body, 0);
17886 if (TREE_CODE (tree_body) != MODIFY_EXPR
17887 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17888 return NULL;
17889 tree_body = TREE_OPERAND (tree_body, 1);
17890
17891 /* Try to translate the body expression itself. Note that this will probably
17892 cause an infinite recursion if its call graph has a cycle. This is very
17893 unlikely for size functions, however, so don't bother with such things at
17894 the moment. */
17895 ctx.context_type = NULL_TREE;
17896 ctx.base_decl = NULL_TREE;
17897 ctx.dpi = &dpi;
17898 ctx.placeholder_arg = false;
17899 ctx.placeholder_seen = false;
17900 dpi.fndecl = fndecl;
17901 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17902 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17903 if (!loc_body)
17904 return NULL;
17905
17906 /* After evaluating all operands in "loc_body", we should still have on the
17907 stack all arguments plus the desired function result (top of the stack).
17908 Generate code in order to keep only the result in our stack frame. */
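  /* Illustrative sketch: for a two-argument procedure the epilogue built
     below is DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop.  Starting from
     the stack <result, ARG0, ARG1> (top first), each swap/drop pair removes
     the argument sitting just below the result, leaving only <result>.  */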
17909 epilogue = NULL;
17910 for (i = 0; i < dpi.args_count; ++i)
17911 {
17912 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17913 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17914 op_couple->dw_loc_next->dw_loc_next = epilogue;
17915 epilogue = op_couple;
17916 }
17917 add_loc_descr (&loc_body, epilogue);
17918 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17919 return NULL;
17920
17921 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17922      because they are considered useful.  Now that there is an epilogue, they no
17923      longer are, so give it another try.  */
17924 loc_descr_without_nops (loc_body);
17925
17926 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17927      a DW_TAG_dwarf_procedure, so we may have a conflict here.  It's unlikely,
17928 though, given that size functions do not come from source, so they should
17929 not have a dedicated DW_TAG_subprogram DIE. */
17930 dwarf_proc_die
17931 = new_dwarf_proc_die (loc_body, fndecl,
17932 get_context_die (DECL_CONTEXT (fndecl)));
17933
17934 /* The called DWARF procedure consumes one stack slot per argument and
17935 returns one stack slot. */
17936 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17937
17938 return dwarf_proc_die;
17939 }
17940
17941
17942 /* Generate Dwarf location list representing LOC.
17943    If WANT_ADDRESS is 0 (false), an expression computing the value of LOC
17944    will be returned.  If WANT_ADDRESS is 1, an expression computing the
17945    address of LOC will be returned.  If WANT_ADDRESS is 2, an expression
17946    computing an address usable in a location description will be returned
17947    (i.e. DW_OP_reg can be used to refer to register values).
17948
17949 CONTEXT provides information to customize the location descriptions
17950 generation. Its context_type field specifies what type is implicitly
17951 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17952 will not be generated.
17953
17954 Its DPI field determines whether we are generating a DWARF expression for a
17955    DWARF procedure, in which case PARM_DECL references are processed specially.
17956
17957 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17958 and dpi fields were null. */
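/* As a rough illustration only (the exact output depends on the target and on
   how the variable is located), for an automatic integer variable stored at
   frame offset -8, WANT_ADDRESS == 1 could yield something like
   "DW_OP_fbreg -8", while WANT_ADDRESS == 0 would append a dereference
   (DW_OP_deref, or DW_OP_deref_size when the type is narrower than an
   address).  */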
17959
17960 static dw_loc_list_ref
17961 loc_list_from_tree_1 (tree loc, int want_address,
17962 struct loc_descr_context *context)
17963 {
17964 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17965 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17966 int have_address = 0;
17967 enum dwarf_location_atom op;
17968
17969   /* ??? Most of the time we do not take proper care of sign/zero
17970      extending the values.  Hopefully this won't be a real
17971      problem... */
17972
17973 if (context != NULL
17974 && context->base_decl == loc
17975 && want_address == 0)
17976 {
17977 if (dwarf_version >= 3 || !dwarf_strict)
17978 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17979 NULL, 0, NULL, 0, NULL);
17980 else
17981 return NULL;
17982 }
17983
17984 switch (TREE_CODE (loc))
17985 {
17986 case ERROR_MARK:
17987 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17988 return 0;
17989
17990 case PLACEHOLDER_EXPR:
17991 /* This case involves extracting fields from an object to determine the
17992 position of other fields. It is supposed to appear only as the first
17993 operand of COMPONENT_REF nodes and to reference precisely the type
17994 that the context allows. */
17995 if (context != NULL
17996 && TREE_TYPE (loc) == context->context_type
17997 && want_address >= 1)
17998 {
17999 if (dwarf_version >= 3 || !dwarf_strict)
18000 {
18001 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18002 have_address = 1;
18003 break;
18004 }
18005 else
18006 return NULL;
18007 }
18008 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18009 the single argument passed by consumer. */
18010 else if (context != NULL
18011 && context->placeholder_arg
18012 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18013 && want_address == 0)
18014 {
18015 ret = new_loc_descr (DW_OP_pick, 0, 0);
18016 ret->frame_offset_rel = 1;
18017 context->placeholder_seen = true;
18018 break;
18019 }
18020 else
18021 expansion_failed (loc, NULL_RTX,
18022 "PLACEHOLDER_EXPR for an unexpected type");
18023 break;
18024
18025 case CALL_EXPR:
18026 {
18027 const int nargs = call_expr_nargs (loc);
18028 tree callee = get_callee_fndecl (loc);
18029 int i;
18030 dw_die_ref dwarf_proc;
18031
18032 if (callee == NULL_TREE)
18033 goto call_expansion_failed;
18034
18035 /* We handle only functions that return an integer. */
18036 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18037 goto call_expansion_failed;
18038
18039 dwarf_proc = function_to_dwarf_procedure (callee);
18040 if (dwarf_proc == NULL)
18041 goto call_expansion_failed;
18042
18043 /* Evaluate arguments right-to-left so that the first argument will
18044 be the top-most one on the stack. */
18045 for (i = nargs - 1; i >= 0; --i)
18046 {
18047 dw_loc_descr_ref loc_descr
18048 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18049 context);
18050
18051 if (loc_descr == NULL)
18052 goto call_expansion_failed;
18053
18054 add_loc_descr (&ret, loc_descr);
18055 }
18056
18057 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18058 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18059 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18060 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18061 add_loc_descr (&ret, ret1);
18062 break;
18063
18064 call_expansion_failed:
18065 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18066 /* There are no opcodes for these operations. */
18067 return 0;
18068 }
18069
18070 case PREINCREMENT_EXPR:
18071 case PREDECREMENT_EXPR:
18072 case POSTINCREMENT_EXPR:
18073 case POSTDECREMENT_EXPR:
18074       expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18075 /* There are no opcodes for these operations. */
18076 return 0;
18077
18078 case ADDR_EXPR:
18079       /* If we already want an address, see if there is an INDIRECT_REF inside,
18080          e.g. for &this->field.  */
18081 if (want_address)
18082 {
18083 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18084 (loc, want_address == 2, context);
18085 if (list_ret)
18086 have_address = 1;
18087 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18088 && (ret = cst_pool_loc_descr (loc)))
18089 have_address = 1;
18090 }
18091 /* Otherwise, process the argument and look for the address. */
18092 if (!list_ret && !ret)
18093 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18094 else
18095 {
18096 if (want_address)
18097 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18098 return NULL;
18099 }
18100 break;
18101
18102 case VAR_DECL:
18103 if (DECL_THREAD_LOCAL_P (loc))
18104 {
18105 rtx rtl;
18106 enum dwarf_location_atom tls_op;
18107 enum dtprel_bool dtprel = dtprel_false;
18108
18109 if (targetm.have_tls)
18110 {
18111 /* If this is not defined, we have no way to emit the
18112 data. */
18113 if (!targetm.asm_out.output_dwarf_dtprel)
18114 return 0;
18115
18116 /* The way DW_OP_GNU_push_tls_address is specified, we
18117 can only look up addresses of objects in the current
18118 module. We used DW_OP_addr as first op, but that's
18119 wrong, because DW_OP_addr is relocated by the debug
18120 info consumer, while DW_OP_GNU_push_tls_address
18121 operand shouldn't be. */
18122 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18123 return 0;
18124 dtprel = dtprel_true;
18125 /* We check for DWARF 5 here because gdb did not implement
18126 DW_OP_form_tls_address until after 7.12. */
18127 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18128 : DW_OP_GNU_push_tls_address);
18129 }
18130 else
18131 {
18132 if (!targetm.emutls.debug_form_tls_address
18133 || !(dwarf_version >= 3 || !dwarf_strict))
18134 return 0;
18135 /* We stuffed the control variable into the DECL_VALUE_EXPR
18136 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18137 no longer appear in gimple code. We used the control
18138 	     variable specifically so that we could pick it up here.  */
18139 loc = DECL_VALUE_EXPR (loc);
18140 tls_op = DW_OP_form_tls_address;
18141 }
18142
18143 rtl = rtl_for_decl_location (loc);
18144 if (rtl == NULL_RTX)
18145 return 0;
18146
18147 if (!MEM_P (rtl))
18148 return 0;
18149 rtl = XEXP (rtl, 0);
18150 if (! CONSTANT_P (rtl))
18151 return 0;
18152
18153 ret = new_addr_loc_descr (rtl, dtprel);
18154 ret1 = new_loc_descr (tls_op, 0, 0);
18155 add_loc_descr (&ret, ret1);
18156
18157 have_address = 1;
18158 break;
18159 }
18160 /* FALLTHRU */
18161
18162 case PARM_DECL:
18163 if (context != NULL && context->dpi != NULL
18164 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18165 {
18166 /* We are generating code for a DWARF procedure and we want to access
18167 one of its arguments: find the appropriate argument offset and let
18168 the resolve_args_picking pass compute the offset that complies
18169 with the stack frame size. */
18170 unsigned i = 0;
18171 tree cursor;
18172
18173 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18174 cursor != NULL_TREE && cursor != loc;
18175 cursor = TREE_CHAIN (cursor), ++i)
18176 ;
18177 /* If we are translating a DWARF procedure, all referenced parameters
18178 must belong to the current function. */
18179 gcc_assert (cursor != NULL_TREE);
18180
18181 ret = new_loc_descr (DW_OP_pick, i, 0);
18182 ret->frame_offset_rel = 1;
18183 break;
18184 }
18185 /* FALLTHRU */
18186
18187 case RESULT_DECL:
18188 if (DECL_HAS_VALUE_EXPR_P (loc))
18189 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18190 want_address, context);
18191 /* FALLTHRU */
18192
18193 case FUNCTION_DECL:
18194 {
18195 rtx rtl;
18196 var_loc_list *loc_list = lookup_decl_loc (loc);
18197
18198 if (loc_list && loc_list->first)
18199 {
18200 list_ret = dw_loc_list (loc_list, loc, want_address);
18201 have_address = want_address != 0;
18202 break;
18203 }
18204 rtl = rtl_for_decl_location (loc);
18205 if (rtl == NULL_RTX)
18206 {
18207 if (TREE_CODE (loc) != FUNCTION_DECL
18208 && early_dwarf
18209 && current_function_decl
18210 && want_address != 1
18211 && ! DECL_IGNORED_P (loc)
18212 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18213 || POINTER_TYPE_P (TREE_TYPE (loc)))
18214 && DECL_CONTEXT (loc) == current_function_decl
18215 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18216 <= DWARF2_ADDR_SIZE))
18217 {
18218 dw_die_ref ref = lookup_decl_die (loc);
18219 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18220 if (ref)
18221 {
18222 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18223 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18224 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18225 }
18226 else
18227 {
18228 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18229 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18230 }
18231 break;
18232 }
18233 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18234 return 0;
18235 }
18236 else if (CONST_INT_P (rtl))
18237 {
18238 HOST_WIDE_INT val = INTVAL (rtl);
18239 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18240 val &= GET_MODE_MASK (DECL_MODE (loc));
18241 ret = int_loc_descriptor (val);
18242 }
18243 else if (GET_CODE (rtl) == CONST_STRING)
18244 {
18245 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18246 return 0;
18247 }
18248 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18249 ret = new_addr_loc_descr (rtl, dtprel_false);
18250 else
18251 {
18252 machine_mode mode, mem_mode;
18253
18254 /* Certain constructs can only be represented at top-level. */
18255 if (want_address == 2)
18256 {
18257 ret = loc_descriptor (rtl, VOIDmode,
18258 VAR_INIT_STATUS_INITIALIZED);
18259 have_address = 1;
18260 }
18261 else
18262 {
18263 mode = GET_MODE (rtl);
18264 mem_mode = VOIDmode;
18265 if (MEM_P (rtl))
18266 {
18267 mem_mode = mode;
18268 mode = get_address_mode (rtl);
18269 rtl = XEXP (rtl, 0);
18270 have_address = 1;
18271 }
18272 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18273 VAR_INIT_STATUS_INITIALIZED);
18274 }
18275 if (!ret)
18276 expansion_failed (loc, rtl,
18277 "failed to produce loc descriptor for rtl");
18278 }
18279 }
18280 break;
18281
18282 case MEM_REF:
18283 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18284 {
18285 have_address = 1;
18286 goto do_plus;
18287 }
18288 /* Fallthru. */
18289 case INDIRECT_REF:
18290 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18291 have_address = 1;
18292 break;
18293
18294 case TARGET_MEM_REF:
18295 case SSA_NAME:
18296 case DEBUG_EXPR_DECL:
18297 return NULL;
18298
18299 case COMPOUND_EXPR:
18300 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18301 context);
18302
18303 CASE_CONVERT:
18304 case VIEW_CONVERT_EXPR:
18305 case SAVE_EXPR:
18306 case MODIFY_EXPR:
18307 case NON_LVALUE_EXPR:
18308 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18309 context);
18310
18311 case COMPONENT_REF:
18312 case BIT_FIELD_REF:
18313 case ARRAY_REF:
18314 case ARRAY_RANGE_REF:
18315 case REALPART_EXPR:
18316 case IMAGPART_EXPR:
18317 {
18318 tree obj, offset;
18319 poly_int64 bitsize, bitpos, bytepos;
18320 machine_mode mode;
18321 int unsignedp, reversep, volatilep = 0;
18322
18323 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18324 &unsignedp, &reversep, &volatilep);
18325
18326 gcc_assert (obj != loc);
18327
18328 list_ret = loc_list_from_tree_1 (obj,
18329 want_address == 2
18330 && known_eq (bitpos, 0)
18331 && !offset ? 2 : 1,
18332 context);
18333 	/* TODO: We can extract the value of a small expression via shifting even
18334 	   for a nonzero bitpos.  */
18335 if (list_ret == 0)
18336 return 0;
18337 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18338 || !multiple_p (bitsize, BITS_PER_UNIT))
18339 {
18340 expansion_failed (loc, NULL_RTX,
18341 "bitfield access");
18342 return 0;
18343 }
18344
18345 if (offset != NULL_TREE)
18346 {
18347 /* Variable offset. */
18348 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18349 if (list_ret1 == 0)
18350 return 0;
18351 add_loc_list (&list_ret, list_ret1);
18352 if (!list_ret)
18353 return 0;
18354 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18355 }
18356
18357 HOST_WIDE_INT value;
18358 if (bytepos.is_constant (&value) && value > 0)
18359 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18360 value, 0));
18361 else if (maybe_ne (bytepos, 0))
18362 loc_list_plus_const (list_ret, bytepos);
18363
18364 have_address = 1;
18365 break;
18366 }
18367
18368 case INTEGER_CST:
18369 if ((want_address || !tree_fits_shwi_p (loc))
18370 && (ret = cst_pool_loc_descr (loc)))
18371 have_address = 1;
18372 else if (want_address == 2
18373 && tree_fits_shwi_p (loc)
18374 && (ret = address_of_int_loc_descriptor
18375 (int_size_in_bytes (TREE_TYPE (loc)),
18376 tree_to_shwi (loc))))
18377 have_address = 1;
18378 else if (tree_fits_shwi_p (loc))
18379 ret = int_loc_descriptor (tree_to_shwi (loc));
18380 else if (tree_fits_uhwi_p (loc))
18381 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18382 else
18383 {
18384 expansion_failed (loc, NULL_RTX,
18385 "Integer operand is not host integer");
18386 return 0;
18387 }
18388 break;
18389
18390 case CONSTRUCTOR:
18391 case REAL_CST:
18392 case STRING_CST:
18393 case COMPLEX_CST:
18394 if ((ret = cst_pool_loc_descr (loc)))
18395 have_address = 1;
18396 else if (TREE_CODE (loc) == CONSTRUCTOR)
18397 {
18398 tree type = TREE_TYPE (loc);
18399 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18400 unsigned HOST_WIDE_INT offset = 0;
18401 unsigned HOST_WIDE_INT cnt;
18402 constructor_elt *ce;
18403
18404 if (TREE_CODE (type) == RECORD_TYPE)
18405 {
18406 /* This is very limited, but it's enough to output
18407 pointers to member functions, as long as the
18408 referenced function is defined in the current
18409 translation unit. */
18410 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18411 {
18412 tree val = ce->value;
18413
18414 tree field = ce->index;
18415
18416 if (val)
18417 STRIP_NOPS (val);
18418
18419 if (!field || DECL_BIT_FIELD (field))
18420 {
18421 expansion_failed (loc, NULL_RTX,
18422 "bitfield in record type constructor");
18423 size = offset = (unsigned HOST_WIDE_INT)-1;
18424 ret = NULL;
18425 break;
18426 }
18427
18428 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18429 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18430 gcc_assert (pos + fieldsize <= size);
18431 if (pos < offset)
18432 {
18433 expansion_failed (loc, NULL_RTX,
18434 "out-of-order fields in record constructor");
18435 size = offset = (unsigned HOST_WIDE_INT)-1;
18436 ret = NULL;
18437 break;
18438 }
18439 if (pos > offset)
18440 {
18441 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18442 add_loc_descr (&ret, ret1);
18443 offset = pos;
18444 }
18445 if (val && fieldsize != 0)
18446 {
18447 ret1 = loc_descriptor_from_tree (val, want_address, context);
18448 if (!ret1)
18449 {
18450 expansion_failed (loc, NULL_RTX,
18451 "unsupported expression in field");
18452 size = offset = (unsigned HOST_WIDE_INT)-1;
18453 ret = NULL;
18454 break;
18455 }
18456 add_loc_descr (&ret, ret1);
18457 }
18458 if (fieldsize)
18459 {
18460 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18461 add_loc_descr (&ret, ret1);
18462 offset = pos + fieldsize;
18463 }
18464 }
18465
18466 if (offset != size)
18467 {
18468 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18469 add_loc_descr (&ret, ret1);
18470 offset = size;
18471 }
18472
18473 have_address = !!want_address;
18474 }
18475 else
18476 expansion_failed (loc, NULL_RTX,
18477 "constructor of non-record type");
18478 }
18479 else
18480 /* We can construct small constants here using int_loc_descriptor. */
18481 expansion_failed (loc, NULL_RTX,
18482 "constructor or constant not in constant pool");
18483 break;
18484
18485 case TRUTH_AND_EXPR:
18486 case TRUTH_ANDIF_EXPR:
18487 case BIT_AND_EXPR:
18488 op = DW_OP_and;
18489 goto do_binop;
18490
18491 case TRUTH_XOR_EXPR:
18492 case BIT_XOR_EXPR:
18493 op = DW_OP_xor;
18494 goto do_binop;
18495
18496 case TRUTH_OR_EXPR:
18497 case TRUTH_ORIF_EXPR:
18498 case BIT_IOR_EXPR:
18499 op = DW_OP_or;
18500 goto do_binop;
18501
18502 case FLOOR_DIV_EXPR:
18503 case CEIL_DIV_EXPR:
18504 case ROUND_DIV_EXPR:
18505 case TRUNC_DIV_EXPR:
18506 case EXACT_DIV_EXPR:
18507 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18508 return 0;
18509 op = DW_OP_div;
18510 goto do_binop;
18511
18512 case MINUS_EXPR:
18513 op = DW_OP_minus;
18514 goto do_binop;
18515
18516 case FLOOR_MOD_EXPR:
18517 case CEIL_MOD_EXPR:
18518 case ROUND_MOD_EXPR:
18519 case TRUNC_MOD_EXPR:
18520 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18521 {
18522 op = DW_OP_mod;
18523 goto do_binop;
18524 }
18525 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18526 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18527 if (list_ret == 0 || list_ret1 == 0)
18528 return 0;
18529
18530 add_loc_list (&list_ret, list_ret1);
18531 if (list_ret == 0)
18532 return 0;
18533 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18534 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18535 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18536 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18537 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18538 break;
18539
18540 case MULT_EXPR:
18541 op = DW_OP_mul;
18542 goto do_binop;
18543
18544 case LSHIFT_EXPR:
18545 op = DW_OP_shl;
18546 goto do_binop;
18547
18548 case RSHIFT_EXPR:
18549 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18550 goto do_binop;
18551
18552 case POINTER_PLUS_EXPR:
18553 case PLUS_EXPR:
18554 do_plus:
18555 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18556 {
18557 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18558 smarter to encode their opposite. The DW_OP_plus_uconst operation
18559 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18560 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18561 bytes, Y being the size of the operation that pushes the opposite
18562 of the addend. So let's choose the smallest representation. */
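	  /* For example (illustrative sizes): an addend of -1 on a 64-bit
	     target needs a 10-byte ULEB128 operand for DW_OP_plus_uconst
	     (11 bytes in total), whereas "DW_OP_lit1; DW_OP_minus" takes
	     only 2 bytes.  */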
18563 const tree tree_addend = TREE_OPERAND (loc, 1);
18564 offset_int wi_addend;
18565 HOST_WIDE_INT shwi_addend;
18566 dw_loc_descr_ref loc_naddend;
18567
18568 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18569 if (list_ret == 0)
18570 return 0;
18571
18572 /* Try to get the literal to push. It is the opposite of the addend,
18573 so as we rely on wrapping during DWARF evaluation, first decode
18574 the literal as a "DWARF-sized" signed number. */
18575 wi_addend = wi::to_offset (tree_addend);
18576 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18577 shwi_addend = wi_addend.to_shwi ();
18578 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18579 ? int_loc_descriptor (-shwi_addend)
18580 : NULL;
18581
18582 if (loc_naddend != NULL
18583 && ((unsigned) size_of_uleb128 (shwi_addend)
18584 > size_of_loc_descr (loc_naddend)))
18585 {
18586 add_loc_descr_to_each (list_ret, loc_naddend);
18587 add_loc_descr_to_each (list_ret,
18588 new_loc_descr (DW_OP_minus, 0, 0));
18589 }
18590 else
18591 {
18592 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18593 {
18594 loc_naddend = loc_cur;
18595 loc_cur = loc_cur->dw_loc_next;
18596 ggc_free (loc_naddend);
18597 }
18598 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18599 }
18600 break;
18601 }
18602
18603 op = DW_OP_plus;
18604 goto do_binop;
18605
18606 case LE_EXPR:
18607 op = DW_OP_le;
18608 goto do_comp_binop;
18609
18610 case GE_EXPR:
18611 op = DW_OP_ge;
18612 goto do_comp_binop;
18613
18614 case LT_EXPR:
18615 op = DW_OP_lt;
18616 goto do_comp_binop;
18617
18618 case GT_EXPR:
18619 op = DW_OP_gt;
18620 goto do_comp_binop;
18621
18622 do_comp_binop:
18623 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18624 {
18625 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18626 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18627 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18628 TREE_CODE (loc));
18629 break;
18630 }
18631 else
18632 goto do_binop;
18633
18634 case EQ_EXPR:
18635 op = DW_OP_eq;
18636 goto do_binop;
18637
18638 case NE_EXPR:
18639 op = DW_OP_ne;
18640 goto do_binop;
18641
18642 do_binop:
18643 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18644 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18645 if (list_ret == 0 || list_ret1 == 0)
18646 return 0;
18647
18648 add_loc_list (&list_ret, list_ret1);
18649 if (list_ret == 0)
18650 return 0;
18651 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18652 break;
18653
18654 case TRUTH_NOT_EXPR:
18655 case BIT_NOT_EXPR:
18656 op = DW_OP_not;
18657 goto do_unop;
18658
18659 case ABS_EXPR:
18660 op = DW_OP_abs;
18661 goto do_unop;
18662
18663 case NEGATE_EXPR:
18664 op = DW_OP_neg;
18665 goto do_unop;
18666
18667 do_unop:
18668 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18669 if (list_ret == 0)
18670 return 0;
18671
18672 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18673 break;
18674
18675 case MIN_EXPR:
18676 case MAX_EXPR:
18677 {
18678 const enum tree_code code =
18679 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18680
18681 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18682 build2 (code, integer_type_node,
18683 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18684 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18685 }
18686
18687 /* fall through */
18688
18689 case COND_EXPR:
18690 {
18691 dw_loc_descr_ref lhs
18692 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18693 dw_loc_list_ref rhs
18694 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18695 dw_loc_descr_ref bra_node, jump_node, tmp;
18696
18697 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18698 if (list_ret == 0 || lhs == 0 || rhs == 0)
18699 return 0;
18700
18701 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18702 add_loc_descr_to_each (list_ret, bra_node);
18703
18704 add_loc_list (&list_ret, rhs);
18705 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18706 add_loc_descr_to_each (list_ret, jump_node);
18707
18708 add_loc_descr_to_each (list_ret, lhs);
18709 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18710 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18711
18712 /* ??? Need a node to point the skip at. Use a nop. */
18713 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18714 add_loc_descr_to_each (list_ret, tmp);
18715 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18716 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18717 }
18718 break;
18719
18720 case FIX_TRUNC_EXPR:
18721 return 0;
18722
18723 default:
18724 /* Leave front-end specific codes as simply unknown. This comes
18725 up, for instance, with the C STMT_EXPR. */
18726 if ((unsigned int) TREE_CODE (loc)
18727 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18728 {
18729 expansion_failed (loc, NULL_RTX,
18730 "language specific tree node");
18731 return 0;
18732 }
18733
18734       /* Otherwise this is a generic code; we should just list all of
18735 these explicitly. We forgot one. */
18736 if (flag_checking)
18737 gcc_unreachable ();
18738
18739 /* In a release build, we want to degrade gracefully: better to
18740 generate incomplete debugging information than to crash. */
18741 return NULL;
18742 }
18743
18744 if (!ret && !list_ret)
18745 return 0;
18746
18747 if (want_address == 2 && !have_address
18748 && (dwarf_version >= 4 || !dwarf_strict))
18749 {
18750 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18751 {
18752 expansion_failed (loc, NULL_RTX,
18753 "DWARF address size mismatch");
18754 return 0;
18755 }
18756 if (ret)
18757 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18758 else
18759 add_loc_descr_to_each (list_ret,
18760 new_loc_descr (DW_OP_stack_value, 0, 0));
18761 have_address = 1;
18762 }
18763 /* Show if we can't fill the request for an address. */
18764 if (want_address && !have_address)
18765 {
18766 expansion_failed (loc, NULL_RTX,
18767 "Want address and only have value");
18768 return 0;
18769 }
18770
18771 gcc_assert (!ret || !list_ret);
18772
18773 /* If we've got an address and don't want one, dereference. */
18774 if (!want_address && have_address)
18775 {
18776 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18777
18778 if (size > DWARF2_ADDR_SIZE || size == -1)
18779 {
18780 expansion_failed (loc, NULL_RTX,
18781 "DWARF address size mismatch");
18782 return 0;
18783 }
18784 else if (size == DWARF2_ADDR_SIZE)
18785 op = DW_OP_deref;
18786 else
18787 op = DW_OP_deref_size;
18788
18789 if (ret)
18790 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18791 else
18792 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18793 }
18794 if (ret)
18795 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18796
18797 return list_ret;
18798 }
18799
18800 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18801 expressions. */
18802
18803 static dw_loc_list_ref
18804 loc_list_from_tree (tree loc, int want_address,
18805 struct loc_descr_context *context)
18806 {
18807 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18808
18809 for (dw_loc_list_ref loc_cur = result;
18810 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18811 loc_descr_without_nops (loc_cur->expr);
18812 return result;
18813 }
18814
18815 /* Same as above but return only a single location expression.  */
18816 static dw_loc_descr_ref
18817 loc_descriptor_from_tree (tree loc, int want_address,
18818 struct loc_descr_context *context)
18819 {
18820 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18821 if (!ret)
18822 return NULL;
18823 if (ret->dw_loc_next)
18824 {
18825 expansion_failed (loc, NULL_RTX,
18826 "Location list where only loc descriptor needed");
18827 return NULL;
18828 }
18829 return ret->expr;
18830 }
18831
18832 /* Given a value, round it up to the lowest multiple of `boundary'
18833 which is not less than the value itself. */
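/* For example, ceiling (5, 4) == 8 and ceiling (8, 4) == 8.  */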
18834
18835 static inline HOST_WIDE_INT
18836 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18837 {
18838 return (((value + boundary - 1) / boundary) * boundary);
18839 }
18840
18841 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18842 pointer to the declared type for the relevant field variable, or return
18843 `integer_type_node' if the given node turns out to be an
18844 ERROR_MARK node. */
18845
18846 static inline tree
18847 field_type (const_tree decl)
18848 {
18849 tree type;
18850
18851 if (TREE_CODE (decl) == ERROR_MARK)
18852 return integer_type_node;
18853
18854 type = DECL_BIT_FIELD_TYPE (decl);
18855 if (type == NULL_TREE)
18856 type = TREE_TYPE (decl);
18857
18858 return type;
18859 }
18860
18861 /* Given a pointer to a tree node, return the alignment in bits for
18862 it, or else return BITS_PER_WORD if the node actually turns out to
18863 be an ERROR_MARK node. */
18864
18865 static inline unsigned
18866 simple_type_align_in_bits (const_tree type)
18867 {
18868 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18869 }
18870
18871 static inline unsigned
18872 simple_decl_align_in_bits (const_tree decl)
18873 {
18874 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18875 }
18876
18877 /* Return the result of rounding T up to ALIGN. */
18878
18879 static inline offset_int
18880 round_up_to_align (const offset_int &t, unsigned int align)
18881 {
18882 return wi::udiv_trunc (t + align - 1, align) * align;
18883 }
18884
18885 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18886 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18887 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18888 if we fail to return the size in one of these two forms. */
18889
18890 static dw_loc_descr_ref
18891 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18892 {
18893 tree tree_size;
18894 struct loc_descr_context ctx;
18895
18896   /* Prefer to return a constant integer, if possible.  */
18897 *cst_size = int_size_in_bytes (type);
18898 if (*cst_size != -1)
18899 return NULL;
18900
18901 ctx.context_type = const_cast<tree> (type);
18902 ctx.base_decl = NULL_TREE;
18903 ctx.dpi = NULL;
18904 ctx.placeholder_arg = false;
18905 ctx.placeholder_seen = false;
18906
18907 type = TYPE_MAIN_VARIANT (type);
18908 tree_size = TYPE_SIZE_UNIT (type);
18909 return ((tree_size != NULL_TREE)
18910 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18911 : NULL);
18912 }
18913
18914 /* Helper structure for RECORD_TYPE processing. */
18915 struct vlr_context
18916 {
18917 /* Root RECORD_TYPE. It is needed to generate data member location
18918 descriptions in variable-length records (VLR), but also to cope with
18919 variants, which are composed of nested structures multiplexed with
18920 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18921 function processing a FIELD_DECL, it is required to be non null. */
18922 tree struct_type;
18923 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18924 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18925 this variant part as part of the root record (in storage units). For
18926 regular records, it must be NULL_TREE. */
18927 tree variant_part_offset;
18928 };
18929
18930 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18931 addressed byte of the "containing object" for the given FIELD_DECL. If
18932 possible, return a native constant through CST_OFFSET (in which case NULL is
18933 returned); otherwise return a DWARF expression that computes the offset.
18934
18935 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18936 that offset is, either because the argument turns out to be a pointer to an
18937 ERROR_MARK node, or because the offset expression is too complex for us.
18938
18939 CTX is required: see the comment for VLR_CONTEXT. */
18940
18941 static dw_loc_descr_ref
18942 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18943 HOST_WIDE_INT *cst_offset)
18944 {
18945 tree tree_result;
18946 dw_loc_list_ref loc_result;
18947
18948 *cst_offset = 0;
18949
18950 if (TREE_CODE (decl) == ERROR_MARK)
18951 return NULL;
18952 else
18953 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18954
18955   /* We cannot handle variable bit offsets at the moment, so abort if that is
18956      the case.  */
18957 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18958 return NULL;
18959
18960 #ifdef PCC_BITFIELD_TYPE_MATTERS
18961   /* We used to handle only constant offsets in all cases.  Now, we properly
18962      handle dynamic byte offsets only when PCC bitfield type doesn't
18963      matter.  */
18964 if (PCC_BITFIELD_TYPE_MATTERS
18965 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18966 {
18967 offset_int object_offset_in_bits;
18968 offset_int object_offset_in_bytes;
18969 offset_int bitpos_int;
18970 tree type;
18971 tree field_size_tree;
18972 offset_int deepest_bitpos;
18973 offset_int field_size_in_bits;
18974 unsigned int type_align_in_bits;
18975 unsigned int decl_align_in_bits;
18976 offset_int type_size_in_bits;
18977
18978 bitpos_int = wi::to_offset (bit_position (decl));
18979 type = field_type (decl);
18980 type_size_in_bits = offset_int_type_size_in_bits (type);
18981 type_align_in_bits = simple_type_align_in_bits (type);
18982
18983 field_size_tree = DECL_SIZE (decl);
18984
18985 /* The size could be unspecified if there was an error, or for
18986 a flexible array member. */
18987 if (!field_size_tree)
18988 field_size_tree = bitsize_zero_node;
18989
18990 /* If the size of the field is not constant, use the type size. */
18991 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18992 field_size_in_bits = wi::to_offset (field_size_tree);
18993 else
18994 field_size_in_bits = type_size_in_bits;
18995
18996 decl_align_in_bits = simple_decl_align_in_bits (decl);
18997
18998 /* The GCC front-end doesn't make any attempt to keep track of the
18999 starting bit offset (relative to the start of the containing
19000 structure type) of the hypothetical "containing object" for a
19001 bit-field. Thus, when computing the byte offset value for the
19002 start of the "containing object" of a bit-field, we must deduce
19003 this information on our own. This can be rather tricky to do in
19004 some cases. For example, handling the following structure type
19005 definition when compiling for an i386/i486 target (which only
19006 aligns long long's to 32-bit boundaries) can be very tricky:
19007
19008 struct S { int field1; long long field2:31; };
19009
19010 Fortunately, there is a simple rule-of-thumb which can be used
19011 in such cases. When compiling for an i386/i486, GCC will
19012 allocate 8 bytes for the structure shown above. It decides to
19013 do this based upon one simple rule for bit-field allocation.
19014 GCC allocates each "containing object" for each bit-field at
19015 the first (i.e. lowest addressed) legitimate alignment boundary
19016 (based upon the required minimum alignment for the declared
19017 type of the field) which it can possibly use, subject to the
19018 condition that there is still enough available space remaining
19019 in the containing object (when allocated at the selected point)
19020 to fully accommodate all of the bits of the bit-field itself.
19021
19022 This simple rule makes it obvious why GCC allocates 8 bytes for
19023 each object of the structure type shown above. When looking
19024 for a place to allocate the "containing object" for `field2',
19025 the compiler simply tries to allocate a 64-bit "containing
19026 object" at each successive 32-bit boundary (starting at zero)
19027 	 until it finds a place to allocate that 64-bit field such that
19028 at least 31 contiguous (and previously unallocated) bits remain
19029 within that selected 64 bit field. (As it turns out, for the
19030 example above, the compiler finds it is OK to allocate the
19031 "containing object" 64-bit field at bit-offset zero within the
19032 structure type.)
19033
19034 Here we attempt to work backwards from the limited set of facts
19035 we're given, and we try to deduce from those facts, where GCC
19036 must have believed that the containing object started (within
19037 the structure type). The value we deduce is then used (by the
19038 callers of this routine) to generate DW_AT_location and
19039 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19040 the case of DW_AT_location, regular fields as well). */
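      /* Walking through the "struct S" example above with illustrative
	 numbers (i386-style layout assumed): field2 has bitpos_int == 32,
	 field_size_in_bits == 31 and type_size_in_bits == 64, so
	 deepest_bitpos == 63 and object_offset_in_bits == 63 - 64 == -1,
	 which the rounding below brings back to 0.  The "containing object"
	 therefore starts at byte offset 0, as described above.  */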
19041
19042 /* Figure out the bit-distance from the start of the structure to
19043 the "deepest" bit of the bit-field. */
19044 deepest_bitpos = bitpos_int + field_size_in_bits;
19045
19046 /* This is the tricky part. Use some fancy footwork to deduce
19047 where the lowest addressed bit of the containing object must
19048 be. */
19049 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19050
19051 /* Round up to type_align by default. This works best for
19052 bitfields. */
19053 object_offset_in_bits
19054 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19055
19056 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19057 {
19058 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19059
19060 /* Round up to decl_align instead. */
19061 object_offset_in_bits
19062 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19063 }
19064
19065 object_offset_in_bytes
19066 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19067 if (ctx->variant_part_offset == NULL_TREE)
19068 {
19069 *cst_offset = object_offset_in_bytes.to_shwi ();
19070 return NULL;
19071 }
19072 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19073 }
19074 else
19075 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19076 tree_result = byte_position (decl);
19077
19078 if (ctx->variant_part_offset != NULL_TREE)
19079 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19080 ctx->variant_part_offset, tree_result);
19081
19082   /* If the byte offset is a constant, it's simpler to handle a native
19083 constant rather than a DWARF expression. */
19084 if (TREE_CODE (tree_result) == INTEGER_CST)
19085 {
19086 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19087 return NULL;
19088 }
19089 struct loc_descr_context loc_ctx = {
19090 ctx->struct_type, /* context_type */
19091 NULL_TREE, /* base_decl */
19092 NULL, /* dpi */
19093 false, /* placeholder_arg */
19094 false /* placeholder_seen */
19095 };
19096 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19097
19098 /* We want a DWARF expression: abort if we only have a location list with
19099 multiple elements. */
19100 if (!loc_result || !single_element_loc_list_p (loc_result))
19101 return NULL;
19102 else
19103 return loc_result->expr;
19104 }
19105 \f
19106 /* The following routines define various Dwarf attributes and any data
19107 associated with them. */
19108
19109 /* Add a location description attribute value to a DIE.
19110
19111 This emits location attributes suitable for whole variables and
19112 whole parameters. Note that the location attributes for struct fields are
19113 generated by the routine `data_member_location_attribute' below. */
19114
19115 static inline void
19116 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19117 dw_loc_list_ref descr)
19118 {
19119 bool check_no_locviews = true;
19120 if (descr == 0)
19121 return;
19122 if (single_element_loc_list_p (descr))
19123 add_AT_loc (die, attr_kind, descr->expr);
19124 else
19125 {
19126 add_AT_loc_list (die, attr_kind, descr);
19127 gcc_assert (descr->ll_symbol);
19128 if (attr_kind == DW_AT_location && descr->vl_symbol
19129 && dwarf2out_locviews_in_attribute ())
19130 {
19131 add_AT_view_list (die, DW_AT_GNU_locviews);
19132 check_no_locviews = false;
19133 }
19134 }
19135
19136 if (check_no_locviews)
19137 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19138 }
19139
19140 /* Add DW_AT_accessibility attribute to DIE if needed. */
19141
19142 static void
19143 add_accessibility_attribute (dw_die_ref die, tree decl)
19144 {
19145 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19146 children, otherwise the default is DW_ACCESS_public. In DWARF2
19147 the default has always been DW_ACCESS_public. */
19148 if (TREE_PROTECTED (decl))
19149 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19150 else if (TREE_PRIVATE (decl))
19151 {
19152 if (dwarf_version == 2
19153 || die->die_parent == NULL
19154 || die->die_parent->die_tag != DW_TAG_class_type)
19155 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19156 }
19157 else if (dwarf_version > 2
19158 && die->die_parent
19159 && die->die_parent->die_tag == DW_TAG_class_type)
19160 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19161 }
19162
19163 /* Attach the specialized form of location attribute used for data members of
19164 struct and union types. In the special case of a FIELD_DECL node which
19165 represents a bit-field, the "offset" part of this special location
19166 descriptor must indicate the distance in bytes from the lowest-addressed
19167 byte of the containing struct or union type to the lowest-addressed byte of
19168 the "containing object" for the bit-field. (See the `field_byte_offset'
19169 function above).
19170
19171 For any given bit-field, the "containing object" is a hypothetical object
19172 (of some integral or enum type) within which the given bit-field lives. The
19173 type of this hypothetical "containing object" is always the same as the
19174 declared type of the individual bit-field itself (for GCC anyway... the
19175 DWARF spec doesn't actually mandate this). Note that it is the size (in
19176 bytes) of the hypothetical "containing object" which will be given in the
19177 DW_AT_byte_size attribute for this bit-field. (See the
19178 `byte_size_attribute' function below.) It is also used when calculating the
19179 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19180 function below.)
19181
19182 CTX is required: see the comment for VLR_CONTEXT. */
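/* Illustration (not part of the original sources): for a declaration such as

struct s { unsigned int f : 3; };

the "containing object" for F is a hypothetical unsigned int, so the
DW_AT_byte_size emitted for F is the size of unsigned int (typically 4),
and the data member location computed below gives the byte offset of that
hypothetical object within struct s. Exact sizes and offsets depend on the
target ABI. */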
19183
19184 static void
19185 add_data_member_location_attribute (dw_die_ref die,
19186 tree decl,
19187 struct vlr_context *ctx)
19188 {
19189 HOST_WIDE_INT offset;
19190 dw_loc_descr_ref loc_descr = 0;
19191
19192 if (TREE_CODE (decl) == TREE_BINFO)
19193 {
19194 /* We're working on the TAG_inheritance for a base class. */
19195 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19196 {
19197 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19198 aren't at a fixed offset from all (sub)objects of the same
19199 type. We need to extract the appropriate offset from our
19200 vtable. The following dwarf expression means
19201
19202 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19203
19204 This is specific to the V3 ABI, of course. */
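/* Illustration (not part of the original sources): the expression built
below is, in DWARF operator form, roughly

DW_OP_dup; DW_OP_deref; DW_OP_constu <-vptr-field-offset>;
DW_OP_minus; DW_OP_deref; DW_OP_plus

although int_loc_descriptor may pick a shorter literal form such as
DW_OP_lit<n> for small constants. */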
19205
19206 dw_loc_descr_ref tmp;
19207
19208 /* Make a copy of the object address. */
19209 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19210 add_loc_descr (&loc_descr, tmp);
19211
19212 /* Extract the vtable address. */
19213 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19214 add_loc_descr (&loc_descr, tmp);
19215
19216 /* Calculate the address of the offset. */
19217 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19218 gcc_assert (offset < 0);
19219
19220 tmp = int_loc_descriptor (-offset);
19221 add_loc_descr (&loc_descr, tmp);
19222 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19223 add_loc_descr (&loc_descr, tmp);
19224
19225 /* Extract the offset. */
19226 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19227 add_loc_descr (&loc_descr, tmp);
19228
19229 /* Add it to the object address. */
19230 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19231 add_loc_descr (&loc_descr, tmp);
19232 }
19233 else
19234 offset = tree_to_shwi (BINFO_OFFSET (decl));
19235 }
19236 else
19237 {
19238 loc_descr = field_byte_offset (decl, ctx, &offset);
19239
19240 /* If loc_descr is available then we know the field offset is dynamic.
19241 However, GDB does not handle dynamic field offsets very well at the
19242 moment. */
19243 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19244 {
19245 loc_descr = NULL;
19246 offset = 0;
19247 }
19248
19249 /* Data member location evaluation starts with the base address on the
19250 stack. Compute the field offset and add it to this base address. */
19251 else if (loc_descr != NULL)
19252 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19253 }
19254
19255 if (! loc_descr)
19256 {
19257 /* Although DW_AT_data_bit_offset was already added in DWARF 4,
19258 GDB, for example, only gained support for it in November 2016. For
19259 DWARF 5 we need newer debug info consumers anyway. We might change
19260 this to dwarf_version >= 4 once most consumers have caught up. */
19261 if (dwarf_version >= 5
19262 && TREE_CODE (decl) == FIELD_DECL
19263 && DECL_BIT_FIELD_TYPE (decl))
19264 {
19265 tree off = bit_position (decl);
19266 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19267 {
19268 remove_AT (die, DW_AT_byte_size);
19269 remove_AT (die, DW_AT_bit_offset);
19270 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19271 return;
19272 }
19273 }
19274 if (dwarf_version > 2)
19275 {
19276 /* Don't need to output a location expression, just the constant. */
19277 if (offset < 0)
19278 add_AT_int (die, DW_AT_data_member_location, offset);
19279 else
19280 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19281 return;
19282 }
19283 else
19284 {
19285 enum dwarf_location_atom op;
19286
19287 /* The DWARF2 standard says that we should assume that the structure
19288 address is already on the stack, so we can specify a structure
19289 field address by using DW_OP_plus_uconst. */
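/* For example (illustration, not part of the original sources), a field
at byte offset 8 is described simply as DW_OP_plus_uconst 8, evaluated
with the structure's base address already on the expression stack. */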
19290 op = DW_OP_plus_uconst;
19291 loc_descr = new_loc_descr (op, offset, 0);
19292 }
19293 }
19294
19295 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19296 }
19297
19298 /* Writes integer values to dw_vec_const array. */
19299
19300 static void
19301 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19302 {
19303 while (size != 0)
19304 {
19305 *dest++ = val & 0xff;
19306 val >>= 8;
19307 --size;
19308 }
19309 }
19310
19311 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19312
19313 static HOST_WIDE_INT
19314 extract_int (const unsigned char *src, unsigned int size)
19315 {
19316 HOST_WIDE_INT val = 0;
19317
19318 src += size;
19319 while (size != 0)
19320 {
19321 val <<= 8;
19322 val |= *--src & 0xff;
19323 --size;
19324 }
19325 return val;
19326 }
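/* Illustration (not part of the original sources): insert_int always stores
the low-order byte first, so insert_int (0x1234, 2, buf) yields
buf[0] == 0x34 and buf[1] == 0x12, and extract_int (buf, 2) recovers
0x1234. */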
19327
19328 /* Writes wide_int values to dw_vec_const array. */
19329
19330 static void
19331 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19332 {
19333 int i;
19334
19335 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19336 {
19337 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19338 return;
19339 }
19340
19341 /* We'd have to extend this code to support odd sizes. */
19342 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19343
19344 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19345
19346 if (WORDS_BIG_ENDIAN)
19347 for (i = n - 1; i >= 0; i--)
19348 {
19349 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19350 dest += sizeof (HOST_WIDE_INT);
19351 }
19352 else
19353 for (i = 0; i < n; i++)
19354 {
19355 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19356 dest += sizeof (HOST_WIDE_INT);
19357 }
19358 }
19359
19360 /* Writes floating point values to dw_vec_const array. */
19361
19362 static void
19363 insert_float (const_rtx rtl, unsigned char *array)
19364 {
19365 long val[4];
19366 int i;
19367 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19368
19369 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19370
19371 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19372 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19373 {
19374 insert_int (val[i], 4, array);
19375 array += 4;
19376 }
19377 }
19378
19379 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19380 does not have a "location" either in memory or in a register. These
19381 things can arise in GNU C when a constant is passed as an actual parameter
19382 to an inlined function. They can also arise in C++ where declared
19383 constants do not necessarily get memory "homes". */
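/* Illustration (not part of the original sources): if a call such as

static inline int f (int x) { return x; }
... f (42) ...

is inlined, the inlined copy of X may have no runtime location at all; its
DIE can then carry DW_AT_const_value 42 via the CONST_INT case below. */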
19384
19385 static bool
19386 add_const_value_attribute (dw_die_ref die, rtx rtl)
19387 {
19388 switch (GET_CODE (rtl))
19389 {
19390 case CONST_INT:
19391 {
19392 HOST_WIDE_INT val = INTVAL (rtl);
19393
19394 if (val < 0)
19395 add_AT_int (die, DW_AT_const_value, val);
19396 else
19397 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19398 }
19399 return true;
19400
19401 case CONST_WIDE_INT:
19402 {
19403 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19404 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19405 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19406 wide_int w = wi::zext (w1, prec);
19407 add_AT_wide (die, DW_AT_const_value, w);
19408 }
19409 return true;
19410
19411 case CONST_DOUBLE:
19412 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19413 floating-point constant. A CONST_DOUBLE is used whenever the
19414 constant requires more than one word in order to be adequately
19415 represented. */
19416 if (TARGET_SUPPORTS_WIDE_INT == 0
19417 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19418 add_AT_double (die, DW_AT_const_value,
19419 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19420 else
19421 {
19422 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19423 unsigned int length = GET_MODE_SIZE (mode);
19424 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19425
19426 insert_float (rtl, array);
19427 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19428 }
19429 return true;
19430
19431 case CONST_VECTOR:
19432 {
19433 unsigned int length;
19434 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19435 return false;
19436
19437 machine_mode mode = GET_MODE (rtl);
19438 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19439 unsigned char *array
19440 = ggc_vec_alloc<unsigned char> (length * elt_size);
19441 unsigned int i;
19442 unsigned char *p;
19443 machine_mode imode = GET_MODE_INNER (mode);
19444
19445 switch (GET_MODE_CLASS (mode))
19446 {
19447 case MODE_VECTOR_INT:
19448 for (i = 0, p = array; i < length; i++, p += elt_size)
19449 {
19450 rtx elt = CONST_VECTOR_ELT (rtl, i);
19451 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19452 }
19453 break;
19454
19455 case MODE_VECTOR_FLOAT:
19456 for (i = 0, p = array; i < length; i++, p += elt_size)
19457 {
19458 rtx elt = CONST_VECTOR_ELT (rtl, i);
19459 insert_float (elt, p);
19460 }
19461 break;
19462
19463 default:
19464 gcc_unreachable ();
19465 }
19466
19467 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19468 }
19469 return true;
19470
19471 case CONST_STRING:
19472 if (dwarf_version >= 4 || !dwarf_strict)
19473 {
19474 dw_loc_descr_ref loc_result;
19475 resolve_one_addr (&rtl);
19476 rtl_addr:
19477 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19478 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19479 add_AT_loc (die, DW_AT_location, loc_result);
19480 vec_safe_push (used_rtx_array, rtl);
19481 return true;
19482 }
19483 return false;
19484
19485 case CONST:
19486 if (CONSTANT_P (XEXP (rtl, 0)))
19487 return add_const_value_attribute (die, XEXP (rtl, 0));
19488 /* FALLTHROUGH */
19489 case SYMBOL_REF:
19490 if (!const_ok_for_output (rtl))
19491 return false;
19492 /* FALLTHROUGH */
19493 case LABEL_REF:
19494 if (dwarf_version >= 4 || !dwarf_strict)
19495 goto rtl_addr;
19496 return false;
19497
19498 case PLUS:
19499 /* In cases where an inlined instance of an inline function is passed
19500 the address of an `auto' variable (which is local to the caller) we
19501 can get a situation where the DECL_RTL of the artificial local
19502 variable (for the inlining) which acts as a stand-in for the
19503 corresponding formal parameter (of the inline function) will look
19504 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19505 exactly a compile-time constant expression, but it isn't the address
19506 of the (artificial) local variable either. Rather, it represents the
19507 *value* which the artificial local variable always has during its
19508 lifetime. We currently have no way to represent such quasi-constant
19509 values in Dwarf, so for now we just punt and generate nothing. */
19510 return false;
19511
19512 case HIGH:
19513 case CONST_FIXED:
19514 return false;
19515
19516 case MEM:
19517 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19518 && MEM_READONLY_P (rtl)
19519 && GET_MODE (rtl) == BLKmode)
19520 {
19521 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19522 return true;
19523 }
19524 return false;
19525
19526 default:
19527 /* No other kinds of rtx should be possible here. */
19528 gcc_unreachable ();
19529 }
19530 return false;
19531 }
19532
19533 /* Determine whether the evaluation of EXPR references any variables
19534 or functions which aren't otherwise used (and therefore may not be
19535 output). */
19536 static tree
19537 reference_to_unused (tree * tp, int * walk_subtrees,
19538 void * data ATTRIBUTE_UNUSED)
19539 {
19540 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19541 *walk_subtrees = 0;
19542
19543 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19544 && ! TREE_ASM_WRITTEN (*tp))
19545 return *tp;
19546 /* ??? The C++ FE emits debug information for using decls, so
19547 putting gcc_unreachable here falls over. See PR31899. For now
19548 be conservative. */
19549 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19550 return *tp;
19551 else if (VAR_P (*tp))
19552 {
19553 varpool_node *node = varpool_node::get (*tp);
19554 if (!node || !node->definition)
19555 return *tp;
19556 }
19557 else if (TREE_CODE (*tp) == FUNCTION_DECL
19558 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19559 {
19560 /* The call graph machinery must have finished analyzing,
19561 optimizing and gimplifying the CU by now.
19562 So if *TP has no call graph node associated
19563 to it, it means *TP will not be emitted. */
19564 if (!cgraph_node::get (*tp))
19565 return *tp;
19566 }
19567 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19568 return *tp;
19569
19570 return NULL_TREE;
19571 }
19572
19573 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19574 for use in a later add_const_value_attribute call. */
19575
19576 static rtx
19577 rtl_for_decl_init (tree init, tree type)
19578 {
19579 rtx rtl = NULL_RTX;
19580
19581 STRIP_NOPS (init);
19582
19583 /* If a variable is initialized with a string constant without embedded
19584 zeros, build CONST_STRING. */
19585 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19586 {
19587 tree enttype = TREE_TYPE (type);
19588 tree domain = TYPE_DOMAIN (type);
19589 scalar_int_mode mode;
19590
19591 if (is_int_mode (TYPE_MODE (enttype), &mode)
19592 && GET_MODE_SIZE (mode) == 1
19593 && domain
19594 && integer_zerop (TYPE_MIN_VALUE (domain))
19595 && compare_tree_int (TYPE_MAX_VALUE (domain),
19596 TREE_STRING_LENGTH (init) - 1) == 0
19597 && ((size_t) TREE_STRING_LENGTH (init)
19598 == strlen (TREE_STRING_POINTER (init)) + 1))
19599 {
19600 rtl = gen_rtx_CONST_STRING (VOIDmode,
19601 ggc_strdup (TREE_STRING_POINTER (init)));
19602 rtl = gen_rtx_MEM (BLKmode, rtl);
19603 MEM_READONLY_P (rtl) = 1;
19604 }
19605 }
19606 /* Other aggregates, and complex values, could be represented using
19607 CONCAT: FIXME! */
19608 else if (AGGREGATE_TYPE_P (type)
19609 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19610 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19611 || TREE_CODE (type) == COMPLEX_TYPE)
19612 ;
19613 /* Vectors only work if their mode is supported by the target.
19614 FIXME: generic vectors ought to work too. */
19615 else if (TREE_CODE (type) == VECTOR_TYPE
19616 && !VECTOR_MODE_P (TYPE_MODE (type)))
19617 ;
19618 /* If the initializer is something that we know will expand into an
19619 immediate RTL constant, expand it now. We must be careful not to
19620 reference variables which won't be output. */
19621 else if (initializer_constant_valid_p (init, type)
19622 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19623 {
19624 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19625 possible. */
19626 if (TREE_CODE (type) == VECTOR_TYPE)
19627 switch (TREE_CODE (init))
19628 {
19629 case VECTOR_CST:
19630 break;
19631 case CONSTRUCTOR:
19632 if (TREE_CONSTANT (init))
19633 {
19634 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19635 bool constant_p = true;
19636 tree value;
19637 unsigned HOST_WIDE_INT ix;
19638
19639 /* Even when ctor is constant, it might contain non-*_CST
19640 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19641 belong into VECTOR_CST nodes. */
19642 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19643 if (!CONSTANT_CLASS_P (value))
19644 {
19645 constant_p = false;
19646 break;
19647 }
19648
19649 if (constant_p)
19650 {
19651 init = build_vector_from_ctor (type, elts);
19652 break;
19653 }
19654 }
19655 /* FALLTHRU */
19656
19657 default:
19658 return NULL;
19659 }
19660
19661 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19662
19663 /* If expand_expr returns a MEM, it wasn't immediate. */
19664 gcc_assert (!rtl || !MEM_P (rtl));
19665 }
19666
19667 return rtl;
19668 }
19669
19670 /* Generate RTL for the variable DECL to represent its location. */
19671
19672 static rtx
19673 rtl_for_decl_location (tree decl)
19674 {
19675 rtx rtl;
19676
19677 /* Here we have to decide where we are going to say the parameter "lives"
19678 (as far as the debugger is concerned). We only have a couple of
19679 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19680
19681 DECL_RTL normally indicates where the parameter lives during most of the
19682 activation of the function. If optimization is enabled however, this
19683 could be either NULL or else a pseudo-reg. Both of those cases indicate
19684 that the parameter doesn't really live anywhere (as far as the code
19685 generation parts of GCC are concerned) during most of the function's
19686 activation. That will happen (for example) if the parameter is never
19687 referenced within the function.
19688
19689 We could just generate a location descriptor here for all non-NULL
19690 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19691 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19692 where DECL_RTL is NULL or is a pseudo-reg.
19693
19694 Note however that we can only get away with using DECL_INCOMING_RTL as
19695 a backup substitute for DECL_RTL in certain limited cases. In cases
19696 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19697 we can be sure that the parameter was passed using the same type as it is
19698 declared to have within the function, and that its DECL_INCOMING_RTL
19699 points us to a place where a value of that type is passed.
19700
19701 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19702 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19703 because in these cases DECL_INCOMING_RTL points us to a value of some
19704 type which is *different* from the type of the parameter itself. Thus,
19705 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19706 such cases, the debugger would end up (for example) trying to fetch a
19707 `float' from a place which actually contains the first part of a
19708 `double'. That would lead to really incorrect and confusing
19709 output at debug-time.
19710
19711 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19712 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19713 are a couple of exceptions however. On little-endian machines we can
19714 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19715 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19716 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19717 when (on a little-endian machine) a non-prototyped function has a
19718 parameter declared to be of type `short' or `char'. In such cases,
19719 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19720 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19721 passed `int' value. If the debugger then uses that address to fetch
19722 a `short' or a `char' (on a little-endian machine) the result will be
19723 the correct data, so we allow for such exceptional cases below.
19724
19725 Note that our goal here is to describe the place where the given formal
19726 parameter lives during most of the function's activation (i.e. between the
19727 end of the prologue and the start of the epilogue). We'll do that as best
19728 as we can. Note however that if the given formal parameter is modified
19729 sometime during the execution of the function, then a stack backtrace (at
19730 debug-time) will show the function as having been called with the *new*
19731 value rather than the value which was originally passed in. This happens
19732 rarely enough that it is not a major problem, but it *is* a problem, and
19733 I'd like to fix it.
19734
19735 A future version of dwarf2out.c may generate two additional attributes for
19736 any given DW_TAG_formal_parameter DIE which will describe the "passed
19737 type" and the "passed location" for the given formal parameter in addition
19738 to the attributes we now generate to indicate the "declared type" and the
19739 "active location" for each parameter. This additional set of attributes
19740 could be used by debuggers for stack backtraces. Separately, note that
19741 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19742 This happens (for example) for inlined-instances of inline function formal
19743 parameters which are never referenced. This really shouldn't be
19744 happening. All PARM_DECL nodes should get valid non-NULL
19745 DECL_INCOMING_RTL values. FIXME. */
19746
19747 /* Use DECL_RTL as the "location" unless we find something better. */
19748 rtl = DECL_RTL_IF_SET (decl);
19749
19750 /* When generating abstract instances, ignore everything except
19751 constants, symbols living in memory, and symbols living in
19752 fixed registers. */
19753 if (! reload_completed)
19754 {
19755 if (rtl
19756 && (CONSTANT_P (rtl)
19757 || (MEM_P (rtl)
19758 && CONSTANT_P (XEXP (rtl, 0)))
19759 || (REG_P (rtl)
19760 && VAR_P (decl)
19761 && TREE_STATIC (decl))))
19762 {
19763 rtl = targetm.delegitimize_address (rtl);
19764 return rtl;
19765 }
19766 rtl = NULL_RTX;
19767 }
19768 else if (TREE_CODE (decl) == PARM_DECL)
19769 {
19770 if (rtl == NULL_RTX
19771 || is_pseudo_reg (rtl)
19772 || (MEM_P (rtl)
19773 && is_pseudo_reg (XEXP (rtl, 0))
19774 && DECL_INCOMING_RTL (decl)
19775 && MEM_P (DECL_INCOMING_RTL (decl))
19776 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19777 {
19778 tree declared_type = TREE_TYPE (decl);
19779 tree passed_type = DECL_ARG_TYPE (decl);
19780 machine_mode dmode = TYPE_MODE (declared_type);
19781 machine_mode pmode = TYPE_MODE (passed_type);
19782
19783 /* This decl represents a formal parameter which was optimized out.
19784 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19785 all cases where (rtl == NULL_RTX) just below. */
19786 if (dmode == pmode)
19787 rtl = DECL_INCOMING_RTL (decl);
19788 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19789 && SCALAR_INT_MODE_P (dmode)
19790 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19791 && DECL_INCOMING_RTL (decl))
19792 {
19793 rtx inc = DECL_INCOMING_RTL (decl);
19794 if (REG_P (inc))
19795 rtl = inc;
19796 else if (MEM_P (inc))
19797 {
19798 if (BYTES_BIG_ENDIAN)
19799 rtl = adjust_address_nv (inc, dmode,
19800 GET_MODE_SIZE (pmode)
19801 - GET_MODE_SIZE (dmode));
19802 else
19803 rtl = inc;
19804 }
19805 }
19806 }
19807
19808 /* If the parm was passed in registers, but lives on the stack, then
19809 make a big endian correction if the mode of the type of the
19810 parameter is not the same as the mode of the rtl. */
19811 /* ??? This is the same series of checks that are made in dbxout.c before
19812 we reach the big endian correction code there. It isn't clear if all
19813 of these checks are necessary here, but keeping them all is the safe
19814 thing to do. */
19815 else if (MEM_P (rtl)
19816 && XEXP (rtl, 0) != const0_rtx
19817 && ! CONSTANT_P (XEXP (rtl, 0))
19818 /* Not passed in memory. */
19819 && !MEM_P (DECL_INCOMING_RTL (decl))
19820 /* Not passed by invisible reference. */
19821 && (!REG_P (XEXP (rtl, 0))
19822 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19823 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19824 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19825 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19826 #endif
19827 )
19828 /* Big endian correction check. */
19829 && BYTES_BIG_ENDIAN
19830 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19831 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19832 UNITS_PER_WORD))
19833 {
19834 machine_mode addr_mode = get_address_mode (rtl);
19835 poly_int64 offset = (UNITS_PER_WORD
19836 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19837
19838 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19839 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19840 }
19841 }
19842 else if (VAR_P (decl)
19843 && rtl
19844 && MEM_P (rtl)
19845 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19846 {
19847 machine_mode addr_mode = get_address_mode (rtl);
19848 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19849 GET_MODE (rtl));
19850
19851 /* If a variable is declared "register" yet is smaller than
19852 a register, then if we store the variable to memory, it
19853 looks like we're storing a register-sized value, when in
19854 fact we are not. We need to adjust the offset of the
19855 storage location to reflect the actual value's bytes,
19856 else gdb will not be able to display it. */
19857 if (maybe_ne (offset, 0))
19858 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19859 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19860 }
19861
19862 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19863 and will have been substituted directly into all expressions that use it.
19864 C does not have such a concept, but C++ and other languages do. */
19865 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19866 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19867
19868 if (rtl)
19869 rtl = targetm.delegitimize_address (rtl);
19870
19871 /* If we don't look past the constant pool, we risk emitting a
19872 reference to a constant pool entry that isn't referenced from
19873 code, and thus is not emitted. */
19874 if (rtl)
19875 rtl = avoid_constant_pool_reference (rtl);
19876
19877 /* Try harder to get a rtl. If this symbol ends up not being emitted
19878 in the current CU, resolve_addr will remove the expression referencing
19879 it. */
19880 if (rtl == NULL_RTX
19881 && VAR_P (decl)
19882 && !DECL_EXTERNAL (decl)
19883 && TREE_STATIC (decl)
19884 && DECL_NAME (decl)
19885 && !DECL_HARD_REGISTER (decl)
19886 && DECL_MODE (decl) != VOIDmode)
19887 {
19888 rtl = make_decl_rtl_for_debug (decl);
19889 if (!MEM_P (rtl)
19890 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19891 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19892 rtl = NULL_RTX;
19893 }
19894
19895 return rtl;
19896 }
19897
19898 /* Check whether DECL is a Fortran COMMON symbol. If not, NULL_TREE is
19899 returned. If so, the decl for the COMMON block is returned, and
19900 *VALUE is set to the offset of the symbol into the common block. */
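/* Illustration (not part of the original sources): for Fortran source like

COMMON /BLK/ A, B

where A is a default (4-byte) REAL, calling this function on B's decl would
return the decl for BLK and set *VALUE to 4, B's byte offset within the
common block. Actual offsets depend on the kinds of the members. */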
19901
19902 static tree
19903 fortran_common (tree decl, HOST_WIDE_INT *value)
19904 {
19905 tree val_expr, cvar;
19906 machine_mode mode;
19907 poly_int64 bitsize, bitpos;
19908 tree offset;
19909 HOST_WIDE_INT cbitpos;
19910 int unsignedp, reversep, volatilep = 0;
19911
19912 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19913 it does not have a value (the offset into the common area), or if it
19914 is thread local (as opposed to global) then it isn't common, and shouldn't
19915 be handled as such. */
19916 if (!VAR_P (decl)
19917 || !TREE_STATIC (decl)
19918 || !DECL_HAS_VALUE_EXPR_P (decl)
19919 || !is_fortran ())
19920 return NULL_TREE;
19921
19922 val_expr = DECL_VALUE_EXPR (decl);
19923 if (TREE_CODE (val_expr) != COMPONENT_REF)
19924 return NULL_TREE;
19925
19926 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19927 &unsignedp, &reversep, &volatilep);
19928
19929 if (cvar == NULL_TREE
19930 || !VAR_P (cvar)
19931 || DECL_ARTIFICIAL (cvar)
19932 || !TREE_PUBLIC (cvar)
19933 /* We don't expect to have to cope with variable offsets,
19934 since at present all static data must have a constant size. */
19935 || !bitpos.is_constant (&cbitpos))
19936 return NULL_TREE;
19937
19938 *value = 0;
19939 if (offset != NULL)
19940 {
19941 if (!tree_fits_shwi_p (offset))
19942 return NULL_TREE;
19943 *value = tree_to_shwi (offset);
19944 }
19945 if (cbitpos != 0)
19946 *value += cbitpos / BITS_PER_UNIT;
19947
19948 return cvar;
19949 }
19950
19951 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19952 data attribute for a variable or a parameter. We generate the
19953 DW_AT_const_value attribute only in those cases where the given variable
19954 or parameter does not have a true "location" either in memory or in a
19955 register. This can happen (for example) when a constant is passed as an
19956 actual argument in a call to an inline function. (It's possible that
19957 these things can crop up in other ways also.) Note that one type of
19958 constant value which can be passed into an inlined function is a constant
19959 pointer. This can happen for example if an actual argument in an inlined
19960 function call evaluates to a compile-time constant address.
19961
19962 CACHE_P is true if it is worth caching the location list for DECL,
19963 so that future calls can reuse it rather than regenerate it from scratch.
19964 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19965 since we will need to refer to them each time the function is inlined. */
19966
19967 static bool
19968 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19969 {
19970 rtx rtl;
19971 dw_loc_list_ref list;
19972 var_loc_list *loc_list;
19973 cached_dw_loc_list *cache;
19974
19975 if (early_dwarf)
19976 return false;
19977
19978 if (TREE_CODE (decl) == ERROR_MARK)
19979 return false;
19980
19981 if (get_AT (die, DW_AT_location)
19982 || get_AT (die, DW_AT_const_value))
19983 return true;
19984
19985 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19986 || TREE_CODE (decl) == RESULT_DECL);
19987
19988 /* Try to get some constant RTL for this decl, and use that as the value of
19989 the location. */
19990
19991 rtl = rtl_for_decl_location (decl);
19992 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19993 && add_const_value_attribute (die, rtl))
19994 return true;
19995
19996 /* See if we have a single-element location list that is equivalent to
19997 a constant value. In that case it is better to use add_const_value_attribute
19998 rather than expanding the equivalent constant value. */
19999 loc_list = lookup_decl_loc (decl);
20000 if (loc_list
20001 && loc_list->first
20002 && loc_list->first->next == NULL
20003 && NOTE_P (loc_list->first->loc)
20004 && NOTE_VAR_LOCATION (loc_list->first->loc)
20005 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20006 {
20007 struct var_loc_node *node;
20008
20009 node = loc_list->first;
20010 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20011 if (GET_CODE (rtl) == EXPR_LIST)
20012 rtl = XEXP (rtl, 0);
20013 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20014 && add_const_value_attribute (die, rtl))
20015 return true;
20016 }
20017 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20018 list several times. See if we've already cached the contents. */
20019 list = NULL;
20020 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20021 cache_p = false;
20022 if (cache_p)
20023 {
20024 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20025 if (cache)
20026 list = cache->loc_list;
20027 }
20028 if (list == NULL)
20029 {
20030 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20031 NULL);
20032 /* It is usually worth caching this result if the decl is from
20033 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20034 if (cache_p && list && list->dw_loc_next)
20035 {
20036 cached_dw_loc_list **slot
20037 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20038 DECL_UID (decl),
20039 INSERT);
20040 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20041 cache->decl_id = DECL_UID (decl);
20042 cache->loc_list = list;
20043 *slot = cache;
20044 }
20045 }
20046 if (list)
20047 {
20048 add_AT_location_description (die, DW_AT_location, list);
20049 return true;
20050 }
20051 /* None of that worked, so it must not really have a location;
20052 try adding a constant value attribute from the DECL_INITIAL. */
20053 return tree_add_const_value_attribute_for_decl (die, decl);
20054 }
20055
20056 /* Helper function for tree_add_const_value_attribute. Natively encode
20057 initializer INIT into an array. Return true if successful. */
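/* Illustration (not part of the original sources): for an initializer such as

int a[3] = { 1, 2, 3 };

with a 4-byte int, the ARRAY_TYPE case below fills a 12-byte array with the
target-order byte images of 1, 2 and 3, each placed at index * fieldsize.
Element sizes and byte order depend on the target. */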
20058
20059 static bool
20060 native_encode_initializer (tree init, unsigned char *array, int size)
20061 {
20062 tree type;
20063
20064 if (init == NULL_TREE)
20065 return false;
20066
20067 STRIP_NOPS (init);
20068 switch (TREE_CODE (init))
20069 {
20070 case STRING_CST:
20071 type = TREE_TYPE (init);
20072 if (TREE_CODE (type) == ARRAY_TYPE)
20073 {
20074 tree enttype = TREE_TYPE (type);
20075 scalar_int_mode mode;
20076
20077 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20078 || GET_MODE_SIZE (mode) != 1)
20079 return false;
20080 if (int_size_in_bytes (type) != size)
20081 return false;
20082 if (size > TREE_STRING_LENGTH (init))
20083 {
20084 memcpy (array, TREE_STRING_POINTER (init),
20085 TREE_STRING_LENGTH (init));
20086 memset (array + TREE_STRING_LENGTH (init),
20087 '\0', size - TREE_STRING_LENGTH (init));
20088 }
20089 else
20090 memcpy (array, TREE_STRING_POINTER (init), size);
20091 return true;
20092 }
20093 return false;
20094 case CONSTRUCTOR:
20095 type = TREE_TYPE (init);
20096 if (int_size_in_bytes (type) != size)
20097 return false;
20098 if (TREE_CODE (type) == ARRAY_TYPE)
20099 {
20100 HOST_WIDE_INT min_index;
20101 unsigned HOST_WIDE_INT cnt;
20102 int curpos = 0, fieldsize;
20103 constructor_elt *ce;
20104
20105 if (TYPE_DOMAIN (type) == NULL_TREE
20106 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20107 return false;
20108
20109 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20110 if (fieldsize <= 0)
20111 return false;
20112
20113 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20114 memset (array, '\0', size);
20115 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20116 {
20117 tree val = ce->value;
20118 tree index = ce->index;
20119 int pos = curpos;
20120 if (index && TREE_CODE (index) == RANGE_EXPR)
20121 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20122 * fieldsize;
20123 else if (index)
20124 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20125
20126 if (val)
20127 {
20128 STRIP_NOPS (val);
20129 if (!native_encode_initializer (val, array + pos, fieldsize))
20130 return false;
20131 }
20132 curpos = pos + fieldsize;
20133 if (index && TREE_CODE (index) == RANGE_EXPR)
20134 {
20135 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20136 - tree_to_shwi (TREE_OPERAND (index, 0));
20137 while (count-- > 0)
20138 {
20139 if (val)
20140 memcpy (array + curpos, array + pos, fieldsize);
20141 curpos += fieldsize;
20142 }
20143 }
20144 gcc_assert (curpos <= size);
20145 }
20146 return true;
20147 }
20148 else if (TREE_CODE (type) == RECORD_TYPE
20149 || TREE_CODE (type) == UNION_TYPE)
20150 {
20151 tree field = NULL_TREE;
20152 unsigned HOST_WIDE_INT cnt;
20153 constructor_elt *ce;
20154
20155 if (int_size_in_bytes (type) != size)
20156 return false;
20157
20158 if (TREE_CODE (type) == RECORD_TYPE)
20159 field = TYPE_FIELDS (type);
20160
20161 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20162 {
20163 tree val = ce->value;
20164 int pos, fieldsize;
20165
20166 if (ce->index != 0)
20167 field = ce->index;
20168
20169 if (val)
20170 STRIP_NOPS (val);
20171
20172 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20173 return false;
20174
20175 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20176 && TYPE_DOMAIN (TREE_TYPE (field))
20177 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20178 return false;
20179 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20180 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20181 return false;
20182 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20183 pos = int_byte_position (field);
20184 gcc_assert (pos + fieldsize <= size);
20185 if (val && fieldsize != 0
20186 && !native_encode_initializer (val, array + pos, fieldsize))
20187 return false;
20188 }
20189 return true;
20190 }
20191 return false;
20192 case VIEW_CONVERT_EXPR:
20193 case NON_LVALUE_EXPR:
20194 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20195 default:
20196 return native_encode_expr (init, array, size) == size;
20197 }
20198 }
20199
20200 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20201 attribute is the const value T. */
20202
20203 static bool
20204 tree_add_const_value_attribute (dw_die_ref die, tree t)
20205 {
20206 tree init;
20207 tree type = TREE_TYPE (t);
20208 rtx rtl;
20209
20210 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20211 return false;
20212
20213 init = t;
20214 gcc_assert (!DECL_P (init));
20215
20216 if (TREE_CODE (init) == INTEGER_CST)
20217 {
20218 if (tree_fits_uhwi_p (init))
20219 {
20220 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20221 return true;
20222 }
20223 if (tree_fits_shwi_p (init))
20224 {
20225 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20226 return true;
20227 }
20228 }
20229 if (! early_dwarf)
20230 {
20231 rtl = rtl_for_decl_init (init, type);
20232 if (rtl)
20233 return add_const_value_attribute (die, rtl);
20234 }
20235 /* If the host and target are sane, try harder. */
20236 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20237 && initializer_constant_valid_p (init, type))
20238 {
20239 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20240 if (size > 0 && (int) size == size)
20241 {
20242 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20243
20244 if (native_encode_initializer (init, array, size))
20245 {
20246 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20247 return true;
20248 }
20249 ggc_free (array);
20250 }
20251 }
20252 return false;
20253 }
20254
20255 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20256 attribute is the const value of T, where T is an integral constant
20257 variable with static storage duration
20258 (so it can't be a PARM_DECL or a RESULT_DECL). */
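/* Illustration (not part of the original sources): for C++ code such as

static const int n = 42;

where N is read-only, non-volatile and has a DECL_INITIAL, the checks below
accept the decl and DW_AT_const_value 42 can be attached by
tree_add_const_value_attribute. */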
20259
20260 static bool
20261 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20262 {
20263
20264 if (!decl
20265 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20266 || (VAR_P (decl) && !TREE_STATIC (decl)))
20267 return false;
20268
20269 if (TREE_READONLY (decl)
20270 && ! TREE_THIS_VOLATILE (decl)
20271 && DECL_INITIAL (decl))
20272 /* OK */;
20273 else
20274 return false;
20275
20276 /* Don't add DW_AT_const_value if abstract origin already has one. */
20277 if (get_AT (var_die, DW_AT_const_value))
20278 return false;
20279
20280 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20281 }
20282
20283 /* Convert the CFI instructions for the current function into a
20284 location list. This is used for DW_AT_frame_base when we are targeting
20285 a DWARF 2 consumer that does not support the DWARF 3
20286 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20287 expressions. */
20288
20289 static dw_loc_list_ref
20290 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20291 {
20292 int ix;
20293 dw_fde_ref fde;
20294 dw_loc_list_ref list, *list_tail;
20295 dw_cfi_ref cfi;
20296 dw_cfa_location last_cfa, next_cfa;
20297 const char *start_label, *last_label, *section;
20298 dw_cfa_location remember;
20299
20300 fde = cfun->fde;
20301 gcc_assert (fde != NULL);
20302
20303 section = secname_for_decl (current_function_decl);
20304 list_tail = &list;
20305 list = NULL;
20306
20307 memset (&next_cfa, 0, sizeof (next_cfa));
20308 next_cfa.reg = INVALID_REGNUM;
20309 remember = next_cfa;
20310
20311 start_label = fde->dw_fde_begin;
20312
20313 /* ??? Bald assumption that the CIE opcode list does not contain
20314 advance opcodes. */
20315 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20316 lookup_cfa_1 (cfi, &next_cfa, &remember);
20317
20318 last_cfa = next_cfa;
20319 last_label = start_label;
20320
20321 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20322 {
20323 /* If the first partition contained no CFI adjustments, the
20324 CIE opcodes apply to the whole first partition. */
20325 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20326 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20327 list_tail = &(*list_tail)->dw_loc_next;
20328 start_label = last_label = fde->dw_fde_second_begin;
20329 }
20330
20331 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20332 {
20333 switch (cfi->dw_cfi_opc)
20334 {
20335 case DW_CFA_set_loc:
20336 case DW_CFA_advance_loc1:
20337 case DW_CFA_advance_loc2:
20338 case DW_CFA_advance_loc4:
20339 if (!cfa_equal_p (&last_cfa, &next_cfa))
20340 {
20341 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20342 start_label, 0, last_label, 0, section);
20343
20344 list_tail = &(*list_tail)->dw_loc_next;
20345 last_cfa = next_cfa;
20346 start_label = last_label;
20347 }
20348 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20349 break;
20350
20351 case DW_CFA_advance_loc:
20352 /* The encoding is complex enough that we should never emit this. */
20353 gcc_unreachable ();
20354
20355 default:
20356 lookup_cfa_1 (cfi, &next_cfa, &remember);
20357 break;
20358 }
20359 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20360 {
20361 if (!cfa_equal_p (&last_cfa, &next_cfa))
20362 {
20363 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20364 start_label, 0, last_label, 0, section);
20365
20366 list_tail = &(*list_tail)->dw_loc_next;
20367 last_cfa = next_cfa;
20368 start_label = last_label;
20369 }
20370 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20371 start_label, 0, fde->dw_fde_end, 0, section);
20372 list_tail = &(*list_tail)->dw_loc_next;
20373 start_label = last_label = fde->dw_fde_second_begin;
20374 }
20375 }
20376
20377 if (!cfa_equal_p (&last_cfa, &next_cfa))
20378 {
20379 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20380 start_label, 0, last_label, 0, section);
20381 list_tail = &(*list_tail)->dw_loc_next;
20382 start_label = last_label;
20383 }
20384
20385 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20386 start_label, 0,
20387 fde->dw_fde_second_begin
20388 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20389 section);
20390
20391 maybe_gen_llsym (list);
20392
20393 return list;
20394 }
20395
20396 /* Compute a displacement from the "steady-state frame pointer" to the
20397 frame base (often the same as the CFA), and store it in
20398 frame_pointer_fb_offset. OFFSET is added to the displacement
20399 before the latter is negated. */
20400
20401 static void
20402 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20403 {
20404 rtx reg, elim;
20405
20406 #ifdef FRAME_POINTER_CFA_OFFSET
20407 reg = frame_pointer_rtx;
20408 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20409 #else
20410 reg = arg_pointer_rtx;
20411 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20412 #endif
20413
20414 elim = (ira_use_lra_p
20415 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20416 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20417 elim = strip_offset_and_add (elim, &offset);
20418
20419 frame_pointer_fb_offset = -offset;
20420
20421 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20422 in which to eliminate. This is because its stack pointer isn't
20423 directly accessible as a register within the ISA. To work around
20424 this, assume that while we cannot provide a proper value for
20425 frame_pointer_fb_offset, we won't need one either. */
20426 frame_pointer_fb_offset_valid
20427 = ((SUPPORTS_STACK_ALIGNMENT
20428 && (elim == hard_frame_pointer_rtx
20429 || elim == stack_pointer_rtx))
20430 || elim == (frame_pointer_needed
20431 ? hard_frame_pointer_rtx
20432 : stack_pointer_rtx));
20433 }
20434
20435 /* Generate a DW_AT_name attribute given some string value to be included as
20436 the value of the attribute. */
20437
20438 static void
20439 add_name_attribute (dw_die_ref die, const char *name_string)
20440 {
20441 if (name_string != NULL && *name_string != 0)
20442 {
20443 if (demangle_name_func)
20444 name_string = (*demangle_name_func) (name_string);
20445
20446 add_AT_string (die, DW_AT_name, name_string);
20447 }
20448 }
20449
20450 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20451 DIE, and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20452 of TYPE accordingly.
20453
20454 ??? This is a temporary measure until after we're able to generate
20455 regular DWARF for the complex Ada type system. */
20456
20457 static void
20458 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20459 dw_die_ref context_die)
20460 {
20461 tree dtype;
20462 dw_die_ref dtype_die;
20463
20464 if (!lang_hooks.types.descriptive_type)
20465 return;
20466
20467 dtype = lang_hooks.types.descriptive_type (type);
20468 if (!dtype)
20469 return;
20470
20471 dtype_die = lookup_type_die (dtype);
20472 if (!dtype_die)
20473 {
20474 gen_type_die (dtype, context_die);
20475 dtype_die = lookup_type_die (dtype);
20476 gcc_assert (dtype_die);
20477 }
20478
20479 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20480 }
20481
20482 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20483
20484 static const char *
20485 comp_dir_string (void)
20486 {
20487 const char *wd;
20488 char *wd1;
20489 static const char *cached_wd = NULL;
20490
20491 if (cached_wd != NULL)
20492 return cached_wd;
20493
20494 wd = get_src_pwd ();
20495 if (wd == NULL)
20496 return NULL;
20497
20498 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20499 {
20500 int wdlen;
20501
20502 wdlen = strlen (wd);
20503 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20504 strcpy (wd1, wd);
20505 wd1 [wdlen] = DIR_SEPARATOR;
20506 wd1 [wdlen + 1] = 0;
20507 wd = wd1;
20508 }
20509
20510 cached_wd = remap_debug_filename (wd);
20511 return cached_wd;
20512 }
20513
20514 /* Generate a DW_AT_comp_dir attribute for DIE. */
20515
20516 static void
20517 add_comp_dir_attribute (dw_die_ref die)
20518 {
20519 const char * wd = comp_dir_string ();
20520 if (wd != NULL)
20521 add_AT_string (die, DW_AT_comp_dir, wd);
20522 }
20523
20524 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20525 pointer computation, ...), output a representation for that bound according
20526 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20527 loc_list_from_tree for the meaning of CONTEXT. */
20528
20529 static void
20530 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20531 int forms, struct loc_descr_context *context)
20532 {
20533 dw_die_ref context_die, decl_die;
20534 dw_loc_list_ref list;
20535 bool strip_conversions = true;
20536 bool placeholder_seen = false;
20537
20538 while (strip_conversions)
20539 switch (TREE_CODE (value))
20540 {
20541 case ERROR_MARK:
20542 case SAVE_EXPR:
20543 return;
20544
20545 CASE_CONVERT:
20546 case VIEW_CONVERT_EXPR:
20547 value = TREE_OPERAND (value, 0);
20548 break;
20549
20550 default:
20551 strip_conversions = false;
20552 break;
20553 }
20554
20555 /* If possible and permitted, output the attribute as a constant. */
20556 if ((forms & dw_scalar_form_constant) != 0
20557 && TREE_CODE (value) == INTEGER_CST)
20558 {
20559 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20560
20561 /* If HOST_WIDE_INT is big enough then represent the bound as
20562 a constant value. We need to choose a form based on
20563 whether the type is signed or unsigned. We cannot just
20564 call add_AT_unsigned if the value itself is positive
20565 (add_AT_unsigned might add the unsigned value encoded as
20566 DW_FORM_data[1248]). Some DWARF consumers will look up the
20567 bounds type and then sign extend any unsigned values found
20568 for signed types. This is needed only for
20569 DW_AT_{lower,upper}_bound, since for most other attributes,
20570 consumers will treat DW_FORM_data[1248] as unsigned values,
20571 regardless of the underlying type. */
20572 if (prec <= HOST_BITS_PER_WIDE_INT
20573 || tree_fits_uhwi_p (value))
20574 {
20575 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20576 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20577 else
20578 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20579 }
20580 else
20581 /* Otherwise represent the bound as an unsigned value with
20582 the precision of its type. The precision and signedness
20583 of the type will be necessary to re-interpret it
20584 unambiguously. */
20585 add_AT_wide (die, attr, wi::to_wide (value));
20586 return;
20587 }
20588
20589 /* Otherwise, if it's possible and permitted too, output a reference to
20590 another DIE. */
20591 if ((forms & dw_scalar_form_reference) != 0)
20592 {
20593 tree decl = NULL_TREE;
20594
20595 /* Some type attributes reference an outer type. For instance, the upper
20596 bound of an array may reference an embedding record (this happens in
20597 Ada). */
20598 if (TREE_CODE (value) == COMPONENT_REF
20599 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20600 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20601 decl = TREE_OPERAND (value, 1);
20602
20603 else if (VAR_P (value)
20604 || TREE_CODE (value) == PARM_DECL
20605 || TREE_CODE (value) == RESULT_DECL)
20606 decl = value;
20607
20608 if (decl != NULL_TREE)
20609 {
20610 dw_die_ref decl_die = lookup_decl_die (decl);
20611
20612 /* ??? Can this happen, or should the variable have been bound
20613 first? Probably it can, since I imagine that we try to create
20614 the types of parameters in the order in which they exist in
20615 the list, and won't have created a forward reference to a
20616 later parameter. */
20617 if (decl_die != NULL)
20618 {
20619 add_AT_die_ref (die, attr, decl_die);
20620 return;
20621 }
20622 }
20623 }
20624
20625 /* Last chance: try to create a stack operation procedure to evaluate the
20626 value. Do nothing if even that is not possible or permitted. */
20627 if ((forms & dw_scalar_form_exprloc) == 0)
20628 return;
20629
20630 list = loc_list_from_tree (value, 2, context);
20631 if (context && context->placeholder_arg)
20632 {
20633 placeholder_seen = context->placeholder_seen;
20634 context->placeholder_seen = false;
20635 }
20636 if (list == NULL || single_element_loc_list_p (list))
20637 {
20638 /* If this attribute is neither a reference nor a constant, it is
20639 a DWARF expression rather than a location description. For that,
20640 loc_list_from_tree (value, 0, &context) is needed. */
20641 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20642 if (list2 && single_element_loc_list_p (list2))
20643 {
20644 if (placeholder_seen)
20645 {
20646 struct dwarf_procedure_info dpi;
20647 dpi.fndecl = NULL_TREE;
20648 dpi.args_count = 1;
20649 if (!resolve_args_picking (list2->expr, 1, &dpi))
20650 return;
20651 }
20652 add_AT_loc (die, attr, list2->expr);
20653 return;
20654 }
20655 }
20656
20657 /* If that failed to give a single element location list, fall back to
20658 outputting this as a reference... still if permitted. */
20659 if (list == NULL
20660 || (forms & dw_scalar_form_reference) == 0
20661 || placeholder_seen)
20662 return;
20663
20664 if (current_function_decl == 0)
20665 context_die = comp_unit_die ();
20666 else
20667 context_die = lookup_decl_die (current_function_decl);
20668
20669 decl_die = new_die (DW_TAG_variable, context_die, value);
20670 add_AT_flag (decl_die, DW_AT_artificial, 1);
20671 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20672 context_die);
20673 add_AT_location_description (decl_die, DW_AT_location, list);
20674 add_AT_die_ref (die, attr, decl_die);
20675 }
20676
20677 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20678 default. */
20679
20680 static int
20681 lower_bound_default (void)
20682 {
20683 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20684 {
20685 case DW_LANG_C:
20686 case DW_LANG_C89:
20687 case DW_LANG_C99:
20688 case DW_LANG_C11:
20689 case DW_LANG_C_plus_plus:
20690 case DW_LANG_C_plus_plus_11:
20691 case DW_LANG_C_plus_plus_14:
20692 case DW_LANG_ObjC:
20693 case DW_LANG_ObjC_plus_plus:
20694 return 0;
20695 case DW_LANG_Fortran77:
20696 case DW_LANG_Fortran90:
20697 case DW_LANG_Fortran95:
20698 case DW_LANG_Fortran03:
20699 case DW_LANG_Fortran08:
20700 return 1;
20701 case DW_LANG_UPC:
20702 case DW_LANG_D:
20703 case DW_LANG_Python:
20704 return dwarf_version >= 4 ? 0 : -1;
20705 case DW_LANG_Ada95:
20706 case DW_LANG_Ada83:
20707 case DW_LANG_Cobol74:
20708 case DW_LANG_Cobol85:
20709 case DW_LANG_Modula2:
20710 case DW_LANG_PLI:
20711 return dwarf_version >= 4 ? 1 : -1;
20712 default:
20713 return -1;
20714 }
20715 }
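/* Illustration (not part of the original sources): for a C declaration
int a[10], the default lower bound above is 0, so add_bound_info below can
omit DW_AT_lower_bound and only DW_AT_upper_bound 9 is emitted; for a
Fortran DIMENSION(10) array the default lower bound is 1 instead. */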
20716
20717 /* Given a tree node describing an array bound (either lower or upper) output
20718 a representation for that bound. */
20719
20720 static void
20721 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20722 tree bound, struct loc_descr_context *context)
20723 {
20724 int dflt;
20725
20726 while (1)
20727 switch (TREE_CODE (bound))
20728 {
20729 /* Strip all conversions. */
20730 CASE_CONVERT:
20731 case VIEW_CONVERT_EXPR:
20732 bound = TREE_OPERAND (bound, 0);
20733 break;
20734
20735 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20736 are even omitted when they are the default. */
20737 case INTEGER_CST:
20738 /* If the value for this bound is the default one, we can even omit the
20739 attribute. */
20740 if (bound_attr == DW_AT_lower_bound
20741 && tree_fits_shwi_p (bound)
20742 && (dflt = lower_bound_default ()) != -1
20743 && tree_to_shwi (bound) == dflt)
20744 return;
20745
20746 /* FALLTHRU */
20747
20748 default:
20749 /* Because of the complex interactions there can be with other GNAT
20750 encodings, GDB isn't ready yet to handle a proper DWARF description
20751 for self-referential subrange bounds: let GNAT encodings do the
20752 magic in such a case. */
20753 if (is_ada ()
20754 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20755 && contains_placeholder_p (bound))
20756 return;
20757
20758 add_scalar_info (subrange_die, bound_attr, bound,
20759 dw_scalar_form_constant
20760 | dw_scalar_form_exprloc
20761 | dw_scalar_form_reference,
20762 context);
20763 return;
20764 }
20765 }
20766
20767 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20768 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20769 Note that the block of subscript information for an array type also
20770 includes information about the element type of the given array type.
20771
20772 This function reuses previously set type and bound information if
20773 available. */
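/* Illustration (not part of the original sources): with COLLAPSE_P true, a C
array such as int m[2][3] yields a single DW_TAG_array_type DIE containing
two DW_TAG_subrange_type children with upper bounds 1 and 2 respectively
(C's default lower bound of 0 being omitted). */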
20774
20775 static void
20776 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20777 {
20778 unsigned dimension_number;
20779 tree lower, upper;
20780 dw_die_ref child = type_die->die_child;
20781
20782 for (dimension_number = 0;
20783 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20784 type = TREE_TYPE (type), dimension_number++)
20785 {
20786 tree domain = TYPE_DOMAIN (type);
20787
20788 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20789 break;
20790
20791 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20792 and (in GNU C only) variable bounds. Handle all three forms
20793 here. */
20794
20795 /* Find and reuse a previously generated DW_TAG_subrange_type if
20796 available.
20797
20798 For multi-dimensional arrays, as we iterate through the
20799 various dimensions in the enclosing for loop above, we also
20800 iterate through the DIE children and pick at each
20801 DW_TAG_subrange_type previously generated (if available).
20802 Each child DW_TAG_subrange_type DIE describes the range of
20803 the current dimension. At this point we should have as many
20804 DW_TAG_subrange_type's as we have dimensions in the
20805 array. */
20806 dw_die_ref subrange_die = NULL;
20807 if (child)
20808 while (1)
20809 {
20810 child = child->die_sib;
20811 if (child->die_tag == DW_TAG_subrange_type)
20812 subrange_die = child;
20813 if (child == type_die->die_child)
20814 {
20815 /* If we wrapped around, stop looking next time. */
20816 child = NULL;
20817 break;
20818 }
20819 if (child->die_tag == DW_TAG_subrange_type)
20820 break;
20821 }
20822 if (!subrange_die)
20823 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20824
20825 if (domain)
20826 {
20827 /* We have an array type with specified bounds. */
20828 lower = TYPE_MIN_VALUE (domain);
20829 upper = TYPE_MAX_VALUE (domain);
20830
20831 /* Define the index type. */
20832 if (TREE_TYPE (domain)
20833 && !get_AT (subrange_die, DW_AT_type))
20834 {
20835 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20836 TREE_TYPE field. We can't emit debug info for this
20837 because it is an unnamed integral type. */
20838 if (TREE_CODE (domain) == INTEGER_TYPE
20839 && TYPE_NAME (domain) == NULL_TREE
20840 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20841 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20842 ;
20843 else
20844 add_type_attribute (subrange_die, TREE_TYPE (domain),
20845 TYPE_UNQUALIFIED, false, type_die);
20846 }
20847
20848 /* ??? If upper is NULL, the array has unspecified length,
20849 but it does have a lower bound. This happens with Fortran
20850 dimension arr(N:*).
20851 Since the debugger is definitely going to need to know N
20852 to produce useful results, go ahead and output the lower
20853 bound solo, and hope the debugger can cope. */
20854
20855 if (!get_AT (subrange_die, DW_AT_lower_bound))
20856 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20857 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20858 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20859 }
20860
20861 /* Otherwise we have an array type with an unspecified length. The
20862 DWARF-2 spec does not say how to handle this; let's just leave out the
20863 bounds. */
20864 }
20865 }
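/* As an illustrative sketch (exact attributes vary with language, target
   and DWARF version), a C declaration such as

     int a[3][5];

   is represented internally as an array of arrays, but with COLLAPSE_P
   set the loop above folds it into a single DW_TAG_array_type whose two
   DW_TAG_subrange_type children would carry DW_AT_upper_bound 2 and 4,
   the lower bound defaulting to 0 for C.  For Ada, COLLAPSE_P is false
   and every dimension keeps its own array type.  */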
20866
20867 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20868
20869 static void
20870 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20871 {
20872 dw_die_ref decl_die;
20873 HOST_WIDE_INT size;
20874 dw_loc_descr_ref size_expr = NULL;
20875
20876 switch (TREE_CODE (tree_node))
20877 {
20878 case ERROR_MARK:
20879 size = 0;
20880 break;
20881 case ENUMERAL_TYPE:
20882 case RECORD_TYPE:
20883 case UNION_TYPE:
20884 case QUAL_UNION_TYPE:
20885 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20886 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20887 {
20888 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20889 return;
20890 }
20891 size_expr = type_byte_size (tree_node, &size);
20892 break;
20893 case FIELD_DECL:
20894 /* For a data member of a struct or union, the DW_AT_byte_size is
20895 generally given as the number of bytes normally allocated for an
20896 object of the *declared* type of the member itself. This is true
20897 even for bit-fields. */
20898 size = int_size_in_bytes (field_type (tree_node));
20899 break;
20900 default:
20901 gcc_unreachable ();
20902 }
20903
20904 /* Support for dynamically-sized objects was introduced by DWARFv3.
20905 At the moment, GDB does not handle variable byte sizes very well,
20906 though. */
20907 if ((dwarf_version >= 3 || !dwarf_strict)
20908 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20909 && size_expr != NULL)
20910 add_AT_loc (die, DW_AT_byte_size, size_expr);
20911
20912 /* Note that `size' might be -1 when we get to this point. If it is, that
20913 indicates that the byte size of the entity in question is variable and
20914 that we could not generate a DWARF expression that computes it. */
20915 if (size >= 0)
20916 add_AT_unsigned (die, DW_AT_byte_size, size);
20917 }
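/* Illustrative sketch: for a bit-field member such as

     struct s { unsigned int b : 5; };

   the FIELD_DECL case above reports the size of the declared type, so
   B's DIE would typically get DW_AT_byte_size 4 on targets with 32-bit
   int, even though the field only occupies 5 bits; the bit-level layout
   is described separately by DW_AT_bit_size and DW_AT_bit_offset.  */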
20918
20919 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20920 alignment. */
20921
20922 static void
20923 add_alignment_attribute (dw_die_ref die, tree tree_node)
20924 {
20925 if (dwarf_version < 5 && dwarf_strict)
20926 return;
20927
20928 unsigned align;
20929
20930 if (DECL_P (tree_node))
20931 {
20932 if (!DECL_USER_ALIGN (tree_node))
20933 return;
20934
20935 align = DECL_ALIGN_UNIT (tree_node);
20936 }
20937 else if (TYPE_P (tree_node))
20938 {
20939 if (!TYPE_USER_ALIGN (tree_node))
20940 return;
20941
20942 align = TYPE_ALIGN_UNIT (tree_node);
20943 }
20944 else
20945 gcc_unreachable ();
20946
20947 add_AT_unsigned (die, DW_AT_alignment, align);
20948 }
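/* Illustrative sketch, assuming a C11 translation unit: a declaration like

     _Alignas (16) int x;

   sets DECL_USER_ALIGN, so X's DIE would get DW_AT_alignment 16 (the
   alignment in bytes), whereas a plain "int y;" uses the default
   alignment and gets no DW_AT_alignment at all.  The attribute is a
   DWARF 5 addition, hence the dwarf_version/dwarf_strict guard above.  */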
20949
20950 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20951 which specifies the distance in bits from the highest order bit of the
20952 "containing object" for the bit-field to the highest order bit of the
20953 bit-field itself.
20954
20955 For any given bit-field, the "containing object" is a hypothetical object
20956 (of some integral or enum type) within which the given bit-field lives. The
20957 type of this hypothetical "containing object" is always the same as the
20958 declared type of the individual bit-field itself. The determination of the
20959 exact location of the "containing object" for a bit-field is rather
20960 complicated. It's handled by the `field_byte_offset' function (above).
20961
20962 CTX is required: see the comment for VLR_CONTEXT.
20963
20964 Note that it is the size (in bytes) of the hypothetical "containing object"
20965 which will be given in the DW_AT_byte_size attribute for this bit-field.
20966 (See `add_byte_size_attribute' above). */
20967
20968 static inline void
20969 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20970 {
20971 HOST_WIDE_INT object_offset_in_bytes;
20972 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20973 HOST_WIDE_INT bitpos_int;
20974 HOST_WIDE_INT highest_order_object_bit_offset;
20975 HOST_WIDE_INT highest_order_field_bit_offset;
20976 HOST_WIDE_INT bit_offset;
20977
20978 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20979
20980 /* Must be a field and a bit field. */
20981 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20982
20983 /* We can't yet handle bit-fields whose offsets are variable, so if we
20984 encounter such things, just return without generating any attribute
20985 whatsoever. Likewise for variable or too large size. */
20986 if (! tree_fits_shwi_p (bit_position (decl))
20987 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20988 return;
20989
20990 bitpos_int = int_bit_position (decl);
20991
20992 /* Note that the bit offset is always the distance (in bits) from the
20993 highest-order bit of the "containing object" to the highest-order bit of
20994 the bit-field itself. Since the "high-order end" of any object or field
20995 is different on big-endian and little-endian machines, the computation
20996 below must take account of these differences. */
20997 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20998 highest_order_field_bit_offset = bitpos_int;
20999
21000 if (! BYTES_BIG_ENDIAN)
21001 {
21002 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21003 highest_order_object_bit_offset +=
21004 simple_type_size_in_bits (original_type);
21005 }
21006
21007 bit_offset
21008 = (! BYTES_BIG_ENDIAN
21009 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21010 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21011
21012 if (bit_offset < 0)
21013 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21014 else
21015 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21016 }
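/* Worked example, a sketch assuming a little-endian target with 32-bit
   int and the usual layout:

     struct s { unsigned a : 3; unsigned b : 5; };

   For B the containing object is a 32-bit unsigned int at byte offset 0,
   so highest_order_object_bit_offset = 0*8 + 32 = 32, and with
   bit_position = 3 and DECL_SIZE = 5 we get
   highest_order_field_bit_offset = 3 + 5 = 8, giving
   DW_AT_bit_offset = 32 - 8 = 24.  On a comparable big-endian target the
   same field would get DW_AT_bit_offset = 3.  */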
21017
21018 /* For a FIELD_DECL node which represents a bit field, output an attribute
21019 which specifies the length in bits of the given field. */
21020
21021 static inline void
21022 add_bit_size_attribute (dw_die_ref die, tree decl)
21023 {
21024 /* Must be a field and a bit field. */
21025 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21026 && DECL_BIT_FIELD_TYPE (decl));
21027
21028 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21029 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21030 }
21031
21032 /* If the compiled language is ANSI C, then add a 'prototyped'
21033 attribute if argument types are given for the parameters of a function. */
21034
21035 static inline void
21036 add_prototyped_attribute (dw_die_ref die, tree func_type)
21037 {
21038 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21039 {
21040 case DW_LANG_C:
21041 case DW_LANG_C89:
21042 case DW_LANG_C99:
21043 case DW_LANG_C11:
21044 case DW_LANG_ObjC:
21045 if (prototype_p (func_type))
21046 add_AT_flag (die, DW_AT_prototyped, 1);
21047 break;
21048 default:
21049 break;
21050 }
21051 }
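/* For example, in a C compilation unit "int f (void);" is a prototyped
   declaration, so F's DIE gets DW_AT_prototyped 1, whereas an old-style
   "int g ();" does not; a debugger can then tell whether the default
   argument promotions apply to a call.  Languages outside the C family
   fall through the default case above and never receive the
   attribute.  */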
21052
21053 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21054 by looking in the type declaration, the object declaration equate table or
21055 the block mapping. */
21056
21057 static inline dw_die_ref
21058 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21059 {
21060 dw_die_ref origin_die = NULL;
21061
21062 if (DECL_P (origin))
21063 {
21064 dw_die_ref c;
21065 origin_die = lookup_decl_die (origin);
21066 /* "Unwrap" the decls DIE which we put in the imported unit context.
21067 We are looking for the abstract copy here. */
21068 if (in_lto_p
21069 && origin_die
21070 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21071 /* ??? Identify this better. */
21072 && c->with_offset)
21073 origin_die = c;
21074 }
21075 else if (TYPE_P (origin))
21076 origin_die = lookup_type_die (origin);
21077 else if (TREE_CODE (origin) == BLOCK)
21078 origin_die = BLOCK_DIE (origin);
21079
21080 /* XXX: Functions that are never lowered don't always have correct block
21081 trees (in the case of Java they simply have no block tree, and the same is
21082 true in some other languages). For these functions, there is nothing we can
21083 really do to output correct debug info for inlined functions in all cases. Rather
21084 than die, we'll just produce deficient debug info now, in that we will
21085 have variables without a proper abstract origin. In the future, when all
21086 functions are lowered, we should re-add a gcc_assert (origin_die)
21087 here. */
21088
21089 if (origin_die)
21090 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21091 return origin_die;
21092 }
21093
21094 /* We do not currently support the pure_virtual attribute. */
21095
21096 static inline void
21097 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21098 {
21099 if (DECL_VINDEX (func_decl))
21100 {
21101 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21102
21103 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21104 add_AT_loc (die, DW_AT_vtable_elem_location,
21105 new_loc_descr (DW_OP_constu,
21106 tree_to_shwi (DECL_VINDEX (func_decl)),
21107 0));
21108
21109 /* GNU extension: Record what type this method came from originally. */
21110 if (debug_info_level > DINFO_LEVEL_TERSE
21111 && DECL_CONTEXT (func_decl))
21112 add_AT_die_ref (die, DW_AT_containing_type,
21113 lookup_type_die (DECL_CONTEXT (func_decl)));
21114 }
21115 }
21116 \f
21117 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21118 given decl. This used to be a vendor extension until DWARF 4
21119 standardized it. */
21120
21121 static void
21122 add_linkage_attr (dw_die_ref die, tree decl)
21123 {
21124 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21125
21126 /* Mimic what assemble_name_raw does with a leading '*'. */
21127 if (name[0] == '*')
21128 name = &name[1];
21129
21130 if (dwarf_version >= 4)
21131 add_AT_string (die, DW_AT_linkage_name, name);
21132 else
21133 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21134 }
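/* As an illustrative sketch, for a C++ declaration such as

     namespace N { void f (int); }

   the DIE for F would typically carry DW_AT_name "f" together with
   DW_AT_linkage_name "_ZN1N1fEi", the Itanium-ABI mangled string taken
   from DECL_ASSEMBLER_NAME; for DWARF versions before 4 the same string
   goes into the DW_AT_MIPS_linkage_name extension instead, as above.  */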
21135
21136 /* Add source coordinate attributes for the given decl. */
21137
21138 static void
21139 add_src_coords_attributes (dw_die_ref die, tree decl)
21140 {
21141 expanded_location s;
21142
21143 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21144 return;
21145 s = expand_location (DECL_SOURCE_LOCATION (decl));
21146 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21147 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21148 if (debug_column_info && s.column)
21149 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21150 }
21151
21152 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21153
21154 static void
21155 add_linkage_name_raw (dw_die_ref die, tree decl)
21156 {
21157 /* Defer until we have an assembler name set. */
21158 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21159 {
21160 limbo_die_node *asm_name;
21161
21162 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21163 asm_name->die = die;
21164 asm_name->created_for = decl;
21165 asm_name->next = deferred_asm_name;
21166 deferred_asm_name = asm_name;
21167 }
21168 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21169 add_linkage_attr (die, decl);
21170 }
21171
21172 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21173
21174 static void
21175 add_linkage_name (dw_die_ref die, tree decl)
21176 {
21177 if (debug_info_level > DINFO_LEVEL_NONE
21178 && VAR_OR_FUNCTION_DECL_P (decl)
21179 && TREE_PUBLIC (decl)
21180 && !(VAR_P (decl) && DECL_REGISTER (decl))
21181 && die->die_tag != DW_TAG_member)
21182 add_linkage_name_raw (die, decl);
21183 }
21184
21185 /* Add a DW_AT_name attribute and source coordinate attribute for the
21186 given decl, but only if it actually has a name. */
21187
21188 static void
21189 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21190 bool no_linkage_name)
21191 {
21192 tree decl_name;
21193
21194 decl_name = DECL_NAME (decl);
21195 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21196 {
21197 const char *name = dwarf2_name (decl, 0);
21198 if (name)
21199 add_name_attribute (die, name);
21200 if (! DECL_ARTIFICIAL (decl))
21201 add_src_coords_attributes (die, decl);
21202
21203 if (!no_linkage_name)
21204 add_linkage_name (die, decl);
21205 }
21206
21207 #ifdef VMS_DEBUGGING_INFO
21208 /* Get the function's name, as described by its RTL. This may be different
21209 from the DECL_NAME name used in the source file. */
21210 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21211 {
21212 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21213 XEXP (DECL_RTL (decl), 0), false);
21214 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21215 }
21216 #endif /* VMS_DEBUGGING_INFO */
21217 }
21218
21219 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21220
21221 static void
21222 add_discr_value (dw_die_ref die, dw_discr_value *value)
21223 {
21224 dw_attr_node attr;
21225
21226 attr.dw_attr = DW_AT_discr_value;
21227 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21228 attr.dw_attr_val.val_entry = NULL;
21229 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21230 if (value->pos)
21231 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21232 else
21233 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21234 add_dwarf_attr (die, &attr);
21235 }
21236
21237 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21238
21239 static void
21240 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21241 {
21242 dw_attr_node attr;
21243
21244 attr.dw_attr = DW_AT_discr_list;
21245 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21246 attr.dw_attr_val.val_entry = NULL;
21247 attr.dw_attr_val.v.val_discr_list = discr_list;
21248 add_dwarf_attr (die, &attr);
21249 }
21250
21251 static inline dw_discr_list_ref
21252 AT_discr_list (dw_attr_node *attr)
21253 {
21254 return attr->dw_attr_val.v.val_discr_list;
21255 }
21256
21257 #ifdef VMS_DEBUGGING_INFO
21258 /* Output the debug main pointer die for VMS. */
21259
21260 void
21261 dwarf2out_vms_debug_main_pointer (void)
21262 {
21263 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21264 dw_die_ref die;
21265
21266 /* Allocate the VMS debug main subprogram die. */
21267 die = new_die_raw (DW_TAG_subprogram);
21268 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21269 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21270 current_function_funcdef_no);
21271 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21272
21273 /* Make it the first child of comp_unit_die (). */
21274 die->die_parent = comp_unit_die ();
21275 if (comp_unit_die ()->die_child)
21276 {
21277 die->die_sib = comp_unit_die ()->die_child->die_sib;
21278 comp_unit_die ()->die_child->die_sib = die;
21279 }
21280 else
21281 {
21282 die->die_sib = die;
21283 comp_unit_die ()->die_child = die;
21284 }
21285 }
21286 #endif /* VMS_DEBUGGING_INFO */
21287
21288 /* Push a new declaration scope. */
21289
21290 static void
21291 push_decl_scope (tree scope)
21292 {
21293 vec_safe_push (decl_scope_table, scope);
21294 }
21295
21296 /* Pop a declaration scope. */
21297
21298 static inline void
21299 pop_decl_scope (void)
21300 {
21301 decl_scope_table->pop ();
21302 }
21303
21304 /* walk_tree helper function for uses_local_type, below. */
21305
21306 static tree
21307 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21308 {
21309 if (!TYPE_P (*tp))
21310 *walk_subtrees = 0;
21311 else
21312 {
21313 tree name = TYPE_NAME (*tp);
21314 if (name && DECL_P (name) && decl_function_context (name))
21315 return *tp;
21316 }
21317 return NULL_TREE;
21318 }
21319
21320 /* If TYPE involves a function-local type (including a local typedef to a
21321 non-local type), returns that type; otherwise returns NULL_TREE. */
21322
21323 static tree
21324 uses_local_type (tree type)
21325 {
21326 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21327 return used;
21328 }
21329
21330 /* Return the DIE for the scope that immediately contains this type.
21331 Non-named types that do not involve a function-local type get global
21332 scope. Named types nested in namespaces or other types get their
21333 containing scope. All other types (i.e. function-local named types) get
21334 the current active scope. */
21335
21336 static dw_die_ref
21337 scope_die_for (tree t, dw_die_ref context_die)
21338 {
21339 dw_die_ref scope_die = NULL;
21340 tree containing_scope;
21341
21342 /* Non-types always go in the current scope. */
21343 gcc_assert (TYPE_P (t));
21344
21345 /* Use the scope of the typedef, rather than the scope of the type
21346 it refers to. */
21347 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21348 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21349 else
21350 containing_scope = TYPE_CONTEXT (t);
21351
21352 /* Use the containing namespace if there is one. */
21353 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21354 {
21355 if (context_die == lookup_decl_die (containing_scope))
21356 /* OK */;
21357 else if (debug_info_level > DINFO_LEVEL_TERSE)
21358 context_die = get_context_die (containing_scope);
21359 else
21360 containing_scope = NULL_TREE;
21361 }
21362
21363 /* Ignore function type "scopes" from the C frontend. They mean that
21364 a tagged type is local to a parmlist of a function declarator, but
21365 that isn't useful to DWARF. */
21366 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21367 containing_scope = NULL_TREE;
21368
21369 if (SCOPE_FILE_SCOPE_P (containing_scope))
21370 {
21371 /* If T uses a local type keep it local as well, to avoid references
21372 to function-local DIEs from outside the function. */
21373 if (current_function_decl && uses_local_type (t))
21374 scope_die = context_die;
21375 else
21376 scope_die = comp_unit_die ();
21377 }
21378 else if (TYPE_P (containing_scope))
21379 {
21380 /* For types, we can just look up the appropriate DIE. */
21381 if (debug_info_level > DINFO_LEVEL_TERSE)
21382 scope_die = get_context_die (containing_scope);
21383 else
21384 {
21385 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21386 if (scope_die == NULL)
21387 scope_die = comp_unit_die ();
21388 }
21389 }
21390 else
21391 scope_die = context_die;
21392
21393 return scope_die;
21394 }
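/* For instance, with C++ source such as

     namespace N { struct S { }; }

   the DIE for S is normally placed under the DW_TAG_namespace DIE for N,
   an unnamed file-scope type lands directly under the compilation unit
   DIE, and a named type local to a function stays in that function's
   scope.  This is only a sketch: the debug info level and the
   uses_local_type check above can shift the choice.  */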
21395
21396 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21397
21398 static inline int
21399 local_scope_p (dw_die_ref context_die)
21400 {
21401 for (; context_die; context_die = context_die->die_parent)
21402 if (context_die->die_tag == DW_TAG_inlined_subroutine
21403 || context_die->die_tag == DW_TAG_subprogram)
21404 return 1;
21405
21406 return 0;
21407 }
21408
21409 /* Returns nonzero if CONTEXT_DIE is a class. */
21410
21411 static inline int
21412 class_scope_p (dw_die_ref context_die)
21413 {
21414 return (context_die
21415 && (context_die->die_tag == DW_TAG_structure_type
21416 || context_die->die_tag == DW_TAG_class_type
21417 || context_die->die_tag == DW_TAG_interface_type
21418 || context_die->die_tag == DW_TAG_union_type));
21419 }
21420
21421 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21422 whether or not to treat a DIE in this context as a declaration. */
21423
21424 static inline int
21425 class_or_namespace_scope_p (dw_die_ref context_die)
21426 {
21427 return (class_scope_p (context_die)
21428 || (context_die && context_die->die_tag == DW_TAG_namespace));
21429 }
21430
21431 /* Many forms of DIEs require a "type description" attribute. This
21432 routine locates the proper "type descriptor" die for the type given
21433 by 'type' plus any additional qualifiers given by 'cv_quals', and
21434 adds a DW_AT_type attribute below the given die. */
21435
21436 static void
21437 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21438 bool reverse, dw_die_ref context_die)
21439 {
21440 enum tree_code code = TREE_CODE (type);
21441 dw_die_ref type_die = NULL;
21442
21443 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21444 or fixed-point type, use the inner type. This is because we have no
21445 support for unnamed types in base_type_die. This can happen if this is
21446 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21447 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21448 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21449 type = TREE_TYPE (type), code = TREE_CODE (type);
21450
21451 if (code == ERROR_MARK
21452 /* Handle a special case. For functions whose return type is void, we
21453 generate *no* type attribute. (Note that no object may have type
21454 `void', so this only applies to function return types). */
21455 || code == VOID_TYPE)
21456 return;
21457
21458 type_die = modified_type_die (type,
21459 cv_quals | TYPE_QUALS (type),
21460 reverse,
21461 context_die);
21462
21463 if (type_die != NULL)
21464 add_AT_die_ref (object_die, DW_AT_type, type_die);
21465 }
21466
21467 /* Given an object die, add the calling convention attribute for the
21468 function call type. */
21469 static void
21470 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21471 {
21472 enum dwarf_calling_convention value = DW_CC_normal;
21473
21474 value = ((enum dwarf_calling_convention)
21475 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21476
21477 if (is_fortran ()
21478 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21479 {
21480 /* DWARF 2 doesn't provide a way to identify a program's source-level
21481 entry point. DW_AT_calling_convention attributes are only meant
21482 to describe functions' calling conventions. However, lacking a
21483 better way to signal the Fortran main program, we used this for
21484 a long time, following existing custom. Now, DWARF 4 has
21485 DW_AT_main_subprogram, which we add below, but some tools still
21486 rely on the old way, which we thus keep. */
21487 value = DW_CC_program;
21488
21489 if (dwarf_version >= 4 || !dwarf_strict)
21490 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21491 }
21492
21493 /* Only add the attribute if the backend requests it and the
21494 value is not DW_CC_normal. */
21495 if (value && (value != DW_CC_normal))
21496 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21497 }
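/* Sketch: given a Fortran main program

     program p
     end program p

   gfortran emits the entry point as MAIN__, so its DW_TAG_subprogram
   would get DW_AT_calling_convention DW_CC_program and, with DWARF 4 or
   non-strict DWARF, DW_AT_main_subprogram 1.  An ordinary C function
   using the target's default calling convention receives neither
   attribute.  */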
21498
21499 /* Given a tree pointer to a struct, class, union, or enum type node, return
21500 a pointer to the (string) tag name for the given type, or zero if the type
21501 was declared without a tag. */
21502
21503 static const char *
21504 type_tag (const_tree type)
21505 {
21506 const char *name = 0;
21507
21508 if (TYPE_NAME (type) != 0)
21509 {
21510 tree t = 0;
21511
21512 /* Find the IDENTIFIER_NODE for the type name. */
21513 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21514 && !TYPE_NAMELESS (type))
21515 t = TYPE_NAME (type);
21516
21517 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21518 a TYPE_DECL node, regardless of whether or not a `typedef' was
21519 involved. */
21520 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21521 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21522 {
21523 /* We want to be extra verbose. Don't call dwarf_name if
21524 DECL_NAME isn't set. The default hook for decl_printable_name
21525 doesn't like that, and in this context it's correct to return
21526 0, instead of "<anonymous>" or the like. */
21527 if (DECL_NAME (TYPE_NAME (type))
21528 && !DECL_NAMELESS (TYPE_NAME (type)))
21529 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21530 }
21531
21532 /* Now get the name as a string, or invent one. */
21533 if (!name && t != 0)
21534 name = IDENTIFIER_POINTER (t);
21535 }
21536
21537 return (name == 0 || *name == '\0') ? 0 : name;
21538 }
21539
21540 /* Return the type associated with a data member, making a special check
21541 for bit-field types. */
21542
21543 static inline tree
21544 member_declared_type (const_tree member)
21545 {
21546 return (DECL_BIT_FIELD_TYPE (member)
21547 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21548 }
21549
21550 /* Get the decl's label, as described by its RTL. This may be different
21551 from the DECL_NAME name used in the source file. */
21552
21553 #if 0
21554 static const char *
21555 decl_start_label (tree decl)
21556 {
21557 rtx x;
21558 const char *fnname;
21559
21560 x = DECL_RTL (decl);
21561 gcc_assert (MEM_P (x));
21562
21563 x = XEXP (x, 0);
21564 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21565
21566 fnname = XSTR (x, 0);
21567 return fnname;
21568 }
21569 #endif
21570 \f
21571 /* For variable-length arrays that have been previously generated, but
21572 may be incomplete due to missing subscript info, fill the subscript
21573 info. Return TRUE if this is one of those cases. */
21574 static bool
21575 fill_variable_array_bounds (tree type)
21576 {
21577 if (TREE_ASM_WRITTEN (type)
21578 && TREE_CODE (type) == ARRAY_TYPE
21579 && variably_modified_type_p (type, NULL))
21580 {
21581 dw_die_ref array_die = lookup_type_die (type);
21582 if (!array_die)
21583 return false;
21584 add_subscript_info (array_die, type, !is_ada ());
21585 return true;
21586 }
21587 return false;
21588 }
21589
21590 /* These routines generate the internal representation of the DIE's for
21591 the compilation unit. Debugging information is collected by walking
21592 the declaration trees passed in from dwarf2out_decl(). */
21593
21594 static void
21595 gen_array_type_die (tree type, dw_die_ref context_die)
21596 {
21597 dw_die_ref array_die;
21598
21599 /* GNU compilers represent multidimensional array types as sequences of one
21600 dimensional array types whose element types are themselves array types.
21601 We sometimes squish that down to a single array_type DIE with multiple
21602 subscripts in the Dwarf debugging info. The draft Dwarf specification
21603 says that we are allowed to do this kind of compression in C, because
21604 there is no difference between an array of arrays and a multidimensional
21605 array. We don't do this for Ada to remain as close as possible to the
21606 actual representation, which is especially important given the language's
21607 flexibility with respect to arrays of variable size. */
21608
21609 bool collapse_nested_arrays = !is_ada ();
21610
21611 if (fill_variable_array_bounds (type))
21612 return;
21613
21614 dw_die_ref scope_die = scope_die_for (type, context_die);
21615 tree element_type;
21616
21617 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21618 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21619 if (TYPE_STRING_FLAG (type)
21620 && TREE_CODE (type) == ARRAY_TYPE
21621 && is_fortran ()
21622 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21623 {
21624 HOST_WIDE_INT size;
21625
21626 array_die = new_die (DW_TAG_string_type, scope_die, type);
21627 add_name_attribute (array_die, type_tag (type));
21628 equate_type_number_to_die (type, array_die);
21629 size = int_size_in_bytes (type);
21630 if (size >= 0)
21631 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21632 /* ??? We can't annotate types late, but for LTO we may not
21633 generate a location early either (gfortran.dg/save_6.f90). */
21634 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21635 && TYPE_DOMAIN (type) != NULL_TREE
21636 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21637 {
21638 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21639 tree rszdecl = szdecl;
21640
21641 size = int_size_in_bytes (TREE_TYPE (szdecl));
21642 if (!DECL_P (szdecl))
21643 {
21644 if (TREE_CODE (szdecl) == INDIRECT_REF
21645 && DECL_P (TREE_OPERAND (szdecl, 0)))
21646 {
21647 rszdecl = TREE_OPERAND (szdecl, 0);
21648 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21649 != DWARF2_ADDR_SIZE)
21650 size = 0;
21651 }
21652 else
21653 size = 0;
21654 }
21655 if (size > 0)
21656 {
21657 dw_loc_list_ref loc
21658 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21659 NULL);
21660 if (loc)
21661 {
21662 add_AT_location_description (array_die, DW_AT_string_length,
21663 loc);
21664 if (size != DWARF2_ADDR_SIZE)
21665 add_AT_unsigned (array_die, dwarf_version >= 5
21666 ? DW_AT_string_length_byte_size
21667 : DW_AT_byte_size, size);
21668 }
21669 }
21670 }
21671 return;
21672 }
21673
21674 array_die = new_die (DW_TAG_array_type, scope_die, type);
21675 add_name_attribute (array_die, type_tag (type));
21676 equate_type_number_to_die (type, array_die);
21677
21678 if (TREE_CODE (type) == VECTOR_TYPE)
21679 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21680
21681 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21682 if (is_fortran ()
21683 && TREE_CODE (type) == ARRAY_TYPE
21684 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21685 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21686 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21687
21688 #if 0
21689 /* We default the array ordering. Debuggers will probably do the right
21690 things even if DW_AT_ordering is not present. It's not even an issue
21691 until we start to get into multidimensional arrays anyway. If a debugger
21692 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21693 then we'll have to put the DW_AT_ordering attribute back in. (But if
21694 and when we find out that we need to put these in, we will only do so
21695 for multidimensional arrays.) */
21696 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21697 #endif
21698
21699 if (TREE_CODE (type) == VECTOR_TYPE)
21700 {
21701 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21702 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21703 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21704 add_bound_info (subrange_die, DW_AT_upper_bound,
21705 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21706 }
21707 else
21708 add_subscript_info (array_die, type, collapse_nested_arrays);
21709
21710 /* Add representation of the type of the elements of this array type and
21711 emit the corresponding DIE if we haven't done it already. */
21712 element_type = TREE_TYPE (type);
21713 if (collapse_nested_arrays)
21714 while (TREE_CODE (element_type) == ARRAY_TYPE)
21715 {
21716 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21717 break;
21718 element_type = TREE_TYPE (element_type);
21719 }
21720
21721 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21722 TREE_CODE (type) == ARRAY_TYPE
21723 && TYPE_REVERSE_STORAGE_ORDER (type),
21724 context_die);
21725
21726 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21727 if (TYPE_ARTIFICIAL (type))
21728 add_AT_flag (array_die, DW_AT_artificial, 1);
21729
21730 if (get_AT (array_die, DW_AT_name))
21731 add_pubtype (type, array_die);
21732
21733 add_alignment_attribute (array_die, type);
21734 }
21735
21736 /* This routine generates a DIE for an array with a hidden descriptor;
21737 details are filled into *info by a langhook. */
21738
21739 static void
21740 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21741 dw_die_ref context_die)
21742 {
21743 const dw_die_ref scope_die = scope_die_for (type, context_die);
21744 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21745 struct loc_descr_context context = { type, info->base_decl, NULL,
21746 false, false };
21747 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21748 int dim;
21749
21750 add_name_attribute (array_die, type_tag (type));
21751 equate_type_number_to_die (type, array_die);
21752
21753 if (info->ndimensions > 1)
21754 switch (info->ordering)
21755 {
21756 case array_descr_ordering_row_major:
21757 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21758 break;
21759 case array_descr_ordering_column_major:
21760 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21761 break;
21762 default:
21763 break;
21764 }
21765
21766 if (dwarf_version >= 3 || !dwarf_strict)
21767 {
21768 if (info->data_location)
21769 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21770 dw_scalar_form_exprloc, &context);
21771 if (info->associated)
21772 add_scalar_info (array_die, DW_AT_associated, info->associated,
21773 dw_scalar_form_constant
21774 | dw_scalar_form_exprloc
21775 | dw_scalar_form_reference, &context);
21776 if (info->allocated)
21777 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21778 dw_scalar_form_constant
21779 | dw_scalar_form_exprloc
21780 | dw_scalar_form_reference, &context);
21781 if (info->stride)
21782 {
21783 const enum dwarf_attribute attr
21784 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21785 const int forms
21786 = (info->stride_in_bits)
21787 ? dw_scalar_form_constant
21788 : (dw_scalar_form_constant
21789 | dw_scalar_form_exprloc
21790 | dw_scalar_form_reference);
21791
21792 add_scalar_info (array_die, attr, info->stride, forms, &context);
21793 }
21794 }
21795 if (dwarf_version >= 5)
21796 {
21797 if (info->rank)
21798 {
21799 add_scalar_info (array_die, DW_AT_rank, info->rank,
21800 dw_scalar_form_constant
21801 | dw_scalar_form_exprloc, &context);
21802 subrange_tag = DW_TAG_generic_subrange;
21803 context.placeholder_arg = true;
21804 }
21805 }
21806
21807 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21808
21809 for (dim = 0; dim < info->ndimensions; dim++)
21810 {
21811 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21812
21813 if (info->dimen[dim].bounds_type)
21814 add_type_attribute (subrange_die,
21815 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21816 false, context_die);
21817 if (info->dimen[dim].lower_bound)
21818 add_bound_info (subrange_die, DW_AT_lower_bound,
21819 info->dimen[dim].lower_bound, &context);
21820 if (info->dimen[dim].upper_bound)
21821 add_bound_info (subrange_die, DW_AT_upper_bound,
21822 info->dimen[dim].upper_bound, &context);
21823 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21824 add_scalar_info (subrange_die, DW_AT_byte_stride,
21825 info->dimen[dim].stride,
21826 dw_scalar_form_constant
21827 | dw_scalar_form_exprloc
21828 | dw_scalar_form_reference,
21829 &context);
21830 }
21831
21832 gen_type_die (info->element_type, context_die);
21833 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21834 TREE_CODE (type) == ARRAY_TYPE
21835 && TYPE_REVERSE_STORAGE_ORDER (type),
21836 context_die);
21837
21838 if (get_AT (array_die, DW_AT_name))
21839 add_pubtype (type, array_die);
21840
21841 add_alignment_attribute (array_die, type);
21842 }
21843
21844 #if 0
21845 static void
21846 gen_entry_point_die (tree decl, dw_die_ref context_die)
21847 {
21848 tree origin = decl_ultimate_origin (decl);
21849 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21850
21851 if (origin != NULL)
21852 add_abstract_origin_attribute (decl_die, origin);
21853 else
21854 {
21855 add_name_and_src_coords_attributes (decl_die, decl);
21856 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21857 TYPE_UNQUALIFIED, false, context_die);
21858 }
21859
21860 if (DECL_ABSTRACT_P (decl))
21861 equate_decl_number_to_die (decl, decl_die);
21862 else
21863 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21864 }
21865 #endif
21866
21867 /* Walk through the list of incomplete types again, trying once more to
21868 emit full debugging info for them. */
21869
21870 static void
21871 retry_incomplete_types (void)
21872 {
21873 set_early_dwarf s;
21874 int i;
21875
21876 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21877 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21878 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21879 vec_safe_truncate (incomplete_types, 0);
21880 }
21881
21882 /* Determine what tag to use for a record type. */
21883
21884 static enum dwarf_tag
21885 record_type_tag (tree type)
21886 {
21887 if (! lang_hooks.types.classify_record)
21888 return DW_TAG_structure_type;
21889
21890 switch (lang_hooks.types.classify_record (type))
21891 {
21892 case RECORD_IS_STRUCT:
21893 return DW_TAG_structure_type;
21894
21895 case RECORD_IS_CLASS:
21896 return DW_TAG_class_type;
21897
21898 case RECORD_IS_INTERFACE:
21899 if (dwarf_version >= 3 || !dwarf_strict)
21900 return DW_TAG_interface_type;
21901 return DW_TAG_structure_type;
21902
21903 default:
21904 gcc_unreachable ();
21905 }
21906 }
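/* For instance, the C++ front end's classify_record hook typically
   reports "class c { };" as RECORD_IS_CLASS and "struct s { };" as
   RECORD_IS_STRUCT, yielding DW_TAG_class_type and DW_TAG_structure_type
   respectively; front ends without the hook (plain C, say) always get
   DW_TAG_structure_type.  The distinction reflects the source-level
   keyword, not any difference in layout.  */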
21907
21908 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21909 include all of the information about the enumeration values also. Each
21910 enumerated type name/value is listed as a child of the enumerated type
21911 DIE. */
21912
21913 static dw_die_ref
21914 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21915 {
21916 dw_die_ref type_die = lookup_type_die (type);
21917 dw_die_ref orig_type_die = type_die;
21918
21919 if (type_die == NULL)
21920 {
21921 type_die = new_die (DW_TAG_enumeration_type,
21922 scope_die_for (type, context_die), type);
21923 equate_type_number_to_die (type, type_die);
21924 add_name_attribute (type_die, type_tag (type));
21925 if ((dwarf_version >= 4 || !dwarf_strict)
21926 && ENUM_IS_SCOPED (type))
21927 add_AT_flag (type_die, DW_AT_enum_class, 1);
21928 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21929 add_AT_flag (type_die, DW_AT_declaration, 1);
21930 if (!dwarf_strict)
21931 add_AT_unsigned (type_die, DW_AT_encoding,
21932 TYPE_UNSIGNED (type)
21933 ? DW_ATE_unsigned
21934 : DW_ATE_signed);
21935 }
21936 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21937 return type_die;
21938 else
21939 remove_AT (type_die, DW_AT_declaration);
21940
21941 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21942 given enum type is incomplete, do not generate the DW_AT_byte_size
21943 attribute or the DW_AT_element_list attribute. */
21944 if (TYPE_SIZE (type))
21945 {
21946 tree link;
21947
21948 if (!ENUM_IS_OPAQUE (type))
21949 TREE_ASM_WRITTEN (type) = 1;
21950 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21951 add_byte_size_attribute (type_die, type);
21952 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21953 add_alignment_attribute (type_die, type);
21954 if ((dwarf_version >= 3 || !dwarf_strict)
21955 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21956 {
21957 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21958 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21959 context_die);
21960 }
21961 if (TYPE_STUB_DECL (type) != NULL_TREE)
21962 {
21963 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
21964 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21965 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
21966 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21967 }
21968
21969 /* If the first reference to this type was as the return type of an
21970 inline function, then it may not have a parent. Fix this now. */
21971 if (type_die->die_parent == NULL)
21972 add_child_die (scope_die_for (type, context_die), type_die);
21973
21974 for (link = TYPE_VALUES (type);
21975 link != NULL; link = TREE_CHAIN (link))
21976 {
21977 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21978 tree value = TREE_VALUE (link);
21979
21980 gcc_assert (!ENUM_IS_OPAQUE (type));
21981 add_name_attribute (enum_die,
21982 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21983
21984 if (TREE_CODE (value) == CONST_DECL)
21985 value = DECL_INITIAL (value);
21986
21987 if (simple_type_size_in_bits (TREE_TYPE (value))
21988 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21989 {
21990 /* For constant forms created by add_AT_unsigned, DWARF
21991 consumers (GDB, elfutils, etc.) always zero-extend
21992 the value. Only when the actual value is negative
21993 do we need to use add_AT_int to generate a constant
21994 form that can represent negative values. */
21995 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21996 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21997 add_AT_unsigned (enum_die, DW_AT_const_value,
21998 (unsigned HOST_WIDE_INT) val);
21999 else
22000 add_AT_int (enum_die, DW_AT_const_value, val);
22001 }
22002 else
22003 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22004 that here. TODO: This should be re-worked to use correct
22005 signed/unsigned double tags for all cases. */
22006 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22007 }
22008
22009 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22010 if (TYPE_ARTIFICIAL (type)
22011 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22012 add_AT_flag (type_die, DW_AT_artificial, 1);
22013 }
22014 else
22015 add_AT_flag (type_die, DW_AT_declaration, 1);
22016
22017 add_pubtype (type, type_die);
22018
22019 return type_die;
22020 }
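/* Example sketch: a C enumeration such as

     enum color { RED, GREEN = 5, BLUE };

   would produce a DW_TAG_enumeration_type DIE, with DW_AT_byte_size, an
   underlying-type DW_AT_type where the DWARF version permits it, and
   DW_TAG_enumerator children whose DW_AT_name/DW_AT_const_value pairs
   are RED/0, GREEN/5 and BLUE/6.  */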
22021
22022 /* Generate a DIE to represent either a real live formal parameter decl or to
22023 represent just the type of some formal parameter position in some function
22024 type.
22025
22026 Note that this routine is a bit unusual because its argument may be a
22027 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22028 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22029 node. If it's the former then this function is being called to output a
22030 DIE to represent a formal parameter object (or some inlining thereof). If
22031 it's the latter, then this function is only being called to output a
22032 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22033 argument type of some subprogram type.
22034 If EMIT_NAME_P is true, name and source coordinate attributes
22035 are emitted. */
22036
22037 static dw_die_ref
22038 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22039 dw_die_ref context_die)
22040 {
22041 tree node_or_origin = node ? node : origin;
22042 tree ultimate_origin;
22043 dw_die_ref parm_die = NULL;
22044
22045 if (DECL_P (node_or_origin))
22046 {
22047 parm_die = lookup_decl_die (node);
22048
22049 /* If the contexts differ, we may not be talking about the same
22050 thing.
22051 ??? When in LTO the DIE parent is the "abstract" copy and the
22052 context_die is the specification "copy". But this whole block
22053 should eventually no longer be needed. */
22054 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22055 {
22056 if (!DECL_ABSTRACT_P (node))
22057 {
22058 /* This can happen when creating an inlined instance, in
22059 which case we need to create a new DIE that will get
22060 annotated with DW_AT_abstract_origin. */
22061 parm_die = NULL;
22062 }
22063 else
22064 gcc_unreachable ();
22065 }
22066
22067 if (parm_die && parm_die->die_parent == NULL)
22068 {
22069 /* Check that parm_die already has the right attributes that
22070 we would have added below. If any attributes are
22071 missing, fall through to add them. */
22072 if (! DECL_ABSTRACT_P (node_or_origin)
22073 && !get_AT (parm_die, DW_AT_location)
22074 && !get_AT (parm_die, DW_AT_const_value))
22075 /* We are missing location info, and are about to add it. */
22076 ;
22077 else
22078 {
22079 add_child_die (context_die, parm_die);
22080 return parm_die;
22081 }
22082 }
22083 }
22084
22085 /* If we have a previously generated DIE, use it, unless this is a
22086 concrete instance (origin != NULL), in which case we need a new
22087 DIE with a corresponding DW_AT_abstract_origin. */
22088 bool reusing_die;
22089 if (parm_die && origin == NULL)
22090 reusing_die = true;
22091 else
22092 {
22093 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22094 reusing_die = false;
22095 }
22096
22097 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22098 {
22099 case tcc_declaration:
22100 ultimate_origin = decl_ultimate_origin (node_or_origin);
22101 if (node || ultimate_origin)
22102 origin = ultimate_origin;
22103
22104 if (reusing_die)
22105 goto add_location;
22106
22107 if (origin != NULL)
22108 add_abstract_origin_attribute (parm_die, origin);
22109 else if (emit_name_p)
22110 add_name_and_src_coords_attributes (parm_die, node);
22111 if (origin == NULL
22112 || (! DECL_ABSTRACT_P (node_or_origin)
22113 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22114 decl_function_context
22115 (node_or_origin))))
22116 {
22117 tree type = TREE_TYPE (node_or_origin);
22118 if (decl_by_reference_p (node_or_origin))
22119 add_type_attribute (parm_die, TREE_TYPE (type),
22120 TYPE_UNQUALIFIED,
22121 false, context_die);
22122 else
22123 add_type_attribute (parm_die, type,
22124 decl_quals (node_or_origin),
22125 false, context_die);
22126 }
22127 if (origin == NULL && DECL_ARTIFICIAL (node))
22128 add_AT_flag (parm_die, DW_AT_artificial, 1);
22129 add_location:
22130 if (node && node != origin)
22131 equate_decl_number_to_die (node, parm_die);
22132 if (! DECL_ABSTRACT_P (node_or_origin))
22133 add_location_or_const_value_attribute (parm_die, node_or_origin,
22134 node == NULL);
22135
22136 break;
22137
22138 case tcc_type:
22139 /* We were called with some kind of a ..._TYPE node. */
22140 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22141 context_die);
22142 break;
22143
22144 default:
22145 gcc_unreachable ();
22146 }
22147
22148 return parm_die;
22149 }
22150
22151 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22152 children DW_TAG_formal_parameter DIEs representing the arguments of the
22153 parameter pack.
22154
22155 PARM_PACK must be a function parameter pack.
22156 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22157 must point to the subsequent arguments of the function PACK_ARG belongs to.
22158 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22159 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22160 following the last one for which a DIE was generated. */
22161
22162 static dw_die_ref
22163 gen_formal_parameter_pack_die (tree parm_pack,
22164 tree pack_arg,
22165 dw_die_ref subr_die,
22166 tree *next_arg)
22167 {
22168 tree arg;
22169 dw_die_ref parm_pack_die;
22170
22171 gcc_assert (parm_pack
22172 && lang_hooks.function_parameter_pack_p (parm_pack)
22173 && subr_die);
22174
22175 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22176 add_src_coords_attributes (parm_pack_die, parm_pack);
22177
22178 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22179 {
22180 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22181 parm_pack))
22182 break;
22183 gen_formal_parameter_die (arg, NULL,
22184 false /* Don't emit name attribute. */,
22185 parm_pack_die);
22186 }
22187 if (next_arg)
22188 *next_arg = arg;
22189 return parm_pack_die;
22190 }
22191
22192 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22193 at the end of an (ANSI prototyped) formal parameters list. */
22194
22195 static void
22196 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22197 {
22198 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22199 }
22200
22201 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22202 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22203 parameters as specified in some function type specification (except for
22204 those which appear as part of a function *definition*). */
22205
22206 static void
22207 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22208 {
22209 tree link;
22210 tree formal_type = NULL;
22211 tree first_parm_type;
22212 tree arg;
22213
22214 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22215 {
22216 arg = DECL_ARGUMENTS (function_or_method_type);
22217 function_or_method_type = TREE_TYPE (function_or_method_type);
22218 }
22219 else
22220 arg = NULL_TREE;
22221
22222 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22223
22224 /* Make our first pass over the list of formal parameter types and output a
22225 DW_TAG_formal_parameter DIE for each one. */
22226 for (link = first_parm_type; link; )
22227 {
22228 dw_die_ref parm_die;
22229
22230 formal_type = TREE_VALUE (link);
22231 if (formal_type == void_type_node)
22232 break;
22233
22234 /* Output a (nameless) DIE to represent the formal parameter itself. */
22235 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22236 {
22237 parm_die = gen_formal_parameter_die (formal_type, NULL,
22238 true /* Emit name attribute. */,
22239 context_die);
22240 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22241 && link == first_parm_type)
22242 {
22243 add_AT_flag (parm_die, DW_AT_artificial, 1);
22244 if (dwarf_version >= 3 || !dwarf_strict)
22245 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22246 }
22247 else if (arg && DECL_ARTIFICIAL (arg))
22248 add_AT_flag (parm_die, DW_AT_artificial, 1);
22249 }
22250
22251 link = TREE_CHAIN (link);
22252 if (arg)
22253 arg = DECL_CHAIN (arg);
22254 }
22255
22256 /* If this function type has an ellipsis, add a
22257 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22258 if (formal_type != void_type_node)
22259 gen_unspecified_parameters_die (function_or_method_type, context_die);
22260
22261 /* Make our second (and final) pass over the list of formal parameter types
22262 and output DIEs to represent those types (as necessary). */
22263 for (link = TYPE_ARG_TYPES (function_or_method_type);
22264 link && TREE_VALUE (link);
22265 link = TREE_CHAIN (link))
22266 gen_type_die (TREE_VALUE (link), context_die);
22267 }
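/* As a sketch, for a function type such as

     int (*fp) (int, ...);

   the first pass above emits one nameless DW_TAG_formal_parameter child
   (of type int) under the subroutine type's DIE, and because the
   argument list does not end in void_type_node the trailing ellipsis is
   represented by a DW_TAG_unspecified_parameters child; the second pass
   then makes sure a DIE for int itself exists.  */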
22268
22269 /* We want to generate the DIE for TYPE so that we can generate the
22270 die for MEMBER, which has been defined; we will need to refer back
22271 to the member declaration nested within TYPE. If we're trying to
22272 generate minimal debug info for TYPE, processing TYPE won't do the
22273 trick; we need to attach the member declaration by hand. */
22274
22275 static void
22276 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22277 {
22278 gen_type_die (type, context_die);
22279
22280 /* If we're trying to avoid duplicate debug info, we may not have
22281 emitted the member decl for this function. Emit it now. */
22282 if (TYPE_STUB_DECL (type)
22283 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22284 && ! lookup_decl_die (member))
22285 {
22286 dw_die_ref type_die;
22287 gcc_assert (!decl_ultimate_origin (member));
22288
22289 push_decl_scope (type);
22290 type_die = lookup_type_die_strip_naming_typedef (type);
22291 if (TREE_CODE (member) == FUNCTION_DECL)
22292 gen_subprogram_die (member, type_die);
22293 else if (TREE_CODE (member) == FIELD_DECL)
22294 {
22295 /* Ignore the nameless fields that are used to skip bits but handle
22296 C++ anonymous unions and structs. */
22297 if (DECL_NAME (member) != NULL_TREE
22298 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22299 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22300 {
22301 struct vlr_context vlr_ctx = {
22302 DECL_CONTEXT (member), /* struct_type */
22303 NULL_TREE /* variant_part_offset */
22304 };
22305 gen_type_die (member_declared_type (member), type_die);
22306 gen_field_die (member, &vlr_ctx, type_die);
22307 }
22308 }
22309 else
22310 gen_variable_die (member, NULL_TREE, type_die);
22311
22312 pop_decl_scope ();
22313 }
22314 }
22315 \f
22316 /* Forward declare these functions, because they are mutually recursive
22317 with their set_block_* pairing functions. */
22318 static void set_decl_origin_self (tree);
22319
22320 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22321 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22322 that it points to the node itself, thus indicating that the node is its
22323 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22324 the given node is NULL, recursively descend the decl/block tree which
22325 it is the root of, and for each other ..._DECL or BLOCK node contained
22326 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22327 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22328 values to point to themselves. */
22329
22330 static void
22331 set_block_origin_self (tree stmt)
22332 {
22333 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22334 {
22335 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22336
22337 {
22338 tree local_decl;
22339
22340 for (local_decl = BLOCK_VARS (stmt);
22341 local_decl != NULL_TREE;
22342 local_decl = DECL_CHAIN (local_decl))
22343 /* Do not recurse on nested functions since the inlining status
22344 of parent and child can be different as per the DWARF spec. */
22345 if (TREE_CODE (local_decl) != FUNCTION_DECL
22346 && !DECL_EXTERNAL (local_decl))
22347 set_decl_origin_self (local_decl);
22348 }
22349
22350 {
22351 tree subblock;
22352
22353 for (subblock = BLOCK_SUBBLOCKS (stmt);
22354 subblock != NULL_TREE;
22355 subblock = BLOCK_CHAIN (subblock))
22356 set_block_origin_self (subblock); /* Recurse. */
22357 }
22358 }
22359 }
22360
22361 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22362 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22363 node so that it points to the node itself, thus indicating that the
22364 node represents its own (abstract) origin. Additionally, if the
22365 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22366 the decl/block tree of which the given node is the root, and for
22367 each other ..._DECL or BLOCK node contained therein whose
22368 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22369 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22370 point to themselves. */
22371
22372 static void
22373 set_decl_origin_self (tree decl)
22374 {
22375 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22376 {
22377 DECL_ABSTRACT_ORIGIN (decl) = decl;
22378 if (TREE_CODE (decl) == FUNCTION_DECL)
22379 {
22380 tree arg;
22381
22382 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22383 DECL_ABSTRACT_ORIGIN (arg) = arg;
22384 if (DECL_INITIAL (decl) != NULL_TREE
22385 && DECL_INITIAL (decl) != error_mark_node)
22386 set_block_origin_self (DECL_INITIAL (decl));
22387 }
22388 }
22389 }
22390 \f
22391 /* Mark the early DIE for DECL as the abstract instance. */
22392
22393 static void
22394 dwarf2out_abstract_function (tree decl)
22395 {
22396 dw_die_ref old_die;
22397
22398 /* Make sure we have the actual abstract inline, not a clone. */
22399 decl = DECL_ORIGIN (decl);
22400
22401 if (DECL_IGNORED_P (decl))
22402 return;
22403
22404 old_die = lookup_decl_die (decl);
22405 /* With early debug we always have an old DIE unless we are in LTO
22406 and the user did not compile with debug info but only linked with it. */
22407 if (in_lto_p && ! old_die)
22408 return;
22409 gcc_assert (old_die != NULL);
22410 if (get_AT (old_die, DW_AT_inline)
22411 || get_AT (old_die, DW_AT_abstract_origin))
22412 /* We've already generated the abstract instance. */
22413 return;
22414
22415 /* Go ahead and put DW_AT_inline on the DIE. */
22416 if (DECL_DECLARED_INLINE_P (decl))
22417 {
22418 if (cgraph_function_possibly_inlined_p (decl))
22419 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22420 else
22421 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22422 }
22423 else
22424 {
22425 if (cgraph_function_possibly_inlined_p (decl))
22426 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22427 else
22428 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22429 }
22430
22431 if (DECL_DECLARED_INLINE_P (decl)
22432 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22433 add_AT_flag (old_die, DW_AT_artificial, 1);
22434
22435 set_decl_origin_self (decl);
22436 }
22437
22438 /* Helper function of premark_used_types() which gets called through
22439 htab_traverse.
22440
22441 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22442 marked as unused by prune_unused_types. */
22443
22444 bool
22445 premark_used_types_helper (tree const &type, void *)
22446 {
22447 dw_die_ref die;
22448
22449 die = lookup_type_die (type);
22450 if (die != NULL)
22451 die->die_perennial_p = 1;
22452 return true;
22453 }
22454
22455 /* Helper function of premark_types_used_by_global_vars which gets called
22456 through hash_table::traverse.
22457
22458 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22459 marked as unused by prune_unused_types. The DIE of the type is marked
22460 only if the global variable using the type will actually be emitted. */
22461
22462 int
22463 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22464 void *)
22465 {
22466 struct types_used_by_vars_entry *entry;
22467 dw_die_ref die;
22468
22469 entry = (struct types_used_by_vars_entry *) *slot;
22470 gcc_assert (entry->type != NULL
22471 && entry->var_decl != NULL);
22472 die = lookup_type_die (entry->type);
22473 if (die)
22474 {
22475 /* Ask cgraph if the global variable really is to be emitted.
22476 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22477 varpool_node *node = varpool_node::get (entry->var_decl);
22478 if (node && node->definition)
22479 {
22480 die->die_perennial_p = 1;
22481 /* Keep the parent DIEs as well. */
22482 while ((die = die->die_parent) && die->die_perennial_p == 0)
22483 die->die_perennial_p = 1;
22484 }
22485 }
22486 return 1;
22487 }
22488
22489 /* Mark all members of used_types_hash as perennial. */
22490
22491 static void
22492 premark_used_types (struct function *fun)
22493 {
22494 if (fun && fun->used_types_hash)
22495 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22496 }
22497
22498 /* Mark all members of types_used_by_vars_entry as perennial. */
22499
22500 static void
22501 premark_types_used_by_global_vars (void)
22502 {
22503 if (types_used_by_vars_hash)
22504 types_used_by_vars_hash
22505 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22506 }
22507
22508 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22509 for the call argument location node CA_LOC. */
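
/* An illustrative sketch of the resulting DIE (attribute names shown in
   their DWARF 5 spelling; dwarf_TAG and dwarf_AT map them to the
   pre-DWARF 5 equivalents when needed):

     DW_TAG_call_site
       DW_AT_call_return_pc   the return-address label recorded in CA_LOC
       DW_AT_call_tail_call   present when CA_LOC marks a tail call
       DW_AT_call_origin      DIE (or address) of the known callee  */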
22510
22511 static dw_die_ref
22512 gen_call_site_die (tree decl, dw_die_ref subr_die,
22513 struct call_arg_loc_node *ca_loc)
22514 {
22515 dw_die_ref stmt_die = NULL, die;
22516 tree block = ca_loc->block;
22517
22518 while (block
22519 && block != DECL_INITIAL (decl)
22520 && TREE_CODE (block) == BLOCK)
22521 {
22522 stmt_die = BLOCK_DIE (block);
22523 if (stmt_die)
22524 break;
22525 block = BLOCK_SUPERCONTEXT (block);
22526 }
22527 if (stmt_die == NULL)
22528 stmt_die = subr_die;
22529 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22530 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22531 if (ca_loc->tail_call_p)
22532 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22533 if (ca_loc->symbol_ref)
22534 {
22535 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22536 if (tdie)
22537 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22538 else
22539 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22540 false);
22541 }
22542 return die;
22543 }
22544
22545 /* Generate a DIE to represent a declared function (either file-scope or
22546 block-local). */
22547
22548 static void
22549 gen_subprogram_die (tree decl, dw_die_ref context_die)
22550 {
22551 tree origin = decl_ultimate_origin (decl);
22552 dw_die_ref subr_die;
22553 dw_die_ref old_die = lookup_decl_die (decl);
22554
22555 /* This function gets called multiple times for different stages of
22556 the debug process. For example, for func() in this code:
22557
22558 namespace S
22559 {
22560 void func() { ... }
22561 }
22562
22563 ...we get called 4 times. Twice in early debug and twice in
22564 late debug:
22565
22566 Early debug
22567 -----------
22568
22569 1. Once while generating func() within the namespace. This is
22570 the declaration. The declaration bit below is set, as the
22571 context is the namespace.
22572
22573 A new DIE will be generated with DW_AT_declaration set.
22574
22575 2. Once for func() itself. This is the specification. The
22576 declaration bit below is clear as the context is the CU.
22577
22578 We will use the cached DIE from (1) to create a new DIE with
22579 DW_AT_specification pointing to the declaration in (1).
22580
22581 Late debug via rest_of_handle_final()
22582 -------------------------------------
22583
22584 3. Once while generating func() within the namespace. This is also the
22585 declaration, as in (1), but this time we will early exit below
22586 as we have a cached DIE and a declaration needs no additional
22587 annotations (no locations), as the source declaration line
22588 info is enough.
22589
22590 4. Once for func() itself. As in (2), this is the specification,
22591 but this time we will re-use the cached DIE, and just annotate
22592 it with the location information that should now be available.
22593
22594 For something without namespaces, but with abstract instances, we
22595 are also called multiple times:
22596
22597 class Base
22598 {
22599 public:
22600 Base (); // constructor declaration (1)
22601 };
22602
22603 Base::Base () { } // constructor specification (2)
22604
22605 Early debug
22606 -----------
22607
22608 1. Once for the Base() constructor by virtue of it being a
22609 member of the Base class. This is done via
22610 rest_of_type_compilation.
22611
22612 This is a declaration, so a new DIE will be created with
22613 DW_AT_declaration.
22614
22615 2. Once for the Base() constructor definition, but this time
22616 while generating the abstract instance of the base
22617 constructor (__base_ctor) which is being generated via early
22618 debug of reachable functions.
22619
22620 Even though we have a cached version of the declaration (1),
22621 we will create a DW_AT_specification of the declaration DIE
22622 in (1).
22623
22624 3. Once for the __base_ctor itself, but this time, we generate
22625 a DW_AT_abstract_origin version of the DW_AT_specification in
22626 (2).
22627
22628 Late debug via rest_of_handle_final
22629 -----------------------------------
22630
22631 4. One final time for the __base_ctor (which will have a cached
22632 DIE with DW_AT_abstract_origin created in (3)). This time,
22633 we will just annotate it with the location information now
22634 available.
22635 */
22636 int declaration = (current_function_decl != decl
22637 || class_or_namespace_scope_p (context_die));
22638
22639 /* A declaration that has been previously dumped needs no
22640 additional information. */
22641 if (old_die && declaration)
22642 return;
22643
22644 /* Now that the C++ front end lazily declares artificial member fns, we
22645 might need to retrofit the declaration into its class. */
22646 if (!declaration && !origin && !old_die
22647 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22648 && !class_or_namespace_scope_p (context_die)
22649 && debug_info_level > DINFO_LEVEL_TERSE)
22650 old_die = force_decl_die (decl);
22651
22652 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22653 if (origin != NULL)
22654 {
22655 gcc_assert (!declaration || local_scope_p (context_die));
22656
22657 /* Fixup die_parent for the abstract instance of a nested
22658 inline function. */
22659 if (old_die && old_die->die_parent == NULL)
22660 add_child_die (context_die, old_die);
22661
22662 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22663 {
22664 /* If we have a DW_AT_abstract_origin we have a working
22665 cached version. */
22666 subr_die = old_die;
22667 }
22668 else
22669 {
22670 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22671 add_abstract_origin_attribute (subr_die, origin);
22672 /* This is where the actual code for a cloned function is.
22673 Let's emit the linkage name attribute for it. This helps
22674 debuggers to, e.g., set breakpoints in
22675 constructors/destructors when the user asks "break
22676 K::K". */
22677 add_linkage_name (subr_die, decl);
22678 }
22679 }
22680 /* A cached copy, possibly from early dwarf generation. Reuse as
22681 much as possible. */
22682 else if (old_die)
22683 {
22684 if (!get_AT_flag (old_die, DW_AT_declaration)
22685 /* We can have a normal definition following an inline one in the
22686 case of redefinition of GNU C extern inlines.
22687 It seems reasonable to use AT_specification in this case. */
22688 && !get_AT (old_die, DW_AT_inline))
22689 {
22690 /* Detect and ignore this case, where we are trying to output
22691 something we have already output. */
22692 if (get_AT (old_die, DW_AT_low_pc)
22693 || get_AT (old_die, DW_AT_ranges))
22694 return;
22695
22696 /* If we have no location information, this must be a
22697 partially generated DIE from early dwarf generation.
22698 Fall through and generate it. */
22699 }
22700
22701 /* If the definition comes from the same place as the declaration,
22702 maybe use the old DIE. We always want the DIE for this function
22703 that has the *_pc attributes to be under comp_unit_die so the
22704 debugger can find it. We also need to do this for abstract
22705 instances of inlines, since the spec requires the out-of-line copy
22706 to have the same parent. For local class methods, this doesn't
22707 apply; we just use the old DIE. */
22708 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22709 struct dwarf_file_data * file_index = lookup_filename (s.file);
22710 if ((is_cu_die (old_die->die_parent)
22711 /* This condition fixes the inconsistency/ICE with the
22712 following Fortran test (or some derivative thereof) while
22713 building libgfortran:
22714
22715 module some_m
22716 contains
22717 logical function funky (FLAG)
22718 funky = .true.
22719 end function
22720 end module
22721 */
22722 || (old_die->die_parent
22723 && old_die->die_parent->die_tag == DW_TAG_module)
22724 || context_die == NULL)
22725 && (DECL_ARTIFICIAL (decl)
22726 /* The location attributes may be in the abstract origin
22727 which in the case of LTO might be not available to
22728 look at. */
22729 || get_AT (old_die, DW_AT_abstract_origin)
22730 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22731 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22732 == (unsigned) s.line)
22733 && (!debug_column_info
22734 || s.column == 0
22735 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22736 == (unsigned) s.column)))))
22737 {
22738 subr_die = old_die;
22739
22740 /* Clear out the declaration attribute, but leave the
22741 parameters so they can be augmented with location
22742 information later. Unless this was a declaration, in
22743 which case, wipe out the nameless parameters and recreate
22744 them further down. */
22745 if (remove_AT (subr_die, DW_AT_declaration))
22746 {
22747
22748 remove_AT (subr_die, DW_AT_object_pointer);
22749 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22750 }
22751 }
22752 /* Make a specification pointing to the previously built
22753 declaration. */
22754 else
22755 {
22756 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22757 add_AT_specification (subr_die, old_die);
22758 add_pubname (decl, subr_die);
22759 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22760 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22761 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22762 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22763 if (debug_column_info
22764 && s.column
22765 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22766 != (unsigned) s.column))
22767 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22768
22769 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22770 emit the real type on the definition DIE. */
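	     /* For example (illustrative only):

		  auto f ();         declaration: DW_AT_type refers to "auto"
		  int f () { ... }   definition: DW_AT_type is the deduced int  */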
22771 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22772 {
22773 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22774 if (die == auto_die || die == decltype_auto_die)
22775 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22776 TYPE_UNQUALIFIED, false, context_die);
22777 }
22778
22779 /* When we process the method declaration, we haven't seen
22780 the out-of-class defaulted definition yet, so we have to
22781 recheck now. */
22782 if ((dwarf_version >= 5 || ! dwarf_strict)
22783 && !get_AT (subr_die, DW_AT_defaulted))
22784 {
22785 int defaulted
22786 = lang_hooks.decls.decl_dwarf_attribute (decl,
22787 DW_AT_defaulted);
22788 if (defaulted != -1)
22789 {
22790 /* Other values must have been handled before. */
22791 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22792 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22793 }
22794 }
22795 }
22796 }
22797 /* Create a fresh DIE for anything else. */
22798 else
22799 {
22800 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22801
22802 if (TREE_PUBLIC (decl))
22803 add_AT_flag (subr_die, DW_AT_external, 1);
22804
22805 add_name_and_src_coords_attributes (subr_die, decl);
22806 add_pubname (decl, subr_die);
22807 if (debug_info_level > DINFO_LEVEL_TERSE)
22808 {
22809 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22810 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22811 TYPE_UNQUALIFIED, false, context_die);
22812 }
22813
22814 add_pure_or_virtual_attribute (subr_die, decl);
22815 if (DECL_ARTIFICIAL (decl))
22816 add_AT_flag (subr_die, DW_AT_artificial, 1);
22817
22818 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22819 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22820
22821 add_alignment_attribute (subr_die, decl);
22822
22823 add_accessibility_attribute (subr_die, decl);
22824 }
22825
22826 /* Unless we have an existing non-declaration DIE, equate DECL to the new
22827 DIE. */
22828 if (!old_die || is_declaration_die (old_die))
22829 equate_decl_number_to_die (decl, subr_die);
22830
22831 if (declaration)
22832 {
22833 if (!old_die || !get_AT (old_die, DW_AT_inline))
22834 {
22835 add_AT_flag (subr_die, DW_AT_declaration, 1);
22836
22837 /* If this is an explicit function declaration then generate
22838 a DW_AT_explicit attribute. */
22839 if ((dwarf_version >= 3 || !dwarf_strict)
22840 && lang_hooks.decls.decl_dwarf_attribute (decl,
22841 DW_AT_explicit) == 1)
22842 add_AT_flag (subr_die, DW_AT_explicit, 1);
22843
22844 /* If this is a C++11 deleted special function member then generate
22845 a DW_AT_deleted attribute. */
22846 if ((dwarf_version >= 5 || !dwarf_strict)
22847 && lang_hooks.decls.decl_dwarf_attribute (decl,
22848 DW_AT_deleted) == 1)
22849 add_AT_flag (subr_die, DW_AT_deleted, 1);
22850
22851 /* If this is a C++11 defaulted special function member then
22852 generate a DW_AT_defaulted attribute. */
22853 if (dwarf_version >= 5 || !dwarf_strict)
22854 {
22855 int defaulted
22856 = lang_hooks.decls.decl_dwarf_attribute (decl,
22857 DW_AT_defaulted);
22858 if (defaulted != -1)
22859 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22860 }
22861
22862 /* If this is a C++11 non-static member function with & ref-qualifier
22863 then generate a DW_AT_reference attribute. */
22864 if ((dwarf_version >= 5 || !dwarf_strict)
22865 && lang_hooks.decls.decl_dwarf_attribute (decl,
22866 DW_AT_reference) == 1)
22867 add_AT_flag (subr_die, DW_AT_reference, 1);
22868
22869 /* If this is a C++11 non-static member function with &&
22870 ref-qualifier then generate a DW_AT_reference attribute. */
22871 if ((dwarf_version >= 5 || !dwarf_strict)
22872 && lang_hooks.decls.decl_dwarf_attribute (decl,
22873 DW_AT_rvalue_reference)
22874 == 1)
22875 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22876 }
22877 }
22878 /* For non-DECL_EXTERNAL decls, if range information is available, fill
22879 the DIE with it. */
22880 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22881 {
22882 HOST_WIDE_INT cfa_fb_offset;
22883
22884 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22885
22886 if (!crtl->has_bb_partition)
22887 {
22888 dw_fde_ref fde = fun->fde;
22889 if (fde->dw_fde_begin)
22890 {
22891 /* We have already generated the labels. */
22892 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22893 fde->dw_fde_end, false);
22894 }
22895 else
22896 {
22897 /* Create start/end labels and add the range. */
22898 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22899 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22900 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22901 current_function_funcdef_no);
22902 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22903 current_function_funcdef_no);
22904 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22905 false);
22906 }
22907
22908 #if VMS_DEBUGGING_INFO
22909 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22910 Section 2.3 Prologue and Epilogue Attributes:
22911 When a breakpoint is set on entry to a function, it is generally
22912 desirable for execution to be suspended, not on the very first
22913 instruction of the function, but rather at a point after the
22914 function's frame has been set up, after any language defined local
22915 declaration processing has been completed, and before execution of
22916 the first statement of the function begins. Debuggers generally
22917 cannot properly determine where this point is. Similarly for a
22918 breakpoint set on exit from a function. The prologue and epilogue
22919 attributes allow a compiler to communicate the location(s) to use. */
22920
22921 {
22922 if (fde->dw_fde_vms_end_prologue)
22923 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22924 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22925
22926 if (fde->dw_fde_vms_begin_epilogue)
22927 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22928 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22929 }
22930 #endif
22931
22932 }
22933 else
22934 {
22935 /* Generate pubnames entries for the split function code ranges. */
22936 dw_fde_ref fde = fun->fde;
22937
22938 if (fde->dw_fde_second_begin)
22939 {
22940 if (dwarf_version >= 3 || !dwarf_strict)
22941 {
22942 /* We should use ranges for non-contiguous code section
22943 addresses. Use the actual code range for the initial
22944 section, since the HOT/COLD labels might precede an
22945 alignment offset. */
22946 bool range_list_added = false;
22947 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22948 fde->dw_fde_end, &range_list_added,
22949 false);
22950 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22951 fde->dw_fde_second_end,
22952 &range_list_added, false);
22953 if (range_list_added)
22954 add_ranges (NULL);
22955 }
22956 else
22957 {
22958 /* There is no real support in DWARF 2 for this, so we make
22959 a work-around. First, emit the pub name for the segment
22960 containing the function label. Then make and emit a
22961 simplified subprogram DIE for the second segment with the
22962 name prefixed by __second_sect_of_. We use the same
22963 linkage name for the second DIE so that gdb will find both
22964 sections when given "b foo". */
22965 const char *name = NULL;
22966 tree decl_name = DECL_NAME (decl);
22967 dw_die_ref seg_die;
22968
22969 /* Do the 'primary' section. */
22970 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22971 fde->dw_fde_end, false);
22972
22973 /* Build a minimal DIE for the secondary section. */
22974 seg_die = new_die (DW_TAG_subprogram,
22975 subr_die->die_parent, decl);
22976
22977 if (TREE_PUBLIC (decl))
22978 add_AT_flag (seg_die, DW_AT_external, 1);
22979
22980 if (decl_name != NULL
22981 && IDENTIFIER_POINTER (decl_name) != NULL)
22982 {
22983 name = dwarf2_name (decl, 1);
22984 if (! DECL_ARTIFICIAL (decl))
22985 add_src_coords_attributes (seg_die, decl);
22986
22987 add_linkage_name (seg_die, decl);
22988 }
22989 gcc_assert (name != NULL);
22990 add_pure_or_virtual_attribute (seg_die, decl);
22991 if (DECL_ARTIFICIAL (decl))
22992 add_AT_flag (seg_die, DW_AT_artificial, 1);
22993
22994 name = concat ("__second_sect_of_", name, NULL);
22995 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22996 fde->dw_fde_second_end, false);
22997 add_name_attribute (seg_die, name);
22998 if (want_pubnames ())
22999 add_pubname_string (name, seg_die);
23000 }
23001 }
23002 else
23003 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23004 false);
23005 }
23006
23007 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23008
23009 /* We define the "frame base" as the function's CFA. This is more
23010 convenient for several reasons: (1) It's stable across the prologue
23011 and epilogue, which makes it better than just a frame pointer,
23012 (2) With dwarf3, there exists a one-byte encoding that allows us
23013 to reference the .debug_frame data by proxy, but failing that,
23014 (3) We can at least reuse the code inspection and interpretation
23015 code that determines the CFA position at various points in the
23016 function. */
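	 /* Concretely (this only restates the two branches below): with
	    DWARF 3 or later and DWARF-based unwind info, the frame base is
	    the single one-byte operation DW_OP_call_frame_cfa; otherwise we
	    fall back to a location list (or a single expression) derived
	    from the CFA program by convert_cfa_to_fb_loc_list.  */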
23017 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23018 {
23019 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23020 add_AT_loc (subr_die, DW_AT_frame_base, op);
23021 }
23022 else
23023 {
23024 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23025 if (list->dw_loc_next)
23026 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23027 else
23028 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23029 }
23030
23031 /* Compute a displacement from the "steady-state frame pointer" to
23032 the CFA. The former is what all stack slots and argument slots
23033 will reference in the rtl; the latter is what we've told the
23034 debugger about. We'll need to adjust all frame_base references
23035 by this displacement. */
23036 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23037
23038 if (fun->static_chain_decl)
23039 {
23040 /* DWARF requires here a location expression that computes the
23041 address of the enclosing subprogram's frame base. The machinery
23042 in tree-nested.c is supposed to store this specific address in the
23043 last field of the FRAME record. */
23044 const tree frame_type
23045 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23046 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23047
23048 tree fb_expr
23049 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23050 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23051 fb_expr, fb_decl, NULL_TREE);
23052
23053 add_AT_location_description (subr_die, DW_AT_static_link,
23054 loc_list_from_tree (fb_expr, 0, NULL));
23055 }
23056
23057 resolve_variable_values ();
23058 }
23059
23060 /* Generate child DIEs for template parameters. */
23061 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23062 gen_generic_params_dies (decl);
23063
23064 /* Now output descriptions of the arguments for this function. This gets
23065 (unnecessarily?) complex because the DECL_ARGUMENTS list
23066 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23067 `...' at the end of the formal parameter list. In order to find out if
23068 there was a trailing ellipsis or not, we must instead look at the type
23069 associated with the FUNCTION_DECL. This will be a node of type
23070 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23071 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23072 an ellipsis at the end. */
23073
23074 /* In the case where we are describing a mere function declaration, all we
23075 need to do here (and all we *can* do here) is to describe the *types* of
23076 its formal parameters. */
23077 if (debug_info_level <= DINFO_LEVEL_TERSE)
23078 ;
23079 else if (declaration)
23080 gen_formal_types_die (decl, subr_die);
23081 else
23082 {
23083 /* Generate DIEs to represent all known formal parameters. */
23084 tree parm = DECL_ARGUMENTS (decl);
23085 tree generic_decl = early_dwarf
23086 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23087 tree generic_decl_parm = generic_decl
23088 ? DECL_ARGUMENTS (generic_decl)
23089 : NULL;
23090
23091 /* Now we want to walk the list of parameters of the function and
23092 emit their relevant DIEs.
23093
23094 We consider the case of DECL being an instance of a generic function
23095 as well as it being a normal function.
23096
23097 If DECL is an instance of a generic function we walk the
23098 parameters of the generic function declaration _and_ the parameters of
23099 DECL itself. This is useful because we want to emit specific DIEs for
23100 function parameter packs and those are declared as part of the
23101 generic function declaration. In that particular case,
23102 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23103 That DIE has children DIEs representing the set of arguments
23104 of the pack. Note that the set of pack arguments can be empty.
23105 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23106 child DIEs.
23107
23108 Otherwise, we just consider the parameters of DECL. */
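
	 /* For example (illustrative), for

	      template <typename... Args> void f (Args... args);

	    an instantiation gets a DW_TAG_GNU_formal_parameter_pack DIE for
	    ARGS whose children are DW_TAG_formal_parameter DIEs for the
	    actual pack elements, if any.  */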
23109 while (generic_decl_parm || parm)
23110 {
23111 if (generic_decl_parm
23112 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23113 gen_formal_parameter_pack_die (generic_decl_parm,
23114 parm, subr_die,
23115 &parm);
23116 else if (parm && !POINTER_BOUNDS_P (parm))
23117 {
23118 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23119
23120 if (early_dwarf
23121 && parm == DECL_ARGUMENTS (decl)
23122 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23123 && parm_die
23124 && (dwarf_version >= 3 || !dwarf_strict))
23125 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23126
23127 parm = DECL_CHAIN (parm);
23128 }
23129 else if (parm)
23130 parm = DECL_CHAIN (parm);
23131
23132 if (generic_decl_parm)
23133 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23134 }
23135
23136 /* Decide whether we need an unspecified_parameters DIE at the end.
23137 There are two cases to do this for: 1) the ANSI `...' declaration,
23138 detectable when the end of the arg list is not a
23139 void_type_node; 2) an unprototyped function declaration (not a
23140 definition), which just means that we have no info about the
23141 parameters at all. */
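      /* For example (illustrative only, C declarations):

	   void f1 (int, ...);   prototyped and stdarg: gets a
				 DW_TAG_unspecified_parameters child
	   void f2 (int);        prototyped, no ellipsis: nothing extra
	   void f3 ();           unprototyped, no definition: also gets a
				 DW_TAG_unspecified_parameters child  */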
23142 if (early_dwarf)
23143 {
23144 if (prototype_p (TREE_TYPE (decl)))
23145 {
23146 /* This is the prototyped case; check for a trailing ellipsis. */
23147 if (stdarg_p (TREE_TYPE (decl)))
23148 gen_unspecified_parameters_die (decl, subr_die);
23149 }
23150 else if (DECL_INITIAL (decl) == NULL_TREE)
23151 gen_unspecified_parameters_die (decl, subr_die);
23152 }
23153 }
23154
23155 if (subr_die != old_die)
23156 /* Add the calling convention attribute if requested. */
23157 add_calling_convention_attribute (subr_die, decl);
23158
23159 /* Output DWARF info for everything within the body of the function
23160 (if it has one; it may be just a declaration).
23161
23162 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23163 a function. This BLOCK actually represents the outermost binding contour
23164 for the function, i.e. the contour in which the function's formal
23165 parameters and labels get declared. Curiously, it appears that the front
23166 end doesn't actually put the PARM_DECL nodes for the current function onto
23167 the BLOCK_VARS list for this outer scope; they are strung off the
23168 DECL_ARGUMENTS list for the function instead.
23169
23170 The BLOCK_VARS list for the `outer_scope' does, however, provide us with
23171 a list of the LABEL_DECL nodes for the function, and we output DWARF info
23172 for those in decls_for_scope. Just within the `outer_scope' there will be
23173 a BLOCK node representing the function's outermost pair of curly braces,
23174 and any blocks used for the base and member initializers of a C++
23175 constructor function. */
23176 tree outer_scope = DECL_INITIAL (decl);
23177 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23178 {
23179 int call_site_note_count = 0;
23180 int tail_call_site_note_count = 0;
23181
23182 /* Emit a DW_TAG_variable DIE for a named return value. */
23183 if (DECL_NAME (DECL_RESULT (decl)))
23184 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23185
23186 /* The first time through decls_for_scope we will generate the
23187 DIEs for the locals. The second time, we fill in the
23188 location info. */
23189 decls_for_scope (outer_scope, subr_die);
23190
23191 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23192 {
23193 struct call_arg_loc_node *ca_loc;
23194 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23195 {
23196 dw_die_ref die = NULL;
23197 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23198 rtx arg, next_arg;
23199
23200 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23201 ? XEXP (ca_loc->call_arg_loc_note, 0)
23202 : NULL_RTX);
23203 arg; arg = next_arg)
23204 {
23205 dw_loc_descr_ref reg, val;
23206 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23207 dw_die_ref cdie, tdie = NULL;
23208
23209 next_arg = XEXP (arg, 1);
23210 if (REG_P (XEXP (XEXP (arg, 0), 0))
23211 && next_arg
23212 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23213 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23214 && REGNO (XEXP (XEXP (arg, 0), 0))
23215 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23216 next_arg = XEXP (next_arg, 1);
23217 if (mode == VOIDmode)
23218 {
23219 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23220 if (mode == VOIDmode)
23221 mode = GET_MODE (XEXP (arg, 0));
23222 }
23223 if (mode == VOIDmode || mode == BLKmode)
23224 continue;
23225 /* Get dynamic information about the call target only if we
23226 have no static information: we cannot generate both
23227 DW_AT_call_origin and DW_AT_call_target
23228 attributes. */
23229 if (ca_loc->symbol_ref == NULL_RTX)
23230 {
23231 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23232 {
23233 tloc = XEXP (XEXP (arg, 0), 1);
23234 continue;
23235 }
23236 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23237 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23238 {
23239 tlocc = XEXP (XEXP (arg, 0), 1);
23240 continue;
23241 }
23242 }
23243 reg = NULL;
23244 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23245 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23246 VAR_INIT_STATUS_INITIALIZED);
23247 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23248 {
23249 rtx mem = XEXP (XEXP (arg, 0), 0);
23250 reg = mem_loc_descriptor (XEXP (mem, 0),
23251 get_address_mode (mem),
23252 GET_MODE (mem),
23253 VAR_INIT_STATUS_INITIALIZED);
23254 }
23255 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23256 == DEBUG_PARAMETER_REF)
23257 {
23258 tree tdecl
23259 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23260 tdie = lookup_decl_die (tdecl);
23261 if (tdie == NULL)
23262 continue;
23263 }
23264 else
23265 continue;
23266 if (reg == NULL
23267 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23268 != DEBUG_PARAMETER_REF)
23269 continue;
23270 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23271 VOIDmode,
23272 VAR_INIT_STATUS_INITIALIZED);
23273 if (val == NULL)
23274 continue;
23275 if (die == NULL)
23276 die = gen_call_site_die (decl, subr_die, ca_loc);
23277 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23278 NULL_TREE);
23279 if (reg != NULL)
23280 add_AT_loc (cdie, DW_AT_location, reg);
23281 else if (tdie != NULL)
23282 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23283 tdie);
23284 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23285 if (next_arg != XEXP (arg, 1))
23286 {
23287 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23288 if (mode == VOIDmode)
23289 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23290 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23291 0), 1),
23292 mode, VOIDmode,
23293 VAR_INIT_STATUS_INITIALIZED);
23294 if (val != NULL)
23295 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23296 val);
23297 }
23298 }
23299 if (die == NULL
23300 && (ca_loc->symbol_ref || tloc))
23301 die = gen_call_site_die (decl, subr_die, ca_loc);
23302 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23303 {
23304 dw_loc_descr_ref tval = NULL;
23305
23306 if (tloc != NULL_RTX)
23307 tval = mem_loc_descriptor (tloc,
23308 GET_MODE (tloc) == VOIDmode
23309 ? Pmode : GET_MODE (tloc),
23310 VOIDmode,
23311 VAR_INIT_STATUS_INITIALIZED);
23312 if (tval)
23313 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23314 else if (tlocc != NULL_RTX)
23315 {
23316 tval = mem_loc_descriptor (tlocc,
23317 GET_MODE (tlocc) == VOIDmode
23318 ? Pmode : GET_MODE (tlocc),
23319 VOIDmode,
23320 VAR_INIT_STATUS_INITIALIZED);
23321 if (tval)
23322 add_AT_loc (die,
23323 dwarf_AT (DW_AT_call_target_clobbered),
23324 tval);
23325 }
23326 }
23327 if (die != NULL)
23328 {
23329 call_site_note_count++;
23330 if (ca_loc->tail_call_p)
23331 tail_call_site_note_count++;
23332 }
23333 }
23334 }
23335 call_arg_locations = NULL;
23336 call_arg_loc_last = NULL;
23337 if (tail_call_site_count >= 0
23338 && tail_call_site_count == tail_call_site_note_count
23339 && (!dwarf_strict || dwarf_version >= 5))
23340 {
23341 if (call_site_count >= 0
23342 && call_site_count == call_site_note_count)
23343 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23344 else
23345 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23346 }
23347 call_site_count = -1;
23348 tail_call_site_count = -1;
23349 }
23350
23351 /* Mark used types after we have created DIEs for the function's scopes. */
23352 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23353 }
23354
23355 /* Returns a hash value for X (which really is a die_struct). */
23356
23357 hashval_t
23358 block_die_hasher::hash (die_struct *d)
23359 {
23360 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23361 }
23362
23363 /* Return true if the decl_id and die_parent of die_struct X are the same
23364 as the decl_id and die_parent of die_struct Y. */
23365
23366 bool
23367 block_die_hasher::equal (die_struct *x, die_struct *y)
23368 {
23369 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23370 }
23371
23372 /* Hold information about markers for inlined entry points. */
23373 struct GTY ((for_user)) inline_entry_data
23374 {
23375 /* The block that's the inlined_function_outer_scope for an inlined
23376 function. */
23377 tree block;
23378
23379 /* The label at the inlined entry point. */
23380 const char *label_pfx;
23381 unsigned int label_num;
23382
23383 /* The view number to be used as the inlined entry point. */
23384 var_loc_view view;
23385 };
23386
23387 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23388 {
23389 typedef tree compare_type;
23390 static inline hashval_t hash (const inline_entry_data *);
23391 static inline bool equal (const inline_entry_data *, const_tree);
23392 };
23393
23394 /* Hash table routines for inline_entry_data. */
23395
23396 inline hashval_t
23397 inline_entry_data_hasher::hash (const inline_entry_data *data)
23398 {
23399 return htab_hash_pointer (data->block);
23400 }
23401
23402 inline bool
23403 inline_entry_data_hasher::equal (const inline_entry_data *data,
23404 const_tree block)
23405 {
23406 return data->block == block;
23407 }
23408
23409 /* Inlined entry points pending DIE creation in this compilation unit. */
23410
23411 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23412
23413
23414 /* Return TRUE if DECL, which may have been previously generated as
23415 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23416 true if decl (or its origin) is either an extern declaration or a
23417 class/namespace scoped declaration.
23418
23419 The declare_in_namespace support causes us to get two DIEs for one
23420 variable, both of which are declarations. We want to avoid
23421 considering one to be a specification, so we must test for
23422 DECLARATION and DW_AT_declaration. */
23423 static inline bool
23424 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23425 {
23426 return (old_die && TREE_STATIC (decl) && !declaration
23427 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23428 }
23429
23430 /* Return true if DECL is a local static. */
23431
23432 static inline bool
23433 local_function_static (tree decl)
23434 {
23435 gcc_assert (VAR_P (decl));
23436 return TREE_STATIC (decl)
23437 && DECL_CONTEXT (decl)
23438 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23439 }
23440
23441 /* Generate a DIE to represent a declared data object.
23442 Either DECL or ORIGIN must be non-null. */
23443
23444 static void
23445 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23446 {
23447 HOST_WIDE_INT off = 0;
23448 tree com_decl;
23449 tree decl_or_origin = decl ? decl : origin;
23450 tree ultimate_origin;
23451 dw_die_ref var_die;
23452 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23453 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23454 || class_or_namespace_scope_p (context_die));
23455 bool specialization_p = false;
23456 bool no_linkage_name = false;
23457
23458 /* While C++ inline static data members have definitions inside the
23459 class, force the first DIE to be a declaration, then let gen_member_die
23460 reparent it to the class context and call gen_variable_die again
23461 to create the outside-of-class DIE for the definition. */
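  /* For example (illustrative only):

       struct S { static inline int x = 42; };   (a C++17 inline variable)

     first gets a declaration DIE for S::x inside the DIE for S; the DIE for
     the definition is then created outside the class and refers back to the
     declaration via DW_AT_specification.  */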
23462 if (!declaration
23463 && old_die == NULL
23464 && decl
23465 && DECL_CONTEXT (decl)
23466 && TYPE_P (DECL_CONTEXT (decl))
23467 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23468 {
23469 declaration = true;
23470 if (dwarf_version < 5)
23471 no_linkage_name = true;
23472 }
23473
23474 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23475 if (decl || ultimate_origin)
23476 origin = ultimate_origin;
23477 com_decl = fortran_common (decl_or_origin, &off);
23478
23479 /* A symbol in a common block gets emitted as a child of the common block,
23480 in the form of a data member. */
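  /* For example (illustrative), Fortran code such as

	 integer i, j
	 common /blk/ i, j

     yields a DW_TAG_common_block DIE named "blk" whose DW_TAG_variable
     children for I and J carry locations at the appropriate offsets from
     the common block's address.  */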
23481 if (com_decl)
23482 {
23483 dw_die_ref com_die;
23484 dw_loc_list_ref loc = NULL;
23485 die_node com_die_arg;
23486
23487 var_die = lookup_decl_die (decl_or_origin);
23488 if (var_die)
23489 {
23490 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23491 {
23492 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23493 if (loc)
23494 {
23495 if (off)
23496 {
23497 /* Optimize the common case. */
23498 if (single_element_loc_list_p (loc)
23499 && loc->expr->dw_loc_opc == DW_OP_addr
23500 && loc->expr->dw_loc_next == NULL
23501 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23502 == SYMBOL_REF)
23503 {
23504 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23505 loc->expr->dw_loc_oprnd1.v.val_addr
23506 = plus_constant (GET_MODE (x), x , off);
23507 }
23508 else
23509 loc_list_plus_const (loc, off);
23510 }
23511 add_AT_location_description (var_die, DW_AT_location, loc);
23512 remove_AT (var_die, DW_AT_declaration);
23513 }
23514 }
23515 return;
23516 }
23517
23518 if (common_block_die_table == NULL)
23519 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23520
23521 com_die_arg.decl_id = DECL_UID (com_decl);
23522 com_die_arg.die_parent = context_die;
23523 com_die = common_block_die_table->find (&com_die_arg);
23524 if (! early_dwarf)
23525 loc = loc_list_from_tree (com_decl, 2, NULL);
23526 if (com_die == NULL)
23527 {
23528 const char *cnam
23529 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23530 die_node **slot;
23531
23532 com_die = new_die (DW_TAG_common_block, context_die, decl);
23533 add_name_and_src_coords_attributes (com_die, com_decl);
23534 if (loc)
23535 {
23536 add_AT_location_description (com_die, DW_AT_location, loc);
23537 /* Avoid sharing the same loc descriptor between
23538 DW_TAG_common_block and DW_TAG_variable. */
23539 loc = loc_list_from_tree (com_decl, 2, NULL);
23540 }
23541 else if (DECL_EXTERNAL (decl_or_origin))
23542 add_AT_flag (com_die, DW_AT_declaration, 1);
23543 if (want_pubnames ())
23544 add_pubname_string (cnam, com_die); /* ??? needed? */
23545 com_die->decl_id = DECL_UID (com_decl);
23546 slot = common_block_die_table->find_slot (com_die, INSERT);
23547 *slot = com_die;
23548 }
23549 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23550 {
23551 add_AT_location_description (com_die, DW_AT_location, loc);
23552 loc = loc_list_from_tree (com_decl, 2, NULL);
23553 remove_AT (com_die, DW_AT_declaration);
23554 }
23555 var_die = new_die (DW_TAG_variable, com_die, decl);
23556 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23557 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23558 decl_quals (decl_or_origin), false,
23559 context_die);
23560 add_alignment_attribute (var_die, decl);
23561 add_AT_flag (var_die, DW_AT_external, 1);
23562 if (loc)
23563 {
23564 if (off)
23565 {
23566 /* Optimize the common case. */
23567 if (single_element_loc_list_p (loc)
23568 && loc->expr->dw_loc_opc == DW_OP_addr
23569 && loc->expr->dw_loc_next == NULL
23570 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23571 {
23572 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23573 loc->expr->dw_loc_oprnd1.v.val_addr
23574 = plus_constant (GET_MODE (x), x, off);
23575 }
23576 else
23577 loc_list_plus_const (loc, off);
23578 }
23579 add_AT_location_description (var_die, DW_AT_location, loc);
23580 }
23581 else if (DECL_EXTERNAL (decl_or_origin))
23582 add_AT_flag (var_die, DW_AT_declaration, 1);
23583 if (decl)
23584 equate_decl_number_to_die (decl, var_die);
23585 return;
23586 }
23587
23588 if (old_die)
23589 {
23590 if (declaration)
23591 {
23592 /* A declaration that has been previously dumped needs no
23593 further annotations, since it doesn't need location info on
23594 the second pass. */
23595 return;
23596 }
23597 else if (decl_will_get_specification_p (old_die, decl, declaration)
23598 && !get_AT (old_die, DW_AT_specification))
23599 {
23600 /* Fall through so we can make a new variable DIE along with a
23601 DW_AT_specification. */
23602 }
23603 else if (origin && old_die->die_parent != context_die)
23604 {
23605 /* If we will be creating an inlined instance, we need a
23606 new DIE that will get annotated with
23607 DW_AT_abstract_origin. */
23608 gcc_assert (!DECL_ABSTRACT_P (decl));
23609 }
23610 else
23611 {
23612 /* If a DIE was dumped early, it still needs location info.
23613 Skip to where we fill the location bits. */
23614 var_die = old_die;
23615
23616 /* ??? In LTRANS we cannot annotate early created variably
23617 modified type DIEs without copying them and adjusting all
23618 references to them. Thus we dump them again. Also add a
23619 reference to them, but beware of a -g0 compile and -g link,
23620 in which case the reference will already be present. */
23621 tree type = TREE_TYPE (decl_or_origin);
23622 if (in_lto_p
23623 && ! get_AT (var_die, DW_AT_type)
23624 && variably_modified_type_p
23625 (type, decl_function_context (decl_or_origin)))
23626 {
23627 if (decl_by_reference_p (decl_or_origin))
23628 add_type_attribute (var_die, TREE_TYPE (type),
23629 TYPE_UNQUALIFIED, false, context_die);
23630 else
23631 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23632 false, context_die);
23633 }
23634
23635 goto gen_variable_die_location;
23636 }
23637 }
23638
23639 /* For static data members, the declaration in the class is supposed
23640 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23641 also in DWARF2; the specification should still be DW_TAG_variable
23642 referencing the DW_TAG_member DIE. */
23643 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23644 var_die = new_die (DW_TAG_member, context_die, decl);
23645 else
23646 var_die = new_die (DW_TAG_variable, context_die, decl);
23647
23648 if (origin != NULL)
23649 add_abstract_origin_attribute (var_die, origin);
23650
23651 /* Loop unrolling can create multiple blocks that refer to the same
23652 static variable, so we must test for the DW_AT_declaration flag.
23653
23654 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23655 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23656 sharing them.
23657
23658 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23659 else if (decl_will_get_specification_p (old_die, decl, declaration))
23660 {
23661 /* This is a definition of a C++ class level static. */
23662 add_AT_specification (var_die, old_die);
23663 specialization_p = true;
23664 if (DECL_NAME (decl))
23665 {
23666 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23667 struct dwarf_file_data * file_index = lookup_filename (s.file);
23668
23669 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23670 add_AT_file (var_die, DW_AT_decl_file, file_index);
23671
23672 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23673 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23674
23675 if (debug_column_info
23676 && s.column
23677 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23678 != (unsigned) s.column))
23679 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23680
23681 if (old_die->die_tag == DW_TAG_member)
23682 add_linkage_name (var_die, decl);
23683 }
23684 }
23685 else
23686 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23687
23688 if ((origin == NULL && !specialization_p)
23689 || (origin != NULL
23690 && !DECL_ABSTRACT_P (decl_or_origin)
23691 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23692 decl_function_context
23693 (decl_or_origin))))
23694 {
23695 tree type = TREE_TYPE (decl_or_origin);
23696
23697 if (decl_by_reference_p (decl_or_origin))
23698 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23699 context_die);
23700 else
23701 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23702 context_die);
23703 }
23704
23705 if (origin == NULL && !specialization_p)
23706 {
23707 if (TREE_PUBLIC (decl))
23708 add_AT_flag (var_die, DW_AT_external, 1);
23709
23710 if (DECL_ARTIFICIAL (decl))
23711 add_AT_flag (var_die, DW_AT_artificial, 1);
23712
23713 add_alignment_attribute (var_die, decl);
23714
23715 add_accessibility_attribute (var_die, decl);
23716 }
23717
23718 if (declaration)
23719 add_AT_flag (var_die, DW_AT_declaration, 1);
23720
23721 if (decl && (DECL_ABSTRACT_P (decl)
23722 || !old_die || is_declaration_die (old_die)))
23723 equate_decl_number_to_die (decl, var_die);
23724
23725 gen_variable_die_location:
23726 if (! declaration
23727 && (! DECL_ABSTRACT_P (decl_or_origin)
23728 /* Local static vars are shared between all clones/inlines,
23729 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23730 already set. */
23731 || (VAR_P (decl_or_origin)
23732 && TREE_STATIC (decl_or_origin)
23733 && DECL_RTL_SET_P (decl_or_origin))))
23734 {
23735 if (early_dwarf)
23736 add_pubname (decl_or_origin, var_die);
23737 else
23738 add_location_or_const_value_attribute (var_die, decl_or_origin,
23739 decl == NULL);
23740 }
23741 else
23742 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23743
23744 if ((dwarf_version >= 4 || !dwarf_strict)
23745 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23746 DW_AT_const_expr) == 1
23747 && !get_AT (var_die, DW_AT_const_expr)
23748 && !specialization_p)
23749 add_AT_flag (var_die, DW_AT_const_expr, 1);
23750
23751 if (!dwarf_strict)
23752 {
23753 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23754 DW_AT_inline);
23755 if (inl != -1
23756 && !get_AT (var_die, DW_AT_inline)
23757 && !specialization_p)
23758 add_AT_unsigned (var_die, DW_AT_inline, inl);
23759 }
23760 }
23761
23762 /* Generate a DIE to represent a named constant. */
23763
23764 static void
23765 gen_const_die (tree decl, dw_die_ref context_die)
23766 {
23767 dw_die_ref const_die;
23768 tree type = TREE_TYPE (decl);
23769
23770 const_die = lookup_decl_die (decl);
23771 if (const_die)
23772 return;
23773
23774 const_die = new_die (DW_TAG_constant, context_die, decl);
23775 equate_decl_number_to_die (decl, const_die);
23776 add_name_and_src_coords_attributes (const_die, decl);
23777 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23778 if (TREE_PUBLIC (decl))
23779 add_AT_flag (const_die, DW_AT_external, 1);
23780 if (DECL_ARTIFICIAL (decl))
23781 add_AT_flag (const_die, DW_AT_artificial, 1);
23782 tree_add_const_value_attribute_for_decl (const_die, decl);
23783 }
23784
23785 /* Generate a DIE to represent a label identifier. */
23786
23787 static void
23788 gen_label_die (tree decl, dw_die_ref context_die)
23789 {
23790 tree origin = decl_ultimate_origin (decl);
23791 dw_die_ref lbl_die = lookup_decl_die (decl);
23792 rtx insn;
23793 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23794
23795 if (!lbl_die)
23796 {
23797 lbl_die = new_die (DW_TAG_label, context_die, decl);
23798 equate_decl_number_to_die (decl, lbl_die);
23799
23800 if (origin != NULL)
23801 add_abstract_origin_attribute (lbl_die, origin);
23802 else
23803 add_name_and_src_coords_attributes (lbl_die, decl);
23804 }
23805
23806 if (DECL_ABSTRACT_P (decl))
23807 equate_decl_number_to_die (decl, lbl_die);
23808 else if (! early_dwarf)
23809 {
23810 insn = DECL_RTL_IF_SET (decl);
23811
23812 /* Deleted labels are programmer-specified labels which have been
23813 eliminated because of various optimizations. We still emit them
23814 here so that it is possible to put breakpoints on them. */
23815 if (insn
23816 && (LABEL_P (insn)
23817 || ((NOTE_P (insn)
23818 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23819 {
23820 /* When optimization is enabled (via -O) some parts of the compiler
23821 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23822 represent source-level labels that were explicitly declared by
23823 the user. This really shouldn't be happening though, so catch
23824 it if it ever does. */
23825 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23826
23827 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23828 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23829 }
23830 else if (insn
23831 && NOTE_P (insn)
23832 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23833 && CODE_LABEL_NUMBER (insn) != -1)
23834 {
23835 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23836 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23837 }
23838 }
23839 }
23840
23841 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23842 attributes to the DIE for a block STMT, to describe where the inlined
23843 function was called from. This is similar to add_src_coords_attributes. */
23844
23845 static inline void
23846 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23847 {
23848 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23849
23850 if (dwarf_version >= 3 || !dwarf_strict)
23851 {
23852 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23853 add_AT_unsigned (die, DW_AT_call_line, s.line);
23854 if (debug_column_info && s.column)
23855 add_AT_unsigned (die, DW_AT_call_column, s.column);
23856 }
23857 }
23858
23859
23860 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23861 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23862
23863 static inline void
23864 add_high_low_attributes (tree stmt, dw_die_ref die)
23865 {
23866 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23867
23868 if (inline_entry_data **iedp
23869 = !inline_entry_data_table ? NULL
23870 : inline_entry_data_table->find_slot_with_hash (stmt,
23871 htab_hash_pointer (stmt),
23872 NO_INSERT))
23873 {
23874 inline_entry_data *ied = *iedp;
23875 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23876 gcc_assert (debug_inline_points);
23877 gcc_assert (inlined_function_outer_scope_p (stmt));
23878
23879 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23880 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23881
23882 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23883 && !dwarf_strict)
23884 {
23885 if (!output_asm_line_debug_info ())
23886 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23887 else
23888 {
23889 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23890 /* FIXME: this will resolve to a small number. Could we
23891 possibly emit smaller data? Ideally we'd emit a
23892 uleb128, but that would make the size of DIEs
23893 impossible for the compiler to compute, since it's
23894 the assembler that computes the value of the view
23895 label in this case. Ideally, we'd have a single form
23896 encompassing both the address and the view, and
23897 indirecting them through a table might make things
23898 easier, but even that would be more wasteful,
23899 space-wise, than what we have now. */
23900 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23901 }
23902 }
23903
23904 inline_entry_data_table->clear_slot (iedp);
23905 }
23906
23907 if (BLOCK_FRAGMENT_CHAIN (stmt)
23908 && (dwarf_version >= 3 || !dwarf_strict))
23909 {
23910 tree chain, superblock = NULL_TREE;
23911 dw_die_ref pdie;
23912 dw_attr_node *attr = NULL;
23913
23914 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23915 {
23916 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23917 BLOCK_NUMBER (stmt));
23918 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23919 }
23920
23921 /* Optimize duplicate .debug_ranges lists or even tails of
23922 lists. If this BLOCK has the same ranges as its supercontext,
23923 look up the DW_AT_ranges attribute in the supercontext (and
23924 recursively so), verify that the ranges_table contains the
23925 right values and use it instead of adding a new .debug_ranges entry. */
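	 /* For illustration: if the superblock's fragment ranges are
	    [R0 R1 R2 R3] and this block's fragments cover only the tail
	    [R2 R3], we reuse the superblock's range list and point
	    DW_AT_ranges at the entry where the shared tail begins, instead
	    of emitting a new list.  */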
23926 for (chain = stmt, pdie = die;
23927 BLOCK_SAME_RANGE (chain);
23928 chain = BLOCK_SUPERCONTEXT (chain))
23929 {
23930 dw_attr_node *new_attr;
23931
23932 pdie = pdie->die_parent;
23933 if (pdie == NULL)
23934 break;
23935 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23936 break;
23937 new_attr = get_AT (pdie, DW_AT_ranges);
23938 if (new_attr == NULL
23939 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23940 break;
23941 attr = new_attr;
23942 superblock = BLOCK_SUPERCONTEXT (chain);
23943 }
23944 if (attr != NULL
23945 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23946 == BLOCK_NUMBER (superblock))
23947 && BLOCK_FRAGMENT_CHAIN (superblock))
23948 {
23949 unsigned long off = attr->dw_attr_val.v.val_offset;
23950 unsigned long supercnt = 0, thiscnt = 0;
23951 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23952 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23953 {
23954 ++supercnt;
23955 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23956 == BLOCK_NUMBER (chain));
23957 }
23958 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23959 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23960 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23961 ++thiscnt;
23962 gcc_assert (supercnt >= thiscnt);
23963 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23964 false);
23965 note_rnglist_head (off + supercnt - thiscnt);
23966 return;
23967 }
23968
23969 unsigned int offset = add_ranges (stmt, true);
23970 add_AT_range_list (die, DW_AT_ranges, offset, false);
23971 note_rnglist_head (offset);
23972
23973 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23974 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23975 do
23976 {
23977 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23978 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23979 chain = BLOCK_FRAGMENT_CHAIN (chain);
23980 }
23981 while (chain);
23982 add_ranges (NULL);
23983 }
23984 else
23985 {
23986 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23987 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23988 BLOCK_NUMBER (stmt));
23989 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23990 BLOCK_NUMBER (stmt));
23991 add_AT_low_high_pc (die, label, label_high, false);
23992 }
23993 }
23994
23995 /* Generate a DIE for a lexical block. */
23996
23997 static void
23998 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23999 {
24000 dw_die_ref old_die = BLOCK_DIE (stmt);
24001 dw_die_ref stmt_die = NULL;
24002 if (!old_die)
24003 {
24004 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24005 BLOCK_DIE (stmt) = stmt_die;
24006 }
24007
24008 if (BLOCK_ABSTRACT (stmt))
24009 {
24010 if (old_die)
24011 {
24012 /* This must have been generated early and it won't even
24013 need location information since it's a DW_AT_inline
24014 function. */
24015 if (flag_checking)
24016 for (dw_die_ref c = context_die; c; c = c->die_parent)
24017 if (c->die_tag == DW_TAG_inlined_subroutine
24018 || c->die_tag == DW_TAG_subprogram)
24019 {
24020 gcc_assert (get_AT (c, DW_AT_inline));
24021 break;
24022 }
24023 return;
24024 }
24025 }
24026 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24027 {
24028 /* If this is an inlined instance, create a new lexical block DIE for
24029 anything below to attach DW_AT_abstract_origin to. */
24030 if (old_die)
24031 {
24032 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24033 BLOCK_DIE (stmt) = stmt_die;
24034 old_die = NULL;
24035 }
24036
24037 tree origin = block_ultimate_origin (stmt);
24038 if (origin != NULL_TREE && origin != stmt)
24039 add_abstract_origin_attribute (stmt_die, origin);
24040 }
24041
24042 if (old_die)
24043 stmt_die = old_die;
24044
24045 /* A non-abstract block whose blocks have already been reordered
24046 should have the instruction range for this block. If so, set the
24047 high/low attributes. */
24048 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24049 {
24050 gcc_assert (stmt_die);
24051 add_high_low_attributes (stmt, stmt_die);
24052 }
24053
24054 decls_for_scope (stmt, stmt_die);
24055 }
24056
24057 /* Generate a DIE for an inlined subprogram. */
24058
24059 static void
24060 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24061 {
24062 tree decl;
24063
24064 /* The instance of the function that is effectively being inlined shall not
24065 be abstract. */
24066 gcc_assert (! BLOCK_ABSTRACT (stmt));
24067
24068 decl = block_ultimate_origin (stmt);
24069
24070 /* Make sure any inlined functions are known to be inlineable. */
24071 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24072 || cgraph_function_possibly_inlined_p (decl));
24073
24074 if (! BLOCK_ABSTRACT (stmt))
24075 {
24076 dw_die_ref subr_die
24077 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24078
24079 if (call_arg_locations || debug_inline_points)
24080 BLOCK_DIE (stmt) = subr_die;
24081 add_abstract_origin_attribute (subr_die, decl);
24082 if (TREE_ASM_WRITTEN (stmt))
24083 add_high_low_attributes (stmt, subr_die);
24084 add_call_src_coords_attributes (stmt, subr_die);
24085
24086 decls_for_scope (stmt, subr_die);
24087 }
24088 }
24089
24090 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24091 the comment for VLR_CONTEXT. */
24092
24093 static void
24094 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24095 {
24096 dw_die_ref decl_die;
24097
24098 if (TREE_TYPE (decl) == error_mark_node)
24099 return;
24100
24101 decl_die = new_die (DW_TAG_member, context_die, decl);
24102 add_name_and_src_coords_attributes (decl_die, decl);
24103 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24104 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24105 context_die);
24106
24107 if (DECL_BIT_FIELD_TYPE (decl))
24108 {
24109 add_byte_size_attribute (decl_die, decl);
24110 add_bit_size_attribute (decl_die, decl);
24111 add_bit_offset_attribute (decl_die, decl, ctx);
24112 }
24113
24114 add_alignment_attribute (decl_die, decl);
24115
24116 /* If we have a variant part offset, then we are supposed to process a member
24117 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24118 trees. */
24119 gcc_assert (ctx->variant_part_offset == NULL_TREE
24120 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24121 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24122 add_data_member_location_attribute (decl_die, decl, ctx);
24123
24124 if (DECL_ARTIFICIAL (decl))
24125 add_AT_flag (decl_die, DW_AT_artificial, 1);
24126
24127 add_accessibility_attribute (decl_die, decl);
24128
24129 /* Equate decl number to die, so that we can look up this decl later on. */
24130 equate_decl_number_to_die (decl, decl_die);
24131 }
24132
24133 /* Generate a DIE for a pointer to a member type. TYPE can be an
24134 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24135 pointer to member function. */
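/* For illustration (hypothetical C++ declarations, not taken from the code
below): given "int C::*pd;" and "void (C::*pf) ();", the type of PD reaches
this point as an OFFSET_TYPE, while the type of PF is the RECORD_TYPE the
C++ front end builds for pointers to member functions, so both cases named
above are covered here. */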
24136
24137 static void
24138 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24139 {
24140 if (lookup_type_die (type))
24141 return;
24142
24143 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24144 scope_die_for (type, context_die), type);
24145
24146 equate_type_number_to_die (type, ptr_die);
24147 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24148 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24149 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24150 context_die);
24151 add_alignment_attribute (ptr_die, type);
24152
24153 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24154 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24155 {
24156 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24157 add_AT_loc (ptr_die, DW_AT_use_location, op);
24158 }
24159 }
24160
24161 static char *producer_string;
24162
24163 /* Return a heap-allocated producer string that includes the command-line
24164 options if -grecord-gcc-switches. */
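/* For illustration only (the exact contents depend on the command line):
the result typically looks like "GNU C11 7.3.0" and, with
-grecord-gcc-switches, may become something like
"GNU C11 7.3.0 -mtune=generic -O2 -g". */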
24165
24166 static char *
24167 gen_producer_string (void)
24168 {
24169 size_t j;
24170 auto_vec<const char *> switches;
24171 const char *language_string = lang_hooks.name;
24172 char *producer, *tail;
24173 const char *p;
24174 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24175 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24176
24177 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24178 switch (save_decoded_options[j].opt_index)
24179 {
24180 case OPT_o:
24181 case OPT_d:
24182 case OPT_dumpbase:
24183 case OPT_dumpdir:
24184 case OPT_auxbase:
24185 case OPT_auxbase_strip:
24186 case OPT_quiet:
24187 case OPT_version:
24188 case OPT_v:
24189 case OPT_w:
24190 case OPT_L:
24191 case OPT_D:
24192 case OPT_I:
24193 case OPT_U:
24194 case OPT_SPECIAL_unknown:
24195 case OPT_SPECIAL_ignore:
24196 case OPT_SPECIAL_program_name:
24197 case OPT_SPECIAL_input_file:
24198 case OPT_grecord_gcc_switches:
24199 case OPT__output_pch_:
24200 case OPT_fdiagnostics_show_location_:
24201 case OPT_fdiagnostics_show_option:
24202 case OPT_fdiagnostics_show_caret:
24203 case OPT_fdiagnostics_color_:
24204 case OPT_fverbose_asm:
24205 case OPT____:
24206 case OPT__sysroot_:
24207 case OPT_nostdinc:
24208 case OPT_nostdinc__:
24209 case OPT_fpreprocessed:
24210 case OPT_fltrans_output_list_:
24211 case OPT_fresolution_:
24212 case OPT_fdebug_prefix_map_:
24213 case OPT_fmacro_prefix_map_:
24214 case OPT_ffile_prefix_map_:
24215 case OPT_fcompare_debug:
24216 /* Ignore these. */
24217 continue;
24218 default:
24219 if (cl_options[save_decoded_options[j].opt_index].flags
24220 & CL_NO_DWARF_RECORD)
24221 continue;
24222 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24223 == '-');
24224 switch (save_decoded_options[j].canonical_option[0][1])
24225 {
24226 case 'M':
24227 case 'i':
24228 case 'W':
24229 continue;
24230 case 'f':
24231 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24232 "dump", 4) == 0)
24233 continue;
24234 break;
24235 default:
24236 break;
24237 }
24238 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24239 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24240 break;
24241 }
24242
24243 producer = XNEWVEC (char, plen + 1 + len + 1);
24244 tail = producer;
24245 sprintf (tail, "%s %s", language_string, version_string);
24246 tail += plen;
24247
24248 FOR_EACH_VEC_ELT (switches, j, p)
24249 {
24250 len = strlen (p);
24251 *tail = ' ';
24252 memcpy (tail + 1, p, len);
24253 tail += len + 1;
24254 }
24255
24256 *tail = '\0';
24257 return producer;
24258 }
24259
24260 /* Given a C and/or C++ language/version string, return the "highest".
24261 C++ is assumed to be "higher" than C in this case. Used for merging
24262 LTO translation unit languages. */
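/* For instance, merging a "GNU C11" unit with a "GNU C++14" unit yields
"GNU C++14", and merging "GNU C89" with "GNU C99" yields "GNU C99". */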
24263 static const char *
24264 highest_c_language (const char *lang1, const char *lang2)
24265 {
24266 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24267 return "GNU C++17";
24268 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24269 return "GNU C++14";
24270 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24271 return "GNU C++11";
24272 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24273 return "GNU C++98";
24274
24275 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24276 return "GNU C17";
24277 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24278 return "GNU C11";
24279 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24280 return "GNU C99";
24281 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24282 return "GNU C89";
24283
24284 gcc_unreachable ();
24285 }
24286
24287
24288 /* Generate the DIE for the compilation unit. */
24289
24290 static dw_die_ref
24291 gen_compile_unit_die (const char *filename)
24292 {
24293 dw_die_ref die;
24294 const char *language_string = lang_hooks.name;
24295 int language;
24296
24297 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24298
24299 if (filename)
24300 {
24301 add_name_attribute (die, filename);
24302 /* Don't add cwd for <built-in>. */
24303 if (filename[0] != '<')
24304 add_comp_dir_attribute (die);
24305 }
24306
24307 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24308
24309 /* If our producer is LTO, try to figure out a common language to use
24310 from the global list of translation units. */
24311 if (strcmp (language_string, "GNU GIMPLE") == 0)
24312 {
24313 unsigned i;
24314 tree t;
24315 const char *common_lang = NULL;
24316
24317 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24318 {
24319 if (!TRANSLATION_UNIT_LANGUAGE (t))
24320 continue;
24321 if (!common_lang)
24322 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24323 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24324 ;
24325 else if (strncmp (common_lang, "GNU C", 5) == 0
24326 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24327 /* Mixing C and C++ is ok, use C++ in that case. */
24328 common_lang = highest_c_language (common_lang,
24329 TRANSLATION_UNIT_LANGUAGE (t));
24330 else
24331 {
24332 /* Fall back to C. */
24333 common_lang = NULL;
24334 break;
24335 }
24336 }
24337
24338 if (common_lang)
24339 language_string = common_lang;
24340 }
24341
24342 language = DW_LANG_C;
24343 if (strncmp (language_string, "GNU C", 5) == 0
24344 && ISDIGIT (language_string[5]))
24345 {
24346 language = DW_LANG_C89;
24347 if (dwarf_version >= 3 || !dwarf_strict)
24348 {
24349 if (strcmp (language_string, "GNU C89") != 0)
24350 language = DW_LANG_C99;
24351
24352 if (dwarf_version >= 5 /* || !dwarf_strict */)
24353 if (strcmp (language_string, "GNU C11") == 0
24354 || strcmp (language_string, "GNU C17") == 0)
24355 language = DW_LANG_C11;
24356 }
24357 }
24358 else if (strncmp (language_string, "GNU C++", 7) == 0)
24359 {
24360 language = DW_LANG_C_plus_plus;
24361 if (dwarf_version >= 5 /* || !dwarf_strict */)
24362 {
24363 if (strcmp (language_string, "GNU C++11") == 0)
24364 language = DW_LANG_C_plus_plus_11;
24365 else if (strcmp (language_string, "GNU C++14") == 0)
24366 language = DW_LANG_C_plus_plus_14;
24367 else if (strcmp (language_string, "GNU C++17") == 0)
24368 /* For now. */
24369 language = DW_LANG_C_plus_plus_14;
24370 }
24371 }
24372 else if (strcmp (language_string, "GNU F77") == 0)
24373 language = DW_LANG_Fortran77;
24374 else if (dwarf_version >= 3 || !dwarf_strict)
24375 {
24376 if (strcmp (language_string, "GNU Ada") == 0)
24377 language = DW_LANG_Ada95;
24378 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24379 {
24380 language = DW_LANG_Fortran95;
24381 if (dwarf_version >= 5 /* || !dwarf_strict */)
24382 {
24383 if (strcmp (language_string, "GNU Fortran2003") == 0)
24384 language = DW_LANG_Fortran03;
24385 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24386 language = DW_LANG_Fortran08;
24387 }
24388 }
24389 else if (strcmp (language_string, "GNU Objective-C") == 0)
24390 language = DW_LANG_ObjC;
24391 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24392 language = DW_LANG_ObjC_plus_plus;
24393 else if (dwarf_version >= 5 || !dwarf_strict)
24394 {
24395 if (strcmp (language_string, "GNU Go") == 0)
24396 language = DW_LANG_Go;
24397 }
24398 }
24399 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24400 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24401 language = DW_LANG_Fortran90;
24402
24403 add_AT_unsigned (die, DW_AT_language, language);
24404
24405 switch (language)
24406 {
24407 case DW_LANG_Fortran77:
24408 case DW_LANG_Fortran90:
24409 case DW_LANG_Fortran95:
24410 case DW_LANG_Fortran03:
24411 case DW_LANG_Fortran08:
24412 /* Fortran has case-insensitive identifiers and the front-end
24413 lowercases everything. */
24414 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24415 break;
24416 default:
24417 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24418 break;
24419 }
24420 return die;
24421 }
24422
24423 /* Generate the DIE for a base class. */
24424
24425 static void
24426 gen_inheritance_die (tree binfo, tree access, tree type,
24427 dw_die_ref context_die)
24428 {
24429 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24430 struct vlr_context ctx = { type, NULL };
24431
24432 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24433 context_die);
24434 add_data_member_location_attribute (die, binfo, &ctx);
24435
24436 if (BINFO_VIRTUAL_P (binfo))
24437 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24438
24439 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24440 children, otherwise the default is DW_ACCESS_public. In DWARF2
24441 the default has always been DW_ACCESS_private. */
24442 if (access == access_public_node)
24443 {
24444 if (dwarf_version == 2
24445 || context_die->die_tag == DW_TAG_class_type)
24446 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24447 }
24448 else if (access == access_protected_node)
24449 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24450 else if (dwarf_version > 2
24451 && context_die->die_tag != DW_TAG_class_type)
24452 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24453 }
24454
24455 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24456 structure. */
24457 static bool
24458 is_variant_part (tree decl)
24459 {
24460 return (TREE_CODE (decl) == FIELD_DECL
24461 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24462 }
24463
24464 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24465 return the FIELD_DECL. Return NULL_TREE otherwise. */
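/* In practice the reference is expected to have the shape
COMPONENT_REF (PLACEHOLDER_EXPR of STRUCT_TYPE, FIELD_DECL), possibly
wrapped in conversions, which are stripped first below. */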
24466
24467 static tree
24468 analyze_discr_in_predicate (tree operand, tree struct_type)
24469 {
24470 bool continue_stripping = true;
24471 while (continue_stripping)
24472 switch (TREE_CODE (operand))
24473 {
24474 CASE_CONVERT:
24475 operand = TREE_OPERAND (operand, 0);
24476 break;
24477 default:
24478 continue_stripping = false;
24479 break;
24480 }
24481
24482 /* Match field access to members of struct_type only. */
24483 if (TREE_CODE (operand) == COMPONENT_REF
24484 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24485 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24486 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24487 return TREE_OPERAND (operand, 1);
24488 else
24489 return NULL_TREE;
24490 }
24491
24492 /* Check that SRC is a constant integer that can be represented as a native
24493 integer constant (either signed or unsigned). If so, store it into DEST and
24494 return true. Return false otherwise. */
24495
24496 static bool
24497 get_discr_value (tree src, dw_discr_value *dest)
24498 {
24499 tree discr_type = TREE_TYPE (src);
24500
24501 if (lang_hooks.types.get_debug_type)
24502 {
24503 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24504 if (debug_type != NULL)
24505 discr_type = debug_type;
24506 }
24507
24508 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24509 return false;
24510
24511 /* Signedness can vary between the original type and the debug type. This
24512 can happen for character types in Ada for instance: the character type
24513 used for code generation can be signed, to be compatible with the C one,
24514 but from a debugger point of view, it must be unsigned. */
24515 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24516 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24517
24518 if (is_orig_unsigned != is_debug_unsigned)
24519 src = fold_convert (discr_type, src);
24520
24521 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24522 return false;
24523
24524 dest->pos = is_debug_unsigned;
24525 if (is_debug_unsigned)
24526 dest->v.uval = tree_to_uhwi (src);
24527 else
24528 dest->v.sval = tree_to_shwi (src);
24529
24530 return true;
24531 }
24532
24533 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24534 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24535 store NULL_TREE in DISCR_DECL. Otherwise:
24536
24537 - store the discriminant field in STRUCT_TYPE that controls the variant
24538 part to *DISCR_DECL
24539
24540 - put in *DISCR_LISTS_P an array where for each variant, the item
24541 represents the corresponding matching list of discriminant values.
24542
24543 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24544 the above array.
24545
24546 Note that when the array is allocated (i.e. when the analysis is
24547 successful), it is up to the caller to free the array. */
24548
24549 static void
24550 analyze_variants_discr (tree variant_part_decl,
24551 tree struct_type,
24552 tree *discr_decl,
24553 dw_discr_list_ref **discr_lists_p,
24554 unsigned *discr_lists_length)
24555 {
24556 tree variant_part_type = TREE_TYPE (variant_part_decl);
24557 tree variant;
24558 dw_discr_list_ref *discr_lists;
24559 unsigned i;
24560
24561 /* Compute how many variants there are in this variant part. */
24562 *discr_lists_length = 0;
24563 for (variant = TYPE_FIELDS (variant_part_type);
24564 variant != NULL_TREE;
24565 variant = DECL_CHAIN (variant))
24566 ++*discr_lists_length;
24567
24568 *discr_decl = NULL_TREE;
24569 *discr_lists_p
24570 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24571 sizeof (**discr_lists_p));
24572 discr_lists = *discr_lists_p;
24573
24574 /* And then analyze all variants to extract discriminant information for all
24575 of them. This analysis is conservative: as soon as we detect something we
24576 do not support, abort everything and pretend we found nothing. */
24577 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24578 variant != NULL_TREE;
24579 variant = DECL_CHAIN (variant), ++i)
24580 {
24581 tree match_expr = DECL_QUALIFIER (variant);
24582
24583 /* Now, try to analyze the predicate and deduce a discriminant for
24584 it. */
24585 if (match_expr == boolean_true_node)
24586 /* Typically happens for the default variant: it matches all cases that
24587 previous variants rejected. Don't output any matching value for
24588 this one. */
24589 continue;
24590
24591 /* The following loop tries to iterate over each discriminant
24592 possibility: single values or ranges. */
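/* As an illustration, a predicate covering the values 1 and 4 .. 7 is
expected to look roughly like
<discr> == 1 || (<discr> >= 4 && <discr> <= 7)
i.e. nested TRUTH_ORIF_EXPRs whose operands are EQ_EXPRs for single
values and TRUTH_ANDIF_EXPRs of GE/GT and LE/LT comparisons for
ranges, which is the shape handled below. */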
24593 while (match_expr != NULL_TREE)
24594 {
24595 tree next_round_match_expr;
24596 tree candidate_discr = NULL_TREE;
24597 dw_discr_list_ref new_node = NULL;
24598
24599 /* Possibilities are matched one after the other by nested
24600 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24601 continue with the rest at next iteration. */
24602 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24603 {
24604 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24605 match_expr = TREE_OPERAND (match_expr, 1);
24606 }
24607 else
24608 next_round_match_expr = NULL_TREE;
24609
24610 if (match_expr == boolean_false_node)
24611 /* This sub-expression matches nothing: just wait for the next
24612 one. */
24613 ;
24614
24615 else if (TREE_CODE (match_expr) == EQ_EXPR)
24616 {
24617 /* We are matching: <discr_field> == <integer_cst>
24618 This sub-expression matches a single value. */
24619 tree integer_cst = TREE_OPERAND (match_expr, 1);
24620
24621 candidate_discr
24622 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24623 struct_type);
24624
24625 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24626 if (!get_discr_value (integer_cst,
24627 &new_node->dw_discr_lower_bound))
24628 goto abort;
24629 new_node->dw_discr_range = false;
24630 }
24631
24632 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24633 {
24634 /* We are matching:
24635 <discr_field> > <integer_cst>
24636 && <discr_field> < <integer_cst>.
24637 This sub-expression matches the range of values between the
24638 two matched integer constants. Note that comparisons can be
24639 inclusive or exclusive. */
24640 tree candidate_discr_1, candidate_discr_2;
24641 tree lower_cst, upper_cst;
24642 bool lower_cst_included, upper_cst_included;
24643 tree lower_op = TREE_OPERAND (match_expr, 0);
24644 tree upper_op = TREE_OPERAND (match_expr, 1);
24645
24646 /* When the comparison is exclusive, the integer constant is not
24647 the discriminant range bound we are looking for: we will have
24648 to increment or decrement it. */
24649 if (TREE_CODE (lower_op) == GE_EXPR)
24650 lower_cst_included = true;
24651 else if (TREE_CODE (lower_op) == GT_EXPR)
24652 lower_cst_included = false;
24653 else
24654 goto abort;
24655
24656 if (TREE_CODE (upper_op) == LE_EXPR)
24657 upper_cst_included = true;
24658 else if (TREE_CODE (upper_op) == LT_EXPR)
24659 upper_cst_included = false;
24660 else
24661 goto abort;
24662
24663 /* Extract the discriminant from the first operand and check it
24664 is consistent with the same analysis in the second
24665 operand. */
24666 candidate_discr_1
24667 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24668 struct_type);
24669 candidate_discr_2
24670 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24671 struct_type);
24672 if (candidate_discr_1 == candidate_discr_2)
24673 candidate_discr = candidate_discr_1;
24674 else
24675 goto abort;
24676
24677 /* Extract bounds from both. */
24678 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24679 lower_cst = TREE_OPERAND (lower_op, 1);
24680 upper_cst = TREE_OPERAND (upper_op, 1);
24681
24682 if (!lower_cst_included)
24683 lower_cst
24684 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24685 build_int_cst (TREE_TYPE (lower_cst), 1));
24686 if (!upper_cst_included)
24687 upper_cst
24688 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24689 build_int_cst (TREE_TYPE (upper_cst), 1));
24690
24691 if (!get_discr_value (lower_cst,
24692 &new_node->dw_discr_lower_bound)
24693 || !get_discr_value (upper_cst,
24694 &new_node->dw_discr_upper_bound))
24695 goto abort;
24696
24697 new_node->dw_discr_range = true;
24698 }
24699
24700 else
24701 /* Unsupported sub-expression: we cannot determine the set of
24702 matching discriminant values. Abort everything. */
24703 goto abort;
24704
24705 /* If the discriminant info is not consistent with what we saw so
24706 far, consider the analysis failed and abort everything. */
24707 if (candidate_discr == NULL_TREE
24708 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24709 goto abort;
24710 else
24711 *discr_decl = candidate_discr;
24712
24713 if (new_node != NULL)
24714 {
24715 new_node->dw_discr_next = discr_lists[i];
24716 discr_lists[i] = new_node;
24717 }
24718 match_expr = next_round_match_expr;
24719 }
24720 }
24721
24722 /* If we reach this point, we could match everything we were interested
24723 in. */
24724 return;
24725
24726 abort:
24727 /* Clean up all data structures and return no result. */
24728 free (*discr_lists_p);
24729 *discr_lists_p = NULL;
24730 *discr_decl = NULL_TREE;
24731 }
24732
24733 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24734 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24735 under CONTEXT_DIE.
24736
24737 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24738 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24739 this type, which are record types, represent the available variants and each
24740 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24741 values are inferred from these attributes.
24742
24743 In trees, the offsets for the fields inside these sub-records are relative
24744 to the variant part itself, whereas the corresponding DIEs should have
24745 offset attributes that are relative to the embedding record base address.
24746 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24747 must be an expression that computes the offset of the variant part to
24748 describe in DWARF. */
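/* As a purely illustrative example (hypothetical Ada source), a record

     type Rec (Kind : Natural := 0) is record
        case Kind is
           when 0      => I : Integer;
           when others => F : Float;
        end case;
     end record;

   is represented with a FIELD_DECL of QUAL_UNION_TYPE; its two RECORD_TYPE
   members carry DECL_QUALIFIER predicates on Kind, and the code below maps
   that to a DW_TAG_variant_part DIE containing one DW_TAG_variant DIE per
   alternative. */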
24749
24750 static void
24751 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24752 dw_die_ref context_die)
24753 {
24754 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24755 tree variant_part_offset = vlr_ctx->variant_part_offset;
24756 struct loc_descr_context ctx = {
24757 vlr_ctx->struct_type, /* context_type */
24758 NULL_TREE, /* base_decl */
24759 NULL, /* dpi */
24760 false, /* placeholder_arg */
24761 false /* placeholder_seen */
24762 };
24763
24764 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24765 NULL_TREE if there is no such field. */
24766 tree discr_decl = NULL_TREE;
24767 dw_discr_list_ref *discr_lists;
24768 unsigned discr_lists_length = 0;
24769 unsigned i;
24770
24771 dw_die_ref dwarf_proc_die = NULL;
24772 dw_die_ref variant_part_die
24773 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24774
24775 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24776
24777 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24778 &discr_decl, &discr_lists, &discr_lists_length);
24779
24780 if (discr_decl != NULL_TREE)
24781 {
24782 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24783
24784 if (discr_die)
24785 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24786 else
24787 /* We have no DIE for the discriminant, so just discard all
24788 discriminant information in the output. */
24789 discr_decl = NULL_TREE;
24790 }
24791
24792 /* If the offset for this variant part is more complex than a constant,
24793 create a DWARF procedure for it so that we will not have to generate DWARF
24794 expressions for it for each member. */
24795 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24796 && (dwarf_version >= 3 || !dwarf_strict))
24797 {
24798 const tree dwarf_proc_fndecl
24799 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24800 build_function_type (TREE_TYPE (variant_part_offset),
24801 NULL_TREE));
24802 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24803 const dw_loc_descr_ref dwarf_proc_body
24804 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24805
24806 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24807 dwarf_proc_fndecl, context_die);
24808 if (dwarf_proc_die != NULL)
24809 variant_part_offset = dwarf_proc_call;
24810 }
24811
24812 /* Output DIEs for all variants. */
24813 i = 0;
24814 for (tree variant = TYPE_FIELDS (variant_part_type);
24815 variant != NULL_TREE;
24816 variant = DECL_CHAIN (variant), ++i)
24817 {
24818 tree variant_type = TREE_TYPE (variant);
24819 dw_die_ref variant_die;
24820
24821 /* All variants (i.e. members of a variant part) are supposed to be
24822 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24823 under these records. */
24824 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24825
24826 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24827 equate_decl_number_to_die (variant, variant_die);
24828
24829 /* Output discriminant values this variant matches, if any. */
24830 if (discr_decl == NULL || discr_lists[i] == NULL)
24831 /* In case we have no discriminant information at all, this is
24832 probably the default variant: as the standard says, don't
24833 output any discriminant value/list attribute. */
24834 ;
24835 else if (discr_lists[i]->dw_discr_next == NULL
24836 && !discr_lists[i]->dw_discr_range)
24837 /* If there is only one accepted value, don't bother outputting a
24838 list. */
24839 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24840 else
24841 add_discr_list (variant_die, discr_lists[i]);
24842
24843 for (tree member = TYPE_FIELDS (variant_type);
24844 member != NULL_TREE;
24845 member = DECL_CHAIN (member))
24846 {
24847 struct vlr_context vlr_sub_ctx = {
24848 vlr_ctx->struct_type, /* struct_type */
24849 NULL /* variant_part_offset */
24850 };
24851 if (is_variant_part (member))
24852 {
24853 /* All offsets for fields inside variant parts are relative to
24854 the top-level embedding RECORD_TYPE's base address. On the
24855 other hand, offsets in GCC's types are relative to the
24856 nested-most variant part. So we have to sum offsets each time
24857 we recurse. */
24858
24859 vlr_sub_ctx.variant_part_offset
24860 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24861 variant_part_offset, byte_position (member));
24862 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24863 }
24864 else
24865 {
24866 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24867 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24868 }
24869 }
24870 }
24871
24872 free (discr_lists);
24873 }
24874
24875 /* Generate a DIE for a class member. */
24876
24877 static void
24878 gen_member_die (tree type, dw_die_ref context_die)
24879 {
24880 tree member;
24881 tree binfo = TYPE_BINFO (type);
24882
24883 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24884
24885 /* If this is not an incomplete type, output descriptions of each of its
24886 members. Note that as we output the DIEs necessary to represent the
24887 members of this record or union type, we will also be trying to output
24888 DIEs to represent the *types* of those members. However the `type'
24889 function (above) will specifically avoid generating type DIEs for member
24890 types *within* the list of member DIEs for this (containing) type except
24891 for those types (of members) which are explicitly marked as also being
24892 members of this (containing) type themselves. The g++ front-end can
24893 force any given type to be treated as a member of some other (containing)
24894 type by setting the TYPE_CONTEXT of the given (member) type to point to
24895 the TREE node representing the appropriate (containing) type. */
24896
24897 /* First output info about the base classes. */
24898 if (binfo)
24899 {
24900 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24901 int i;
24902 tree base;
24903
24904 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24905 gen_inheritance_die (base,
24906 (accesses ? (*accesses)[i] : access_public_node),
24907 type,
24908 context_die);
24909 }
24910
24911 /* Now output info about the data members and type members. */
24912 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24913 {
24914 struct vlr_context vlr_ctx = { type, NULL_TREE };
24915 bool static_inline_p
24916 = (TREE_STATIC (member)
24917 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24918 != -1));
24919
24920 /* Ignore clones. */
24921 if (DECL_ABSTRACT_ORIGIN (member))
24922 continue;
24923
24924 /* If we thought we were generating minimal debug info for TYPE
24925 and then changed our minds, some of the member declarations
24926 may have already been defined. Don't define them again, but
24927 do put them in the right order. */
24928
24929 if (dw_die_ref child = lookup_decl_die (member))
24930 {
24931 /* Handle inline static data members, which only have in-class
24932 declarations. */
24933 dw_die_ref ref = NULL;
24934 if (child->die_tag == DW_TAG_variable
24935 && child->die_parent == comp_unit_die ())
24936 {
24937 ref = get_AT_ref (child, DW_AT_specification);
24938 /* For C++17 inline static data members followed by redundant
24939 out of class redeclaration, we might get here with
24940 child being the DIE created for the out of class
24941 redeclaration and with its DW_AT_specification being
24942 the DIE created for in-class definition. We want to
24943 reparent the latter, and don't want to create another
24944 DIE with DW_AT_specification in that case, because
24945 we already have one. */
24946 if (ref
24947 && static_inline_p
24948 && ref->die_tag == DW_TAG_variable
24949 && ref->die_parent == comp_unit_die ()
24950 && get_AT (ref, DW_AT_specification) == NULL)
24951 {
24952 child = ref;
24953 ref = NULL;
24954 static_inline_p = false;
24955 }
24956 }
24957
24958 if (child->die_tag == DW_TAG_variable
24959 && child->die_parent == comp_unit_die ()
24960 && ref == NULL)
24961 {
24962 reparent_child (child, context_die);
24963 if (dwarf_version < 5)
24964 child->die_tag = DW_TAG_member;
24965 }
24966 else
24967 splice_child_die (context_die, child);
24968 }
24969
24970 /* Do not generate standard DWARF for variant parts if we are generating
24971 the corresponding GNAT encodings: DIEs generated for both would
24972 conflict in our mappings. */
24973 else if (is_variant_part (member)
24974 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24975 {
24976 vlr_ctx.variant_part_offset = byte_position (member);
24977 gen_variant_part (member, &vlr_ctx, context_die);
24978 }
24979 else
24980 {
24981 vlr_ctx.variant_part_offset = NULL_TREE;
24982 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24983 }
24984
24985 /* For C++ inline static data members emit immediately a DW_TAG_variable
24986 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24987 DW_AT_specification. */
24988 if (static_inline_p)
24989 {
24990 int old_extern = DECL_EXTERNAL (member);
24991 DECL_EXTERNAL (member) = 0;
24992 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24993 DECL_EXTERNAL (member) = old_extern;
24994 }
24995 }
24996 }
24997
24998 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24999 is set, we pretend that the type was never defined, so we only get the
25000 member DIEs needed by later specification DIEs. */
25001
25002 static void
25003 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25004 enum debug_info_usage usage)
25005 {
25006 if (TREE_ASM_WRITTEN (type))
25007 {
25008 /* Fill in the bounds of variable-length fields in late dwarf if
25009 they are still incomplete. */
25010 if (!early_dwarf && variably_modified_type_p (type, NULL))
25011 for (tree member = TYPE_FIELDS (type);
25012 member;
25013 member = DECL_CHAIN (member))
25014 fill_variable_array_bounds (TREE_TYPE (member));
25015 return;
25016 }
25017
25018 dw_die_ref type_die = lookup_type_die (type);
25019 dw_die_ref scope_die = 0;
25020 int nested = 0;
25021 int complete = (TYPE_SIZE (type)
25022 && (! TYPE_STUB_DECL (type)
25023 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25024 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25025 complete = complete && should_emit_struct_debug (type, usage);
25026
25027 if (type_die && ! complete)
25028 return;
25029
25030 if (TYPE_CONTEXT (type) != NULL_TREE
25031 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25032 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25033 nested = 1;
25034
25035 scope_die = scope_die_for (type, context_die);
25036
25037 /* Generate child DIEs for template parameters. */
25038 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25039 schedule_generic_params_dies_gen (type);
25040
25041 if (! type_die || (nested && is_cu_die (scope_die)))
25042 /* First occurrence of type or toplevel definition of nested class. */
25043 {
25044 dw_die_ref old_die = type_die;
25045
25046 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25047 ? record_type_tag (type) : DW_TAG_union_type,
25048 scope_die, type);
25049 equate_type_number_to_die (type, type_die);
25050 if (old_die)
25051 add_AT_specification (type_die, old_die);
25052 else
25053 add_name_attribute (type_die, type_tag (type));
25054 }
25055 else
25056 remove_AT (type_die, DW_AT_declaration);
25057
25058 /* If this type has been completed, then give it a byte_size attribute and
25059 then give a list of members. */
25060 if (complete && !ns_decl)
25061 {
25062 /* Prevent infinite recursion in cases where the type of some member of
25063 this type is expressed in terms of this type itself. */
25064 TREE_ASM_WRITTEN (type) = 1;
25065 add_byte_size_attribute (type_die, type);
25066 add_alignment_attribute (type_die, type);
25067 if (TYPE_STUB_DECL (type) != NULL_TREE)
25068 {
25069 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25070 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25071 }
25072
25073 /* If the first reference to this type was as the return type of an
25074 inline function, then it may not have a parent. Fix this now. */
25075 if (type_die->die_parent == NULL)
25076 add_child_die (scope_die, type_die);
25077
25078 push_decl_scope (type);
25079 gen_member_die (type, type_die);
25080 pop_decl_scope ();
25081
25082 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25083 if (TYPE_ARTIFICIAL (type))
25084 add_AT_flag (type_die, DW_AT_artificial, 1);
25085
25086 /* GNU extension: Record what type our vtable lives in. */
25087 if (TYPE_VFIELD (type))
25088 {
25089 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25090
25091 gen_type_die (vtype, context_die);
25092 add_AT_die_ref (type_die, DW_AT_containing_type,
25093 lookup_type_die (vtype));
25094 }
25095 }
25096 else
25097 {
25098 add_AT_flag (type_die, DW_AT_declaration, 1);
25099
25100 /* We don't need to do this for function-local types. */
25101 if (TYPE_STUB_DECL (type)
25102 && ! decl_function_context (TYPE_STUB_DECL (type)))
25103 vec_safe_push (incomplete_types, type);
25104 }
25105
25106 if (get_AT (type_die, DW_AT_name))
25107 add_pubtype (type, type_die);
25108 }
25109
25110 /* Generate a DIE for a subroutine _type_. */
25111
25112 static void
25113 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25114 {
25115 tree return_type = TREE_TYPE (type);
25116 dw_die_ref subr_die
25117 = new_die (DW_TAG_subroutine_type,
25118 scope_die_for (type, context_die), type);
25119
25120 equate_type_number_to_die (type, subr_die);
25121 add_prototyped_attribute (subr_die, type);
25122 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25123 context_die);
25124 add_alignment_attribute (subr_die, type);
25125 gen_formal_types_die (type, subr_die);
25126
25127 if (get_AT (subr_die, DW_AT_name))
25128 add_pubtype (type, subr_die);
25129 if ((dwarf_version >= 5 || !dwarf_strict)
25130 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25131 add_AT_flag (subr_die, DW_AT_reference, 1);
25132 if ((dwarf_version >= 5 || !dwarf_strict)
25133 && lang_hooks.types.type_dwarf_attribute (type,
25134 DW_AT_rvalue_reference) != -1)
25135 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25136 }
25137
25138 /* Generate a DIE for a type definition. */
25139
25140 static void
25141 gen_typedef_die (tree decl, dw_die_ref context_die)
25142 {
25143 dw_die_ref type_die;
25144 tree type;
25145
25146 if (TREE_ASM_WRITTEN (decl))
25147 {
25148 if (DECL_ORIGINAL_TYPE (decl))
25149 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25150 return;
25151 }
25152
25153 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25154 checks in process_scope_var and modified_type_die), this should be called
25155 only for original types. */
25156 gcc_assert (decl_ultimate_origin (decl) == NULL
25157 || decl_ultimate_origin (decl) == decl);
25158
25159 TREE_ASM_WRITTEN (decl) = 1;
25160 type_die = new_die (DW_TAG_typedef, context_die, decl);
25161
25162 add_name_and_src_coords_attributes (type_die, decl);
25163 if (DECL_ORIGINAL_TYPE (decl))
25164 {
25165 type = DECL_ORIGINAL_TYPE (decl);
25166 if (type == error_mark_node)
25167 return;
25168
25169 gcc_assert (type != TREE_TYPE (decl));
25170 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25171 }
25172 else
25173 {
25174 type = TREE_TYPE (decl);
25175 if (type == error_mark_node)
25176 return;
25177
25178 if (is_naming_typedef_decl (TYPE_NAME (type)))
25179 {
25180 /* Here, we are in the case of decl being a typedef naming
25181 an anonymous type, e.g:
25182 typedef struct {...} foo;
25183 In that case TREE_TYPE (decl) is not a typedef variant
25184 type and TYPE_NAME of the anonymous type is set to the
25185 TYPE_DECL of the typedef. This construct is emitted by
25186 the C++ FE.
25187
25188 TYPE is the anonymous struct named by the typedef
25189 DECL. As we need the DW_AT_type attribute of the
25190 DW_TAG_typedef to point to the DIE of TYPE, let's
25191 generate that DIE right away. add_type_attribute
25192 called below will then pick (via lookup_type_die) that
25193 anonymous struct DIE. */
25194 if (!TREE_ASM_WRITTEN (type))
25195 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25196
25197 /* This is a GNU Extension. We are adding a
25198 DW_AT_linkage_name attribute to the DIE of the
25199 anonymous struct TYPE. The value of that attribute
25200 is the name of the typedef decl naming the anonymous
25201 struct. This greatly eases the work of consumers of
25202 this debug info. */
25203 add_linkage_name_raw (lookup_type_die (type), decl);
25204 }
25205 }
25206
25207 add_type_attribute (type_die, type, decl_quals (decl), false,
25208 context_die);
25209
25210 if (is_naming_typedef_decl (decl))
25211 /* We want that all subsequent calls to lookup_type_die with
25212 TYPE in argument yield the DW_TAG_typedef we have just
25213 created. */
25214 equate_type_number_to_die (type, type_die);
25215
25216 add_alignment_attribute (type_die, TREE_TYPE (decl));
25217
25218 add_accessibility_attribute (type_die, decl);
25219
25220 if (DECL_ABSTRACT_P (decl))
25221 equate_decl_number_to_die (decl, type_die);
25222
25223 if (get_AT (type_die, DW_AT_name))
25224 add_pubtype (decl, type_die);
25225 }
25226
25227 /* Generate a DIE for a struct, class, enum or union type. */
25228
25229 static void
25230 gen_tagged_type_die (tree type,
25231 dw_die_ref context_die,
25232 enum debug_info_usage usage)
25233 {
25234 int need_pop;
25235
25236 if (type == NULL_TREE
25237 || !is_tagged_type (type))
25238 return;
25239
25240 if (TREE_ASM_WRITTEN (type))
25241 need_pop = 0;
25242 /* If this is a nested type whose containing class hasn't been written
25243 out yet, writing it out will cover this one, too. This does not apply
25244 to instantiations of member class templates; they need to be added to
25245 the containing class as they are generated. FIXME: This hurts the
25246 idea of combining type decls from multiple TUs, since we can't predict
25247 what set of template instantiations we'll get. */
25248 else if (TYPE_CONTEXT (type)
25249 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25250 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25251 {
25252 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25253
25254 if (TREE_ASM_WRITTEN (type))
25255 return;
25256
25257 /* If that failed, attach ourselves to the stub. */
25258 push_decl_scope (TYPE_CONTEXT (type));
25259 context_die = lookup_type_die (TYPE_CONTEXT (type));
25260 need_pop = 1;
25261 }
25262 else if (TYPE_CONTEXT (type) != NULL_TREE
25263 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25264 {
25265 /* If this type is local to a function that hasn't been written
25266 out yet, use a NULL context for now; it will be fixed up in
25267 decls_for_scope. */
25268 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25269 /* A declaration DIE doesn't count; nested types need to go in the
25270 specification. */
25271 if (context_die && is_declaration_die (context_die))
25272 context_die = NULL;
25273 need_pop = 0;
25274 }
25275 else
25276 {
25277 context_die = declare_in_namespace (type, context_die);
25278 need_pop = 0;
25279 }
25280
25281 if (TREE_CODE (type) == ENUMERAL_TYPE)
25282 {
25283 /* This might have been written out by the call to
25284 declare_in_namespace. */
25285 if (!TREE_ASM_WRITTEN (type))
25286 gen_enumeration_type_die (type, context_die);
25287 }
25288 else
25289 gen_struct_or_union_type_die (type, context_die, usage);
25290
25291 if (need_pop)
25292 pop_decl_scope ();
25293
25294 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25295 it up if it is ever completed. gen_*_type_die will set it for us
25296 when appropriate. */
25297 }
25298
25299 /* Generate a type description DIE. */
25300
25301 static void
25302 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25303 enum debug_info_usage usage)
25304 {
25305 struct array_descr_info info;
25306
25307 if (type == NULL_TREE || type == error_mark_node)
25308 return;
25309
25310 if (flag_checking && type)
25311 verify_type (type);
25312
25313 if (TYPE_NAME (type) != NULL_TREE
25314 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25315 && is_redundant_typedef (TYPE_NAME (type))
25316 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25317 /* The DECL of this type is a typedef we don't want to emit debug
25318 info for, but we do want debug info for its underlying type.
25319 This can happen, e.g., for the injected-class-name of a C++
25320 type. */
25321 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25322
25323 /* If TYPE is a typedef type variant, let's generate debug info
25324 for the parent typedef which TYPE is a type of. */
25325 if (typedef_variant_p (type))
25326 {
25327 if (TREE_ASM_WRITTEN (type))
25328 return;
25329
25330 tree name = TYPE_NAME (type);
25331 tree origin = decl_ultimate_origin (name);
25332 if (origin != NULL && origin != name)
25333 {
25334 gen_decl_die (origin, NULL, NULL, context_die);
25335 return;
25336 }
25337
25338 /* Prevent broken recursion; we can't hand off to the same type. */
25339 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25340
25341 /* Give typedefs the right scope. */
25342 context_die = scope_die_for (type, context_die);
25343
25344 TREE_ASM_WRITTEN (type) = 1;
25345
25346 gen_decl_die (name, NULL, NULL, context_die);
25347 return;
25348 }
25349
25350 /* If type is an anonymous tagged type named by a typedef, let's
25351 generate debug info for the typedef. */
25352 if (is_naming_typedef_decl (TYPE_NAME (type)))
25353 {
25354 /* Use the DIE of the containing namespace as the parent DIE of
25355 the type description DIE we want to generate. */
25356 if (DECL_CONTEXT (TYPE_NAME (type))
25357 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25358 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25359
25360 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25361 return;
25362 }
25363
25364 if (lang_hooks.types.get_debug_type)
25365 {
25366 tree debug_type = lang_hooks.types.get_debug_type (type);
25367
25368 if (debug_type != NULL_TREE && debug_type != type)
25369 {
25370 gen_type_die_with_usage (debug_type, context_die, usage);
25371 return;
25372 }
25373 }
25374
25375 /* We are going to output a DIE to represent the unqualified version
25376 of this type (i.e. without any const or volatile qualifiers) so
25377 get the main variant (i.e. the unqualified version) of this type
25378 now. (Vectors and arrays are special because the debugging info is in the
25379 cloned type itself. Similarly function/method types can contain extra
25380 ref-qualification). */
25381 if (TREE_CODE (type) == FUNCTION_TYPE
25382 || TREE_CODE (type) == METHOD_TYPE)
25383 {
25384 /* For function/method types, can't use type_main_variant here,
25385 because that can have different ref-qualifiers for C++,
25386 but try to canonicalize. */
25387 tree main = TYPE_MAIN_VARIANT (type);
25388 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25389 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25390 && check_base_type (t, main)
25391 && check_lang_type (t, type))
25392 {
25393 type = t;
25394 break;
25395 }
25396 }
25397 else if (TREE_CODE (type) != VECTOR_TYPE
25398 && TREE_CODE (type) != ARRAY_TYPE)
25399 type = type_main_variant (type);
25400
25401 /* If this is an array type with a hidden descriptor, handle it first. */
25402 if (!TREE_ASM_WRITTEN (type)
25403 && lang_hooks.types.get_array_descr_info)
25404 {
25405 memset (&info, 0, sizeof (info));
25406 if (lang_hooks.types.get_array_descr_info (type, &info))
25407 {
25408 /* Fortran sometimes emits array types with no dimension. */
25409 gcc_assert (info.ndimensions >= 0
25410 && (info.ndimensions
25411 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25412 gen_descr_array_type_die (type, &info, context_die);
25413 TREE_ASM_WRITTEN (type) = 1;
25414 return;
25415 }
25416 }
25417
25418 if (TREE_ASM_WRITTEN (type))
25419 {
25420 /* Variable-length types may be incomplete even if
25421 TREE_ASM_WRITTEN. For such types, fall through to
25422 gen_array_type_die() and possibly fill in
25423 DW_AT_{upper,lower}_bound attributes. */
25424 if ((TREE_CODE (type) != ARRAY_TYPE
25425 && TREE_CODE (type) != RECORD_TYPE
25426 && TREE_CODE (type) != UNION_TYPE
25427 && TREE_CODE (type) != QUAL_UNION_TYPE)
25428 || !variably_modified_type_p (type, NULL))
25429 return;
25430 }
25431
25432 switch (TREE_CODE (type))
25433 {
25434 case ERROR_MARK:
25435 break;
25436
25437 case POINTER_TYPE:
25438 case REFERENCE_TYPE:
25439 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25440 ensures that the gen_type_die recursion will terminate even if the
25441 type is recursive. Recursive types are possible in Ada. */
25442 /* ??? We could perhaps do this for all types before the switch
25443 statement. */
25444 TREE_ASM_WRITTEN (type) = 1;
25445
25446 /* For these types, all that is required is that we output a DIE (or a
25447 set of DIEs) to represent the "basis" type. */
25448 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25449 DINFO_USAGE_IND_USE);
25450 break;
25451
25452 case OFFSET_TYPE:
25453 /* This code is used for C++ pointer-to-data-member types.
25454 Output a description of the relevant class type. */
25455 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25456 DINFO_USAGE_IND_USE);
25457
25458 /* Output a description of the type of the object pointed to. */
25459 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25460 DINFO_USAGE_IND_USE);
25461
25462 /* Now output a DIE to represent this pointer-to-data-member type
25463 itself. */
25464 gen_ptr_to_mbr_type_die (type, context_die);
25465 break;
25466
25467 case FUNCTION_TYPE:
25468 /* Force out return type (in case it wasn't forced out already). */
25469 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25470 DINFO_USAGE_DIR_USE);
25471 gen_subroutine_type_die (type, context_die);
25472 break;
25473
25474 case METHOD_TYPE:
25475 /* Force out return type (in case it wasn't forced out already). */
25476 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25477 DINFO_USAGE_DIR_USE);
25478 gen_subroutine_type_die (type, context_die);
25479 break;
25480
25481 case ARRAY_TYPE:
25482 case VECTOR_TYPE:
25483 gen_array_type_die (type, context_die);
25484 break;
25485
25486 case ENUMERAL_TYPE:
25487 case RECORD_TYPE:
25488 case UNION_TYPE:
25489 case QUAL_UNION_TYPE:
25490 gen_tagged_type_die (type, context_die, usage);
25491 return;
25492
25493 case VOID_TYPE:
25494 case INTEGER_TYPE:
25495 case REAL_TYPE:
25496 case FIXED_POINT_TYPE:
25497 case COMPLEX_TYPE:
25498 case BOOLEAN_TYPE:
25499 case POINTER_BOUNDS_TYPE:
25500 /* No DIEs needed for fundamental types. */
25501 break;
25502
25503 case NULLPTR_TYPE:
25504 case LANG_TYPE:
25505 /* Just use DW_TAG_unspecified_type. */
25506 {
25507 dw_die_ref type_die = lookup_type_die (type);
25508 if (type_die == NULL)
25509 {
25510 tree name = TYPE_IDENTIFIER (type);
25511 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25512 type);
25513 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25514 equate_type_number_to_die (type, type_die);
25515 }
25516 }
25517 break;
25518
25519 default:
25520 if (is_cxx_auto (type))
25521 {
25522 tree name = TYPE_IDENTIFIER (type);
25523 dw_die_ref *die = (name == get_identifier ("auto")
25524 ? &auto_die : &decltype_auto_die);
25525 if (!*die)
25526 {
25527 *die = new_die (DW_TAG_unspecified_type,
25528 comp_unit_die (), NULL_TREE);
25529 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25530 }
25531 equate_type_number_to_die (type, *die);
25532 break;
25533 }
25534 gcc_unreachable ();
25535 }
25536
25537 TREE_ASM_WRITTEN (type) = 1;
25538 }
25539
25540 static void
25541 gen_type_die (tree type, dw_die_ref context_die)
25542 {
25543 if (type != error_mark_node)
25544 {
25545 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25546 if (flag_checking)
25547 {
25548 dw_die_ref die = lookup_type_die (type);
25549 if (die)
25550 check_die (die);
25551 }
25552 }
25553 }
25554
25555 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25556 things which are local to the given block. */
25557
25558 static void
25559 gen_block_die (tree stmt, dw_die_ref context_die)
25560 {
25561 int must_output_die = 0;
25562 bool inlined_func;
25563
25564 /* Ignore blocks that are NULL. */
25565 if (stmt == NULL_TREE)
25566 return;
25567
25568 inlined_func = inlined_function_outer_scope_p (stmt);
25569
25570 /* If the block is one fragment of a non-contiguous block, do not
25571 process the variables, since they will have been done by the
25572 origin block. Do process subblocks. */
25573 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25574 {
25575 tree sub;
25576
25577 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25578 gen_block_die (sub, context_die);
25579
25580 return;
25581 }
25582
25583 /* Determine if we need to output any Dwarf DIEs at all to represent this
25584 block. */
25585 if (inlined_func)
25586 /* The outer scopes for inlinings *must* always be represented. We
25587 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25588 must_output_die = 1;
25589 else
25590 {
25591 /* Determine if this block directly contains any "significant"
25592 local declarations which we will need to output DIEs for. */
25593 if (debug_info_level > DINFO_LEVEL_TERSE)
25594 /* We are not in terse mode so *any* local declaration counts
25595 as being a "significant" one. */
25596 must_output_die = ((BLOCK_VARS (stmt) != NULL
25597 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25598 && (TREE_USED (stmt)
25599 || TREE_ASM_WRITTEN (stmt)
25600 || BLOCK_ABSTRACT (stmt)));
25601 else if ((TREE_USED (stmt)
25602 || TREE_ASM_WRITTEN (stmt)
25603 || BLOCK_ABSTRACT (stmt))
25604 && !dwarf2out_ignore_block (stmt))
25605 must_output_die = 1;
25606 }
25607
25608 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25609 DIE for any block which contains no significant local declarations at
25610 all. Rather, in such cases we just call `decls_for_scope' so that any
25611 needed Dwarf info for any sub-blocks will get properly generated. Note
25612 that in terse mode, our definition of what constitutes a "significant"
25613 local declaration gets restricted to include only inlined function
25614 instances and local (nested) function definitions. */
25615 if (must_output_die)
25616 {
25617 if (inlined_func)
25618 {
25619 /* If STMT block is abstract, that means we have been called
25620 indirectly from dwarf2out_abstract_function.
25621 That function rightfully marks the descendant blocks (of
25622 the abstract function it is dealing with) as being abstract,
25623 precisely to prevent us from emitting any
25624 DW_TAG_inlined_subroutine DIE as a descendant
25625 of an abstract function instance. So in that case, we should
25626 not call gen_inlined_subroutine_die.
25627
25628 Later though, when cgraph asks dwarf2out to emit info
25629 for the concrete instance of the function decl into which
25630 the concrete instance of STMT got inlined, the latter will lead
25631 to the generation of a DW_TAG_inlined_subroutine DIE. */
25632 if (! BLOCK_ABSTRACT (stmt))
25633 gen_inlined_subroutine_die (stmt, context_die);
25634 }
25635 else
25636 gen_lexical_block_die (stmt, context_die);
25637 }
25638 else
25639 decls_for_scope (stmt, context_die);
25640 }
25641
25642 /* Process variable DECL (or variable with origin ORIGIN) within
25643 block STMT and add it to CONTEXT_DIE. */
25644 static void
25645 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25646 {
25647 dw_die_ref die;
25648 tree decl_or_origin = decl ? decl : origin;
25649
25650 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25651 die = lookup_decl_die (decl_or_origin);
25652 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25653 {
25654 if (TYPE_DECL_IS_STUB (decl_or_origin))
25655 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25656 else
25657 die = lookup_decl_die (decl_or_origin);
25658 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25659 if (! die && ! early_dwarf)
25660 return;
25661 }
25662 else
25663 die = NULL;
25664
25665 /* Avoid creating DIEs for local typedefs and concrete static variables that
25666 will only be pruned later. */
25667 if ((origin || decl_ultimate_origin (decl))
25668 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25669 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25670 {
25671 origin = decl_ultimate_origin (decl_or_origin);
25672 if (decl && VAR_P (decl) && die != NULL)
25673 {
25674 die = lookup_decl_die (origin);
25675 if (die != NULL)
25676 equate_decl_number_to_die (decl, die);
25677 }
25678 return;
25679 }
25680
25681 if (die != NULL && die->die_parent == NULL)
25682 add_child_die (context_die, die);
25683 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25684 {
25685 if (early_dwarf)
25686 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25687 stmt, context_die);
25688 }
25689 else
25690 {
25691 if (decl && DECL_P (decl))
25692 {
25693 die = lookup_decl_die (decl);
25694
25695 /* Early created DIEs do not have a parent as the decls refer
25696 to the function as DECL_CONTEXT rather than the BLOCK. */
25697 if (die && die->die_parent == NULL)
25698 {
25699 gcc_assert (in_lto_p);
25700 add_child_die (context_die, die);
25701 }
25702 }
25703
25704 gen_decl_die (decl, origin, NULL, context_die);
25705 }
25706 }
25707
25708 /* Generate all of the decls declared within a given scope and (recursively)
25709 all of its sub-blocks. */
25710
25711 static void
25712 decls_for_scope (tree stmt, dw_die_ref context_die)
25713 {
25714 tree decl;
25715 unsigned int i;
25716 tree subblocks;
25717
25718 /* Ignore NULL blocks. */
25719 if (stmt == NULL_TREE)
25720 return;
25721
25722 /* Output the DIEs to represent all of the data objects and typedefs
25723 declared directly within this block but not within any nested
25724 sub-blocks. Also, nested function and tag DIEs have been
25725 generated with a parent of NULL; fix that up now. We don't
25726 have to do this if we're at -g1. */
25727 if (debug_info_level > DINFO_LEVEL_TERSE)
25728 {
25729 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25730 process_scope_var (stmt, decl, NULL_TREE, context_die);
25731 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25732 origin - avoid doing this twice as we have no good way to see
25733 if we've done it once already. */
25734 if (! early_dwarf)
25735 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25736 {
25737 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25738 if (decl == current_function_decl)
25739 /* Ignore declarations of the current function: although they
25740 are declarations, gen_subprogram_die would treat them
25741 as definitions again because they are equal to
25742 current_function_decl, and endlessly recurse. */;
25743 else if (TREE_CODE (decl) == FUNCTION_DECL)
25744 process_scope_var (stmt, decl, NULL_TREE, context_die);
25745 else
25746 process_scope_var (stmt, NULL_TREE, decl, context_die);
25747 }
25748 }
25749
25750 /* Even if we're at -g1, we need to process the subblocks in order to get
25751 inlined call information. */
25752
25753 /* Output the DIEs to represent all sub-blocks (and the items declared
25754 therein) of this block. */
25755 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25756 subblocks != NULL;
25757 subblocks = BLOCK_CHAIN (subblocks))
25758 gen_block_die (subblocks, context_die);
25759 }
25760
25761 /* Is this a typedef we can avoid emitting? */
25762
25763 bool
25764 is_redundant_typedef (const_tree decl)
25765 {
25766 if (TYPE_DECL_IS_STUB (decl))
25767 return true;
25768
25769 if (DECL_ARTIFICIAL (decl)
25770 && DECL_CONTEXT (decl)
25771 && is_tagged_type (DECL_CONTEXT (decl))
25772 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25773 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25774 /* Also ignore the artificial member typedef for the class name. */
25775 return true;
25776
25777 return false;
25778 }
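
/* For instance (an illustrative C++ sketch, not from this file): for

     struct S { int i; };

   the C++ front end creates an artificial member TYPE_DECL `S' inside
   `S' (the injected-class-name) whose DECL_NAME matches the TYPE_NAME
   of its DECL_CONTEXT, so the second test above treats it as redundant
   and no separate typedef DIE is emitted for it.  */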
25779
25780 /* Return TRUE if DECL is a typedef that names a type for linkage
25781 purposes. This kind of typedef is produced by the C++ FE for
25782 constructs like:
25783
25784 typedef struct {...} foo;
25785
25786 In that case, there is no typedef variant type produced for foo.
25787 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25788 struct type. */
25789
25790 static bool
25791 is_naming_typedef_decl (const_tree decl)
25792 {
25793 if (decl == NULL_TREE
25794 || TREE_CODE (decl) != TYPE_DECL
25795 || DECL_NAMELESS (decl)
25796 || !is_tagged_type (TREE_TYPE (decl))
25797 || DECL_IS_BUILTIN (decl)
25798 || is_redundant_typedef (decl)
25799 /* It looks like Ada produces TYPE_DECLs that are very similar
25800 to C++ naming typedefs but that have different
25801 semantics. Let's be specific to C++ for now. */
25802 || !is_cxx (decl))
25803 return FALSE;
25804
25805 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25806 && TYPE_NAME (TREE_TYPE (decl)) == decl
25807 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25808 != TYPE_NAME (TREE_TYPE (decl))));
25809 }
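
/* For contrast (an illustrative sketch): with

     struct bar { int i; };
     typedef struct bar bar_t;

   `bar_t' is an ordinary typedef, not a naming typedef: its
   DECL_ORIGINAL_TYPE is set and TYPE_NAME of `struct bar' is the
   TYPE_DECL for `bar' rather than for `bar_t', so the checks above
   reject it and it is handled by gen_typedef_die as usual.  */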
25810
25811 /* Looks up the DIE for a context. */
25812
25813 static inline dw_die_ref
25814 lookup_context_die (tree context)
25815 {
25816 if (context)
25817 {
25818 /* Find die that represents this context. */
25819 if (TYPE_P (context))
25820 {
25821 context = TYPE_MAIN_VARIANT (context);
25822 dw_die_ref ctx = lookup_type_die (context);
25823 if (!ctx)
25824 return NULL;
25825 return strip_naming_typedef (context, ctx);
25826 }
25827 else
25828 return lookup_decl_die (context);
25829 }
25830 return comp_unit_die ();
25831 }
25832
25833 /* Returns the DIE for a context. */
25834
25835 static inline dw_die_ref
25836 get_context_die (tree context)
25837 {
25838 if (context)
25839 {
25840 /* Find die that represents this context. */
25841 if (TYPE_P (context))
25842 {
25843 context = TYPE_MAIN_VARIANT (context);
25844 return strip_naming_typedef (context, force_type_die (context));
25845 }
25846 else
25847 return force_decl_die (context);
25848 }
25849 return comp_unit_die ();
25850 }
25851
25852 /* Returns the DIE for decl. A DIE will always be returned. */
25853
25854 static dw_die_ref
25855 force_decl_die (tree decl)
25856 {
25857 dw_die_ref decl_die;
25858 unsigned saved_external_flag;
25859 tree save_fn = NULL_TREE;
25860 decl_die = lookup_decl_die (decl);
25861 if (!decl_die)
25862 {
25863 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25864
25865 decl_die = lookup_decl_die (decl);
25866 if (decl_die)
25867 return decl_die;
25868
25869 switch (TREE_CODE (decl))
25870 {
25871 case FUNCTION_DECL:
25872 /* Clear current_function_decl, so that gen_subprogram_die thinks
25873 that this is a declaration. At this point, we just want to force
25874 a declaration DIE. */
25875 save_fn = current_function_decl;
25876 current_function_decl = NULL_TREE;
25877 gen_subprogram_die (decl, context_die);
25878 current_function_decl = save_fn;
25879 break;
25880
25881 case VAR_DECL:
25882 /* Set the external flag to force a declaration DIE. Restore it after
25883 the gen_decl_die() call. */
25884 saved_external_flag = DECL_EXTERNAL (decl);
25885 DECL_EXTERNAL (decl) = 1;
25886 gen_decl_die (decl, NULL, NULL, context_die);
25887 DECL_EXTERNAL (decl) = saved_external_flag;
25888 break;
25889
25890 case NAMESPACE_DECL:
25891 if (dwarf_version >= 3 || !dwarf_strict)
25892 dwarf2out_decl (decl);
25893 else
25894 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
25895 decl_die = comp_unit_die ();
25896 break;
25897
25898 case TRANSLATION_UNIT_DECL:
25899 decl_die = comp_unit_die ();
25900 break;
25901
25902 default:
25903 gcc_unreachable ();
25904 }
25905
25906 /* We should be able to find the DIE now. */
25907 if (!decl_die)
25908 decl_die = lookup_decl_die (decl);
25909 gcc_assert (decl_die);
25910 }
25911
25912 return decl_die;
25913 }
25914
25915 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25916 always returned. */
25917
25918 static dw_die_ref
25919 force_type_die (tree type)
25920 {
25921 dw_die_ref type_die;
25922
25923 type_die = lookup_type_die (type);
25924 if (!type_die)
25925 {
25926 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25927
25928 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25929 false, context_die);
25930 gcc_assert (type_die);
25931 }
25932 return type_die;
25933 }
25934
25935 /* Force out any required namespaces to be able to output DECL,
25936 and return the new context_die for it, if it's changed. */
25937
25938 static dw_die_ref
25939 setup_namespace_context (tree thing, dw_die_ref context_die)
25940 {
25941 tree context = (DECL_P (thing)
25942 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25943 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25944 /* Force out the namespace. */
25945 context_die = force_decl_die (context);
25946
25947 return context_die;
25948 }
25949
25950 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25951 type) within its namespace, if appropriate.
25952
25953 For compatibility with older debuggers, namespace DIEs only contain
25954 declarations; all definitions are emitted at CU scope, with
25955 DW_AT_specification pointing to the declaration (like with class
25956 members). */
25957
25958 static dw_die_ref
25959 declare_in_namespace (tree thing, dw_die_ref context_die)
25960 {
25961 dw_die_ref ns_context;
25962
25963 if (debug_info_level <= DINFO_LEVEL_TERSE)
25964 return context_die;
25965
25966 /* External declarations in the local scope only need to be emitted
25967 once, not once in the namespace and once in the scope.
25968
25969 This avoids declaring the `extern' below in the
25970 namespace DIE as well as in the innermost scope:
25971
25972 namespace S
25973 {
25974 int i=5;
25975 int foo()
25976 {
25977 int i=8;
25978 extern int i;
25979 return i;
25980 }
25981 }
25982 */
25983 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25984 return context_die;
25985
25986 /* If this decl is from an inlined function, then don't try to emit it in its
25987 namespace, as we will get confused. It would have already been emitted
25988 when the abstract instance of the inline function was emitted anyway. */
25989 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25990 return context_die;
25991
25992 ns_context = setup_namespace_context (thing, context_die);
25993
25994 if (ns_context != context_die)
25995 {
25996 if (is_fortran ())
25997 return ns_context;
25998 if (DECL_P (thing))
25999 gen_decl_die (thing, NULL, NULL, ns_context);
26000 else
26001 gen_type_die (thing, ns_context);
26002 }
26003 return context_die;
26004 }
26005
26006 /* Generate a DIE for a namespace or namespace alias. */
26007
26008 static void
26009 gen_namespace_die (tree decl, dw_die_ref context_die)
26010 {
26011 dw_die_ref namespace_die;
26012
26013 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26014 they are an alias of. */
26015 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26016 {
26017 /* Output a real namespace or module. */
26018 context_die = setup_namespace_context (decl, comp_unit_die ());
26019 namespace_die = new_die (is_fortran ()
26020 ? DW_TAG_module : DW_TAG_namespace,
26021 context_die, decl);
26022 /* For Fortran modules defined in a different CU, don't add source coordinates. */
26023 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26024 {
26025 const char *name = dwarf2_name (decl, 0);
26026 if (name)
26027 add_name_attribute (namespace_die, name);
26028 }
26029 else
26030 add_name_and_src_coords_attributes (namespace_die, decl);
26031 if (DECL_EXTERNAL (decl))
26032 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26033 equate_decl_number_to_die (decl, namespace_die);
26034 }
26035 else
26036 {
26037 /* Output a namespace alias. */
26038
26039 /* Force out the namespace we are an alias of, if necessary. */
26040 dw_die_ref origin_die
26041 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26042
26043 if (DECL_FILE_SCOPE_P (decl)
26044 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26045 context_die = setup_namespace_context (decl, comp_unit_die ());
26046 /* Now create the namespace alias DIE. */
26047 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26048 add_name_and_src_coords_attributes (namespace_die, decl);
26049 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26050 equate_decl_number_to_die (decl, namespace_die);
26051 }
26052 if ((dwarf_version >= 5 || !dwarf_strict)
26053 && lang_hooks.decls.decl_dwarf_attribute (decl,
26054 DW_AT_export_symbols) == 1)
26055 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26056
26057 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26058 if (want_pubnames ())
26059 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26060 }
26061
26062 /* Generate Dwarf debug information for a decl described by DECL.
26063 The return value is currently only meaningful for PARM_DECLs;
26064 for all other decls it returns NULL.
26065
26066 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26067 It can be NULL otherwise. */
26068
26069 static dw_die_ref
26070 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26071 dw_die_ref context_die)
26072 {
26073 tree decl_or_origin = decl ? decl : origin;
26074 tree class_origin = NULL, ultimate_origin;
26075
26076 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26077 return NULL;
26078
26079 /* Ignore pointer bounds decls. */
26080 if (DECL_P (decl_or_origin)
26081 && TREE_TYPE (decl_or_origin)
26082 && POINTER_BOUNDS_P (decl_or_origin))
26083 return NULL;
26084
26085 switch (TREE_CODE (decl_or_origin))
26086 {
26087 case ERROR_MARK:
26088 break;
26089
26090 case CONST_DECL:
26091 if (!is_fortran () && !is_ada ())
26092 {
26093 /* The individual enumerators of an enum type get output when we output
26094 the Dwarf representation of the relevant enum type itself. */
26095 break;
26096 }
26097
26098 /* Emit its type. */
26099 gen_type_die (TREE_TYPE (decl), context_die);
26100
26101 /* And its containing namespace. */
26102 context_die = declare_in_namespace (decl, context_die);
26103
26104 gen_const_die (decl, context_die);
26105 break;
26106
26107 case FUNCTION_DECL:
26108 #if 0
26109 /* FIXME */
26110 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26111 on local redeclarations of global functions. That seems broken. */
26112 if (current_function_decl != decl)
26113 /* This is only a declaration. */;
26114 #endif
26115
26116 /* We should have abstract copies already and should not generate
26117 stray type DIEs in late LTO dumping. */
26118 if (! early_dwarf)
26119 ;
26120
26121 /* If we're emitting a clone, emit info for the abstract instance. */
26122 else if (origin || DECL_ORIGIN (decl) != decl)
26123 dwarf2out_abstract_function (origin
26124 ? DECL_ORIGIN (origin)
26125 : DECL_ABSTRACT_ORIGIN (decl));
26126
26127 /* If we're emitting a possibly inlined function, emit it as an
26128 abstract instance. */
26129 else if (cgraph_function_possibly_inlined_p (decl)
26130 && ! DECL_ABSTRACT_P (decl)
26131 && ! class_or_namespace_scope_p (context_die)
26132 /* dwarf2out_abstract_function won't emit a die if this is just
26133 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26134 that case, because that works only if we have a die. */
26135 && DECL_INITIAL (decl) != NULL_TREE)
26136 dwarf2out_abstract_function (decl);
26137
26138 /* Otherwise we're emitting the primary DIE for this decl. */
26139 else if (debug_info_level > DINFO_LEVEL_TERSE)
26140 {
26141 /* Before we describe the FUNCTION_DECL itself, make sure that we
26142 have its containing type. */
26143 if (!origin)
26144 origin = decl_class_context (decl);
26145 if (origin != NULL_TREE)
26146 gen_type_die (origin, context_die);
26147
26148 /* And its return type. */
26149 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26150
26151 /* And its virtual context. */
26152 if (DECL_VINDEX (decl) != NULL_TREE)
26153 gen_type_die (DECL_CONTEXT (decl), context_die);
26154
26155 /* Make sure we have a member DIE for decl. */
26156 if (origin != NULL_TREE)
26157 gen_type_die_for_member (origin, decl, context_die);
26158
26159 /* And its containing namespace. */
26160 context_die = declare_in_namespace (decl, context_die);
26161 }
26162
26163 /* Now output a DIE to represent the function itself. */
26164 if (decl)
26165 gen_subprogram_die (decl, context_die);
26166 break;
26167
26168 case TYPE_DECL:
26169 /* If we are in terse mode, don't generate any DIEs to represent any
26170 actual typedefs. */
26171 if (debug_info_level <= DINFO_LEVEL_TERSE)
26172 break;
26173
26174 /* In the special case of a TYPE_DECL node representing the declaration
26175 of some type tag, if the given TYPE_DECL is marked as having been
26176 instantiated from some other (original) TYPE_DECL node (e.g. one which
26177 was generated within the original definition of an inline function) we
26178 used to generate a special (abbreviated) DW_TAG_structure_type,
26179 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26180 should be actually referencing those DIEs, as variable DIEs with that
26181 type would be emitted already in the abstract origin, so it was always
26182 removed during unused type pruning. Don't add anything in this
26183 case. */
26184 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26185 break;
26186
26187 if (is_redundant_typedef (decl))
26188 gen_type_die (TREE_TYPE (decl), context_die);
26189 else
26190 /* Output a DIE to represent the typedef itself. */
26191 gen_typedef_die (decl, context_die);
26192 break;
26193
26194 case LABEL_DECL:
26195 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26196 gen_label_die (decl, context_die);
26197 break;
26198
26199 case VAR_DECL:
26200 case RESULT_DECL:
26201 /* If we are in terse mode, don't generate any DIEs to represent any
26202 variable declarations or definitions. */
26203 if (debug_info_level <= DINFO_LEVEL_TERSE)
26204 break;
26205
26206 /* Avoid generating stray type DIEs during late dwarf dumping.
26207 All types have been dumped early. */
26208 if (early_dwarf
26209 /* ??? But in LTRANS we cannot annotate early created variably
26210 modified type DIEs without copying them and adjusting all
26211 references to them. Dump them again as happens for inlining
26212 which copies both the decl and the types. */
26213 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26214 in VLA bound information for example. */
26215 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26216 current_function_decl)))
26217 {
26218 /* Output any DIEs that are needed to specify the type of this data
26219 object. */
26220 if (decl_by_reference_p (decl_or_origin))
26221 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26222 else
26223 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26224 }
26225
26226 if (early_dwarf)
26227 {
26228 /* And its containing type. */
26229 class_origin = decl_class_context (decl_or_origin);
26230 if (class_origin != NULL_TREE)
26231 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26232
26233 /* And its containing namespace. */
26234 context_die = declare_in_namespace (decl_or_origin, context_die);
26235 }
26236
26237 /* Now output the DIE to represent the data object itself. This gets
26238 complicated because of the possibility that the VAR_DECL really
26239 represents an inlined instance of a formal parameter for an inline
26240 function. */
26241 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26242 if (ultimate_origin != NULL_TREE
26243 && TREE_CODE (ultimate_origin) == PARM_DECL)
26244 gen_formal_parameter_die (decl, origin,
26245 true /* Emit name attribute. */,
26246 context_die);
26247 else
26248 gen_variable_die (decl, origin, context_die);
26249 break;
26250
26251 case FIELD_DECL:
26252 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26253 /* Ignore the nameless fields that are used to skip bits, but handle C++
26254 anonymous unions and structs. */
26255 if (DECL_NAME (decl) != NULL_TREE
26256 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26257 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26258 {
26259 gen_type_die (member_declared_type (decl), context_die);
26260 gen_field_die (decl, ctx, context_die);
26261 }
26262 break;
26263
26264 case PARM_DECL:
26265 /* Avoid generating stray type DIEs during late dwarf dumping.
26266 All types have been dumped early. */
26267 if (early_dwarf
26268 /* ??? But in LTRANS we cannot annotate early created variably
26269 modified type DIEs without copying them and adjusting all
26270 references to them. Dump them again as happens for inlining
26271 which copies both the decl and the types. */
26272 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26273 in VLA bound information for example. */
26274 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26275 current_function_decl)))
26276 {
26277 if (DECL_BY_REFERENCE (decl_or_origin))
26278 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26279 else
26280 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26281 }
26282 return gen_formal_parameter_die (decl, origin,
26283 true /* Emit name attribute. */,
26284 context_die);
26285
26286 case NAMESPACE_DECL:
26287 if (dwarf_version >= 3 || !dwarf_strict)
26288 gen_namespace_die (decl, context_die);
26289 break;
26290
26291 case IMPORTED_DECL:
26292 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26293 DECL_CONTEXT (decl), context_die);
26294 break;
26295
26296 case NAMELIST_DECL:
26297 gen_namelist_decl (DECL_NAME (decl), context_die,
26298 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26299 break;
26300
26301 default:
26302 /* Probably some frontend-internal decl. Assume we don't care. */
26303 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26304 break;
26305 }
26306
26307 return NULL;
26308 }
26309 \f
26310 /* Output initial debug information for global DECL. Called at the
26311 end of the parsing process.
26312
26313 This is the initial debug generation process. As such, the DIEs
26314 generated may be incomplete. A later debug generation pass
26315 (dwarf2out_late_global_decl) will augment the information generated
26316 in this pass (e.g., with complete location info). */
26317
26318 static void
26319 dwarf2out_early_global_decl (tree decl)
26320 {
26321 set_early_dwarf s;
26322
26323 /* gen_decl_die() will set DECL_ABSTRACT because
26324 cgraph_function_possibly_inlined_p() returns true. This in
26325 turn will cause DW_AT_inline attributes to be set.
26326
26327 This happens because at early dwarf generation, there is no
26328 cgraph information, causing cgraph_function_possibly_inlined_p()
26329 to return true. Trick cgraph_function_possibly_inlined_p()
26330 while we generate dwarf early. */
26331 bool save = symtab->global_info_ready;
26332 symtab->global_info_ready = true;
26333
26334 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26335 other DECLs and they can point to template types or other things
26336 that dwarf2out can't handle when done via dwarf2out_decl. */
26337 if (TREE_CODE (decl) != TYPE_DECL
26338 && TREE_CODE (decl) != PARM_DECL)
26339 {
26340 if (TREE_CODE (decl) == FUNCTION_DECL)
26341 {
26342 tree save_fndecl = current_function_decl;
26343
26344 /* For nested functions, make sure we have DIEs for the parents first
26345 so that all nested DIEs are generated at the proper scope in the
26346 first shot. */
26347 tree context = decl_function_context (decl);
26348 if (context != NULL)
26349 {
26350 dw_die_ref context_die = lookup_decl_die (context);
26351 current_function_decl = context;
26352
26353 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26354 enough so that it lands in its own context. This avoids type
26355 pruning issues later on. */
26356 if (context_die == NULL || is_declaration_die (context_die))
26357 dwarf2out_decl (context);
26358 }
26359
26360 /* Emit an abstract origin of a function first. This happens
26361 with C++ constructor clones for example and makes
26362 dwarf2out_abstract_function happy which requires the early
26363 DIE of the abstract instance to be present. */
26364 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26365 dw_die_ref origin_die;
26366 if (origin != NULL
26367 /* Do not emit the DIE multiple times but make sure to
26368 process it fully here in case we just saw a declaration. */
26369 && ((origin_die = lookup_decl_die (origin)) == NULL
26370 || is_declaration_die (origin_die)))
26371 {
26372 current_function_decl = origin;
26373 dwarf2out_decl (origin);
26374 }
26375
26376 /* Emit the DIE for decl but avoid doing that multiple times. */
26377 dw_die_ref old_die;
26378 if ((old_die = lookup_decl_die (decl)) == NULL
26379 || is_declaration_die (old_die))
26380 {
26381 current_function_decl = decl;
26382 dwarf2out_decl (decl);
26383 }
26384
26385 current_function_decl = save_fndecl;
26386 }
26387 else
26388 dwarf2out_decl (decl);
26389 }
26390 symtab->global_info_ready = save;
26391 }
26392
26393 /* Return whether EXPR is an expression with the following pattern:
26394 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26395
26396 static bool
26397 is_trivial_indirect_ref (tree expr)
26398 {
26399 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26400 return false;
26401
26402 tree nop = TREE_OPERAND (expr, 0);
26403 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26404 return false;
26405
26406 tree int_cst = TREE_OPERAND (nop, 0);
26407 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26408 }
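
/* A sketch of the intended shape (hypothetical constant): a
   DECL_VALUE_EXPR spelled like `*(volatile int *) 0x40021000', i.e. an
   INDIRECT_REF wrapping a NOP_EXPR cast of an INTEGER_CST, which
   mentions no text or data symbol and therefore cannot require a
   relocation.  */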
26409
26410 /* Output debug information for global decl DECL. Called from
26411 toplev.c after compilation proper has finished. */
26412
26413 static void
26414 dwarf2out_late_global_decl (tree decl)
26415 {
26416 /* Fill-in any location information we were unable to determine
26417 on the first pass. */
26418 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26419 {
26420 dw_die_ref die = lookup_decl_die (decl);
26421
26422 /* We may have to generate early debug late for LTO in case debug
26423 was not enabled at compile-time or the target doesn't support
26424 the LTO early debug scheme. */
26425 if (! die && in_lto_p)
26426 {
26427 dwarf2out_decl (decl);
26428 die = lookup_decl_die (decl);
26429 }
26430
26431 if (die)
26432 {
26433 /* We get called via the symtab code invoking late_global_decl
26434 for symbols that are optimized out.
26435
26436 Do not add locations for those, except if they have a
26437 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26438 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26439 INDIRECT_REF expression, as this could generate relocations to
26440 text symbols in LTO object files, which is invalid. */
26441 varpool_node *node = varpool_node::get (decl);
26442 if ((! node || ! node->definition)
26443 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26444 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26445 tree_add_const_value_attribute_for_decl (die, decl);
26446 else
26447 add_location_or_const_value_attribute (die, decl, false);
26448 }
26449 }
26450 }
26451
26452 /* Output debug information for type decl DECL. Called from toplev.c
26453 and from language front ends (to record built-in types). */
26454 static void
26455 dwarf2out_type_decl (tree decl, int local)
26456 {
26457 if (!local)
26458 {
26459 set_early_dwarf s;
26460 dwarf2out_decl (decl);
26461 }
26462 }
26463
26464 /* Output debug information for imported module or decl DECL.
26465 NAME is non-NULL name in the lexical block if the decl has been renamed.
26466 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26467 that DECL belongs to.
26468 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26469 static void
26470 dwarf2out_imported_module_or_decl_1 (tree decl,
26471 tree name,
26472 tree lexical_block,
26473 dw_die_ref lexical_block_die)
26474 {
26475 expanded_location xloc;
26476 dw_die_ref imported_die = NULL;
26477 dw_die_ref at_import_die;
26478
26479 if (TREE_CODE (decl) == IMPORTED_DECL)
26480 {
26481 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26482 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26483 gcc_assert (decl);
26484 }
26485 else
26486 xloc = expand_location (input_location);
26487
26488 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26489 {
26490 at_import_die = force_type_die (TREE_TYPE (decl));
26491 /* For namespace N { typedef void T; } using N::T; base_type_die
26492 returns NULL, but DW_TAG_imported_declaration requires
26493 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26494 if (!at_import_die)
26495 {
26496 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26497 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26498 at_import_die = lookup_type_die (TREE_TYPE (decl));
26499 gcc_assert (at_import_die);
26500 }
26501 }
26502 else
26503 {
26504 at_import_die = lookup_decl_die (decl);
26505 if (!at_import_die)
26506 {
26507 /* If we're trying to avoid duplicate debug info, we may not have
26508 emitted the member decl for this field. Emit it now. */
26509 if (TREE_CODE (decl) == FIELD_DECL)
26510 {
26511 tree type = DECL_CONTEXT (decl);
26512
26513 if (TYPE_CONTEXT (type)
26514 && TYPE_P (TYPE_CONTEXT (type))
26515 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26516 DINFO_USAGE_DIR_USE))
26517 return;
26518 gen_type_die_for_member (type, decl,
26519 get_context_die (TYPE_CONTEXT (type)));
26520 }
26521 if (TREE_CODE (decl) == NAMELIST_DECL)
26522 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26523 get_context_die (DECL_CONTEXT (decl)),
26524 NULL_TREE);
26525 else
26526 at_import_die = force_decl_die (decl);
26527 }
26528 }
26529
26530 if (TREE_CODE (decl) == NAMESPACE_DECL)
26531 {
26532 if (dwarf_version >= 3 || !dwarf_strict)
26533 imported_die = new_die (DW_TAG_imported_module,
26534 lexical_block_die,
26535 lexical_block);
26536 else
26537 return;
26538 }
26539 else
26540 imported_die = new_die (DW_TAG_imported_declaration,
26541 lexical_block_die,
26542 lexical_block);
26543
26544 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26545 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26546 if (debug_column_info && xloc.column)
26547 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26548 if (name)
26549 add_AT_string (imported_die, DW_AT_name,
26550 IDENTIFIER_POINTER (name));
26551 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26552 }
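
/* As a rough C++ illustration:

     namespace N { int x; }
     void f () { using namespace N; }

   the using-directive reaches this function with DECL being N's
   NAMESPACE_DECL, so (for DWARF 3+ or non-strict DWARF 2) a
   DW_TAG_imported_module DIE is created under the DIE passed as
   LEXICAL_BLOCK_DIE, carrying the directive's DW_AT_decl_file and
   DW_AT_decl_line and a DW_AT_import that points at the
   DW_TAG_namespace DIE for N.  */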
26553
26554 /* Output debug information for imported module or decl DECL.
26555 NAME is non-NULL name in context if the decl has been renamed.
26556 CHILD is true if decl is one of the renamed decls as part of
26557 importing whole module.
26558 IMPLICIT is set if this hook is called for an implicit import
26559 such as inline namespace. */
26560
26561 static void
26562 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26563 bool child, bool implicit)
26564 {
26565 /* dw_die_ref at_import_die; */
26566 dw_die_ref scope_die;
26567
26568 if (debug_info_level <= DINFO_LEVEL_TERSE)
26569 return;
26570
26571 gcc_assert (decl);
26572
26573 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26574 should be enough. For DWARF4 and older, even if we emit
26575 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26576 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26577 if (implicit
26578 && dwarf_version >= 5
26579 && lang_hooks.decls.decl_dwarf_attribute (decl,
26580 DW_AT_export_symbols) == 1)
26581 return;
26582
26583 set_early_dwarf s;
26584
26585 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs:
26586 the decl DIE for the reference and the scope DIE. First, get the DIE for
26587 the decl itself. */
26588
26589 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
26590 module or decl. If no DIE is found for non-globals, force a new one. */
26591 if (context
26592 && TYPE_P (context)
26593 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26594 return;
26595
26596 scope_die = get_context_die (context);
26597
26598 if (child)
26599 {
26600 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26601 there is nothing we can do here. */
26602 if (dwarf_version < 3 && dwarf_strict)
26603 return;
26604
26605 gcc_assert (scope_die->die_child);
26606 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26607 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26608 scope_die = scope_die->die_child;
26609 }
26610
26611 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26612 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26613 }
26614
26615 /* Output debug information for namelists. */
26616
26617 static dw_die_ref
26618 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26619 {
26620 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26621 tree value;
26622 unsigned i;
26623
26624 if (debug_info_level <= DINFO_LEVEL_TERSE)
26625 return NULL;
26626
26627 gcc_assert (scope_die != NULL);
26628 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26629 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26630
26631 /* If there are no item_decls, we have a nondefining namelist, e.g.
26632 with USE association; hence, set DW_AT_declaration. */
26633 if (item_decls == NULL_TREE)
26634 {
26635 add_AT_flag (nml_die, DW_AT_declaration, 1);
26636 return nml_die;
26637 }
26638
26639 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26640 {
26641 nml_item_ref_die = lookup_decl_die (value);
26642 if (!nml_item_ref_die)
26643 nml_item_ref_die = force_decl_die (value);
26644
26645 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26646 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26647 }
26648 return nml_die;
26649 }
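
/* For example, a Fortran namelist such as

     integer :: a, b
     namelist /nml/ a, b

   is expected to yield a DW_TAG_namelist DIE named "nml" whose children
   are DW_TAG_namelist_item DIEs referring, via DW_AT_namelist_items, to
   the DIEs of A and B.  */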
26650
26651
26652 /* Write the debugging output for DECL. */
26653
26654 static void
26655 dwarf2out_decl (tree decl)
26656 {
26657 dw_die_ref context_die = comp_unit_die ();
26658
26659 switch (TREE_CODE (decl))
26660 {
26661 case ERROR_MARK:
26662 return;
26663
26664 case FUNCTION_DECL:
26665 /* If we're a nested function, initially use a parent of NULL; if we're
26666 a plain function, this will be fixed up in decls_for_scope. If
26667 we're a method, it will be ignored, since we already have a DIE. */
26668 if (decl_function_context (decl)
26669 /* But if we're in terse mode, we don't care about scope. */
26670 && debug_info_level > DINFO_LEVEL_TERSE)
26671 context_die = NULL;
26672 break;
26673
26674 case VAR_DECL:
26675 /* For local statics, look up the proper context DIE. */
26676 if (local_function_static (decl))
26677 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26678
26679 /* If we are in terse mode, don't generate any DIEs to represent any
26680 variable declarations or definitions. */
26681 if (debug_info_level <= DINFO_LEVEL_TERSE)
26682 return;
26683 break;
26684
26685 case CONST_DECL:
26686 if (debug_info_level <= DINFO_LEVEL_TERSE)
26687 return;
26688 if (!is_fortran () && !is_ada ())
26689 return;
26690 if (TREE_STATIC (decl) && decl_function_context (decl))
26691 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26692 break;
26693
26694 case NAMESPACE_DECL:
26695 case IMPORTED_DECL:
26696 if (debug_info_level <= DINFO_LEVEL_TERSE)
26697 return;
26698 if (lookup_decl_die (decl) != NULL)
26699 return;
26700 break;
26701
26702 case TYPE_DECL:
26703 /* Don't emit stubs for types unless they are needed by other DIEs. */
26704 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26705 return;
26706
26707 /* Don't bother trying to generate any DIEs to represent any of the
26708 normal built-in types for the language we are compiling. */
26709 if (DECL_IS_BUILTIN (decl))
26710 return;
26711
26712 /* If we are in terse mode, don't generate any DIEs for types. */
26713 if (debug_info_level <= DINFO_LEVEL_TERSE)
26714 return;
26715
26716 /* If we're a function-scope tag, initially use a parent of NULL;
26717 this will be fixed up in decls_for_scope. */
26718 if (decl_function_context (decl))
26719 context_die = NULL;
26720
26721 break;
26722
26723 case NAMELIST_DECL:
26724 break;
26725
26726 default:
26727 return;
26728 }
26729
26730 gen_decl_die (decl, NULL, NULL, context_die);
26731
26732 if (flag_checking)
26733 {
26734 dw_die_ref die = lookup_decl_die (decl);
26735 if (die)
26736 check_die (die);
26737 }
26738 }
26739
26740 /* Write the debugging output for DECL. */
26741
26742 static void
26743 dwarf2out_function_decl (tree decl)
26744 {
26745 dwarf2out_decl (decl);
26746 call_arg_locations = NULL;
26747 call_arg_loc_last = NULL;
26748 call_site_count = -1;
26749 tail_call_site_count = -1;
26750 decl_loc_table->empty ();
26751 cached_dw_loc_list_table->empty ();
26752 }
26753
26754 /* Output a marker (i.e. a label) for the beginning of the generated code for
26755 a lexical block. */
26756
26757 static void
26758 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26759 unsigned int blocknum)
26760 {
26761 switch_to_section (current_function_section ());
26762 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26763 }
26764
26765 /* Output a marker (i.e. a label) for the end of the generated code for a
26766 lexical block. */
26767
26768 static void
26769 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26770 {
26771 switch_to_section (current_function_section ());
26772 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26773 }
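
/* Assuming the usual "LBB"/"LBE" label prefixes, the two hooks above
   bracket the code of block N with labels along the lines of

	.LBB4:
		... insns of the block ...
	.LBE4:

   which later serve as bounds for the block's DW_AT_low_pc/DW_AT_high_pc
   or DW_AT_ranges.  The exact label spelling is target-dependent.  */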
26774
26775 /* Returns nonzero if it is appropriate not to emit any debugging
26776 information for BLOCK, because it doesn't contain any instructions.
26777
26778 Don't allow this for blocks with nested functions or local classes
26779 as we would end up with orphans, and in the presence of scheduling
26780 we may end up calling them anyway. */
26781
26782 static bool
26783 dwarf2out_ignore_block (const_tree block)
26784 {
26785 tree decl;
26786 unsigned int i;
26787
26788 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26789 if (TREE_CODE (decl) == FUNCTION_DECL
26790 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26791 return 0;
26792 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26793 {
26794 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26795 if (TREE_CODE (decl) == FUNCTION_DECL
26796 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26797 return 0;
26798 }
26799
26800 return 1;
26801 }
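
/* Illustrative examples of the rule above (GNU C):

     void f (void) { { int tmp; } }                   // inner block ignorable
     void g (void) { { struct local { int i; } v; } } // inner block kept

   The inner block of f declares neither a nested function nor a local
   class, so it may be ignored; the inner block of g declares the local
   type `struct local' (a TYPE_DECL stub), so it is kept to give that
   type's DIE a proper parent.  */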
26802
26803 /* Hash table routines for file_hash. */
26804
26805 bool
26806 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26807 {
26808 return filename_cmp (p1->filename, p2) == 0;
26809 }
26810
26811 hashval_t
26812 dwarf_file_hasher::hash (dwarf_file_data *p)
26813 {
26814 return htab_hash_string (p->filename);
26815 }
26816
26817 /* Look up FILE_NAME (in the list of filenames that we know about here in
26818 dwarf2out.c) and return its "index". The index of each (known) filename is
26819 just a unique number which is associated with only that one filename. We
26820 need such numbers for the sake of generating labels (in the .debug_sfnames
26821 section) and references to those file numbers (in the .debug_srcinfo
26822 and .debug_macinfo sections). If the filename given as an argument is not
26823 found in our current list, add it to the list and assign it the next
26824 available unique index number. */
26825
26826 static struct dwarf_file_data *
26827 lookup_filename (const char *file_name)
26828 {
26829 struct dwarf_file_data * created;
26830
26831 if (!file_name)
26832 return NULL;
26833
26834 dwarf_file_data **slot
26835 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26836 INSERT);
26837 if (*slot)
26838 return *slot;
26839
26840 created = ggc_alloc<dwarf_file_data> ();
26841 created->filename = file_name;
26842 created->emitted_number = 0;
26843 *slot = created;
26844 return created;
26845 }
26846
26847 /* If the assembler will construct the file table, then translate the compiler
26848 internal file table number into the assembler file table number, and emit
26849 a .file directive if we haven't already emitted one. The file table
26850 numbers are different because we prune debug info for unused variables and
26851 types, which may include filenames. */
26852
26853 static int
26854 maybe_emit_file (struct dwarf_file_data * fd)
26855 {
26856 if (! fd->emitted_number)
26857 {
26858 if (last_emitted_file)
26859 fd->emitted_number = last_emitted_file->emitted_number + 1;
26860 else
26861 fd->emitted_number = 1;
26862 last_emitted_file = fd;
26863
26864 if (output_asm_line_debug_info ())
26865 {
26866 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26867 output_quoted_string (asm_out_file,
26868 remap_debug_filename (fd->filename));
26869 fputc ('\n', asm_out_file);
26870 }
26871 }
26872
26873 return fd->emitted_number;
26874 }
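
/* With assembler-generated line tables this emits directives of the form
   (path purely illustrative)

	.file 2 "lib/widget.c"

   where the number is the emitted_number assigned above rather than the
   compiler-internal file table index.  */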
26875
26876 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26877 That generation should happen after function debug info has been
26878 generated. The value of the attribute is the constant value of ARG. */
26879
26880 static void
26881 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26882 {
26883 die_arg_entry entry;
26884
26885 if (!die || !arg)
26886 return;
26887
26888 gcc_assert (early_dwarf);
26889
26890 if (!tmpl_value_parm_die_table)
26891 vec_alloc (tmpl_value_parm_die_table, 32);
26892
26893 entry.die = die;
26894 entry.arg = arg;
26895 vec_safe_push (tmpl_value_parm_die_table, entry);
26896 }
26897
26898 /* Return TRUE if T is an instance of a generic type, FALSE
26899 otherwise. */
26900
26901 static bool
26902 generic_type_p (tree t)
26903 {
26904 if (t == NULL_TREE || !TYPE_P (t))
26905 return false;
26906 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26907 }
26908
26909 /* Schedule the generation of the generic parameter DIEs for the
26910 instance of generic type T. The generation itself is done later
26911 by gen_scheduled_generic_parms_dies. */
26912
26913 static void
26914 schedule_generic_params_dies_gen (tree t)
26915 {
26916 if (!generic_type_p (t))
26917 return;
26918
26919 gcc_assert (early_dwarf);
26920
26921 if (!generic_type_instances)
26922 vec_alloc (generic_type_instances, 256);
26923
26924 vec_safe_push (generic_type_instances, t);
26925 }
26926
26927 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26928 by append_entry_to_tmpl_value_parm_die_table. This function must
26929 be called after function DIEs have been generated. */
26930
26931 static void
26932 gen_remaining_tmpl_value_param_die_attribute (void)
26933 {
26934 if (tmpl_value_parm_die_table)
26935 {
26936 unsigned i, j;
26937 die_arg_entry *e;
26938
26939 /* We do this in two phases - first get the cases we can
26940 handle during early-finish, preserving those we cannot
26941 (containing symbolic constants where we don't yet know
26942 whether we are going to output the referenced symbols).
26943 For those we try again at late-finish. */
26944 j = 0;
26945 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26946 {
26947 if (!e->die->removed
26948 && !tree_add_const_value_attribute (e->die, e->arg))
26949 {
26950 dw_loc_descr_ref loc = NULL;
26951 if (! early_dwarf
26952 && (dwarf_version >= 5 || !dwarf_strict))
26953 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26954 if (loc)
26955 add_AT_loc (e->die, DW_AT_location, loc);
26956 else
26957 (*tmpl_value_parm_die_table)[j++] = *e;
26958 }
26959 }
26960 tmpl_value_parm_die_table->truncate (j);
26961 }
26962 }
26963
26964 /* Generate generic parameters DIEs for instances of generic types
26965 that have been previously scheduled by
26966 schedule_generic_params_dies_gen. This function must be called
26967 after all the types of the CU have been laid out. */
26968
26969 static void
26970 gen_scheduled_generic_parms_dies (void)
26971 {
26972 unsigned i;
26973 tree t;
26974
26975 if (!generic_type_instances)
26976 return;
26977
26978 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26979 if (COMPLETE_TYPE_P (t))
26980 gen_generic_params_dies (t);
26981
26982 generic_type_instances = NULL;
26983 }
26984
26985
26986 /* Replace the DW_AT_name attribute of the DIE for DECL with NAME. */
26987
26988 static void
26989 dwarf2out_set_name (tree decl, tree name)
26990 {
26991 dw_die_ref die;
26992 dw_attr_node *attr;
26993 const char *dname;
26994
26995 die = TYPE_SYMTAB_DIE (decl);
26996 if (!die)
26997 return;
26998
26999 dname = dwarf2_name (name, 0);
27000 if (!dname)
27001 return;
27002
27003 attr = get_AT (die, DW_AT_name);
27004 if (attr)
27005 {
27006 struct indirect_string_node *node;
27007
27008 node = find_AT_string (dname);
27009 /* replace the string. */
27010 attr->dw_attr_val.v.val_str = node;
27011 }
27012
27013 else
27014 add_name_attribute (die, dname);
27015 }
27016
27017 /* True if before or during processing of the first function being emitted. */
27018 static bool in_first_function_p = true;
27019 /* True if loc_note during dwarf2out_var_location call might still be
27020 before first real instruction at address equal to .Ltext0. */
27021 static bool maybe_at_text_label_p = true;
27022 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27023 static unsigned int first_loclabel_num_not_at_text_label;
27024
27025 /* Look ahead for a real insn, or for a begin stmt marker. */
27026
27027 static rtx_insn *
27028 dwarf2out_next_real_insn (rtx_insn *loc_note)
27029 {
27030 rtx_insn *next_real = NEXT_INSN (loc_note);
27031
27032 while (next_real)
27033 if (INSN_P (next_real))
27034 break;
27035 else
27036 next_real = NEXT_INSN (next_real);
27037
27038 return next_real;
27039 }
27040
27041 /* Called by the final INSN scan whenever we see a var location. We
27042 use it to drop labels in the right places, and throw the location in
27043 our lookup table. */
27044
27045 static void
27046 dwarf2out_var_location (rtx_insn *loc_note)
27047 {
27048 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27049 struct var_loc_node *newloc;
27050 rtx_insn *next_real, *next_note;
27051 rtx_insn *call_insn = NULL;
27052 static const char *last_label;
27053 static const char *last_postcall_label;
27054 static bool last_in_cold_section_p;
27055 static rtx_insn *expected_next_loc_note;
27056 tree decl;
27057 bool var_loc_p;
27058 var_loc_view view = 0;
27059
27060 if (!NOTE_P (loc_note))
27061 {
27062 if (CALL_P (loc_note))
27063 {
27064 maybe_reset_location_view (loc_note, cur_line_info_table);
27065 call_site_count++;
27066 if (SIBLING_CALL_P (loc_note))
27067 tail_call_site_count++;
27068 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27069 {
27070 call_insn = loc_note;
27071 loc_note = NULL;
27072 var_loc_p = false;
27073
27074 next_real = dwarf2out_next_real_insn (call_insn);
27075 next_note = NULL;
27076 cached_next_real_insn = NULL;
27077 goto create_label;
27078 }
27079 if (optimize == 0 && !flag_var_tracking)
27080 {
27081 /* When the var-tracking pass is not running, there is no note
27082 for indirect calls whose target is compile-time known. In this
27083 case, process such calls specifically so that we generate call
27084 sites for them anyway. */
27085 rtx x = PATTERN (loc_note);
27086 if (GET_CODE (x) == PARALLEL)
27087 x = XVECEXP (x, 0, 0);
27088 if (GET_CODE (x) == SET)
27089 x = SET_SRC (x);
27090 if (GET_CODE (x) == CALL)
27091 x = XEXP (x, 0);
27092 if (!MEM_P (x)
27093 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27094 || !SYMBOL_REF_DECL (XEXP (x, 0))
27095 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27096 != FUNCTION_DECL))
27097 {
27098 call_insn = loc_note;
27099 loc_note = NULL;
27100 var_loc_p = false;
27101
27102 next_real = dwarf2out_next_real_insn (call_insn);
27103 next_note = NULL;
27104 cached_next_real_insn = NULL;
27105 goto create_label;
27106 }
27107 }
27108 }
27109 else if (!debug_variable_location_views)
27110 gcc_unreachable ();
27111 else
27112 maybe_reset_location_view (loc_note, cur_line_info_table);
27113
27114 return;
27115 }
27116
27117 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27118 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27119 return;
27120
27121 /* Optimize processing a large consecutive sequence of location
27122 notes so we don't spend too much time in next_real_insn. If the
27123 next insn is another location note, remember the next_real_insn
27124 calculation for next time. */
27125 next_real = cached_next_real_insn;
27126 if (next_real)
27127 {
27128 if (expected_next_loc_note != loc_note)
27129 next_real = NULL;
27130 }
27131
27132 next_note = NEXT_INSN (loc_note);
27133 if (! next_note
27134 || next_note->deleted ()
27135 || ! NOTE_P (next_note)
27136 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27137 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27138 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27139 next_note = NULL;
27140
27141 if (! next_real)
27142 next_real = dwarf2out_next_real_insn (loc_note);
27143
27144 if (next_note)
27145 {
27146 expected_next_loc_note = next_note;
27147 cached_next_real_insn = next_real;
27148 }
27149 else
27150 cached_next_real_insn = NULL;
27151
27152 /* If there are no instructions which would be affected by this note,
27153 don't do anything. */
27154 if (var_loc_p
27155 && next_real == NULL_RTX
27156 && !NOTE_DURING_CALL_P (loc_note))
27157 return;
27158
27159 create_label:
27160
27161 if (next_real == NULL_RTX)
27162 next_real = get_last_insn ();
27163
27164 /* If there were any real insns between the note we processed last time
27165 and this note (or if it is the first note), clear
27166 last_{,postcall_}label so that they are not reused this time. */
27167 if (last_var_location_insn == NULL_RTX
27168 || last_var_location_insn != next_real
27169 || last_in_cold_section_p != in_cold_section_p)
27170 {
27171 last_label = NULL;
27172 last_postcall_label = NULL;
27173 }
27174
27175 if (var_loc_p)
27176 {
27177 const char *label
27178 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27179 view = cur_line_info_table->view;
27180 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27181 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27182 if (newloc == NULL)
27183 return;
27184 }
27185 else
27186 {
27187 decl = NULL_TREE;
27188 newloc = NULL;
27189 }
27190
27191 /* If there were no real insns between the note we processed last time
27192 and this note, use the label we emitted last time. Otherwise
27193 create a new label and emit it. */
27194 if (last_label == NULL)
27195 {
27196 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27197 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27198 loclabel_num++;
27199 last_label = ggc_strdup (loclabel);
27200 /* See if loclabel might be equal to .Ltext0. If yes,
27201 bump first_loclabel_num_not_at_text_label. */
27202 if (!have_multiple_function_sections
27203 && in_first_function_p
27204 && maybe_at_text_label_p)
27205 {
27206 static rtx_insn *last_start;
27207 rtx_insn *insn;
27208 for (insn = loc_note; insn; insn = previous_insn (insn))
27209 if (insn == last_start)
27210 break;
27211 else if (!NONDEBUG_INSN_P (insn))
27212 continue;
27213 else
27214 {
27215 rtx body = PATTERN (insn);
27216 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27217 continue;
27218 /* Inline asm could occupy zero bytes. */
27219 else if (GET_CODE (body) == ASM_INPUT
27220 || asm_noperands (body) >= 0)
27221 continue;
27222 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27223 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27224 continue;
27225 #endif
27226 else
27227 {
27228 /* Assume insn has non-zero length. */
27229 maybe_at_text_label_p = false;
27230 break;
27231 }
27232 }
27233 if (maybe_at_text_label_p)
27234 {
27235 last_start = loc_note;
27236 first_loclabel_num_not_at_text_label = loclabel_num;
27237 }
27238 }
27239 }
27240
27241 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27242 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27243
27244 if (!var_loc_p)
27245 {
27246 struct call_arg_loc_node *ca_loc
27247 = ggc_cleared_alloc<call_arg_loc_node> ();
27248 rtx_insn *prev = call_insn;
27249
27250 ca_loc->call_arg_loc_note
27251 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27252 ca_loc->next = NULL;
27253 ca_loc->label = last_label;
27254 gcc_assert (prev
27255 && (CALL_P (prev)
27256 || (NONJUMP_INSN_P (prev)
27257 && GET_CODE (PATTERN (prev)) == SEQUENCE
27258 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27259 if (!CALL_P (prev))
27260 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27261 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27262
27263 /* Look for a SYMBOL_REF in the "prev" instruction. */
27264 rtx x = get_call_rtx_from (PATTERN (prev));
27265 if (x)
27266 {
27267 /* Try to get the call symbol, if any. */
27268 if (MEM_P (XEXP (x, 0)))
27269 x = XEXP (x, 0);
27270 /* First, look for a memory access to a symbol_ref. */
27271 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27272 && SYMBOL_REF_DECL (XEXP (x, 0))
27273 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27274 ca_loc->symbol_ref = XEXP (x, 0);
27275 /* Otherwise, look at a compile-time known user-level function
27276 declaration. */
27277 else if (MEM_P (x)
27278 && MEM_EXPR (x)
27279 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27280 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27281 }
27282
27283 ca_loc->block = insn_scope (prev);
27284 if (call_arg_locations)
27285 call_arg_loc_last->next = ca_loc;
27286 else
27287 call_arg_locations = ca_loc;
27288 call_arg_loc_last = ca_loc;
27289 }
27290 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27291 {
27292 newloc->label = last_label;
27293 newloc->view = view;
27294 }
27295 else
27296 {
27297 if (!last_postcall_label)
27298 {
27299 sprintf (loclabel, "%s-1", last_label);
27300 last_postcall_label = ggc_strdup (loclabel);
27301 }
27302 newloc->label = last_postcall_label;
27303 /* ??? This view is at last_label, not last_label-1, but we
27304 could only assume view at last_label-1 is zero if we could
27305 assume calls always have length greater than one. This is
27306 probably true in general, though there might be a rare
27307 exception to this rule, e.g. if a call insn is optimized out
27308 by target magic. Then, even the -1 in the label will be
27309 wrong, which might invalidate the range. Anyway, using view,
27310 though technically possibly incorrect, will work as far as
27311 ranges go: since L-1 is in the middle of the call insn,
27312 (L-1).0 and (L-1).V shouldn't make any difference, and having
27313 the loclist entry refer to the .loc entry might be useful, so
27314 leave it like this. */
27315 newloc->view = view;
27316 }
27317
27318 if (var_loc_p && flag_debug_asm)
27319 {
27320 const char *name, *sep, *patstr;
27321 if (decl && DECL_NAME (decl))
27322 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27323 else
27324 name = "";
27325 if (NOTE_VAR_LOCATION_LOC (loc_note))
27326 {
27327 sep = " => ";
27328 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27329 }
27330 else
27331 {
27332 sep = " ";
27333 patstr = "RESET";
27334 }
27335 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27336 name, sep, patstr);
27337 }
27338
27339 last_var_location_insn = next_real;
27340 last_in_cold_section_p = in_cold_section_p;
27341 }
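
/* The net effect of the function above is to drop internal labels
   (generated with the "LVL" prefix, e.g. .LVL0, .LVL1 on typical ELF
   targets) at points where variable locations or call sites change,
   recording them via add_var_loc_to_decl and the call_arg_locations
   chain so that the location lists and call-site DIEs emitted later can
   use them as address bounds.  */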
27342
27343 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27344 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27345 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27346 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27347 BLOCK_FRAGMENT_ORIGIN links. */
27348 static bool
27349 block_within_block_p (tree block, tree outer, bool bothways)
27350 {
27351 if (block == outer)
27352 return true;
27353
27354 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27355 for (tree context = BLOCK_SUPERCONTEXT (block);
27356 context != outer;
27357 context = BLOCK_SUPERCONTEXT (context))
27358 if (!context || TREE_CODE (context) != BLOCK)
27359 return false;
27360
27361 if (!bothways)
27362 return true;
27363
27364 /* Now check that each block is actually referenced by its
27365 parent. */
27366 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27367 context = BLOCK_SUPERCONTEXT (context))
27368 {
27369 if (BLOCK_FRAGMENT_ORIGIN (context))
27370 {
27371 gcc_assert (!BLOCK_SUBBLOCKS (context));
27372 context = BLOCK_FRAGMENT_ORIGIN (context);
27373 }
27374 for (tree sub = BLOCK_SUBBLOCKS (context);
27375 sub != block;
27376 sub = BLOCK_CHAIN (sub))
27377 if (!sub)
27378 return false;
27379 if (context == outer)
27380 return true;
27381 else
27382 block = context;
27383 }
27384 }
27385
27386 /* Called during final while assembling the marker of the entry point
27387 for an inlined function. */
27388
27389 static void
27390 dwarf2out_inline_entry (tree block)
27391 {
27392 gcc_assert (debug_inline_points);
27393
27394 /* If we can't represent it, don't bother. */
27395 if (!(dwarf_version >= 3 || !dwarf_strict))
27396 return;
27397
27398 gcc_assert (DECL_P (block_ultimate_origin (block)));
27399
27400 /* Sanity check the block tree. This would catch a case in which
27401 BLOCK got removed from the tree reachable from the outermost
27402 lexical block, but got retained in markers. It would still link
27403 back to its parents, but some ancestor would be missing a link
27404 down the path to the sub BLOCK. If the block got removed, its
27405 BLOCK_NUMBER will not be a usable value. */
27406 if (flag_checking)
27407 gcc_assert (block_within_block_p (block,
27408 DECL_INITIAL (current_function_decl),
27409 true));
27410
27411 gcc_assert (inlined_function_outer_scope_p (block));
27412 gcc_assert (!BLOCK_DIE (block));
27413
27414 if (BLOCK_FRAGMENT_ORIGIN (block))
27415 block = BLOCK_FRAGMENT_ORIGIN (block);
27416 /* Can the entry point ever not be at the beginning of an
27417 unfragmented lexical block? */
27418 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27419 || (cur_line_info_table
27420 && !ZERO_VIEW_P (cur_line_info_table->view))))
27421 return;
27422
27423 if (!inline_entry_data_table)
27424 inline_entry_data_table
27425 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27426
27427
27428 inline_entry_data **iedp
27429 = inline_entry_data_table->find_slot_with_hash (block,
27430 htab_hash_pointer (block),
27431 INSERT);
27432 if (*iedp)
27433 /* ??? Ideally, we'd record all entry points for the same inlined
27434 function (some may have been duplicated by e.g. unrolling), but
27435 we have no way to represent that ATM. */
27436 return;
27437
27438 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27439 ied->block = block;
27440 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27441 ied->label_num = BLOCK_NUMBER (block);
27442 if (cur_line_info_table)
27443 ied->view = cur_line_info_table->view;
27444
27445 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27446
27447 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27448 BLOCK_NUMBER (block));
27449 ASM_OUTPUT_LABEL (asm_out_file, label);
27450 }
27451
27452 /* Called from finalize_size_functions for size functions so that their body
27453 can be encoded in the debug info to describe the layout of variable-length
27454 structures. */
27455
27456 static void
27457 dwarf2out_size_function (tree decl)
27458 {
27459 function_to_dwarf_procedure (decl);
27460 }
27461
27462 /* Note in one location list that text section has changed. */
27463
27464 int
27465 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27466 {
27467 var_loc_list *list = *slot;
27468 if (list->first)
27469 list->last_before_switch
27470 = list->last->next ? list->last->next : list->last;
27471 return 1;
27472 }
27473
27474 /* Note in all location lists that text section has changed. */
27475
27476 static void
27477 var_location_switch_text_section (void)
27478 {
27479 if (decl_loc_table == NULL)
27480 return;
27481
27482 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27483 }
27484
27485 /* Create a new line number table. */
27486
27487 static dw_line_info_table *
27488 new_line_info_table (void)
27489 {
27490 dw_line_info_table *table;
27491
27492 table = ggc_cleared_alloc<dw_line_info_table> ();
27493 table->file_num = 1;
27494 table->line_num = 1;
27495 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27496 FORCE_RESET_NEXT_VIEW (table->view);
27497 table->symviews_since_reset = 0;
27498
27499 return table;
27500 }
27501
27502 /* Look up the line number table for SEC, creating it if needed, and
27503    cache it so that we don't have to do this for every source line.  */
27504
27505 static void
27506 set_cur_line_info_table (section *sec)
27507 {
27508 dw_line_info_table *table;
27509
27510 if (sec == text_section)
27511 table = text_section_line_info;
27512 else if (sec == cold_text_section)
27513 {
27514 table = cold_text_section_line_info;
27515 if (!table)
27516 {
27517 cold_text_section_line_info = table = new_line_info_table ();
27518 table->end_label = cold_end_label;
27519 }
27520 }
27521 else
27522 {
27523 const char *end_label;
27524
27525 if (crtl->has_bb_partition)
27526 {
27527 if (in_cold_section_p)
27528 end_label = crtl->subsections.cold_section_end_label;
27529 else
27530 end_label = crtl->subsections.hot_section_end_label;
27531 }
27532 else
27533 {
27534 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27535 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27536 current_function_funcdef_no);
27537 end_label = ggc_strdup (label);
27538 }
27539
27540 table = new_line_info_table ();
27541 table->end_label = end_label;
27542
27543 vec_safe_push (separate_line_info, table);
27544 }
27545
27546 if (output_asm_line_debug_info ())
27547 table->is_stmt = (cur_line_info_table
27548 ? cur_line_info_table->is_stmt
27549 : DWARF_LINE_DEFAULT_IS_STMT_START);
27550 cur_line_info_table = table;
27551 }
27552
27553
27554 /* We need to reset the locations at the beginning of each
27555 function. We can't do this in the end_function hook, because the
27556 declarations that use the locations won't have been output when
27557 that hook is called. Also compute have_multiple_function_sections here. */
27558
27559 static void
27560 dwarf2out_begin_function (tree fun)
27561 {
27562 section *sec = function_section (fun);
27563
27564 if (sec != text_section)
27565 have_multiple_function_sections = true;
27566
27567 if (crtl->has_bb_partition && !cold_text_section)
27568 {
27569 gcc_assert (current_function_decl == fun);
27570 cold_text_section = unlikely_text_section ();
27571 switch_to_section (cold_text_section);
27572 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27573 switch_to_section (sec);
27574 }
27575
27576 dwarf2out_note_section_used ();
27577 call_site_count = 0;
27578 tail_call_site_count = 0;
27579
27580 set_cur_line_info_table (sec);
27581 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27582 }
27583
27584 /* Helper function of dwarf2out_end_function, called only after emitting
27585 the very first function into assembly. Check if some .debug_loc range
27586 might end with a .LVL* label that could be equal to .Ltext0.
27587 In that case we must force using absolute addresses in .debug_loc ranges,
27588 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27589 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27590 list terminator.
27591 Set have_multiple_function_sections to true in that case and
27592 terminate htab traversal. */
27593
27594 int
27595 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27596 {
27597 var_loc_list *entry = *slot;
27598 struct var_loc_node *node;
27599
27600 node = entry->first;
27601 if (node && node->next && node->next->label)
27602 {
27603 unsigned int i;
27604 const char *label = node->next->label;
27605 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27606
27607 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27608 {
27609 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27610 if (strcmp (label, loclabel) == 0)
27611 {
27612 have_multiple_function_sections = true;
27613 return 0;
27614 }
27615 }
27616 }
27617 return 1;
27618 }
27619
27620 /* Hook called after emitting a function into assembly.
27621 This does something only for the very first function emitted. */
27622
27623 static void
27624 dwarf2out_end_function (unsigned int)
27625 {
27626 if (in_first_function_p
27627 && !have_multiple_function_sections
27628 && first_loclabel_num_not_at_text_label
27629 && decl_loc_table)
27630 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27631 in_first_function_p = false;
27632 maybe_at_text_label_p = false;
27633 }
27634
27635 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27636 front-ends register a translation unit even before dwarf2out_init is
27637 called. */
27638 static tree main_translation_unit = NULL_TREE;
27639
27640 /* Hook called by front-ends after they have built their main translation
27641    unit.  Associate comp_unit_die with UNIT.  */
27642
27643 static void
27644 dwarf2out_register_main_translation_unit (tree unit)
27645 {
27646 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27647 && main_translation_unit == NULL_TREE);
27648 main_translation_unit = unit;
27649 /* If dwarf2out_init has not been called yet, it will perform the association
27650 itself looking at main_translation_unit. */
27651 if (decl_die_table != NULL)
27652 equate_decl_number_to_die (unit, comp_unit_die ());
27653 }
27654
27655 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27656
27657 static void
27658 push_dw_line_info_entry (dw_line_info_table *table,
27659 enum dw_line_info_opcode opcode, unsigned int val)
27660 {
27661 dw_line_info_entry e;
27662 e.opcode = opcode;
27663 e.val = val;
27664 vec_safe_push (table->entries, e);
27665 }
27666
27667 /* Output a label to mark the beginning of a source code line entry
27668 and record information relating to this source line, in
27669 'line_info_table' for later output of the .debug_line section. */
27670 /* ??? The discriminator parameter ought to be unsigned. */
27671
27672 static void
27673 dwarf2out_source_line (unsigned int line, unsigned int column,
27674 const char *filename,
27675 int discriminator, bool is_stmt)
27676 {
27677 unsigned int file_num;
27678 dw_line_info_table *table;
27679 static var_loc_view lvugid;
27680
27681 if (debug_info_level < DINFO_LEVEL_TERSE)
27682 return;
27683
27684 table = cur_line_info_table;
27685
27686 if (line == 0)
27687 {
27688 if (debug_variable_location_views
27689 && output_asm_line_debug_info ()
27690 && table && !RESETTING_VIEW_P (table->view))
27691 {
27692 /* If we're using the assembler to compute view numbers, we
27693 can't issue a .loc directive for line zero, so we can't
27694 get a view number at this point. We might attempt to
27695 compute it from the previous view, or equate it to a
27696 subsequent view (though it might not be there!), but
27697 since we're omitting the line number entry, we might as
27698 well omit the view number as well. That means pretending
27699 it's a view number zero, which might very well turn out
27700 to be correct. ??? Extend the assembler so that the
27701 compiler could emit e.g. ".locview .LVU#", to output a
27702 view without changing line number information. We'd then
27703 have to count it in symviews_since_reset; when it's omitted,
27704 it doesn't count. */
27705 if (!zero_view_p)
27706 zero_view_p = BITMAP_GGC_ALLOC ();
27707 bitmap_set_bit (zero_view_p, table->view);
27708 if (flag_debug_asm)
27709 {
27710 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27711 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27712 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27713 ASM_COMMENT_START);
27714 assemble_name (asm_out_file, label);
27715 putc ('\n', asm_out_file);
27716 }
27717 table->view = ++lvugid;
27718 }
27719 return;
27720 }
27721
27722 /* The discriminator column was added in DWARF 4.  Simplify the code
27723    below by just dropping it if we're not supposed to output it.  */
27724 if (dwarf_version < 4 && dwarf_strict)
27725 discriminator = 0;
27726
27727 if (!debug_column_info)
27728 column = 0;
27729
27730 file_num = maybe_emit_file (lookup_filename (filename));
27731
27732 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27733 the debugger has used the second (possibly duplicate) line number
27734 at the beginning of the function to mark the end of the prologue.
27735 We could eliminate any other duplicates within the function. For
27736 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27737 that second line number entry. */
27738 /* Recall that this end-of-prologue indication is *not* the same thing
27739 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27740 to which the hook corresponds, follows the last insn that was
27741 emitted by gen_prologue. What we need is to precede the first insn
27742 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27743 insn that corresponds to something the user wrote. These may be
27744 very different locations once scheduling is enabled. */
27745
27746 if (0 && file_num == table->file_num
27747 && line == table->line_num
27748 && column == table->column_num
27749 && discriminator == table->discrim_num
27750 && is_stmt == table->is_stmt)
27751 return;
27752
27753 switch_to_section (current_function_section ());
27754
27755 /* If requested, emit something human-readable. */
27756 if (flag_debug_asm)
27757 {
27758 if (debug_column_info)
27759 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27760 filename, line, column);
27761 else
27762 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27763 filename, line);
27764 }
27765
27766 if (output_asm_line_debug_info ())
27767 {
27768 /* Emit the .loc directive understood by GNU as. */
27769 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27770 file_num, line, is_stmt, discriminator */
27771 fputs ("\t.loc ", asm_out_file);
27772 fprint_ul (asm_out_file, file_num);
27773 putc (' ', asm_out_file);
27774 fprint_ul (asm_out_file, line);
27775 putc (' ', asm_out_file);
27776 fprint_ul (asm_out_file, column);
27777
27778 if (is_stmt != table->is_stmt)
27779 {
27780 fputs (" is_stmt ", asm_out_file);
27781 putc (is_stmt ? '1' : '0', asm_out_file);
27782 }
27783 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27784 {
27785 gcc_assert (discriminator > 0);
27786 fputs (" discriminator ", asm_out_file);
27787 fprint_ul (asm_out_file, (unsigned long) discriminator);
27788 }
27789 if (debug_variable_location_views)
27790 {
27791 if (!RESETTING_VIEW_P (table->view))
27792 {
27793 table->symviews_since_reset++;
27794 if (table->symviews_since_reset > symview_upper_bound)
27795 symview_upper_bound = table->symviews_since_reset;
27796 /* When we're using the assembler to compute view
27797 numbers, we output symbolic labels after "view" in
27798 .loc directives, and the assembler will set them for
27799 us, so that we can refer to the view numbers in
27800 location lists. The only exceptions are when we know
27801 a view will be zero: "-0" is a forced reset, used
27802 e.g. in the beginning of functions, whereas "0" tells
27803 the assembler to check that there was a PC change
27804 since the previous view, in a way that implicitly
27805 resets the next view. */
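	      /* Illustrative only (the file, line and column numbers are
		 made up): with symbolic views the directive finished below
		 comes out as
		   .loc 1 42 7 view .LVU3
		 while at a reset point (the else arm below) it ends in
		 "view -0" or "view 0" instead of a .LVU label.  */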
27806 fputs (" view ", asm_out_file);
27807 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27808 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27809 assemble_name (asm_out_file, label);
27810 table->view = ++lvugid;
27811 }
27812 else
27813 {
27814 table->symviews_since_reset = 0;
27815 if (FORCE_RESETTING_VIEW_P (table->view))
27816 fputs (" view -0", asm_out_file);
27817 else
27818 fputs (" view 0", asm_out_file);
27819 /* Mark the present view as a zero view. Earlier debug
27820 binds may have already added its id to loclists to be
27821 emitted later, so we can't reuse the id for something
27822 else. However, it's good to know whether a view is
27823 known to be zero, because then we may be able to
27824 optimize out locviews that are all zeros, so take
27825 note of it in zero_view_p. */
27826 if (!zero_view_p)
27827 zero_view_p = BITMAP_GGC_ALLOC ();
27828 bitmap_set_bit (zero_view_p, lvugid);
27829 table->view = ++lvugid;
27830 }
27831 }
27832 putc ('\n', asm_out_file);
27833 }
27834 else
27835 {
27836 unsigned int label_num = ++line_info_label_num;
27837
27838 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27839
27840 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27841 push_dw_line_info_entry (table, LI_adv_address, label_num);
27842 else
27843 push_dw_line_info_entry (table, LI_set_address, label_num);
27844 if (debug_variable_location_views)
27845 {
27846 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27847 if (resetting)
27848 table->view = 0;
27849
27850 if (flag_debug_asm)
27851 fprintf (asm_out_file, "\t%s view %s%d\n",
27852 ASM_COMMENT_START,
27853 resetting ? "-" : "",
27854 table->view);
27855
27856 table->view++;
27857 }
27858 if (file_num != table->file_num)
27859 push_dw_line_info_entry (table, LI_set_file, file_num);
27860 if (discriminator != table->discrim_num)
27861 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27862 if (is_stmt != table->is_stmt)
27863 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27864 push_dw_line_info_entry (table, LI_set_line, line);
27865 if (debug_column_info)
27866 push_dw_line_info_entry (table, LI_set_column, column);
27867 }
27868
27869 table->file_num = file_num;
27870 table->line_num = line;
27871 table->column_num = column;
27872 table->discrim_num = discriminator;
27873 table->is_stmt = is_stmt;
27874 table->in_use = true;
27875 }
27876
27877 /* Record the beginning of a new source file. */
27878
27879 static void
27880 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27881 {
27882 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27883 {
27884 macinfo_entry e;
27885 e.code = DW_MACINFO_start_file;
27886 e.lineno = lineno;
27887 e.info = ggc_strdup (filename);
27888 vec_safe_push (macinfo_table, e);
27889 }
27890 }
27891
27892 /* Record the end of a source file. */
27893
27894 static void
27895 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27896 {
27897 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27898 {
27899 macinfo_entry e;
27900 e.code = DW_MACINFO_end_file;
27901 e.lineno = lineno;
27902 e.info = NULL;
27903 vec_safe_push (macinfo_table, e);
27904 }
27905 }
27906
27907 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27908 the tail part of the directive line, i.e. the part which is past the
27909 initial whitespace, #, whitespace, directive-name, whitespace part. */
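/* For example (illustrative), for the directive
     #define MAX(a, b) ((a) > (b) ? (a) : (b))
   BUFFER would be "MAX(a, b) ((a) > (b) ? (a) : (b))".  */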
27910
27911 static void
27912 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27913 const char *buffer ATTRIBUTE_UNUSED)
27914 {
27915 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27916 {
27917 macinfo_entry e;
27918 /* Insert a dummy first entry to be able to optimize the whole
27919 predefined macro block using DW_MACRO_import. */
27920 if (macinfo_table->is_empty () && lineno <= 1)
27921 {
27922 e.code = 0;
27923 e.lineno = 0;
27924 e.info = NULL;
27925 vec_safe_push (macinfo_table, e);
27926 }
27927 e.code = DW_MACINFO_define;
27928 e.lineno = lineno;
27929 e.info = ggc_strdup (buffer);
27930 vec_safe_push (macinfo_table, e);
27931 }
27932 }
27933
27934 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27935 the tail part of the directive line, i.e. the part which is past the
27936 initial whitespace, #, whitespace, directive-name, whitespace part. */
27937
27938 static void
27939 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27940 const char *buffer ATTRIBUTE_UNUSED)
27941 {
27942 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27943 {
27944 macinfo_entry e;
27945 /* Insert a dummy first entry to be able to optimize the whole
27946 predefined macro block using DW_MACRO_import. */
27947 if (macinfo_table->is_empty () && lineno <= 1)
27948 {
27949 e.code = 0;
27950 e.lineno = 0;
27951 e.info = NULL;
27952 vec_safe_push (macinfo_table, e);
27953 }
27954 e.code = DW_MACINFO_undef;
27955 e.lineno = lineno;
27956 e.info = ggc_strdup (buffer);
27957 vec_safe_push (macinfo_table, e);
27958 }
27959 }
27960
27961 /* Helpers to manipulate the hash table of shareable macinfo entries.  */
27962
27963 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27964 {
27965 static inline hashval_t hash (const macinfo_entry *);
27966 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27967 };
27968
27969 inline hashval_t
27970 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27971 {
27972 return htab_hash_string (entry->info);
27973 }
27974
27975 inline bool
27976 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27977 const macinfo_entry *entry2)
27978 {
27979 return !strcmp (entry1->info, entry2->info);
27980 }
27981
27982 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27983
27984 /* Output a single .debug_macinfo entry. */
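/* A rough sketch of what the code below emits for a short define (using the
   standard opcode value DW_MACINFO_define == 0x01): for "#define FOO 1" on
   line 7 it is the opcode byte, the line number as a uleb128 and the
   NUL-terminated string "FOO 1", i.e. 0x01 0x07 'F' 'O' 'O' ' ' '1' 0.  */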
27985
27986 static void
27987 output_macinfo_op (macinfo_entry *ref)
27988 {
27989 int file_num;
27990 size_t len;
27991 struct indirect_string_node *node;
27992 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27993 struct dwarf_file_data *fd;
27994
27995 switch (ref->code)
27996 {
27997 case DW_MACINFO_start_file:
27998 fd = lookup_filename (ref->info);
27999 file_num = maybe_emit_file (fd);
28000 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28001 dw2_asm_output_data_uleb128 (ref->lineno,
28002 "Included from line number %lu",
28003 (unsigned long) ref->lineno);
28004 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28005 break;
28006 case DW_MACINFO_end_file:
28007 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28008 break;
28009 case DW_MACINFO_define:
28010 case DW_MACINFO_undef:
28011 len = strlen (ref->info) + 1;
28012 if (!dwarf_strict
28013 && len > DWARF_OFFSET_SIZE
28014 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28015 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28016 {
28017 ref->code = ref->code == DW_MACINFO_define
28018 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28019 output_macinfo_op (ref);
28020 return;
28021 }
28022 dw2_asm_output_data (1, ref->code,
28023 ref->code == DW_MACINFO_define
28024 ? "Define macro" : "Undefine macro");
28025 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28026 (unsigned long) ref->lineno);
28027 dw2_asm_output_nstring (ref->info, -1, "The macro");
28028 break;
28029 case DW_MACRO_define_strp:
28030 case DW_MACRO_undef_strp:
28031 node = find_AT_string (ref->info);
28032 gcc_assert (node
28033 && (node->form == DW_FORM_strp
28034 || node->form == DW_FORM_GNU_str_index));
28035 dw2_asm_output_data (1, ref->code,
28036 ref->code == DW_MACRO_define_strp
28037 ? "Define macro strp"
28038 : "Undefine macro strp");
28039 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28040 (unsigned long) ref->lineno);
28041 if (node->form == DW_FORM_strp)
28042 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28043 debug_str_section, "The macro: \"%s\"",
28044 ref->info);
28045 else
28046 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28047 ref->info);
28048 break;
28049 case DW_MACRO_import:
28050 dw2_asm_output_data (1, ref->code, "Import");
28051 ASM_GENERATE_INTERNAL_LABEL (label,
28052 DEBUG_MACRO_SECTION_LABEL,
28053 ref->lineno + macinfo_label_base);
28054 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28055 break;
28056 default:
28057 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28058 ASM_COMMENT_START, (unsigned long) ref->code);
28059 break;
28060 }
28061 }
28062
28063 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28064    other compilation units' .debug_macinfo sections.  IDX is the index of
28065    the first define/undef op.  If the sequence can be shared, emit a
28066    DW_MACRO_import entry referencing a comdat .debug_macinfo section and
28067    return the number of ops that belong in that section.
28068    If the define/undef entry should be emitted normally, return 0.  */
28069
28070 static unsigned
28071 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28072 macinfo_hash_type **macinfo_htab)
28073 {
28074 macinfo_entry *first, *second, *cur, *inc;
28075 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28076 unsigned char checksum[16];
28077 struct md5_ctx ctx;
28078 char *grp_name, *tail;
28079 const char *base;
28080 unsigned int i, count, encoded_filename_len, linebuf_len;
28081 macinfo_entry **slot;
28082
28083 first = &(*macinfo_table)[idx];
28084 second = &(*macinfo_table)[idx + 1];
28085
28086 /* Optimize only if there are at least two consecutive define/undef ops,
28087 and either all of them are before first DW_MACINFO_start_file
28088 with lineno {0,1} (i.e. predefined macro block), or all of them are
28089 in some included header file. */
28090 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28091 return 0;
28092 if (vec_safe_is_empty (files))
28093 {
28094 if (first->lineno > 1 || second->lineno > 1)
28095 return 0;
28096 }
28097 else if (first->lineno == 0)
28098 return 0;
28099
28100 /* Find the last define/undef entry that can be grouped together
28101 with first and at the same time compute md5 checksum of their
28102 codes, linenumbers and strings. */
28103 md5_init_ctx (&ctx);
28104 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28105 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28106 break;
28107 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28108 break;
28109 else
28110 {
28111 unsigned char code = cur->code;
28112 md5_process_bytes (&code, 1, &ctx);
28113 checksum_uleb128 (cur->lineno, &ctx);
28114 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28115 }
28116 md5_finish_ctx (&ctx, checksum);
28117 count = i - idx;
28118
28119 /* From the containing include filename (if any) pick up just
28120 usable characters from its basename. */
28121 if (vec_safe_is_empty (files))
28122 base = "";
28123 else
28124 base = lbasename (files->last ().info);
28125 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28126 if (ISIDNUM (base[i]) || base[i] == '.')
28127 encoded_filename_len++;
28128 /* Count . at the end. */
28129 if (encoded_filename_len)
28130 encoded_filename_len++;
28131
28132 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28133 linebuf_len = strlen (linebuf);
28134
28135 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
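/* E.g. (hypothetical values) defines included from stdio.h at line 3 with
   4-byte DWARF offsets might be grouped under a name like
   "wm4.stdio.h.3.0123456789abcdef0123456789abcdef".  */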
28136 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28137 + 16 * 2 + 1);
28138 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28139 tail = grp_name + 4;
28140 if (encoded_filename_len)
28141 {
28142 for (i = 0; base[i]; i++)
28143 if (ISIDNUM (base[i]) || base[i] == '.')
28144 *tail++ = base[i];
28145 *tail++ = '.';
28146 }
28147 memcpy (tail, linebuf, linebuf_len);
28148 tail += linebuf_len;
28149 *tail++ = '.';
28150 for (i = 0; i < 16; i++)
28151 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28152
28153 /* Construct a macinfo_entry for DW_MACRO_import
28154 in the empty vector entry before the first define/undef. */
28155 inc = &(*macinfo_table)[idx - 1];
28156 inc->code = DW_MACRO_import;
28157 inc->lineno = 0;
28158 inc->info = ggc_strdup (grp_name);
28159 if (!*macinfo_htab)
28160 *macinfo_htab = new macinfo_hash_type (10);
28161 /* Avoid emitting duplicates. */
28162 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28163 if (*slot != NULL)
28164 {
28165 inc->code = 0;
28166 inc->info = NULL;
28167 /* If such an entry has been used before, just emit
28168 a DW_MACRO_import op. */
28169 inc = *slot;
28170 output_macinfo_op (inc);
28171 /* And clear all macinfo_entry in the range to avoid emitting them
28172 in the second pass. */
28173 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28174 {
28175 cur->code = 0;
28176 cur->info = NULL;
28177 }
28178 }
28179 else
28180 {
28181 *slot = inc;
28182 inc->lineno = (*macinfo_htab)->elements ();
28183 output_macinfo_op (inc);
28184 }
28185 return count;
28186 }
28187
28188 /* Save any strings needed by the macinfo table in the debug str
28189 table. All strings must be collected into the table by the time
28190 index_string is called. */
28191
28192 static void
28193 save_macinfo_strings (void)
28194 {
28195 unsigned len;
28196 unsigned i;
28197 macinfo_entry *ref;
28198
28199 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28200 {
28201 switch (ref->code)
28202 {
28203 /* Match the logic in output_macinfo_op to decide on
28204 indirect strings. */
28205 case DW_MACINFO_define:
28206 case DW_MACINFO_undef:
28207 len = strlen (ref->info) + 1;
28208 if (!dwarf_strict
28209 && len > DWARF_OFFSET_SIZE
28210 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28211 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28212 set_indirect_string (find_AT_string (ref->info));
28213 break;
28214 case DW_MACRO_define_strp:
28215 case DW_MACRO_undef_strp:
28216 set_indirect_string (find_AT_string (ref->info));
28217 break;
28218 default:
28219 break;
28220 }
28221 }
28222 }
28223
28224 /* Output macinfo section(s). */
28225
28226 static void
28227 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28228 {
28229 unsigned i;
28230 unsigned long length = vec_safe_length (macinfo_table);
28231 macinfo_entry *ref;
28232 vec<macinfo_entry, va_gc> *files = NULL;
28233 macinfo_hash_type *macinfo_htab = NULL;
28234 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28235
28236 if (! length)
28237 return;
28238
28239 /* output_macinfo* uses these interchangeably. */
28240 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28241 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28242 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28243 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28244
28245 /* AIX Assembler inserts the length, so adjust the reference to match the
28246 offset expected by debuggers. */
28247 strcpy (dl_section_ref, debug_line_label);
28248 if (XCOFF_DEBUGGING_INFO)
28249 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28250
28251 /* For .debug_macro emit the section header. */
28252 if (!dwarf_strict || dwarf_version >= 5)
28253 {
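      /* The header emitted below is a version uhalf followed by a flags byte
	 (bit 0: 64-bit offsets, bit 1: a .debug_line offset follows, hence
	 the values 3 and 2) and then the .debug_line offset itself.  */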
28254 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28255 "DWARF macro version number");
28256 if (DWARF_OFFSET_SIZE == 8)
28257 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28258 else
28259 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28260 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28261 debug_line_section, NULL);
28262 }
28263
28264 /* The first loop emits the primary .debug_macinfo section; each
28265    macinfo_entry is cleared right after its op is emitted.
28266    If a longer range of define/undef ops can be optimized using
28267    DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28268    vector slot before the first define/undef in the range, while the
28269    range itself is not emitted here but kept for the second loop.  */
28270 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28271 {
28272 switch (ref->code)
28273 {
28274 case DW_MACINFO_start_file:
28275 vec_safe_push (files, *ref);
28276 break;
28277 case DW_MACINFO_end_file:
28278 if (!vec_safe_is_empty (files))
28279 files->pop ();
28280 break;
28281 case DW_MACINFO_define:
28282 case DW_MACINFO_undef:
28283 if ((!dwarf_strict || dwarf_version >= 5)
28284 && HAVE_COMDAT_GROUP
28285 && vec_safe_length (files) != 1
28286 && i > 0
28287 && i + 1 < length
28288 && (*macinfo_table)[i - 1].code == 0)
28289 {
28290 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28291 if (count)
28292 {
28293 i += count - 1;
28294 continue;
28295 }
28296 }
28297 break;
28298 case 0:
28299 /* A dummy entry may be inserted at the beginning to be able
28300 to optimize the whole block of predefined macros. */
28301 if (i == 0)
28302 continue;
28303 default:
28304 break;
28305 }
28306 output_macinfo_op (ref);
28307 ref->info = NULL;
28308 ref->code = 0;
28309 }
28310
28311 if (!macinfo_htab)
28312 return;
28313
28314 /* Save the number of transparent includes so we can adjust the
28315 label number for the fat LTO object DWARF. */
28316 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28317
28318 delete macinfo_htab;
28319 macinfo_htab = NULL;
28320
28321 /* If any DW_MACRO_import entries were used, then at each of them
28322    terminate the current chain, switch to a new comdat .debug_macinfo
28323    section, and emit the following define/undef entries within it.  */
28324 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28325 switch (ref->code)
28326 {
28327 case 0:
28328 continue;
28329 case DW_MACRO_import:
28330 {
28331 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28332 tree comdat_key = get_identifier (ref->info);
28333 /* Terminate the previous .debug_macinfo section. */
28334 dw2_asm_output_data (1, 0, "End compilation unit");
28335 targetm.asm_out.named_section (debug_macinfo_section_name,
28336 SECTION_DEBUG
28337 | SECTION_LINKONCE
28338 | (early_lto_debug
28339 ? SECTION_EXCLUDE : 0),
28340 comdat_key);
28341 ASM_GENERATE_INTERNAL_LABEL (label,
28342 DEBUG_MACRO_SECTION_LABEL,
28343 ref->lineno + macinfo_label_base);
28344 ASM_OUTPUT_LABEL (asm_out_file, label);
28345 ref->code = 0;
28346 ref->info = NULL;
28347 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28348 "DWARF macro version number");
28349 if (DWARF_OFFSET_SIZE == 8)
28350 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28351 else
28352 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28353 }
28354 break;
28355 case DW_MACINFO_define:
28356 case DW_MACINFO_undef:
28357 output_macinfo_op (ref);
28358 ref->code = 0;
28359 ref->info = NULL;
28360 break;
28361 default:
28362 gcc_unreachable ();
28363 }
28364
28365 macinfo_label_base += macinfo_label_base_adj;
28366 }
28367
28368 /* Initialize the various sections and labels for dwarf output, using
28369    the LTO early-debug sections if EARLY_LTO_DEBUG.  Returns the
28370    generation (zero-based number of times the function was called).  */
28371
28372 static unsigned
28373 init_sections_and_labels (bool early_lto_debug)
28374 {
28375 /* As we may get called multiple times have a generation count for
28376 labels. */
28377 static unsigned generation = 0;
28378
28379 if (early_lto_debug)
28380 {
28381 if (!dwarf_split_debug_info)
28382 {
28383 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28384 SECTION_DEBUG | SECTION_EXCLUDE,
28385 NULL);
28386 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28387 SECTION_DEBUG | SECTION_EXCLUDE,
28388 NULL);
28389 debug_macinfo_section_name
28390 = ((dwarf_strict && dwarf_version < 5)
28391 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28392 debug_macinfo_section = get_section (debug_macinfo_section_name,
28393 SECTION_DEBUG
28394 | SECTION_EXCLUDE, NULL);
28395 /* For macro info we have to refer to a debug_line section, so
28396 similar to split-dwarf emit a skeleton one for early debug. */
28397 debug_skeleton_line_section
28398 = get_section (DEBUG_LTO_LINE_SECTION,
28399 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28400 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28401 DEBUG_SKELETON_LINE_SECTION_LABEL,
28402 generation);
28403 }
28404 else
28405 {
28406 /* ??? Which of the following do we need early? */
28407 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28408 SECTION_DEBUG | SECTION_EXCLUDE,
28409 NULL);
28410 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28411 SECTION_DEBUG | SECTION_EXCLUDE,
28412 NULL);
28413 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28414 SECTION_DEBUG
28415 | SECTION_EXCLUDE, NULL);
28416 debug_skeleton_abbrev_section
28417 = get_section (DEBUG_LTO_ABBREV_SECTION,
28418 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28419 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28420 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28421 generation);
28422
28423 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28424 stay in the main .o, but the skeleton_line goes into the split
28425 off dwo. */
28426 debug_skeleton_line_section
28427 = get_section (DEBUG_LTO_LINE_SECTION,
28428 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28429 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28430 DEBUG_SKELETON_LINE_SECTION_LABEL,
28431 generation);
28432 debug_str_offsets_section
28433 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28434 SECTION_DEBUG | SECTION_EXCLUDE,
28435 NULL);
28436 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28437 DEBUG_SKELETON_INFO_SECTION_LABEL,
28438 generation);
28439 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28440 DEBUG_STR_DWO_SECTION_FLAGS,
28441 NULL);
28442 debug_macinfo_section_name
28443 = ((dwarf_strict && dwarf_version < 5)
28444 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28445 debug_macinfo_section = get_section (debug_macinfo_section_name,
28446 SECTION_DEBUG | SECTION_EXCLUDE,
28447 NULL);
28448 }
28449 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28450 DEBUG_STR_SECTION_FLAGS
28451 | SECTION_EXCLUDE, NULL);
28452 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28453 debug_line_str_section
28454 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28455 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28456 }
28457 else
28458 {
28459 if (!dwarf_split_debug_info)
28460 {
28461 debug_info_section = get_section (DEBUG_INFO_SECTION,
28462 SECTION_DEBUG, NULL);
28463 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28464 SECTION_DEBUG, NULL);
28465 debug_loc_section = get_section (dwarf_version >= 5
28466 ? DEBUG_LOCLISTS_SECTION
28467 : DEBUG_LOC_SECTION,
28468 SECTION_DEBUG, NULL);
28469 debug_macinfo_section_name
28470 = ((dwarf_strict && dwarf_version < 5)
28471 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28472 debug_macinfo_section = get_section (debug_macinfo_section_name,
28473 SECTION_DEBUG, NULL);
28474 }
28475 else
28476 {
28477 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28478 SECTION_DEBUG | SECTION_EXCLUDE,
28479 NULL);
28480 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28481 SECTION_DEBUG | SECTION_EXCLUDE,
28482 NULL);
28483 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28484 SECTION_DEBUG, NULL);
28485 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28486 SECTION_DEBUG, NULL);
28487 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28488 SECTION_DEBUG, NULL);
28489 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28490 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28491 generation);
28492
28493 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28494 stay in the main .o, but the skeleton_line goes into the
28495 split off dwo. */
28496 debug_skeleton_line_section
28497 = get_section (DEBUG_DWO_LINE_SECTION,
28498 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28499 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28500 DEBUG_SKELETON_LINE_SECTION_LABEL,
28501 generation);
28502 debug_str_offsets_section
28503 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28504 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28505 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28506 DEBUG_SKELETON_INFO_SECTION_LABEL,
28507 generation);
28508 debug_loc_section = get_section (dwarf_version >= 5
28509 ? DEBUG_DWO_LOCLISTS_SECTION
28510 : DEBUG_DWO_LOC_SECTION,
28511 SECTION_DEBUG | SECTION_EXCLUDE,
28512 NULL);
28513 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28514 DEBUG_STR_DWO_SECTION_FLAGS,
28515 NULL);
28516 debug_macinfo_section_name
28517 = ((dwarf_strict && dwarf_version < 5)
28518 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28519 debug_macinfo_section = get_section (debug_macinfo_section_name,
28520 SECTION_DEBUG | SECTION_EXCLUDE,
28521 NULL);
28522 }
28523 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28524 SECTION_DEBUG, NULL);
28525 debug_line_section = get_section (DEBUG_LINE_SECTION,
28526 SECTION_DEBUG, NULL);
28527 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28528 SECTION_DEBUG, NULL);
28529 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28530 SECTION_DEBUG, NULL);
28531 debug_str_section = get_section (DEBUG_STR_SECTION,
28532 DEBUG_STR_SECTION_FLAGS, NULL);
28533 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28534 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28535 DEBUG_STR_SECTION_FLAGS, NULL);
28536
28537 debug_ranges_section = get_section (dwarf_version >= 5
28538 ? DEBUG_RNGLISTS_SECTION
28539 : DEBUG_RANGES_SECTION,
28540 SECTION_DEBUG, NULL);
28541 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28542 SECTION_DEBUG, NULL);
28543 }
28544
28545 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28546 DEBUG_ABBREV_SECTION_LABEL, generation);
28547 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28548 DEBUG_INFO_SECTION_LABEL, generation);
28549 info_section_emitted = false;
28550 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28551 DEBUG_LINE_SECTION_LABEL, generation);
28552 /* There are up to 4 unique ranges labels per generation.
28553 See also output_rnglists. */
28554 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28555 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28556 if (dwarf_version >= 5 && dwarf_split_debug_info)
28557 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28558 DEBUG_RANGES_SECTION_LABEL,
28559 1 + generation * 4);
28560 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28561 DEBUG_ADDR_SECTION_LABEL, generation);
28562 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28563 (dwarf_strict && dwarf_version < 5)
28564 ? DEBUG_MACINFO_SECTION_LABEL
28565 : DEBUG_MACRO_SECTION_LABEL, generation);
28566 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28567 generation);
28568
28569 ++generation;
28570 return generation - 1;
28571 }
28572
28573 /* Set up for Dwarf output at the start of compilation. */
28574
28575 static void
28576 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28577 {
28578 /* Allocate the file_table. */
28579 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28580
28581 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28582 /* Allocate the decl_die_table. */
28583 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28584
28585 /* Allocate the decl_loc_table. */
28586 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28587
28588 /* Allocate the cached_dw_loc_list_table. */
28589 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28590
28591 /* Allocate the initial hunk of the decl_scope_table. */
28592 vec_alloc (decl_scope_table, 256);
28593
28594 /* Allocate the initial hunk of the abbrev_die_table. */
28595 vec_alloc (abbrev_die_table, 256);
28596 /* Zero-th entry is allocated, but unused. */
28597 abbrev_die_table->quick_push (NULL);
28598
28599 /* Allocate the dwarf_proc_stack_usage_map. */
28600 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28601
28602 /* Allocate the pubtypes and pubnames vectors. */
28603 vec_alloc (pubname_table, 32);
28604 vec_alloc (pubtype_table, 32);
28605
28606 vec_alloc (incomplete_types, 64);
28607
28608 vec_alloc (used_rtx_array, 32);
28609
28610 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28611 vec_alloc (macinfo_table, 64);
28612 #endif
28613
28614 /* If front-ends already registered a main translation unit but we were not
28615 ready to perform the association, do this now. */
28616 if (main_translation_unit != NULL_TREE)
28617 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28618 }
28619
28620 /* Called before compile () starts outputting functions, variables
28621 and toplevel asms into assembly. */
28622
28623 static void
28624 dwarf2out_assembly_start (void)
28625 {
28626 if (text_section_line_info)
28627 return;
28628
28629 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28630 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28631 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28632 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28633 COLD_TEXT_SECTION_LABEL, 0);
28634 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28635
28636 switch_to_section (text_section);
28637 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28638 #endif
28639
28640 /* Make sure the line number table for .text always exists. */
28641 text_section_line_info = new_line_info_table ();
28642 text_section_line_info->end_label = text_end_label;
28643
28644 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28645 cur_line_info_table = text_section_line_info;
28646 #endif
28647
28648 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28649 && dwarf2out_do_cfi_asm ()
28650 && !dwarf2out_do_eh_frame ())
28651 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28652 }
28653
28654 /* A helper function for dwarf2out_finish called through
28655 htab_traverse. Assign a string its index. All strings must be
28656 collected into the table by the time index_string is called,
28657 because the indexing code relies on htab_traverse to traverse nodes
28658 in the same order for each run. */
28659
28660 int
28661 index_string (indirect_string_node **h, unsigned int *index)
28662 {
28663 indirect_string_node *node = *h;
28664
28665 find_string_form (node);
28666 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28667 {
28668 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28669 node->index = *index;
28670 *index += 1;
28671 }
28672 return 1;
28673 }
28674
28675 /* A helper function for output_indirect_strings called through
28676 htab_traverse. Output the offset to a string and update the
28677 current offset. */
28678
28679 int
28680 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28681 {
28682 indirect_string_node *node = *h;
28683
28684 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28685 {
28686 /* Assert that this node has been assigned an index. */
28687 gcc_assert (node->index != NO_INDEX_ASSIGNED
28688 && node->index != NOT_INDEXED);
28689 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28690 "indexed string 0x%x: %s", node->index, node->str);
28691 *offset += strlen (node->str) + 1;
28692 }
28693 return 1;
28694 }
28695
28696 /* A helper function for dwarf2out_finish called through
28697 htab_traverse. Output the indexed string. */
28698
28699 int
28700 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28701 {
28702 struct indirect_string_node *node = *h;
28703
28704 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28705 {
28706 /* Assert that the strings are output in the same order as their
28707 indexes were assigned. */
28708 gcc_assert (*cur_idx == node->index);
28709 assemble_string (node->str, strlen (node->str) + 1);
28710 *cur_idx += 1;
28711 }
28712 return 1;
28713 }
28714
28715 /* A helper function for dwarf2out_finish called through
28716 htab_traverse. Emit one queued .debug_str string. */
28717
28718 int
28719 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28720 {
28721 struct indirect_string_node *node = *h;
28722
28723 node->form = find_string_form (node);
28724 if (node->form == form && node->refcount > 0)
28725 {
28726 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28727 assemble_string (node->str, strlen (node->str) + 1);
28728 }
28729
28730 return 1;
28731 }
28732
28733 /* Output the indexed string table. */
28734
28735 static void
28736 output_indirect_strings (void)
28737 {
28738 switch_to_section (debug_str_section);
28739 if (!dwarf_split_debug_info)
28740 debug_str_hash->traverse<enum dwarf_form,
28741 output_indirect_string> (DW_FORM_strp);
28742 else
28743 {
28744 unsigned int offset = 0;
28745 unsigned int cur_idx = 0;
28746
28747 if (skeleton_debug_str_hash)
28748 skeleton_debug_str_hash->traverse<enum dwarf_form,
28749 output_indirect_string> (DW_FORM_strp);
28750
28751 switch_to_section (debug_str_offsets_section);
28752 debug_str_hash->traverse_noresize
28753 <unsigned int *, output_index_string_offset> (&offset);
28754 switch_to_section (debug_str_dwo_section);
28755 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28756 (&cur_idx);
28757 }
28758 }
28759
28760 /* Callback for htab_traverse to assign an index to an entry in the
28761 table, and to write that entry to the .debug_addr section. */
28762
28763 int
28764 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28765 {
28766 addr_table_entry *entry = *slot;
28767
28768 if (entry->refcount == 0)
28769 {
28770 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28771 || entry->index == NOT_INDEXED);
28772 return 1;
28773 }
28774
28775 gcc_assert (entry->index == *cur_index);
28776 (*cur_index)++;
28777
28778 switch (entry->kind)
28779 {
28780 case ate_kind_rtx:
28781 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28782 "0x%x", entry->index);
28783 break;
28784 case ate_kind_rtx_dtprel:
28785 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28786 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28787 DWARF2_ADDR_SIZE,
28788 entry->addr.rtl);
28789 fputc ('\n', asm_out_file);
28790 break;
28791 case ate_kind_label:
28792 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28793 "0x%x", entry->index);
28794 break;
28795 default:
28796 gcc_unreachable ();
28797 }
28798 return 1;
28799 }
28800
28801 /* Produce the .debug_addr section. */
28802
28803 static void
28804 output_addr_table (void)
28805 {
28806 unsigned int index = 0;
28807 if (addr_index_table == NULL || addr_index_table->size () == 0)
28808 return;
28809
28810 switch_to_section (debug_addr_section);
28811 addr_index_table
28812 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28813 }
28814
28815 #if ENABLE_ASSERT_CHECKING
28816 /* Verify that all marks are clear. */
28817
28818 static void
28819 verify_marks_clear (dw_die_ref die)
28820 {
28821 dw_die_ref c;
28822
28823 gcc_assert (! die->die_mark);
28824 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28825 }
28826 #endif /* ENABLE_ASSERT_CHECKING */
28827
28828 /* Clear the marks for a die and its children.
28829 Be cool if the mark isn't set. */
28830
28831 static void
28832 prune_unmark_dies (dw_die_ref die)
28833 {
28834 dw_die_ref c;
28835
28836 if (die->die_mark)
28837 die->die_mark = 0;
28838 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28839 }
28840
28841 /* Given LOC that is referenced by a DIE we're marking as used, find the
28842    DWARF procedures and other DIEs it references and mark them as used.  */
28843
28844 static void
28845 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28846 {
28847 for (; loc != NULL; loc = loc->dw_loc_next)
28848 switch (loc->dw_loc_opc)
28849 {
28850 case DW_OP_implicit_pointer:
28851 case DW_OP_convert:
28852 case DW_OP_reinterpret:
28853 case DW_OP_GNU_implicit_pointer:
28854 case DW_OP_GNU_convert:
28855 case DW_OP_GNU_reinterpret:
28856 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28857 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28858 break;
28859 case DW_OP_GNU_variable_value:
28860 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28861 {
28862 dw_die_ref ref
28863 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28864 if (ref == NULL)
28865 break;
28866 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28867 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28868 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28869 }
28870 /* FALLTHRU */
28871 case DW_OP_call2:
28872 case DW_OP_call4:
28873 case DW_OP_call_ref:
28874 case DW_OP_const_type:
28875 case DW_OP_GNU_const_type:
28876 case DW_OP_GNU_parameter_ref:
28877 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28878 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28879 break;
28880 case DW_OP_regval_type:
28881 case DW_OP_deref_type:
28882 case DW_OP_GNU_regval_type:
28883 case DW_OP_GNU_deref_type:
28884 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28885 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28886 break;
28887 case DW_OP_entry_value:
28888 case DW_OP_GNU_entry_value:
28889 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28890 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28891 break;
28892 default:
28893 break;
28894 }
28895 }
28896
28897 /* Given DIE that we're marking as used, find any other dies
28898 it references as attributes and mark them as used. */
28899
28900 static void
28901 prune_unused_types_walk_attribs (dw_die_ref die)
28902 {
28903 dw_attr_node *a;
28904 unsigned ix;
28905
28906 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28907 {
28908 switch (AT_class (a))
28909 {
28910 /* Make sure DWARF procedures referenced by location descriptions will
28911 get emitted. */
28912 case dw_val_class_loc:
28913 prune_unused_types_walk_loc_descr (AT_loc (a));
28914 break;
28915 case dw_val_class_loc_list:
28916 for (dw_loc_list_ref list = AT_loc_list (a);
28917 list != NULL;
28918 list = list->dw_loc_next)
28919 prune_unused_types_walk_loc_descr (list->expr);
28920 break;
28921
28922 case dw_val_class_view_list:
28923 /* This points to a loc_list in another attribute, so it's
28924 already covered. */
28925 break;
28926
28927 case dw_val_class_die_ref:
28928 /* A reference to another DIE.
28929 Make sure that it will get emitted.
28930 If it was broken out into a comdat group, don't follow it. */
28931 if (! AT_ref (a)->comdat_type_p
28932 || a->dw_attr == DW_AT_specification)
28933 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28934 break;
28935
28936 case dw_val_class_str:
28937 /* Set the string's refcount to 0 so that prune_unused_types_mark
28938 accounts properly for it. */
28939 a->dw_attr_val.v.val_str->refcount = 0;
28940 break;
28941
28942 default:
28943 break;
28944 }
28945 }
28946 }
28947
28948 /* Mark the children DIEs of DIE that describe its generic parameters and arguments.  */
28949
28950 static void
28951 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28952 {
28953 dw_die_ref c;
28954
28955 if (die == NULL || die->die_child == NULL)
28956 return;
28957 c = die->die_child;
28958 do
28959 {
28960 if (is_template_parameter (c))
28961 prune_unused_types_mark (c, 1);
28962 c = c->die_sib;
28963 } while (c && c != die->die_child);
28964 }
28965
28966 /* Mark DIE as being used. If DOKIDS is true, then walk down
28967 to DIE's children. */
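/* The die_mark values used by this pass, as seen below and in
   prune_unused_types_walk: 0 = not yet visited, 1 = marked as used,
   2 = marked and children walked.  */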
28968
28969 static void
28970 prune_unused_types_mark (dw_die_ref die, int dokids)
28971 {
28972 dw_die_ref c;
28973
28974 if (die->die_mark == 0)
28975 {
28976 /* We haven't done this node yet. Mark it as used. */
28977 die->die_mark = 1;
28978 /* If this is the DIE of a generic type instantiation,
28979 mark the children DIEs that describe its generic parms and
28980 args. */
28981 prune_unused_types_mark_generic_parms_dies (die);
28982
28983 /* We also have to mark its parents as used.
28984 (But we don't want to mark our parent's kids due to this,
28985 unless it is a class.) */
28986 if (die->die_parent)
28987 prune_unused_types_mark (die->die_parent,
28988 class_scope_p (die->die_parent));
28989
28990 /* Mark any referenced nodes. */
28991 prune_unused_types_walk_attribs (die);
28992
28993 /* If this node is a specification,
28994 also mark the definition, if it exists. */
28995 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
28996 prune_unused_types_mark (die->die_definition, 1);
28997 }
28998
28999 if (dokids && die->die_mark != 2)
29000 {
29001 /* We need to walk the children, but haven't done so yet.
29002 Remember that we've walked the kids. */
29003 die->die_mark = 2;
29004
29005 /* If this is an array type, we need to make sure our
29006 kids get marked, even if they're types. If we're
29007 breaking out types into comdat sections, do this
29008 for all type definitions. */
29009 if (die->die_tag == DW_TAG_array_type
29010 || (use_debug_types
29011 && is_type_die (die) && ! is_declaration_die (die)))
29012 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29013 else
29014 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29015 }
29016 }
29017
29018 /* For local classes, check whether any static member functions were
29019    emitted and, if so, mark them.  */
29020
29021 static void
29022 prune_unused_types_walk_local_classes (dw_die_ref die)
29023 {
29024 dw_die_ref c;
29025
29026 if (die->die_mark == 2)
29027 return;
29028
29029 switch (die->die_tag)
29030 {
29031 case DW_TAG_structure_type:
29032 case DW_TAG_union_type:
29033 case DW_TAG_class_type:
29034 break;
29035
29036 case DW_TAG_subprogram:
29037 if (!get_AT_flag (die, DW_AT_declaration)
29038 || die->die_definition != NULL)
29039 prune_unused_types_mark (die, 1);
29040 return;
29041
29042 default:
29043 return;
29044 }
29045
29046 /* Mark children. */
29047 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29048 }
29049
29050 /* Walk the tree DIE and mark types that we actually use. */
29051
29052 static void
29053 prune_unused_types_walk (dw_die_ref die)
29054 {
29055 dw_die_ref c;
29056
29057 /* Don't do anything if this node is already marked and
29058 children have been marked as well. */
29059 if (die->die_mark == 2)
29060 return;
29061
29062 switch (die->die_tag)
29063 {
29064 case DW_TAG_structure_type:
29065 case DW_TAG_union_type:
29066 case DW_TAG_class_type:
29067 if (die->die_perennial_p)
29068 break;
29069
29070 for (c = die->die_parent; c; c = c->die_parent)
29071 if (c->die_tag == DW_TAG_subprogram)
29072 break;
29073
29074 /* Finding used static member functions inside of classes
29075 is needed just for local classes, because for other classes
29076 static member function DIEs with DW_AT_specification
29077 are emitted outside of the DW_TAG_*_type. If we ever change
29078 it, we'd need to call this even for non-local classes. */
29079 if (c)
29080 prune_unused_types_walk_local_classes (die);
29081
29082 /* It's a type node --- don't mark it. */
29083 return;
29084
29085 case DW_TAG_const_type:
29086 case DW_TAG_packed_type:
29087 case DW_TAG_pointer_type:
29088 case DW_TAG_reference_type:
29089 case DW_TAG_rvalue_reference_type:
29090 case DW_TAG_volatile_type:
29091 case DW_TAG_typedef:
29092 case DW_TAG_array_type:
29093 case DW_TAG_interface_type:
29094 case DW_TAG_friend:
29095 case DW_TAG_enumeration_type:
29096 case DW_TAG_subroutine_type:
29097 case DW_TAG_string_type:
29098 case DW_TAG_set_type:
29099 case DW_TAG_subrange_type:
29100 case DW_TAG_ptr_to_member_type:
29101 case DW_TAG_file_type:
29102 /* Type nodes are useful only when other DIEs reference them --- don't
29103 mark them. */
29104 /* FALLTHROUGH */
29105
29106 case DW_TAG_dwarf_procedure:
29107 /* Likewise for DWARF procedures. */
29108
29109 if (die->die_perennial_p)
29110 break;
29111
29112 return;
29113
29114 default:
29115 /* Mark everything else. */
29116 break;
29117 }
29118
29119 if (die->die_mark == 0)
29120 {
29121 die->die_mark = 1;
29122
29123 /* Now, mark any dies referenced from here. */
29124 prune_unused_types_walk_attribs (die);
29125 }
29126
29127 die->die_mark = 2;
29128
29129 /* Mark children. */
29130 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29131 }
29132
29133 /* Increment the string counts on strings referred to from DIE's
29134 attributes. */
29135
29136 static void
29137 prune_unused_types_update_strings (dw_die_ref die)
29138 {
29139 dw_attr_node *a;
29140 unsigned ix;
29141
29142 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29143 if (AT_class (a) == dw_val_class_str)
29144 {
29145 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29146 s->refcount++;
29147 /* Avoid unnecessarily putting strings into the hash table when
29148    they are used fewer than twice. */
29149 if (s->refcount
29150 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29151 {
29152 indirect_string_node **slot
29153 = debug_str_hash->find_slot_with_hash (s->str,
29154 htab_hash_string (s->str),
29155 INSERT);
29156 gcc_assert (*slot == NULL);
29157 *slot = s;
29158 }
29159 }
29160 }
29161
29162 /* Mark DIE and its children as removed. */
29163
29164 static void
29165 mark_removed (dw_die_ref die)
29166 {
29167 dw_die_ref c;
29168 die->removed = true;
29169 FOR_EACH_CHILD (die, c, mark_removed (c));
29170 }
29171
29172 /* Remove from the tree DIE any dies that aren't marked. */
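/* Reminder for readers of the list surgery below: DIE->die_child points
   to the last child and the children are chained circularly through
   die_sib (the last child's die_sib is the first child), so the loop is
   done once it wraps around to DIE->die_child again.  */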
29173
29174 static void
29175 prune_unused_types_prune (dw_die_ref die)
29176 {
29177 dw_die_ref c;
29178
29179 gcc_assert (die->die_mark);
29180 prune_unused_types_update_strings (die);
29181
29182 if (! die->die_child)
29183 return;
29184
29185 c = die->die_child;
29186 do {
29187 dw_die_ref prev = c, next;
29188 for (c = c->die_sib; ! c->die_mark; c = next)
29189 if (c == die->die_child)
29190 {
29191 /* No marked children between 'prev' and the end of the list. */
29192 if (prev == c)
29193 /* No marked children at all. */
29194 die->die_child = NULL;
29195 else
29196 {
29197 prev->die_sib = c->die_sib;
29198 die->die_child = prev;
29199 }
29200 c->die_sib = NULL;
29201 mark_removed (c);
29202 return;
29203 }
29204 else
29205 {
29206 next = c->die_sib;
29207 c->die_sib = NULL;
29208 mark_removed (c);
29209 }
29210
29211 if (c != prev->die_sib)
29212 prev->die_sib = c;
29213 prune_unused_types_prune (c);
29214 } while (c != die->die_child);
29215 }
29216
29217 /* Remove dies representing declarations that we never use. */
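/* Rough outline of this pass, for orientation (derived from the code
   below): verify that all marks are clear, premark types used by global
   variables, walk the CU, limbo and comdat type trees marking reachable
   DIEs, additionally mark pubnames, collected base types and potential
   DW_AT_call_origin callees, then prune every unmarked DIE and finally
   clear the marks again.  */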
29218
29219 static void
29220 prune_unused_types (void)
29221 {
29222 unsigned int i;
29223 limbo_die_node *node;
29224 comdat_type_node *ctnode;
29225 pubname_entry *pub;
29226 dw_die_ref base_type;
29227
29228 #if ENABLE_ASSERT_CHECKING
29229 /* All the marks should already be clear. */
29230 verify_marks_clear (comp_unit_die ());
29231 for (node = limbo_die_list; node; node = node->next)
29232 verify_marks_clear (node->die);
29233 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29234 verify_marks_clear (ctnode->root_die);
29235 #endif /* ENABLE_ASSERT_CHECKING */
29236
29237 /* Mark types that are used in global variables. */
29238 premark_types_used_by_global_vars ();
29239
29240 /* Set the mark on nodes that are actually used. */
29241 prune_unused_types_walk (comp_unit_die ());
29242 for (node = limbo_die_list; node; node = node->next)
29243 prune_unused_types_walk (node->die);
29244 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29245 {
29246 prune_unused_types_walk (ctnode->root_die);
29247 prune_unused_types_mark (ctnode->type_die, 1);
29248 }
29249
29250 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29251 are unusual in that they are pubnames that are the children of pubtypes.
29252 They should only be marked via their parent DW_TAG_enumeration_type die,
29253 not as roots in themselves. */
29254 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29255 if (pub->die->die_tag != DW_TAG_enumerator)
29256 prune_unused_types_mark (pub->die, 1);
29257 for (i = 0; base_types.iterate (i, &base_type); i++)
29258 prune_unused_types_mark (base_type, 1);
29259
29260 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29261 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29262 callees). */
29263 cgraph_node *cnode;
29264 FOR_EACH_FUNCTION (cnode)
29265 if (cnode->referred_to_p (false))
29266 {
29267 dw_die_ref die = lookup_decl_die (cnode->decl);
29268 if (die == NULL || die->die_mark)
29269 continue;
29270 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29271 if (e->caller != cnode
29272 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29273 {
29274 prune_unused_types_mark (die, 1);
29275 break;
29276 }
29277 }
29278
29279 if (debug_str_hash)
29280 debug_str_hash->empty ();
29281 if (skeleton_debug_str_hash)
29282 skeleton_debug_str_hash->empty ();
29283 prune_unused_types_prune (comp_unit_die ());
29284 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29285 {
29286 node = *pnode;
29287 if (!node->die->die_mark)
29288 *pnode = node->next;
29289 else
29290 {
29291 prune_unused_types_prune (node->die);
29292 pnode = &node->next;
29293 }
29294 }
29295 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29296 prune_unused_types_prune (ctnode->root_die);
29297
29298 /* Leave the marks clear. */
29299 prune_unmark_dies (comp_unit_die ());
29300 for (node = limbo_die_list; node; node = node->next)
29301 prune_unmark_dies (node->die);
29302 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29303 prune_unmark_dies (ctnode->root_die);
29304 }
29305
29306 /* Helpers to manipulate hash table of comdat type units. */
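/* Note: comdat_type_hasher::hash below simply reuses the leading bytes
   of the 8-byte DWARF type signature.  The signature is already the
   output of a strong checksum, so its bytes are presumably well enough
   distributed to serve as a hash value without further mixing.  */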
29307
29308 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29309 {
29310 static inline hashval_t hash (const comdat_type_node *);
29311 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29312 };
29313
29314 inline hashval_t
29315 comdat_type_hasher::hash (const comdat_type_node *type_node)
29316 {
29317 hashval_t h;
29318 memcpy (&h, type_node->signature, sizeof (h));
29319 return h;
29320 }
29321
29322 inline bool
29323 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29324 const comdat_type_node *type_node_2)
29325 {
29326 return (! memcmp (type_node_1->signature, type_node_2->signature,
29327 DWARF_TYPE_SIGNATURE_SIZE));
29328 }
29329
29330 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29331    to the location where it would have been added had we known its
29332    DECL_ASSEMBLER_NAME when we added the other attributes. This will
29333    probably improve the compactness of the debug info by removing
29334    equivalent abbrevs, and hide any differences caused by deferring the
29335    computation of the assembler name, triggered e.g. by PCH. */
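/* Illustrative example (hypothetical attribute order): if the DIE's
   attributes are
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_external,
     DW_AT_linkage_name
   after the linkage name was appended last, the loop below moves
   DW_AT_linkage_name to just after DW_AT_decl_line, giving
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
     DW_AT_external.  */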
29336
29337 static inline void
29338 move_linkage_attr (dw_die_ref die)
29339 {
29340 unsigned ix = vec_safe_length (die->die_attr);
29341 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29342
29343 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29344 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29345
29346 while (--ix > 0)
29347 {
29348 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29349
29350 if (prev->dw_attr == DW_AT_decl_line
29351 || prev->dw_attr == DW_AT_decl_column
29352 || prev->dw_attr == DW_AT_name)
29353 break;
29354 }
29355
29356 if (ix != vec_safe_length (die->die_attr) - 1)
29357 {
29358 die->die_attr->pop ();
29359 die->die_attr->quick_insert (ix, linkage);
29360 }
29361 }
29362
29363 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29364 referenced from typed stack ops and count how often they are used. */
29365
29366 static void
29367 mark_base_types (dw_loc_descr_ref loc)
29368 {
29369 dw_die_ref base_type = NULL;
29370
29371 for (; loc; loc = loc->dw_loc_next)
29372 {
29373 switch (loc->dw_loc_opc)
29374 {
29375 case DW_OP_regval_type:
29376 case DW_OP_deref_type:
29377 case DW_OP_GNU_regval_type:
29378 case DW_OP_GNU_deref_type:
29379 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29380 break;
29381 case DW_OP_convert:
29382 case DW_OP_reinterpret:
29383 case DW_OP_GNU_convert:
29384 case DW_OP_GNU_reinterpret:
29385 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29386 continue;
29387 /* FALLTHRU */
29388 case DW_OP_const_type:
29389 case DW_OP_GNU_const_type:
29390 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29391 break;
29392 case DW_OP_entry_value:
29393 case DW_OP_GNU_entry_value:
29394 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29395 continue;
29396 default:
29397 continue;
29398 }
29399 gcc_assert (base_type->die_parent == comp_unit_die ());
29400 if (base_type->die_mark)
29401 base_type->die_mark++;
29402 else
29403 {
29404 base_types.safe_push (base_type);
29405 base_type->die_mark = 1;
29406 }
29407 }
29408 }
29409
29410 /* Comparison function for sorting marked base types. */
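/* For instance (hypothetical counts): base types with die_mark use
   counts 7, 7 and 2 sort with the two most-used ones first; ties are
   then broken by decreasing DW_AT_byte_size, DW_AT_encoding and
   DW_AT_alignment, presumably to keep the output order stable.  */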
29411
29412 static int
29413 base_type_cmp (const void *x, const void *y)
29414 {
29415 dw_die_ref dx = *(const dw_die_ref *) x;
29416 dw_die_ref dy = *(const dw_die_ref *) y;
29417 unsigned int byte_size1, byte_size2;
29418 unsigned int encoding1, encoding2;
29419 unsigned int align1, align2;
29420 if (dx->die_mark > dy->die_mark)
29421 return -1;
29422 if (dx->die_mark < dy->die_mark)
29423 return 1;
29424 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29425 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29426 if (byte_size1 < byte_size2)
29427 return 1;
29428 if (byte_size1 > byte_size2)
29429 return -1;
29430 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29431 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29432 if (encoding1 < encoding2)
29433 return 1;
29434 if (encoding1 > encoding2)
29435 return -1;
29436 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29437 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29438 if (align1 < align2)
29439 return 1;
29440 if (align1 > align2)
29441 return -1;
29442 return 0;
29443 }
29444
29445 /* Move base types marked by mark_base_types as early as possible
29446 in the CU, sorted by decreasing usage count both to make the
29447 uleb128 references as small as possible and to make sure they
29448 will have die_offset already computed by calc_die_sizes when
29449    the sizes of typed stack loc ops are computed. */
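/* After this pass the marked DW_TAG_base_type DIEs are spliced in at the
   front of the comp_unit DIE's child list (most frequently used first),
   so they get small DIE offsets and hence short uleb128 references.  */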
29450
29451 static void
29452 move_marked_base_types (void)
29453 {
29454 unsigned int i;
29455 dw_die_ref base_type, die, c;
29456
29457 if (base_types.is_empty ())
29458 return;
29459
29460 /* Sort by decreasing usage count, they will be added again in that
29461 order later on. */
29462 base_types.qsort (base_type_cmp);
29463 die = comp_unit_die ();
29464 c = die->die_child;
29465 do
29466 {
29467 dw_die_ref prev = c;
29468 c = c->die_sib;
29469 while (c->die_mark)
29470 {
29471 remove_child_with_prev (c, prev);
29472 /* As base types got marked, there must be at least
29473 one node other than DW_TAG_base_type. */
29474 gcc_assert (die->die_child != NULL);
29475 c = prev->die_sib;
29476 }
29477 }
29478 while (c != die->die_child);
29479 gcc_assert (die->die_child);
29480 c = die->die_child;
29481 for (i = 0; base_types.iterate (i, &base_type); i++)
29482 {
29483 base_type->die_mark = 0;
29484 base_type->die_sib = c->die_sib;
29485 c->die_sib = base_type;
29486 c = base_type;
29487 }
29488 }
29489
29490 /* Helper function for resolve_addr; attempt to resolve
29491    one CONST_STRING and return true if successful. Similarly, verify that
29492    SYMBOL_REFs refer to variables emitted in the current CU. */
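/* For example, a DW_OP_addr referring to the constant-pool copy of a
   string literal is kept only if that pool entry was actually written
   out (TREE_ASM_WRITTEN); otherwise the caller drops or rewrites the
   containing location expression.  */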
29493
29494 static bool
29495 resolve_one_addr (rtx *addr)
29496 {
29497 rtx rtl = *addr;
29498
29499 if (GET_CODE (rtl) == CONST_STRING)
29500 {
29501 size_t len = strlen (XSTR (rtl, 0)) + 1;
29502 tree t = build_string (len, XSTR (rtl, 0));
29503 tree tlen = size_int (len - 1);
29504 TREE_TYPE (t)
29505 = build_array_type (char_type_node, build_index_type (tlen));
29506 rtl = lookup_constant_def (t);
29507 if (!rtl || !MEM_P (rtl))
29508 return false;
29509 rtl = XEXP (rtl, 0);
29510 if (GET_CODE (rtl) == SYMBOL_REF
29511 && SYMBOL_REF_DECL (rtl)
29512 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29513 return false;
29514 vec_safe_push (used_rtx_array, rtl);
29515 *addr = rtl;
29516 return true;
29517 }
29518
29519 if (GET_CODE (rtl) == SYMBOL_REF
29520 && SYMBOL_REF_DECL (rtl))
29521 {
29522 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29523 {
29524 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29525 return false;
29526 }
29527 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29528 return false;
29529 }
29530
29531 if (GET_CODE (rtl) == CONST)
29532 {
29533 subrtx_ptr_iterator::array_type array;
29534 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29535 if (!resolve_one_addr (*iter))
29536 return false;
29537 }
29538
29539 return true;
29540 }
29541
29542 /* For STRING_CST, return the SYMBOL_REF of its constant pool entry,
29543    if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29544    from DW_OP_implicit_pointer if the string hasn't been seen yet. */
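/* Illustrative sketch: for a pooled literal such as "abc", the new
   DW_TAG_dwarf_procedure DIE gets a DW_AT_location of the form
   DW_OP_implicit_value <length> <the string bytes>, so that
   DW_OP_implicit_pointer can later refer to it.  */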
29545
29546 static rtx
29547 string_cst_pool_decl (tree t)
29548 {
29549 rtx rtl = output_constant_def (t, 1);
29550 unsigned char *array;
29551 dw_loc_descr_ref l;
29552 tree decl;
29553 size_t len;
29554 dw_die_ref ref;
29555
29556 if (!rtl || !MEM_P (rtl))
29557 return NULL_RTX;
29558 rtl = XEXP (rtl, 0);
29559 if (GET_CODE (rtl) != SYMBOL_REF
29560 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29561 return NULL_RTX;
29562
29563 decl = SYMBOL_REF_DECL (rtl);
29564 if (!lookup_decl_die (decl))
29565 {
29566 len = TREE_STRING_LENGTH (t);
29567 vec_safe_push (used_rtx_array, rtl);
29568 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29569 array = ggc_vec_alloc<unsigned char> (len);
29570 memcpy (array, TREE_STRING_POINTER (t), len);
29571 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29572 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29573 l->dw_loc_oprnd2.v.val_vec.length = len;
29574 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29575 l->dw_loc_oprnd2.v.val_vec.array = array;
29576 add_AT_loc (ref, DW_AT_location, l);
29577 equate_decl_number_to_die (decl, ref);
29578 }
29579 return rtl;
29580 }
29581
29582 /* Helper function of resolve_addr_in_expr. LOC is
29583 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29584 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29585 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29586 with DW_OP_implicit_pointer if possible
29587    and return true; if unsuccessful, return false. */
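/* Illustrative example: an unresolvable
     DW_OP_addr <var + 4>  DW_OP_stack_value
   pair becomes
     DW_OP_implicit_pointer <DIE of var> 4
   provided the referenced variable's DIE has DW_AT_location or
   DW_AT_const_value.  */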
29588
29589 static bool
29590 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29591 {
29592 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29593 HOST_WIDE_INT offset = 0;
29594 dw_die_ref ref = NULL;
29595 tree decl;
29596
29597 if (GET_CODE (rtl) == CONST
29598 && GET_CODE (XEXP (rtl, 0)) == PLUS
29599 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29600 {
29601 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29602 rtl = XEXP (XEXP (rtl, 0), 0);
29603 }
29604 if (GET_CODE (rtl) == CONST_STRING)
29605 {
29606 size_t len = strlen (XSTR (rtl, 0)) + 1;
29607 tree t = build_string (len, XSTR (rtl, 0));
29608 tree tlen = size_int (len - 1);
29609
29610 TREE_TYPE (t)
29611 = build_array_type (char_type_node, build_index_type (tlen));
29612 rtl = string_cst_pool_decl (t);
29613 if (!rtl)
29614 return false;
29615 }
29616 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29617 {
29618 decl = SYMBOL_REF_DECL (rtl);
29619 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29620 {
29621 ref = lookup_decl_die (decl);
29622 if (ref && (get_AT (ref, DW_AT_location)
29623 || get_AT (ref, DW_AT_const_value)))
29624 {
29625 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29626 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29627 loc->dw_loc_oprnd1.val_entry = NULL;
29628 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29629 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29630 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29631 loc->dw_loc_oprnd2.v.val_int = offset;
29632 return true;
29633 }
29634 }
29635 }
29636 return false;
29637 }
29638
29639 /* Helper function for resolve_addr; handle one location
29640    expression and return false if at least one CONST_STRING or SYMBOL_REF
29641    in the location list couldn't be resolved. */
29642
29643 static bool
29644 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29645 {
29646 dw_loc_descr_ref keep = NULL;
29647 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29648 switch (loc->dw_loc_opc)
29649 {
29650 case DW_OP_addr:
29651 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29652 {
29653 if ((prev == NULL
29654 || prev->dw_loc_opc == DW_OP_piece
29655 || prev->dw_loc_opc == DW_OP_bit_piece)
29656 && loc->dw_loc_next
29657 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29658 && (!dwarf_strict || dwarf_version >= 5)
29659 && optimize_one_addr_into_implicit_ptr (loc))
29660 break;
29661 return false;
29662 }
29663 break;
29664 case DW_OP_GNU_addr_index:
29665 case DW_OP_GNU_const_index:
29666 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29667 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29668 {
29669 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29670 if (!resolve_one_addr (&rtl))
29671 return false;
29672 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29673 loc->dw_loc_oprnd1.val_entry
29674 = add_addr_table_entry (rtl, ate_kind_rtx);
29675 }
29676 break;
29677 case DW_OP_const4u:
29678 case DW_OP_const8u:
29679 if (loc->dtprel
29680 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29681 return false;
29682 break;
29683 case DW_OP_plus_uconst:
29684 if (size_of_loc_descr (loc)
29685 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29686 + 1
29687 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29688 {
29689 dw_loc_descr_ref repl
29690 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29691 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29692 add_loc_descr (&repl, loc->dw_loc_next);
29693 *loc = *repl;
29694 }
29695 break;
29696 case DW_OP_implicit_value:
29697 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29698 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29699 return false;
29700 break;
29701 case DW_OP_implicit_pointer:
29702 case DW_OP_GNU_implicit_pointer:
29703 case DW_OP_GNU_parameter_ref:
29704 case DW_OP_GNU_variable_value:
29705 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29706 {
29707 dw_die_ref ref
29708 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29709 if (ref == NULL)
29710 return false;
29711 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29712 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29713 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29714 }
29715 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29716 {
29717 if (prev == NULL
29718 && loc->dw_loc_next == NULL
29719 && AT_class (a) == dw_val_class_loc)
29720 switch (a->dw_attr)
29721 {
29722 /* Following attributes allow both exprloc and reference,
29723 so if the whole expression is DW_OP_GNU_variable_value
29724 alone we could transform it into reference. */
29725 case DW_AT_byte_size:
29726 case DW_AT_bit_size:
29727 case DW_AT_lower_bound:
29728 case DW_AT_upper_bound:
29729 case DW_AT_bit_stride:
29730 case DW_AT_count:
29731 case DW_AT_allocated:
29732 case DW_AT_associated:
29733 case DW_AT_byte_stride:
29734 a->dw_attr_val.val_class = dw_val_class_die_ref;
29735 a->dw_attr_val.val_entry = NULL;
29736 a->dw_attr_val.v.val_die_ref.die
29737 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29738 a->dw_attr_val.v.val_die_ref.external = 0;
29739 return true;
29740 default:
29741 break;
29742 }
29743 if (dwarf_strict)
29744 return false;
29745 }
29746 break;
29747 case DW_OP_const_type:
29748 case DW_OP_regval_type:
29749 case DW_OP_deref_type:
29750 case DW_OP_convert:
29751 case DW_OP_reinterpret:
29752 case DW_OP_GNU_const_type:
29753 case DW_OP_GNU_regval_type:
29754 case DW_OP_GNU_deref_type:
29755 case DW_OP_GNU_convert:
29756 case DW_OP_GNU_reinterpret:
29757 while (loc->dw_loc_next
29758 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29759 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29760 {
29761 dw_die_ref base1, base2;
29762 unsigned enc1, enc2, size1, size2;
29763 if (loc->dw_loc_opc == DW_OP_regval_type
29764 || loc->dw_loc_opc == DW_OP_deref_type
29765 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29766 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29767 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29768 else if (loc->dw_loc_oprnd1.val_class
29769 == dw_val_class_unsigned_const)
29770 break;
29771 else
29772 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29773 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29774 == dw_val_class_unsigned_const)
29775 break;
29776 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29777 gcc_assert (base1->die_tag == DW_TAG_base_type
29778 && base2->die_tag == DW_TAG_base_type);
29779 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29780 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29781 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29782 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29783 if (size1 == size2
29784 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29785 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29786 && loc != keep)
29787 || enc1 == enc2))
29788 {
29789 /* Optimize away next DW_OP_convert after
29790 adjusting LOC's base type die reference. */
29791 if (loc->dw_loc_opc == DW_OP_regval_type
29792 || loc->dw_loc_opc == DW_OP_deref_type
29793 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29794 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29795 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29796 else
29797 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29798 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29799 continue;
29800 }
29801 /* Don't change integer DW_OP_convert after e.g. floating
29802 point typed stack entry. */
29803 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29804 keep = loc->dw_loc_next;
29805 break;
29806 }
29807 break;
29808 default:
29809 break;
29810 }
29811 return true;
29812 }
29813
29814 /* Helper function of resolve_addr. DIE had DW_AT_location of
29815 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
29816 and DW_OP_addr couldn't be resolved. resolve_addr has already
29817 removed the DW_AT_location attribute. This function attempts to
29818    add to it, if possible, either a new DW_AT_location attribute with
29819    DW_OP_implicit_pointer or a DW_AT_const_value attribute. */
29820
29821 static void
29822 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29823 {
29824 if (!VAR_P (decl)
29825 || lookup_decl_die (decl) != die
29826 || DECL_EXTERNAL (decl)
29827 || !TREE_STATIC (decl)
29828 || DECL_INITIAL (decl) == NULL_TREE
29829 || DECL_P (DECL_INITIAL (decl))
29830 || get_AT (die, DW_AT_const_value))
29831 return;
29832
29833 tree init = DECL_INITIAL (decl);
29834 HOST_WIDE_INT offset = 0;
29835 /* For variables that have been optimized away and thus
29836 don't have a memory location, see if we can emit
29837 DW_AT_const_value instead. */
29838 if (tree_add_const_value_attribute (die, init))
29839 return;
29840 if (dwarf_strict && dwarf_version < 5)
29841 return;
29842 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29843 and ADDR_EXPR refers to a decl that has DW_AT_location or
29844 DW_AT_const_value (but isn't addressable, otherwise
29845 resolving the original DW_OP_addr wouldn't fail), see if
29846 we can add DW_OP_implicit_pointer. */
29847 STRIP_NOPS (init);
29848 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29849 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29850 {
29851 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29852 init = TREE_OPERAND (init, 0);
29853 STRIP_NOPS (init);
29854 }
29855 if (TREE_CODE (init) != ADDR_EXPR)
29856 return;
29857 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29858 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29859 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29860 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29861 && TREE_OPERAND (init, 0) != decl))
29862 {
29863 dw_die_ref ref;
29864 dw_loc_descr_ref l;
29865
29866 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29867 {
29868 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29869 if (!rtl)
29870 return;
29871 decl = SYMBOL_REF_DECL (rtl);
29872 }
29873 else
29874 decl = TREE_OPERAND (init, 0);
29875 ref = lookup_decl_die (decl);
29876 if (ref == NULL
29877 || (!get_AT (ref, DW_AT_location)
29878 && !get_AT (ref, DW_AT_const_value)))
29879 return;
29880 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29881 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29882 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29883 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29884 add_AT_loc (die, DW_AT_location, l);
29885 }
29886 }
29887
29888 /* Return NULL if L is a valid DWARF expression; otherwise return the
29889    first op that is not a valid DWARF expression. */
29890
29891 static dw_loc_descr_ref
29892 non_dwarf_expression (dw_loc_descr_ref l)
29893 {
29894 while (l)
29895 {
29896 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29897 return l;
29898 switch (l->dw_loc_opc)
29899 {
29900 case DW_OP_regx:
29901 case DW_OP_implicit_value:
29902 case DW_OP_stack_value:
29903 case DW_OP_implicit_pointer:
29904 case DW_OP_GNU_implicit_pointer:
29905 case DW_OP_GNU_parameter_ref:
29906 case DW_OP_piece:
29907 case DW_OP_bit_piece:
29908 return l;
29909 default:
29910 break;
29911 }
29912 l = l->dw_loc_next;
29913 }
29914 return NULL;
29915 }
29916
29917 /* Return an adjusted copy of EXPR:
29918    If it is an empty DWARF expression, return it.
29919    If it is a valid non-empty DWARF expression,
29920    return a copy of EXPR with DW_OP_deref appended to it.
29921    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
29922    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29923    If it is a DWARF expression followed by DW_OP_stack_value, return a
29924    copy of the DWARF expression without anything appended.
29925    Otherwise, return NULL. */
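/* Some illustrative cases (hypothetical expressions):
     DW_OP_fbreg -16                      ->  DW_OP_fbreg -16  DW_OP_deref
     DW_OP_reg5                           ->  DW_OP_breg5 0
     DW_OP_addr <sym>  DW_OP_stack_value  ->  DW_OP_addr <sym>
   while an expression ending in e.g. DW_OP_piece yields NULL.  */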
29926
29927 static dw_loc_descr_ref
29928 copy_deref_exprloc (dw_loc_descr_ref expr)
29929 {
29930 dw_loc_descr_ref tail = NULL;
29931
29932 if (expr == NULL)
29933 return NULL;
29934
29935 dw_loc_descr_ref l = non_dwarf_expression (expr);
29936 if (l && l->dw_loc_next)
29937 return NULL;
29938
29939 if (l)
29940 {
29941 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29942 tail = new_loc_descr ((enum dwarf_location_atom)
29943 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
29944 0, 0);
29945 else
29946 switch (l->dw_loc_opc)
29947 {
29948 case DW_OP_regx:
29949 tail = new_loc_descr (DW_OP_bregx,
29950 l->dw_loc_oprnd1.v.val_unsigned, 0);
29951 break;
29952 case DW_OP_stack_value:
29953 break;
29954 default:
29955 return NULL;
29956 }
29957 }
29958 else
29959 tail = new_loc_descr (DW_OP_deref, 0, 0);
29960
29961 dw_loc_descr_ref ret = NULL, *p = &ret;
29962 while (expr != l)
29963 {
29964 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29965 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29966 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29967 p = &(*p)->dw_loc_next;
29968 expr = expr->dw_loc_next;
29969 }
29970 *p = tail;
29971 return ret;
29972 }
29973
29974 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
29975    reference to a variable or argument, adjust it if needed and return:
29976    -1 if the DW_AT_string_length attribute (and, if present, the
29977    DW_AT_{string_length_,}byte_size attribute) should be removed;
29978    0 if the attribute should be kept, perhaps with minor modifications
29979    (no need to rescan); 1 if the attribute has been successfully adjusted. */
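/* In practice this shows up e.g. for Fortran deferred-length strings,
   where DW_AT_string_length points at an artificial length variable.
   The adjustments below either turn the DW_OP_GNU_variable_value
   reference into a standard DW_OP_call4 (possibly with DW_OP_deref),
   into a direct DIE reference for DWARF 5, or inline a copy of the
   referenced variable's location with a DW_OP_deref appended.  */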
29980
29981 static int
29982 optimize_string_length (dw_attr_node *a)
29983 {
29984 dw_loc_descr_ref l = AT_loc (a), lv;
29985 dw_die_ref die;
29986 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29987 {
29988 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29989 die = lookup_decl_die (decl);
29990 if (die)
29991 {
29992 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29993 l->dw_loc_oprnd1.v.val_die_ref.die = die;
29994 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29995 }
29996 else
29997 return -1;
29998 }
29999 else
30000 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30001
30002 /* DWARF5 allows reference class, so we can then reference the DIE.
30003 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30004 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30005 {
30006 a->dw_attr_val.val_class = dw_val_class_die_ref;
30007 a->dw_attr_val.val_entry = NULL;
30008 a->dw_attr_val.v.val_die_ref.die = die;
30009 a->dw_attr_val.v.val_die_ref.external = 0;
30010 return 0;
30011 }
30012
30013 dw_attr_node *av = get_AT (die, DW_AT_location);
30014 dw_loc_list_ref d;
30015 bool non_dwarf_expr = false;
30016
30017 if (av == NULL)
30018 return dwarf_strict ? -1 : 0;
30019 switch (AT_class (av))
30020 {
30021 case dw_val_class_loc_list:
30022 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30023 if (d->expr && non_dwarf_expression (d->expr))
30024 non_dwarf_expr = true;
30025 break;
30026 case dw_val_class_view_list:
30027 gcc_unreachable ();
30028 case dw_val_class_loc:
30029 lv = AT_loc (av);
30030 if (lv == NULL)
30031 return dwarf_strict ? -1 : 0;
30032 if (non_dwarf_expression (lv))
30033 non_dwarf_expr = true;
30034 break;
30035 default:
30036 return dwarf_strict ? -1 : 0;
30037 }
30038
30039 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30040 into DW_OP_call4 or DW_OP_GNU_variable_value into
30041 DW_OP_call4 DW_OP_deref, do so. */
30042 if (!non_dwarf_expr
30043 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30044 {
30045 l->dw_loc_opc = DW_OP_call4;
30046 if (l->dw_loc_next)
30047 l->dw_loc_next = NULL;
30048 else
30049 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30050 return 0;
30051 }
30052
30053 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30054 copy over the DW_AT_location attribute from die to a. */
30055 if (l->dw_loc_next != NULL)
30056 {
30057 a->dw_attr_val = av->dw_attr_val;
30058 return 1;
30059 }
30060
30061 dw_loc_list_ref list, *p;
30062 switch (AT_class (av))
30063 {
30064 case dw_val_class_loc_list:
30065 p = &list;
30066 list = NULL;
30067 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30068 {
30069 lv = copy_deref_exprloc (d->expr);
30070 if (lv)
30071 {
30072 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30073 p = &(*p)->dw_loc_next;
30074 }
30075 else if (!dwarf_strict && d->expr)
30076 return 0;
30077 }
30078 if (list == NULL)
30079 return dwarf_strict ? -1 : 0;
30080 a->dw_attr_val.val_class = dw_val_class_loc_list;
30081 gen_llsym (list);
30082 *AT_loc_list_ptr (a) = list;
30083 return 1;
30084 case dw_val_class_loc:
30085 lv = copy_deref_exprloc (AT_loc (av));
30086 if (lv == NULL)
30087 return dwarf_strict ? -1 : 0;
30088 a->dw_attr_val.v.val_loc = lv;
30089 return 1;
30090 default:
30091 gcc_unreachable ();
30092 }
30093 }
30094
30095 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30096    an address in the .rodata section if the string literal is emitted
30097    there; if it isn't found in .rodata, remove the containing location
30098    list or replace DW_AT_const_value with DW_AT_location and an empty
30099    location expression. Similarly for SYMBOL_REFs, keep only those that
30100    refer to something that has been emitted in the current CU. */
30101
30102 static void
30103 resolve_addr (dw_die_ref die)
30104 {
30105 dw_die_ref c;
30106 dw_attr_node *a;
30107 dw_loc_list_ref *curr, *start, loc;
30108 unsigned ix;
30109 bool remove_AT_byte_size = false;
30110
30111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30112 switch (AT_class (a))
30113 {
30114 case dw_val_class_loc_list:
30115 start = curr = AT_loc_list_ptr (a);
30116 loc = *curr;
30117 gcc_assert (loc);
30118 /* The same list can be referenced more than once. See if we have
30119 already recorded the result from a previous pass. */
30120 if (loc->replaced)
30121 *curr = loc->dw_loc_next;
30122 else if (!loc->resolved_addr)
30123 {
30124 /* As things stand, we do not expect or allow one die to
30125 reference a suffix of another die's location list chain.
30126 References must be identical or completely separate.
30127 There is therefore no need to cache the result of this
30128 pass on any list other than the first; doing so
30129 would lead to unnecessary writes. */
30130 while (*curr)
30131 {
30132 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30133 if (!resolve_addr_in_expr (a, (*curr)->expr))
30134 {
30135 dw_loc_list_ref next = (*curr)->dw_loc_next;
30136 dw_loc_descr_ref l = (*curr)->expr;
30137
30138 if (next && (*curr)->ll_symbol)
30139 {
30140 gcc_assert (!next->ll_symbol);
30141 next->ll_symbol = (*curr)->ll_symbol;
30142 next->vl_symbol = (*curr)->vl_symbol;
30143 }
30144 if (dwarf_split_debug_info)
30145 remove_loc_list_addr_table_entries (l);
30146 *curr = next;
30147 }
30148 else
30149 {
30150 mark_base_types ((*curr)->expr);
30151 curr = &(*curr)->dw_loc_next;
30152 }
30153 }
30154 if (loc == *start)
30155 loc->resolved_addr = 1;
30156 else
30157 {
30158 loc->replaced = 1;
30159 loc->dw_loc_next = *start;
30160 }
30161 }
30162 if (!*start)
30163 {
30164 remove_AT (die, a->dw_attr);
30165 ix--;
30166 }
30167 break;
30168 case dw_val_class_view_list:
30169 {
30170 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30171 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30172 dw_val_node *llnode
30173 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30174 /* If we no longer have a loclist, or it no longer needs
30175 views, drop this attribute. */
30176 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30177 {
30178 remove_AT (die, a->dw_attr);
30179 ix--;
30180 }
30181 break;
30182 }
30183 case dw_val_class_loc:
30184 {
30185 dw_loc_descr_ref l = AT_loc (a);
30186 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30187 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30188 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30189 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30190 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30191 with DW_FORM_ref referencing the same DIE as
30192 DW_OP_GNU_variable_value used to reference. */
30193 if (a->dw_attr == DW_AT_string_length
30194 && l
30195 && l->dw_loc_opc == DW_OP_GNU_variable_value
30196 && (l->dw_loc_next == NULL
30197 || (l->dw_loc_next->dw_loc_next == NULL
30198 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30199 {
30200 switch (optimize_string_length (a))
30201 {
30202 case -1:
30203 remove_AT (die, a->dw_attr);
30204 ix--;
30205 /* If we drop DW_AT_string_length, we need to drop also
30206 DW_AT_{string_length_,}byte_size. */
30207 remove_AT_byte_size = true;
30208 continue;
30209 default:
30210 break;
30211 case 1:
30212 /* Even if we keep the optimized DW_AT_string_length,
30213 it might have changed AT_class, so process it again. */
30214 ix--;
30215 continue;
30216 }
30217 }
30218 /* For -gdwarf-2 don't attempt to optimize
30219 DW_AT_data_member_location containing
30220 DW_OP_plus_uconst - older consumers might
30221 rely on it being that op instead of a more complex,
30222 but shorter, location description. */
30223 if ((dwarf_version > 2
30224 || a->dw_attr != DW_AT_data_member_location
30225 || l == NULL
30226 || l->dw_loc_opc != DW_OP_plus_uconst
30227 || l->dw_loc_next != NULL)
30228 && !resolve_addr_in_expr (a, l))
30229 {
30230 if (dwarf_split_debug_info)
30231 remove_loc_list_addr_table_entries (l);
30232 if (l != NULL
30233 && l->dw_loc_next == NULL
30234 && l->dw_loc_opc == DW_OP_addr
30235 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30236 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30237 && a->dw_attr == DW_AT_location)
30238 {
30239 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30240 remove_AT (die, a->dw_attr);
30241 ix--;
30242 optimize_location_into_implicit_ptr (die, decl);
30243 break;
30244 }
30245 if (a->dw_attr == DW_AT_string_length)
30246 /* If we drop DW_AT_string_length, we need to drop also
30247 DW_AT_{string_length_,}byte_size. */
30248 remove_AT_byte_size = true;
30249 remove_AT (die, a->dw_attr);
30250 ix--;
30251 }
30252 else
30253 mark_base_types (l);
30254 }
30255 break;
30256 case dw_val_class_addr:
30257 if (a->dw_attr == DW_AT_const_value
30258 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30259 {
30260 if (AT_index (a) != NOT_INDEXED)
30261 remove_addr_table_entry (a->dw_attr_val.val_entry);
30262 remove_AT (die, a->dw_attr);
30263 ix--;
30264 }
30265 if ((die->die_tag == DW_TAG_call_site
30266 && a->dw_attr == DW_AT_call_origin)
30267 || (die->die_tag == DW_TAG_GNU_call_site
30268 && a->dw_attr == DW_AT_abstract_origin))
30269 {
30270 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30271 dw_die_ref tdie = lookup_decl_die (tdecl);
30272 dw_die_ref cdie;
30273 if (tdie == NULL
30274 && DECL_EXTERNAL (tdecl)
30275 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30276 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30277 {
30278 dw_die_ref pdie = cdie;
30279 /* Make sure we don't add these DIEs into type units.
30280 We could emit skeleton DIEs for context (namespaces,
30281 outer structs/classes) and a skeleton DIE for the
30282 innermost context with DW_AT_signature pointing to the
30283 type unit. See PR78835. */
30284 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30285 pdie = pdie->die_parent;
30286 if (pdie == NULL)
30287 {
30288 /* Creating a full DIE for tdecl is overly expensive and
30289 at this point even wrong when in the LTO phase
30290 as it can end up generating new type DIEs we didn't
30291 output and thus optimize_external_refs will crash. */
30292 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30293 add_AT_flag (tdie, DW_AT_external, 1);
30294 add_AT_flag (tdie, DW_AT_declaration, 1);
30295 add_linkage_attr (tdie, tdecl);
30296 add_name_and_src_coords_attributes (tdie, tdecl, true);
30297 equate_decl_number_to_die (tdecl, tdie);
30298 }
30299 }
30300 if (tdie)
30301 {
30302 a->dw_attr_val.val_class = dw_val_class_die_ref;
30303 a->dw_attr_val.v.val_die_ref.die = tdie;
30304 a->dw_attr_val.v.val_die_ref.external = 0;
30305 }
30306 else
30307 {
30308 if (AT_index (a) != NOT_INDEXED)
30309 remove_addr_table_entry (a->dw_attr_val.val_entry);
30310 remove_AT (die, a->dw_attr);
30311 ix--;
30312 }
30313 }
30314 break;
30315 default:
30316 break;
30317 }
30318
30319 if (remove_AT_byte_size)
30320 remove_AT (die, dwarf_version >= 5
30321 ? DW_AT_string_length_byte_size
30322 : DW_AT_byte_size);
30323
30324 FOR_EACH_CHILD (die, c, resolve_addr (c));
30325 }
30326 \f
30327 /* Helper routines for optimize_location_lists.
30328    This pass tries to share identical location lists in the .debug_loc
30329    section. */
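/* For example, when several variables (or several inlined copies of the
   same parameter) end up with byte-for-byte identical location lists,
   only one copy needs to be emitted in .debug_loc and every
   DW_AT_location attribute can point at it, which can shrink the
   section noticeably.  */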
30330
30331 /* Iteratively hash operands of LOC opcode into HSTATE. */
30332
30333 static void
30334 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30335 {
30336 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30337 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30338
30339 switch (loc->dw_loc_opc)
30340 {
30341 case DW_OP_const4u:
30342 case DW_OP_const8u:
30343 if (loc->dtprel)
30344 goto hash_addr;
30345 /* FALLTHRU */
30346 case DW_OP_const1u:
30347 case DW_OP_const1s:
30348 case DW_OP_const2u:
30349 case DW_OP_const2s:
30350 case DW_OP_const4s:
30351 case DW_OP_const8s:
30352 case DW_OP_constu:
30353 case DW_OP_consts:
30354 case DW_OP_pick:
30355 case DW_OP_plus_uconst:
30356 case DW_OP_breg0:
30357 case DW_OP_breg1:
30358 case DW_OP_breg2:
30359 case DW_OP_breg3:
30360 case DW_OP_breg4:
30361 case DW_OP_breg5:
30362 case DW_OP_breg6:
30363 case DW_OP_breg7:
30364 case DW_OP_breg8:
30365 case DW_OP_breg9:
30366 case DW_OP_breg10:
30367 case DW_OP_breg11:
30368 case DW_OP_breg12:
30369 case DW_OP_breg13:
30370 case DW_OP_breg14:
30371 case DW_OP_breg15:
30372 case DW_OP_breg16:
30373 case DW_OP_breg17:
30374 case DW_OP_breg18:
30375 case DW_OP_breg19:
30376 case DW_OP_breg20:
30377 case DW_OP_breg21:
30378 case DW_OP_breg22:
30379 case DW_OP_breg23:
30380 case DW_OP_breg24:
30381 case DW_OP_breg25:
30382 case DW_OP_breg26:
30383 case DW_OP_breg27:
30384 case DW_OP_breg28:
30385 case DW_OP_breg29:
30386 case DW_OP_breg30:
30387 case DW_OP_breg31:
30388 case DW_OP_regx:
30389 case DW_OP_fbreg:
30390 case DW_OP_piece:
30391 case DW_OP_deref_size:
30392 case DW_OP_xderef_size:
30393 hstate.add_object (val1->v.val_int);
30394 break;
30395 case DW_OP_skip:
30396 case DW_OP_bra:
30397 {
30398 int offset;
30399
30400 gcc_assert (val1->val_class == dw_val_class_loc);
30401 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30402 hstate.add_object (offset);
30403 }
30404 break;
30405 case DW_OP_implicit_value:
30406 hstate.add_object (val1->v.val_unsigned);
30407 switch (val2->val_class)
30408 {
30409 case dw_val_class_const:
30410 hstate.add_object (val2->v.val_int);
30411 break;
30412 case dw_val_class_vec:
30413 {
30414 unsigned int elt_size = val2->v.val_vec.elt_size;
30415 unsigned int len = val2->v.val_vec.length;
30416
30417 hstate.add_int (elt_size);
30418 hstate.add_int (len);
30419 hstate.add (val2->v.val_vec.array, len * elt_size);
30420 }
30421 break;
30422 case dw_val_class_const_double:
30423 hstate.add_object (val2->v.val_double.low);
30424 hstate.add_object (val2->v.val_double.high);
30425 break;
30426 case dw_val_class_wide_int:
30427 hstate.add (val2->v.val_wide->get_val (),
30428 get_full_len (*val2->v.val_wide)
30429 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30430 break;
30431 case dw_val_class_addr:
30432 inchash::add_rtx (val2->v.val_addr, hstate);
30433 break;
30434 default:
30435 gcc_unreachable ();
30436 }
30437 break;
30438 case DW_OP_bregx:
30439 case DW_OP_bit_piece:
30440 hstate.add_object (val1->v.val_int);
30441 hstate.add_object (val2->v.val_int);
30442 break;
30443 case DW_OP_addr:
30444 hash_addr:
30445 if (loc->dtprel)
30446 {
30447 unsigned char dtprel = 0xd1;
30448 hstate.add_object (dtprel);
30449 }
30450 inchash::add_rtx (val1->v.val_addr, hstate);
30451 break;
30452 case DW_OP_GNU_addr_index:
30453 case DW_OP_GNU_const_index:
30454 {
30455 if (loc->dtprel)
30456 {
30457 unsigned char dtprel = 0xd1;
30458 hstate.add_object (dtprel);
30459 }
30460 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30461 }
30462 break;
30463 case DW_OP_implicit_pointer:
30464 case DW_OP_GNU_implicit_pointer:
30465 hstate.add_int (val2->v.val_int);
30466 break;
30467 case DW_OP_entry_value:
30468 case DW_OP_GNU_entry_value:
30469 hstate.add_object (val1->v.val_loc);
30470 break;
30471 case DW_OP_regval_type:
30472 case DW_OP_deref_type:
30473 case DW_OP_GNU_regval_type:
30474 case DW_OP_GNU_deref_type:
30475 {
30476 unsigned int byte_size
30477 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30478 unsigned int encoding
30479 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30480 hstate.add_object (val1->v.val_int);
30481 hstate.add_object (byte_size);
30482 hstate.add_object (encoding);
30483 }
30484 break;
30485 case DW_OP_convert:
30486 case DW_OP_reinterpret:
30487 case DW_OP_GNU_convert:
30488 case DW_OP_GNU_reinterpret:
30489 if (val1->val_class == dw_val_class_unsigned_const)
30490 {
30491 hstate.add_object (val1->v.val_unsigned);
30492 break;
30493 }
30494 /* FALLTHRU */
30495 case DW_OP_const_type:
30496 case DW_OP_GNU_const_type:
30497 {
30498 unsigned int byte_size
30499 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30500 unsigned int encoding
30501 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30502 hstate.add_object (byte_size);
30503 hstate.add_object (encoding);
30504 if (loc->dw_loc_opc != DW_OP_const_type
30505 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30506 break;
30507 hstate.add_object (val2->val_class);
30508 switch (val2->val_class)
30509 {
30510 case dw_val_class_const:
30511 hstate.add_object (val2->v.val_int);
30512 break;
30513 case dw_val_class_vec:
30514 {
30515 unsigned int elt_size = val2->v.val_vec.elt_size;
30516 unsigned int len = val2->v.val_vec.length;
30517
30518 hstate.add_object (elt_size);
30519 hstate.add_object (len);
30520 hstate.add (val2->v.val_vec.array, len * elt_size);
30521 }
30522 break;
30523 case dw_val_class_const_double:
30524 hstate.add_object (val2->v.val_double.low);
30525 hstate.add_object (val2->v.val_double.high);
30526 break;
30527 case dw_val_class_wide_int:
30528 hstate.add (val2->v.val_wide->get_val (),
30529 get_full_len (*val2->v.val_wide)
30530 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30531 break;
30532 default:
30533 gcc_unreachable ();
30534 }
30535 }
30536 break;
30537
30538 default:
30539 /* Other codes have no operands. */
30540 break;
30541 }
30542 }
30543
30544 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30545
30546 static inline void
30547 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30548 {
30549 dw_loc_descr_ref l;
30550 bool sizes_computed = false;
30551 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30552 size_of_locs (loc);
30553
30554 for (l = loc; l != NULL; l = l->dw_loc_next)
30555 {
30556 enum dwarf_location_atom opc = l->dw_loc_opc;
30557 hstate.add_object (opc);
30558 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30559 {
30560 size_of_locs (loc);
30561 sizes_computed = true;
30562 }
30563 hash_loc_operands (l, hstate);
30564 }
30565 }
30566
30567 /* Compute hash of the whole location list LIST_HEAD. */
30568
30569 static inline void
30570 hash_loc_list (dw_loc_list_ref list_head)
30571 {
30572 dw_loc_list_ref curr = list_head;
30573 inchash::hash hstate;
30574
30575 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30576 {
30577 hstate.add (curr->begin, strlen (curr->begin) + 1);
30578 hstate.add (curr->end, strlen (curr->end) + 1);
30579 hstate.add_object (curr->vbegin);
30580 hstate.add_object (curr->vend);
30581 if (curr->section)
30582 hstate.add (curr->section, strlen (curr->section) + 1);
30583 hash_locs (curr->expr, hstate);
30584 }
30585 list_head->hash = hstate.end ();
30586 }
30587
30588 /* Return true if X and Y opcodes have the same operands. */
30589
30590 static inline bool
30591 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30592 {
30593 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30594 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30595 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30596 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30597
30598 switch (x->dw_loc_opc)
30599 {
30600 case DW_OP_const4u:
30601 case DW_OP_const8u:
30602 if (x->dtprel)
30603 goto hash_addr;
30604 /* FALLTHRU */
30605 case DW_OP_const1u:
30606 case DW_OP_const1s:
30607 case DW_OP_const2u:
30608 case DW_OP_const2s:
30609 case DW_OP_const4s:
30610 case DW_OP_const8s:
30611 case DW_OP_constu:
30612 case DW_OP_consts:
30613 case DW_OP_pick:
30614 case DW_OP_plus_uconst:
30615 case DW_OP_breg0:
30616 case DW_OP_breg1:
30617 case DW_OP_breg2:
30618 case DW_OP_breg3:
30619 case DW_OP_breg4:
30620 case DW_OP_breg5:
30621 case DW_OP_breg6:
30622 case DW_OP_breg7:
30623 case DW_OP_breg8:
30624 case DW_OP_breg9:
30625 case DW_OP_breg10:
30626 case DW_OP_breg11:
30627 case DW_OP_breg12:
30628 case DW_OP_breg13:
30629 case DW_OP_breg14:
30630 case DW_OP_breg15:
30631 case DW_OP_breg16:
30632 case DW_OP_breg17:
30633 case DW_OP_breg18:
30634 case DW_OP_breg19:
30635 case DW_OP_breg20:
30636 case DW_OP_breg21:
30637 case DW_OP_breg22:
30638 case DW_OP_breg23:
30639 case DW_OP_breg24:
30640 case DW_OP_breg25:
30641 case DW_OP_breg26:
30642 case DW_OP_breg27:
30643 case DW_OP_breg28:
30644 case DW_OP_breg29:
30645 case DW_OP_breg30:
30646 case DW_OP_breg31:
30647 case DW_OP_regx:
30648 case DW_OP_fbreg:
30649 case DW_OP_piece:
30650 case DW_OP_deref_size:
30651 case DW_OP_xderef_size:
30652 return valx1->v.val_int == valy1->v.val_int;
30653 case DW_OP_skip:
30654 case DW_OP_bra:
30655 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30656 can cause irrelevant differences in dw_loc_addr. */
30657 gcc_assert (valx1->val_class == dw_val_class_loc
30658 && valy1->val_class == dw_val_class_loc
30659 && (dwarf_split_debug_info
30660 || x->dw_loc_addr == y->dw_loc_addr));
30661 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30662 case DW_OP_implicit_value:
30663 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30664 || valx2->val_class != valy2->val_class)
30665 return false;
30666 switch (valx2->val_class)
30667 {
30668 case dw_val_class_const:
30669 return valx2->v.val_int == valy2->v.val_int;
30670 case dw_val_class_vec:
30671 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30672 && valx2->v.val_vec.length == valy2->v.val_vec.length
30673 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30674 valx2->v.val_vec.elt_size
30675 * valx2->v.val_vec.length) == 0;
30676 case dw_val_class_const_double:
30677 return valx2->v.val_double.low == valy2->v.val_double.low
30678 && valx2->v.val_double.high == valy2->v.val_double.high;
30679 case dw_val_class_wide_int:
30680 return *valx2->v.val_wide == *valy2->v.val_wide;
30681 case dw_val_class_addr:
30682 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30683 default:
30684 gcc_unreachable ();
30685 }
30686 case DW_OP_bregx:
30687 case DW_OP_bit_piece:
30688 return valx1->v.val_int == valy1->v.val_int
30689 && valx2->v.val_int == valy2->v.val_int;
30690 case DW_OP_addr:
30691 hash_addr:
30692 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30693 case DW_OP_GNU_addr_index:
30694 case DW_OP_GNU_const_index:
30695 {
30696 rtx ax1 = valx1->val_entry->addr.rtl;
30697 rtx ay1 = valy1->val_entry->addr.rtl;
30698 return rtx_equal_p (ax1, ay1);
30699 }
30700 case DW_OP_implicit_pointer:
30701 case DW_OP_GNU_implicit_pointer:
30702 return valx1->val_class == dw_val_class_die_ref
30703 && valx1->val_class == valy1->val_class
30704 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30705 && valx2->v.val_int == valy2->v.val_int;
30706 case DW_OP_entry_value:
30707 case DW_OP_GNU_entry_value:
30708 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30709 case DW_OP_const_type:
30710 case DW_OP_GNU_const_type:
30711 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30712 || valx2->val_class != valy2->val_class)
30713 return false;
30714 switch (valx2->val_class)
30715 {
30716 case dw_val_class_const:
30717 return valx2->v.val_int == valy2->v.val_int;
30718 case dw_val_class_vec:
30719 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30720 && valx2->v.val_vec.length == valy2->v.val_vec.length
30721 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30722 valx2->v.val_vec.elt_size
30723 * valx2->v.val_vec.length) == 0;
30724 case dw_val_class_const_double:
30725 return valx2->v.val_double.low == valy2->v.val_double.low
30726 && valx2->v.val_double.high == valy2->v.val_double.high;
30727 case dw_val_class_wide_int:
30728 return *valx2->v.val_wide == *valy2->v.val_wide;
30729 default:
30730 gcc_unreachable ();
30731 }
30732 case DW_OP_regval_type:
30733 case DW_OP_deref_type:
30734 case DW_OP_GNU_regval_type:
30735 case DW_OP_GNU_deref_type:
30736 return valx1->v.val_int == valy1->v.val_int
30737 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30738 case DW_OP_convert:
30739 case DW_OP_reinterpret:
30740 case DW_OP_GNU_convert:
30741 case DW_OP_GNU_reinterpret:
30742 if (valx1->val_class != valy1->val_class)
30743 return false;
30744 if (valx1->val_class == dw_val_class_unsigned_const)
30745 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30746 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30747 case DW_OP_GNU_parameter_ref:
30748 return valx1->val_class == dw_val_class_die_ref
30749 && valx1->val_class == valy1->val_class
30750 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30751 default:
30752 /* Other codes have no operands. */
30753 return true;
30754 }
30755 }
30756
30757 /* Return true if DWARF location expressions X and Y are the same. */
30758
30759 static inline bool
30760 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30761 {
30762 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30763 if (x->dw_loc_opc != y->dw_loc_opc
30764 || x->dtprel != y->dtprel
30765 || !compare_loc_operands (x, y))
30766 break;
30767 return x == NULL && y == NULL;
30768 }
30769
30770 /* Hashtable helpers. */
30771
30772 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30773 {
30774 static inline hashval_t hash (const dw_loc_list_struct *);
30775 static inline bool equal (const dw_loc_list_struct *,
30776 const dw_loc_list_struct *);
30777 };
30778
30779 /* Return precomputed hash of location list X. */
30780
30781 inline hashval_t
30782 loc_list_hasher::hash (const dw_loc_list_struct *x)
30783 {
30784 return x->hash;
30785 }
30786
30787 /* Return true if location lists A and B are the same. */
30788
30789 inline bool
30790 loc_list_hasher::equal (const dw_loc_list_struct *a,
30791 const dw_loc_list_struct *b)
30792 {
30793 if (a == b)
30794 return 1;
30795 if (a->hash != b->hash)
30796 return 0;
30797 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30798 if (strcmp (a->begin, b->begin) != 0
30799 || strcmp (a->end, b->end) != 0
30800 || (a->section == NULL) != (b->section == NULL)
30801 || (a->section && strcmp (a->section, b->section) != 0)
30802 || a->vbegin != b->vbegin || a->vend != b->vend
30803 || !compare_locs (a->expr, b->expr))
30804 break;
30805 return a == NULL && b == NULL;
30806 }
30807
30808 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30809
30810
30811 /* Recursively optimize location lists referenced from DIE
30812 children and share them whenever possible. */
30813
30814 static void
30815 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30816 {
30817 dw_die_ref c;
30818 dw_attr_node *a;
30819 unsigned ix;
30820 dw_loc_list_struct **slot;
30821 bool drop_locviews = false;
30822 bool has_locviews = false;
30823
30824 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30825 if (AT_class (a) == dw_val_class_loc_list)
30826 {
30827 dw_loc_list_ref list = AT_loc_list (a);
30828 /* TODO: perform some optimizations here, before hashing
30829 it and storing into the hash table. */
30830 hash_loc_list (list);
30831 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30832 if (*slot == NULL)
30833 {
30834 *slot = list;
30835 if (loc_list_has_views (list))
30836 gcc_assert (list->vl_symbol);
30837 else if (list->vl_symbol)
30838 {
30839 drop_locviews = true;
30840 list->vl_symbol = NULL;
30841 }
30842 }
30843 else
30844 {
30845 if (list->vl_symbol && !(*slot)->vl_symbol)
30846 drop_locviews = true;
30847 a->dw_attr_val.v.val_loc_list = *slot;
30848 }
30849 }
30850 else if (AT_class (a) == dw_val_class_view_list)
30851 {
30852 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30853 has_locviews = true;
30854 }
30855
30856
30857 if (drop_locviews && has_locviews)
30858 remove_AT (die, DW_AT_GNU_locviews);
30859
30860 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30861 }
30862
30863
30864 /* Recursively assign each location list a unique index into the debug_addr
30865 section. */
30866
30867 static void
30868 index_location_lists (dw_die_ref die)
30869 {
30870 dw_die_ref c;
30871 dw_attr_node *a;
30872 unsigned ix;
30873
30874 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30875 if (AT_class (a) == dw_val_class_loc_list)
30876 {
30877 dw_loc_list_ref list = AT_loc_list (a);
30878 dw_loc_list_ref curr;
30879 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30880 {
30881 /* Don't index an entry that has already been indexed
30882 or won't be output. */
30883 if (curr->begin_entry != NULL
30884 || skip_loc_list_entry (curr))
30885 continue;
30886
30887 curr->begin_entry
30888 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30889 }
30890 }
30891
30892 FOR_EACH_CHILD (die, c, index_location_lists (c));
30893 }
30894
30895 /* Optimize location lists referenced from DIE and its children,
30896 sharing them whenever possible. */
30897
30898 static void
30899 optimize_location_lists (dw_die_ref die)
30900 {
30901 loc_list_hash_type htab (500);
30902 optimize_location_lists_1 (die, &htab);
30903 }
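/* Editorial sketch (not part of the original source): the sharing done by
   optimize_location_lists_1 is a hash-consing idiom -- the first list with
   a given hash/equality key becomes the canonical copy stored in the table,
   and later duplicates are redirected to it.  A minimal stand-alone version
   using the C++ standard library (instead of GCC's hash_table) might look
   like this; `canonicalize' and the std::string key are assumptions made
   purely for illustration.  */
#if 0
#include <string>
#include <unordered_map>

template <typename T>
T *
canonicalize (std::unordered_map<std::string, T *> &table,
	      const std::string &key, T *candidate)
{
  /* emplace only inserts when the key is new, mirroring the
     "if (*slot == NULL) *slot = list; else reuse *slot" logic above.  */
  auto result = table.emplace (key, candidate);
  return result.first->second;
}
#endif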
30904 \f
30905 /* Traverse the limbo die list, and add parent/child links. The only
30906 dies without parents that should be here are concrete instances of
30907 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30908 For concrete instances, we can get the parent die from the abstract
30909 instance. */
30910
30911 static void
30912 flush_limbo_die_list (void)
30913 {
30914 limbo_die_node *node;
30915
30916 /* get_context_die calls force_decl_die, which can put new DIEs on the
30917 limbo list in LTO mode when nested functions are put in a different
30918 partition than that of their parent function. */
30919 while ((node = limbo_die_list))
30920 {
30921 dw_die_ref die = node->die;
30922 limbo_die_list = node->next;
30923
30924 if (die->die_parent == NULL)
30925 {
30926 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30927
30928 if (origin && origin->die_parent)
30929 add_child_die (origin->die_parent, die);
30930 else if (is_cu_die (die))
30931 ;
30932 else if (seen_error ())
30933 /* It's OK to be confused by errors in the input. */
30934 add_child_die (comp_unit_die (), die);
30935 else
30936 {
30937 /* In certain situations, the lexical block containing a
30938 nested function can be optimized away, which results
30939 in the nested function die being orphaned. Likewise
30940 with the return type of that nested function. Force
30941 this to be a child of the containing function.
30942
30943 It may happen that even the containing function got fully
30944 inlined and optimized out. In that case we are lost and
30945 assign the empty child. This should not be big issue as
30946 the function is likely unreachable too. */
30947 gcc_assert (node->created_for);
30948
30949 if (DECL_P (node->created_for))
30950 origin = get_context_die (DECL_CONTEXT (node->created_for));
30951 else if (TYPE_P (node->created_for))
30952 origin = scope_die_for (node->created_for, comp_unit_die ());
30953 else
30954 origin = comp_unit_die ();
30955
30956 add_child_die (origin, die);
30957 }
30958 }
30959 }
30960 }
30961
30962 /* Reset DIEs so we can output them again. */
30963
30964 static void
30965 reset_dies (dw_die_ref die)
30966 {
30967 dw_die_ref c;
30968
30969 /* Remove stuff we re-generate. */
30970 die->die_mark = 0;
30971 die->die_offset = 0;
30972 die->die_abbrev = 0;
30973 remove_AT (die, DW_AT_sibling);
30974
30975 FOR_EACH_CHILD (die, c, reset_dies (c));
30976 }
30977
30978 /* Output stuff that dwarf requires at the end of every file,
30979 and generate the DWARF-2 debugging info. */
30980
30981 static void
30982 dwarf2out_finish (const char *)
30983 {
30984 comdat_type_node *ctnode;
30985 dw_die_ref main_comp_unit_die;
30986 unsigned char checksum[16];
30987 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
30988
30989 /* Flush out any latecomers to the limbo party. */
30990 flush_limbo_die_list ();
30991
30992 if (inline_entry_data_table)
30993 gcc_assert (inline_entry_data_table->elements () == 0);
30994
30995 if (flag_checking)
30996 {
30997 verify_die (comp_unit_die ());
30998 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30999 verify_die (node->die);
31000 }
31001
31002 /* We shouldn't have any symbols with delayed asm names for
31003 DIEs generated after early finish. */
31004 gcc_assert (deferred_asm_name == NULL);
31005
31006 gen_remaining_tmpl_value_param_die_attribute ();
31007
31008 if (flag_generate_lto || flag_generate_offload)
31009 {
31010 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31011
31012 /* Prune stuff so that dwarf2out_finish runs successfully
31013 for the fat part of the object. */
31014 reset_dies (comp_unit_die ());
31015 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31016 reset_dies (node->die);
31017
31018 hash_table<comdat_type_hasher> comdat_type_table (100);
31019 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31020 {
31021 comdat_type_node **slot
31022 = comdat_type_table.find_slot (ctnode, INSERT);
31023
31024 /* Don't reset types twice. */
31025 if (*slot != HTAB_EMPTY_ENTRY)
31026 continue;
31027
31028 /* Reset the comdat type unit DIE so that it can be output
31029 again for the fat part of the object; the early LTO output
31030 has already marked it and assigned offsets. */
31031 if (debug_info_level >= DINFO_LEVEL_TERSE)
31032 reset_dies (ctnode->root_die);
31033
31034 *slot = ctnode;
31035 }
31036
31037 /* Reset die CU symbol so we don't output it twice. */
31038 comp_unit_die ()->die_id.die_symbol = NULL;
31039
31040 /* Remove DW_AT_macro from the early output. */
31041 if (have_macinfo)
31042 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31043
31044 /* Remove indirect string decisions. */
31045 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31046 }
31047
31048 #if ENABLE_ASSERT_CHECKING
31049 {
31050 dw_die_ref die = comp_unit_die (), c;
31051 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31052 }
31053 #endif
31054 resolve_addr (comp_unit_die ());
31055 move_marked_base_types ();
31056
31057 /* Initialize sections and labels used for actual assembler output. */
31058 unsigned generation = init_sections_and_labels (false);
31059
31060 /* Traverse the DIEs and add sibling attributes to those DIEs that
31061 have children. */
31062 add_sibling_attributes (comp_unit_die ());
31063 limbo_die_node *node;
31064 for (node = cu_die_list; node; node = node->next)
31065 add_sibling_attributes (node->die);
31066 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31067 add_sibling_attributes (ctnode->root_die);
31068
31069 /* When splitting DWARF info, we put some attributes in the
31070 skeleton compile_unit DIE that remains in the .o, while
31071 most attributes go in the DWO compile_unit_die. */
31072 if (dwarf_split_debug_info)
31073 {
31074 limbo_die_node *cu;
31075 main_comp_unit_die = gen_compile_unit_die (NULL);
31076 if (dwarf_version >= 5)
31077 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31078 cu = limbo_die_list;
31079 gcc_assert (cu->die == main_comp_unit_die);
31080 limbo_die_list = limbo_die_list->next;
31081 cu->next = cu_die_list;
31082 cu_die_list = cu;
31083 }
31084 else
31085 main_comp_unit_die = comp_unit_die ();
31086
31087 /* Output a terminator label for the .text section. */
31088 switch_to_section (text_section);
31089 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31090 if (cold_text_section)
31091 {
31092 switch_to_section (cold_text_section);
31093 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31094 }
31095
31096 /* We can only use the low/high_pc attributes if all of the code was
31097 in .text. */
31098 if (!have_multiple_function_sections
31099 || (dwarf_version < 3 && dwarf_strict))
31100 {
31101 /* Don't add if the CU has no associated code. */
31102 if (text_section_used)
31103 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31104 text_end_label, true);
31105 }
31106 else
31107 {
31108 unsigned fde_idx;
31109 dw_fde_ref fde;
31110 bool range_list_added = false;
31111
31112 if (text_section_used)
31113 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31114 text_end_label, &range_list_added, true);
31115 if (cold_text_section_used)
31116 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31117 cold_end_label, &range_list_added, true);
31118
31119 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31120 {
31121 if (DECL_IGNORED_P (fde->decl))
31122 continue;
31123 if (!fde->in_std_section)
31124 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31125 fde->dw_fde_end, &range_list_added,
31126 true);
31127 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31128 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31129 fde->dw_fde_second_end, &range_list_added,
31130 true);
31131 }
31132
31133 if (range_list_added)
31134 {
31135 /* We need to give .debug_loc and .debug_ranges an appropriate
31136 "base address". Use zero so that these addresses become
31137 absolute. Historically, we've emitted the unexpected
31138 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31139 Emit both to give time for other tools to adapt. */
31140 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31141 if (! dwarf_strict && dwarf_version < 4)
31142 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31143
31144 add_ranges (NULL);
31145 }
31146 }
31147
31148 /* The AIX assembler inserts the length, so adjust the reference to match the
31149 offset expected by debuggers. */
31150 strcpy (dl_section_ref, debug_line_section_label);
31151 if (XCOFF_DEBUGGING_INFO)
31152 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31153
31154 if (debug_info_level >= DINFO_LEVEL_TERSE)
31155 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31156 dl_section_ref);
31157
31158 if (have_macinfo)
31159 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31160 macinfo_section_label);
31161
31162 if (dwarf_split_debug_info)
31163 {
31164 if (have_location_lists)
31165 {
31166 if (dwarf_version >= 5)
31167 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31168 loc_section_label);
31169 /* optimize_location_lists calculates the size of the lists,
31170 so index them first, and assign indices to the entries.
31171 Although optimize_location_lists will remove entries from
31172 the table, it only does so for duplicates, and therefore
31173 only reduces ref_counts to 1. */
31174 index_location_lists (comp_unit_die ());
31175 }
31176
31177 if (addr_index_table != NULL)
31178 {
31179 unsigned int index = 0;
31180 addr_index_table
31181 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31182 (&index);
31183 }
31184 }
31185
31186 loc_list_idx = 0;
31187 if (have_location_lists)
31188 {
31189 optimize_location_lists (comp_unit_die ());
31190 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31191 if (dwarf_version >= 5 && dwarf_split_debug_info)
31192 assign_location_list_indexes (comp_unit_die ());
31193 }
31194
31195 save_macinfo_strings ();
31196
31197 if (dwarf_split_debug_info)
31198 {
31199 unsigned int index = 0;
31200
31201 /* Add attributes common to skeleton compile_units and
31202 type_units. Because these attributes include strings, it
31203 must be done before freezing the string table. Top-level
31204 skeleton die attrs are added when the skeleton type unit is
31205 created, so ensure it is created by this point. */
31206 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31207 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31208 }
31209
31210 /* Output all of the compilation units. We put the main one last so that
31211 the offsets are available to output_pubnames. */
31212 for (node = cu_die_list; node; node = node->next)
31213 output_comp_unit (node->die, 0, NULL);
31214
31215 hash_table<comdat_type_hasher> comdat_type_table (100);
31216 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31217 {
31218 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31219
31220 /* Don't output duplicate types. */
31221 if (*slot != HTAB_EMPTY_ENTRY)
31222 continue;
31223
31224 /* Add a pointer to the line table for the main compilation unit
31225 so that the debugger can make sense of DW_AT_decl_file
31226 attributes. */
31227 if (debug_info_level >= DINFO_LEVEL_TERSE)
31228 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31229 (!dwarf_split_debug_info
31230 ? dl_section_ref
31231 : debug_skeleton_line_section_label));
31232
31233 output_comdat_type_unit (ctnode);
31234 *slot = ctnode;
31235 }
31236
31237 if (dwarf_split_debug_info)
31238 {
31239 int mark;
31240 struct md5_ctx ctx;
31241
31242 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31243 index_rnglists ();
31244
31245 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31246 md5_init_ctx (&ctx);
31247 mark = 0;
31248 die_checksum (comp_unit_die (), &ctx, &mark);
31249 unmark_all_dies (comp_unit_die ());
31250 md5_finish_ctx (&ctx, checksum);
31251
31252 if (dwarf_version < 5)
31253 {
31254 /* Use the first 8 bytes of the checksum as the dwo_id,
31255 and add it to both comp-unit DIEs. */
31256 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31257 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31258 }
31259
31260 /* Add the base offset of the ranges table to the skeleton
31261 comp-unit DIE. */
31262 if (!vec_safe_is_empty (ranges_table))
31263 {
31264 if (dwarf_version >= 5)
31265 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31266 ranges_base_label);
31267 else
31268 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31269 ranges_section_label);
31270 }
31271
31272 switch_to_section (debug_addr_section);
31273 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31274 output_addr_table ();
31275 }
31276
31277 /* Output the main compilation unit if non-empty or if .debug_macinfo
31278 or .debug_macro will be emitted. */
31279 output_comp_unit (comp_unit_die (), have_macinfo,
31280 dwarf_split_debug_info ? checksum : NULL);
31281
31282 if (dwarf_split_debug_info && info_section_emitted)
31283 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31284
31285 /* Output the abbreviation table. */
31286 if (vec_safe_length (abbrev_die_table) != 1)
31287 {
31288 switch_to_section (debug_abbrev_section);
31289 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31290 output_abbrev_section ();
31291 }
31292
31293 /* Output location list section if necessary. */
31294 if (have_location_lists)
31295 {
31296 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31297 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31298 /* Output the location lists info. */
31299 switch_to_section (debug_loc_section);
31300 if (dwarf_version >= 5)
31301 {
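/* Editorial note (added): the directives below emit the DWARF 5
.debug_loclists header fields in order -- unit_length (where the value
0xffffffff escapes to 64-bit DWARF), version, address_size,
segment_selector_size and offset_entry_count. */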
31302 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31303 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31304 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31305 dw2_asm_output_data (4, 0xffffffff,
31306 "Initial length escape value indicating "
31307 "64-bit DWARF extension");
31308 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31309 "Length of Location Lists");
31310 ASM_OUTPUT_LABEL (asm_out_file, l1);
31311 output_dwarf_version ();
31312 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31313 dw2_asm_output_data (1, 0, "Segment Size");
31314 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31315 "Offset Entry Count");
31316 }
31317 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31318 if (dwarf_version >= 5 && dwarf_split_debug_info)
31319 {
31320 unsigned int save_loc_list_idx = loc_list_idx;
31321 loc_list_idx = 0;
31322 output_loclists_offsets (comp_unit_die ());
31323 gcc_assert (save_loc_list_idx == loc_list_idx);
31324 }
31325 output_location_lists (comp_unit_die ());
31326 if (dwarf_version >= 5)
31327 ASM_OUTPUT_LABEL (asm_out_file, l2);
31328 }
31329
31330 output_pubtables ();
31331
31332 /* Output the address range information if a CU (.debug_info section)
31333 was emitted. We output an empty table even if we had no functions
31334 to put in it. This is because the consumer has no way to tell the
31335 difference between an empty table that we omitted and failure to
31336 generate a table that would have contained data. */
31337 if (info_section_emitted)
31338 {
31339 switch_to_section (debug_aranges_section);
31340 output_aranges ();
31341 }
31342
31343 /* Output ranges section if necessary. */
31344 if (!vec_safe_is_empty (ranges_table))
31345 {
31346 if (dwarf_version >= 5)
31347 output_rnglists (generation);
31348 else
31349 output_ranges ();
31350 }
31351
31352 /* Have to end the macro section. */
31353 if (have_macinfo)
31354 {
31355 switch_to_section (debug_macinfo_section);
31356 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31357 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31358 : debug_skeleton_line_section_label, false);
31359 dw2_asm_output_data (1, 0, "End compilation unit");
31360 }
31361
31362 /* Output the source line correspondence table. We must do this
31363 even if there is no line information. Otherwise, on an empty
31364 translation unit, we will generate a present, but empty,
31365 .debug_info section. IRIX 6.5 `nm' will then complain when
31366 examining the file. This is done late so that any filenames
31367 used by the debug_info section are marked as 'used'. */
31368 switch_to_section (debug_line_section);
31369 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31370 if (! output_asm_line_debug_info ())
31371 output_line_info (false);
31372
31373 if (dwarf_split_debug_info && info_section_emitted)
31374 {
31375 switch_to_section (debug_skeleton_line_section);
31376 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31377 output_line_info (true);
31378 }
31379
31380 /* If we emitted any indirect strings, output the string table too. */
31381 if (debug_str_hash || skeleton_debug_str_hash)
31382 output_indirect_strings ();
31383 if (debug_line_str_hash)
31384 {
31385 switch_to_section (debug_line_str_section);
31386 const enum dwarf_form form = DW_FORM_line_strp;
31387 debug_line_str_hash->traverse<enum dwarf_form,
31388 output_indirect_string> (form);
31389 }
31390
31391 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31392 symview_upper_bound = 0;
31393 if (zero_view_p)
31394 bitmap_clear (zero_view_p);
31395 }
31396
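/* Editorial sketch (not part of the original source): dwarf2out_finish
   above emits 0xffffffff as an initial-length escape when using 64-bit
   DWARF.  A consumer is expected to decode such an initial length roughly
   as follows; `read_initial_length' is a hypothetical helper and the code
   assumes little-endian input purely to keep the example short.  */
#if 0
#include <stdint.h>

static uint64_t
read_initial_length (const unsigned char *p, unsigned int *offset_size)
{
  uint32_t len32 = (uint32_t) p[0] | ((uint32_t) p[1] << 8)
		   | ((uint32_t) p[2] << 16) | ((uint32_t) p[3] << 24);
  if (len32 == 0xffffffffu)
    {
      /* 64-bit DWARF: the real unit length follows in the next 8 bytes,
	 and section offsets within the unit are 8 bytes wide.  */
      uint64_t len64 = 0;
      for (int i = 0; i < 8; i++)
	len64 |= (uint64_t) p[4 + i] << (8 * i);
      *offset_size = 8;
      return len64;
    }
  /* 32-bit DWARF: 4-byte length, 4-byte section offsets.  */
  *offset_size = 4;
  return len32;
}
#endif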
31397 /* Returns a hash value for X (which really is a variable_value_struct). */
31398
31399 inline hashval_t
31400 variable_value_hasher::hash (variable_value_struct *x)
31401 {
31402 return (hashval_t) x->decl_id;
31403 }
31404
31405 /* Return nonzero if decl_id of variable_value_struct X is the same as
31406 UID of decl Y. */
31407
31408 inline bool
31409 variable_value_hasher::equal (variable_value_struct *x, tree y)
31410 {
31411 return x->decl_id == DECL_UID (y);
31412 }
31413
31414 /* Helper function for resolve_variable_value, handle
31415 DW_OP_GNU_variable_value in one location expression.
31416 Return true if exprloc has been changed into loclist. */
31417
31418 static bool
31419 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31420 {
31421 dw_loc_descr_ref next;
31422 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31423 {
31424 next = loc->dw_loc_next;
31425 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31426 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31427 continue;
31428
31429 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31430 if (DECL_CONTEXT (decl) != current_function_decl)
31431 continue;
31432
31433 dw_die_ref ref = lookup_decl_die (decl);
31434 if (ref)
31435 {
31436 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31437 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31438 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31439 continue;
31440 }
31441 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31442 if (l == NULL)
31443 continue;
31444 if (l->dw_loc_next)
31445 {
31446 if (AT_class (a) != dw_val_class_loc)
31447 continue;
31448 switch (a->dw_attr)
31449 {
31450 /* The following attributes allow both exprloc and loclist
31451 classes, so we can change them into a loclist. */
31452 case DW_AT_location:
31453 case DW_AT_string_length:
31454 case DW_AT_return_addr:
31455 case DW_AT_data_member_location:
31456 case DW_AT_frame_base:
31457 case DW_AT_segment:
31458 case DW_AT_static_link:
31459 case DW_AT_use_location:
31460 case DW_AT_vtable_elem_location:
31461 if (prev)
31462 {
31463 prev->dw_loc_next = NULL;
31464 prepend_loc_descr_to_each (l, AT_loc (a));
31465 }
31466 if (next)
31467 add_loc_descr_to_each (l, next);
31468 a->dw_attr_val.val_class = dw_val_class_loc_list;
31469 a->dw_attr_val.val_entry = NULL;
31470 a->dw_attr_val.v.val_loc_list = l;
31471 have_location_lists = true;
31472 return true;
31473 /* The following attributes allow both exprloc and reference
31474 classes, so if the whole expression is a single
31475 DW_OP_GNU_variable_value we could transform it into a reference. */
31476 case DW_AT_byte_size:
31477 case DW_AT_bit_size:
31478 case DW_AT_lower_bound:
31479 case DW_AT_upper_bound:
31480 case DW_AT_bit_stride:
31481 case DW_AT_count:
31482 case DW_AT_allocated:
31483 case DW_AT_associated:
31484 case DW_AT_byte_stride:
31485 if (prev == NULL && next == NULL)
31486 break;
31487 /* FALLTHRU */
31488 default:
31489 if (dwarf_strict)
31490 continue;
31491 break;
31492 }
31493 /* Create a DW_TAG_variable DIE that we can refer to. */
31494 gen_decl_die (decl, NULL_TREE, NULL,
31495 lookup_decl_die (current_function_decl));
31496 ref = lookup_decl_die (decl);
31497 if (ref)
31498 {
31499 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31500 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31501 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31502 }
31503 continue;
31504 }
31505 if (prev)
31506 {
31507 prev->dw_loc_next = l->expr;
31508 add_loc_descr (&prev->dw_loc_next, next);
31509 free_loc_descr (loc, NULL);
31510 next = prev->dw_loc_next;
31511 }
31512 else
31513 {
31514 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31515 add_loc_descr (&loc, next);
31516 next = loc;
31517 }
31518 loc = prev;
31519 }
31520 return false;
31521 }
31522
31523 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31524
31525 static void
31526 resolve_variable_value (dw_die_ref die)
31527 {
31528 dw_attr_node *a;
31529 dw_loc_list_ref loc;
31530 unsigned ix;
31531
31532 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31533 switch (AT_class (a))
31534 {
31535 case dw_val_class_loc:
31536 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31537 break;
31538 /* FALLTHRU */
31539 case dw_val_class_loc_list:
31540 loc = AT_loc_list (a);
31541 gcc_assert (loc);
31542 for (; loc; loc = loc->dw_loc_next)
31543 resolve_variable_value_in_expr (a, loc->expr);
31544 break;
31545 default:
31546 break;
31547 }
31548 }
31549
31550 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31551 temporaries in the current function. */
31552
31553 static void
31554 resolve_variable_values (void)
31555 {
31556 if (!variable_value_hash || !current_function_decl)
31557 return;
31558
31559 struct variable_value_struct *node
31560 = variable_value_hash->find_with_hash (current_function_decl,
31561 DECL_UID (current_function_decl));
31562
31563 if (node == NULL)
31564 return;
31565
31566 unsigned int i;
31567 dw_die_ref die;
31568 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31569 resolve_variable_value (die);
31570 }
31571
31572 /* Helper function for note_variable_value, handle one location
31573 expression. */
31574
31575 static void
31576 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31577 {
31578 for (; loc; loc = loc->dw_loc_next)
31579 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31580 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31581 {
31582 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31583 dw_die_ref ref = lookup_decl_die (decl);
31584 if (! ref && (flag_generate_lto || flag_generate_offload))
31585 {
31586 /* ??? This is somewhat of a hack: we do not create DIEs early
31587 for variables that are not in BLOCK trees, but when generating
31588 early LTO output we need the dw_val_class_decl_ref to be
31589 fully resolved. For fat LTO objects we'd also like to
31590 undo this after the LTO dwarf output. */
31591 gcc_assert (DECL_CONTEXT (decl));
31592 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31593 gcc_assert (ctx != NULL);
31594 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31595 ref = lookup_decl_die (decl);
31596 gcc_assert (ref != NULL);
31597 }
31598 if (ref)
31599 {
31600 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31601 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31602 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31603 continue;
31604 }
31605 if (VAR_P (decl)
31606 && DECL_CONTEXT (decl)
31607 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31608 && lookup_decl_die (DECL_CONTEXT (decl)))
31609 {
31610 if (!variable_value_hash)
31611 variable_value_hash
31612 = hash_table<variable_value_hasher>::create_ggc (10);
31613
31614 tree fndecl = DECL_CONTEXT (decl);
31615 struct variable_value_struct *node;
31616 struct variable_value_struct **slot
31617 = variable_value_hash->find_slot_with_hash (fndecl,
31618 DECL_UID (fndecl),
31619 INSERT);
31620 if (*slot == NULL)
31621 {
31622 node = ggc_cleared_alloc<variable_value_struct> ();
31623 node->decl_id = DECL_UID (fndecl);
31624 *slot = node;
31625 }
31626 else
31627 node = *slot;
31628
31629 vec_safe_push (node->dies, die);
31630 }
31631 }
31632 }
31633
31634 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31635 with dw_val_class_decl_ref operand. */
31636
31637 static void
31638 note_variable_value (dw_die_ref die)
31639 {
31640 dw_die_ref c;
31641 dw_attr_node *a;
31642 dw_loc_list_ref loc;
31643 unsigned ix;
31644
31645 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31646 switch (AT_class (a))
31647 {
31648 case dw_val_class_loc_list:
31649 loc = AT_loc_list (a);
31650 gcc_assert (loc);
31651 if (!loc->noted_variable_value)
31652 {
31653 loc->noted_variable_value = 1;
31654 for (; loc; loc = loc->dw_loc_next)
31655 note_variable_value_in_expr (die, loc->expr);
31656 }
31657 break;
31658 case dw_val_class_loc:
31659 note_variable_value_in_expr (die, AT_loc (a));
31660 break;
31661 default:
31662 break;
31663 }
31664
31665 /* Mark children. */
31666 FOR_EACH_CHILD (die, c, note_variable_value (c));
31667 }
31668
31669 /* Perform any cleanups needed after the early debug generation pass
31670 has run. */
31671
31672 static void
31673 dwarf2out_early_finish (const char *filename)
31674 {
31675 set_early_dwarf s;
31676
31677 /* PCH might result in DW_AT_producer string being restored from the
31678 header compilation, so always fill it with an empty string initially
31679 and overwrite it only here. */
31680 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31681 producer_string = gen_producer_string ();
31682 producer->dw_attr_val.v.val_str->refcount--;
31683 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31684
31685 /* Add the name for the main input file now. We delayed this from
31686 dwarf2out_init to avoid complications with PCH. */
31687 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31688 add_comp_dir_attribute (comp_unit_die ());
31689
31690 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31691 DW_AT_comp_dir into .debug_line_str section. */
31692 if (!dwarf2out_as_loc_support
31693 && dwarf_version >= 5
31694 && DWARF5_USE_DEBUG_LINE_STR)
31695 {
31696 for (int i = 0; i < 2; i++)
31697 {
31698 dw_attr_node *a = get_AT (comp_unit_die (),
31699 i ? DW_AT_comp_dir : DW_AT_name);
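/* Editorial note (added): strings whose size, including the terminating
NUL, does not exceed DWARF_OFFSET_SIZE are kept inline below -- replacing
them with a .debug_line_str offset would not make the DIE any smaller. */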
31700 if (a == NULL
31701 || AT_class (a) != dw_val_class_str
31702 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31703 continue;
31704
31705 if (! debug_line_str_hash)
31706 debug_line_str_hash
31707 = hash_table<indirect_string_hasher>::create_ggc (10);
31708
31709 struct indirect_string_node *node
31710 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31711 set_indirect_string (node);
31712 node->form = DW_FORM_line_strp;
31713 a->dw_attr_val.v.val_str->refcount--;
31714 a->dw_attr_val.v.val_str = node;
31715 }
31716 }
31717
31718 /* With LTO early dwarf was really finished at compile-time, so make
31719 sure to adjust the phase after annotating the LTRANS CU DIE. */
31720 if (in_lto_p)
31721 {
31722 early_dwarf_finished = true;
31723 return;
31724 }
31725
31726 /* Walk through the list of incomplete types again, trying once more to
31727 emit full debugging info for them. */
31728 retry_incomplete_types ();
31729
31730 /* The point here is to flush out the limbo list so that it is empty
31731 and we don't need to stream it for LTO. */
31732 flush_limbo_die_list ();
31733
31734 gen_scheduled_generic_parms_dies ();
31735 gen_remaining_tmpl_value_param_die_attribute ();
31736
31737 /* Add DW_AT_linkage_name for all deferred DIEs. */
31738 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31739 {
31740 tree decl = node->created_for;
31741 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31742 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31743 ended up in deferred_asm_name before we knew it was
31744 constant and never written to disk. */
31745 && DECL_ASSEMBLER_NAME (decl))
31746 {
31747 add_linkage_attr (node->die, decl);
31748 move_linkage_attr (node->die);
31749 }
31750 }
31751 deferred_asm_name = NULL;
31752
31753 if (flag_eliminate_unused_debug_types)
31754 prune_unused_types ();
31755
31756 /* Generate separate COMDAT sections for type DIEs. */
31757 if (use_debug_types)
31758 {
31759 break_out_comdat_types (comp_unit_die ());
31760
31761 /* Each new type_unit DIE was added to the limbo die list when created.
31762 Since these have all been added to comdat_type_list, clear the
31763 limbo die list. */
31764 limbo_die_list = NULL;
31765
31766 /* For each new comdat type unit, copy declarations for incomplete
31767 types to make the new unit self-contained (i.e., no direct
31768 references to the main compile unit). */
31769 for (comdat_type_node *ctnode = comdat_type_list;
31770 ctnode != NULL; ctnode = ctnode->next)
31771 copy_decls_for_unworthy_types (ctnode->root_die);
31772 copy_decls_for_unworthy_types (comp_unit_die ());
31773
31774 /* In the process of copying declarations from one unit to another,
31775 we may have left some declarations behind that are no longer
31776 referenced. Prune them. */
31777 prune_unused_types ();
31778 }
31779
31780 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
31781 with dw_val_class_decl_ref operand. */
31782 note_variable_value (comp_unit_die ());
31783 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31784 note_variable_value (node->die);
31785 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31786 ctnode = ctnode->next)
31787 note_variable_value (ctnode->root_die);
31788 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31789 note_variable_value (node->die);
31790
31791 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31792 both the main_cu and all skeleton TUs. Making this call unconditional
31793 would end up either adding a second copy of the AT_pubnames attribute, or
31794 requiring a special case in add_top_level_skeleton_die_attrs. */
31795 if (!dwarf_split_debug_info)
31796 add_AT_pubnames (comp_unit_die ());
31797
31798 /* The early debug phase is now finished. */
31799 early_dwarf_finished = true;
31800
31801 /* Do not generate DWARF assembler output now when not producing LTO bytecode. */
31802 if (!flag_generate_lto && !flag_generate_offload)
31803 return;
31804
31805 /* Now, as we are going to output for LTO, initialize sections and
31806 labels to the LTO variants. We don't need a random-seed postfix as
31807 other LTO sections do, since linking the LTO debug sections into one
31808 in a partial link is fine. */
31809 init_sections_and_labels (true);
31810
31811 /* The output below is modeled after dwarf2out_finish with all
31812 location related output removed and some LTO specific changes.
31813 Some refactoring might make both smaller and easier to match up. */
31814
31815 /* Traverse the DIEs and add sibling attributes to those DIEs
31816 that have children. */
31817 add_sibling_attributes (comp_unit_die ());
31818 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31819 add_sibling_attributes (node->die);
31820 for (comdat_type_node *ctnode = comdat_type_list;
31821 ctnode != NULL; ctnode = ctnode->next)
31822 add_sibling_attributes (ctnode->root_die);
31823
31824 if (have_macinfo)
31825 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31826 macinfo_section_label);
31827
31828 save_macinfo_strings ();
31829
31830 if (dwarf_split_debug_info)
31831 {
31832 unsigned int index = 0;
31833 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31834 }
31835
31836 /* Output all of the compilation units. We put the main one last so that
31837 the offsets are available to output_pubnames. */
31838 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31839 output_comp_unit (node->die, 0, NULL);
31840
31841 hash_table<comdat_type_hasher> comdat_type_table (100);
31842 for (comdat_type_node *ctnode = comdat_type_list;
31843 ctnode != NULL; ctnode = ctnode->next)
31844 {
31845 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31846
31847 /* Don't output duplicate types. */
31848 if (*slot != HTAB_EMPTY_ENTRY)
31849 continue;
31850
31851 /* Add a pointer to the line table for the main compilation unit
31852 so that the debugger can make sense of DW_AT_decl_file
31853 attributes. */
31854 if (debug_info_level >= DINFO_LEVEL_TERSE)
31855 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31856 (!dwarf_split_debug_info
31857 ? debug_line_section_label
31858 : debug_skeleton_line_section_label));
31859
31860 output_comdat_type_unit (ctnode);
31861 *slot = ctnode;
31862 }
31863
31864 /* Attach a unique symbol to the main debug info section. */
31865 compute_comp_unit_symbol (comp_unit_die ());
31866
31867 /* Output the main compilation unit. We always need it if only for
31868 the CU symbol. */
31869 output_comp_unit (comp_unit_die (), true, NULL);
31870
31871 /* Output the abbreviation table. */
31872 if (vec_safe_length (abbrev_die_table) != 1)
31873 {
31874 switch_to_section (debug_abbrev_section);
31875 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31876 output_abbrev_section ();
31877 }
31878
31879 /* Have to end the macro section. */
31880 if (have_macinfo)
31881 {
31882 /* We have to save macinfo state if we need to output it again
31883 for the FAT part of the object. */
31884 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31885 if (flag_fat_lto_objects)
31886 macinfo_table = macinfo_table->copy ();
31887
31888 switch_to_section (debug_macinfo_section);
31889 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31890 output_macinfo (debug_skeleton_line_section_label, true);
31891 dw2_asm_output_data (1, 0, "End compilation unit");
31892
31893 /* Emit a skeleton debug_line section. */
31894 switch_to_section (debug_skeleton_line_section);
31895 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31896 output_line_info (true);
31897
31898 if (flag_fat_lto_objects)
31899 {
31900 vec_free (macinfo_table);
31901 macinfo_table = saved_macinfo_table;
31902 }
31903 }
31904
31905
31906 /* If we emitted any indirect strings, output the string table too. */
31907 if (debug_str_hash || skeleton_debug_str_hash)
31908 output_indirect_strings ();
31909
31910 /* Switch back to the text section. */
31911 switch_to_section (text_section);
31912 }
31913
31914 /* Reset all state within dwarf2out.c so that we can rerun the compiler
31915 within the same process. For use by toplev::finalize. */
31916
31917 void
31918 dwarf2out_c_finalize (void)
31919 {
31920 last_var_location_insn = NULL;
31921 cached_next_real_insn = NULL;
31922 used_rtx_array = NULL;
31923 incomplete_types = NULL;
31924 decl_scope_table = NULL;
31925 debug_info_section = NULL;
31926 debug_skeleton_info_section = NULL;
31927 debug_abbrev_section = NULL;
31928 debug_skeleton_abbrev_section = NULL;
31929 debug_aranges_section = NULL;
31930 debug_addr_section = NULL;
31931 debug_macinfo_section = NULL;
31932 debug_line_section = NULL;
31933 debug_skeleton_line_section = NULL;
31934 debug_loc_section = NULL;
31935 debug_pubnames_section = NULL;
31936 debug_pubtypes_section = NULL;
31937 debug_str_section = NULL;
31938 debug_line_str_section = NULL;
31939 debug_str_dwo_section = NULL;
31940 debug_str_offsets_section = NULL;
31941 debug_ranges_section = NULL;
31942 debug_frame_section = NULL;
31943 fde_vec = NULL;
31944 debug_str_hash = NULL;
31945 debug_line_str_hash = NULL;
31946 skeleton_debug_str_hash = NULL;
31947 dw2_string_counter = 0;
31948 have_multiple_function_sections = false;
31949 text_section_used = false;
31950 cold_text_section_used = false;
31951 cold_text_section = NULL;
31952 current_unit_personality = NULL;
31953
31954 early_dwarf = false;
31955 early_dwarf_finished = false;
31956
31957 next_die_offset = 0;
31958 single_comp_unit_die = NULL;
31959 comdat_type_list = NULL;
31960 limbo_die_list = NULL;
31961 file_table = NULL;
31962 decl_die_table = NULL;
31963 common_block_die_table = NULL;
31964 decl_loc_table = NULL;
31965 call_arg_locations = NULL;
31966 call_arg_loc_last = NULL;
31967 call_site_count = -1;
31968 tail_call_site_count = -1;
31969 cached_dw_loc_list_table = NULL;
31970 abbrev_die_table = NULL;
31971 delete dwarf_proc_stack_usage_map;
31972 dwarf_proc_stack_usage_map = NULL;
31973 line_info_label_num = 0;
31974 cur_line_info_table = NULL;
31975 text_section_line_info = NULL;
31976 cold_text_section_line_info = NULL;
31977 separate_line_info = NULL;
31978 info_section_emitted = false;
31979 pubname_table = NULL;
31980 pubtype_table = NULL;
31981 macinfo_table = NULL;
31982 ranges_table = NULL;
31983 ranges_by_label = NULL;
31984 rnglist_idx = 0;
31985 have_location_lists = false;
31986 loclabel_num = 0;
31987 poc_label_num = 0;
31988 last_emitted_file = NULL;
31989 label_num = 0;
31990 tmpl_value_parm_die_table = NULL;
31991 generic_type_instances = NULL;
31992 frame_pointer_fb_offset = 0;
31993 frame_pointer_fb_offset_valid = false;
31994 base_types.release ();
31995 XDELETEVEC (producer_string);
31996 producer_string = NULL;
31997 }
31998
31999 #include "gt-dwarf2out.h"