1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
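/* Worked example of the CFA rule (a generic sketch; the exact registers
   and offsets are target-dependent): if at some point the CFA is computed
   as SP + N, then after a prologue insn that subtracts 16 from SP the rule
   becomes SP + (N + 16), and once the prologue copies SP into a frame
   pointer the same address can be described as FP + (N + 16).  The register
   and offset in the rule change, but the address they compute, the CFA
   itself, stays fixed for the whole call frame.  */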
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
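/* Illustration of the initial length encoding (assuming little-endian
   output): with DWARF_OFFSET_SIZE == 4 a unit whose remaining size is
   0x120 bytes starts with the 4 bytes 20 01 00 00, while with
   DWARF_OFFSET_SIZE == 8 it starts with the escape ff ff ff ff followed
   by the 64-bit length 20 01 00 00 00 00 00 00, 12 bytes in total.  */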
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
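/* For instance, DWARF_ROUND (13, 8) expands to ((13 + 7) / 8) * 8, i.e. 16,
   while a size that is already a multiple of the boundary, such as
   DWARF_ROUND (16, 8), is returned unchanged.  */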
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
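/* For example, with 64-bit HOST_WIDE_INTs a value whose minimum precision
   is 64 bits needs (64 + 63) / 64 == 1 element, while a value needing
   65 bits of precision needs (65 + 63) / 64 == 2 elements.  */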
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
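/* Taken together, the two functions above describe both operands of a CFI.
   For example, DW_CFA_offset_extended carries a register number in its
   first operand (dw_cfi_oprnd_reg_num) and a factored offset in its second
   (dw_cfi_oprnd_offset), while DW_CFA_nop uses neither operand.  */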
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged along with them and not discarded
697 by link-time garbage collection. We need to do this on a per-function basis
698 to cope with -ffunction-sections. */
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
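/* As a concrete illustration (typical of ELF targets with C++ exception
   handling, though not guaranteed for every configuration): a unit with
   a personality routine, LSDAs and a non-absolute FDE encoding ends up
   with the augmentation string "zPLR", whose data then carries the
   personality encoding and pointer, the LSDA encoding, and the FDE
   pointer encoding, in that order.  */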
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDEs. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
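/* Sketch of representative output from the function above (the encoding
   bytes and symbol names depend on ASM_PREFERRED_EH_DATA_FORMAT and the
   personality in use, so treat the concrete values as placeholders):

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA42

   where 42 stands in for current_function_funcdef_no.  */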
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function because, without CFI
1114 asm, there is no current function anymore at the point where we emit
1115 the CFI data. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for
1133 this function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the start of the epilogue code generated for
1156 this function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
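/* For example (an illustrative sketch): a variable that lives in a register
   over one range of a function and on the stack after being spilled would
   get a two-node list, roughly

     [.LVL1, .LVL2) -> DW_OP_reg3
     [.LVL2, .LVL3) -> DW_OP_fbreg -24

   with ll_symbol naming the head of the list in the .debug_loc* section.  */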
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
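/* A minimal usage sketch of the two helpers above (hypothetical register
   and offset, shown only to make the chaining explicit): to describe the
   word stored at (reg 6) + 16 one could write

     dw_loc_descr_ref loc
       = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + 6), 16, 0);
     add_loc_descr (&loc, new_loc_descr (DW_OP_deref, 0, 0));

   which yields the expression "DW_OP_breg6 16; DW_OP_deref".  */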
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
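/* For instance, adding the constant 8 to an expression ending in
   "DW_OP_fbreg -24" simply rewrites that last operation to
   "DW_OP_fbreg -16", whereas an expression ending in DW_OP_deref grows
   by an extra "DW_OP_plus_uconst 8" instead.  */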
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extension
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
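/* Worked example (exposition only): for the two-operation expression
   DW_OP_fbreg -16, DW_OP_plus_uconst 8, size_of_locs returns
   1 + size_of_sleb128 (-16) + 1 + size_of_uleb128 (8) = 1 + 1 + 1 + 1
   = 4 bytes, since both operands fit in a single LEB128 byte.  */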
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
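/* Worked example (exposition only; DW_DSC_* descriptor values are
   taken from the DWARF spec): a range entry covering discriminants
   1..10 costs one byte for the DW_DSC_range descriptor plus one
   LEB128 byte per bound, i.e. 3 bytes, while a single-label entry for
   value 4 costs 1 + 1 = 2 bytes.  */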
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
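/* Exposition-only sketch of the raw form: on a target where the
   frame-register mapping is the identity, a one-operation expression
   DW_OP_breg7 8 would come out roughly as

     0x77,0x8

   (opcode byte, then the SLEB128-encoded operand bytes, all
   comma-separated), which the caller can splice into a .cfi_escape
   directive.  */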
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
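/* Worked example (exposition only): with a non-indirect CFA of
   "reg 7 + 16" and OFFSET 8, the result is the single operation
   DW_OP_breg7 24; in the indirect case the register/base_offset pair
   is dereferenced first and OFFSET plus cfa->offset is added on
   top of the loaded value.  */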
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 Dwarf V4 or higher, and only if the user has not disabled it via
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For Dwarf V5 or higher .debug_types no longer exists; the
2917 types are emitted as DW_UT_type units in .debug_info instead. For late LTO
2918 debug there should be almost no types emitted so avoid enabling
2919 -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
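/* E.g. "gcc -gdwarf-4 -fdebug-types-section" in a non-LTO compile
   makes use_debug_types true, so qualifying type DIEs are split into
   their own comdat type units; under LTO (in_lto_p) it stays false.
   (Exposition only.)  */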
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation unit. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind,
3059 a link to the next attribute in the chain, and an attribute value.
3060 Attributes are typically linked below the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* DIE is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* For an external ref to die_symbol if die_offset contains an extra
3094 offset to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. The DIE lookup routines do not consider
3098 such DIEs present. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 struct set_early_dwarf {
3108 bool saved;
3109 set_early_dwarf () : saved(early_dwarf)
3110 {
3111 gcc_assert (! early_dwarf_finished);
3112 early_dwarf = true;
3113 }
3114 ~set_early_dwarf () { early_dwarf = saved; }
3115 };
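/* Illustrative (hypothetical) use of the RAII helper above:

     {
       set_early_dwarf s;
       dwarf2out_decl (decl);
     }

   runs dwarf2out_decl with early_dwarf set to true, and the
   destructor restores the previous value even on early returns.  */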
3116
3117 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3118 #define FOR_EACH_CHILD(die, c, expr) do { \
3119 c = die->die_child; \
3120 if (c) do { \
3121 c = c->die_sib; \
3122 expr; \
3123 } while (c != die->die_child); \
3124 } while (0)
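/* Illustrative (hypothetical) use of FOR_EACH_CHILD: visiting every
   direct child of a DIE without walking the circular links by hand:

     dw_die_ref c;
     FOR_EACH_CHILD (die, c, print_die (c, stderr));
 */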
3125
3126 /* The pubname structure */
3127
3128 typedef struct GTY(()) pubname_struct {
3129 dw_die_ref die;
3130 const char *name;
3131 }
3132 pubname_entry;
3133
3134
3135 struct GTY(()) dw_ranges {
3136 const char *label;
3137 /* If this is positive, it's a block number, otherwise it's a
3138 bitwise-negated index into dw_ranges_by_label. */
3139 int num;
3140 /* Index for the range list for DW_FORM_rnglistx. */
3141 unsigned int idx : 31;
3142 /* True if this range might be in a different section from the
3143 previous entry. */
3144 unsigned int maybe_new_sec : 1;
3145 };
3146
3147 /* A structure to hold a macinfo entry. */
3148
3149 typedef struct GTY(()) macinfo_struct {
3150 unsigned char code;
3151 unsigned HOST_WIDE_INT lineno;
3152 const char *info;
3153 }
3154 macinfo_entry;
3155
3156
3157 struct GTY(()) dw_ranges_by_label {
3158 const char *begin;
3159 const char *end;
3160 };
3161
3162 /* The comdat type node structure. */
3163 struct GTY(()) comdat_type_node
3164 {
3165 dw_die_ref root_die;
3166 dw_die_ref type_die;
3167 dw_die_ref skeleton_die;
3168 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3169 comdat_type_node *next;
3170 };
3171
3172 /* A list of DIEs for which we can't determine ancestry (parent_die
3173 field) just yet. Later in dwarf2out_finish we will fill in the
3174 missing bits. */
3175 typedef struct GTY(()) limbo_die_struct {
3176 dw_die_ref die;
3177 /* The tree for which this DIE was created. We use this to
3178 determine ancestry later. */
3179 tree created_for;
3180 struct limbo_die_struct *next;
3181 }
3182 limbo_die_node;
3183
3184 typedef struct skeleton_chain_struct
3185 {
3186 dw_die_ref old_die;
3187 dw_die_ref new_die;
3188 struct skeleton_chain_struct *parent;
3189 }
3190 skeleton_chain_node;
3191
3192 /* Define a macro which returns nonzero for a TYPE_DECL which was
3193 implicitly generated for a type.
3194
3195 Note that, unlike the C front-end (which generates a NULL named
3196 TYPE_DECL node for each complete tagged type, each array type,
3197 and each function type node created) the C++ front-end generates
3198 a _named_ TYPE_DECL node for each tagged type node created.
3199 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3200 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3201 front-end, but for each type, tagged or not. */
3202
3203 #define TYPE_DECL_IS_STUB(decl) \
3204 (DECL_NAME (decl) == NULL_TREE \
3205 || (DECL_ARTIFICIAL (decl) \
3206 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3207 /* This is necessary for stub decls that \
3208 appear in nested inline functions. */ \
3209 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3210 && (decl_ultimate_origin (decl) \
3211 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3212
3213 /* Information concerning the compilation unit's programming
3214 language, and compiler version. */
3215
3216 /* Fixed size portion of the DWARF compilation unit header. */
3217 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3219 + (dwarf_version >= 5 ? 4 : 3))
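/* For the common 32-bit DWARF case (4-byte initial length, 4-byte
   offsets) this works out to 4 + 4 + 3 = 11 bytes for DWARF 2-4
   (length, version, abbrev offset, address size) and 4 + 4 + 4 = 12
   bytes for DWARF 5, which adds a one-byte unit type.  */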
3220
3221 /* Fixed size portion of the DWARF comdat type unit header. */
3222 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3223 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3224 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3225
3226 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3227 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3228 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3229
3230 /* Fixed size portion of public names info. */
3231 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3232
3233 /* Fixed size portion of the address range info. */
3234 #define DWARF_ARANGES_HEADER_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - DWARF_INITIAL_LENGTH_SIZE)
3238
3239 /* Size of padding portion in the address range info. It must be
3240 aligned to twice the pointer size. */
3241 #define DWARF_ARANGES_PAD_SIZE \
3242 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3243 DWARF2_ADDR_SIZE * 2) \
3244 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3245
3246 /* Use assembler line directives if available. */
3247 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3248 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3249 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3250 #else
3251 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3252 #endif
3253 #endif
3254
3255 /* Use assembler views in line directives if available. */
3256 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3257 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3258 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3259 #else
3260 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3261 #endif
3262 #endif
3263
3264 /* Return true if GCC configure detected assembler support for .loc. */
3265
3266 bool
3267 dwarf2out_default_as_loc_support (void)
3268 {
3269 return DWARF2_ASM_LINE_DEBUG_INFO;
3270 #if (GCC_VERSION >= 3000)
3271 # undef DWARF2_ASM_LINE_DEBUG_INFO
3272 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3273 #endif
3274 }
3275
3276 /* Return true if GCC configure detected assembler support for views
3277 in .loc directives. */
3278
3279 bool
3280 dwarf2out_default_as_locview_support (void)
3281 {
3282 return DWARF2_ASM_VIEW_DEBUG_INFO;
3283 #if (GCC_VERSION >= 3000)
3284 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3285 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3286 #endif
3287 }
3288
3289 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3290 view computation, and it refers to a view identifier for which we
3291 will not emit a label because it is known to map to a view number
3292 zero. We won't allocate the bitmap if we're not using assembler
3293 support for location views, but we have to make the variable
3294 visible for GGC and for code that will be optimized out for lack of
3295 support but that's still parsed and compiled. We could abstract it
3296 out with macros, but it's not worth it. */
3297 static GTY(()) bitmap zero_view_p;
3298
3299 /* Evaluate to TRUE iff N is known to identify the first location view
3300 at its PC. When not using assembler location view computation,
3301 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3302 and view label numbers recorded in it are the ones known to be
3303 zero. */
3304 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3305 || (N) == (var_loc_view)-1 \
3306 || (zero_view_p \
3307 && bitmap_bit_p (zero_view_p, (N))))
3308
3309 /* Return true iff we're to emit .loc directives for the assembler to
3310 generate line number sections.
3311
3312 When we're not emitting views, all we need from the assembler is
3313 support for .loc directives.
3314
3315 If we are emitting views, we can only use the assembler's .loc
3316 support if it also supports views.
3317
3318 When the compiler is emitting the line number programs and
3319 computing view numbers itself, it resets view numbers at known PC
3320 changes and counts from that, and then it emits view numbers as
3321 literal constants in locviewlists. There are cases in which the
3322 compiler is not sure about PC changes, e.g. when extra alignment is
3323 requested for a label. In these cases, the compiler may not reset
3324 the view counter, and the potential PC advance in the line number
3325 program will use an opcode that does not reset the view counter
3326 even if the PC actually changes, so that compiler and debug info
3327 consumer can keep view numbers in sync.
3328
3329 When the compiler defers view computation to the assembler, it
3330 emits symbolic view numbers in locviewlists, with the exception of
3331 views known to be zero (forced resets, or reset after
3332 compiler-visible PC changes): instead of emitting symbols for
3333 these, we emit literal zero and assert the assembler agrees with
3334 the compiler's assessment. We could use symbolic views everywhere,
3335 instead of special-casing zero views, but then we'd be unable to
3336 optimize out locviewlists that contain only zeros. */
3337
3338 static bool
3339 output_asm_line_debug_info (void)
3340 {
3341 return (dwarf2out_as_loc_support
3342 && (dwarf2out_as_locview_support
3343 || !debug_variable_location_views));
3344 }
3345
3346 /* Minimum line offset in a special line info. opcode.
3347 This value was chosen to give a reasonable range of values. */
3348 #define DWARF_LINE_BASE -10
3349
3350 /* First special line opcode - leave room for the standard opcodes. */
3351 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3352
3353 /* Range of line offsets in a special line info. opcode. */
3354 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
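/* These three constants parameterize the special opcodes advertised in
   the .debug_line header.  Under the standard DWARF encoding
   (exposition only), a special opcode covering a line advance L, with
   DWARF_LINE_BASE <= L < DWARF_LINE_BASE + DWARF_LINE_RANGE, and an
   operation advance A is

     opcode = (L - DWARF_LINE_BASE) + (DWARF_LINE_RANGE * A)
              + DWARF_LINE_OPCODE_BASE

   and must fall in the 256 - DWARF_LINE_OPCODE_BASE opcodes left above
   the standard ones.  */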
3355
3356 /* Flag that indicates the initial value of the is_stmt_start flag.
3357 In the present implementation, we do not mark any lines as
3358 the beginning of a source statement, because that information
3359 is not made available by the GCC front-end. */
3360 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3361
3362 /* Maximum number of operations per instruction bundle. */
3363 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3364 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3365 #endif
3366
3367 /* This location is used by calc_die_sizes() to keep track of
3368 the offset of each DIE within the .debug_info section. */
3369 static unsigned long next_die_offset;
3370
3371 /* Record the root of the DIE's built for the current compilation unit. */
3372 static GTY(()) dw_die_ref single_comp_unit_die;
3373
3374 /* A list of type DIEs that have been separated into comdat sections. */
3375 static GTY(()) comdat_type_node *comdat_type_list;
3376
3377 /* A list of CU DIEs that have been separated. */
3378 static GTY(()) limbo_die_node *cu_die_list;
3379
3380 /* A list of DIEs with a NULL parent waiting to be relocated. */
3381 static GTY(()) limbo_die_node *limbo_die_list;
3382
3383 /* A list of DIEs for which we may have to generate
3384 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3385 static GTY(()) limbo_die_node *deferred_asm_name;
3386
3387 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3388 {
3389 typedef const char *compare_type;
3390
3391 static hashval_t hash (dwarf_file_data *);
3392 static bool equal (dwarf_file_data *, const char *);
3393 };
3394
3395 /* Filenames referenced by this compilation unit. */
3396 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3397
3398 struct decl_die_hasher : ggc_ptr_hash<die_node>
3399 {
3400 typedef tree compare_type;
3401
3402 static hashval_t hash (die_node *);
3403 static bool equal (die_node *, tree);
3404 };
3405 /* A hash table of references to DIE's that describe declarations.
3406 The key is a DECL_UID() which is a unique number identifying each decl. */
3407 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3408
3409 struct GTY ((for_user)) variable_value_struct {
3410 unsigned int decl_id;
3411 vec<dw_die_ref, va_gc> *dies;
3412 };
3413
3414 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (variable_value_struct *);
3419 static bool equal (variable_value_struct *, tree);
3420 };
3421 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3422 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
3423 DECL_CONTEXT of the referenced VAR_DECLs. */
3424 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3425
3426 struct block_die_hasher : ggc_ptr_hash<die_struct>
3427 {
3428 static hashval_t hash (die_struct *);
3429 static bool equal (die_struct *, die_struct *);
3430 };
3431
3432 /* A hash table of references to DIE's that describe COMMON blocks.
3433 The key is DECL_UID() ^ die_parent. */
3434 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3435
3436 typedef struct GTY(()) die_arg_entry_struct {
3437 dw_die_ref die;
3438 tree arg;
3439 } die_arg_entry;
3440
3441
3442 /* Node of the variable location list. */
3443 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3444 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3445 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3446 in mode of the EXPR_LIST node and first EXPR_LIST operand
3447 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3448 location or NULL for padding. For larger bitsizes,
3449 mode is 0 and first operand is a CONCAT with bitsize
3450 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3451 NULL as second operand. */
3452 rtx GTY (()) loc;
3453 const char * GTY (()) label;
3454 struct var_loc_node * GTY (()) next;
3455 var_loc_view view;
3456 };
3457
3458 /* Variable location list. */
3459 struct GTY ((for_user)) var_loc_list_def {
3460 struct var_loc_node * GTY (()) first;
3461
3462 /* Pointer to the last or last-but-one element of the chained
3463 list. If the list is empty, both first and last are NULL. If
3464 the list contains just one node, or the last node certainly is
3465 not redundant, this points to the last node; otherwise it points
3466 to the last but one.
3467 Do not mark it for GC because it is marked through the chain. */
3468 struct var_loc_node * GTY ((skip ("%h"))) last;
3469
3470 /* Pointer to the last element before a section switch; if NULL,
3471 either sections weren't switched or first is after the section
3472 switch. */
3473 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3474
3475 /* DECL_UID of the variable decl. */
3476 unsigned int decl_id;
3477 };
3478 typedef struct var_loc_list_def var_loc_list;
3479
3480 /* Call argument location list. */
3481 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3482 rtx GTY (()) call_arg_loc_note;
3483 const char * GTY (()) label;
3484 tree GTY (()) block;
3485 bool tail_call_p;
3486 rtx GTY (()) symbol_ref;
3487 struct call_arg_loc_node * GTY (()) next;
3488 };
3489
3490
3491 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3492 {
3493 typedef const_tree compare_type;
3494
3495 static hashval_t hash (var_loc_list *);
3496 static bool equal (var_loc_list *, const_tree);
3497 };
3498
3499 /* Table of decl location linked lists. */
3500 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3501
3502 /* Head and tail of call_arg_loc chain. */
3503 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3504 static struct call_arg_loc_node *call_arg_loc_last;
3505
3506 /* Number of call sites in the current function. */
3507 static int call_site_count = -1;
3508 /* Number of tail call sites in the current function. */
3509 static int tail_call_site_count = -1;
3510
3511 /* A cached location list. */
3512 struct GTY ((for_user)) cached_dw_loc_list_def {
3513 /* The DECL_UID of the decl that this entry describes. */
3514 unsigned int decl_id;
3515
3516 /* The cached location list. */
3517 dw_loc_list_ref loc_list;
3518 };
3519 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3520
3521 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3522 {
3523
3524 typedef const_tree compare_type;
3525
3526 static hashval_t hash (cached_dw_loc_list *);
3527 static bool equal (cached_dw_loc_list *, const_tree);
3528 };
3529
3530 /* Table of cached location lists. */
3531 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3532
3533 /* A vector of references to DIE's that are uniquely identified by their tag,
3534 presence/absence of children DIE's, and list of attribute/value pairs. */
3535 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3536
3537 /* A hash map to remember the stack usage for DWARF procedures. The value
3538 stored is the stack size difference between before the DWARF procedure
3539 invocation and after it returned. In other words, for a DWARF procedure
3540 that consumes N stack slots and that pushes M ones, this stores M - N. */
3541 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
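
/* For example, worked from the definition above: a DWARF procedure that
   pops a single argument and pushes one result is recorded as 1 - 1 = 0,
   while one that combines two popped arguments into a single pushed result
   is recorded as 1 - 2 = -1.  */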
3542
3543 /* A global counter for generating labels for line number data. */
3544 static unsigned int line_info_label_num;
3545
3546 /* The current table to which we should emit line number information
3547 for the current function. This will be set up at the beginning of
3548 assembly for the function. */
3549 static GTY(()) dw_line_info_table *cur_line_info_table;
3550
3551 /* The two default tables of line number info. */
3552 static GTY(()) dw_line_info_table *text_section_line_info;
3553 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3554
3555 /* The set of all non-default tables of line number info. */
3556 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3557
3558 /* A flag telling the pubnames/pubtypes export code whether there is an
3559 info section to refer to. */
3560 static bool info_section_emitted;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible names. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3565
3566 /* A pointer to the base of a table that contains a list of publicly
3567 accessible types. */
3568 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3569
3570 /* A pointer to the base of a table that contains a list of macro
3571 defines/undefines (and file start/end markers). */
3572 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3573
3574 /* True if .debug_macinfo or .debug_macros section is going to be
3575 emitted. */
3576 #define have_macinfo \
3577 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3578 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3579 && !macinfo_table->is_empty ())
3580
3581 /* Vector of dies for which we should generate .debug_ranges info. */
3582 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3583
3584 /* Vector of pairs of labels referenced in ranges_table. */
3585 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3586
3587 /* Whether we have location lists that need outputting. */
3588 static GTY(()) bool have_location_lists;
3589
3590 /* Unique label counter. */
3591 static GTY(()) unsigned int loclabel_num;
3592
3593 /* Unique label counter for point-of-call tables. */
3594 static GTY(()) unsigned int poc_label_num;
3595
3596 /* The last file entry emitted by maybe_emit_file(). */
3597 static GTY(()) struct dwarf_file_data * last_emitted_file;
3598
3599 /* Number of internal labels generated by gen_internal_sym(). */
3600 static GTY(()) int label_num;
3601
3602 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3603
3604 /* Instances of generic types for which we need to generate debug
3605 info that describe their generic parameters and arguments. That
3606 generation needs to happen once all types are properly laid out so
3607 we do it at the end of compilation. */
3608 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3609
3610 /* Offset from the "steady-state frame pointer" to the frame base,
3611 within the current function. */
3612 static poly_int64 frame_pointer_fb_offset;
3613 static bool frame_pointer_fb_offset_valid;
3614
3615 static vec<dw_die_ref> base_types;
3616
3617 /* Flags to represent a set of attribute classes for attributes that represent
3618 a scalar value (bounds, pointers, ...). */
3619 enum dw_scalar_form
3620 {
3621 dw_scalar_form_constant = 0x01,
3622 dw_scalar_form_exprloc = 0x02,
3623 dw_scalar_form_reference = 0x04
3624 };
3625
3626 /* Forward declarations for functions defined in this file. */
3627
3628 static int is_pseudo_reg (const_rtx);
3629 static tree type_main_variant (tree);
3630 static int is_tagged_type (const_tree);
3631 static const char *dwarf_tag_name (unsigned);
3632 static const char *dwarf_attr_name (unsigned);
3633 static const char *dwarf_form_name (unsigned);
3634 static tree decl_ultimate_origin (const_tree);
3635 static tree decl_class_context (tree);
3636 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3637 static inline enum dw_val_class AT_class (dw_attr_node *);
3638 static inline unsigned int AT_index (dw_attr_node *);
3639 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3640 static inline unsigned AT_flag (dw_attr_node *);
3641 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3642 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3643 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3644 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3645 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3646 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3647 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3648 unsigned int, unsigned char *);
3649 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3650 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3651 static inline const char *AT_string (dw_attr_node *);
3652 static enum dwarf_form AT_string_form (dw_attr_node *);
3653 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3654 static void add_AT_specification (dw_die_ref, dw_die_ref);
3655 static inline dw_die_ref AT_ref (dw_attr_node *);
3656 static inline int AT_ref_external (dw_attr_node *);
3657 static inline void set_AT_ref_external (dw_attr_node *, int);
3658 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3659 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3660 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3661 dw_loc_list_ref);
3662 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3663 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3665 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3666 static void remove_addr_table_entry (addr_table_entry *);
3667 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3668 static inline rtx AT_addr (dw_attr_node *);
3669 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3670 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3674 unsigned long, bool);
3675 static inline const char *AT_lbl (dw_attr_node *);
3676 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3677 static const char *get_AT_low_pc (dw_die_ref);
3678 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3679 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3680 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3681 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3682 static bool is_c (void);
3683 static bool is_cxx (void);
3684 static bool is_cxx (const_tree);
3685 static bool is_fortran (void);
3686 static bool is_ada (void);
3687 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3688 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3689 static void add_child_die (dw_die_ref, dw_die_ref);
3690 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3691 static dw_die_ref lookup_type_die (tree);
3692 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3693 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3694 static void equate_type_number_to_die (tree, dw_die_ref);
3695 static dw_die_ref lookup_decl_die (tree);
3696 static var_loc_list *lookup_decl_loc (const_tree);
3697 static void equate_decl_number_to_die (tree, dw_die_ref);
3698 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3699 static void print_spaces (FILE *);
3700 static void print_die (dw_die_ref, FILE *);
3701 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3702 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3703 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3704 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3705 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3706 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3707 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3708 struct md5_ctx *, int *);
3709 struct checksum_attributes;
3710 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3711 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3712 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3713 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3714 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3715 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3716 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3717 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3718 static int is_type_die (dw_die_ref);
3719 static inline bool is_template_instantiation (dw_die_ref);
3720 static int is_declaration_die (dw_die_ref);
3721 static int should_move_die_to_comdat (dw_die_ref);
3722 static dw_die_ref clone_as_declaration (dw_die_ref);
3723 static dw_die_ref clone_die (dw_die_ref);
3724 static dw_die_ref clone_tree (dw_die_ref);
3725 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3726 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3727 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3728 static dw_die_ref generate_skeleton (dw_die_ref);
3729 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3730 dw_die_ref,
3731 dw_die_ref);
3732 static void break_out_comdat_types (dw_die_ref);
3733 static void copy_decls_for_unworthy_types (dw_die_ref);
3734
3735 static void add_sibling_attributes (dw_die_ref);
3736 static void output_location_lists (dw_die_ref);
3737 static int constant_size (unsigned HOST_WIDE_INT);
3738 static unsigned long size_of_die (dw_die_ref);
3739 static void calc_die_sizes (dw_die_ref);
3740 static void calc_base_type_die_sizes (void);
3741 static void mark_dies (dw_die_ref);
3742 static void unmark_dies (dw_die_ref);
3743 static void unmark_all_dies (dw_die_ref);
3744 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3745 static unsigned long size_of_aranges (void);
3746 static enum dwarf_form value_format (dw_attr_node *);
3747 static void output_value_format (dw_attr_node *);
3748 static void output_abbrev_section (void);
3749 static void output_die_abbrevs (unsigned long, dw_die_ref);
3750 static void output_die (dw_die_ref);
3751 static void output_compilation_unit_header (enum dwarf_unit_type);
3752 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3753 static void output_comdat_type_unit (comdat_type_node *, bool);
3754 static const char *dwarf2_name (tree, int);
3755 static void add_pubname (tree, dw_die_ref);
3756 static void add_enumerator_pubname (const char *, dw_die_ref);
3757 static void add_pubname_string (const char *, dw_die_ref);
3758 static void add_pubtype (tree, dw_die_ref);
3759 static void output_pubnames (vec<pubname_entry, va_gc> *);
3760 static void output_aranges (void);
3761 static unsigned int add_ranges (const_tree, bool = false);
3762 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3763 bool *, bool);
3764 static void output_ranges (void);
3765 static dw_line_info_table *new_line_info_table (void);
3766 static void output_line_info (bool);
3767 static void output_file_names (void);
3768 static dw_die_ref base_type_die (tree, bool);
3769 static int is_base_type (tree);
3770 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3771 static int decl_quals (const_tree);
3772 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3773 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3774 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static tree field_type (const_tree);
3798 static unsigned int simple_type_align_in_bits (const_tree);
3799 static unsigned int simple_decl_align_in_bits (const_tree);
3800 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3801 struct vlr_context;
3802 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3803 HOST_WIDE_INT *);
3804 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3805 dw_loc_list_ref);
3806 static void add_data_member_location_attribute (dw_die_ref, tree,
3807 struct vlr_context *);
3808 static bool add_const_value_attribute (dw_die_ref, rtx);
3809 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3810 static void insert_wide_int (const wide_int &, unsigned char *, int);
3811 static void insert_float (const_rtx, unsigned char *);
3812 static rtx rtl_for_decl_location (tree);
3813 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3814 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3815 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3816 static void add_name_attribute (dw_die_ref, const char *);
3817 static void add_desc_attribute (dw_die_ref, tree);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static void add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static dw_die_ref scope_die_for (tree, dw_die_ref);
3839 static inline int local_scope_p (dw_die_ref);
3840 static inline int class_scope_p (dw_die_ref);
3841 static inline int class_or_namespace_scope_p (dw_die_ref);
3842 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3843 static void add_calling_convention_attribute (dw_die_ref, tree);
3844 static const char *type_tag (const_tree);
3845 static tree member_declared_type (const_tree);
3846 #if 0
3847 static const char *decl_start_label (tree);
3848 #endif
3849 static void gen_array_type_die (tree, dw_die_ref);
3850 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3851 #if 0
3852 static void gen_entry_point_die (tree, dw_die_ref);
3853 #endif
3854 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3855 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3857 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3858 static void gen_formal_types_die (tree, dw_die_ref);
3859 static void gen_subprogram_die (tree, dw_die_ref);
3860 static void gen_variable_die (tree, tree, dw_die_ref);
3861 static void gen_const_die (tree, dw_die_ref);
3862 static void gen_label_die (tree, dw_die_ref);
3863 static void gen_lexical_block_die (tree, dw_die_ref);
3864 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3865 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3866 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3867 static dw_die_ref gen_compile_unit_die (const char *);
3868 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3869 static void gen_member_die (tree, dw_die_ref);
3870 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3871 enum debug_info_usage);
3872 static void gen_subroutine_type_die (tree, dw_die_ref);
3873 static void gen_typedef_die (tree, dw_die_ref);
3874 static void gen_type_die (tree, dw_die_ref);
3875 static void gen_block_die (tree, dw_die_ref);
3876 static void decls_for_scope (tree, dw_die_ref, bool = true);
3877 static bool is_naming_typedef_decl (const_tree);
3878 static inline dw_die_ref get_context_die (tree);
3879 static void gen_namespace_die (tree, dw_die_ref);
3880 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3881 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3882 static dw_die_ref force_decl_die (tree);
3883 static dw_die_ref force_type_die (tree);
3884 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3885 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3886 static struct dwarf_file_data * lookup_filename (const char *);
3887 static void retry_incomplete_types (void);
3888 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3889 static void gen_generic_params_dies (tree);
3890 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3891 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3892 static void splice_child_die (dw_die_ref, dw_die_ref);
3893 static int file_info_cmp (const void *, const void *);
3894 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3895 const char *, var_loc_view, const char *);
3896 static void output_loc_list (dw_loc_list_ref);
3897 static char *gen_internal_sym (const char *);
3898 static bool want_pubnames (void);
3899
3900 static void prune_unmark_dies (dw_die_ref);
3901 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3902 static void prune_unused_types_mark (dw_die_ref, int);
3903 static void prune_unused_types_walk (dw_die_ref);
3904 static void prune_unused_types_walk_attribs (dw_die_ref);
3905 static void prune_unused_types_prune (dw_die_ref);
3906 static void prune_unused_types (void);
3907 static int maybe_emit_file (struct dwarf_file_data *fd);
3908 static inline const char *AT_vms_delta1 (dw_attr_node *);
3909 static inline const char *AT_vms_delta2 (dw_attr_node *);
3910 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3911 const char *, const char *);
3912 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3913 static void gen_remaining_tmpl_value_param_die_attribute (void);
3914 static bool generic_type_p (tree);
3915 static void schedule_generic_params_dies_gen (tree t);
3916 static void gen_scheduled_generic_parms_dies (void);
3917 static void resolve_variable_values (void);
3918
3919 static const char *comp_dir_string (void);
3920
3921 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3922
3923 /* enum for tracking thread-local variables whose address is really an offset
3924 relative to the TLS pointer, which will need link-time relocation, but will
3925 not need relocation by the DWARF consumer. */
3926
3927 enum dtprel_bool
3928 {
3929 dtprel_false = 0,
3930 dtprel_true = 1
3931 };
3932
3933 /* Return the operator to use for an address of a variable. For dtprel_true, we
3934 use DW_OP_const*. For regular variables, which need both link-time
3935 relocation and consumer-level relocation (e.g., to account for shared objects
3936 loaded at a random address), we use DW_OP_addr*. */
3937
3938 static inline enum dwarf_location_atom
3939 dw_addr_op (enum dtprel_bool dtprel)
3940 {
3941 if (dtprel == dtprel_true)
3942 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3943 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3944 else
3945 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3946 }
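
/* For instance, reading the function above: a dtprel_true (TLS offset)
   operand on a target with DWARF2_ADDR_SIZE == 8 and no split debug info is
   emitted with DW_OP_const8u, with -gsplit-dwarf it uses
   dwarf_OP (DW_OP_constx), and an ordinary variable address uses DW_OP_addr
   or dwarf_OP (DW_OP_addrx) respectively.  */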
3947
3948 /* Return a pointer to a newly allocated address location description. If
3949 dwarf_split_debug_info is true, then record the address with the appropriate
3950 relocation. */
3951 static inline dw_loc_descr_ref
3952 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3953 {
3954 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3955
3956 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3957 ref->dw_loc_oprnd1.v.val_addr = addr;
3958 ref->dtprel = dtprel;
3959 if (dwarf_split_debug_info)
3960 ref->dw_loc_oprnd1.val_entry
3961 = add_addr_table_entry (addr,
3962 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3963 else
3964 ref->dw_loc_oprnd1.val_entry = NULL;
3965
3966 return ref;
3967 }
3968
3969 /* Section names used to hold DWARF debugging information. */
3970
3971 #ifndef DEBUG_INFO_SECTION
3972 #define DEBUG_INFO_SECTION ".debug_info"
3973 #endif
3974 #ifndef DEBUG_DWO_INFO_SECTION
3975 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_LTO_INFO_SECTION
3978 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3979 #endif
3980 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3981 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3982 #endif
3983 #ifndef DEBUG_ABBREV_SECTION
3984 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3985 #endif
3986 #ifndef DEBUG_LTO_ABBREV_SECTION
3987 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3988 #endif
3989 #ifndef DEBUG_DWO_ABBREV_SECTION
3990 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3993 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3994 #endif
3995 #ifndef DEBUG_ARANGES_SECTION
3996 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3997 #endif
3998 #ifndef DEBUG_ADDR_SECTION
3999 #define DEBUG_ADDR_SECTION ".debug_addr"
4000 #endif
4001 #ifndef DEBUG_MACINFO_SECTION
4002 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4003 #endif
4004 #ifndef DEBUG_LTO_MACINFO_SECTION
4005 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4006 #endif
4007 #ifndef DEBUG_DWO_MACINFO_SECTION
4008 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4009 #endif
4010 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4011 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4012 #endif
4013 #ifndef DEBUG_MACRO_SECTION
4014 #define DEBUG_MACRO_SECTION ".debug_macro"
4015 #endif
4016 #ifndef DEBUG_LTO_MACRO_SECTION
4017 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4018 #endif
4019 #ifndef DEBUG_DWO_MACRO_SECTION
4020 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4021 #endif
4022 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4023 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4024 #endif
4025 #ifndef DEBUG_LINE_SECTION
4026 #define DEBUG_LINE_SECTION ".debug_line"
4027 #endif
4028 #ifndef DEBUG_LTO_LINE_SECTION
4029 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4030 #endif
4031 #ifndef DEBUG_DWO_LINE_SECTION
4032 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4033 #endif
4034 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4035 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4036 #endif
4037 #ifndef DEBUG_LOC_SECTION
4038 #define DEBUG_LOC_SECTION ".debug_loc"
4039 #endif
4040 #ifndef DEBUG_DWO_LOC_SECTION
4041 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4042 #endif
4043 #ifndef DEBUG_LOCLISTS_SECTION
4044 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4045 #endif
4046 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4047 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4048 #endif
4049 #ifndef DEBUG_PUBNAMES_SECTION
4050 #define DEBUG_PUBNAMES_SECTION \
4051 ((debug_generate_pub_sections == 2) \
4052 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4053 #endif
4054 #ifndef DEBUG_PUBTYPES_SECTION
4055 #define DEBUG_PUBTYPES_SECTION \
4056 ((debug_generate_pub_sections == 2) \
4057 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4058 #endif
4059 #ifndef DEBUG_STR_OFFSETS_SECTION
4060 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4061 #endif
4062 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4063 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4064 #endif
4065 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4066 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4067 #endif
4068 #ifndef DEBUG_STR_SECTION
4069 #define DEBUG_STR_SECTION ".debug_str"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_SECTION
4072 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4073 #endif
4074 #ifndef DEBUG_STR_DWO_SECTION
4075 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4076 #endif
4077 #ifndef DEBUG_LTO_STR_DWO_SECTION
4078 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4079 #endif
4080 #ifndef DEBUG_RANGES_SECTION
4081 #define DEBUG_RANGES_SECTION ".debug_ranges"
4082 #endif
4083 #ifndef DEBUG_RNGLISTS_SECTION
4084 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4085 #endif
4086 #ifndef DEBUG_LINE_STR_SECTION
4087 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4088 #endif
4089 #ifndef DEBUG_LTO_LINE_STR_SECTION
4090 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4091 #endif
4092
4093 /* Standard ELF section names for compiled code and data. */
4094 #ifndef TEXT_SECTION_NAME
4095 #define TEXT_SECTION_NAME ".text"
4096 #endif
4097
4098 /* Section flags for .debug_str section. */
4099 #define DEBUG_STR_SECTION_FLAGS \
4100 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4101 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4102 : SECTION_DEBUG)
4103
4104 /* Section flags for .debug_str.dwo section. */
4105 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4106
4107 /* Attribute used to refer to the macro section. */
4108 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4109 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4110
4111 /* Labels we insert at the beginning of sections, which we can reference
4112 instead of the section names themselves. */
4113
4114 #ifndef TEXT_SECTION_LABEL
4115 #define TEXT_SECTION_LABEL "Ltext"
4116 #endif
4117 #ifndef COLD_TEXT_SECTION_LABEL
4118 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4119 #endif
4120 #ifndef DEBUG_LINE_SECTION_LABEL
4121 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4122 #endif
4123 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4124 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4125 #endif
4126 #ifndef DEBUG_INFO_SECTION_LABEL
4127 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4128 #endif
4129 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4130 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4131 #endif
4132 #ifndef DEBUG_ABBREV_SECTION_LABEL
4133 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4134 #endif
4135 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4136 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4137 #endif
4138 #ifndef DEBUG_ADDR_SECTION_LABEL
4139 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4140 #endif
4141 #ifndef DEBUG_LOC_SECTION_LABEL
4142 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4143 #endif
4144 #ifndef DEBUG_RANGES_SECTION_LABEL
4145 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4146 #endif
4147 #ifndef DEBUG_MACINFO_SECTION_LABEL
4148 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4149 #endif
4150 #ifndef DEBUG_MACRO_SECTION_LABEL
4151 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4152 #endif
4153 #define SKELETON_COMP_DIE_ABBREV 1
4154 #define SKELETON_TYPE_DIE_ABBREV 2
4155
4156 /* Definitions of defaults for formats and names of various special
4157 (artificial) labels which may be generated within this file (when the -g
4158 option is used and DWARF2_DEBUGGING_INFO is in effect).
4159 If necessary, these may be overridden from within the tm.h file, but
4160 typically, overriding these defaults is unnecessary. */
4161
4162 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177
4178 #ifndef TEXT_END_LABEL
4179 #define TEXT_END_LABEL "Letext"
4180 #endif
4181 #ifndef COLD_END_LABEL
4182 #define COLD_END_LABEL "Letext_cold"
4183 #endif
4184 #ifndef BLOCK_BEGIN_LABEL
4185 #define BLOCK_BEGIN_LABEL "LBB"
4186 #endif
4187 #ifndef BLOCK_INLINE_ENTRY_LABEL
4188 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4189 #endif
4190 #ifndef BLOCK_END_LABEL
4191 #define BLOCK_END_LABEL "LBE"
4192 #endif
4193 #ifndef LINE_CODE_LABEL
4194 #define LINE_CODE_LABEL "LM"
4195 #endif
4196
4197 \f
4198 /* Return the root of the DIE's built for the current compilation unit. */
4199 static dw_die_ref
4200 comp_unit_die (void)
4201 {
4202 if (!single_comp_unit_die)
4203 single_comp_unit_die = gen_compile_unit_die (NULL);
4204 return single_comp_unit_die;
4205 }
4206
4207 /* We allow a language front-end to designate a function that is to be
4208 called to "demangle" any name before it is put into a DIE. */
4209
4210 static const char *(*demangle_name_func) (const char *);
4211
4212 void
4213 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4214 {
4215 demangle_name_func = func;
4216 }
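
/* A minimal usage sketch (the hook name "my_lang_demangle" below is
   hypothetical, not an existing front-end function); the hook maps a
   linkage name to the spelling that should appear in the DIE:

     static const char *
     my_lang_demangle (const char *name)
     {
       return name;
     }

     dwarf2out_set_demangle_name_func (my_lang_demangle);
 */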
4217
4218 /* Test if rtl node points to a pseudo register. */
4219
4220 static inline int
4221 is_pseudo_reg (const_rtx rtl)
4222 {
4223 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4224 || (GET_CODE (rtl) == SUBREG
4225 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4226 }
4227
4228 /* Return a reference to a type, with its const and volatile qualifiers
4229 removed. */
4230
4231 static inline tree
4232 type_main_variant (tree type)
4233 {
4234 type = TYPE_MAIN_VARIANT (type);
4235
4236 /* ??? There really should be only one main variant among any group of
4237 variants of a given type (and all of the MAIN_VARIANT values for all
4238 members of the group should point to that one type) but sometimes the C
4239 front-end messes this up for array types, so we work around that bug
4240 here. */
4241 if (TREE_CODE (type) == ARRAY_TYPE)
4242 while (type != TYPE_MAIN_VARIANT (type))
4243 type = TYPE_MAIN_VARIANT (type);
4244
4245 return type;
4246 }
4247
4248 /* Return nonzero if the given type node represents a tagged type. */
4249
4250 static inline int
4251 is_tagged_type (const_tree type)
4252 {
4253 enum tree_code code = TREE_CODE (type);
4254
4255 return (code == RECORD_TYPE || code == UNION_TYPE
4256 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4257 }
4258
4259 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4260
4261 static void
4262 get_ref_die_offset_label (char *label, dw_die_ref ref)
4263 {
4264 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4265 }
4266
4267 /* Return die_offset of a DIE reference to a base type. */
4268
4269 static unsigned long int
4270 get_base_type_offset (dw_die_ref ref)
4271 {
4272 if (ref->die_offset)
4273 return ref->die_offset;
4274 if (comp_unit_die ()->die_abbrev)
4275 {
4276 calc_base_type_die_sizes ();
4277 gcc_assert (ref->die_offset);
4278 }
4279 return ref->die_offset;
4280 }
4281
4282 /* Return die_offset of a DIE reference other than base type. */
4283
4284 static unsigned long int
4285 get_ref_die_offset (dw_die_ref ref)
4286 {
4287 gcc_assert (ref->die_offset);
4288 return ref->die_offset;
4289 }
4290
4291 /* Convert a DIE tag into its string name. */
4292
4293 static const char *
4294 dwarf_tag_name (unsigned int tag)
4295 {
4296 const char *name = get_DW_TAG_name (tag);
4297
4298 if (name != NULL)
4299 return name;
4300
4301 return "DW_TAG_<unknown>";
4302 }
4303
4304 /* Convert a DWARF attribute code into its string name. */
4305
4306 static const char *
4307 dwarf_attr_name (unsigned int attr)
4308 {
4309 const char *name;
4310
4311 switch (attr)
4312 {
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_prologue:
4315 return "DW_AT_HP_prologue";
4316 #else
4317 case DW_AT_MIPS_loop_unroll_factor:
4318 return "DW_AT_MIPS_loop_unroll_factor";
4319 #endif
4320
4321 #if VMS_DEBUGGING_INFO
4322 case DW_AT_HP_epilogue:
4323 return "DW_AT_HP_epilogue";
4324 #else
4325 case DW_AT_MIPS_stride:
4326 return "DW_AT_MIPS_stride";
4327 #endif
4328 }
4329
4330 name = get_DW_AT_name (attr);
4331
4332 if (name != NULL)
4333 return name;
4334
4335 return "DW_AT_<unknown>";
4336 }
4337
4338 /* Convert a DWARF value form code into its string name. */
4339
4340 static const char *
4341 dwarf_form_name (unsigned int form)
4342 {
4343 const char *name = get_DW_FORM_name (form);
4344
4345 if (name != NULL)
4346 return name;
4347
4348 return "DW_FORM_<unknown>";
4349 }
4350 \f
4351 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4352 instance of an inlined instance of a decl which is local to an inline
4353 function, so we have to trace all of the way back through the origin chain
4354 to find out what sort of node actually served as the original seed for the
4355 given block. */
4356
4357 static tree
4358 decl_ultimate_origin (const_tree decl)
4359 {
4360 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4361 return NULL_TREE;
4362
4363 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4364 we're trying to output the abstract instance of this function. */
4365 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4366 return NULL_TREE;
4367
4368 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4369 most distant ancestor, this should never happen. */
4370 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4371
4372 return DECL_ABSTRACT_ORIGIN (decl);
4373 }
4374
4375 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4376 of a virtual function may refer to a base class, so we check the 'this'
4377 parameter. */
4378
4379 static tree
4380 decl_class_context (tree decl)
4381 {
4382 tree context = NULL_TREE;
4383
4384 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4385 context = DECL_CONTEXT (decl);
4386 else
4387 context = TYPE_MAIN_VARIANT
4388 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4389
4390 if (context && !TYPE_P (context))
4391 context = NULL_TREE;
4392
4393 return context;
4394 }
4395 \f
4396 /* Add an attribute/value pair to a DIE. */
4397
4398 static inline void
4399 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4400 {
4401 /* Maybe this should be an assert? */
4402 if (die == NULL)
4403 return;
4404
4405 if (flag_checking)
4406 {
4407 /* Check we do not add duplicate attrs. Can't use get_AT here
4408 because that recurses to the specification/abstract origin DIE. */
4409 dw_attr_node *a;
4410 unsigned ix;
4411 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4412 gcc_assert (a->dw_attr != attr->dw_attr);
4413 }
4414
4415 vec_safe_reserve (die->die_attr, 1);
4416 vec_safe_push (die->die_attr, *attr);
4417 }
4418
4419 static inline enum dw_val_class
4420 AT_class (dw_attr_node *a)
4421 {
4422 return a->dw_attr_val.val_class;
4423 }
4424
4425 /* Return the index for any attribute that will be referenced with a
4426 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4427 indices are stored in dw_attr_val.v.val_str, which is reference-counted
4428 for pruning. */
4429
4430 static inline unsigned int
4431 AT_index (dw_attr_node *a)
4432 {
4433 if (AT_class (a) == dw_val_class_str)
4434 return a->dw_attr_val.v.val_str->index;
4435 else if (a->dw_attr_val.val_entry != NULL)
4436 return a->dw_attr_val.val_entry->index;
4437 return NOT_INDEXED;
4438 }
4439
4440 /* Add a flag value attribute to a DIE. */
4441
4442 static inline void
4443 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4444 {
4445 dw_attr_node attr;
4446
4447 attr.dw_attr = attr_kind;
4448 attr.dw_attr_val.val_class = dw_val_class_flag;
4449 attr.dw_attr_val.val_entry = NULL;
4450 attr.dw_attr_val.v.val_flag = flag;
4451 add_dwarf_attr (die, &attr);
4452 }
4453
4454 static inline unsigned
4455 AT_flag (dw_attr_node *a)
4456 {
4457 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4458 return a->dw_attr_val.v.val_flag;
4459 }
4460
4461 /* Add a signed integer attribute value to a DIE. */
4462
4463 static inline void
4464 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4465 {
4466 dw_attr_node attr;
4467
4468 attr.dw_attr = attr_kind;
4469 attr.dw_attr_val.val_class = dw_val_class_const;
4470 attr.dw_attr_val.val_entry = NULL;
4471 attr.dw_attr_val.v.val_int = int_val;
4472 add_dwarf_attr (die, &attr);
4473 }
4474
4475 static inline HOST_WIDE_INT
4476 AT_int (dw_attr_node *a)
4477 {
4478 gcc_assert (a && (AT_class (a) == dw_val_class_const
4479 || AT_class (a) == dw_val_class_const_implicit));
4480 return a->dw_attr_val.v.val_int;
4481 }
4482
4483 /* Add an unsigned integer attribute value to a DIE. */
4484
4485 static inline void
4486 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4487 unsigned HOST_WIDE_INT unsigned_val)
4488 {
4489 dw_attr_node attr;
4490
4491 attr.dw_attr = attr_kind;
4492 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4493 attr.dw_attr_val.val_entry = NULL;
4494 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4495 add_dwarf_attr (die, &attr);
4496 }
4497
4498 static inline unsigned HOST_WIDE_INT
4499 AT_unsigned (dw_attr_node *a)
4500 {
4501 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4502 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4503 return a->dw_attr_val.v.val_unsigned;
4504 }
4505
4506 /* Add an unsigned wide integer attribute value to a DIE. */
4507
4508 static inline void
4509 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4510 const wide_int& w)
4511 {
4512 dw_attr_node attr;
4513
4514 attr.dw_attr = attr_kind;
4515 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4516 attr.dw_attr_val.val_entry = NULL;
4517 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4518 *attr.dw_attr_val.v.val_wide = w;
4519 add_dwarf_attr (die, &attr);
4520 }
4521
4522 /* Add an unsigned double integer attribute value to a DIE. */
4523
4524 static inline void
4525 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4526 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4527 {
4528 dw_attr_node attr;
4529
4530 attr.dw_attr = attr_kind;
4531 attr.dw_attr_val.val_class = dw_val_class_const_double;
4532 attr.dw_attr_val.val_entry = NULL;
4533 attr.dw_attr_val.v.val_double.high = high;
4534 attr.dw_attr_val.v.val_double.low = low;
4535 add_dwarf_attr (die, &attr);
4536 }
4537
4538 /* Add a vector attribute value (e.g. a floating-point constant) to a DIE. */
4539
4540 static inline void
4541 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4542 unsigned int length, unsigned int elt_size, unsigned char *array)
4543 {
4544 dw_attr_node attr;
4545
4546 attr.dw_attr = attr_kind;
4547 attr.dw_attr_val.val_class = dw_val_class_vec;
4548 attr.dw_attr_val.val_entry = NULL;
4549 attr.dw_attr_val.v.val_vec.length = length;
4550 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4551 attr.dw_attr_val.v.val_vec.array = array;
4552 add_dwarf_attr (die, &attr);
4553 }
4554
4555 /* Add an 8-byte data attribute value to a DIE. */
4556
4557 static inline void
4558 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4559 unsigned char data8[8])
4560 {
4561 dw_attr_node attr;
4562
4563 attr.dw_attr = attr_kind;
4564 attr.dw_attr_val.val_class = dw_val_class_data8;
4565 attr.dw_attr_val.val_entry = NULL;
4566 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4571 dwarf_split_debug_info, address attributes in dies destined for the
4572 final executable have force_direct set to avoid using indexed
4573 references. */
4574
4575 static inline void
4576 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4577 bool force_direct)
4578 {
4579 dw_attr_node attr;
4580 char * lbl_id;
4581
4582 lbl_id = xstrdup (lbl_low);
4583 attr.dw_attr = DW_AT_low_pc;
4584 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4585 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4586 if (dwarf_split_debug_info && !force_direct)
4587 attr.dw_attr_val.val_entry
4588 = add_addr_table_entry (lbl_id, ate_kind_label);
4589 else
4590 attr.dw_attr_val.val_entry = NULL;
4591 add_dwarf_attr (die, &attr);
4592
4593 attr.dw_attr = DW_AT_high_pc;
4594 if (dwarf_version < 4)
4595 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4596 else
4597 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4598 lbl_id = xstrdup (lbl_high);
4599 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4600 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4601 && dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607 }
4608
4609 /* Hash and equality functions for debug_str_hash. */
4610
4611 hashval_t
4612 indirect_string_hasher::hash (indirect_string_node *x)
4613 {
4614 return htab_hash_string (x->str);
4615 }
4616
4617 bool
4618 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4619 {
4620 return strcmp (x1->str, x2) == 0;
4621 }
4622
4623 /* Add STR to the given string hash table. */
4624
4625 static struct indirect_string_node *
4626 find_AT_string_in_table (const char *str,
4627 hash_table<indirect_string_hasher> *table,
4628 enum insert_option insert = INSERT)
4629 {
4630 struct indirect_string_node *node;
4631
4632 indirect_string_node **slot
4633 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4634 if (*slot == NULL)
4635 {
4636 node = ggc_cleared_alloc<indirect_string_node> ();
4637 node->str = ggc_strdup (str);
4638 *slot = node;
4639 }
4640 else
4641 node = *slot;
4642
4643 node->refcount++;
4644 return node;
4645 }
4646
4647 /* Add STR to the indirect string hash table. */
4648
4649 static struct indirect_string_node *
4650 find_AT_string (const char *str, enum insert_option insert = INSERT)
4651 {
4652 if (! debug_str_hash)
4653 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4654
4655 return find_AT_string_in_table (str, debug_str_hash, insert);
4656 }
4657
4658 /* Add a string attribute value to a DIE. */
4659
4660 static inline void
4661 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4662 {
4663 dw_attr_node attr;
4664 struct indirect_string_node *node;
4665
4666 node = find_AT_string (str);
4667
4668 attr.dw_attr = attr_kind;
4669 attr.dw_attr_val.val_class = dw_val_class_str;
4670 attr.dw_attr_val.val_entry = NULL;
4671 attr.dw_attr_val.v.val_str = node;
4672 add_dwarf_attr (die, &attr);
4673 }
4674
4675 static inline const char *
4676 AT_string (dw_attr_node *a)
4677 {
4678 gcc_assert (a && AT_class (a) == dw_val_class_str);
4679 return a->dw_attr_val.v.val_str->str;
4680 }
4681
4682 /* Call this function directly to bypass AT_string_form's logic to put
4683 the string inline in the die. */
4684
4685 static void
4686 set_indirect_string (struct indirect_string_node *node)
4687 {
4688 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4689 /* If the string is already indirect, this is a no-op. */
4690 if (node->form == DW_FORM_strp
4691 || node->form == DW_FORM_line_strp
4692 || node->form == dwarf_FORM (DW_FORM_strx))
4693 {
4694 gcc_assert (node->label);
4695 return;
4696 }
4697 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4698 ++dw2_string_counter;
4699 node->label = xstrdup (label);
4700
4701 if (!dwarf_split_debug_info)
4702 {
4703 node->form = DW_FORM_strp;
4704 node->index = NOT_INDEXED;
4705 }
4706 else
4707 {
4708 node->form = dwarf_FORM (DW_FORM_strx);
4709 node->index = NO_INDEX_ASSIGNED;
4710 }
4711 }
4712
4713 /* A helper function for dwarf2out_finish, called to reset indirect
4714 string decisions done for early LTO dwarf output before fat object
4715 dwarf output. */
4716
4717 int
4718 reset_indirect_string (indirect_string_node **h, void *)
4719 {
4720 struct indirect_string_node *node = *h;
4721 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4722 {
4723 free (node->label);
4724 node->label = NULL;
4725 node->form = (dwarf_form) 0;
4726 node->index = 0;
4727 }
4728 return 1;
4729 }
4730
4731 /* Find out whether a string should be output inline in DIE
4732 or out-of-line in .debug_str section. */
4733
4734 static enum dwarf_form
4735 find_string_form (struct indirect_string_node *node)
4736 {
4737 unsigned int len;
4738
4739 if (node->form)
4740 return node->form;
4741
4742 len = strlen (node->str) + 1;
4743
4744 /* If the string is no longer than the size of the reference, it is
4745 always better to put it inline. */
4746 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4747 return node->form = DW_FORM_string;
4748
4749 /* If we cannot expect the linker to merge strings in .debug_str
4750 section, only put it into .debug_str if it is worth even in this
4751 single module. */
4752 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4753 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4754 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4755 return node->form = DW_FORM_string;
4756
4757 set_indirect_string (node);
4758
4759 return node->form;
4760 }
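
/* Worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4 and
   no mergeable-string support in the linker: the string "abc" (length 4
   including the terminating NUL) is always emitted inline; a 20-byte string
   referenced once also stays inline because (20 - 4) * 1 <= 20, but the same
   string referenced three times goes to .debug_str since (20 - 4) * 3 > 20.  */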
4761
4762 /* Find out whether the string referenced from the attribute should be
4763 output inline in DIE or out-of-line in .debug_str section. */
4764
4765 static enum dwarf_form
4766 AT_string_form (dw_attr_node *a)
4767 {
4768 gcc_assert (a && AT_class (a) == dw_val_class_str);
4769 return find_string_form (a->dw_attr_val.v.val_str);
4770 }
4771
4772 /* Add a DIE reference attribute value to a DIE. */
4773
4774 static inline void
4775 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4776 {
4777 dw_attr_node attr;
4778 gcc_checking_assert (targ_die != NULL);
4779
4780 /* With LTO we can end up trying to reference something we didn't create
4781 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4782 if (targ_die == NULL)
4783 return;
4784
4785 attr.dw_attr = attr_kind;
4786 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4787 attr.dw_attr_val.val_entry = NULL;
4788 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4789 attr.dw_attr_val.v.val_die_ref.external = 0;
4790 add_dwarf_attr (die, &attr);
4791 }
4792
4793 /* Change DIE reference REF to point to NEW_DIE instead. */
4794
4795 static inline void
4796 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4797 {
4798 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4799 ref->dw_attr_val.v.val_die_ref.die = new_die;
4800 ref->dw_attr_val.v.val_die_ref.external = 0;
4801 }
4802
4803 /* Add an AT_specification attribute to a DIE, and also make the back
4804 pointer from the specification to the definition. */
4805
4806 static inline void
4807 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4808 {
4809 add_AT_die_ref (die, DW_AT_specification, targ_die);
4810 gcc_assert (!targ_die->die_definition);
4811 targ_die->die_definition = die;
4812 }
4813
4814 static inline dw_die_ref
4815 AT_ref (dw_attr_node *a)
4816 {
4817 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4818 return a->dw_attr_val.v.val_die_ref.die;
4819 }
4820
4821 static inline int
4822 AT_ref_external (dw_attr_node *a)
4823 {
4824 if (a && AT_class (a) == dw_val_class_die_ref)
4825 return a->dw_attr_val.v.val_die_ref.external;
4826
4827 return 0;
4828 }
4829
4830 static inline void
4831 set_AT_ref_external (dw_attr_node *a, int i)
4832 {
4833 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4834 a->dw_attr_val.v.val_die_ref.external = i;
4835 }
4836
4837 /* Add a location description attribute value to a DIE. */
4838
4839 static inline void
4840 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4841 {
4842 dw_attr_node attr;
4843
4844 attr.dw_attr = attr_kind;
4845 attr.dw_attr_val.val_class = dw_val_class_loc;
4846 attr.dw_attr_val.val_entry = NULL;
4847 attr.dw_attr_val.v.val_loc = loc;
4848 add_dwarf_attr (die, &attr);
4849 }
4850
4851 static inline dw_loc_descr_ref
4852 AT_loc (dw_attr_node *a)
4853 {
4854 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4855 return a->dw_attr_val.v.val_loc;
4856 }
4857
4858 static inline void
4859 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4860 {
4861 dw_attr_node attr;
4862
4863 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4864 return;
4865
4866 attr.dw_attr = attr_kind;
4867 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4868 attr.dw_attr_val.val_entry = NULL;
4869 attr.dw_attr_val.v.val_loc_list = loc_list;
4870 add_dwarf_attr (die, &attr);
4871 have_location_lists = true;
4872 }
4873
4874 static inline dw_loc_list_ref
4875 AT_loc_list (dw_attr_node *a)
4876 {
4877 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4878 return a->dw_attr_val.v.val_loc_list;
4879 }
4880
4881 /* Add a view list attribute to DIE. It must have a DW_AT_location
4882 attribute, because the view list complements the location list. */
4883
4884 static inline void
4885 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4886 {
4887 dw_attr_node attr;
4888
4889 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4890 return;
4891
4892 attr.dw_attr = attr_kind;
4893 attr.dw_attr_val.val_class = dw_val_class_view_list;
4894 attr.dw_attr_val.val_entry = NULL;
4895 attr.dw_attr_val.v.val_view_list = die;
4896 add_dwarf_attr (die, &attr);
4897 gcc_checking_assert (get_AT (die, DW_AT_location));
4898 gcc_assert (have_location_lists);
4899 }
4900
4901 /* Return a pointer to the location list referenced by the attribute.
4902 If the named attribute is a view list, look up the corresponding
4903 DW_AT_location attribute and return its location list. */
4904
4905 static inline dw_loc_list_ref *
4906 AT_loc_list_ptr (dw_attr_node *a)
4907 {
4908 gcc_assert (a);
4909 switch (AT_class (a))
4910 {
4911 case dw_val_class_loc_list:
4912 return &a->dw_attr_val.v.val_loc_list;
4913 case dw_val_class_view_list:
4914 {
4915 dw_attr_node *l;
4916 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4917 if (!l)
4918 return NULL;
4919 gcc_checking_assert (l + 1 == a);
4920 return AT_loc_list_ptr (l);
4921 }
4922 default:
4923 gcc_unreachable ();
4924 }
4925 }
4926
4927 /* Return the location attribute value associated with a view list
4928 attribute value. */
4929
4930 static inline dw_val_node *
4931 view_list_to_loc_list_val_node (dw_val_node *val)
4932 {
4933 gcc_assert (val->val_class == dw_val_class_view_list);
4934 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4935 if (!loc)
4936 return NULL;
4937 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4938 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4939 return &loc->dw_attr_val;
4940 }
4941
4942 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4943 {
4944 static hashval_t hash (addr_table_entry *);
4945 static bool equal (addr_table_entry *, addr_table_entry *);
4946 };
4947
4948 /* Table of entries into the .debug_addr section. */
4949
4950 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4951
4952 /* Hash an address_table_entry. */
4953
4954 hashval_t
4955 addr_hasher::hash (addr_table_entry *a)
4956 {
4957 inchash::hash hstate;
4958 switch (a->kind)
4959 {
4960 case ate_kind_rtx:
4961 hstate.add_int (0);
4962 break;
4963 case ate_kind_rtx_dtprel:
4964 hstate.add_int (1);
4965 break;
4966 case ate_kind_label:
4967 return htab_hash_string (a->addr.label);
4968 default:
4969 gcc_unreachable ();
4970 }
4971 inchash::add_rtx (a->addr.rtl, hstate);
4972 return hstate.end ();
4973 }
4974
4975 /* Determine equality for two address_table_entries. */
4976
4977 bool
4978 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4979 {
4980 if (a1->kind != a2->kind)
4981 return 0;
4982 switch (a1->kind)
4983 {
4984 case ate_kind_rtx:
4985 case ate_kind_rtx_dtprel:
4986 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4987 case ate_kind_label:
4988 return strcmp (a1->addr.label, a2->addr.label) == 0;
4989 default:
4990 gcc_unreachable ();
4991 }
4992 }
4993
4994 /* Initialize an addr_table_entry. */
4995
4996 void
4997 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4998 {
4999 e->kind = kind;
5000 switch (kind)
5001 {
5002 case ate_kind_rtx:
5003 case ate_kind_rtx_dtprel:
5004 e->addr.rtl = (rtx) addr;
5005 break;
5006 case ate_kind_label:
5007 e->addr.label = (char *) addr;
5008 break;
5009 }
5010 e->refcount = 0;
5011 e->index = NO_INDEX_ASSIGNED;
5012 }
5013
5014 /* Add (or find) an address table entry for ADDR of kind KIND in the
5015 table, bumping its refcount. Defer setting an index until output time. */
5016
5017 static addr_table_entry *
5018 add_addr_table_entry (void *addr, enum ate_kind kind)
5019 {
5020 addr_table_entry *node;
5021 addr_table_entry finder;
5022
5023 gcc_assert (dwarf_split_debug_info);
5024 if (! addr_index_table)
5025 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5026 init_addr_table_entry (&finder, kind, addr);
5027 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5028
5029 if (*slot == HTAB_EMPTY_ENTRY)
5030 {
5031 node = ggc_cleared_alloc<addr_table_entry> ();
5032 init_addr_table_entry (node, kind, addr);
5033 *slot = node;
5034 }
5035 else
5036 node = *slot;
5037
5038 node->refcount++;
5039 return node;
5040 }
5041
5042 /* Remove an entry from the addr table by decrementing its refcount.
5043 Strictly, decrementing the refcount would be enough, but the
5044 assertion that the entry is actually in the table has found
5045 bugs. */
5046
5047 static void
5048 remove_addr_table_entry (addr_table_entry *entry)
5049 {
5050 gcc_assert (dwarf_split_debug_info && addr_index_table);
5051 /* After an index is assigned, the table is frozen. */
5052 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5053 entry->refcount--;
5054 }
5055
5056 /* Given a location list, remove all addresses it refers to from the
5057 address_table. */
5058
5059 static void
5060 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5061 {
5062 for (; descr; descr = descr->dw_loc_next)
5063 if (descr->dw_loc_oprnd1.val_entry != NULL)
5064 {
5065 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5066 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5067 }
5068 }
5069
5070 /* A helper function for dwarf2out_finish called through
5071 htab_traverse. Assign an addr_table_entry its index. All entries
5072 must be collected into the table when this function is called,
5073 because the indexing code relies on htab_traverse to traverse nodes
5074 in the same order for each run. */
5075
5076 int
5077 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5078 {
5079 addr_table_entry *node = *h;
5080
5081 /* Don't index unreferenced nodes. */
5082 if (node->refcount == 0)
5083 return 1;
5084
5085 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5086 node->index = *index;
5087 *index += 1;
5088
5089 return 1;
5090 }
5091
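/* The three routines above (add_addr_table_entry, remove_addr_table_entry
   and index_addr_table_entry) make up the lifecycle of a .debug_addr pool
   entry under -gsplit-dwarf.  The block below is an illustrative sketch
   only, not part of GCC: the function name and USE_LABEL are hypothetical,
   and the traverse call is an assumption about how the table is walked at
   output time.  */
#if 0
static void
addr_pool_lifecycle_sketch (char *use_label)
{
  /* Each use of an address finds or creates the entry and bumps its
     refcount; no index is assigned yet (NO_INDEX_ASSIGNED).  */
  addr_table_entry *e = add_addr_table_entry (use_label, ate_kind_label);

  /* If the referencing attribute is later dropped, the use is released;
     the entry stays in the table with a smaller refcount.  */
  remove_addr_table_entry (e);

  /* At output time the table is walked once, and only entries that are
     still referenced receive consecutive indices.  */
  unsigned int index = 0;
  addr_index_table->traverse_noresize
    <unsigned int *, index_addr_table_entry> (&index);
}
#endif
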
5092 /* Add an address constant attribute value to a DIE. When using
5093 dwarf_split_debug_info, address attributes in dies destined for the
5094 final executable should be direct references--setting the parameter
5095 force_direct ensures this behavior. */
5096
5097 static inline void
5098 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5099 bool force_direct)
5100 {
5101 dw_attr_node attr;
5102
5103 attr.dw_attr = attr_kind;
5104 attr.dw_attr_val.val_class = dw_val_class_addr;
5105 attr.dw_attr_val.v.val_addr = addr;
5106 if (dwarf_split_debug_info && !force_direct)
5107 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5108 else
5109 attr.dw_attr_val.val_entry = NULL;
5110 add_dwarf_attr (die, &attr);
5111 }
5112
5113 /* Get the RTX from an address DIE attribute.  */
5114
5115 static inline rtx
5116 AT_addr (dw_attr_node *a)
5117 {
5118 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5119 return a->dw_attr_val.v.val_addr;
5120 }
5121
5122 /* Add a file attribute value to a DIE. */
5123
5124 static inline void
5125 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5126 struct dwarf_file_data *fd)
5127 {
5128 dw_attr_node attr;
5129
5130 attr.dw_attr = attr_kind;
5131 attr.dw_attr_val.val_class = dw_val_class_file;
5132 attr.dw_attr_val.val_entry = NULL;
5133 attr.dw_attr_val.v.val_file = fd;
5134 add_dwarf_attr (die, &attr);
5135 }
5136
5137 /* Get the dwarf_file_data from a file DIE attribute. */
5138
5139 static inline struct dwarf_file_data *
5140 AT_file (dw_attr_node *a)
5141 {
5142 gcc_assert (a && (AT_class (a) == dw_val_class_file
5143 || AT_class (a) == dw_val_class_file_implicit));
5144 return a->dw_attr_val.v.val_file;
5145 }
5146
5147 /* Add a vms delta attribute value to a DIE. */
5148
5149 static inline void
5150 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5151 const char *lbl1, const char *lbl2)
5152 {
5153 dw_attr_node attr;
5154
5155 attr.dw_attr = attr_kind;
5156 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5157 attr.dw_attr_val.val_entry = NULL;
5158 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5159 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5160 add_dwarf_attr (die, &attr);
5161 }
5162
5163 /* Add a symbolic view identifier attribute value to a DIE. */
5164
5165 static inline void
5166 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5167 const char *view_label)
5168 {
5169 dw_attr_node attr;
5170
5171 attr.dw_attr = attr_kind;
5172 attr.dw_attr_val.val_class = dw_val_class_symview;
5173 attr.dw_attr_val.val_entry = NULL;
5174 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a label identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *lbl_id)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5190 if (dwarf_split_debug_info)
5191 attr.dw_attr_val.val_entry
5192 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5193 ate_kind_label);
5194 add_dwarf_attr (die, &attr);
5195 }
5196
5197 /* Add a section offset attribute value to a DIE, an offset into the
5198 debug_line section. */
5199
5200 static inline void
5201 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5202 const char *label)
5203 {
5204 dw_attr_node attr;
5205
5206 attr.dw_attr = attr_kind;
5207 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5208 attr.dw_attr_val.val_entry = NULL;
5209 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5210 add_dwarf_attr (die, &attr);
5211 }
5212
5213 /* Add a section offset attribute value to a DIE, an offset into the
5214 debug_macinfo section. */
5215
5216 static inline void
5217 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5218 const char *label)
5219 {
5220 dw_attr_node attr;
5221
5222 attr.dw_attr = attr_kind;
5223 attr.dw_attr_val.val_class = dw_val_class_macptr;
5224 attr.dw_attr_val.val_entry = NULL;
5225 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5226 add_dwarf_attr (die, &attr);
5227 }
5228
5229 /* Add a range_list attribute value to a DIE. When using
5230 dwarf_split_debug_info, address attributes in dies destined for the
5231 final executable should be direct references--setting the parameter
5232 force_direct ensures this behavior. */
5233
5234 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5235 #define RELOCATED_OFFSET (NULL)
5236
5237 static void
5238 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5239 long unsigned int offset, bool force_direct)
5240 {
5241 dw_attr_node attr;
5242
5243 attr.dw_attr = attr_kind;
5244 attr.dw_attr_val.val_class = dw_val_class_range_list;
5245 /* For the range_list attribute, use val_entry to store whether the
5246 offset should follow split-debug-info or normal semantics. This
5247 value is read in output_range_list_offset. */
5248 if (dwarf_split_debug_info && !force_direct)
5249 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5250 else
5251 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5252 attr.dw_attr_val.v.val_offset = offset;
5253 add_dwarf_attr (die, &attr);
5254 }
5255
5256 /* Return the start label of a delta attribute. */
5257
5258 static inline const char *
5259 AT_vms_delta1 (dw_attr_node *a)
5260 {
5261 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5262 return a->dw_attr_val.v.val_vms_delta.lbl1;
5263 }
5264
5265 /* Return the end label of a delta attribute. */
5266
5267 static inline const char *
5268 AT_vms_delta2 (dw_attr_node *a)
5269 {
5270 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5271 return a->dw_attr_val.v.val_vms_delta.lbl2;
5272 }
5273
5274 static inline const char *
5275 AT_lbl (dw_attr_node *a)
5276 {
5277 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5278 || AT_class (a) == dw_val_class_lineptr
5279 || AT_class (a) == dw_val_class_macptr
5280 || AT_class (a) == dw_val_class_loclistsptr
5281 || AT_class (a) == dw_val_class_high_pc));
5282 return a->dw_attr_val.v.val_lbl_id;
5283 }
5284
5285 /* Get the attribute of type attr_kind. */
5286
5287 static dw_attr_node *
5288 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5289 {
5290 dw_attr_node *a;
5291 unsigned ix;
5292 dw_die_ref spec = NULL;
5293
5294 if (! die)
5295 return NULL;
5296
5297 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5298 if (a->dw_attr == attr_kind)
5299 return a;
5300 else if (a->dw_attr == DW_AT_specification
5301 || a->dw_attr == DW_AT_abstract_origin)
5302 spec = AT_ref (a);
5303
5304 if (spec)
5305 return get_AT (spec, attr_kind);
5306
5307 return NULL;
5308 }
5309
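/* A minimal sketch (not part of GCC) of the fallback behavior above:
   attributes missing on a DIE are looked up on the DIE referenced by its
   DW_AT_specification or DW_AT_abstract_origin, so querying an
   out-of-class member definition transparently reaches the in-class
   declaration.  The function name and DEFN_DIE are hypothetical.  */
#if 0
static void
get_AT_spec_sketch (dw_die_ref defn_die)
{
  /* Even if DEFN_DIE itself carries no DW_AT_name, this returns the
     name attribute stored on the specification DIE, if any.  */
  dw_attr_node *name = get_AT (defn_die, DW_AT_name);
  if (name)
    fprintf (stderr, "resolved name: %s\n", AT_string (name));
}
#endif
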
5310 /* Returns the parent of the declaration of DIE. */
5311
5312 static dw_die_ref
5313 get_die_parent (dw_die_ref die)
5314 {
5315 dw_die_ref t;
5316
5317 if (!die)
5318 return NULL;
5319
5320 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5321 || (t = get_AT_ref (die, DW_AT_specification)))
5322 die = t;
5323
5324 return die->die_parent;
5325 }
5326
5327 /* Return the "low pc" attribute value, typically associated with a subprogram
5328 DIE. Return null if the "low pc" attribute is either not present, or if it
5329 cannot be represented as an assembler label identifier. */
5330
5331 static inline const char *
5332 get_AT_low_pc (dw_die_ref die)
5333 {
5334 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5335
5336 return a ? AT_lbl (a) : NULL;
5337 }
5338
5339 /* Return the value of the string attribute designated by ATTR_KIND, or
5340 NULL if it is not present. */
5341
5342 static inline const char *
5343 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_string (a) : NULL;
5348 }
5349
5350 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5351 if it is not present. */
5352
5353 static inline int
5354 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5355 {
5356 dw_attr_node *a = get_AT (die, attr_kind);
5357
5358 return a ? AT_flag (a) : 0;
5359 }
5360
5361 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5362 if it is not present. */
5363
5364 static inline unsigned
5365 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5366 {
5367 dw_attr_node *a = get_AT (die, attr_kind);
5368
5369 return a ? AT_unsigned (a) : 0;
5370 }
5371
5372 static inline dw_die_ref
5373 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5374 {
5375 dw_attr_node *a = get_AT (die, attr_kind);
5376
5377 return a ? AT_ref (a) : NULL;
5378 }
5379
5380 static inline struct dwarf_file_data *
5381 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5382 {
5383 dw_attr_node *a = get_AT (die, attr_kind);
5384
5385 return a ? AT_file (a) : NULL;
5386 }
5387
5388 /* Return TRUE if the language is C. */
5389
5390 static inline bool
5391 is_c (void)
5392 {
5393 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5394
5395 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5396 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5397 }
5400
5401 /* Return TRUE if the language is C++. */
5402
5403 static inline bool
5404 is_cxx (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5409 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5410 }
5411
5412 /* Return TRUE if DECL was created by the C++ frontend. */
5413
5414 static bool
5415 is_cxx (const_tree decl)
5416 {
5417 if (in_lto_p)
5418 {
5419 const_tree context = get_ultimate_context (decl);
5420 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5421 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5422 }
5423 return is_cxx ();
5424 }
5425
5426 /* Return TRUE if the language is Fortran. */
5427
5428 static inline bool
5429 is_fortran (void)
5430 {
5431 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5432
5433 return (lang == DW_LANG_Fortran77
5434 || lang == DW_LANG_Fortran90
5435 || lang == DW_LANG_Fortran95
5436 || lang == DW_LANG_Fortran03
5437 || lang == DW_LANG_Fortran08);
5438 }
5439
5440 static inline bool
5441 is_fortran (const_tree decl)
5442 {
5443 if (in_lto_p)
5444 {
5445 const_tree context = get_ultimate_context (decl);
5446 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5447 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5448 "GNU Fortran", 11) == 0
5449 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU F77") == 0);
5451 }
5452 return is_fortran ();
5453 }
5454
5455 /* Return TRUE if the language is Ada. */
5456
5457 static inline bool
5458 is_ada (void)
5459 {
5460 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5461
5462 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5463 }
5464
5465 /* Return TRUE if the language is D. */
5466
5467 static inline bool
5468 is_dlang (void)
5469 {
5470 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5471
5472 return lang == DW_LANG_D;
5473 }
5474
5475 /* Remove the specified attribute if present. Return TRUE if removal
5476 was successful. */
5477
5478 static bool
5479 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5480 {
5481 dw_attr_node *a;
5482 unsigned ix;
5483
5484 if (! die)
5485 return false;
5486
5487 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5488 if (a->dw_attr == attr_kind)
5489 {
5490 if (AT_class (a) == dw_val_class_str)
5491 if (a->dw_attr_val.v.val_str->refcount)
5492 a->dw_attr_val.v.val_str->refcount--;
5493
5494 /* vec::ordered_remove should help reduce the number of abbrevs
5495 that are needed. */
5496 die->die_attr->ordered_remove (ix);
5497 return true;
5498 }
5499 return false;
5500 }
5501
5502 /* Remove CHILD from its parent. PREV must have the property that
5503 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5504
5505 static void
5506 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5507 {
5508 gcc_assert (child->die_parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == child);
5510 if (prev == child)
5511 {
5512 gcc_assert (child->die_parent->die_child == child);
5513 prev = NULL;
5514 }
5515 else
5516 prev->die_sib = child->die_sib;
5517 if (child->die_parent->die_child == child)
5518 child->die_parent->die_child = prev;
5519 child->die_sib = NULL;
5520 }
5521
5522 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5523 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5524
5525 static void
5526 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5527 {
5528 dw_die_ref parent = old_child->die_parent;
5529
5530 gcc_assert (parent == prev->die_parent);
5531 gcc_assert (prev->die_sib == old_child);
5532
5533 new_child->die_parent = parent;
5534 if (prev == old_child)
5535 {
5536 gcc_assert (parent->die_child == old_child);
5537 new_child->die_sib = new_child;
5538 }
5539 else
5540 {
5541 prev->die_sib = new_child;
5542 new_child->die_sib = old_child->die_sib;
5543 }
5544 if (old_child->die_parent->die_child == old_child)
5545 old_child->die_parent->die_child = new_child;
5546 old_child->die_sib = NULL;
5547 }
5548
5549 /* Move all children from OLD_PARENT to NEW_PARENT. */
5550
5551 static void
5552 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5553 {
5554 dw_die_ref c;
5555 new_parent->die_child = old_parent->die_child;
5556 old_parent->die_child = NULL;
5557 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5558 }
5559
5560 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5561 matches TAG. */
5562
5563 static void
5564 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5565 {
5566 dw_die_ref c;
5567
5568 c = die->die_child;
5569 if (c) do {
5570 dw_die_ref prev = c;
5571 c = c->die_sib;
5572 while (c->die_tag == tag)
5573 {
5574 remove_child_with_prev (c, prev);
5575 c->die_parent = NULL;
5576 /* Might have removed every child. */
5577 if (die->die_child == NULL)
5578 return;
5579 c = prev->die_sib;
5580 }
5581 } while (c != die->die_child);
5582 }
5583
5584 /* Add a CHILD_DIE as the last child of DIE. */
5585
5586 static void
5587 add_child_die (dw_die_ref die, dw_die_ref child_die)
5588 {
5589 /* FIXME this should probably be an assert. */
5590 if (! die || ! child_die)
5591 return;
5592 gcc_assert (die != child_die);
5593
5594 child_die->die_parent = die;
5595 if (die->die_child)
5596 {
5597 child_die->die_sib = die->die_child->die_sib;
5598 die->die_child->die_sib = child_die;
5599 }
5600 else
5601 child_die->die_sib = child_die;
5602 die->die_child = child_die;
5603 }
5604
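/* A sketch (not part of GCC) of the invariant maintained above:
   DIE->die_child points to the *last* child, and the children form a
   circular singly linked list through die_sib, so the first child is
   die_child->die_sib.  This is roughly what the FOR_EACH_CHILD macro
   used elsewhere in this file expands to; the function name below is
   hypothetical.  */
#if 0
static void
walk_children_sketch (dw_die_ref die)
{
  dw_die_ref c = die->die_child;
  if (c)
    do
      {
	c = c->die_sib;		/* First child on the first iteration.  */
	/* ... visit C here ...  */
      }
    while (c != die->die_child);
}
#endif
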
5605 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5606
5607 static void
5608 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5609 dw_die_ref after_die)
5610 {
5611 gcc_assert (die
5612 && child_die
5613 && after_die
5614 && die->die_child
5615 && die != child_die);
5616
5617 child_die->die_parent = die;
5618 child_die->die_sib = after_die->die_sib;
5619 after_die->die_sib = child_die;
5620 if (die->die_child == after_die)
5621 die->die_child = child_die;
5622 }
5623
5624 /* Unassociate CHILD from its parent, and make its parent be
5625 NEW_PARENT. */
5626
5627 static void
5628 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5629 {
5630 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5631 if (p->die_sib == child)
5632 {
5633 remove_child_with_prev (child, p);
5634 break;
5635 }
5636 add_child_die (new_parent, child);
5637 }
5638
5639 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5640 is the specification, to the end of PARENT's list of children.
5641 This is done by removing and re-adding it. */
5642
5643 static void
5644 splice_child_die (dw_die_ref parent, dw_die_ref child)
5645 {
5646 /* We want the declaration DIE from inside the class, not the
5647 specification DIE at toplevel. */
5648 if (child->die_parent != parent)
5649 {
5650 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5651
5652 if (tmp)
5653 child = tmp;
5654 }
5655
5656 gcc_assert (child->die_parent == parent
5657 || (child->die_parent
5658 == get_AT_ref (parent, DW_AT_specification)));
5659
5660 reparent_child (child, parent);
5661 }
5662
5663 /* Create and return a new die with TAG_VALUE as tag. */
5664
5665 static inline dw_die_ref
5666 new_die_raw (enum dwarf_tag tag_value)
5667 {
5668 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5669 die->die_tag = tag_value;
5670 return die;
5671 }
5672
5673 /* Create and return a new die with a parent of PARENT_DIE. If
5674 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5675 associated tree T must be supplied to determine parenthood
5676 later. */
5677
5678 static inline dw_die_ref
5679 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5680 {
5681 dw_die_ref die = new_die_raw (tag_value);
5682
5683 if (parent_die != NULL)
5684 add_child_die (parent_die, die);
5685 else
5686 {
5687 limbo_die_node *limbo_node;
5688
5689 /* No DIEs created after early dwarf should end up in limbo,
5690 because the limbo list should not persist past LTO
5691 streaming. */
5692 if (tag_value != DW_TAG_compile_unit
5693 /* These are allowed because they're generated while
5694 breaking out COMDAT units late. */
5695 && tag_value != DW_TAG_type_unit
5696 && tag_value != DW_TAG_skeleton_unit
5697 && !early_dwarf
5698 /* Allow nested functions to live in limbo because they will
5699 only temporarily live there, as decls_for_scope will fix
5700 them up. */
5701 && (TREE_CODE (t) != FUNCTION_DECL
5702 || !decl_function_context (t))
5703 /* Same as nested functions above but for types. Types that
5704 are local to a function will be fixed in
5705 decls_for_scope. */
5706 && (!RECORD_OR_UNION_TYPE_P (t)
5707 || !TYPE_CONTEXT (t)
5708 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5709 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5710 especially in the ltrans stage, but once we implement LTO
5711 dwarf streaming, we should remove this exception. */
5712 && !in_lto_p)
5713 {
5714 fprintf (stderr, "symbol ended up in limbo too late:");
5715 debug_generic_stmt (t);
5716 gcc_unreachable ();
5717 }
5718
5719 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5720 limbo_node->die = die;
5721 limbo_node->created_for = t;
5722 limbo_node->next = limbo_die_list;
5723 limbo_die_list = limbo_node;
5724 }
5725
5726 return die;
5727 }
5728
5729 /* Return the DIE associated with the given type specifier. */
5730
5731 static inline dw_die_ref
5732 lookup_type_die (tree type)
5733 {
5734 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5735 if (die && die->removed)
5736 {
5737 TYPE_SYMTAB_DIE (type) = NULL;
5738 return NULL;
5739 }
5740 return die;
5741 }
5742
5743 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5744 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5745 anonymous type instead of the one of the naming typedef.  */
5746
5747 static inline dw_die_ref
5748 strip_naming_typedef (tree type, dw_die_ref type_die)
5749 {
5750 if (type
5751 && TREE_CODE (type) == RECORD_TYPE
5752 && type_die
5753 && type_die->die_tag == DW_TAG_typedef
5754 && is_naming_typedef_decl (TYPE_NAME (type)))
5755 type_die = get_AT_ref (type_die, DW_AT_type);
5756 return type_die;
5757 }
5758
5759 /* Like lookup_type_die, but if type is an anonymous type named by a
5760 typedef[1], return the DIE of the anonymous type instead of the one of
5761 the naming typedef.  This is because in gen_typedef_die, we equated
5762 the anonymous struct named by the typedef with the DIE of
5763 the naming typedef.  So by default, lookup_type_die on an anonymous
5764 struct yields the DIE of the naming typedef.
5765
5766 [1]: Read the comment of is_naming_typedef_decl to learn about what
5767 a naming typedef is. */
5768
5769 static inline dw_die_ref
5770 lookup_type_die_strip_naming_typedef (tree type)
5771 {
5772 dw_die_ref die = lookup_type_die (type);
5773 return strip_naming_typedef (type, die);
5774 }
5775
5776 /* Equate a DIE to a given type specifier. */
5777
5778 static inline void
5779 equate_type_number_to_die (tree type, dw_die_ref type_die)
5780 {
5781 TYPE_SYMTAB_DIE (type) = type_die;
5782 }
5783
5784 static dw_die_ref maybe_create_die_with_external_ref (tree);
5785 struct GTY(()) sym_off_pair
5786 {
5787 const char * GTY((skip)) sym;
5788 unsigned HOST_WIDE_INT off;
5789 };
5790 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5791
5792 /* Returns a hash value for X (which really is a die_struct). */
5793
5794 inline hashval_t
5795 decl_die_hasher::hash (die_node *x)
5796 {
5797 return (hashval_t) x->decl_id;
5798 }
5799
5800 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5801
5802 inline bool
5803 decl_die_hasher::equal (die_node *x, tree y)
5804 {
5805 return (x->decl_id == DECL_UID (y));
5806 }
5807
5808 /* Return the DIE associated with a given declaration. */
5809
5810 static inline dw_die_ref
5811 lookup_decl_die (tree decl)
5812 {
5813 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5814 NO_INSERT);
5815 if (!die)
5816 {
5817 if (in_lto_p)
5818 return maybe_create_die_with_external_ref (decl);
5819 return NULL;
5820 }
5821 if ((*die)->removed)
5822 {
5823 decl_die_table->clear_slot (die);
5824 return NULL;
5825 }
5826 return *die;
5827 }
5828
5829
5830 /* Return the DIE associated with BLOCK. */
5831
5832 static inline dw_die_ref
5833 lookup_block_die (tree block)
5834 {
5835 dw_die_ref die = BLOCK_DIE (block);
5836 if (!die && in_lto_p)
5837 return maybe_create_die_with_external_ref (block);
5838 return die;
5839 }
5840
5841 /* Associate DIE with BLOCK. */
5842
5843 static inline void
5844 equate_block_to_die (tree block, dw_die_ref die)
5845 {
5846 BLOCK_DIE (block) = die;
5847 }
5848 #undef BLOCK_DIE
5849
5850
5851 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5852    style reference.  Return true if we found one referring to a DIE for
5853    DECL, otherwise return false.  */
5854
5855 static bool
5856 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5857 unsigned HOST_WIDE_INT *off)
5858 {
5859 dw_die_ref die;
5860
5861 if (in_lto_p)
5862 {
5863 /* During WPA stage and incremental linking we use a hash-map
5864 to store the decl <-> label + offset map. */
5865 if (!external_die_map)
5866 return false;
5867 sym_off_pair *desc = external_die_map->get (decl);
5868 if (!desc)
5869 return false;
5870 *sym = desc->sym;
5871 *off = desc->off;
5872 return true;
5873 }
5874
5875 if (TREE_CODE (decl) == BLOCK)
5876 die = lookup_block_die (decl);
5877 else
5878 die = lookup_decl_die (decl);
5879 if (!die)
5880 return false;
5881
5882 /* Similar to get_ref_die_offset_label, but using the "correct"
5883 label. */
5884 *off = die->die_offset;
5885 while (die->die_parent)
5886 die = die->die_parent;
5887 /* For the containing CU DIE we compute a die_symbol in
5888 compute_comp_unit_symbol. */
5889 gcc_assert (die->die_tag == DW_TAG_compile_unit
5890 && die->die_id.die_symbol != NULL);
5891 *sym = die->die_id.die_symbol;
5892 return true;
5893 }
5894
5895 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5896
5897 static void
5898 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5899 const char *symbol, HOST_WIDE_INT offset)
5900 {
5901 /* Create a fake DIE that contains the reference. Don't use
5902 new_die because we don't want to end up in the limbo list. */
5903 /* ??? We probably want to share these, and thus put a ref to the DIE
5904    we create here into the external_die_map entry.  */
5905 dw_die_ref ref = new_die_raw (die->die_tag);
5906 ref->die_id.die_symbol = symbol;
5907 ref->die_offset = offset;
5908 ref->with_offset = 1;
5909 add_AT_die_ref (die, attr_kind, ref);
5910 }
5911
5912 /* Create a DIE for DECL if required and add a reference to a DIE
5913 at SYMBOL + OFFSET which contains attributes dumped early. */
5914
5915 static void
5916 dwarf2out_register_external_die (tree decl, const char *sym,
5917 unsigned HOST_WIDE_INT off)
5918 {
5919 if (debug_info_level == DINFO_LEVEL_NONE)
5920 return;
5921
5922 if (!external_die_map)
5923 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5924 gcc_checking_assert (!external_die_map->get (decl));
5925 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5926 external_die_map->put (decl, p);
5927 }
5928
5929 /* If we have a registered external DIE for DECL return a new DIE for
5930 the concrete instance with an appropriate abstract origin. */
5931
5932 static dw_die_ref
5933 maybe_create_die_with_external_ref (tree decl)
5934 {
5935 if (!external_die_map)
5936 return NULL;
5937 sym_off_pair *desc = external_die_map->get (decl);
5938 if (!desc)
5939 return NULL;
5940
5941 const char *sym = desc->sym;
5942 unsigned HOST_WIDE_INT off = desc->off;
5943
5944 in_lto_p = false;
5945 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5946 ? lookup_block_die (decl) : lookup_decl_die (decl));
5947 gcc_assert (!die);
5948 in_lto_p = true;
5949
5950 tree ctx;
5951 dw_die_ref parent = NULL;
5952 /* Need to lookup a DIE for the decls context - the containing
5953 function or translation unit. */
5954 if (TREE_CODE (decl) == BLOCK)
5955 {
5956 ctx = BLOCK_SUPERCONTEXT (decl);
5957 /* ??? We do not output DIEs for all scopes thus skip as
5958 many DIEs as needed. */
5959 while (TREE_CODE (ctx) == BLOCK
5960 && !lookup_block_die (ctx))
5961 ctx = BLOCK_SUPERCONTEXT (ctx);
5962 }
5963 else
5964 ctx = DECL_CONTEXT (decl);
5965 /* Peel types in the context stack. */
5966 while (ctx && TYPE_P (ctx))
5967 ctx = TYPE_CONTEXT (ctx);
5968 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5969 if (debug_info_level <= DINFO_LEVEL_TERSE)
5970 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5971 ctx = DECL_CONTEXT (ctx);
5972 if (ctx)
5973 {
5974 if (TREE_CODE (ctx) == BLOCK)
5975 parent = lookup_block_die (ctx);
5976 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5977 /* Keep the 1:1 association during WPA. */
5978 && !flag_wpa
5979 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5980 /* Otherwise all late annotations go to the main CU which
5981 imports the original CUs. */
5982 parent = comp_unit_die ();
5983 else if (TREE_CODE (ctx) == FUNCTION_DECL
5984 && TREE_CODE (decl) != FUNCTION_DECL
5985 && TREE_CODE (decl) != PARM_DECL
5986 && TREE_CODE (decl) != RESULT_DECL
5987 && TREE_CODE (decl) != BLOCK)
5988 /* Leave function local entities parent determination to when
5989 we process scope vars. */
5990 ;
5991 else
5992 parent = lookup_decl_die (ctx);
5993 }
5994 else
5995 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5996 Handle this case gracefully by globalizing stuff. */
5997 parent = comp_unit_die ();
5998 /* Create a DIE "stub". */
5999 switch (TREE_CODE (decl))
6000 {
6001 case TRANSLATION_UNIT_DECL:
6002 {
6003 die = comp_unit_die ();
6004 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6005 to create a DIE for the original CUs. */
6006 return die;
6007 }
6008 case NAMESPACE_DECL:
6009 if (is_fortran (decl))
6010 die = new_die (DW_TAG_module, parent, decl);
6011 else
6012 die = new_die (DW_TAG_namespace, parent, decl);
6013 break;
6014 case FUNCTION_DECL:
6015 die = new_die (DW_TAG_subprogram, parent, decl);
6016 break;
6017 case VAR_DECL:
6018 die = new_die (DW_TAG_variable, parent, decl);
6019 break;
6020 case RESULT_DECL:
6021 die = new_die (DW_TAG_variable, parent, decl);
6022 break;
6023 case PARM_DECL:
6024 die = new_die (DW_TAG_formal_parameter, parent, decl);
6025 break;
6026 case CONST_DECL:
6027 die = new_die (DW_TAG_constant, parent, decl);
6028 break;
6029 case LABEL_DECL:
6030 die = new_die (DW_TAG_label, parent, decl);
6031 break;
6032 case BLOCK:
6033 die = new_die (DW_TAG_lexical_block, parent, decl);
6034 break;
6035 default:
6036 gcc_unreachable ();
6037 }
6038 if (TREE_CODE (decl) == BLOCK)
6039 equate_block_to_die (decl, die);
6040 else
6041 equate_decl_number_to_die (decl, die);
6042
6043 add_desc_attribute (die, decl);
6044
6045 /* Add a reference to the DIE providing early debug at $sym + off. */
6046 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6047
6048 return die;
6049 }
6050
6051 /* Returns a hash value for X (which really is a var_loc_list). */
6052
6053 inline hashval_t
6054 decl_loc_hasher::hash (var_loc_list *x)
6055 {
6056 return (hashval_t) x->decl_id;
6057 }
6058
6059 /* Return nonzero if decl_id of var_loc_list X is the same as
6060 UID of decl *Y. */
6061
6062 inline bool
6063 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6064 {
6065 return (x->decl_id == DECL_UID (y));
6066 }
6067
6068 /* Return the var_loc list associated with a given declaration. */
6069
6070 static inline var_loc_list *
6071 lookup_decl_loc (const_tree decl)
6072 {
6073 if (!decl_loc_table)
6074 return NULL;
6075 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6076 }
6077
6078 /* Returns a hash value for X (which really is a cached_dw_loc_list).  */
6079
6080 inline hashval_t
6081 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6082 {
6083 return (hashval_t) x->decl_id;
6084 }
6085
6086 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6087 UID of decl *Y. */
6088
6089 inline bool
6090 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6091 {
6092 return (x->decl_id == DECL_UID (y));
6093 }
6094
6095 /* Equate a DIE to a particular declaration. */
6096
6097 static void
6098 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6099 {
6100 unsigned int decl_id = DECL_UID (decl);
6101
6102 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6103 decl_die->decl_id = decl_id;
6104 }
6105
6106 /* Return how many bits the PIECE EXPR_LIST covers.  */
6107
6108 static HOST_WIDE_INT
6109 decl_piece_bitsize (rtx piece)
6110 {
6111 int ret = (int) GET_MODE (piece);
6112 if (ret)
6113 return ret;
6114 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6115 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6116 return INTVAL (XEXP (XEXP (piece, 0), 0));
6117 }
6118
6119 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6120
6121 static rtx *
6122 decl_piece_varloc_ptr (rtx piece)
6123 {
6124 if ((int) GET_MODE (piece))
6125 return &XEXP (piece, 0);
6126 else
6127 return &XEXP (XEXP (piece, 0), 1);
6128 }
6129
6130 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6131    NEXT is the chain of following piece nodes.  */
6132
6133 static rtx_expr_list *
6134 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6135 {
6136 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6137 return alloc_EXPR_LIST (bitsize, loc_note, next);
6138 else
6139 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6140 GEN_INT (bitsize),
6141 loc_note), next);
6142 }
6143
6144 /* Return rtx that should be stored into loc field for
6145 LOC_NOTE and BITPOS/BITSIZE. */
6146
6147 static rtx
6148 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6149 HOST_WIDE_INT bitsize)
6150 {
6151 if (bitsize != -1)
6152 {
6153 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6154 if (bitpos != 0)
6155 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6156 }
6157 return loc_note;
6158 }
6159
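/* A worked example (a sketch, not taken from the sources): for a location
   note N describing bits [32, 64) of a variable, i.e. BITPOS == 32 and
   BITSIZE == 32, construct_piece_list builds the two-piece list

     piece #1: 32 bits, NULL location   (padding for bits [0, 32))
     piece #2: 32 bits, location N      (the bits actually described)

   Each piece is an EXPR_LIST whose mode field holds the bit size when
   0 < BITSIZE <= MAX_MACHINE_MODE; other sizes are wrapped in a CONCAT
   of (bitsize, location).  decl_piece_bitsize and decl_piece_varloc_ptr
   above decode exactly these two layouts.  */
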
6160 /* This function either modifies location piece list *DEST in
6161 place (if SRC and INNER are NULL), or copies location piece list
6162 *SRC to *DEST while modifying it.  Location BITPOS is modified
6163 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6164 when copying, not copied) and, if needed, padding around it is added.
6165 When modifying in place, DEST should point to the EXPR_LIST where
6166 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6167 to the start of the whole list and INNER points to the EXPR_LIST
6168 where earlier pieces cover PIECE_BITPOS bits.  */
6169
6170 static void
6171 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6172 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6173 HOST_WIDE_INT bitsize, rtx loc_note)
6174 {
6175 HOST_WIDE_INT diff;
6176 bool copy = inner != NULL;
6177
6178 if (copy)
6179 {
6180 /* First copy all nodes preceding the current bitpos. */
6181 while (src != inner)
6182 {
6183 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6184 decl_piece_bitsize (*src), NULL_RTX);
6185 dest = &XEXP (*dest, 1);
6186 src = &XEXP (*src, 1);
6187 }
6188 }
6189 /* Add padding if needed. */
6190 if (bitpos != piece_bitpos)
6191 {
6192 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6193 copy ? NULL_RTX : *dest);
6194 dest = &XEXP (*dest, 1);
6195 }
6196 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6197 {
6198 gcc_assert (!copy);
6199 /* A piece with the correct bitpos and bitsize already exists;
6200 just update its location and return.  */
6201 *decl_piece_varloc_ptr (*dest) = loc_note;
6202 return;
6203 }
6204 /* Add the piece that changed. */
6205 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6206 dest = &XEXP (*dest, 1);
6207 /* Skip over pieces that overlap it. */
6208 diff = bitpos - piece_bitpos + bitsize;
6209 if (!copy)
6210 src = dest;
6211 while (diff > 0 && *src)
6212 {
6213 rtx piece = *src;
6214 diff -= decl_piece_bitsize (piece);
6215 if (copy)
6216 src = &XEXP (piece, 1);
6217 else
6218 {
6219 *src = XEXP (piece, 1);
6220 free_EXPR_LIST_node (piece);
6221 }
6222 }
6223 /* Add padding if needed. */
6224 if (diff < 0 && *src)
6225 {
6226 if (!copy)
6227 dest = src;
6228 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6229 dest = &XEXP (*dest, 1);
6230 }
6231 if (!copy)
6232 return;
6233 /* Finally copy all nodes following it. */
6234 while (*src)
6235 {
6236 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6237 decl_piece_bitsize (*src), NULL_RTX);
6238 dest = &XEXP (*dest, 1);
6239 src = &XEXP (*src, 1);
6240 }
6241 }
6242
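/* Worked example for the in-place case above (SRC == INNER == NULL); the
   piece locations A, B, C, D and the note N are hypothetical.  Suppose
   *DEST currently describes bits [0, 128) as four 32-bit pieces

     [0,32)->A   [32,64)->B   [64,96)->C   [96,128)->D

   and DEST has already been advanced past the [0,32) piece, so
   PIECE_BITPOS == 32.

   - If N covers [32, 64), the existing [32,64) piece matches exactly and
     only its location is replaced in place:  A, N, C, D.

   - If N covers [32, 80), pieces B and C are freed, N is inserted, and a
     16-bit piece with a NULL location is added so that D still starts at
     bit 96:  A, N, pad16, D.  */
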
6243 /* Add a variable location node to the linked list for DECL. */
6244
6245 static struct var_loc_node *
6246 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6247 {
6248 unsigned int decl_id;
6249 var_loc_list *temp;
6250 struct var_loc_node *loc = NULL;
6251 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6252
6253 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6254 {
6255 tree realdecl = DECL_DEBUG_EXPR (decl);
6256 if (handled_component_p (realdecl)
6257 || (TREE_CODE (realdecl) == MEM_REF
6258 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6259 {
6260 bool reverse;
6261 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6262 &bitsize, &reverse);
6263 if (!innerdecl
6264 || !DECL_P (innerdecl)
6265 || DECL_IGNORED_P (innerdecl)
6266 || TREE_STATIC (innerdecl)
6267 || bitsize == 0
6268 || bitpos + bitsize > 256)
6269 return NULL;
6270 decl = innerdecl;
6271 }
6272 }
6273
6274 decl_id = DECL_UID (decl);
6275 var_loc_list **slot
6276 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6277 if (*slot == NULL)
6278 {
6279 temp = ggc_cleared_alloc<var_loc_list> ();
6280 temp->decl_id = decl_id;
6281 *slot = temp;
6282 }
6283 else
6284 temp = *slot;
6285
6286 /* For PARM_DECLs try to keep around the original incoming value,
6287 even if that means we'll emit a zero-range .debug_loc entry. */
6288 if (temp->last
6289 && temp->first == temp->last
6290 && TREE_CODE (decl) == PARM_DECL
6291 && NOTE_P (temp->first->loc)
6292 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6293 && DECL_INCOMING_RTL (decl)
6294 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6295 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6296 == GET_CODE (DECL_INCOMING_RTL (decl))
6297 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6298 && (bitsize != -1
6299 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6300 NOTE_VAR_LOCATION_LOC (loc_note))
6301 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6302 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6303 {
6304 loc = ggc_cleared_alloc<var_loc_node> ();
6305 temp->first->next = loc;
6306 temp->last = loc;
6307 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6308 }
6309 else if (temp->last)
6310 {
6311 struct var_loc_node *last = temp->last, *unused = NULL;
6312 rtx *piece_loc = NULL, last_loc_note;
6313 HOST_WIDE_INT piece_bitpos = 0;
6314 if (last->next)
6315 {
6316 last = last->next;
6317 gcc_assert (last->next == NULL);
6318 }
6319 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6320 {
6321 piece_loc = &last->loc;
6322 do
6323 {
6324 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6325 if (piece_bitpos + cur_bitsize > bitpos)
6326 break;
6327 piece_bitpos += cur_bitsize;
6328 piece_loc = &XEXP (*piece_loc, 1);
6329 }
6330 while (*piece_loc);
6331 }
6332 /* TEMP->LAST here points either to the last-but-one or to the
6333 last element in the chained list; LAST points to the
6334 last element.  */
6335 if (label && strcmp (last->label, label) == 0 && last->view == view)
6336 {
6337 /* For SRA-optimized variables, if there weren't any real
6338 insns since the last note, just modify the last node.  */
6339 if (piece_loc != NULL)
6340 {
6341 adjust_piece_list (piece_loc, NULL, NULL,
6342 bitpos, piece_bitpos, bitsize, loc_note);
6343 return NULL;
6344 }
6345 /* If the last note doesn't cover any instructions, remove it. */
6346 if (temp->last != last)
6347 {
6348 temp->last->next = NULL;
6349 unused = last;
6350 last = temp->last;
6351 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6352 }
6353 else
6354 {
6355 gcc_assert (temp->first == temp->last
6356 || (temp->first->next == temp->last
6357 && TREE_CODE (decl) == PARM_DECL));
6358 memset (temp->last, '\0', sizeof (*temp->last));
6359 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6360 return temp->last;
6361 }
6362 }
6363 if (bitsize == -1 && NOTE_P (last->loc))
6364 last_loc_note = last->loc;
6365 else if (piece_loc != NULL
6366 && *piece_loc != NULL_RTX
6367 && piece_bitpos == bitpos
6368 && decl_piece_bitsize (*piece_loc) == bitsize)
6369 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6370 else
6371 last_loc_note = NULL_RTX;
6372 /* If the current location is the same as the end of the list,
6373 and either both or neither of the locations is uninitialized,
6374 we have nothing to do. */
6375 if (last_loc_note == NULL_RTX
6376 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6377 NOTE_VAR_LOCATION_LOC (loc_note)))
6378 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6379 != NOTE_VAR_LOCATION_STATUS (loc_note))
6380 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6381 == VAR_INIT_STATUS_UNINITIALIZED)
6382 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6383 == VAR_INIT_STATUS_UNINITIALIZED))))
6384 {
6385 /* Add LOC to the end of list and update LAST. If the last
6386 element of the list has been removed above, reuse its
6387 memory for the new node, otherwise allocate a new one. */
6388 if (unused)
6389 {
6390 loc = unused;
6391 memset (loc, '\0', sizeof (*loc));
6392 }
6393 else
6394 loc = ggc_cleared_alloc<var_loc_node> ();
6395 if (bitsize == -1 || piece_loc == NULL)
6396 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6397 else
6398 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6399 bitpos, piece_bitpos, bitsize, loc_note);
6400 last->next = loc;
6401 /* Ensure TEMP->LAST will point either to the new last but one
6402 element of the chain, or to the last element in it. */
6403 if (last != temp->last)
6404 temp->last = last;
6405 }
6406 else if (unused)
6407 ggc_free (unused);
6408 }
6409 else
6410 {
6411 loc = ggc_cleared_alloc<var_loc_node> ();
6412 temp->first = loc;
6413 temp->last = loc;
6414 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6415 }
6416 return loc;
6417 }
6418 \f
6419 /* Keep track of the number of spaces used to indent the
6420 output of the debugging routines that print the structure of
6421 the DIE internal representation. */
6422 static int print_indent;
6423
6424 /* Indent the line the number of spaces given by print_indent. */
6425
6426 static inline void
6427 print_spaces (FILE *outfile)
6428 {
6429 fprintf (outfile, "%*s", print_indent, "");
6430 }
6431
6432 /* Print a type signature in hex. */
6433
6434 static inline void
6435 print_signature (FILE *outfile, char *sig)
6436 {
6437 int i;
6438
6439 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6440 fprintf (outfile, "%02x", sig[i] & 0xff);
6441 }
6442
6443 static inline void
6444 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6445 {
6446 if (discr_value->pos)
6447 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6448 else
6449 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6450 }
6451
6452 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6453
6454 /* Print the value associated to the VAL DWARF value node to OUTFILE. If
6455 RECURSE, output location descriptor operations. */
6456
6457 static void
6458 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6459 {
6460 switch (val->val_class)
6461 {
6462 case dw_val_class_addr:
6463 fprintf (outfile, "address");
6464 break;
6465 case dw_val_class_offset:
6466 fprintf (outfile, "offset");
6467 break;
6468 case dw_val_class_loc:
6469 fprintf (outfile, "location descriptor");
6470 if (val->v.val_loc == NULL)
6471 fprintf (outfile, " -> <null>\n");
6472 else if (recurse)
6473 {
6474 fprintf (outfile, ":\n");
6475 print_indent += 4;
6476 print_loc_descr (val->v.val_loc, outfile);
6477 print_indent -= 4;
6478 }
6479 else
6480 {
6481 if (flag_dump_noaddr || flag_dump_unnumbered)
6482 fprintf (outfile, " #\n");
6483 else
6484 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6485 }
6486 break;
6487 case dw_val_class_loc_list:
6488 fprintf (outfile, "location list -> label:%s",
6489 val->v.val_loc_list->ll_symbol);
6490 break;
6491 case dw_val_class_view_list:
6492 val = view_list_to_loc_list_val_node (val);
6493 fprintf (outfile, "location list with views -> labels:%s and %s",
6494 val->v.val_loc_list->ll_symbol,
6495 val->v.val_loc_list->vl_symbol);
6496 break;
6497 case dw_val_class_range_list:
6498 fprintf (outfile, "range list");
6499 break;
6500 case dw_val_class_const:
6501 case dw_val_class_const_implicit:
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6503 break;
6504 case dw_val_class_unsigned_const:
6505 case dw_val_class_unsigned_const_implicit:
6506 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6507 break;
6508 case dw_val_class_const_double:
6509 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6510 HOST_WIDE_INT_PRINT_UNSIGNED")",
6511 val->v.val_double.high,
6512 val->v.val_double.low);
6513 break;
6514 case dw_val_class_wide_int:
6515 {
6516 int i = val->v.val_wide->get_len ();
6517 fprintf (outfile, "constant (");
6518 gcc_assert (i > 0);
6519 if (val->v.val_wide->elt (i - 1) == 0)
6520 fprintf (outfile, "0x");
6521 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6522 val->v.val_wide->elt (--i));
6523 while (--i >= 0)
6524 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6525 val->v.val_wide->elt (i));
6526 fprintf (outfile, ")");
6527 break;
6528 }
6529 case dw_val_class_vec:
6530 fprintf (outfile, "floating-point or vector constant");
6531 break;
6532 case dw_val_class_flag:
6533 fprintf (outfile, "%u", val->v.val_flag);
6534 break;
6535 case dw_val_class_die_ref:
6536 if (val->v.val_die_ref.die != NULL)
6537 {
6538 dw_die_ref die = val->v.val_die_ref.die;
6539
6540 if (die->comdat_type_p)
6541 {
6542 fprintf (outfile, "die -> signature: ");
6543 print_signature (outfile,
6544 die->die_id.die_type_node->signature);
6545 }
6546 else if (die->die_id.die_symbol)
6547 {
6548 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6549 if (die->with_offset)
6550 fprintf (outfile, " + %ld", die->die_offset);
6551 }
6552 else
6553 fprintf (outfile, "die -> %ld", die->die_offset);
6554 if (flag_dump_noaddr || flag_dump_unnumbered)
6555 fprintf (outfile, " #");
6556 else
6557 fprintf (outfile, " (%p)", (void *) die);
6558 }
6559 else
6560 fprintf (outfile, "die -> <null>");
6561 break;
6562 case dw_val_class_vms_delta:
6563 fprintf (outfile, "delta: @slotcount(%s-%s)",
6564 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6565 break;
6566 case dw_val_class_symview:
6567 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6568 break;
6569 case dw_val_class_lbl_id:
6570 case dw_val_class_lineptr:
6571 case dw_val_class_macptr:
6572 case dw_val_class_loclistsptr:
6573 case dw_val_class_high_pc:
6574 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6575 break;
6576 case dw_val_class_str:
6577 if (val->v.val_str->str != NULL)
6578 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6579 else
6580 fprintf (outfile, "<null>");
6581 break;
6582 case dw_val_class_file:
6583 case dw_val_class_file_implicit:
6584 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6585 val->v.val_file->emitted_number);
6586 break;
6587 case dw_val_class_data8:
6588 {
6589 int i;
6590
6591 for (i = 0; i < 8; i++)
6592 fprintf (outfile, "%02x", val->v.val_data8[i]);
6593 break;
6594 }
6595 case dw_val_class_discr_value:
6596 print_discr_value (outfile, &val->v.val_discr_value);
6597 break;
6598 case dw_val_class_discr_list:
6599 for (dw_discr_list_ref node = val->v.val_discr_list;
6600 node != NULL;
6601 node = node->dw_discr_next)
6602 {
6603 if (node->dw_discr_range)
6604 {
6605 fprintf (outfile, " .. ");
6606 print_discr_value (outfile, &node->dw_discr_lower_bound);
6607 print_discr_value (outfile, &node->dw_discr_upper_bound);
6608 }
6609 else
6610 print_discr_value (outfile, &node->dw_discr_lower_bound);
6611
6612 if (node->dw_discr_next != NULL)
6613 fprintf (outfile, " | ");
6614 }
6615 default:
6616 break;
6617 }
6618 }
6619
6620 /* Likewise, for a DIE attribute. */
6621
6622 static void
6623 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6624 {
6625 print_dw_val (&a->dw_attr_val, recurse, outfile);
6626 }
6627
6628
6629 /* Print the list of operands in the LOC location description to OUTFILE. This
6630 routine is a debugging aid only. */
6631
6632 static void
6633 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6634 {
6635 dw_loc_descr_ref l = loc;
6636
6637 if (loc == NULL)
6638 {
6639 print_spaces (outfile);
6640 fprintf (outfile, "<null>\n");
6641 return;
6642 }
6643
6644 for (l = loc; l != NULL; l = l->dw_loc_next)
6645 {
6646 print_spaces (outfile);
6647 if (flag_dump_noaddr || flag_dump_unnumbered)
6648 fprintf (outfile, "#");
6649 else
6650 fprintf (outfile, "(%p)", (void *) l);
6651 fprintf (outfile, " %s",
6652 dwarf_stack_op_name (l->dw_loc_opc));
6653 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6654 {
6655 fprintf (outfile, " ");
6656 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6657 }
6658 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6659 {
6660 fprintf (outfile, ", ");
6661 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6662 }
6663 fprintf (outfile, "\n");
6664 }
6665 }
6666
6667 /* Print the information associated with a given DIE, and its children.
6668 This routine is a debugging aid only. */
6669
6670 static void
6671 print_die (dw_die_ref die, FILE *outfile)
6672 {
6673 dw_attr_node *a;
6674 dw_die_ref c;
6675 unsigned ix;
6676
6677 print_spaces (outfile);
6678 fprintf (outfile, "DIE %4ld: %s ",
6679 die->die_offset, dwarf_tag_name (die->die_tag));
6680 if (flag_dump_noaddr || flag_dump_unnumbered)
6681 fprintf (outfile, "#\n");
6682 else
6683 fprintf (outfile, "(%p)\n", (void*) die);
6684 print_spaces (outfile);
6685 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6686 fprintf (outfile, " offset: %ld", die->die_offset);
6687 fprintf (outfile, " mark: %d\n", die->die_mark);
6688
6689 if (die->comdat_type_p)
6690 {
6691 print_spaces (outfile);
6692 fprintf (outfile, " signature: ");
6693 print_signature (outfile, die->die_id.die_type_node->signature);
6694 fprintf (outfile, "\n");
6695 }
6696
6697 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6698 {
6699 print_spaces (outfile);
6700 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6701
6702 print_attribute (a, true, outfile);
6703 fprintf (outfile, "\n");
6704 }
6705
6706 if (die->die_child != NULL)
6707 {
6708 print_indent += 4;
6709 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6710 print_indent -= 4;
6711 }
6712 if (print_indent == 0)
6713 fprintf (outfile, "\n");
6714 }
6715
6716 /* Print the list of operations in the LOC location description. */
6717
6718 DEBUG_FUNCTION void
6719 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6720 {
6721 print_loc_descr (loc, stderr);
6722 }
6723
6724 /* Print the information collected for a given DIE. */
6725
6726 DEBUG_FUNCTION void
6727 debug_dwarf_die (dw_die_ref die)
6728 {
6729 print_die (die, stderr);
6730 }
6731
6732 DEBUG_FUNCTION void
6733 debug (die_struct &ref)
6734 {
6735 print_die (&ref, stderr);
6736 }
6737
6738 DEBUG_FUNCTION void
6739 debug (die_struct *ptr)
6740 {
6741 if (ptr)
6742 debug (*ptr);
6743 else
6744 fprintf (stderr, "<nil>\n");
6745 }
6746
6747
6748 /* Print all DWARF information collected for the compilation unit.
6749 This routine is a debugging aid only. */
6750
6751 DEBUG_FUNCTION void
6752 debug_dwarf (void)
6753 {
6754 print_indent = 0;
6755 print_die (comp_unit_die (), stderr);
6756 }
6757
6758 /* Verify the DIE tree structure. */
6759
6760 DEBUG_FUNCTION void
6761 verify_die (dw_die_ref die)
6762 {
6763 gcc_assert (!die->die_mark);
6764 if (die->die_parent == NULL
6765 && die->die_sib == NULL)
6766 return;
6767 /* Verify the die_sib list is cyclic. */
6768 dw_die_ref x = die;
6769 do
6770 {
6771 x->die_mark = 1;
6772 x = x->die_sib;
6773 }
6774 while (x && !x->die_mark);
6775 gcc_assert (x == die);
6776 x = die;
6777 do
6778 {
6779 /* Verify all dies have the same parent. */
6780 gcc_assert (x->die_parent == die->die_parent);
6781 if (x->die_child)
6782 {
6783 /* Verify the child has the proper parent and recurse. */
6784 gcc_assert (x->die_child->die_parent == x);
6785 verify_die (x->die_child);
6786 }
6787 x->die_mark = 0;
6788 x = x->die_sib;
6789 }
6790 while (x && x->die_mark);
6791 }
6792
6793 /* Sanity checks on DIEs. */
6794
6795 static void
6796 check_die (dw_die_ref die)
6797 {
6798 unsigned ix;
6799 dw_attr_node *a;
6800 bool inline_found = false;
6801 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6802 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6803 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6804 {
6805 switch (a->dw_attr)
6806 {
6807 case DW_AT_inline:
6808 if (a->dw_attr_val.v.val_unsigned)
6809 inline_found = true;
6810 break;
6811 case DW_AT_location:
6812 ++n_location;
6813 break;
6814 case DW_AT_low_pc:
6815 ++n_low_pc;
6816 break;
6817 case DW_AT_high_pc:
6818 ++n_high_pc;
6819 break;
6820 case DW_AT_artificial:
6821 ++n_artificial;
6822 break;
6823 case DW_AT_decl_column:
6824 ++n_decl_column;
6825 break;
6826 case DW_AT_decl_line:
6827 ++n_decl_line;
6828 break;
6829 case DW_AT_decl_file:
6830 ++n_decl_file;
6831 break;
6832 default:
6833 break;
6834 }
6835 }
6836 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6837 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6838 {
6839 fprintf (stderr, "Duplicate attributes in DIE:\n");
6840 debug_dwarf_die (die);
6841 gcc_unreachable ();
6842 }
6843 if (inline_found)
6844 {
6845 /* A debugging information entry that is a member of an abstract
6846 instance tree [that has DW_AT_inline] should not contain any
6847 attributes which describe aspects of the subroutine which vary
6848 between distinct inlined expansions or distinct out-of-line
6849 expansions. */
6850 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6851 gcc_assert (a->dw_attr != DW_AT_low_pc
6852 && a->dw_attr != DW_AT_high_pc
6853 && a->dw_attr != DW_AT_location
6854 && a->dw_attr != DW_AT_frame_base
6855 && a->dw_attr != DW_AT_call_all_calls
6856 && a->dw_attr != DW_AT_GNU_all_call_sites);
6857 }
6858 }
6859 \f
6860 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6861 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6862 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6863
6864 /* Calculate the checksum of a location expression. */
6865
6866 static inline void
6867 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6868 {
6869 int tem;
6870 inchash::hash hstate;
6871 hashval_t hash;
6872
6873 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6874 CHECKSUM (tem);
6875 hash_loc_operands (loc, hstate);
6876 hash = hstate.end();
6877 CHECKSUM (hash);
6878 }
6879
6880 /* Calculate the checksum of an attribute. */
6881
6882 static void
6883 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6884 {
6885 dw_loc_descr_ref loc;
6886 rtx r;
6887
6888 CHECKSUM (at->dw_attr);
6889
6890 /* We don't care that this was compiled with a different compiler
6891 snapshot; if the output is the same, that's what matters. */
6892 if (at->dw_attr == DW_AT_producer)
6893 return;
6894
6895 switch (AT_class (at))
6896 {
6897 case dw_val_class_const:
6898 case dw_val_class_const_implicit:
6899 CHECKSUM (at->dw_attr_val.v.val_int);
6900 break;
6901 case dw_val_class_unsigned_const:
6902 case dw_val_class_unsigned_const_implicit:
6903 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6904 break;
6905 case dw_val_class_const_double:
6906 CHECKSUM (at->dw_attr_val.v.val_double);
6907 break;
6908 case dw_val_class_wide_int:
6909 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6910 get_full_len (*at->dw_attr_val.v.val_wide)
6911 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6912 break;
6913 case dw_val_class_vec:
6914 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6915 (at->dw_attr_val.v.val_vec.length
6916 * at->dw_attr_val.v.val_vec.elt_size));
6917 break;
6918 case dw_val_class_flag:
6919 CHECKSUM (at->dw_attr_val.v.val_flag);
6920 break;
6921 case dw_val_class_str:
6922 CHECKSUM_STRING (AT_string (at));
6923 break;
6924
6925 case dw_val_class_addr:
6926 r = AT_addr (at);
6927 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6928 CHECKSUM_STRING (XSTR (r, 0));
6929 break;
6930
6931 case dw_val_class_offset:
6932 CHECKSUM (at->dw_attr_val.v.val_offset);
6933 break;
6934
6935 case dw_val_class_loc:
6936 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6937 loc_checksum (loc, ctx);
6938 break;
6939
6940 case dw_val_class_die_ref:
6941 die_checksum (AT_ref (at), ctx, mark);
6942 break;
6943
6944 case dw_val_class_fde_ref:
6945 case dw_val_class_vms_delta:
6946 case dw_val_class_symview:
6947 case dw_val_class_lbl_id:
6948 case dw_val_class_lineptr:
6949 case dw_val_class_macptr:
6950 case dw_val_class_loclistsptr:
6951 case dw_val_class_high_pc:
6952 break;
6953
6954 case dw_val_class_file:
6955 case dw_val_class_file_implicit:
6956 CHECKSUM_STRING (AT_file (at)->filename);
6957 break;
6958
6959 case dw_val_class_data8:
6960 CHECKSUM (at->dw_attr_val.v.val_data8);
6961 break;
6962
6963 default:
6964 break;
6965 }
6966 }
6967
6968 /* Calculate the checksum of a DIE. */
6969
6970 static void
6971 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6972 {
6973 dw_die_ref c;
6974 dw_attr_node *a;
6975 unsigned ix;
6976
6977 /* To avoid infinite recursion. */
6978 if (die->die_mark)
6979 {
6980 CHECKSUM (die->die_mark);
6981 return;
6982 }
6983 die->die_mark = ++(*mark);
6984
6985 CHECKSUM (die->die_tag);
6986
6987 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6988 attr_checksum (a, ctx, mark);
6989
6990 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6991 }
6992
6993 #undef CHECKSUM
6994 #undef CHECKSUM_BLOCK
6995 #undef CHECKSUM_STRING
6996
6997 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6998 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6999 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7000 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7001 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7002 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7003 #define CHECKSUM_ATTR(FOO) \
7004 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7005
7006 /* Calculate the checksum of a number in signed LEB128 format. */
7007
7008 static void
7009 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 unsigned char byte;
7012 bool more;
7013
7014 while (1)
7015 {
7016 byte = (value & 0x7f);
7017 value >>= 7;
7018 more = !((value == 0 && (byte & 0x40) == 0)
7019 || (value == -1 && (byte & 0x40) != 0));
7020 if (more)
7021 byte |= 0x80;
7022 CHECKSUM (byte);
7023 if (!more)
7024 break;
7025 }
7026 }
7027
7028 /* Calculate the checksum of a number in unsigned LEB128 format. */
7029
7030 static void
7031 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7032 {
7033 while (1)
7034 {
7035 unsigned char byte = (value & 0x7f);
7036 value >>= 7;
7037 if (value != 0)
7038 /* More bytes to follow. */
7039 byte |= 0x80;
7040 CHECKSUM (byte);
7041 if (value == 0)
7042 break;
7043 }
7044 }
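
/* As a worked example of the two encodings above: the unsigned value
   624485 (0x98765) contributes the ULEB128 byte sequence 0xe5 0x8e 0x26,
   and the signed value -2 contributes the single SLEB128 byte 0x7e; each
   byte is fed to CHECKSUM as it is produced. */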
7045
7046 /* Checksum the context of the DIE. This adds the names of any
7047 surrounding namespaces or structures to the checksum. */
7048
7049 static void
7050 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7051 {
7052 const char *name;
7053 dw_die_ref spec;
7054 int tag = die->die_tag;
7055
7056 if (tag != DW_TAG_namespace
7057 && tag != DW_TAG_structure_type
7058 && tag != DW_TAG_class_type)
7059 return;
7060
7061 name = get_AT_string (die, DW_AT_name);
7062
7063 spec = get_AT_ref (die, DW_AT_specification);
7064 if (spec != NULL)
7065 die = spec;
7066
7067 if (die->die_parent != NULL)
7068 checksum_die_context (die->die_parent, ctx);
7069
7070 CHECKSUM_ULEB128 ('C');
7071 CHECKSUM_ULEB128 (tag);
7072 if (name != NULL)
7073 CHECKSUM_STRING (name);
7074 }
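
/* For example, for a member DIE nested as  namespace N { struct S { ... } },
   applying this to its parent S contributes the sequence
     'C' DW_TAG_namespace "N"  'C' DW_TAG_structure_type "S"
   with each letter and tag emitted as a ULEB128 and each name followed by
   its terminating NUL. */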
7075
7076 /* Calculate the checksum of a location expression. */
7077
7078 static inline void
7079 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7080 {
7081 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7082 were emitted as a DW_FORM_sdata instead of a location expression. */
7083 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7084 {
7085 CHECKSUM_ULEB128 (DW_FORM_sdata);
7086 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7087 return;
7088 }
7089
7090 /* Otherwise, just checksum the raw location expression. */
7091 while (loc != NULL)
7092 {
7093 inchash::hash hstate;
7094 hashval_t hash;
7095
7096 CHECKSUM_ULEB128 (loc->dtprel);
7097 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7098 hash_loc_operands (loc, hstate);
7099 hash = hstate.end ();
7100 CHECKSUM (hash);
7101 loc = loc->dw_loc_next;
7102 }
7103 }
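
/* The DW_OP_plus_uconst special case keeps the signature stable for
   attributes such as DW_AT_data_member_location, which may be emitted
   either as a plain integer constant or as an equivalent one-operation
   location expression: both forms now checksum as DW_FORM_sdata plus the
   value. */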
7104
7105 /* Calculate the checksum of an attribute. */
7106
7107 static void
7108 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7109 struct md5_ctx *ctx, int *mark)
7110 {
7111 dw_loc_descr_ref loc;
7112 rtx r;
7113
7114 if (AT_class (at) == dw_val_class_die_ref)
7115 {
7116 dw_die_ref target_die = AT_ref (at);
7117
7118 /* For pointer and reference types, we checksum only the (qualified)
7119 name of the target type (if there is a name). For friend entries,
7120 we checksum only the (qualified) name of the target type or function.
7121 This allows the checksum to remain the same whether the target type
7122 is complete or not. */
7123 if ((at->dw_attr == DW_AT_type
7124 && (tag == DW_TAG_pointer_type
7125 || tag == DW_TAG_reference_type
7126 || tag == DW_TAG_rvalue_reference_type
7127 || tag == DW_TAG_ptr_to_member_type))
7128 || (at->dw_attr == DW_AT_friend
7129 && tag == DW_TAG_friend))
7130 {
7131 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7132
7133 if (name_attr != NULL)
7134 {
7135 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7136
7137 if (decl == NULL)
7138 decl = target_die;
7139 CHECKSUM_ULEB128 ('N');
7140 CHECKSUM_ULEB128 (at->dw_attr);
7141 if (decl->die_parent != NULL)
7142 checksum_die_context (decl->die_parent, ctx);
7143 CHECKSUM_ULEB128 ('E');
7144 CHECKSUM_STRING (AT_string (name_attr));
7145 return;
7146 }
7147 }
7148
7149 /* For all other references to another DIE, we check to see if the
7150 target DIE has already been visited. If it has, we emit a
7151 backward reference; if not, we descend recursively. */
7152 if (target_die->die_mark > 0)
7153 {
7154 CHECKSUM_ULEB128 ('R');
7155 CHECKSUM_ULEB128 (at->dw_attr);
7156 CHECKSUM_ULEB128 (target_die->die_mark);
7157 }
7158 else
7159 {
7160 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7161
7162 if (decl == NULL)
7163 decl = target_die;
7164 target_die->die_mark = ++(*mark);
7165 CHECKSUM_ULEB128 ('T');
7166 CHECKSUM_ULEB128 (at->dw_attr);
7167 if (decl->die_parent != NULL)
7168 checksum_die_context (decl->die_parent, ctx);
7169 die_checksum_ordered (target_die, ctx, mark);
7170 }
7171 return;
7172 }
7173
7174 CHECKSUM_ULEB128 ('A');
7175 CHECKSUM_ULEB128 (at->dw_attr);
7176
7177 switch (AT_class (at))
7178 {
7179 case dw_val_class_const:
7180 case dw_val_class_const_implicit:
7181 CHECKSUM_ULEB128 (DW_FORM_sdata);
7182 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7183 break;
7184
7185 case dw_val_class_unsigned_const:
7186 case dw_val_class_unsigned_const_implicit:
7187 CHECKSUM_ULEB128 (DW_FORM_sdata);
7188 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7189 break;
7190
7191 case dw_val_class_const_double:
7192 CHECKSUM_ULEB128 (DW_FORM_block);
7193 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7194 CHECKSUM (at->dw_attr_val.v.val_double);
7195 break;
7196
7197 case dw_val_class_wide_int:
7198 CHECKSUM_ULEB128 (DW_FORM_block);
7199 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7200 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7201 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7202 get_full_len (*at->dw_attr_val.v.val_wide)
7203 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7204 break;
7205
7206 case dw_val_class_vec:
7207 CHECKSUM_ULEB128 (DW_FORM_block);
7208 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7209 * at->dw_attr_val.v.val_vec.elt_size);
7210 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7211 (at->dw_attr_val.v.val_vec.length
7212 * at->dw_attr_val.v.val_vec.elt_size));
7213 break;
7214
7215 case dw_val_class_flag:
7216 CHECKSUM_ULEB128 (DW_FORM_flag);
7217 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7218 break;
7219
7220 case dw_val_class_str:
7221 CHECKSUM_ULEB128 (DW_FORM_string);
7222 CHECKSUM_STRING (AT_string (at));
7223 break;
7224
7225 case dw_val_class_addr:
7226 r = AT_addr (at);
7227 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7228 CHECKSUM_ULEB128 (DW_FORM_string);
7229 CHECKSUM_STRING (XSTR (r, 0));
7230 break;
7231
7232 case dw_val_class_offset:
7233 CHECKSUM_ULEB128 (DW_FORM_sdata);
7234 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7235 break;
7236
7237 case dw_val_class_loc:
7238 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7239 loc_checksum_ordered (loc, ctx);
7240 break;
7241
7242 case dw_val_class_fde_ref:
7243 case dw_val_class_symview:
7244 case dw_val_class_lbl_id:
7245 case dw_val_class_lineptr:
7246 case dw_val_class_macptr:
7247 case dw_val_class_loclistsptr:
7248 case dw_val_class_high_pc:
7249 break;
7250
7251 case dw_val_class_file:
7252 case dw_val_class_file_implicit:
7253 CHECKSUM_ULEB128 (DW_FORM_string);
7254 CHECKSUM_STRING (AT_file (at)->filename);
7255 break;
7256
7257 case dw_val_class_data8:
7258 CHECKSUM (at->dw_attr_val.v.val_data8);
7259 break;
7260
7261 default:
7262 break;
7263 }
7264 }
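
/* To summarize the DIE-reference handling above, a reference attribute
   contributes one of three shapes to the checksum stream:
   'N' <attr> <context> 'E' <name> when only the target's qualified name
   matters, 'R' <attr> <mark> when the target has already been visited,
   or 'T' <attr> <context> followed by a full recursive checksum of the
   target. */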
7265
7266 struct checksum_attributes
7267 {
7268 dw_attr_node *at_name;
7269 dw_attr_node *at_type;
7270 dw_attr_node *at_friend;
7271 dw_attr_node *at_accessibility;
7272 dw_attr_node *at_address_class;
7273 dw_attr_node *at_alignment;
7274 dw_attr_node *at_allocated;
7275 dw_attr_node *at_artificial;
7276 dw_attr_node *at_associated;
7277 dw_attr_node *at_binary_scale;
7278 dw_attr_node *at_bit_offset;
7279 dw_attr_node *at_bit_size;
7280 dw_attr_node *at_bit_stride;
7281 dw_attr_node *at_byte_size;
7282 dw_attr_node *at_byte_stride;
7283 dw_attr_node *at_const_value;
7284 dw_attr_node *at_containing_type;
7285 dw_attr_node *at_count;
7286 dw_attr_node *at_data_location;
7287 dw_attr_node *at_data_member_location;
7288 dw_attr_node *at_decimal_scale;
7289 dw_attr_node *at_decimal_sign;
7290 dw_attr_node *at_default_value;
7291 dw_attr_node *at_digit_count;
7292 dw_attr_node *at_discr;
7293 dw_attr_node *at_discr_list;
7294 dw_attr_node *at_discr_value;
7295 dw_attr_node *at_encoding;
7296 dw_attr_node *at_endianity;
7297 dw_attr_node *at_explicit;
7298 dw_attr_node *at_is_optional;
7299 dw_attr_node *at_location;
7300 dw_attr_node *at_lower_bound;
7301 dw_attr_node *at_mutable;
7302 dw_attr_node *at_ordering;
7303 dw_attr_node *at_picture_string;
7304 dw_attr_node *at_prototyped;
7305 dw_attr_node *at_small;
7306 dw_attr_node *at_segment;
7307 dw_attr_node *at_string_length;
7308 dw_attr_node *at_string_length_bit_size;
7309 dw_attr_node *at_string_length_byte_size;
7310 dw_attr_node *at_threads_scaled;
7311 dw_attr_node *at_upper_bound;
7312 dw_attr_node *at_use_location;
7313 dw_attr_node *at_use_UTF8;
7314 dw_attr_node *at_variable_parameter;
7315 dw_attr_node *at_virtuality;
7316 dw_attr_node *at_visibility;
7317 dw_attr_node *at_vtable_elem_location;
7318 };
7319
7320 /* Collect the attributes that we will want to use for the checksum. */
7321
7322 static void
7323 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7324 {
7325 dw_attr_node *a;
7326 unsigned ix;
7327
7328 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7329 {
7330 switch (a->dw_attr)
7331 {
7332 case DW_AT_name:
7333 attrs->at_name = a;
7334 break;
7335 case DW_AT_type:
7336 attrs->at_type = a;
7337 break;
7338 case DW_AT_friend:
7339 attrs->at_friend = a;
7340 break;
7341 case DW_AT_accessibility:
7342 attrs->at_accessibility = a;
7343 break;
7344 case DW_AT_address_class:
7345 attrs->at_address_class = a;
7346 break;
7347 case DW_AT_alignment:
7348 attrs->at_alignment = a;
7349 break;
7350 case DW_AT_allocated:
7351 attrs->at_allocated = a;
7352 break;
7353 case DW_AT_artificial:
7354 attrs->at_artificial = a;
7355 break;
7356 case DW_AT_associated:
7357 attrs->at_associated = a;
7358 break;
7359 case DW_AT_binary_scale:
7360 attrs->at_binary_scale = a;
7361 break;
7362 case DW_AT_bit_offset:
7363 attrs->at_bit_offset = a;
7364 break;
7365 case DW_AT_bit_size:
7366 attrs->at_bit_size = a;
7367 break;
7368 case DW_AT_bit_stride:
7369 attrs->at_bit_stride = a;
7370 break;
7371 case DW_AT_byte_size:
7372 attrs->at_byte_size = a;
7373 break;
7374 case DW_AT_byte_stride:
7375 attrs->at_byte_stride = a;
7376 break;
7377 case DW_AT_const_value:
7378 attrs->at_const_value = a;
7379 break;
7380 case DW_AT_containing_type:
7381 attrs->at_containing_type = a;
7382 break;
7383 case DW_AT_count:
7384 attrs->at_count = a;
7385 break;
7386 case DW_AT_data_location:
7387 attrs->at_data_location = a;
7388 break;
7389 case DW_AT_data_member_location:
7390 attrs->at_data_member_location = a;
7391 break;
7392 case DW_AT_decimal_scale:
7393 attrs->at_decimal_scale = a;
7394 break;
7395 case DW_AT_decimal_sign:
7396 attrs->at_decimal_sign = a;
7397 break;
7398 case DW_AT_default_value:
7399 attrs->at_default_value = a;
7400 break;
7401 case DW_AT_digit_count:
7402 attrs->at_digit_count = a;
7403 break;
7404 case DW_AT_discr:
7405 attrs->at_discr = a;
7406 break;
7407 case DW_AT_discr_list:
7408 attrs->at_discr_list = a;
7409 break;
7410 case DW_AT_discr_value:
7411 attrs->at_discr_value = a;
7412 break;
7413 case DW_AT_encoding:
7414 attrs->at_encoding = a;
7415 break;
7416 case DW_AT_endianity:
7417 attrs->at_endianity = a;
7418 break;
7419 case DW_AT_explicit:
7420 attrs->at_explicit = a;
7421 break;
7422 case DW_AT_is_optional:
7423 attrs->at_is_optional = a;
7424 break;
7425 case DW_AT_location:
7426 attrs->at_location = a;
7427 break;
7428 case DW_AT_lower_bound:
7429 attrs->at_lower_bound = a;
7430 break;
7431 case DW_AT_mutable:
7432 attrs->at_mutable = a;
7433 break;
7434 case DW_AT_ordering:
7435 attrs->at_ordering = a;
7436 break;
7437 case DW_AT_picture_string:
7438 attrs->at_picture_string = a;
7439 break;
7440 case DW_AT_prototyped:
7441 attrs->at_prototyped = a;
7442 break;
7443 case DW_AT_small:
7444 attrs->at_small = a;
7445 break;
7446 case DW_AT_segment:
7447 attrs->at_segment = a;
7448 break;
7449 case DW_AT_string_length:
7450 attrs->at_string_length = a;
7451 break;
7452 case DW_AT_string_length_bit_size:
7453 attrs->at_string_length_bit_size = a;
7454 break;
7455 case DW_AT_string_length_byte_size:
7456 attrs->at_string_length_byte_size = a;
7457 break;
7458 case DW_AT_threads_scaled:
7459 attrs->at_threads_scaled = a;
7460 break;
7461 case DW_AT_upper_bound:
7462 attrs->at_upper_bound = a;
7463 break;
7464 case DW_AT_use_location:
7465 attrs->at_use_location = a;
7466 break;
7467 case DW_AT_use_UTF8:
7468 attrs->at_use_UTF8 = a;
7469 break;
7470 case DW_AT_variable_parameter:
7471 attrs->at_variable_parameter = a;
7472 break;
7473 case DW_AT_virtuality:
7474 attrs->at_virtuality = a;
7475 break;
7476 case DW_AT_visibility:
7477 attrs->at_visibility = a;
7478 break;
7479 case DW_AT_vtable_elem_location:
7480 attrs->at_vtable_elem_location = a;
7481 break;
7482 default:
7483 break;
7484 }
7485 }
7486 }
7487
7488 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7489
7490 static void
7491 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7492 {
7493 dw_die_ref c;
7494 dw_die_ref decl;
7495 struct checksum_attributes attrs;
7496
7497 CHECKSUM_ULEB128 ('D');
7498 CHECKSUM_ULEB128 (die->die_tag);
7499
7500 memset (&attrs, 0, sizeof (attrs));
7501
7502 decl = get_AT_ref (die, DW_AT_specification);
7503 if (decl != NULL)
7504 collect_checksum_attributes (&attrs, decl);
7505 collect_checksum_attributes (&attrs, die);
7506
7507 CHECKSUM_ATTR (attrs.at_name);
7508 CHECKSUM_ATTR (attrs.at_accessibility);
7509 CHECKSUM_ATTR (attrs.at_address_class);
7510 CHECKSUM_ATTR (attrs.at_allocated);
7511 CHECKSUM_ATTR (attrs.at_artificial);
7512 CHECKSUM_ATTR (attrs.at_associated);
7513 CHECKSUM_ATTR (attrs.at_binary_scale);
7514 CHECKSUM_ATTR (attrs.at_bit_offset);
7515 CHECKSUM_ATTR (attrs.at_bit_size);
7516 CHECKSUM_ATTR (attrs.at_bit_stride);
7517 CHECKSUM_ATTR (attrs.at_byte_size);
7518 CHECKSUM_ATTR (attrs.at_byte_stride);
7519 CHECKSUM_ATTR (attrs.at_const_value);
7520 CHECKSUM_ATTR (attrs.at_containing_type);
7521 CHECKSUM_ATTR (attrs.at_count);
7522 CHECKSUM_ATTR (attrs.at_data_location);
7523 CHECKSUM_ATTR (attrs.at_data_member_location);
7524 CHECKSUM_ATTR (attrs.at_decimal_scale);
7525 CHECKSUM_ATTR (attrs.at_decimal_sign);
7526 CHECKSUM_ATTR (attrs.at_default_value);
7527 CHECKSUM_ATTR (attrs.at_digit_count);
7528 CHECKSUM_ATTR (attrs.at_discr);
7529 CHECKSUM_ATTR (attrs.at_discr_list);
7530 CHECKSUM_ATTR (attrs.at_discr_value);
7531 CHECKSUM_ATTR (attrs.at_encoding);
7532 CHECKSUM_ATTR (attrs.at_endianity);
7533 CHECKSUM_ATTR (attrs.at_explicit);
7534 CHECKSUM_ATTR (attrs.at_is_optional);
7535 CHECKSUM_ATTR (attrs.at_location);
7536 CHECKSUM_ATTR (attrs.at_lower_bound);
7537 CHECKSUM_ATTR (attrs.at_mutable);
7538 CHECKSUM_ATTR (attrs.at_ordering);
7539 CHECKSUM_ATTR (attrs.at_picture_string);
7540 CHECKSUM_ATTR (attrs.at_prototyped);
7541 CHECKSUM_ATTR (attrs.at_small);
7542 CHECKSUM_ATTR (attrs.at_segment);
7543 CHECKSUM_ATTR (attrs.at_string_length);
7544 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7545 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7546 CHECKSUM_ATTR (attrs.at_threads_scaled);
7547 CHECKSUM_ATTR (attrs.at_upper_bound);
7548 CHECKSUM_ATTR (attrs.at_use_location);
7549 CHECKSUM_ATTR (attrs.at_use_UTF8);
7550 CHECKSUM_ATTR (attrs.at_variable_parameter);
7551 CHECKSUM_ATTR (attrs.at_virtuality);
7552 CHECKSUM_ATTR (attrs.at_visibility);
7553 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7554 CHECKSUM_ATTR (attrs.at_type);
7555 CHECKSUM_ATTR (attrs.at_friend);
7556 CHECKSUM_ATTR (attrs.at_alignment);
7557
7558 /* Checksum the child DIEs. */
7559 c = die->die_child;
7560 if (c) do {
7561 dw_attr_node *name_attr;
7562
7563 c = c->die_sib;
7564 name_attr = get_AT (c, DW_AT_name);
7565 if (is_template_instantiation (c))
7566 {
7567 /* Ignore instantiations of member type and function templates. */
7568 }
7569 else if (name_attr != NULL
7570 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7571 {
7572 /* Use a shallow checksum for named nested types and member
7573 functions. */
7574 CHECKSUM_ULEB128 ('S');
7575 CHECKSUM_ULEB128 (c->die_tag);
7576 CHECKSUM_STRING (AT_string (name_attr));
7577 }
7578 else
7579 {
7580 /* Use a deep checksum for other children. */
7581 /* Mark this DIE so it gets processed when unmarking. */
7582 if (c->die_mark == 0)
7583 c->die_mark = -1;
7584 die_checksum_ordered (c, ctx, mark);
7585 }
7586 } while (c != die->die_child);
7587
7588 CHECKSUM_ULEB128 (0);
7589 }
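
/* Schematically, the ordered checksum stream for a DIE is

     'D' <tag>
     'A' <attr> <form> <value>    (or an 'N'/'R'/'T' record, once per
                                   attribute in the fixed order above)
     'S' <tag> <name>             (for each named nested type or member
                                   function)
     ... recursive 'D' records for the remaining children ...
     0

   where every letter, tag and attribute code is emitted as a ULEB128. */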
7590
7591 /* Add a type name and tag to a hash. */
7592 static void
7593 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7594 {
7595 CHECKSUM_ULEB128 (tag);
7596 CHECKSUM_STRING (name);
7597 }
7598
7599 #undef CHECKSUM
7600 #undef CHECKSUM_STRING
7601 #undef CHECKSUM_ATTR
7602 #undef CHECKSUM_SLEB128
7603 #undef CHECKSUM_ULEB128
7604
7605 /* Generate the type signature for DIE. This is computed by generating an
7606 MD5 checksum over the DIE's tag, its relevant attributes, and its
7607 children. Attributes that are references to other DIEs are processed
7608 by recursion, using the MARK field to prevent infinite recursion.
7609 If the DIE is nested inside a namespace or another type, we also
7610 need to include that context in the signature. The lower 64 bits
7611 of the resulting MD5 checksum comprise the signature. */
7612
7613 static void
7614 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7615 {
7616 int mark;
7617 const char *name;
7618 unsigned char checksum[16];
7619 struct md5_ctx ctx;
7620 dw_die_ref decl;
7621 dw_die_ref parent;
7622
7623 name = get_AT_string (die, DW_AT_name);
7624 decl = get_AT_ref (die, DW_AT_specification);
7625 parent = get_die_parent (die);
7626
7627 /* First, compute a signature for just the type name (and its surrounding
7628 context, if any). This is stored in the type unit DIE for link-time
7629 ODR (one-definition rule) checking. */
7630
7631 if (is_cxx () && name != NULL)
7632 {
7633 md5_init_ctx (&ctx);
7634
7635 /* Checksum the names of surrounding namespaces and structures. */
7636 if (parent != NULL)
7637 checksum_die_context (parent, &ctx);
7638
7639 /* Checksum the current DIE. */
7640 die_odr_checksum (die->die_tag, name, &ctx);
7641 md5_finish_ctx (&ctx, checksum);
7642
7643 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7644 }
7645
7646 /* Next, compute the complete type signature. */
7647
7648 md5_init_ctx (&ctx);
7649 mark = 1;
7650 die->die_mark = mark;
7651
7652 /* Checksum the names of surrounding namespaces and structures. */
7653 if (parent != NULL)
7654 checksum_die_context (parent, &ctx);
7655
7656 /* Checksum the DIE and its children. */
7657 die_checksum_ordered (die, &ctx, &mark);
7658 unmark_all_dies (die);
7659 md5_finish_ctx (&ctx, checksum);
7660
7661 /* Store the signature in the type node and link the type DIE and the
7662 type node together. */
7663 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7664 DWARF_TYPE_SIGNATURE_SIZE);
7665 die->comdat_type_p = true;
7666 die->die_id.die_type_node = type_node;
7667 type_node->type_die = die;
7668
7669 /* If the DIE is a specification, link its declaration to the type node
7670 as well. */
7671 if (decl != NULL)
7672 {
7673 decl->comdat_type_p = true;
7674 decl->die_id.die_type_node = type_node;
7675 }
7676 }
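
/* The DWARF_TYPE_SIGNATURE_SIZE low-order bytes of the MD5 digest stored
   here are what other units later emit when they reference this type via
   DW_AT_signature with form DW_FORM_ref_sig8. */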
7677
7678 /* Do the location expressions look the same? */
7679 static inline int
7680 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7681 {
7682 return loc1->dw_loc_opc == loc2->dw_loc_opc
7683 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7684 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7685 }
7686
7687 /* Do the values look the same? */
7688 static int
7689 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7690 {
7691 dw_loc_descr_ref loc1, loc2;
7692 rtx r1, r2;
7693
7694 if (v1->val_class != v2->val_class)
7695 return 0;
7696
7697 switch (v1->val_class)
7698 {
7699 case dw_val_class_const:
7700 case dw_val_class_const_implicit:
7701 return v1->v.val_int == v2->v.val_int;
7702 case dw_val_class_unsigned_const:
7703 case dw_val_class_unsigned_const_implicit:
7704 return v1->v.val_unsigned == v2->v.val_unsigned;
7705 case dw_val_class_const_double:
7706 return v1->v.val_double.high == v2->v.val_double.high
7707 && v1->v.val_double.low == v2->v.val_double.low;
7708 case dw_val_class_wide_int:
7709 return *v1->v.val_wide == *v2->v.val_wide;
7710 case dw_val_class_vec:
7711 if (v1->v.val_vec.length != v2->v.val_vec.length
7712 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7713 return 0;
7714 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7715 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7716 return 0;
7717 return 1;
7718 case dw_val_class_flag:
7719 return v1->v.val_flag == v2->v.val_flag;
7720 case dw_val_class_str:
7721 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7722
7723 case dw_val_class_addr:
7724 r1 = v1->v.val_addr;
7725 r2 = v2->v.val_addr;
7726 if (GET_CODE (r1) != GET_CODE (r2))
7727 return 0;
7728 return rtx_equal_p (r1, r2);
7729
7730 case dw_val_class_offset:
7731 return v1->v.val_offset == v2->v.val_offset;
7732
7733 case dw_val_class_loc:
7734 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7735 loc1 && loc2;
7736 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7737 if (!same_loc_p (loc1, loc2, mark))
7738 return 0;
7739 return !loc1 && !loc2;
7740
7741 case dw_val_class_die_ref:
7742 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7743
7744 case dw_val_class_symview:
7745 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7746
7747 case dw_val_class_fde_ref:
7748 case dw_val_class_vms_delta:
7749 case dw_val_class_lbl_id:
7750 case dw_val_class_lineptr:
7751 case dw_val_class_macptr:
7752 case dw_val_class_loclistsptr:
7753 case dw_val_class_high_pc:
7754 return 1;
7755
7756 case dw_val_class_file:
7757 case dw_val_class_file_implicit:
7758 return v1->v.val_file == v2->v.val_file;
7759
7760 case dw_val_class_data8:
7761 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7762
7763 default:
7764 return 1;
7765 }
7766 }
7767
7768 /* Do the attributes look the same? */
7769
7770 static int
7771 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7772 {
7773 if (at1->dw_attr != at2->dw_attr)
7774 return 0;
7775
7776 /* We don't care that this was compiled with a different compiler
7777 snapshot; if the output is the same, that's what matters. */
7778 if (at1->dw_attr == DW_AT_producer)
7779 return 1;
7780
7781 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7782 }
7783
7784 /* Do the dies look the same? */
7785
7786 static int
7787 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7788 {
7789 dw_die_ref c1, c2;
7790 dw_attr_node *a1;
7791 unsigned ix;
7792
7793 /* To avoid infinite recursion. */
7794 if (die1->die_mark)
7795 return die1->die_mark == die2->die_mark;
7796 die1->die_mark = die2->die_mark = ++(*mark);
7797
7798 if (die1->die_tag != die2->die_tag)
7799 return 0;
7800
7801 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7802 return 0;
7803
7804 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7805 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7806 return 0;
7807
7808 c1 = die1->die_child;
7809 c2 = die2->die_child;
7810 if (! c1)
7811 {
7812 if (c2)
7813 return 0;
7814 }
7815 else
7816 for (;;)
7817 {
7818 if (!same_die_p (c1, c2, mark))
7819 return 0;
7820 c1 = c1->die_sib;
7821 c2 = c2->die_sib;
7822 if (c1 == die1->die_child)
7823 {
7824 if (c2 == die2->die_child)
7825 break;
7826 else
7827 return 0;
7828 }
7829 }
7830
7831 return 1;
7832 }
7833
7834 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7835 children, and set die_symbol. */
7836
7837 static void
7838 compute_comp_unit_symbol (dw_die_ref unit_die)
7839 {
7840 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7841 const char *base = die_name ? lbasename (die_name) : "anonymous";
7842 char *name = XALLOCAVEC (char, strlen (base) + 64);
7843 char *p;
7844 int i, mark;
7845 unsigned char checksum[16];
7846 struct md5_ctx ctx;
7847
7848 /* Compute the checksum of the DIE, then append part of it as hex digits to
7849 the name (i.e. the filename) of the unit. */
7850
7851 md5_init_ctx (&ctx);
7852 mark = 0;
7853 die_checksum (unit_die, &ctx, &mark);
7854 unmark_all_dies (unit_die);
7855 md5_finish_ctx (&ctx, checksum);
7856
7857 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7858 not start with a letter but with anything valid for filenames and
7859 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7860 character is not a letter. */
7861 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7862 clean_symbol_name (name);
7863
7864 p = name + strlen (name);
7865 for (i = 0; i < 4; i++)
7866 {
7867 sprintf (p, "%.2x", checksum[i]);
7868 p += 2;
7869 }
7870
7871 unit_die->die_id.die_symbol = xstrdup (name);
7872 }
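
/* For a unit whose DW_AT_name is, say, "/tmp/foo.c", this yields a symbol
   built from the cleaned-up base name followed by eight hex digits of the
   checksum, e.g. something like "foo_c_0a1b2c3d". */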
7873
7874 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7875
7876 static int
7877 is_type_die (dw_die_ref die)
7878 {
7879 switch (die->die_tag)
7880 {
7881 case DW_TAG_array_type:
7882 case DW_TAG_class_type:
7883 case DW_TAG_interface_type:
7884 case DW_TAG_enumeration_type:
7885 case DW_TAG_pointer_type:
7886 case DW_TAG_reference_type:
7887 case DW_TAG_rvalue_reference_type:
7888 case DW_TAG_string_type:
7889 case DW_TAG_structure_type:
7890 case DW_TAG_subroutine_type:
7891 case DW_TAG_union_type:
7892 case DW_TAG_ptr_to_member_type:
7893 case DW_TAG_set_type:
7894 case DW_TAG_subrange_type:
7895 case DW_TAG_base_type:
7896 case DW_TAG_const_type:
7897 case DW_TAG_file_type:
7898 case DW_TAG_packed_type:
7899 case DW_TAG_volatile_type:
7900 case DW_TAG_typedef:
7901 return 1;
7902 default:
7903 return 0;
7904 }
7905 }
7906
7907 /* Returns true iff C is a compile-unit DIE. */
7908
7909 static inline bool
7910 is_cu_die (dw_die_ref c)
7911 {
7912 return c && (c->die_tag == DW_TAG_compile_unit
7913 || c->die_tag == DW_TAG_skeleton_unit);
7914 }
7915
7916 /* Returns true iff C is a unit DIE of some sort. */
7917
7918 static inline bool
7919 is_unit_die (dw_die_ref c)
7920 {
7921 return c && (c->die_tag == DW_TAG_compile_unit
7922 || c->die_tag == DW_TAG_partial_unit
7923 || c->die_tag == DW_TAG_type_unit
7924 || c->die_tag == DW_TAG_skeleton_unit);
7925 }
7926
7927 /* Returns true iff C is a namespace DIE. */
7928
7929 static inline bool
7930 is_namespace_die (dw_die_ref c)
7931 {
7932 return c && c->die_tag == DW_TAG_namespace;
7933 }
7934
7935 /* Return non-zero if this DIE is a template parameter. */
7936
7937 static inline bool
7938 is_template_parameter (dw_die_ref die)
7939 {
7940 switch (die->die_tag)
7941 {
7942 case DW_TAG_template_type_param:
7943 case DW_TAG_template_value_param:
7944 case DW_TAG_GNU_template_template_param:
7945 case DW_TAG_GNU_template_parameter_pack:
7946 return true;
7947 default:
7948 return false;
7949 }
7950 }
7951
7952 /* Return non-zero if this DIE represents a template instantiation. */
7953
7954 static inline bool
7955 is_template_instantiation (dw_die_ref die)
7956 {
7957 dw_die_ref c;
7958
7959 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7960 return false;
7961 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7962 return false;
7963 }
7964
7965 static char *
7966 gen_internal_sym (const char *prefix)
7967 {
7968 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7969
7970 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7971 return xstrdup (buf);
7972 }
7973
7974 /* Return non-zero if this DIE is a declaration. */
7975
7976 static int
7977 is_declaration_die (dw_die_ref die)
7978 {
7979 dw_attr_node *a;
7980 unsigned ix;
7981
7982 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7983 if (a->dw_attr == DW_AT_declaration)
7984 return 1;
7985
7986 return 0;
7987 }
7988
7989 /* Return non-zero if this DIE is nested inside a subprogram. */
7990
7991 static int
7992 is_nested_in_subprogram (dw_die_ref die)
7993 {
7994 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7995
7996 if (decl == NULL)
7997 decl = die;
7998 return local_scope_p (decl);
7999 }
8000
8001 /* Return non-zero if this DIE contains a defining declaration of a
8002 subprogram. */
8003
8004 static int
8005 contains_subprogram_definition (dw_die_ref die)
8006 {
8007 dw_die_ref c;
8008
8009 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8010 return 1;
8011 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8012 return 0;
8013 }
8014
8015 /* Return non-zero if this is a type DIE that should be moved to a
8016 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8017 unit type. */
8018
8019 static int
8020 should_move_die_to_comdat (dw_die_ref die)
8021 {
8022 switch (die->die_tag)
8023 {
8024 case DW_TAG_class_type:
8025 case DW_TAG_structure_type:
8026 case DW_TAG_enumeration_type:
8027 case DW_TAG_union_type:
8028 /* Don't move declarations, inlined instances, types nested in a
8029 subprogram, or types that contain subprogram definitions. */
8030 if (is_declaration_die (die)
8031 || get_AT (die, DW_AT_abstract_origin)
8032 || is_nested_in_subprogram (die)
8033 || contains_subprogram_definition (die))
8034 return 0;
8035 return 1;
8036 case DW_TAG_array_type:
8037 case DW_TAG_interface_type:
8038 case DW_TAG_pointer_type:
8039 case DW_TAG_reference_type:
8040 case DW_TAG_rvalue_reference_type:
8041 case DW_TAG_string_type:
8042 case DW_TAG_subroutine_type:
8043 case DW_TAG_ptr_to_member_type:
8044 case DW_TAG_set_type:
8045 case DW_TAG_subrange_type:
8046 case DW_TAG_base_type:
8047 case DW_TAG_const_type:
8048 case DW_TAG_file_type:
8049 case DW_TAG_packed_type:
8050 case DW_TAG_volatile_type:
8051 case DW_TAG_typedef:
8052 default:
8053 return 0;
8054 }
8055 }
8056
8057 /* Make a clone of DIE. */
8058
8059 static dw_die_ref
8060 clone_die (dw_die_ref die)
8061 {
8062 dw_die_ref clone = new_die_raw (die->die_tag);
8063 dw_attr_node *a;
8064 unsigned ix;
8065
8066 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8067 add_dwarf_attr (clone, a);
8068
8069 return clone;
8070 }
8071
8072 /* Make a clone of the tree rooted at DIE. */
8073
8074 static dw_die_ref
8075 clone_tree (dw_die_ref die)
8076 {
8077 dw_die_ref c;
8078 dw_die_ref clone = clone_die (die);
8079
8080 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8081
8082 return clone;
8083 }
8084
8085 /* Make a clone of DIE as a declaration. */
8086
8087 static dw_die_ref
8088 clone_as_declaration (dw_die_ref die)
8089 {
8090 dw_die_ref clone;
8091 dw_die_ref decl;
8092 dw_attr_node *a;
8093 unsigned ix;
8094
8095 /* If the DIE is already a declaration, just clone it. */
8096 if (is_declaration_die (die))
8097 return clone_die (die);
8098
8099 /* If the DIE is a specification, just clone its declaration DIE. */
8100 decl = get_AT_ref (die, DW_AT_specification);
8101 if (decl != NULL)
8102 {
8103 clone = clone_die (decl);
8104 if (die->comdat_type_p)
8105 add_AT_die_ref (clone, DW_AT_signature, die);
8106 return clone;
8107 }
8108
8109 clone = new_die_raw (die->die_tag);
8110
8111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8112 {
8113 /* We don't want to copy over all attributes.
8114 For example we don't want DW_AT_byte_size because otherwise we will no
8115 longer have a declaration and GDB will treat it as a definition. */
8116
8117 switch (a->dw_attr)
8118 {
8119 case DW_AT_abstract_origin:
8120 case DW_AT_artificial:
8121 case DW_AT_containing_type:
8122 case DW_AT_external:
8123 case DW_AT_name:
8124 case DW_AT_type:
8125 case DW_AT_virtuality:
8126 case DW_AT_linkage_name:
8127 case DW_AT_MIPS_linkage_name:
8128 add_dwarf_attr (clone, a);
8129 break;
8130 case DW_AT_byte_size:
8131 case DW_AT_alignment:
8132 default:
8133 break;
8134 }
8135 }
8136
8137 if (die->comdat_type_p)
8138 add_AT_die_ref (clone, DW_AT_signature, die);
8139
8140 add_AT_flag (clone, DW_AT_declaration, 1);
8141 return clone;
8142 }
8143
8144
8145 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8146
8147 struct decl_table_entry
8148 {
8149 dw_die_ref orig;
8150 dw_die_ref copy;
8151 };
8152
8153 /* Helpers to manipulate hash table of copied declarations. */
8154
8155 /* Hashtable helpers. */
8156
8157 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8158 {
8159 typedef die_struct *compare_type;
8160 static inline hashval_t hash (const decl_table_entry *);
8161 static inline bool equal (const decl_table_entry *, const die_struct *);
8162 };
8163
8164 inline hashval_t
8165 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8166 {
8167 return htab_hash_pointer (entry->orig);
8168 }
8169
8170 inline bool
8171 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8172 const die_struct *entry2)
8173 {
8174 return entry1->orig == entry2;
8175 }
8176
8177 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8178
8179 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8180 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8181 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8182 to check if the ancestor has already been copied into UNIT. */
8183
8184 static dw_die_ref
8185 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8186 decl_hash_type *decl_table)
8187 {
8188 dw_die_ref parent = die->die_parent;
8189 dw_die_ref new_parent = unit;
8190 dw_die_ref copy;
8191 decl_table_entry **slot = NULL;
8192 struct decl_table_entry *entry = NULL;
8193
8194 /* If DIE refers to a stub, unfold it so we get the appropriate
8195 DIE registered as orig in decl_table. */
8196 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8197 die = c;
8198
8199 if (decl_table)
8200 {
8201 /* Check if the entry has already been copied to UNIT. */
8202 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8203 INSERT);
8204 if (*slot != HTAB_EMPTY_ENTRY)
8205 {
8206 entry = *slot;
8207 return entry->copy;
8208 }
8209
8210 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8211 entry = XCNEW (struct decl_table_entry);
8212 entry->orig = die;
8213 entry->copy = NULL;
8214 *slot = entry;
8215 }
8216
8217 if (parent != NULL)
8218 {
8219 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8220 if (spec != NULL)
8221 parent = spec;
8222 if (!is_unit_die (parent))
8223 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8224 }
8225
8226 copy = clone_as_declaration (die);
8227 add_child_die (new_parent, copy);
8228
8229 if (decl_table)
8230 {
8231 /* Record the pointer to the copy. */
8232 entry->copy = copy;
8233 }
8234
8235 return copy;
8236 }

8237 /* Copy the declaration context to the new type unit DIE. This includes
8238 any surrounding namespace or type declarations. If the DIE has a
8239 DW_AT_specification attribute, the attributes and children attached to
8240 the specification are also copied in, and a pointer to the original
8241 parent of the declaration DIE is returned. Returns NULL otherwise. */
8242
8243 static dw_die_ref
8244 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8245 {
8246 dw_die_ref decl;
8247 dw_die_ref new_decl;
8248 dw_die_ref orig_parent = NULL;
8249
8250 decl = get_AT_ref (die, DW_AT_specification);
8251 if (decl == NULL)
8252 decl = die;
8253 else
8254 {
8255 unsigned ix;
8256 dw_die_ref c;
8257 dw_attr_node *a;
8258
8259 /* The original DIE will be changed to a declaration, and must
8260 be moved to be a child of the original declaration DIE. */
8261 orig_parent = decl->die_parent;
8262
8263 /* Copy the type node pointer from the new DIE to the original
8264 declaration DIE so we can forward references later. */
8265 decl->comdat_type_p = true;
8266 decl->die_id.die_type_node = die->die_id.die_type_node;
8267
8268 remove_AT (die, DW_AT_specification);
8269
8270 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8271 {
8272 if (a->dw_attr != DW_AT_name
8273 && a->dw_attr != DW_AT_declaration
8274 && a->dw_attr != DW_AT_external)
8275 add_dwarf_attr (die, a);
8276 }
8277
8278 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8279 }
8280
8281 if (decl->die_parent != NULL
8282 && !is_unit_die (decl->die_parent))
8283 {
8284 new_decl = copy_ancestor_tree (unit, decl, NULL);
8285 if (new_decl != NULL)
8286 {
8287 remove_AT (new_decl, DW_AT_signature);
8288 add_AT_specification (die, new_decl);
8289 }
8290 }
8291
8292 return orig_parent;
8293 }
8294
8295 /* Generate the skeleton ancestor tree for the given NODE, then clone
8296 the DIE and add the clone into the tree. */
8297
8298 static void
8299 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8300 {
8301 if (node->new_die != NULL)
8302 return;
8303
8304 node->new_die = clone_as_declaration (node->old_die);
8305
8306 if (node->parent != NULL)
8307 {
8308 generate_skeleton_ancestor_tree (node->parent);
8309 add_child_die (node->parent->new_die, node->new_die);
8310 }
8311 }
8312
8313 /* Generate a skeleton tree of DIEs containing any declarations that are
8314 found in the original tree. We traverse the tree looking for declaration
8315 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8316
8317 static void
8318 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8319 {
8320 skeleton_chain_node node;
8321 dw_die_ref c;
8322 dw_die_ref first;
8323 dw_die_ref prev = NULL;
8324 dw_die_ref next = NULL;
8325
8326 node.parent = parent;
8327
8328 first = c = parent->old_die->die_child;
8329 if (c)
8330 next = c->die_sib;
8331 if (c) do {
8332 if (prev == NULL || prev->die_sib == c)
8333 prev = c;
8334 c = next;
8335 next = (c == first ? NULL : c->die_sib);
8336 node.old_die = c;
8337 node.new_die = NULL;
8338 if (is_declaration_die (c))
8339 {
8340 if (is_template_instantiation (c))
8341 {
8342 /* Instantiated templates do not need to be cloned into the
8343 type unit. Just move the DIE and its children back to
8344 the skeleton tree (in the main CU). */
8345 remove_child_with_prev (c, prev);
8346 add_child_die (parent->new_die, c);
8347 c = prev;
8348 }
8349 else if (c->comdat_type_p)
8350 {
8351 /* This is the skeleton of a type broken out earlier by
8352 break_out_comdat_types. Clone the existing DIE, but keep the
8353 children under the original (which is in the main CU). */
8354 dw_die_ref clone = clone_die (c);
8355
8356 replace_child (c, clone, prev);
8357 generate_skeleton_ancestor_tree (parent);
8358 add_child_die (parent->new_die, c);
8359 c = clone;
8360 continue;
8361 }
8362 else
8363 {
8364 /* Clone the existing DIE, move the original to the skeleton
8365 tree (which is in the main CU), and put the clone, with
8366 all the original's children, where the original came from
8367 (which is about to be moved to the type unit). */
8368 dw_die_ref clone = clone_die (c);
8369 move_all_children (c, clone);
8370
8371 /* If the original has a DW_AT_object_pointer attribute,
8372 it would now point to a child DIE just moved to the
8373 cloned tree, so we need to remove that attribute from
8374 the original. */
8375 remove_AT (c, DW_AT_object_pointer);
8376
8377 replace_child (c, clone, prev);
8378 generate_skeleton_ancestor_tree (parent);
8379 add_child_die (parent->new_die, c);
8380 node.old_die = clone;
8381 node.new_die = c;
8382 c = clone;
8383 }
8384 }
8385 generate_skeleton_bottom_up (&node);
8386 } while (next != NULL);
8387 }
8388
8389 /* Wrapper function for generate_skeleton_bottom_up. */
8390
8391 static dw_die_ref
8392 generate_skeleton (dw_die_ref die)
8393 {
8394 skeleton_chain_node node;
8395
8396 node.old_die = die;
8397 node.new_die = NULL;
8398 node.parent = NULL;
8399
8400 /* If this type definition is nested inside another type,
8401 and is not an instantiation of a template, always leave
8402 at least a declaration in its place. */
8403 if (die->die_parent != NULL
8404 && is_type_die (die->die_parent)
8405 && !is_template_instantiation (die))
8406 node.new_die = clone_as_declaration (die);
8407
8408 generate_skeleton_bottom_up (&node);
8409 return node.new_die;
8410 }
8411
8412 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8413 declaration. The original DIE is moved to a new compile unit so that
8414 existing references to it follow it to the new location. If any of the
8415 original DIE's descendants is a declaration, we need to replace the
8416 original DIE with a skeleton tree and move the declarations back into the
8417 skeleton tree. */
8418
8419 static dw_die_ref
8420 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8421 dw_die_ref prev)
8422 {
8423 dw_die_ref skeleton, orig_parent;
8424
8425 /* Copy the declaration context to the type unit DIE. If the returned
8426 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8427 that DIE. */
8428 orig_parent = copy_declaration_context (unit, child);
8429
8430 skeleton = generate_skeleton (child);
8431 if (skeleton == NULL)
8432 remove_child_with_prev (child, prev);
8433 else
8434 {
8435 skeleton->comdat_type_p = true;
8436 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8437
8438 /* If the original DIE was a specification, we need to put
8439 the skeleton under the parent DIE of the declaration.
8440 This leaves the original declaration in the tree, but
8441 it will be pruned later since there are no longer any
8442 references to it. */
8443 if (orig_parent != NULL)
8444 {
8445 remove_child_with_prev (child, prev);
8446 add_child_die (orig_parent, skeleton);
8447 }
8448 else
8449 replace_child (child, skeleton, prev);
8450 }
8451
8452 return skeleton;
8453 }
8454
8455 static void
8456 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8457 comdat_type_node *type_node,
8458 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8459
8460 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8461 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8462 DWARF procedure references in the DW_AT_location attribute. */
8463
8464 static dw_die_ref
8465 copy_dwarf_procedure (dw_die_ref die,
8466 comdat_type_node *type_node,
8467 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8468 {
8469 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8470
8471 /* DWARF procedures are not supposed to have children... */
8472 gcc_assert (die->die_child == NULL);
8473
8474 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8475 gcc_assert (vec_safe_length (die->die_attr) == 1
8476 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8477
8478 /* Do not copy DWARF procedures more than once. */
8479 bool existed;
8480 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8481 if (existed)
8482 return die_copy;
8483
8484 die_copy = clone_die (die);
8485 add_child_die (type_node->root_die, die_copy);
8486 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8487 return die_copy;
8488 }
8489
8490 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8491 procedures in DIE's attributes. */
8492
8493 static void
8494 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8495 comdat_type_node *type_node,
8496 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8497 {
8498 dw_attr_node *a;
8499 unsigned i;
8500
8501 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8502 {
8503 dw_loc_descr_ref loc;
8504
8505 if (a->dw_attr_val.val_class != dw_val_class_loc)
8506 continue;
8507
8508 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8509 {
8510 switch (loc->dw_loc_opc)
8511 {
8512 case DW_OP_call2:
8513 case DW_OP_call4:
8514 case DW_OP_call_ref:
8515 gcc_assert (loc->dw_loc_oprnd1.val_class
8516 == dw_val_class_die_ref);
8517 loc->dw_loc_oprnd1.v.val_die_ref.die
8518 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8519 type_node,
8520 copied_dwarf_procs);
8521
8522 default:
8523 break;
8524 }
8525 }
8526 }
8527 }
8528
8529 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8530 rewrite references to point to the copies.
8531
8532 References are looked for in DIE's attributes and recursively in all its
8533 children's attributes that are location descriptions. COPIED_DWARF_PROCS is
8534 a mapping from old DWARF procedures to their copies. It is used to avoid
8535 copying the same DWARF procedure twice under TYPE_NODE. */
8536
8537 static void
8538 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8539 comdat_type_node *type_node,
8540 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8541 {
8542 dw_die_ref c;
8543
8544 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8545 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8546 type_node,
8547 copied_dwarf_procs));
8548 }
8549
8550 /* Traverse the DIE and set up additional .debug_types or .debug_info
8551 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8552 section. */
8553
8554 static void
8555 break_out_comdat_types (dw_die_ref die)
8556 {
8557 dw_die_ref c;
8558 dw_die_ref first;
8559 dw_die_ref prev = NULL;
8560 dw_die_ref next = NULL;
8561 dw_die_ref unit = NULL;
8562
8563 first = c = die->die_child;
8564 if (c)
8565 next = c->die_sib;
8566 if (c) do {
8567 if (prev == NULL || prev->die_sib == c)
8568 prev = c;
8569 c = next;
8570 next = (c == first ? NULL : c->die_sib);
8571 if (should_move_die_to_comdat (c))
8572 {
8573 dw_die_ref replacement;
8574 comdat_type_node *type_node;
8575
8576 /* Break out nested types into their own type units. */
8577 break_out_comdat_types (c);
8578
8579 /* Create a new type unit DIE as the root for the new tree, and
8580 add it to the list of comdat types. */
8581 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8582 add_AT_unsigned (unit, DW_AT_language,
8583 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8584 type_node = ggc_cleared_alloc<comdat_type_node> ();
8585 type_node->root_die = unit;
8586 type_node->next = comdat_type_list;
8587 comdat_type_list = type_node;
8588
8589 /* Generate the type signature. */
8590 generate_type_signature (c, type_node);
8591
8592 /* Copy the declaration context, attributes, and children of the
8593 declaration into the new type unit DIE, then remove this DIE
8594 from the main CU (or replace it with a skeleton if necessary). */
8595 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8596 type_node->skeleton_die = replacement;
8597
8598 /* Add the DIE to the new compunit. */
8599 add_child_die (unit, c);
8600
8601 /* Types can reference DWARF procedures for type size or data location
8602 expressions. Calls in DWARF expressions cannot target procedures
8603 that are not in the same section. So we must copy DWARF procedures
8604 along with this type and then rewrite references to them. */
8605 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8606 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8607
8608 if (replacement != NULL)
8609 c = replacement;
8610 }
8611 else if (c->die_tag == DW_TAG_namespace
8612 || c->die_tag == DW_TAG_class_type
8613 || c->die_tag == DW_TAG_structure_type
8614 || c->die_tag == DW_TAG_union_type)
8615 {
8616 /* Look for nested types that can be broken out. */
8617 break_out_comdat_types (c);
8618 }
8619 } while (next != NULL);
8620 }
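
/* In short: each type DIE selected by should_move_die_to_comdat is detached
   from the main CU and re-rooted under a fresh DW_TAG_type_unit on
   comdat_type_list, keyed by the signature from generate_type_signature;
   back in the main CU it is either dropped entirely or replaced by a
   skeleton declaration carrying DW_AT_signature, depending on whether any
   of its children, such as member function declarations, must stay
   behind. */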
8621
8622 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8623 Enter all the cloned children into the hash table decl_table. */
8624
8625 static dw_die_ref
8626 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8627 {
8628 dw_die_ref c;
8629 dw_die_ref clone;
8630 struct decl_table_entry *entry;
8631 decl_table_entry **slot;
8632
8633 if (die->die_tag == DW_TAG_subprogram)
8634 clone = clone_as_declaration (die);
8635 else
8636 clone = clone_die (die);
8637
8638 slot = decl_table->find_slot_with_hash (die,
8639 htab_hash_pointer (die), INSERT);
8640
8641 /* Assert that DIE isn't in the hash table yet. If it were already there,
8642 its ancestors would necessarily be there as well, and therefore
8643 clone_tree_partial wouldn't have been called. */
8644 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8645
8646 entry = XCNEW (struct decl_table_entry);
8647 entry->orig = die;
8648 entry->copy = clone;
8649 *slot = entry;
8650
8651 if (die->die_tag != DW_TAG_subprogram)
8652 FOR_EACH_CHILD (die, c,
8653 add_child_die (clone, clone_tree_partial (c, decl_table)));
8654
8655 return clone;
8656 }
8657
8658 /* Walk the DIE and its children, looking for references to incomplete
8659 or trivial types that are unmarked (i.e., that are not in the current
8660 type_unit). */
8661
8662 static void
8663 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8664 {
8665 dw_die_ref c;
8666 dw_attr_node *a;
8667 unsigned ix;
8668
8669 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8670 {
8671 if (AT_class (a) == dw_val_class_die_ref)
8672 {
8673 dw_die_ref targ = AT_ref (a);
8674 decl_table_entry **slot;
8675 struct decl_table_entry *entry;
8676
8677 if (targ->die_mark != 0 || targ->comdat_type_p)
8678 continue;
8679
8680 slot = decl_table->find_slot_with_hash (targ,
8681 htab_hash_pointer (targ),
8682 INSERT);
8683
8684 if (*slot != HTAB_EMPTY_ENTRY)
8685 {
8686 /* TARG has already been copied, so we just need to
8687 modify the reference to point to the copy. */
8688 entry = *slot;
8689 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8690 }
8691 else
8692 {
8693 dw_die_ref parent = unit;
8694 dw_die_ref copy = clone_die (targ);
8695
8696 /* Record in DECL_TABLE that TARG has been copied.
8697 Need to do this now, before the recursive call,
8698 because DECL_TABLE may be expanded and SLOT
8699 would no longer be a valid pointer. */
8700 entry = XCNEW (struct decl_table_entry);
8701 entry->orig = targ;
8702 entry->copy = copy;
8703 *slot = entry;
8704
8705 /* If TARG is not a declaration DIE, we need to copy its
8706 children. */
8707 if (!is_declaration_die (targ))
8708 {
8709 FOR_EACH_CHILD (
8710 targ, c,
8711 add_child_die (copy,
8712 clone_tree_partial (c, decl_table)));
8713 }
8714
8715 /* Make sure the cloned tree is marked as part of the
8716 type unit. */
8717 mark_dies (copy);
8718
8719 /* If TARG has surrounding context, copy its ancestor tree
8720 into the new type unit. */
8721 if (targ->die_parent != NULL
8722 && !is_unit_die (targ->die_parent))
8723 parent = copy_ancestor_tree (unit, targ->die_parent,
8724 decl_table);
8725
8726 add_child_die (parent, copy);
8727 a->dw_attr_val.v.val_die_ref.die = copy;
8728
8729 /* Make sure the newly-copied DIE is walked. If it was
8730 installed in a previously-added context, it won't
8731 get visited otherwise. */
8732 if (parent != unit)
8733 {
8734 /* Find the highest point of the newly-added tree,
8735 mark each node along the way, and walk from there. */
8736 parent->die_mark = 1;
8737 while (parent->die_parent
8738 && parent->die_parent->die_mark == 0)
8739 {
8740 parent = parent->die_parent;
8741 parent->die_mark = 1;
8742 }
8743 copy_decls_walk (unit, parent, decl_table);
8744 }
8745 }
8746 }
8747 }
8748
8749 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8750 }
8751
8752 /* Collect skeleton dies in DIE created by break_out_comdat_types already
8753 and record them in DECL_TABLE. */
8754
8755 static void
8756 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8757 {
8758 dw_die_ref c;
8759
8760 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8761 {
8762 dw_die_ref targ = AT_ref (a);
8763 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8764 decl_table_entry **slot
8765 = decl_table->find_slot_with_hash (targ,
8766 htab_hash_pointer (targ),
8767 INSERT);
8768 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8769 /* Record in DECL_TABLE that TARG has already been copied
8770 by remove_child_or_replace_with_skeleton. */
8771 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8772 entry->orig = targ;
8773 entry->copy = die;
8774 *slot = entry;
8775 }
8776 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8777 }
8778
8779 /* Copy declarations for "unworthy" types into the new comdat section.
8780 Incomplete types, modified types, and certain other types aren't broken
8781 out into comdat sections of their own, so they don't have a signature,
8782 and we need to copy the declaration into the same section so that we
8783 don't have an external reference. */
8784
8785 static void
8786 copy_decls_for_unworthy_types (dw_die_ref unit)
8787 {
8788 mark_dies (unit);
8789 decl_hash_type decl_table (10);
8790 collect_skeleton_dies (unit, &decl_table);
8791 copy_decls_walk (unit, unit, &decl_table);
8792 unmark_dies (unit);
8793 }
8794
8795 /* Traverse the DIE and add a sibling attribute if it may have the
8796 effect of speeding up access to siblings. To save some space,
8797 avoid generating sibling attributes for DIEs without children. */
8798
8799 static void
8800 add_sibling_attributes (dw_die_ref die)
8801 {
8802 dw_die_ref c;
8803
8804 if (! die->die_child)
8805 return;
8806
8807 if (die->die_parent && die != die->die_parent->die_child)
8808 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8809
8810 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8811 }
8812
8813 /* Output all location lists for the DIE and its children. */
8814
8815 static void
8816 output_location_lists (dw_die_ref die)
8817 {
8818 dw_die_ref c;
8819 dw_attr_node *a;
8820 unsigned ix;
8821
8822 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8823 if (AT_class (a) == dw_val_class_loc_list)
8824 output_loc_list (AT_loc_list (a));
8825
8826 FOR_EACH_CHILD (die, c, output_location_lists (c));
8827 }
8828
8829 /* During assign_location_list_indexes and output_loclists_offsets this is
8830 the current index; after that pass it is the number of assigned indexes
8831 (i.e. how large the .debug_loclists* offset table should be). */
8832 static unsigned int loc_list_idx;
8833
8834 /* Output all location list offsets for the DIE and its children. */
8835
8836 static void
8837 output_loclists_offsets (dw_die_ref die)
8838 {
8839 dw_die_ref c;
8840 dw_attr_node *a;
8841 unsigned ix;
8842
8843 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8844 if (AT_class (a) == dw_val_class_loc_list)
8845 {
8846 dw_loc_list_ref l = AT_loc_list (a);
8847 if (l->offset_emitted)
8848 continue;
8849 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8850 loc_section_label, NULL);
8851 gcc_assert (l->hash == loc_list_idx);
8852 loc_list_idx++;
8853 l->offset_emitted = true;
8854 }
8855
8856 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8857 }
8858
8859 /* Recursively set indexes of location lists. */
8860
8861 static void
8862 assign_location_list_indexes (dw_die_ref die)
8863 {
8864 dw_die_ref c;
8865 dw_attr_node *a;
8866 unsigned ix;
8867
8868 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8869 if (AT_class (a) == dw_val_class_loc_list)
8870 {
8871 dw_loc_list_ref list = AT_loc_list (a);
8872 if (!list->num_assigned)
8873 {
8874 list->num_assigned = true;
8875 list->hash = loc_list_idx++;
8876 }
8877 }
8878
8879 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8880 }
8881
8882 /* We want to limit the number of external references, because they are
8883 larger than local references: a relocation takes multiple words, and
8884 even a sig8 reference is always eight bytes, whereas a local reference
8885 can be as small as one byte (though GCC usually uses the 4-byte DW_FORM_ref4).
8886 So if we encounter multiple external references to the same type DIE, we
8887 make a local typedef stub for it and redirect all references there.
8888
8889 This is the element of the hash table for keeping track of these
8890 references. */
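
/* For example, three references from this CU to a type that lives in
   another comdat type unit would otherwise need three 8-byte sig8
   references; with a local stub they need a single sig8 (inside the stub)
   plus three small local references (typically DW_FORM_ref4). */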
8891
8892 struct external_ref
8893 {
8894 dw_die_ref type;
8895 dw_die_ref stub;
8896 unsigned n_refs;
8897 };
8898
8899 /* Hashtable helpers. */
8900
8901 struct external_ref_hasher : free_ptr_hash <external_ref>
8902 {
8903 static inline hashval_t hash (const external_ref *);
8904 static inline bool equal (const external_ref *, const external_ref *);
8905 };
8906
8907 inline hashval_t
8908 external_ref_hasher::hash (const external_ref *r)
8909 {
8910 dw_die_ref die = r->type;
8911 hashval_t h = 0;
8912
8913 /* We can't use the address of the DIE for hashing, because
8914 that will make the order of the stub DIEs non-deterministic. */
8915 if (! die->comdat_type_p)
8916 /* We have a symbol; use it to compute a hash. */
8917 h = htab_hash_string (die->die_id.die_symbol);
8918 else
8919 {
8920 /* We have a type signature; use a subset of the bits as the hash.
8921 The 8-byte signature is at least as large as hashval_t. */
8922 comdat_type_node *type_node = die->die_id.die_type_node;
8923 memcpy (&h, type_node->signature, sizeof (h));
8924 }
8925 return h;
8926 }
8927
8928 inline bool
8929 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8930 {
8931 return r1->type == r2->type;
8932 }
8933
8934 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8935
8936 /* Return a pointer to the external_ref for references to DIE. */
8937
8938 static struct external_ref *
8939 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8940 {
8941 struct external_ref ref, *ref_p;
8942 external_ref **slot;
8943
8944 ref.type = die;
8945 slot = map->find_slot (&ref, INSERT);
8946 if (*slot != HTAB_EMPTY_ENTRY)
8947 return *slot;
8948
8949 ref_p = XCNEW (struct external_ref);
8950 ref_p->type = die;
8951 *slot = ref_p;
8952 return ref_p;
8953 }
8954
8955 /* Subroutine of optimize_external_refs, below.
8956
8957 If we see a type skeleton, record it as our stub. If we see external
8958 references, remember how many we've seen. */
8959
8960 static void
8961 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8962 {
8963 dw_die_ref c;
8964 dw_attr_node *a;
8965 unsigned ix;
8966 struct external_ref *ref_p;
8967
8968 if (is_type_die (die)
8969 && (c = get_AT_ref (die, DW_AT_signature)))
8970 {
8971 /* This is a local skeleton; use it for local references. */
8972 ref_p = lookup_external_ref (map, c);
8973 ref_p->stub = die;
8974 }
8975
8976 /* Scan the DIE references, and remember any that refer to DIEs from
8977 other CUs (i.e. those which are not marked). */
8978 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8979 if (AT_class (a) == dw_val_class_die_ref
8980 && (c = AT_ref (a))->die_mark == 0
8981 && is_type_die (c))
8982 {
8983 ref_p = lookup_external_ref (map, c);
8984 ref_p->n_refs++;
8985 }
8986
8987 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8988 }
8989
8990 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8991 points to an external_ref, DATA is the CU we're processing. If we don't
8992 already have a local stub, and we have multiple refs, build a stub. */
8993
8994 int
8995 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8996 {
8997 struct external_ref *ref_p = *slot;
8998
8999 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9000 {
9001 /* We have multiple references to this type, so build a small stub.
9002 Both of these forms are a bit dodgy from the perspective of the
9003 DWARF standard, since technically they should have names. */
9004 dw_die_ref cu = data;
9005 dw_die_ref type = ref_p->type;
9006 dw_die_ref stub = NULL;
9007
9008 if (type->comdat_type_p)
9009 {
9010 /* If we refer to this type via sig8, use AT_signature. */
9011 stub = new_die (type->die_tag, cu, NULL_TREE);
9012 add_AT_die_ref (stub, DW_AT_signature, type);
9013 }
9014 else
9015 {
9016 /* Otherwise, use a typedef with no name. */
9017 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9018 add_AT_die_ref (stub, DW_AT_type, type);
9019 }
9020
9021 stub->die_mark++;
9022 ref_p->stub = stub;
9023 }
9024 return 1;
9025 }
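
/* Illustrative sketch (not emitted verbatim): for a non-comdat type DIE T
   that is referenced more than once from this unit, the stub built above
   amounts to

     DW_TAG_typedef            (no DW_AT_name)
       DW_AT_type -> T         (the single remaining external reference)

   while for a comdat type the stub is a nameless DIE of T's own tag that
   carries just DW_AT_signature. All other references in this unit are then
   redirected to the stub in build_abbrev_table, so they can use a short
   local form. */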
9026
9027 /* DIE is a unit; look through all the DIE references to see if there are
9028 any external references to types, and if so, create local stubs for
9029 them which will be applied in build_abbrev_table. This is useful because
9030 references to local DIEs are smaller. */
9031
9032 static external_ref_hash_type *
9033 optimize_external_refs (dw_die_ref die)
9034 {
9035 external_ref_hash_type *map = new external_ref_hash_type (10);
9036 optimize_external_refs_1 (die, map);
9037 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9038 return map;
9039 }
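
/* A minimal usage sketch of how these pieces fit together (the real callers
   later in this file do additional bookkeeping): the unit's DIEs must be
   marked first, so that unmarked references are recognized as external, and
   the returned map is consumed by build_abbrev_table and then freed:

     mark_dies (unit);
     external_ref_hash_type *map = optimize_external_refs (unit);
     build_abbrev_table (unit, map);
     delete map;
     ...
     unmark_dies (unit);
*/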
9040
9041 /* The following 4 variables are temporaries that are computed only during the
9042 build_abbrev_table call and used and released during the following
9043 optimize_abbrev_table call. */
9044
9045 /* First abbrev_id that can be optimized based on usage. */
9046 static unsigned int abbrev_opt_start;
9047
9048 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9049 abbrev_id smaller than this, because they must be already sized
9050 during build_abbrev_table). */
9051 static unsigned int abbrev_opt_base_type_end;
9052
9053 /* Vector of usage counts during build_abbrev_table. Indexed by
9054 abbrev_id - abbrev_opt_start. */
9055 static vec<unsigned int> abbrev_usage_count;
9056
9057 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9058 static vec<dw_die_ref> sorted_abbrev_dies;
9059
9060 /* The format of each DIE (and its attribute value pairs) is encoded in an
9061 abbreviation table. This routine builds the abbreviation table and assigns
9062 a unique abbreviation id for each abbreviation entry. The children of each
9063 die are visited recursively. */
9064
9065 static void
9066 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9067 {
9068 unsigned int abbrev_id = 0;
9069 dw_die_ref c;
9070 dw_attr_node *a;
9071 unsigned ix;
9072 dw_die_ref abbrev;
9073
9074 /* Scan the DIE references, and replace any that refer to
9075 DIEs from other CUs (i.e. those which are not marked) with
9076 the local stubs we built in optimize_external_refs. */
9077 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9078 if (AT_class (a) == dw_val_class_die_ref
9079 && (c = AT_ref (a))->die_mark == 0)
9080 {
9081 struct external_ref *ref_p;
9082 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9083
9084 if (is_type_die (c)
9085 && (ref_p = lookup_external_ref (extern_map, c))
9086 && ref_p->stub && ref_p->stub != die)
9087 {
9088 gcc_assert (a->dw_attr != DW_AT_signature);
9089 change_AT_die_ref (a, ref_p->stub);
9090 }
9091 else
9092 /* We aren't changing this reference, so mark it external. */
9093 set_AT_ref_external (a, 1);
9094 }
9095
9096 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9097 {
9098 dw_attr_node *die_a, *abbrev_a;
9099 unsigned ix;
9100 bool ok = true;
9101
9102 if (abbrev_id == 0)
9103 continue;
9104 if (abbrev->die_tag != die->die_tag)
9105 continue;
9106 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9107 continue;
9108
9109 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9110 continue;
9111
9112 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9113 {
9114 abbrev_a = &(*abbrev->die_attr)[ix];
9115 if ((abbrev_a->dw_attr != die_a->dw_attr)
9116 || (value_format (abbrev_a) != value_format (die_a)))
9117 {
9118 ok = false;
9119 break;
9120 }
9121 }
9122 if (ok)
9123 break;
9124 }
9125
9126 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9127 {
9128 vec_safe_push (abbrev_die_table, die);
9129 if (abbrev_opt_start)
9130 abbrev_usage_count.safe_push (0);
9131 }
9132 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9133 {
9134 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9135 sorted_abbrev_dies.safe_push (die);
9136 }
9137
9138 die->die_abbrev = abbrev_id;
9139 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9140 }
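
/* For illustration (a sketch, not actual output): two DIEs such as

     DW_TAG_variable  DW_AT_name (DW_FORM_strp)  DW_AT_type (reference form)
     DW_TAG_variable  DW_AT_name (DW_FORM_strp)  DW_AT_type (reference form)

   have the same tag, the same has-children flag and identical
   attribute/form lists, so the matching loop above lets them share one
   abbreviation code; a DIE that adds, say, DW_AT_external gets a new entry
   pushed onto abbrev_die_table. */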
9141
9142 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9143 by die_abbrev's usage count, from the most commonly used
9144 abbreviation to the least. */
9145
9146 static int
9147 die_abbrev_cmp (const void *p1, const void *p2)
9148 {
9149 dw_die_ref die1 = *(const dw_die_ref *) p1;
9150 dw_die_ref die2 = *(const dw_die_ref *) p2;
9151
9152 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9153 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9154
9155 if (die1->die_abbrev >= abbrev_opt_base_type_end
9156 && die2->die_abbrev >= abbrev_opt_base_type_end)
9157 {
9158 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9159 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9160 return -1;
9161 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9162 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9163 return 1;
9164 }
9165
9166 /* Stabilize the sort. */
9167 if (die1->die_abbrev < die2->die_abbrev)
9168 return -1;
9169 if (die1->die_abbrev > die2->die_abbrev)
9170 return 1;
9171
9172 return 0;
9173 }
9174
9175 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
9176 dw_val_class_file class attributes of the DIEs in between
9177 sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1] into the
9178 corresponding dw_val_class_*_implicit classes. */
9179
9180 static void
9181 optimize_implicit_const (unsigned int first_id, unsigned int end,
9182 vec<bool> &implicit_consts)
9183 {
9184 /* It never makes sense if there is just one DIE using the abbreviation. */
9185 if (end < first_id + 2)
9186 return;
9187
9188 dw_attr_node *a;
9189 unsigned ix, i;
9190 dw_die_ref die = sorted_abbrev_dies[first_id];
9191 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9192 if (implicit_consts[ix])
9193 {
9194 enum dw_val_class new_class = dw_val_class_none;
9195 switch (AT_class (a))
9196 {
9197 case dw_val_class_unsigned_const:
9198 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9199 continue;
9200
9201 /* The .debug_abbrev section will grow by
9202 size_of_sleb128 (AT_unsigned (a)) bytes, while we save the
9203 constants in all the DIEs using that abbreviation. */
9204 if (constant_size (AT_unsigned (a)) * (end - first_id)
9205 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9206 continue;
9207
9208 new_class = dw_val_class_unsigned_const_implicit;
9209 break;
9210
9211 case dw_val_class_const:
9212 new_class = dw_val_class_const_implicit;
9213 break;
9214
9215 case dw_val_class_file:
9216 new_class = dw_val_class_file_implicit;
9217 break;
9218
9219 default:
9220 continue;
9221 }
9222 for (i = first_id; i < end; i++)
9223 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9224 = new_class;
9225 }
9226 }
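
/* Rough arithmetic behind the check above, for a value such as 0x12345:
   constant_size (0x12345) == 4, so DW_FORM_data4 costs 4 bytes in every
   DIE, while size_of_sleb128 (0x12345) == 3, the one-off cost of
   DW_FORM_implicit_const in .debug_abbrev. With 10 DIEs sharing the
   abbreviation, converting trades 4 * 10 = 40 bytes of .debug_info for
   3 bytes of .debug_abbrev. */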
9227
9228 /* Attempt to optimize the abbreviation table for abbreviations from
9229 abbrev_opt_start onwards. */
9230
9231 static void
9232 optimize_abbrev_table (void)
9233 {
9234 if (abbrev_opt_start
9235 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9236 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9237 {
9238 auto_vec<bool, 32> implicit_consts;
9239 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9240
9241 unsigned int abbrev_id = abbrev_opt_start - 1;
9242 unsigned int first_id = ~0U;
9243 unsigned int last_abbrev_id = 0;
9244 unsigned int i;
9245 dw_die_ref die;
9246 if (abbrev_opt_base_type_end > abbrev_opt_start)
9247 abbrev_id = abbrev_opt_base_type_end - 1;
9248 /* Reassign abbreviation ids from abbrev_opt_start onwards, so that the
9249 most commonly used abbreviations come first. */
9250 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9251 {
9252 dw_attr_node *a;
9253 unsigned ix;
9254
9255 /* If calc_base_type_die_sizes has been called, the CU and
9256 base types after it can't be optimized, because we've already
9257 calculated their DIE offsets. We've sorted them first. */
9258 if (die->die_abbrev < abbrev_opt_base_type_end)
9259 continue;
9260 if (die->die_abbrev != last_abbrev_id)
9261 {
9262 last_abbrev_id = die->die_abbrev;
9263 if (dwarf_version >= 5 && first_id != ~0U)
9264 optimize_implicit_const (first_id, i, implicit_consts);
9265 abbrev_id++;
9266 (*abbrev_die_table)[abbrev_id] = die;
9267 if (dwarf_version >= 5)
9268 {
9269 first_id = i;
9270 implicit_consts.truncate (0);
9271
9272 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9273 switch (AT_class (a))
9274 {
9275 case dw_val_class_const:
9276 case dw_val_class_unsigned_const:
9277 case dw_val_class_file:
9278 implicit_consts.safe_push (true);
9279 break;
9280 default:
9281 implicit_consts.safe_push (false);
9282 break;
9283 }
9284 }
9285 }
9286 else if (dwarf_version >= 5)
9287 {
9288 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9289 if (!implicit_consts[ix])
9290 continue;
9291 else
9292 {
9293 dw_attr_node *other_a
9294 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9295 if (!dw_val_equal_p (&a->dw_attr_val,
9296 &other_a->dw_attr_val))
9297 implicit_consts[ix] = false;
9298 }
9299 }
9300 die->die_abbrev = abbrev_id;
9301 }
9302 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9303 if (dwarf_version >= 5 && first_id != ~0U)
9304 optimize_implicit_const (first_id, i, implicit_consts);
9305 }
9306
9307 abbrev_opt_start = 0;
9308 abbrev_opt_base_type_end = 0;
9309 abbrev_usage_count.release ();
9310 sorted_abbrev_dies.release ();
9311 }
9312 \f
9313 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9314
9315 static int
9316 constant_size (unsigned HOST_WIDE_INT value)
9317 {
9318 int log;
9319
9320 if (value == 0)
9321 log = 0;
9322 else
9323 log = floor_log2 (value);
9324
9325 log = log / 8;
9326 log = 1 << (floor_log2 (log) + 1);
9327
9328 return log;
9329 }
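
/* For instance, constant_size (0) == constant_size (0x7f) == 1,
   constant_size (0x100) == 2, constant_size (0x10000) == 4 and
   constant_size (0x100000000) == 8; the result is always 1, 2, 4 or 8,
   matching the DW_FORM_data1/2/4/8 and DW_FORM_block1/2/4 choices made in
   value_format below. */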
9330
9331 /* Return the size of a DIE as it is represented in the
9332 .debug_info section. */
9333
9334 static unsigned long
9335 size_of_die (dw_die_ref die)
9336 {
9337 unsigned long size = 0;
9338 dw_attr_node *a;
9339 unsigned ix;
9340 enum dwarf_form form;
9341
9342 size += size_of_uleb128 (die->die_abbrev);
9343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9344 {
9345 switch (AT_class (a))
9346 {
9347 case dw_val_class_addr:
9348 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9349 {
9350 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9351 size += size_of_uleb128 (AT_index (a));
9352 }
9353 else
9354 size += DWARF2_ADDR_SIZE;
9355 break;
9356 case dw_val_class_offset:
9357 size += DWARF_OFFSET_SIZE;
9358 break;
9359 case dw_val_class_loc:
9360 {
9361 unsigned long lsize = size_of_locs (AT_loc (a));
9362
9363 /* Block length. */
9364 if (dwarf_version >= 4)
9365 size += size_of_uleb128 (lsize);
9366 else
9367 size += constant_size (lsize);
9368 size += lsize;
9369 }
9370 break;
9371 case dw_val_class_loc_list:
9372 if (dwarf_split_debug_info && dwarf_version >= 5)
9373 {
9374 gcc_assert (AT_loc_list (a)->num_assigned);
9375 size += size_of_uleb128 (AT_loc_list (a)->hash);
9376 }
9377 else
9378 size += DWARF_OFFSET_SIZE;
9379 break;
9380 case dw_val_class_view_list:
9381 size += DWARF_OFFSET_SIZE;
9382 break;
9383 case dw_val_class_range_list:
9384 if (value_format (a) == DW_FORM_rnglistx)
9385 {
9386 gcc_assert (rnglist_idx);
9387 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9388 size += size_of_uleb128 (r->idx);
9389 }
9390 else
9391 size += DWARF_OFFSET_SIZE;
9392 break;
9393 case dw_val_class_const:
9394 size += size_of_sleb128 (AT_int (a));
9395 break;
9396 case dw_val_class_unsigned_const:
9397 {
9398 int csize = constant_size (AT_unsigned (a));
9399 if (dwarf_version == 3
9400 && a->dw_attr == DW_AT_data_member_location
9401 && csize >= 4)
9402 size += size_of_uleb128 (AT_unsigned (a));
9403 else
9404 size += csize;
9405 }
9406 break;
9407 case dw_val_class_symview:
9408 if (symview_upper_bound <= 0xff)
9409 size += 1;
9410 else if (symview_upper_bound <= 0xffff)
9411 size += 2;
9412 else if (symview_upper_bound <= 0xffffffff)
9413 size += 4;
9414 else
9415 size += 8;
9416 break;
9417 case dw_val_class_const_implicit:
9418 case dw_val_class_unsigned_const_implicit:
9419 case dw_val_class_file_implicit:
9420 /* These occupy no size in the DIE, just an extra sleb128 in
9421 .debug_abbrev. */
9422 break;
9423 case dw_val_class_const_double:
9424 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9425 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9426 size++; /* block */
9427 break;
9428 case dw_val_class_wide_int:
9429 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9430 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9431 if (get_full_len (*a->dw_attr_val.v.val_wide)
9432 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9433 size++; /* block */
9434 break;
9435 case dw_val_class_vec:
9436 size += constant_size (a->dw_attr_val.v.val_vec.length
9437 * a->dw_attr_val.v.val_vec.elt_size)
9438 + a->dw_attr_val.v.val_vec.length
9439 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9440 break;
9441 case dw_val_class_flag:
9442 if (dwarf_version >= 4)
9443 /* Currently all add_AT_flag calls pass in 1 as last argument,
9444 so DW_FORM_flag_present can be used. If that ever changes,
9445 we'll need to use DW_FORM_flag and have some optimization
9446 in build_abbrev_table that will change those to
9447 DW_FORM_flag_present if it is set to 1 in all DIEs using
9448 the same abbrev entry. */
9449 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9450 else
9451 size += 1;
9452 break;
9453 case dw_val_class_die_ref:
9454 if (AT_ref_external (a))
9455 {
9456 /* For references to comdat types we use DW_FORM_ref_sig8; otherwise
9457 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9458 is sized by the target address length, whereas from DWARF3 on
9459 it is always sized as an offset. */
9460 if (AT_ref (a)->comdat_type_p)
9461 size += DWARF_TYPE_SIGNATURE_SIZE;
9462 else if (dwarf_version == 2)
9463 size += DWARF2_ADDR_SIZE;
9464 else
9465 size += DWARF_OFFSET_SIZE;
9466 }
9467 else
9468 size += DWARF_OFFSET_SIZE;
9469 break;
9470 case dw_val_class_fde_ref:
9471 size += DWARF_OFFSET_SIZE;
9472 break;
9473 case dw_val_class_lbl_id:
9474 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9475 {
9476 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9477 size += size_of_uleb128 (AT_index (a));
9478 }
9479 else
9480 size += DWARF2_ADDR_SIZE;
9481 break;
9482 case dw_val_class_lineptr:
9483 case dw_val_class_macptr:
9484 case dw_val_class_loclistsptr:
9485 size += DWARF_OFFSET_SIZE;
9486 break;
9487 case dw_val_class_str:
9488 form = AT_string_form (a);
9489 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9490 size += DWARF_OFFSET_SIZE;
9491 else if (form == dwarf_FORM (DW_FORM_strx))
9492 size += size_of_uleb128 (AT_index (a));
9493 else
9494 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9495 break;
9496 case dw_val_class_file:
9497 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9498 break;
9499 case dw_val_class_data8:
9500 size += 8;
9501 break;
9502 case dw_val_class_vms_delta:
9503 size += DWARF_OFFSET_SIZE;
9504 break;
9505 case dw_val_class_high_pc:
9506 size += DWARF2_ADDR_SIZE;
9507 break;
9508 case dw_val_class_discr_value:
9509 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9510 break;
9511 case dw_val_class_discr_list:
9512 {
9513 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9514
9515 /* This is a block, so we have the block length and then its
9516 data. */
9517 size += constant_size (block_size) + block_size;
9518 }
9519 break;
9520 default:
9521 gcc_unreachable ();
9522 }
9523 }
9524
9525 return size;
9526 }
9527
9528 /* Size the debugging information associated with a given DIE. Visits the
9529 DIE's children recursively. Sets the die_offset field of each DIE to the
9530 current value of the global variable next_die_offset, and then advances
9531 next_die_offset by the size of that DIE. */
9532
9533 static void
9534 calc_die_sizes (dw_die_ref die)
9535 {
9536 dw_die_ref c;
9537
9538 gcc_assert (die->die_offset == 0
9539 || (unsigned long int) die->die_offset == next_die_offset);
9540 die->die_offset = next_die_offset;
9541 next_die_offset += size_of_die (die);
9542
9543 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9544
9545 if (die->die_child != NULL)
9546 /* Count the null byte used to terminate sibling lists. */
9547 next_die_offset += 1;
9548 }
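
/* A small worked example (assuming DWARF_OFFSET_SIZE == 4): a DIE whose
   abbreviation code fits in one uleb128 byte and whose only attribute is a
   DW_FORM_strp string occupies 1 + 4 = 5 bytes, so its die_offset becomes
   the current next_die_offset and next_die_offset advances by 5; if it has
   children, one more byte is added after them for the terminating null. */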
9549
9550 /* Size just the base type children at the start of the CU.
9551 This is needed because build_abbrev_table needs to size location
9552 descriptions, and sizing of type-based stack ops needs to know the
9553 die_offset values of the base types. */
9554
9555 static void
9556 calc_base_type_die_sizes (void)
9557 {
9558 unsigned long die_offset = (dwarf_split_debug_info
9559 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9560 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9561 unsigned int i;
9562 dw_die_ref base_type;
9563 #if ENABLE_ASSERT_CHECKING
9564 dw_die_ref prev = comp_unit_die ()->die_child;
9565 #endif
9566
9567 die_offset += size_of_die (comp_unit_die ());
9568 for (i = 0; base_types.iterate (i, &base_type); i++)
9569 {
9570 #if ENABLE_ASSERT_CHECKING
9571 gcc_assert (base_type->die_offset == 0
9572 && prev->die_sib == base_type
9573 && base_type->die_child == NULL
9574 && base_type->die_abbrev);
9575 prev = base_type;
9576 #endif
9577 if (abbrev_opt_start
9578 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9579 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9580 base_type->die_offset = die_offset;
9581 die_offset += size_of_die (base_type);
9582 }
9583 }
9584
9585 /* Set the marks for a die and its children. We do this so
9586 that we know whether or not a reference needs to use FORM_ref_addr; only
9587 DIEs in the same CU will be marked. We used to clear out the offset
9588 and use that as the flag, but ran into ordering problems. */
9589
9590 static void
9591 mark_dies (dw_die_ref die)
9592 {
9593 dw_die_ref c;
9594
9595 gcc_assert (!die->die_mark);
9596
9597 die->die_mark = 1;
9598 FOR_EACH_CHILD (die, c, mark_dies (c));
9599 }
9600
9601 /* Clear the marks for a die and its children. */
9602
9603 static void
9604 unmark_dies (dw_die_ref die)
9605 {
9606 dw_die_ref c;
9607
9608 if (! use_debug_types)
9609 gcc_assert (die->die_mark);
9610
9611 die->die_mark = 0;
9612 FOR_EACH_CHILD (die, c, unmark_dies (c));
9613 }
9614
9615 /* Clear the marks for a die, its children and referred dies. */
9616
9617 static void
9618 unmark_all_dies (dw_die_ref die)
9619 {
9620 dw_die_ref c;
9621 dw_attr_node *a;
9622 unsigned ix;
9623
9624 if (!die->die_mark)
9625 return;
9626 die->die_mark = 0;
9627
9628 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9629
9630 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9631 if (AT_class (a) == dw_val_class_die_ref)
9632 unmark_all_dies (AT_ref (a));
9633 }
9634
9635 /* Calculate if the entry should appear in the final output file. It may be
9636 from a pruned type. */
9637
9638 static bool
9639 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9640 {
9641 /* By limiting gnu pubnames to definitions only, gold can generate a
9642 gdb index without entries for declarations, which don't include
9643 enough information to be useful. */
9644 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9645 return false;
9646
9647 if (table == pubname_table)
9648 {
9649 /* Enumerator names are part of the pubname table, but the
9650 parent DW_TAG_enumeration_type die may have been pruned.
9651 Don't output them if that is the case. */
9652 if (p->die->die_tag == DW_TAG_enumerator &&
9653 (p->die->die_parent == NULL
9654 || !p->die->die_parent->die_perennial_p))
9655 return false;
9656
9657 /* Everything else in the pubname table is included. */
9658 return true;
9659 }
9660
9661 /* The pubtypes table shouldn't include types that have been
9662 pruned. */
9663 return (p->die->die_offset != 0
9664 || !flag_eliminate_unused_debug_types);
9665 }
9666
9667 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9668 generated for the compilation unit. */
9669
9670 static unsigned long
9671 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9672 {
9673 unsigned long size;
9674 unsigned i;
9675 pubname_entry *p;
9676 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9677
9678 size = DWARF_PUBNAMES_HEADER_SIZE;
9679 FOR_EACH_VEC_ELT (*names, i, p)
9680 if (include_pubname_in_output (names, p))
9681 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9682
9683 size += DWARF_OFFSET_SIZE;
9684 return size;
9685 }
9686
9687 /* Return the size of the information in the .debug_aranges section. */
9688
9689 static unsigned long
9690 size_of_aranges (void)
9691 {
9692 unsigned long size;
9693
9694 size = DWARF_ARANGES_HEADER_SIZE;
9695
9696 /* Count the address/length pair for this compilation unit. */
9697 if (text_section_used)
9698 size += 2 * DWARF2_ADDR_SIZE;
9699 if (cold_text_section_used)
9700 size += 2 * DWARF2_ADDR_SIZE;
9701 if (have_multiple_function_sections)
9702 {
9703 unsigned fde_idx;
9704 dw_fde_ref fde;
9705
9706 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9707 {
9708 if (DECL_IGNORED_P (fde->decl))
9709 continue;
9710 if (!fde->in_std_section)
9711 size += 2 * DWARF2_ADDR_SIZE;
9712 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9713 size += 2 * DWARF2_ADDR_SIZE;
9714 }
9715 }
9716
9717 /* Count the two zero words used to terminate the address range table. */
9718 size += 2 * DWARF2_ADDR_SIZE;
9719 return size;
9720 }
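
/* For example, with only the .text section used and 8-byte addresses the
   result is DWARF_ARANGES_HEADER_SIZE + 2 * 8 for the text range plus
   2 * 8 for the terminating zero pair; each function emitted outside the
   standard sections contributes one more address/length pair. */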
9721 \f
9722 /* Select the encoding of an attribute value. */
9723
9724 static enum dwarf_form
9725 value_format (dw_attr_node *a)
9726 {
9727 switch (AT_class (a))
9728 {
9729 case dw_val_class_addr:
9730 /* Only very few attributes allow DW_FORM_addr. */
9731 switch (a->dw_attr)
9732 {
9733 case DW_AT_low_pc:
9734 case DW_AT_high_pc:
9735 case DW_AT_entry_pc:
9736 case DW_AT_trampoline:
9737 return (AT_index (a) == NOT_INDEXED
9738 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9739 default:
9740 break;
9741 }
9742 switch (DWARF2_ADDR_SIZE)
9743 {
9744 case 1:
9745 return DW_FORM_data1;
9746 case 2:
9747 return DW_FORM_data2;
9748 case 4:
9749 return DW_FORM_data4;
9750 case 8:
9751 return DW_FORM_data8;
9752 default:
9753 gcc_unreachable ();
9754 }
9755 case dw_val_class_loc_list:
9756 if (dwarf_split_debug_info
9757 && dwarf_version >= 5
9758 && AT_loc_list (a)->num_assigned)
9759 return DW_FORM_loclistx;
9760 /* FALLTHRU */
9761 case dw_val_class_view_list:
9762 case dw_val_class_range_list:
9763 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo
9764 but DW_FORM_sec_offset in .debug_info. The latter is shorter if we
9765 care about the sizes of the .debug* sections in shared libraries and
9766 executables and ignore relocations that affect only relocatable
9767 objects, since for DW_FORM_rnglistx we'd have to emit an offset
9768 table in the .debug_rnglists section. */
9769 if (dwarf_split_debug_info
9770 && dwarf_version >= 5
9771 && AT_class (a) == dw_val_class_range_list
9772 && rnglist_idx
9773 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9774 return DW_FORM_rnglistx;
9775 if (dwarf_version >= 4)
9776 return DW_FORM_sec_offset;
9777 /* FALLTHRU */
9778 case dw_val_class_vms_delta:
9779 case dw_val_class_offset:
9780 switch (DWARF_OFFSET_SIZE)
9781 {
9782 case 4:
9783 return DW_FORM_data4;
9784 case 8:
9785 return DW_FORM_data8;
9786 default:
9787 gcc_unreachable ();
9788 }
9789 case dw_val_class_loc:
9790 if (dwarf_version >= 4)
9791 return DW_FORM_exprloc;
9792 switch (constant_size (size_of_locs (AT_loc (a))))
9793 {
9794 case 1:
9795 return DW_FORM_block1;
9796 case 2:
9797 return DW_FORM_block2;
9798 case 4:
9799 return DW_FORM_block4;
9800 default:
9801 gcc_unreachable ();
9802 }
9803 case dw_val_class_const:
9804 return DW_FORM_sdata;
9805 case dw_val_class_unsigned_const:
9806 switch (constant_size (AT_unsigned (a)))
9807 {
9808 case 1:
9809 return DW_FORM_data1;
9810 case 2:
9811 return DW_FORM_data2;
9812 case 4:
9813 /* In DWARF3 DW_AT_data_member_location with
9814 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9815 constant, so we need to use DW_FORM_udata if we need
9816 a large constant. */
9817 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9818 return DW_FORM_udata;
9819 return DW_FORM_data4;
9820 case 8:
9821 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9822 return DW_FORM_udata;
9823 return DW_FORM_data8;
9824 default:
9825 gcc_unreachable ();
9826 }
9827 case dw_val_class_const_implicit:
9828 case dw_val_class_unsigned_const_implicit:
9829 case dw_val_class_file_implicit:
9830 return DW_FORM_implicit_const;
9831 case dw_val_class_const_double:
9832 switch (HOST_BITS_PER_WIDE_INT)
9833 {
9834 case 8:
9835 return DW_FORM_data2;
9836 case 16:
9837 return DW_FORM_data4;
9838 case 32:
9839 return DW_FORM_data8;
9840 case 64:
9841 if (dwarf_version >= 5)
9842 return DW_FORM_data16;
9843 /* FALLTHRU */
9844 default:
9845 return DW_FORM_block1;
9846 }
9847 case dw_val_class_wide_int:
9848 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9849 {
9850 case 8:
9851 return DW_FORM_data1;
9852 case 16:
9853 return DW_FORM_data2;
9854 case 32:
9855 return DW_FORM_data4;
9856 case 64:
9857 return DW_FORM_data8;
9858 case 128:
9859 if (dwarf_version >= 5)
9860 return DW_FORM_data16;
9861 /* FALLTHRU */
9862 default:
9863 return DW_FORM_block1;
9864 }
9865 case dw_val_class_symview:
9866 /* ??? We might use uleb128, but then we'd have to compute
9867 .debug_info offsets in the assembler. */
9868 if (symview_upper_bound <= 0xff)
9869 return DW_FORM_data1;
9870 else if (symview_upper_bound <= 0xffff)
9871 return DW_FORM_data2;
9872 else if (symview_upper_bound <= 0xffffffff)
9873 return DW_FORM_data4;
9874 else
9875 return DW_FORM_data8;
9876 case dw_val_class_vec:
9877 switch (constant_size (a->dw_attr_val.v.val_vec.length
9878 * a->dw_attr_val.v.val_vec.elt_size))
9879 {
9880 case 1:
9881 return DW_FORM_block1;
9882 case 2:
9883 return DW_FORM_block2;
9884 case 4:
9885 return DW_FORM_block4;
9886 default:
9887 gcc_unreachable ();
9888 }
9889 case dw_val_class_flag:
9890 if (dwarf_version >= 4)
9891 {
9892 /* Currently all add_AT_flag calls pass in 1 as last argument,
9893 so DW_FORM_flag_present can be used. If that ever changes,
9894 we'll need to use DW_FORM_flag and have some optimization
9895 in build_abbrev_table that will change those to
9896 DW_FORM_flag_present if it is set to 1 in all DIEs using
9897 the same abbrev entry. */
9898 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9899 return DW_FORM_flag_present;
9900 }
9901 return DW_FORM_flag;
9902 case dw_val_class_die_ref:
9903 if (AT_ref_external (a))
9904 {
9905 if (AT_ref (a)->comdat_type_p)
9906 return DW_FORM_ref_sig8;
9907 else
9908 return DW_FORM_ref_addr;
9909 }
9910 else
9911 return DW_FORM_ref;
9912 case dw_val_class_fde_ref:
9913 return DW_FORM_data;
9914 case dw_val_class_lbl_id:
9915 return (AT_index (a) == NOT_INDEXED
9916 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9917 case dw_val_class_lineptr:
9918 case dw_val_class_macptr:
9919 case dw_val_class_loclistsptr:
9920 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9921 case dw_val_class_str:
9922 return AT_string_form (a);
9923 case dw_val_class_file:
9924 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9925 {
9926 case 1:
9927 return DW_FORM_data1;
9928 case 2:
9929 return DW_FORM_data2;
9930 case 4:
9931 return DW_FORM_data4;
9932 default:
9933 gcc_unreachable ();
9934 }
9935
9936 case dw_val_class_data8:
9937 return DW_FORM_data8;
9938
9939 case dw_val_class_high_pc:
9940 switch (DWARF2_ADDR_SIZE)
9941 {
9942 case 1:
9943 return DW_FORM_data1;
9944 case 2:
9945 return DW_FORM_data2;
9946 case 4:
9947 return DW_FORM_data4;
9948 case 8:
9949 return DW_FORM_data8;
9950 default:
9951 gcc_unreachable ();
9952 }
9953
9954 case dw_val_class_discr_value:
9955 return (a->dw_attr_val.v.val_discr_value.pos
9956 ? DW_FORM_udata
9957 : DW_FORM_sdata);
9958 case dw_val_class_discr_list:
9959 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9960 {
9961 case 1:
9962 return DW_FORM_block1;
9963 case 2:
9964 return DW_FORM_block2;
9965 case 4:
9966 return DW_FORM_block4;
9967 default:
9968 gcc_unreachable ();
9969 }
9970
9971 default:
9972 gcc_unreachable ();
9973 }
9974 }
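
/* A couple of concrete outcomes of the selection above: an unsigned
   constant of 300 has constant_size == 2 and is emitted as DW_FORM_data2,
   while a DW_AT_data_member_location of the same magnitude under -gdwarf-3
   also gets DW_FORM_data2 (only the 4- and 8-byte cases switch to
   DW_FORM_udata). An internal DIE reference becomes DW_FORM_ref and an
   external one DW_FORM_ref_sig8 or DW_FORM_ref_addr, as in size_of_die. */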
9975
9976 /* Output the encoding of an attribute value. */
9977
9978 static void
9979 output_value_format (dw_attr_node *a)
9980 {
9981 enum dwarf_form form = value_format (a);
9982
9983 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9984 }
9985
9986 /* Given a die and id, produce the appropriate abbreviations. */
9987
9988 static void
9989 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9990 {
9991 unsigned ix;
9992 dw_attr_node *a_attr;
9993
9994 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9995 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9996 dwarf_tag_name (abbrev->die_tag));
9997
9998 if (abbrev->die_child != NULL)
9999 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10000 else
10001 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10002
10003 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10004 {
10005 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10006 dwarf_attr_name (a_attr->dw_attr));
10007 output_value_format (a_attr);
10008 if (value_format (a_attr) == DW_FORM_implicit_const)
10009 {
10010 if (AT_class (a_attr) == dw_val_class_file_implicit)
10011 {
10012 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10013 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10014 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10015 }
10016 else
10017 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10018 }
10019 }
10020
10021 dw2_asm_output_data (1, 0, NULL);
10022 dw2_asm_output_data (1, 0, NULL);
10023 }
10024
10025
10026 /* Output the .debug_abbrev section which defines the DIE abbreviation
10027 table. */
10028
10029 static void
10030 output_abbrev_section (void)
10031 {
10032 unsigned int abbrev_id;
10033 dw_die_ref abbrev;
10034
10035 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10036 if (abbrev_id != 0)
10037 output_die_abbrevs (abbrev_id, abbrev);
10038
10039 /* Terminate the table. */
10040 dw2_asm_output_data (1, 0, NULL);
10041 }
10042
10043 /* Return a new location list, given the begin and end range, and the
10044 expression. */
10045
10046 static inline dw_loc_list_ref
10047 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10048 const char *end, var_loc_view vend,
10049 const char *section)
10050 {
10051 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10052
10053 retlist->begin = begin;
10054 retlist->begin_entry = NULL;
10055 retlist->end = end;
10056 retlist->expr = expr;
10057 retlist->section = section;
10058 retlist->vbegin = vbegin;
10059 retlist->vend = vend;
10060
10061 return retlist;
10062 }
10063
10064 /* Return true iff there's any nonzero view number in the loc list.
10065
10066 ??? When views are not enabled, we'll often extend a single range
10067 to the entire function, so that we emit a single location
10068 expression rather than a location list. With views, even with a
10069 single range, we'll output a list if start or end have a nonzero
10070 view. If we change this, we may want to stop splitting a single
10071 range in dw_loc_list just because of a nonzero view, even if it
10072 straddles across hot/cold partitions. */
10073
10074 static bool
10075 loc_list_has_views (dw_loc_list_ref list)
10076 {
10077 if (!debug_variable_location_views)
10078 return false;
10079
10080 for (dw_loc_list_ref loc = list;
10081 loc != NULL; loc = loc->dw_loc_next)
10082 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10083 return true;
10084
10085 return false;
10086 }
10087
10088 /* Generate a new internal symbol for this location list node, if it
10089 hasn't got one yet. */
10090
10091 static inline void
10092 gen_llsym (dw_loc_list_ref list)
10093 {
10094 gcc_assert (!list->ll_symbol);
10095 list->ll_symbol = gen_internal_sym ("LLST");
10096
10097 if (!loc_list_has_views (list))
10098 return;
10099
10100 if (dwarf2out_locviews_in_attribute ())
10101 {
10102 /* Use the same label_num for the view list. */
10103 label_num--;
10104 list->vl_symbol = gen_internal_sym ("LVUS");
10105 }
10106 else
10107 list->vl_symbol = list->ll_symbol;
10108 }
10109
10110 /* Generate a symbol for the list, but only if we really want to emit
10111 it as a list. */
10112
10113 static inline void
10114 maybe_gen_llsym (dw_loc_list_ref list)
10115 {
10116 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10117 return;
10118
10119 gen_llsym (list);
10120 }
10121
10122 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10123 NULL, don't consider size of the location expression. If we're not
10124 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10125 representation in *SIZEP. */
10126
10127 static bool
10128 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10129 {
10130 /* Don't output an entry that starts and ends at the same address. */
10131 if (strcmp (curr->begin, curr->end) == 0
10132 && curr->vbegin == curr->vend && !curr->force)
10133 return true;
10134
10135 if (!sizep)
10136 return false;
10137
10138 unsigned long size = size_of_locs (curr->expr);
10139
10140 /* If the expression is too large, drop it on the floor. We could
10141 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10142 in the expression, but >= 64KB expressions for a single value
10143 in a single range are unlikely to be very useful. */
10144 if (dwarf_version < 5 && size > 0xffff)
10145 return true;
10146
10147 *sizep = size;
10148
10149 return false;
10150 }
10151
10152 /* Output a view pair loclist entry for CURR, if it requires one. */
10153
10154 static void
10155 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10156 {
10157 if (!dwarf2out_locviews_in_loclist ())
10158 return;
10159
10160 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10161 return;
10162
10163 #ifdef DW_LLE_view_pair
10164 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10165
10166 if (dwarf2out_as_locview_support)
10167 {
10168 if (ZERO_VIEW_P (curr->vbegin))
10169 dw2_asm_output_data_uleb128 (0, "Location view begin");
10170 else
10171 {
10172 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10173 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10174 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10175 }
10176
10177 if (ZERO_VIEW_P (curr->vend))
10178 dw2_asm_output_data_uleb128 (0, "Location view end");
10179 else
10180 {
10181 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10182 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10183 dw2_asm_output_symname_uleb128 (label, "Location view end");
10184 }
10185 }
10186 else
10187 {
10188 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10189 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10190 }
10191 #endif /* DW_LLE_view_pair */
10192
10193 return;
10194 }
10195
10196 /* Output the location list given to us. */
10197
10198 static void
10199 output_loc_list (dw_loc_list_ref list_head)
10200 {
10201 int vcount = 0, lcount = 0;
10202
10203 if (list_head->emitted)
10204 return;
10205 list_head->emitted = true;
10206
10207 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10208 {
10209 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10210
10211 for (dw_loc_list_ref curr = list_head; curr != NULL;
10212 curr = curr->dw_loc_next)
10213 {
10214 unsigned long size;
10215
10216 if (skip_loc_list_entry (curr, &size))
10217 continue;
10218
10219 vcount++;
10220
10221 /* ?? dwarf_split_debug_info? */
10222 if (dwarf2out_as_locview_support)
10223 {
10224 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10225
10226 if (!ZERO_VIEW_P (curr->vbegin))
10227 {
10228 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10229 dw2_asm_output_symname_uleb128 (label,
10230 "View list begin (%s)",
10231 list_head->vl_symbol);
10232 }
10233 else
10234 dw2_asm_output_data_uleb128 (0,
10235 "View list begin (%s)",
10236 list_head->vl_symbol);
10237
10238 if (!ZERO_VIEW_P (curr->vend))
10239 {
10240 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10241 dw2_asm_output_symname_uleb128 (label,
10242 "View list end (%s)",
10243 list_head->vl_symbol);
10244 }
10245 else
10246 dw2_asm_output_data_uleb128 (0,
10247 "View list end (%s)",
10248 list_head->vl_symbol);
10249 }
10250 else
10251 {
10252 dw2_asm_output_data_uleb128 (curr->vbegin,
10253 "View list begin (%s)",
10254 list_head->vl_symbol);
10255 dw2_asm_output_data_uleb128 (curr->vend,
10256 "View list end (%s)",
10257 list_head->vl_symbol);
10258 }
10259 }
10260 }
10261
10262 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10263
10264 const char *last_section = NULL;
10265 const char *base_label = NULL;
10266
10267 /* Walk the location list, and output each range + expression. */
10268 for (dw_loc_list_ref curr = list_head; curr != NULL;
10269 curr = curr->dw_loc_next)
10270 {
10271 unsigned long size;
10272
10273 /* Skip this entry? If we skip it here, we must skip it in the
10274 view list above as well. */
10275 if (skip_loc_list_entry (curr, &size))
10276 continue;
10277
10278 lcount++;
10279
10280 if (dwarf_version >= 5)
10281 {
10282 if (dwarf_split_debug_info)
10283 {
10284 dwarf2out_maybe_output_loclist_view_pair (curr);
10285 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10286 uleb128 index into .debug_addr and uleb128 length. */
10287 dw2_asm_output_data (1, DW_LLE_startx_length,
10288 "DW_LLE_startx_length (%s)",
10289 list_head->ll_symbol);
10290 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10291 "Location list range start index "
10292 "(%s)", curr->begin);
10293 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10294 For that case we probably need to emit DW_LLE_startx_endx,
10295 but we'd need 2 .debug_addr entries rather than just one. */
10296 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10297 "Location list length (%s)",
10298 list_head->ll_symbol);
10299 }
10300 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10301 {
10302 dwarf2out_maybe_output_loclist_view_pair (curr);
10303 /* If all code is in .text section, the base address is
10304 already provided by the CU attributes. Use
10305 DW_LLE_offset_pair where both addresses are uleb128 encoded
10306 offsets against that base. */
10307 dw2_asm_output_data (1, DW_LLE_offset_pair,
10308 "DW_LLE_offset_pair (%s)",
10309 list_head->ll_symbol);
10310 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10311 "Location list begin address (%s)",
10312 list_head->ll_symbol);
10313 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10314 "Location list end address (%s)",
10315 list_head->ll_symbol);
10316 }
10317 else if (HAVE_AS_LEB128)
10318 {
10319 /* Otherwise, find out how many consecutive entries could share
10320 the same base entry. If just one, emit DW_LLE_start_length,
10321 otherwise emit DW_LLE_base_address for the base address
10322 followed by a series of DW_LLE_offset_pair. */
10323 if (last_section == NULL || curr->section != last_section)
10324 {
10325 dw_loc_list_ref curr2;
10326 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10327 curr2 = curr2->dw_loc_next)
10328 {
10329 if (strcmp (curr2->begin, curr2->end) == 0
10330 && !curr2->force)
10331 continue;
10332 break;
10333 }
10334 if (curr2 == NULL || curr->section != curr2->section)
10335 last_section = NULL;
10336 else
10337 {
10338 last_section = curr->section;
10339 base_label = curr->begin;
10340 dw2_asm_output_data (1, DW_LLE_base_address,
10341 "DW_LLE_base_address (%s)",
10342 list_head->ll_symbol);
10343 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10344 "Base address (%s)",
10345 list_head->ll_symbol);
10346 }
10347 }
10348 /* Only one entry with the same base address. Use
10349 DW_LLE_start_length with absolute address and uleb128
10350 length. */
10351 if (last_section == NULL)
10352 {
10353 dwarf2out_maybe_output_loclist_view_pair (curr);
10354 dw2_asm_output_data (1, DW_LLE_start_length,
10355 "DW_LLE_start_length (%s)",
10356 list_head->ll_symbol);
10357 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10358 "Location list begin address (%s)",
10359 list_head->ll_symbol);
10360 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10361 "Location list length "
10362 "(%s)", list_head->ll_symbol);
10363 }
10364 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10365 DW_LLE_base_address. */
10366 else
10367 {
10368 dwarf2out_maybe_output_loclist_view_pair (curr);
10369 dw2_asm_output_data (1, DW_LLE_offset_pair,
10370 "DW_LLE_offset_pair (%s)",
10371 list_head->ll_symbol);
10372 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10373 "Location list begin address "
10374 "(%s)", list_head->ll_symbol);
10375 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10376 "Location list end address "
10377 "(%s)", list_head->ll_symbol);
10378 }
10379 }
10380 /* The assembler does not support the .uleb128 directive. Emit
10381 DW_LLE_start_end with a pair of absolute addresses. */
10382 else
10383 {
10384 dwarf2out_maybe_output_loclist_view_pair (curr);
10385 dw2_asm_output_data (1, DW_LLE_start_end,
10386 "DW_LLE_start_end (%s)",
10387 list_head->ll_symbol);
10388 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10389 "Location list begin address (%s)",
10390 list_head->ll_symbol);
10391 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10392 "Location list end address (%s)",
10393 list_head->ll_symbol);
10394 }
10395 }
10396 else if (dwarf_split_debug_info)
10397 {
10398 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10399 and 4 byte length. */
10400 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10401 "Location list start/length entry (%s)",
10402 list_head->ll_symbol);
10403 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10404 "Location list range start index (%s)",
10405 curr->begin);
10406 /* The length field is 4 bytes. If we ever need to support
10407 an 8-byte length, we can add a new DW_LLE code or fall back
10408 to DW_LLE_GNU_start_end_entry. */
10409 dw2_asm_output_delta (4, curr->end, curr->begin,
10410 "Location list range length (%s)",
10411 list_head->ll_symbol);
10412 }
10413 else if (!have_multiple_function_sections)
10414 {
10415 /* Pair of relative addresses against start of text section. */
10416 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10417 "Location list begin address (%s)",
10418 list_head->ll_symbol);
10419 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10420 "Location list end address (%s)",
10421 list_head->ll_symbol);
10422 }
10423 else
10424 {
10425 /* Pair of absolute addresses. */
10426 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10427 "Location list begin address (%s)",
10428 list_head->ll_symbol);
10429 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10430 "Location list end address (%s)",
10431 list_head->ll_symbol);
10432 }
10433
10434 /* Output the block length for this list of location operations. */
10435 if (dwarf_version >= 5)
10436 dw2_asm_output_data_uleb128 (size, "Location expression size");
10437 else
10438 {
10439 gcc_assert (size <= 0xffff);
10440 dw2_asm_output_data (2, size, "Location expression size");
10441 }
10442
10443 output_loc_sequence (curr->expr, -1);
10444 }
10445
10446 /* And finally list termination. */
10447 if (dwarf_version >= 5)
10448 dw2_asm_output_data (1, DW_LLE_end_of_list,
10449 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10450 else if (dwarf_split_debug_info)
10451 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10452 "Location list terminator (%s)",
10453 list_head->ll_symbol);
10454 else
10455 {
10456 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10457 "Location list terminator begin (%s)",
10458 list_head->ll_symbol);
10459 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10460 "Location list terminator end (%s)",
10461 list_head->ll_symbol);
10462 }
10463
10464 gcc_assert (!list_head->vl_symbol
10465 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10466 }
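
/* Sketch of a single DWARF 5 entry as emitted above for the common case of
   one text section with assembler leb128 support (a view pair may precede
   it when location views are enabled):

     DW_LLE_offset_pair            1 byte
     begin - section start         uleb128
     end - section start           uleb128
     location expression size      uleb128
     location expression bytes

   and the whole list ends with a single DW_LLE_end_of_list byte. */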
10467
10468 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10469 section. Emit a relocated reference if val_entry is NULL, otherwise,
10470 emit an indirect reference. */
10471
10472 static void
10473 output_range_list_offset (dw_attr_node *a)
10474 {
10475 const char *name = dwarf_attr_name (a->dw_attr);
10476
10477 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10478 {
10479 if (dwarf_version >= 5)
10480 {
10481 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10482 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10483 debug_ranges_section, "%s", name);
10484 }
10485 else
10486 {
10487 char *p = strchr (ranges_section_label, '\0');
10488 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10489 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10490 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10491 debug_ranges_section, "%s", name);
10492 *p = '\0';
10493 }
10494 }
10495 else if (dwarf_version >= 5)
10496 {
10497 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10498 gcc_assert (rnglist_idx);
10499 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10500 }
10501 else
10502 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10503 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10504 "%s (offset from %s)", name, ranges_section_label);
10505 }
10506
10507 /* Output the offset into the debug_loc section. */
10508
10509 static void
10510 output_loc_list_offset (dw_attr_node *a)
10511 {
10512 char *sym = AT_loc_list (a)->ll_symbol;
10513
10514 gcc_assert (sym);
10515 if (!dwarf_split_debug_info)
10516 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10517 "%s", dwarf_attr_name (a->dw_attr));
10518 else if (dwarf_version >= 5)
10519 {
10520 gcc_assert (AT_loc_list (a)->num_assigned);
10521 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10522 dwarf_attr_name (a->dw_attr),
10523 sym);
10524 }
10525 else
10526 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10527 "%s", dwarf_attr_name (a->dw_attr));
10528 }
10529
10530 /* Output the offset of the view list into the debug_loc section. */
10531
10532 static void
10533 output_view_list_offset (dw_attr_node *a)
10534 {
10535 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10536
10537 gcc_assert (sym);
10538 if (dwarf_split_debug_info)
10539 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10540 "%s", dwarf_attr_name (a->dw_attr));
10541 else
10542 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10543 "%s", dwarf_attr_name (a->dw_attr));
10544 }
10545
10546 /* Output an attribute's index or value appropriately. */
10547
10548 static void
10549 output_attr_index_or_value (dw_attr_node *a)
10550 {
10551 const char *name = dwarf_attr_name (a->dw_attr);
10552
10553 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10554 {
10555 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10556 return;
10557 }
10558 switch (AT_class (a))
10559 {
10560 case dw_val_class_addr:
10561 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10562 break;
10563 case dw_val_class_high_pc:
10564 case dw_val_class_lbl_id:
10565 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10566 break;
10567 default:
10568 gcc_unreachable ();
10569 }
10570 }
10571
10572 /* Output a type signature. */
10573
10574 static inline void
10575 output_signature (const char *sig, const char *name)
10576 {
10577 int i;
10578
10579 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10580 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10581 }
10582
10583 /* Output a discriminant value. */
10584
10585 static inline void
10586 output_discr_value (dw_discr_value *discr_value, const char *name)
10587 {
10588 if (discr_value->pos)
10589 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10590 else
10591 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10592 }
10593
10594 /* Output the DIE and its attributes. Called recursively to generate
10595 the definitions of each child DIE. */
10596
10597 static void
10598 output_die (dw_die_ref die)
10599 {
10600 dw_attr_node *a;
10601 dw_die_ref c;
10602 unsigned long size;
10603 unsigned ix;
10604
10605 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10606 (unsigned long)die->die_offset,
10607 dwarf_tag_name (die->die_tag));
10608
10609 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10610 {
10611 const char *name = dwarf_attr_name (a->dw_attr);
10612
10613 switch (AT_class (a))
10614 {
10615 case dw_val_class_addr:
10616 output_attr_index_or_value (a);
10617 break;
10618
10619 case dw_val_class_offset:
10620 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10621 "%s", name);
10622 break;
10623
10624 case dw_val_class_range_list:
10625 output_range_list_offset (a);
10626 break;
10627
10628 case dw_val_class_loc:
10629 size = size_of_locs (AT_loc (a));
10630
10631 /* Output the block length for this list of location operations. */
10632 if (dwarf_version >= 4)
10633 dw2_asm_output_data_uleb128 (size, "%s", name);
10634 else
10635 dw2_asm_output_data (constant_size (size), size, "%s", name);
10636
10637 output_loc_sequence (AT_loc (a), -1);
10638 break;
10639
10640 case dw_val_class_const:
10641 /* ??? It would be slightly more efficient to use a scheme like the one
10642 used for unsigned constants below, but gdb 4.x does not sign
10643 extend. Gdb 5.x does sign extend. */
10644 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10645 break;
10646
10647 case dw_val_class_unsigned_const:
10648 {
10649 int csize = constant_size (AT_unsigned (a));
10650 if (dwarf_version == 3
10651 && a->dw_attr == DW_AT_data_member_location
10652 && csize >= 4)
10653 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10654 else
10655 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10656 }
10657 break;
10658
10659 case dw_val_class_symview:
10660 {
10661 int vsize;
10662 if (symview_upper_bound <= 0xff)
10663 vsize = 1;
10664 else if (symview_upper_bound <= 0xffff)
10665 vsize = 2;
10666 else if (symview_upper_bound <= 0xffffffff)
10667 vsize = 4;
10668 else
10669 vsize = 8;
10670 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10671 "%s", name);
10672 }
10673 break;
10674
10675 case dw_val_class_const_implicit:
10676 if (flag_debug_asm)
10677 fprintf (asm_out_file, "\t\t\t%s %s ("
10678 HOST_WIDE_INT_PRINT_DEC ")\n",
10679 ASM_COMMENT_START, name, AT_int (a));
10680 break;
10681
10682 case dw_val_class_unsigned_const_implicit:
10683 if (flag_debug_asm)
10684 fprintf (asm_out_file, "\t\t\t%s %s ("
10685 HOST_WIDE_INT_PRINT_HEX ")\n",
10686 ASM_COMMENT_START, name, AT_unsigned (a));
10687 break;
10688
10689 case dw_val_class_const_double:
10690 {
10691 unsigned HOST_WIDE_INT first, second;
10692
10693 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10694 dw2_asm_output_data (1,
10695 HOST_BITS_PER_DOUBLE_INT
10696 / HOST_BITS_PER_CHAR,
10697 NULL);
10698
10699 if (WORDS_BIG_ENDIAN)
10700 {
10701 first = a->dw_attr_val.v.val_double.high;
10702 second = a->dw_attr_val.v.val_double.low;
10703 }
10704 else
10705 {
10706 first = a->dw_attr_val.v.val_double.low;
10707 second = a->dw_attr_val.v.val_double.high;
10708 }
10709
10710 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10711 first, "%s", name);
10712 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10713 second, NULL);
10714 }
10715 break;
10716
10717 case dw_val_class_wide_int:
10718 {
10719 int i;
10720 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10721 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10722 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10723 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10724 * l, NULL);
10725
10726 if (WORDS_BIG_ENDIAN)
10727 for (i = len - 1; i >= 0; --i)
10728 {
10729 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10730 "%s", name);
10731 name = "";
10732 }
10733 else
10734 for (i = 0; i < len; ++i)
10735 {
10736 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10737 "%s", name);
10738 name = "";
10739 }
10740 }
10741 break;
10742
10743 case dw_val_class_vec:
10744 {
10745 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10746 unsigned int len = a->dw_attr_val.v.val_vec.length;
10747 unsigned int i;
10748 unsigned char *p;
10749
10750 dw2_asm_output_data (constant_size (len * elt_size),
10751 len * elt_size, "%s", name);
10752 if (elt_size > sizeof (HOST_WIDE_INT))
10753 {
10754 elt_size /= 2;
10755 len *= 2;
10756 }
10757 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10758 i < len;
10759 i++, p += elt_size)
10760 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10761 "fp or vector constant word %u", i);
10762 break;
10763 }
10764
10765 case dw_val_class_flag:
10766 if (dwarf_version >= 4)
10767 {
10768 /* Currently all add_AT_flag calls pass in 1 as last argument,
10769 so DW_FORM_flag_present can be used. If that ever changes,
10770 we'll need to use DW_FORM_flag and have some optimization
10771 in build_abbrev_table that will change those to
10772 DW_FORM_flag_present if it is set to 1 in all DIEs using
10773 the same abbrev entry. */
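/* DW_FORM_flag_present stores the value entirely in the abbreviation,
   so nothing is emitted into the DIE itself here; only an assembly
   comment is printed, as for the *_implicit classes above.  */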
10774 gcc_assert (AT_flag (a) == 1);
10775 if (flag_debug_asm)
10776 fprintf (asm_out_file, "\t\t\t%s %s\n",
10777 ASM_COMMENT_START, name);
10778 break;
10779 }
10780 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10781 break;
10782
10783 case dw_val_class_loc_list:
10784 output_loc_list_offset (a);
10785 break;
10786
10787 case dw_val_class_view_list:
10788 output_view_list_offset (a);
10789 break;
10790
10791 case dw_val_class_die_ref:
10792 if (AT_ref_external (a))
10793 {
10794 if (AT_ref (a)->comdat_type_p)
10795 {
10796 comdat_type_node *type_node
10797 = AT_ref (a)->die_id.die_type_node;
10798
10799 gcc_assert (type_node);
10800 output_signature (type_node->signature, name);
10801 }
10802 else
10803 {
10804 const char *sym = AT_ref (a)->die_id.die_symbol;
10805 int size;
10806
10807 gcc_assert (sym);
10808 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10809 length, whereas in DWARF3 it's always sized as an
10810 offset. */
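/* For example, on a 64-bit target using 32-bit DWARF offsets, a
   DWARF 2 DW_FORM_ref_addr reference occupies 8 bytes, while for
   DWARF 3 and later it is the 4-byte offset size.  */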
10811 if (dwarf_version == 2)
10812 size = DWARF2_ADDR_SIZE;
10813 else
10814 size = DWARF_OFFSET_SIZE;
10815 /* ??? We cannot unconditionally output die_offset when it is
10816 non-zero - others might create references to those
10817 DIEs via symbols.
10818 Also, we do not clear the DIE offset after outputting it,
10819 and the label refers to the actual DIE, not to the DWARF
10820 CU header, relative to which label + offset would be the
10821 correct thing to emit.
10822 ??? This is the reason for the with_offset flag. */
10823 if (AT_ref (a)->with_offset)
10824 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10825 debug_info_section, "%s", name);
10826 else
10827 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10828 name);
10829 }
10830 }
10831 else
10832 {
10833 gcc_assert (AT_ref (a)->die_offset);
10834 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10835 "%s", name);
10836 }
10837 break;
10838
10839 case dw_val_class_fde_ref:
10840 {
10841 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10842
10843 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10844 a->dw_attr_val.v.val_fde_index * 2);
10845 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10846 "%s", name);
10847 }
10848 break;
10849
10850 case dw_val_class_vms_delta:
10851 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10852 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10853 AT_vms_delta2 (a), AT_vms_delta1 (a),
10854 "%s", name);
10855 #else
10856 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10857 AT_vms_delta2 (a), AT_vms_delta1 (a),
10858 "%s", name);
10859 #endif
10860 break;
10861
10862 case dw_val_class_lbl_id:
10863 output_attr_index_or_value (a);
10864 break;
10865
10866 case dw_val_class_lineptr:
10867 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10868 debug_line_section, "%s", name);
10869 break;
10870
10871 case dw_val_class_macptr:
10872 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10873 debug_macinfo_section, "%s", name);
10874 break;
10875
10876 case dw_val_class_loclistsptr:
10877 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10878 debug_loc_section, "%s", name);
10879 break;
10880
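/* String attributes can be emitted below in one of four ways:
   DW_FORM_strp and DW_FORM_line_strp are offset-size references into
   .debug_str and .debug_line_str respectively, DW_FORM_strx is a
   uleb128 index into the .debug_str_offsets table (typically used
   with -gsplit-dwarf), and anything else falls back to an inline
   NUL-terminated DW_FORM_string.  */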
10881 case dw_val_class_str:
10882 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10883 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10884 a->dw_attr_val.v.val_str->label,
10885 debug_str_section,
10886 "%s: \"%s\"", name, AT_string (a));
10887 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10888 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10889 a->dw_attr_val.v.val_str->label,
10890 debug_line_str_section,
10891 "%s: \"%s\"", name, AT_string (a));
10892 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10893 dw2_asm_output_data_uleb128 (AT_index (a),
10894 "%s: \"%s\"", name, AT_string (a));
10895 else
10896 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10897 break;
10898
10899 case dw_val_class_file:
10900 {
10901 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10902
10903 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10904 a->dw_attr_val.v.val_file->filename);
10905 break;
10906 }
10907
10908 case dw_val_class_file_implicit:
10909 if (flag_debug_asm)
10910 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10911 ASM_COMMENT_START, name,
10912 maybe_emit_file (a->dw_attr_val.v.val_file),
10913 a->dw_attr_val.v.val_file->filename);
10914 break;
10915
10916 case dw_val_class_data8:
10917 {
10918 int i;
10919
10920 for (i = 0; i < 8; i++)
10921 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10922 i == 0 ? "%s" : NULL, name);
10923 break;
10924 }
10925
10926 case dw_val_class_high_pc:
10927 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10928 get_AT_low_pc (die), "DW_AT_high_pc");
10929 break;
10930
10931 case dw_val_class_discr_value:
10932 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10933 break;
10934
10935 case dw_val_class_discr_list:
10936 {
10937 dw_discr_list_ref list = AT_discr_list (a);
10938 const int size = size_of_discr_list (list);
10939
10940 /* This is a block, so output its length first. */
10941 dw2_asm_output_data (constant_size (size), size,
10942 "%s: block size", name);
10943
10944 for (; list != NULL; list = list->dw_discr_next)
10945 {
10946 /* One byte for the discriminant value descriptor, and then as
10947 many LEB128 numbers as required. */
10948 if (list->dw_discr_range)
10949 dw2_asm_output_data (1, DW_DSC_range,
10950 "%s: DW_DSC_range", name);
10951 else
10952 dw2_asm_output_data (1, DW_DSC_label,
10953 "%s: DW_DSC_label", name);
10954
10955 output_discr_value (&list->dw_discr_lower_bound, name);
10956 if (list->dw_discr_range)
10957 output_discr_value (&list->dw_discr_upper_bound, name);
10958 }
10959 break;
10960 }
10961
10962 default:
10963 gcc_unreachable ();
10964 }
10965 }
10966
10967 FOR_EACH_CHILD (die, c, output_die (c));
10968
10969 /* Add null byte to terminate sibling list. */
10970 if (die->die_child != NULL)
10971 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10972 (unsigned long) die->die_offset);
10973 }
10974
10975 /* Output the dwarf version number. */
10976
10977 static void
10978 output_dwarf_version ()
10979 {
10980 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10981 views in loclist. That will change eventually. */
10982 if (dwarf_version == 6)
10983 {
10984 static bool once;
10985 if (!once)
10986 {
10987 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10988 "incompatibilities");
10989 once = true;
10990 }
10991 dw2_asm_output_data (2, 5, "DWARF version number");
10992 }
10993 else
10994 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10995 }
10996
10997 /* Output the compilation unit that appears at the beginning of the
10998 .debug_info section, and precedes the DIE descriptions. */
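/* Roughly, for 32-bit DWARF the header emitted below is:
     DWARF 2-4: unit_length (4), version (2), debug_abbrev_offset (4),
                address_size (1);
     DWARF 5:   unit_length (4), version (2), unit_type (1),
                address_size (1), debug_abbrev_offset (4);
   when 64-bit DWARF is in use, a 12-byte initial length escape and
   8-byte offsets are emitted instead.  */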
10999
11000 static void
11001 output_compilation_unit_header (enum dwarf_unit_type ut)
11002 {
11003 if (!XCOFF_DEBUGGING_INFO)
11004 {
11005 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11006 dw2_asm_output_data (4, 0xffffffff,
11007 "Initial length escape value indicating 64-bit DWARF extension");
11008 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11009 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11010 "Length of Compilation Unit Info");
11011 }
11012
11013 output_dwarf_version ();
11014 if (dwarf_version >= 5)
11015 {
11016 const char *name;
11017 switch (ut)
11018 {
11019 case DW_UT_compile: name = "DW_UT_compile"; break;
11020 case DW_UT_type: name = "DW_UT_type"; break;
11021 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11022 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11023 default: gcc_unreachable ();
11024 }
11025 dw2_asm_output_data (1, ut, "%s", name);
11026 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11027 }
11028 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11029 debug_abbrev_section,
11030 "Offset Into Abbrev. Section");
11031 if (dwarf_version < 5)
11032 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11033 }
11034
11035 /* Output the compilation unit DIE and its children. */
11036
11037 static void
11038 output_comp_unit (dw_die_ref die, int output_if_empty,
11039 const unsigned char *dwo_id)
11040 {
11041 const char *secname, *oldsym;
11042 char *tmp;
11043
11044 /* Unless we are outputting the main CU, we may throw away empty ones. */
11045 if (!output_if_empty && die->die_child == NULL)
11046 return;
11047
11048 /* Even if there are no children of this DIE, we must output the information
11049 about the compilation unit. Otherwise, on an empty translation unit, we
11050 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11051 will then complain when examining the file. First mark all the DIEs in
11052 this CU so we know which get local refs. */
11053 mark_dies (die);
11054
11055 external_ref_hash_type *extern_map = optimize_external_refs (die);
11056
11057 /* For now, optimize only the main CU, in order to optimize the rest
11058 we'd need to see all of them earlier. Leave the rest for post-linking
11059 tools like DWZ. */
11060 if (die == comp_unit_die ())
11061 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11062
11063 build_abbrev_table (die, extern_map);
11064
11065 optimize_abbrev_table ();
11066
11067 delete extern_map;
11068
11069 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11070 next_die_offset = (dwo_id
11071 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11072 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11073 calc_die_sizes (die);
11074
11075 oldsym = die->die_id.die_symbol;
11076 if (oldsym && die->comdat_type_p)
11077 {
11078 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11079
11080 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11081 secname = tmp;
11082 die->die_id.die_symbol = NULL;
11083 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11084 }
11085 else
11086 {
11087 switch_to_section (debug_info_section);
11088 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11089 info_section_emitted = true;
11090 }
11091
11092 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11093 debug info section, not on the CU DIE. */
11094 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11095 {
11096 /* ??? No way to get visibility assembled without a decl. */
11097 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11098 get_identifier (oldsym), char_type_node);
11099 TREE_PUBLIC (decl) = true;
11100 TREE_STATIC (decl) = true;
11101 DECL_ARTIFICIAL (decl) = true;
11102 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11103 DECL_VISIBILITY_SPECIFIED (decl) = true;
11104 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11105 #ifdef ASM_WEAKEN_LABEL
11106 /* We prefer a .weak because that handles duplicates from duplicate
11107 archive members in a graceful way. */
11108 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11109 #else
11110 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11111 #endif
11112 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11113 }
11114
11115 /* Output debugging information. */
11116 output_compilation_unit_header (dwo_id
11117 ? DW_UT_split_compile : DW_UT_compile);
11118 if (dwarf_version >= 5)
11119 {
11120 if (dwo_id != NULL)
11121 for (int i = 0; i < 8; i++)
11122 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11123 }
11124 output_die (die);
11125
11126 /* Leave the marks on the main CU, so we can check them in
11127 output_pubnames. */
11128 if (oldsym)
11129 {
11130 unmark_dies (die);
11131 die->die_id.die_symbol = oldsym;
11132 }
11133 }
11134
11135 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11136 and .debug_pubtypes. This is configured per-target, but can be
11137 overridden by the -gpubnames or -gno-pubnames options. */
11138
11139 static inline bool
11140 want_pubnames (void)
11141 {
11142 if (debug_info_level <= DINFO_LEVEL_TERSE
11143 /* Names and types go to the early debug part only. */
11144 || in_lto_p)
11145 return false;
11146 if (debug_generate_pub_sections != -1)
11147 return debug_generate_pub_sections;
11148 return targetm.want_debug_pub_sections;
11149 }
11150
11151 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11152
11153 static void
11154 add_AT_pubnames (dw_die_ref die)
11155 {
11156 if (want_pubnames ())
11157 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11158 }
11159
11160 /* Add a string attribute value to a skeleton DIE. */
11161
11162 static inline void
11163 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11164 const char *str)
11165 {
11166 dw_attr_node attr;
11167 struct indirect_string_node *node;
11168
11169 if (! skeleton_debug_str_hash)
11170 skeleton_debug_str_hash
11171 = hash_table<indirect_string_hasher>::create_ggc (10);
11172
11173 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11174 find_string_form (node);
11175 if (node->form == dwarf_FORM (DW_FORM_strx))
11176 node->form = DW_FORM_strp;
11177
11178 attr.dw_attr = attr_kind;
11179 attr.dw_attr_val.val_class = dw_val_class_str;
11180 attr.dw_attr_val.val_entry = NULL;
11181 attr.dw_attr_val.v.val_str = node;
11182 add_dwarf_attr (die, &attr);
11183 }
11184
11185 /* Helper function to generate top-level dies for skeleton debug_info and
11186 debug_types. */
11187
11188 static void
11189 add_top_level_skeleton_die_attrs (dw_die_ref die)
11190 {
11191 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11192 const char *comp_dir = comp_dir_string ();
11193
11194 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11195 if (comp_dir != NULL)
11196 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11197 add_AT_pubnames (die);
11198 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11199 }
11200
11201 /* Output skeleton debug sections that point to the dwo file. */
11202
11203 static void
11204 output_skeleton_debug_sections (dw_die_ref comp_unit,
11205 const unsigned char *dwo_id)
11206 {
11207 /* These attributes will be found in the full debug_info section. */
11208 remove_AT (comp_unit, DW_AT_producer);
11209 remove_AT (comp_unit, DW_AT_language);
11210
11211 switch_to_section (debug_skeleton_info_section);
11212 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11213
11214 /* Produce the skeleton compilation-unit header. This one differs enough
11215 from a normal CU header that it's better not to call
11216 output_compilation_unit_header. */
11217 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11218 dw2_asm_output_data (4, 0xffffffff,
11219 "Initial length escape value indicating 64-bit "
11220 "DWARF extension");
11221
11222 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11223 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11224 - DWARF_INITIAL_LENGTH_SIZE
11225 + size_of_die (comp_unit),
11226 "Length of Compilation Unit Info");
11227 output_dwarf_version ();
11228 if (dwarf_version >= 5)
11229 {
11230 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11231 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11232 }
11233 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11234 debug_skeleton_abbrev_section,
11235 "Offset Into Abbrev. Section");
11236 if (dwarf_version < 5)
11237 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11238 else
11239 for (int i = 0; i < 8; i++)
11240 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11241
11242 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11243 output_die (comp_unit);
11244
11245 /* Build the skeleton debug_abbrev section. */
11246 switch_to_section (debug_skeleton_abbrev_section);
11247 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11248
11249 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11250
11251 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11252 }
11253
11254 /* Output a comdat type unit DIE and its children. */
11255
11256 static void
11257 output_comdat_type_unit (comdat_type_node *node,
11258 bool early_lto_debug ATTRIBUTE_UNUSED)
11259 {
11260 const char *secname;
11261 char *tmp;
11262 int i;
11263 #if defined (OBJECT_FORMAT_ELF)
11264 tree comdat_key;
11265 #endif
11266
11267 /* First mark all the DIEs in this CU so we know which get local refs. */
11268 mark_dies (node->root_die);
11269
11270 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11271
11272 build_abbrev_table (node->root_die, extern_map);
11273
11274 delete extern_map;
11275 extern_map = NULL;
11276
11277 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11278 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11279 calc_die_sizes (node->root_die);
11280
11281 #if defined (OBJECT_FORMAT_ELF)
11282 if (dwarf_version >= 5)
11283 {
11284 if (!dwarf_split_debug_info)
11285 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11286 else
11287 secname = (early_lto_debug
11288 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11289 }
11290 else if (!dwarf_split_debug_info)
11291 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11292 else
11293 secname = (early_lto_debug
11294 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11295
11296 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11297 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11298 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11299 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11300 comdat_key = get_identifier (tmp);
11301 targetm.asm_out.named_section (secname,
11302 SECTION_DEBUG | SECTION_LINKONCE,
11303 comdat_key);
11304 #else
11305 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11306 sprintf (tmp, (dwarf_version >= 5
11307 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11308 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11309 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11310 secname = tmp;
11311 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11312 #endif
11313
11314 /* Output debugging information. */
11315 output_compilation_unit_header (dwarf_split_debug_info
11316 ? DW_UT_split_type : DW_UT_type);
11317 output_signature (node->signature, "Type Signature");
11318 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11319 "Offset to Type DIE");
11320 output_die (node->root_die);
11321
11322 unmark_dies (node->root_die);
11323 }
11324
11325 /* Return the DWARF2/3 pubname associated with a decl. */
11326
11327 static const char *
11328 dwarf2_name (tree decl, int scope)
11329 {
11330 if (DECL_NAMELESS (decl))
11331 return NULL;
11332 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11333 }
11334
11335 /* Add a new entry to .debug_pubnames if appropriate. */
11336
11337 static void
11338 add_pubname_string (const char *str, dw_die_ref die)
11339 {
11340 pubname_entry e;
11341
11342 e.die = die;
11343 e.name = xstrdup (str);
11344 vec_safe_push (pubname_table, e);
11345 }
11346
11347 static void
11348 add_pubname (tree decl, dw_die_ref die)
11349 {
11350 if (!want_pubnames ())
11351 return;
11352
11353 /* Don't add items to the table when we expect that the consumer will have
11354 just read the enclosing die. For example, if the consumer is looking at a
11355 class_member, it will either be inside the class already, or will have just
11356 looked up the class to find the member. Either way, searching the class is
11357 faster than searching the index. */
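/* For instance, a public member function whose DIE sits directly
   inside its class DIE gets no entry here - a consumer will reach it
   through the class - whereas named declarations placed directly at
   CU or namespace scope are indexed as well.  */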
11358 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11359 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11360 {
11361 const char *name = dwarf2_name (decl, 1);
11362
11363 if (name)
11364 add_pubname_string (name, die);
11365 }
11366 }
11367
11368 /* Add an enumerator to the pubnames section. */
11369
11370 static void
11371 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11372 {
11373 pubname_entry e;
11374
11375 gcc_assert (scope_name);
11376 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11377 e.die = die;
11378 vec_safe_push (pubname_table, e);
11379 }
11380
11381 /* Add a new entry to .debug_pubtypes if appropriate. */
11382
11383 static void
11384 add_pubtype (tree decl, dw_die_ref die)
11385 {
11386 pubname_entry e;
11387
11388 if (!want_pubnames ())
11389 return;
11390
11391 if ((TREE_PUBLIC (decl)
11392 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11393 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11394 {
11395 tree scope = NULL;
11396 const char *scope_name = "";
11397 const char *sep = is_cxx () ? "::" : ".";
11398 const char *name;
11399
11400 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11401 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11402 {
11403 scope_name = lang_hooks.dwarf_name (scope, 1);
11404 if (scope_name != NULL && scope_name[0] != '\0')
11405 scope_name = concat (scope_name, sep, NULL);
11406 else
11407 scope_name = "";
11408 }
11409
11410 if (TYPE_P (decl))
11411 name = type_tag (decl);
11412 else
11413 name = lang_hooks.dwarf_name (decl, 1);
11414
11415 /* If we don't have a name for the type, there's no point in adding
11416 it to the table. */
11417 if (name != NULL && name[0] != '\0')
11418 {
11419 e.die = die;
11420 e.name = concat (scope_name, name, NULL);
11421 vec_safe_push (pubtype_table, e);
11422 }
11423
11424 /* Although it might be more consistent to add the pubinfo for the
11425 enumerators as their dies are created, they should only be added if the
11426 enum type meets the criteria above. So rather than re-check the parent
11427 enum type whenever an enumerator die is created, just output them all
11428 here. This isn't protected by the name conditional because anonymous
11429 enums don't have names. */
11430 if (die->die_tag == DW_TAG_enumeration_type)
11431 {
11432 dw_die_ref c;
11433
11434 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11435 }
11436 }
11437 }
11438
11439 /* Output a single entry in the pubnames table. */
11440
11441 static void
11442 output_pubname (dw_offset die_offset, pubname_entry *entry)
11443 {
11444 dw_die_ref die = entry->die;
11445 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11446
11447 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11448
11449 if (debug_generate_pub_sections == 2)
11450 {
11451 /* This logic follows gdb's method for determining the value of the flag
11452 byte. */
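/* The value assembled here is the 32-bit CU vector word used by the
   .gdb_index format: the low GDB_INDEX_CU_BITSIZE bits would hold the
   CU index (left as zero here), and the bits above it hold the symbol
   kind and the is-static flag; only those upper bits are emitted
   below, as a single byte.  */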
11453 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11454 switch (die->die_tag)
11455 {
11456 case DW_TAG_typedef:
11457 case DW_TAG_base_type:
11458 case DW_TAG_subrange_type:
11459 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11460 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11461 break;
11462 case DW_TAG_enumerator:
11463 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11464 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11465 if (!is_cxx ())
11466 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11467 break;
11468 case DW_TAG_subprogram:
11469 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11470 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11471 if (!is_ada ())
11472 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11473 break;
11474 case DW_TAG_constant:
11475 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11476 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11477 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11478 break;
11479 case DW_TAG_variable:
11480 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11481 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11482 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11483 break;
11484 case DW_TAG_namespace:
11485 case DW_TAG_imported_declaration:
11486 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11487 break;
11488 case DW_TAG_class_type:
11489 case DW_TAG_interface_type:
11490 case DW_TAG_structure_type:
11491 case DW_TAG_union_type:
11492 case DW_TAG_enumeration_type:
11493 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11494 if (!is_cxx ())
11495 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11496 break;
11497 default:
11498 /* An unusual tag. Leave the flag-byte empty. */
11499 break;
11500 }
11501 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11502 "GDB-index flags");
11503 }
11504
11505 dw2_asm_output_nstring (entry->name, -1, "external name");
11506 }
11507
11508
11509 /* Output the public names table used to speed up access to externally
11510 visible names; or the public types table used to find type definitions. */
11511
11512 static void
11513 output_pubnames (vec<pubname_entry, va_gc> *names)
11514 {
11515 unsigned i;
11516 unsigned long pubnames_length = size_of_pubnames (names);
11517 pubname_entry *pub;
11518
11519 if (!XCOFF_DEBUGGING_INFO)
11520 {
11521 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11522 dw2_asm_output_data (4, 0xffffffff,
11523 "Initial length escape value indicating 64-bit DWARF extension");
11524 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11525 "Pub Info Length");
11526 }
11527
11528 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11529 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11530
11531 if (dwarf_split_debug_info)
11532 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11533 debug_skeleton_info_section,
11534 "Offset of Compilation Unit Info");
11535 else
11536 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11537 debug_info_section,
11538 "Offset of Compilation Unit Info");
11539 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11540 "Compilation Unit Length");
11541
11542 FOR_EACH_VEC_ELT (*names, i, pub)
11543 {
11544 if (include_pubname_in_output (names, pub))
11545 {
11546 dw_offset die_offset = pub->die->die_offset;
11547
11548 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11549 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11550 gcc_assert (pub->die->die_mark);
11551
11552 /* If we're putting types in their own .debug_types sections,
11553 the .debug_pubtypes table will still point to the compile
11554 unit (not the type unit), so we want to use the offset of
11555 the skeleton DIE (if there is one). */
11556 if (pub->die->comdat_type_p && names == pubtype_table)
11557 {
11558 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11559
11560 if (type_node != NULL)
11561 die_offset = (type_node->skeleton_die != NULL
11562 ? type_node->skeleton_die->die_offset
11563 : comp_unit_die ()->die_offset);
11564 }
11565
11566 output_pubname (die_offset, pub);
11567 }
11568 }
11569
11570 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11571 }
11572
11573 /* Output public names and types tables if necessary. */
11574
11575 static void
11576 output_pubtables (void)
11577 {
11578 if (!want_pubnames () || !info_section_emitted)
11579 return;
11580
11581 switch_to_section (debug_pubnames_section);
11582 output_pubnames (pubname_table);
11583 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11584 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11585 simply won't look for the section. */
11586 switch_to_section (debug_pubtypes_section);
11587 output_pubnames (pubtype_table);
11588 }
11589
11590
11591 /* Output the information that goes into the .debug_aranges table.
11592 Namely, define the beginning and ending address range of the
11593 text section generated for this compilation unit. */
11594
11595 static void
11596 output_aranges (void)
11597 {
11598 unsigned i;
11599 unsigned long aranges_length = size_of_aranges ();
11600
11601 if (!XCOFF_DEBUGGING_INFO)
11602 {
11603 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11604 dw2_asm_output_data (4, 0xffffffff,
11605 "Initial length escape value indicating 64-bit DWARF extension");
11606 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11607 "Length of Address Ranges Info");
11608 }
11609
11610 /* Version number for aranges is still 2, even up to DWARF5. */
11611 dw2_asm_output_data (2, 2, "DWARF aranges version");
11612 if (dwarf_split_debug_info)
11613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11614 debug_skeleton_info_section,
11615 "Offset of Compilation Unit Info");
11616 else
11617 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11618 debug_info_section,
11619 "Offset of Compilation Unit Info");
11620 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11621 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11622
11623 /* We need to align to twice the pointer size here. */
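/* For example, with 4-byte DWARF offsets and 8-byte addresses the
   header emitted above is 4 + 2 + 4 + 1 + 1 = 12 bytes, so two 2-byte
   pad words bring the address pairs to the required 16-byte
   (2 * DWARF2_ADDR_SIZE) boundary.  */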
11624 if (DWARF_ARANGES_PAD_SIZE)
11625 {
11626 /* Pad using 2-byte words so that the padding is correct for any
11627 pointer size. */
11628 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11629 2 * DWARF2_ADDR_SIZE);
11630 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11631 dw2_asm_output_data (2, 0, NULL);
11632 }
11633
11634 /* It is necessary not to output these entries if the sections were
11635 not used: in that case the length will be 0, and the address may
11636 also end up as 0 if the section is discarded by ld --gc-sections,
11637 leaving an invalid (0, 0) entry that can be confused with the
11638 terminator. */
11639 if (text_section_used)
11640 {
11641 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11642 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11643 text_section_label, "Length");
11644 }
11645 if (cold_text_section_used)
11646 {
11647 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11648 "Address");
11649 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11650 cold_text_section_label, "Length");
11651 }
11652
11653 if (have_multiple_function_sections)
11654 {
11655 unsigned fde_idx;
11656 dw_fde_ref fde;
11657
11658 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11659 {
11660 if (DECL_IGNORED_P (fde->decl))
11661 continue;
11662 if (!fde->in_std_section)
11663 {
11664 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11665 "Address");
11666 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11667 fde->dw_fde_begin, "Length");
11668 }
11669 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11670 {
11671 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11672 "Address");
11673 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11674 fde->dw_fde_second_begin, "Length");
11675 }
11676 }
11677 }
11678
11679 /* Output the terminator words. */
11680 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11681 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11682 }
11683
11684 /* Add a new entry to .debug_ranges. Return its index into
11685 ranges_table vector. */
11686
11687 static unsigned int
11688 add_ranges_num (int num, bool maybe_new_sec)
11689 {
11690 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11691 vec_safe_push (ranges_table, r);
11692 return vec_safe_length (ranges_table) - 1;
11693 }
11694
11695 /* Add a new entry to .debug_ranges corresponding to a block, or a
11696 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11697 this entry might be in a different section from previous range. */
11698
11699 static unsigned int
11700 add_ranges (const_tree block, bool maybe_new_sec)
11701 {
11702 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11703 }
11704
11705 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11706 chain, or a middle entry of a chain that will be directly referred to. */
11707
11708 static void
11709 note_rnglist_head (unsigned int offset)
11710 {
11711 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11712 return;
11713 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11714 }
11715
11716 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11717 When using dwarf_split_debug_info, address attributes in dies destined
11718 for the final executable should be direct references--setting the
11719 parameter force_direct ensures this behavior. */
11720
11721 static void
11722 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11723 bool *added, bool force_direct)
11724 {
11725 unsigned int in_use = vec_safe_length (ranges_by_label);
11726 unsigned int offset;
11727 dw_ranges_by_label rbl = { begin, end };
11728 vec_safe_push (ranges_by_label, rbl);
11729 offset = add_ranges_num (-(int)in_use - 1, true);
11730 if (!*added)
11731 {
11732 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11733 *added = true;
11734 note_rnglist_head (offset);
11735 }
11736 }
11737
11738 /* Emit .debug_ranges section. */
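/* Each entry in the pre-DWARF 5 .debug_ranges section is a pair of
   address-size values: (begin, end) offsets relative to the CU base
   address when everything is in the text section, or absolute
   addresses otherwise (the CU base is then zero).  A (0, 0) pair
   terminates a list.  */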
11739
11740 static void
11741 output_ranges (void)
11742 {
11743 unsigned i;
11744 static const char *const start_fmt = "Offset %#x";
11745 const char *fmt = start_fmt;
11746 dw_ranges *r;
11747
11748 switch_to_section (debug_ranges_section);
11749 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11750 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11751 {
11752 int block_num = r->num;
11753
11754 if (block_num > 0)
11755 {
11756 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11757 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11758
11759 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11760 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11761
11762 /* If all code is in the text section, then the compilation
11763 unit base address defaults to DW_AT_low_pc, which is the
11764 base of the text section. */
11765 if (!have_multiple_function_sections)
11766 {
11767 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11768 text_section_label,
11769 fmt, i * 2 * DWARF2_ADDR_SIZE);
11770 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11771 text_section_label, NULL);
11772 }
11773
11774 /* Otherwise, the compilation unit base address is zero,
11775 which allows us to use absolute addresses, and not worry
11776 about whether the target supports cross-section
11777 arithmetic. */
11778 else
11779 {
11780 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11781 fmt, i * 2 * DWARF2_ADDR_SIZE);
11782 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11783 }
11784
11785 fmt = NULL;
11786 }
11787
11788 /* Negative block_num stands for an index into ranges_by_label. */
11789 else if (block_num < 0)
11790 {
11791 int lab_idx = - block_num - 1;
11792
11793 if (!have_multiple_function_sections)
11794 {
11795 gcc_unreachable ();
11796 #if 0
11797 /* If we ever use add_ranges_by_labels () for a single
11798 function section, all we have to do is to take out
11799 the #if 0 above. */
11800 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11801 (*ranges_by_label)[lab_idx].begin,
11802 text_section_label,
11803 fmt, i * 2 * DWARF2_ADDR_SIZE);
11804 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11805 (*ranges_by_label)[lab_idx].end,
11806 text_section_label, NULL);
11807 #endif
11808 }
11809 else
11810 {
11811 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11812 (*ranges_by_label)[lab_idx].begin,
11813 fmt, i * 2 * DWARF2_ADDR_SIZE);
11814 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11815 (*ranges_by_label)[lab_idx].end,
11816 NULL);
11817 }
11818 }
11819 else
11820 {
11821 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11822 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11823 fmt = start_fmt;
11824 }
11825 }
11826 }
11827
11828 /* Non-zero if .debug_line_str should be used for .debug_line section
11829 strings or strings that are likely shareable with those. */
11830 #define DWARF5_USE_DEBUG_LINE_STR \
11831 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11832 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11833 /* FIXME: there is no .debug_line_str.dwo section, \
11834 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11835 && !dwarf_split_debug_info)
11836
11837 /* Assign .debug_rnglists indexes. */
11838
11839 static void
11840 index_rnglists (void)
11841 {
11842 unsigned i;
11843 dw_ranges *r;
11844
11845 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11846 if (r->label)
11847 r->idx = rnglist_idx++;
11848 }
11849
11850 /* Emit .debug_rnglists section. */
11851
11852 static void
11853 output_rnglists (unsigned generation)
11854 {
11855 unsigned i;
11856 dw_ranges *r;
11857 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11858 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11859 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11860
11861 switch_to_section (debug_ranges_section);
11862 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11863 /* There are up to 4 unique ranges labels per generation.
11864 See also init_sections_and_labels. */
11865 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11866 2 + generation * 4);
11867 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11868 3 + generation * 4);
11869 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11870 dw2_asm_output_data (4, 0xffffffff,
11871 "Initial length escape value indicating "
11872 "64-bit DWARF extension");
11873 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11874 "Length of Range Lists");
11875 ASM_OUTPUT_LABEL (asm_out_file, l1);
11876 output_dwarf_version ();
11877 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11878 dw2_asm_output_data (1, 0, "Segment Size");
11879 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11880 about relocation sizes and primarily care about the size of .debug*
11881 sections in linked shared libraries and executables, then
11882 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11883 into it are usually larger than just DW_FORM_sec_offset offsets
11884 into the .debug_rnglists section. */
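/* As a rough cost comparison: a DW_FORM_rnglistx reference costs a
   uleb128 index (usually one byte) plus an offset-size slot in this
   table, while DW_FORM_sec_offset is a single offset-size value, so
   for non-split DWARF the direct offset is at least as compact.  */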
11885 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11886 "Offset Entry Count");
11887 if (dwarf_split_debug_info)
11888 {
11889 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11890 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11891 if (r->label)
11892 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11893 ranges_base_label, NULL);
11894 }
11895
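/* The loop below emits one DW_RLE_* entry per range: offset pairs
   relative to a base address where possible (DW_RLE_offset_pair,
   preceded by DW_RLE_base_address when a new base is established),
   start/length or start/end entries otherwise, and DW_RLE_end_of_list
   for each terminator.  */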
11896 const char *lab = "";
11897 unsigned int len = vec_safe_length (ranges_table);
11898 const char *base = NULL;
11899 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11900 {
11901 int block_num = r->num;
11902
11903 if (r->label)
11904 {
11905 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11906 lab = r->label;
11907 }
11908 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11909 base = NULL;
11910 if (block_num > 0)
11911 {
11912 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11913 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11914
11915 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11916 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11917
11918 if (HAVE_AS_LEB128)
11919 {
11920 /* If all code is in the text section, then the compilation
11921 unit base address defaults to DW_AT_low_pc, which is the
11922 base of the text section. */
11923 if (!have_multiple_function_sections)
11924 {
11925 dw2_asm_output_data (1, DW_RLE_offset_pair,
11926 "DW_RLE_offset_pair (%s)", lab);
11927 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11928 "Range begin address (%s)", lab);
11929 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11930 "Range end address (%s)", lab);
11931 continue;
11932 }
11933 if (base == NULL)
11934 {
11935 dw_ranges *r2 = NULL;
11936 if (i < len - 1)
11937 r2 = &(*ranges_table)[i + 1];
11938 if (r2
11939 && r2->num != 0
11940 && r2->label == NULL
11941 && !r2->maybe_new_sec)
11942 {
11943 dw2_asm_output_data (1, DW_RLE_base_address,
11944 "DW_RLE_base_address (%s)", lab);
11945 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11946 "Base address (%s)", lab);
11947 strcpy (basebuf, blabel);
11948 base = basebuf;
11949 }
11950 }
11951 if (base)
11952 {
11953 dw2_asm_output_data (1, DW_RLE_offset_pair,
11954 "DW_RLE_offset_pair (%s)", lab);
11955 dw2_asm_output_delta_uleb128 (blabel, base,
11956 "Range begin address (%s)", lab);
11957 dw2_asm_output_delta_uleb128 (elabel, base,
11958 "Range end address (%s)", lab);
11959 continue;
11960 }
11961 dw2_asm_output_data (1, DW_RLE_start_length,
11962 "DW_RLE_start_length (%s)", lab);
11963 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11964 "Range begin address (%s)", lab);
11965 dw2_asm_output_delta_uleb128 (elabel, blabel,
11966 "Range length (%s)", lab);
11967 }
11968 else
11969 {
11970 dw2_asm_output_data (1, DW_RLE_start_end,
11971 "DW_RLE_start_end (%s)", lab);
11972 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11973 "Range begin address (%s)", lab);
11974 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11975 "Range end address (%s)", lab);
11976 }
11977 }
11978
11979 /* Negative block_num stands for an index into ranges_by_label. */
11980 else if (block_num < 0)
11981 {
11982 int lab_idx = - block_num - 1;
11983 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11984 const char *elabel = (*ranges_by_label)[lab_idx].end;
11985
11986 if (!have_multiple_function_sections)
11987 gcc_unreachable ();
11988 if (HAVE_AS_LEB128)
11989 {
11990 dw2_asm_output_data (1, DW_RLE_start_length,
11991 "DW_RLE_start_length (%s)", lab);
11992 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11993 "Range begin address (%s)", lab);
11994 dw2_asm_output_delta_uleb128 (elabel, blabel,
11995 "Range length (%s)", lab);
11996 }
11997 else
11998 {
11999 dw2_asm_output_data (1, DW_RLE_start_end,
12000 "DW_RLE_start_end (%s)", lab);
12001 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12002 "Range begin address (%s)", lab);
12003 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12004 "Range end address (%s)", lab);
12005 }
12006 }
12007 else
12008 dw2_asm_output_data (1, DW_RLE_end_of_list,
12009 "DW_RLE_end_of_list (%s)", lab);
12010 }
12011 ASM_OUTPUT_LABEL (asm_out_file, l2);
12012 }
12013
12014 /* Data structure containing information about input files. */
12015 struct file_info
12016 {
12017 const char *path; /* Complete file name. */
12018 const char *fname; /* File name part. */
12019 int length; /* Length of entire string. */
12020 struct dwarf_file_data * file_idx; /* Index in input file table. */
12021 int dir_idx; /* Index in directory table. */
12022 };
12023
12024 /* Data structure containing information about directories with source
12025 files. */
12026 struct dir_info
12027 {
12028 const char *path; /* Path including directory name. */
12029 int length; /* Path length. */
12030 int prefix; /* Index of directory entry which is a prefix. */
12031 int count; /* Number of files in this directory. */
12032 int dir_idx; /* Index of directory used as base. */
12033 };
12034
12035 /* Callback function for file_info comparison. We sort by looking at
12036 the directories in the path. */
12037
12038 static int
12039 file_info_cmp (const void *p1, const void *p2)
12040 {
12041 const struct file_info *const s1 = (const struct file_info *) p1;
12042 const struct file_info *const s2 = (const struct file_info *) p2;
12043 const unsigned char *cp1;
12044 const unsigned char *cp2;
12045
12046 /* Take care of file names without directories. We need to return
12047 consistent values to qsort, since some implementations get confused if
12048 we return the same value when identical operands are passed in opposite
12049 orders. So if neither has a directory, return 0, and otherwise return
12050 1 or -1 depending on which one has the directory. We want the one with
12051 the directory to sort after the one without, so all files without a
12052 directory are at the start (normally only the compilation unit file). */
12053 if ((s1->path == s1->fname || s2->path == s2->fname))
12054 return (s2->path == s2->fname) - (s1->path == s1->fname);
12055
12056 cp1 = (const unsigned char *) s1->path;
12057 cp2 = (const unsigned char *) s2->path;
12058
12059 while (1)
12060 {
12061 ++cp1;
12062 ++cp2;
12063 /* Reached the end of the first path? If so, handle like above,
12064 but now we want longer directory prefixes before shorter ones. */
12065 if ((cp1 == (const unsigned char *) s1->fname)
12066 || (cp2 == (const unsigned char *) s2->fname))
12067 return ((cp1 == (const unsigned char *) s1->fname)
12068 - (cp2 == (const unsigned char *) s2->fname));
12069
12070 /* Characters of the current path component differ? */
12071 else if (*cp1 != *cp2)
12072 return *cp1 - *cp2;
12073 }
12074 }
12075
12076 struct file_name_acquire_data
12077 {
12078 struct file_info *files;
12079 int used_files;
12080 int max_files;
12081 };
12082
12083 /* Traversal function for the hash table. */
12084
12085 int
12086 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12087 {
12088 struct dwarf_file_data *d = *slot;
12089 struct file_info *fi;
12090 const char *f;
12091
12092 gcc_assert (fnad->max_files >= d->emitted_number);
12093
12094 if (! d->emitted_number)
12095 return 1;
12096
12097 gcc_assert (fnad->max_files != fnad->used_files);
12098
12099 fi = fnad->files + fnad->used_files++;
12100
12101 /* Skip all leading "./". */
12102 f = d->filename;
12103 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12104 f += 2;
12105
12106 /* Create a new array entry. */
12107 fi->path = f;
12108 fi->length = strlen (f);
12109 fi->file_idx = d;
12110
12111 /* Search for the file name part. */
12112 f = strrchr (f, DIR_SEPARATOR);
12113 #if defined (DIR_SEPARATOR_2)
12114 {
12115 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12116
12117 if (g != NULL)
12118 {
12119 if (f == NULL || f < g)
12120 f = g;
12121 }
12122 }
12123 #endif
12124
12125 fi->fname = f == NULL ? fi->path : f + 1;
12126 return 1;
12127 }
12128
12129 /* Helper function for output_file_names. Emit a FORM-encoded
12130 string STR, using ENTRY_KIND and index IDX in the assembly
12131 comment. */
12132
12133 static void
12134 output_line_string (enum dwarf_form form, const char *str,
12135 const char *entry_kind, unsigned int idx)
12136 {
12137 switch (form)
12138 {
12139 case DW_FORM_string:
12140 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12141 break;
12142 case DW_FORM_line_strp:
12143 if (!debug_line_str_hash)
12144 debug_line_str_hash
12145 = hash_table<indirect_string_hasher>::create_ggc (10);
12146
12147 struct indirect_string_node *node;
12148 node = find_AT_string_in_table (str, debug_line_str_hash);
12149 set_indirect_string (node);
12150 node->form = form;
12151 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12152 debug_line_str_section, "%s: %#x: \"%s\"",
12153 entry_kind, 0, node->str);
12154 break;
12155 default:
12156 gcc_unreachable ();
12157 }
12158 }
12159
12160 /* Output the directory table and the file name table. We try to minimize
12161 the total amount of memory needed. A heuristic is used to avoid large
12162 slowdowns with many input files. */
12163
12164 static void
12165 output_file_names (void)
12166 {
12167 struct file_name_acquire_data fnad;
12168 int numfiles;
12169 struct file_info *files;
12170 struct dir_info *dirs;
12171 int *saved;
12172 int *savehere;
12173 int *backmap;
12174 int ndirs;
12175 int idx_offset;
12176 int i;
12177
12178 if (!last_emitted_file)
12179 {
12180 if (dwarf_version >= 5)
12181 {
12182 dw2_asm_output_data (1, 0, "Directory entry format count");
12183 dw2_asm_output_data_uleb128 (0, "Directories count");
12184 dw2_asm_output_data (1, 0, "File name entry format count");
12185 dw2_asm_output_data_uleb128 (0, "File names count");
12186 }
12187 else
12188 {
12189 dw2_asm_output_data (1, 0, "End directory table");
12190 dw2_asm_output_data (1, 0, "End file name table");
12191 }
12192 return;
12193 }
12194
12195 numfiles = last_emitted_file->emitted_number;
12196
12197 /* Allocate the various arrays we need. */
12198 files = XALLOCAVEC (struct file_info, numfiles);
12199 dirs = XALLOCAVEC (struct dir_info, numfiles);
12200
12201 fnad.files = files;
12202 fnad.used_files = 0;
12203 fnad.max_files = numfiles;
12204 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12205 gcc_assert (fnad.used_files == fnad.max_files);
12206
12207 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12208
12209 /* Find all the different directories used. */
12210 dirs[0].path = files[0].path;
12211 dirs[0].length = files[0].fname - files[0].path;
12212 dirs[0].prefix = -1;
12213 dirs[0].count = 1;
12214 dirs[0].dir_idx = 0;
12215 files[0].dir_idx = 0;
12216 ndirs = 1;
12217
12218 for (i = 1; i < numfiles; i++)
12219 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12220 && memcmp (dirs[ndirs - 1].path, files[i].path,
12221 dirs[ndirs - 1].length) == 0)
12222 {
12223 /* Same directory as last entry. */
12224 files[i].dir_idx = ndirs - 1;
12225 ++dirs[ndirs - 1].count;
12226 }
12227 else
12228 {
12229 int j;
12230
12231 /* This is a new directory. */
12232 dirs[ndirs].path = files[i].path;
12233 dirs[ndirs].length = files[i].fname - files[i].path;
12234 dirs[ndirs].count = 1;
12235 dirs[ndirs].dir_idx = ndirs;
12236 files[i].dir_idx = ndirs;
12237
12238 /* Search for a prefix. */
12239 dirs[ndirs].prefix = -1;
12240 for (j = 0; j < ndirs; j++)
12241 if (dirs[j].length < dirs[ndirs].length
12242 && dirs[j].length > 1
12243 && (dirs[ndirs].prefix == -1
12244 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12245 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12246 dirs[ndirs].prefix = j;
12247
12248 ++ndirs;
12249 }
12250
12251 /* Now to the actual work. We have to find a subset of the directories which
12252 allows expressing each file name using references to the directory table
12253 with the fewest characters. We do not do an exhaustive search where we
12254 would have to check every combination of every single possible prefix.
12255 Instead we use a heuristic which provides nearly optimal results in most
12256 cases and is never far off. */
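/* An illustrative case: if dirs[i] is "/usr/src/" and dirs[j] is
   "/usr/src/lib/", files under dirs[j] can be emitted relative to
   dirs[i] (e.g. as "lib/foo.c"), saving strlen ("/usr/src/")
   characters per file; the saved[]/savehere[] arrays below track such
   potential savings per directory.  */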
12257 saved = XALLOCAVEC (int, ndirs);
12258 savehere = XALLOCAVEC (int, ndirs);
12259
12260 memset (saved, '\0', ndirs * sizeof (saved[0]));
12261 for (i = 0; i < ndirs; i++)
12262 {
12263 int j;
12264 int total;
12265
12266 /* We can always save some space for the current directory. But this
12267 does not mean it will be enough to justify adding the directory. */
12268 savehere[i] = dirs[i].length;
12269 total = (savehere[i] - saved[i]) * dirs[i].count;
12270
12271 for (j = i + 1; j < ndirs; j++)
12272 {
12273 savehere[j] = 0;
12274 if (saved[j] < dirs[i].length)
12275 {
12276 /* Determine whether the dirs[i] path is a prefix of the
12277 dirs[j] path. */
12278 int k;
12279
12280 k = dirs[j].prefix;
12281 while (k != -1 && k != (int) i)
12282 k = dirs[k].prefix;
12283
12284 if (k == (int) i)
12285 {
12286 /* Yes it is. We can possibly save some space by
12287 writing the filenames in dirs[j] relative to
12288 dirs[i]. */
12289 savehere[j] = dirs[i].length;
12290 total += (savehere[j] - saved[j]) * dirs[j].count;
12291 }
12292 }
12293 }
12294
12295 /* Check whether we can save enough to justify adding the dirs[i]
12296 directory. */
12297 if (total > dirs[i].length + 1)
12298 {
12299 /* It's worthwhile adding. */
12300 for (j = i; j < ndirs; j++)
12301 if (savehere[j] > 0)
12302 {
12303 /* Remember how much we saved for this directory so far. */
12304 saved[j] = savehere[j];
12305
12306 /* Remember the prefix directory. */
12307 dirs[j].dir_idx = i;
12308 }
12309 }
12310 }
12311
12312 /* Emit the directory name table. */
12313 idx_offset = dirs[0].length > 0 ? 1 : 0;
12314 enum dwarf_form str_form = DW_FORM_string;
12315 enum dwarf_form idx_form = DW_FORM_udata;
12316 if (dwarf_version >= 5)
12317 {
12318 const char *comp_dir = comp_dir_string ();
12319 if (comp_dir == NULL)
12320 comp_dir = "";
12321 dw2_asm_output_data (1, 1, "Directory entry format count");
12322 if (DWARF5_USE_DEBUG_LINE_STR)
12323 str_form = DW_FORM_line_strp;
12324 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12325 dw2_asm_output_data_uleb128 (str_form, "%s",
12326 get_DW_FORM_name (str_form));
12327 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12328 if (str_form == DW_FORM_string)
12329 {
12330 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12331 for (i = 1 - idx_offset; i < ndirs; i++)
12332 dw2_asm_output_nstring (dirs[i].path,
12333 dirs[i].length
12334 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12335 "Directory Entry: %#x", i + idx_offset);
12336 }
12337 else
12338 {
12339 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12340 for (i = 1 - idx_offset; i < ndirs; i++)
12341 {
12342 const char *str
12343 = ggc_alloc_string (dirs[i].path,
12344 dirs[i].length
12345 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12346 output_line_string (str_form, str, "Directory Entry",
12347 (unsigned) i + idx_offset);
12348 }
12349 }
12350 }
12351 else
12352 {
12353 for (i = 1 - idx_offset; i < ndirs; i++)
12354 dw2_asm_output_nstring (dirs[i].path,
12355 dirs[i].length
12356 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12357 "Directory Entry: %#x", i + idx_offset);
12358
12359 dw2_asm_output_data (1, 0, "End directory table");
12360 }
12361
12362 /* We have to emit them in the order of emitted_number since that's
12363 used in the debug info generation. To do this efficiently we
12364 generate a back-mapping of the indices first. */
12365 backmap = XALLOCAVEC (int, numfiles);
12366 for (i = 0; i < numfiles; i++)
12367 backmap[files[i].file_idx->emitted_number - 1] = i;
12368
12369 if (dwarf_version >= 5)
12370 {
12371 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12372 if (filename0 == NULL)
12373 filename0 = "";
12374 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12375 DW_FORM_data2. Choose one based on the number of directories
12376 and how much space they would occupy in each encoding.
12377 If we have at most 256 directories, all indexes fit into
12378 a single byte, so DW_FORM_data1 is most compact (if there
12379 are at most 128 directories, DW_FORM_udata would be just as
12380 compact, but no shorter, and slower to decode). */
12381 if (ndirs + idx_offset <= 256)
12382 idx_form = DW_FORM_data1;
12383 /* If there are more than 65536 directories, we have to use
12384 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12385 Otherwise, compute how much space all the indexes would occupy
12386 if they used DW_FORM_udata (sum), compare that to the size of
12387 the DW_FORM_data2 encoding, and pick the more efficient one. */
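/* E.g. with 300 directories, DW_FORM_data2 always costs two bytes per
   file entry, while DW_FORM_udata costs one byte for indexes below
   128 and two bytes up to 16383; the sum computed below picks
   whichever is smaller for the actual distribution of indexes.  */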
12388 else if (ndirs + idx_offset <= 65536)
12389 {
12390 unsigned HOST_WIDE_INT sum = 1;
12391 for (i = 0; i < numfiles; i++)
12392 {
12393 int file_idx = backmap[i];
12394 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12395 sum += size_of_uleb128 (dir_idx);
12396 }
12397 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12398 idx_form = DW_FORM_data2;
12399 }
12400 #ifdef VMS_DEBUGGING_INFO
12401 dw2_asm_output_data (1, 4, "File name entry format count");
12402 #else
12403 dw2_asm_output_data (1, 2, "File name entry format count");
12404 #endif
12405 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12406 dw2_asm_output_data_uleb128 (str_form, "%s",
12407 get_DW_FORM_name (str_form));
12408 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12409 "DW_LNCT_directory_index");
12410 dw2_asm_output_data_uleb128 (idx_form, "%s",
12411 get_DW_FORM_name (idx_form));
12412 #ifdef VMS_DEBUGGING_INFO
12413 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12414 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12415 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12416 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12417 #endif
12418 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12419
12420 output_line_string (str_form, filename0, "File Entry", 0);
12421
12422 /* Include directory index. */
12423 if (idx_form != DW_FORM_udata)
12424 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12425 0, NULL);
12426 else
12427 dw2_asm_output_data_uleb128 (0, NULL);
12428
12429 #ifdef VMS_DEBUGGING_INFO
12430 dw2_asm_output_data_uleb128 (0, NULL);
12431 dw2_asm_output_data_uleb128 (0, NULL);
12432 #endif
12433 }
12434
12435 /* Now write all the file names. */
12436 for (i = 0; i < numfiles; i++)
12437 {
12438 int file_idx = backmap[i];
12439 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12440
12441 #ifdef VMS_DEBUGGING_INFO
12442 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12443
12444 /* Setting these fields can lead to debugger miscomparisons,
12445 but VMS Debug requires them to be set correctly. */
12446
12447 int ver;
12448 long long cdt;
12449 long siz;
12450 int maxfilelen = (strlen (files[file_idx].path)
12451 + dirs[dir_idx].length
12452 + MAX_VMS_VERSION_LEN + 1);
12453 char *filebuf = XALLOCAVEC (char, maxfilelen);
12454
12455 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12456 snprintf (filebuf, maxfilelen, "%s;%d",
12457 files[file_idx].path + dirs[dir_idx].length, ver);
12458
12459 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12460
12461 /* Include directory index. */
12462 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12463 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12464 dir_idx + idx_offset, NULL);
12465 else
12466 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12467
12468 /* Modification time. */
12469 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12470 &cdt, 0, 0, 0) == 0)
12471 ? cdt : 0, NULL);
12472
12473 /* File length in bytes. */
12474 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12475 0, &siz, 0, 0) == 0)
12476 ? siz : 0, NULL);
12477 #else
12478 output_line_string (str_form,
12479 files[file_idx].path + dirs[dir_idx].length,
12480 "File Entry", (unsigned) i + 1);
12481
12482 /* Include directory index. */
12483 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12484 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12485 dir_idx + idx_offset, NULL);
12486 else
12487 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12488
12489 if (dwarf_version >= 5)
12490 continue;
12491
12492 /* Modification time. */
12493 dw2_asm_output_data_uleb128 (0, NULL);
12494
12495 /* File length in bytes. */
12496 dw2_asm_output_data_uleb128 (0, NULL);
12497 #endif /* VMS_DEBUGGING_INFO */
12498 }
12499
12500 if (dwarf_version < 5)
12501 dw2_asm_output_data (1, 0, "End file name table");
12502 }
12503
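/* Illustrative sketch only, not part of the original file: the directory
   index form choice above boils down to comparing the total ULEB128 size
   of the indexes against two bytes per entry.  The helpers below are
   hypothetical and use the standard ULEB128 rule of 7 payload bits per
   byte; the real code also accounts for the extra zeroth file entry.  */

static unsigned int
example_uleb128_size (unsigned long long value)
{
  unsigned int size = 1;
  while (value >= 0x80)
    {
      value >>= 7;
      size++;
    }
  return size;
}

/* Return nonzero if encoding the N_ENTRIES indexes in DIR_INDEX as
   DW_FORM_udata would be no larger than DW_FORM_data2 (2 bytes each).  */

static int
example_prefer_udata (const unsigned int *dir_index, unsigned int n_entries)
{
  unsigned long long udata_total = 0;
  unsigned int i;
  for (i = 0; i < n_entries; i++)
    udata_total += example_uleb128_size (dir_index[i]);
  return udata_total < 2ULL * n_entries;
}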
12504
12505 /* Output one line number table into the .debug_line section. */
12506
12507 static void
12508 output_one_line_info_table (dw_line_info_table *table)
12509 {
12510 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12511 unsigned int current_line = 1;
12512 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12513 dw_line_info_entry *ent, *prev_addr;
12514 size_t i;
12515 unsigned int view;
12516
12517 view = 0;
12518
12519 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12520 {
12521 switch (ent->opcode)
12522 {
12523 case LI_set_address:
12524 /* ??? Unfortunately, we have little choice here currently, and
12525 must always use the most general form. GCC does not know the
12526 address delta itself, so we can't use DW_LNS_advance_pc. Many
12527 ports do have length attributes which will give an upper bound
12528 on the address range. We could perhaps use length attributes
12529 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12530 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12531
12532 view = 0;
12533
12534 /* This can handle any delta. This takes
12535 4+DWARF2_ADDR_SIZE bytes. */
12536 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12537 debug_variable_location_views
12538 ? ", reset view to 0" : "");
12539 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12540 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12541 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12542
12543 prev_addr = ent;
12544 break;
12545
12546 case LI_adv_address:
12547 {
12548 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12549 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12550 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12551
12552 view++;
12553
12554 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12555 dw2_asm_output_delta (2, line_label, prev_label,
12556 "from %s to %s", prev_label, line_label);
12557
12558 prev_addr = ent;
12559 break;
12560 }
12561
12562 case LI_set_line:
12563 if (ent->val == current_line)
12564 {
12565 /* We still need to start a new row, so output a copy insn. */
12566 dw2_asm_output_data (1, DW_LNS_copy,
12567 "copy line %u", current_line);
12568 }
12569 else
12570 {
12571 int line_offset = ent->val - current_line;
12572 int line_delta = line_offset - DWARF_LINE_BASE;
12573
12574 current_line = ent->val;
12575 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12576 {
12577 /* This can handle deltas from -10 to 234, using the current
12578 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12579 This takes 1 byte. */
12580 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12581 "line %u", current_line);
12582 }
12583 else
12584 {
12585 /* This can handle any delta. This takes at least 4 bytes,
12586 depending on the value being encoded. */
12587 dw2_asm_output_data (1, DW_LNS_advance_line,
12588 "advance to line %u", current_line);
12589 dw2_asm_output_data_sleb128 (line_offset, NULL);
12590 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12591 }
12592 }
12593 break;
12594
12595 case LI_set_file:
12596 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12597 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12598 break;
12599
12600 case LI_set_column:
12601 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12602 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12603 break;
12604
12605 case LI_negate_stmt:
12606 current_is_stmt = !current_is_stmt;
12607 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12608 "is_stmt %d", current_is_stmt);
12609 break;
12610
12611 case LI_set_prologue_end:
12612 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12613 "set prologue end");
12614 break;
12615
12616 case LI_set_epilogue_begin:
12617 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12618 "set epilogue begin");
12619 break;
12620
12621 case LI_set_discriminator:
12622 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12623 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12624 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12625 dw2_asm_output_data_uleb128 (ent->val, NULL);
12626 break;
12627 }
12628 }
12629
12630 /* Emit debug info for the address of the end of the table. */
12631 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12632 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12633 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12634 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12635
12636 dw2_asm_output_data (1, 0, "end sequence");
12637 dw2_asm_output_data_uleb128 (1, NULL);
12638 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12639 }
12640
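/* Worked example, illustration only: how the LI_set_line case above folds
   a small line advance into one special opcode.  With no address advance
   (GCC advances the address separately with DW_LNS_fixed_advance_pc), the
   DWARF rule reduces to opcode_base + (line_inc - line_base), which is the
   one-byte DWARF_LINE_OPCODE_BASE + line_delta emission above.  The helper
   and the concrete parameter values below are hypothetical.  */

static int
example_special_opcode (int line_inc, int line_base, int line_range,
			int opcode_base)
{
  int adjusted = line_inc - line_base;
  /* Mirror the fallback test above: out-of-range advances go through
     DW_LNS_advance_line instead.  */
  if (adjusted < 0 || adjusted >= line_range - 1)
    return -1;
  /* E.g. line_base -10, opcode_base 13: a +1 line advance yields
     opcode 13 + 11 = 24.  */
  return opcode_base + adjusted;
}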
12641 /* Output the source line number correspondence information. This
12642 information goes into the .debug_line section. */
12643
12644 static void
12645 output_line_info (bool prologue_only)
12646 {
12647 static unsigned int generation;
12648 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12649 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12650 bool saw_one = false;
12651 int opc;
12652
12653 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12654 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12655 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12656 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12657
12658 if (!XCOFF_DEBUGGING_INFO)
12659 {
12660 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12661 dw2_asm_output_data (4, 0xffffffff,
12662 "Initial length escape value indicating 64-bit DWARF extension");
12663 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12664 "Length of Source Line Info");
12665 }
12666
12667 ASM_OUTPUT_LABEL (asm_out_file, l1);
12668
12669 output_dwarf_version ();
12670 if (dwarf_version >= 5)
12671 {
12672 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12673 dw2_asm_output_data (1, 0, "Segment Size");
12674 }
12675 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12676 ASM_OUTPUT_LABEL (asm_out_file, p1);
12677
12678 /* Define the architecture-dependent minimum instruction length (in bytes).
12679 In this implementation of DWARF, this field is used for information
12680 purposes only. Since GCC generates assembly language, we have no
12681 a priori knowledge of how many instruction bytes are generated for each
12682 source line, and therefore can use only the DW_LNE_set_address and
12683 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12684 this as '1', which is "correct enough" for all architectures,
12685 and don't let the target override. */
12686 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12687
12688 if (dwarf_version >= 4)
12689 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12690 "Maximum Operations Per Instruction");
12691 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12692 "Default is_stmt_start flag");
12693 dw2_asm_output_data (1, DWARF_LINE_BASE,
12694 "Line Base Value (Special Opcodes)");
12695 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12696 "Line Range Value (Special Opcodes)");
12697 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12698 "Special Opcode Base");
12699
12700 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12701 {
12702 int n_op_args;
12703 switch (opc)
12704 {
12705 case DW_LNS_advance_pc:
12706 case DW_LNS_advance_line:
12707 case DW_LNS_set_file:
12708 case DW_LNS_set_column:
12709 case DW_LNS_fixed_advance_pc:
12710 case DW_LNS_set_isa:
12711 n_op_args = 1;
12712 break;
12713 default:
12714 n_op_args = 0;
12715 break;
12716 }
12717
12718 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12719 opc, n_op_args);
12720 }
12721
12722 /* Write out the information about the files we use. */
12723 output_file_names ();
12724 ASM_OUTPUT_LABEL (asm_out_file, p2);
12725 if (prologue_only)
12726 {
12727 /* Output the marker for the end of the line number info. */
12728 ASM_OUTPUT_LABEL (asm_out_file, l2);
12729 return;
12730 }
12731
12732 if (separate_line_info)
12733 {
12734 dw_line_info_table *table;
12735 size_t i;
12736
12737 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12738 if (table->in_use)
12739 {
12740 output_one_line_info_table (table);
12741 saw_one = true;
12742 }
12743 }
12744 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12745 {
12746 output_one_line_info_table (cold_text_section_line_info);
12747 saw_one = true;
12748 }
12749
12750 /* ??? Some Darwin linkers crash on a .debug_line section with no
12751 sequences. Further, merely a DW_LNE_end_sequence entry is not
12752 sufficient -- the address column must also be initialized.
12753 Make sure to output at least one set_address/end_sequence pair,
12754 choosing .text since that section is always present. */
12755 if (text_section_line_info->in_use || !saw_one)
12756 output_one_line_info_table (text_section_line_info);
12757
12758 /* Output the marker for the end of the line number info. */
12759 ASM_OUTPUT_LABEL (asm_out_file, l2);
12760 }
12761 \f
12762 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12763
12764 static inline bool
12765 need_endianity_attribute_p (bool reverse)
12766 {
12767 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12768 }
12769
12770 /* Given a pointer to a tree node for some base type, return a pointer to
12771 a DIE that describes the given type. REVERSE is true if the type is
12772 to be interpreted in the reverse storage order wrt the target order.
12773
12774 This routine must only be called for GCC type nodes that correspond to
12775 Dwarf base (fundamental) types. */
12776
12777 static dw_die_ref
12778 base_type_die (tree type, bool reverse)
12779 {
12780 dw_die_ref base_type_result;
12781 enum dwarf_type encoding;
12782 bool fpt_used = false;
12783 struct fixed_point_type_info fpt_info;
12784 tree type_bias = NULL_TREE;
12785
12786 /* If this is a subtype that should not be emitted as a subrange type,
12787 use the base type. See subrange_type_for_debug_p. */
12788 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12789 type = TREE_TYPE (type);
12790
12791 switch (TREE_CODE (type))
12792 {
12793 case INTEGER_TYPE:
12794 if ((dwarf_version >= 4 || !dwarf_strict)
12795 && TYPE_NAME (type)
12796 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12797 && DECL_IS_BUILTIN (TYPE_NAME (type))
12798 && DECL_NAME (TYPE_NAME (type)))
12799 {
12800 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12801 if (strcmp (name, "char16_t") == 0
12802 || strcmp (name, "char32_t") == 0)
12803 {
12804 encoding = DW_ATE_UTF;
12805 break;
12806 }
12807 }
12808 if ((dwarf_version >= 3 || !dwarf_strict)
12809 && lang_hooks.types.get_fixed_point_type_info)
12810 {
12811 memset (&fpt_info, 0, sizeof (fpt_info));
12812 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12813 {
12814 fpt_used = true;
12815 encoding = ((TYPE_UNSIGNED (type))
12816 ? DW_ATE_unsigned_fixed
12817 : DW_ATE_signed_fixed);
12818 break;
12819 }
12820 }
12821 if (TYPE_STRING_FLAG (type))
12822 {
12823 if (TYPE_UNSIGNED (type))
12824 encoding = DW_ATE_unsigned_char;
12825 else
12826 encoding = DW_ATE_signed_char;
12827 }
12828 else if (TYPE_UNSIGNED (type))
12829 encoding = DW_ATE_unsigned;
12830 else
12831 encoding = DW_ATE_signed;
12832
12833 if (!dwarf_strict
12834 && lang_hooks.types.get_type_bias)
12835 type_bias = lang_hooks.types.get_type_bias (type);
12836 break;
12837
12838 case REAL_TYPE:
12839 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12840 {
12841 if (dwarf_version >= 3 || !dwarf_strict)
12842 encoding = DW_ATE_decimal_float;
12843 else
12844 encoding = DW_ATE_lo_user;
12845 }
12846 else
12847 encoding = DW_ATE_float;
12848 break;
12849
12850 case FIXED_POINT_TYPE:
12851 if (!(dwarf_version >= 3 || !dwarf_strict))
12852 encoding = DW_ATE_lo_user;
12853 else if (TYPE_UNSIGNED (type))
12854 encoding = DW_ATE_unsigned_fixed;
12855 else
12856 encoding = DW_ATE_signed_fixed;
12857 break;
12858
12859 /* Dwarf2 doesn't know anything about complex ints, so use
12860 a user-defined type for them. */
12861 case COMPLEX_TYPE:
12862 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12863 encoding = DW_ATE_complex_float;
12864 else
12865 encoding = DW_ATE_lo_user;
12866 break;
12867
12868 case BOOLEAN_TYPE:
12869 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12870 encoding = DW_ATE_boolean;
12871 break;
12872
12873 default:
12874 /* No other TREE_CODEs are Dwarf fundamental types. */
12875 gcc_unreachable ();
12876 }
12877
12878 base_type_result = new_die_raw (DW_TAG_base_type);
12879
12880 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12881 int_size_in_bytes (type));
12882 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12883
12884 if (need_endianity_attribute_p (reverse))
12885 add_AT_unsigned (base_type_result, DW_AT_endianity,
12886 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12887
12888 add_alignment_attribute (base_type_result, type);
12889
12890 if (fpt_used)
12891 {
12892 switch (fpt_info.scale_factor_kind)
12893 {
12894 case fixed_point_scale_factor_binary:
12895 add_AT_int (base_type_result, DW_AT_binary_scale,
12896 fpt_info.scale_factor.binary);
12897 break;
12898
12899 case fixed_point_scale_factor_decimal:
12900 add_AT_int (base_type_result, DW_AT_decimal_scale,
12901 fpt_info.scale_factor.decimal);
12902 break;
12903
12904 case fixed_point_scale_factor_arbitrary:
12905 /* Arbitrary scale factors cannot be described in standard DWARF,
12906 yet. */
12907 if (!dwarf_strict)
12908 {
12909 /* Describe the scale factor as a rational constant. */
12910 const dw_die_ref scale_factor
12911 = new_die (DW_TAG_constant, comp_unit_die (), type);
12912
12913 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12914 fpt_info.scale_factor.arbitrary.numerator);
12915 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12916 fpt_info.scale_factor.arbitrary.denominator);
12917
12918 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12919 }
12920 break;
12921
12922 default:
12923 gcc_unreachable ();
12924 }
12925 }
12926
12927 if (type_bias)
12928 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12929 dw_scalar_form_constant
12930 | dw_scalar_form_exprloc
12931 | dw_scalar_form_reference,
12932 NULL);
12933
12934 return base_type_result;
12935 }
12936
12937 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12938 named 'auto' in its type: return true for it, false otherwise. */
12939
12940 static inline bool
12941 is_cxx_auto (tree type)
12942 {
12943 if (is_cxx ())
12944 {
12945 tree name = TYPE_IDENTIFIER (type);
12946 if (name == get_identifier ("auto")
12947 || name == get_identifier ("decltype(auto)"))
12948 return true;
12949 }
12950 return false;
12951 }
12952
12953 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12954 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12955
12956 static inline int
12957 is_base_type (tree type)
12958 {
12959 switch (TREE_CODE (type))
12960 {
12961 case INTEGER_TYPE:
12962 case REAL_TYPE:
12963 case FIXED_POINT_TYPE:
12964 case COMPLEX_TYPE:
12965 case BOOLEAN_TYPE:
12966 return 1;
12967
12968 case VOID_TYPE:
12969 case ARRAY_TYPE:
12970 case RECORD_TYPE:
12971 case UNION_TYPE:
12972 case QUAL_UNION_TYPE:
12973 case ENUMERAL_TYPE:
12974 case FUNCTION_TYPE:
12975 case METHOD_TYPE:
12976 case POINTER_TYPE:
12977 case REFERENCE_TYPE:
12978 case NULLPTR_TYPE:
12979 case OFFSET_TYPE:
12980 case LANG_TYPE:
12981 case VECTOR_TYPE:
12982 return 0;
12983
12984 default:
12985 if (is_cxx_auto (type))
12986 return 0;
12987 gcc_unreachable ();
12988 }
12989
12990 return 0;
12991 }
12992
12993 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12994 node, return the size in bits for the type if it is a constant, or else
12995 return the alignment for the type if the type's size is not constant, or
12996 else return BITS_PER_WORD if the type actually turns out to be an
12997 ERROR_MARK node. */
12998
12999 static inline unsigned HOST_WIDE_INT
13000 simple_type_size_in_bits (const_tree type)
13001 {
13002 if (TREE_CODE (type) == ERROR_MARK)
13003 return BITS_PER_WORD;
13004 else if (TYPE_SIZE (type) == NULL_TREE)
13005 return 0;
13006 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13007 return tree_to_uhwi (TYPE_SIZE (type));
13008 else
13009 return TYPE_ALIGN (type);
13010 }
13011
13012 /* Similarly, but return an offset_int instead of UHWI. */
13013
13014 static inline offset_int
13015 offset_int_type_size_in_bits (const_tree type)
13016 {
13017 if (TREE_CODE (type) == ERROR_MARK)
13018 return BITS_PER_WORD;
13019 else if (TYPE_SIZE (type) == NULL_TREE)
13020 return 0;
13021 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13022 return wi::to_offset (TYPE_SIZE (type));
13023 else
13024 return TYPE_ALIGN (type);
13025 }
13026
13027 /* Given a pointer to a tree node for a subrange type, return a pointer
13028 to a DIE that describes the given type. */
13029
13030 static dw_die_ref
13031 subrange_type_die (tree type, tree low, tree high, tree bias,
13032 dw_die_ref context_die)
13033 {
13034 dw_die_ref subrange_die;
13035 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13036
13037 if (context_die == NULL)
13038 context_die = comp_unit_die ();
13039
13040 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13041
13042 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13043 {
13044 /* The size of the subrange type and its base type do not match,
13045 so we need to generate a size attribute for the subrange type. */
13046 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13047 }
13048
13049 add_alignment_attribute (subrange_die, type);
13050
13051 if (low)
13052 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13053 if (high)
13054 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13055 if (bias && !dwarf_strict)
13056 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13057 dw_scalar_form_constant
13058 | dw_scalar_form_exprloc
13059 | dw_scalar_form_reference,
13060 NULL);
13061
13062 return subrange_die;
13063 }
13064
13065 /* Returns the (const and/or volatile) cv_qualifiers associated with
13066 the decl node. This will normally be augmented with the
13067 cv_qualifiers of the underlying type in add_type_attribute. */
13068
13069 static int
13070 decl_quals (const_tree decl)
13071 {
13072 return ((TREE_READONLY (decl)
13073 /* The C++ front-end correctly marks reference-typed
13074 variables as readonly, but from a language (and debug
13075 info) standpoint they are not const-qualified. */
13076 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13077 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13078 | (TREE_THIS_VOLATILE (decl)
13079 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13080 }
13081
13082 /* Determine the TYPE whose qualifiers match the largest strict subset
13083 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13084 qualifiers outside QUAL_MASK. */
13085
13086 static int
13087 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13088 {
13089 tree t;
13090 int best_rank = 0, best_qual = 0, max_rank;
13091
13092 type_quals &= qual_mask;
13093 max_rank = popcount_hwi (type_quals) - 1;
13094
13095 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13096 t = TYPE_NEXT_VARIANT (t))
13097 {
13098 int q = TYPE_QUALS (t) & qual_mask;
13099
13100 if ((q & type_quals) == q && q != type_quals
13101 && check_base_type (t, type))
13102 {
13103 int rank = popcount_hwi (q);
13104
13105 if (rank > best_rank)
13106 {
13107 best_rank = rank;
13108 best_qual = q;
13109 }
13110 }
13111 }
13112
13113 return best_qual;
13114 }
13115
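/* Illustrative sketch with hypothetical names, not part of the code above:
   the search get_nearest_type_subqualifiers performs, reduced to plain
   bitmask arithmetic over an explicit candidate list instead of the
   TYPE_NEXT_VARIANT chain (and ignoring QUAL_MASK and check_base_type).  */

static int
example_popcount (int x)
{
  int n = 0;
  while (x)
    {
      n += x & 1;
      x >>= 1;
    }
  return n;
}

/* Among CANDIDATES[0..N-1], return the qualifier set with the most bits
   that is a strict subset of QUALS, or 0 if there is none.  */

static int
example_nearest_subset (int quals, const int *candidates, int n)
{
  int best = 0, best_rank = 0;
  int j;
  for (j = 0; j < n; j++)
    {
      int q = candidates[j];
      if ((q & quals) == q && q != quals && example_popcount (q) > best_rank)
	{
	  best_rank = example_popcount (q);
	  best = q;
	}
    }
  return best;
}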
13116 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13117 static const dwarf_qual_info_t dwarf_qual_info[] =
13118 {
13119 { TYPE_QUAL_CONST, DW_TAG_const_type },
13120 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13121 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13122 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13123 };
13124 static const unsigned int dwarf_qual_info_size
13125 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13126
13127 /* If DIE is a qualified DIE of some base DIE with the same parent,
13128 return the base DIE, otherwise return NULL. Set MASK to the
13129 qualifiers added compared to the returned DIE. */
13130
13131 static dw_die_ref
13132 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13133 {
13134 unsigned int i;
13135 for (i = 0; i < dwarf_qual_info_size; i++)
13136 if (die->die_tag == dwarf_qual_info[i].t)
13137 break;
13138 if (i == dwarf_qual_info_size)
13139 return NULL;
13140 if (vec_safe_length (die->die_attr) != 1)
13141 return NULL;
13142 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13143 if (type == NULL || type->die_parent != die->die_parent)
13144 return NULL;
13145 *mask |= dwarf_qual_info[i].q;
13146 if (depth)
13147 {
13148 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13149 if (ret)
13150 return ret;
13151 }
13152 return type;
13153 }
13154
13155 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13156 entry that chains the modifiers specified by CV_QUALS in front of the
13157 given type. REVERSE is true if the type is to be interpreted in the
13158 reverse storage order wrt the target order. */
13159
13160 static dw_die_ref
13161 modified_type_die (tree type, int cv_quals, bool reverse,
13162 dw_die_ref context_die)
13163 {
13164 enum tree_code code = TREE_CODE (type);
13165 dw_die_ref mod_type_die;
13166 dw_die_ref sub_die = NULL;
13167 tree item_type = NULL;
13168 tree qualified_type;
13169 tree name, low, high;
13170 dw_die_ref mod_scope;
13171 /* Only these cv-qualifiers are currently handled. */
13172 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13173 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13174 ENCODE_QUAL_ADDR_SPACE(~0U));
13175 const bool reverse_base_type
13176 = need_endianity_attribute_p (reverse) && is_base_type (type);
13177
13178 if (code == ERROR_MARK)
13179 return NULL;
13180
13181 if (lang_hooks.types.get_debug_type)
13182 {
13183 tree debug_type = lang_hooks.types.get_debug_type (type);
13184
13185 if (debug_type != NULL_TREE && debug_type != type)
13186 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13187 }
13188
13189 cv_quals &= cv_qual_mask;
13190
13191 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13192 tag modifier (and not an attribute) that old consumers won't be
13193 able to handle. */
13194 if (dwarf_version < 3)
13195 cv_quals &= ~TYPE_QUAL_RESTRICT;
13196
13197 /* Likewise, don't emit DW_TAG_atomic_type before DWARFv5. */
13198 if (dwarf_version < 5)
13199 cv_quals &= ~TYPE_QUAL_ATOMIC;
13200
13201 /* See if we already have the appropriately qualified variant of
13202 this type. */
13203 qualified_type = get_qualified_type (type, cv_quals);
13204
13205 if (qualified_type == sizetype)
13206 {
13207 /* Try not to expose the internal sizetype type's name. */
13208 if (TYPE_NAME (qualified_type)
13209 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13210 {
13211 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13212
13213 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13214 && (TYPE_PRECISION (t)
13215 == TYPE_PRECISION (qualified_type))
13216 && (TYPE_UNSIGNED (t)
13217 == TYPE_UNSIGNED (qualified_type)));
13218 qualified_type = t;
13219 }
13220 else if (qualified_type == sizetype
13221 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13222 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13223 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13224 qualified_type = size_type_node;
13225 if (type == sizetype)
13226 type = qualified_type;
13227 }
13228
13229 /* If we do, then we can just use its DIE, if it exists. */
13230 if (qualified_type)
13231 {
13232 mod_type_die = lookup_type_die (qualified_type);
13233
13234 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13235 dealt with specially: the DIE with the attribute, if it exists, is
13236 placed immediately after the regular DIE for the same base type. */
13237 if (mod_type_die
13238 && (!reverse_base_type
13239 || ((mod_type_die = mod_type_die->die_sib) != NULL
13240 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13241 return mod_type_die;
13242 }
13243
13244 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13245
13246 /* Handle C typedef types. */
13247 if (name
13248 && TREE_CODE (name) == TYPE_DECL
13249 && DECL_ORIGINAL_TYPE (name)
13250 && !DECL_ARTIFICIAL (name))
13251 {
13252 tree dtype = TREE_TYPE (name);
13253
13254 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13255 if (qualified_type == dtype && !reverse_base_type)
13256 {
13257 tree origin = decl_ultimate_origin (name);
13258
13259 /* Typedef variants that have an abstract origin don't get their own
13260 type DIE (see gen_typedef_die), so fall back on the ultimate
13261 abstract origin instead. */
13262 if (origin != NULL && origin != name)
13263 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13264 context_die);
13265
13266 /* For a named type, use the typedef. */
13267 gen_type_die (qualified_type, context_die);
13268 return lookup_type_die (qualified_type);
13269 }
13270 else
13271 {
13272 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13273 dquals &= cv_qual_mask;
13274 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13275 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13276 /* cv-unqualified version of named type. Just use
13277 the unnamed type to which it refers. */
13278 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13279 reverse, context_die);
13280 /* Else cv-qualified version of named type; fall through. */
13281 }
13282 }
13283
13284 mod_scope = scope_die_for (type, context_die);
13285
13286 if (cv_quals)
13287 {
13288 int sub_quals = 0, first_quals = 0;
13289 unsigned i;
13290 dw_die_ref first = NULL, last = NULL;
13291
13292 /* Determine a lesser qualified type that most closely matches
13293 this one. Then generate DW_TAG_* entries for the remaining
13294 qualifiers. */
13295 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13296 cv_qual_mask);
13297 if (sub_quals && use_debug_types)
13298 {
13299 bool needed = false;
13300 /* If emitting type units, make sure the order of qualifiers
13301 is canonical. Thus, start from unqualified type if
13302 an earlier qualifier is missing in sub_quals, but some later
13303 one is present there. */
13304 for (i = 0; i < dwarf_qual_info_size; i++)
13305 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13306 needed = true;
13307 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13308 {
13309 sub_quals = 0;
13310 break;
13311 }
13312 }
13313 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13314 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13315 {
13316 /* As not all intermediate qualified DIEs have corresponding
13317 tree types, ensure that qualified DIEs in the same scope
13318 as their DW_AT_type are emitted after their DW_AT_type,
13319 only with other qualified DIEs for the same type possibly
13320 in between them. Determine the range of such qualified
13321 DIEs now (first being the base type, last being the corresponding
13322 last qualified DIE for it). */
13323 unsigned int count = 0;
13324 first = qualified_die_p (mod_type_die, &first_quals,
13325 dwarf_qual_info_size);
13326 if (first == NULL)
13327 first = mod_type_die;
13328 gcc_assert ((first_quals & ~sub_quals) == 0);
13329 for (count = 0, last = first;
13330 count < (1U << dwarf_qual_info_size);
13331 count++, last = last->die_sib)
13332 {
13333 int quals = 0;
13334 if (last == mod_scope->die_child)
13335 break;
13336 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13337 != first)
13338 break;
13339 }
13340 }
13341
13342 for (i = 0; i < dwarf_qual_info_size; i++)
13343 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13344 {
13345 dw_die_ref d;
13346 if (first && first != last)
13347 {
13348 for (d = first->die_sib; ; d = d->die_sib)
13349 {
13350 int quals = 0;
13351 qualified_die_p (d, &quals, dwarf_qual_info_size);
13352 if (quals == (first_quals | dwarf_qual_info[i].q))
13353 break;
13354 if (d == last)
13355 {
13356 d = NULL;
13357 break;
13358 }
13359 }
13360 if (d)
13361 {
13362 mod_type_die = d;
13363 continue;
13364 }
13365 }
13366 if (first)
13367 {
13368 d = new_die_raw (dwarf_qual_info[i].t);
13369 add_child_die_after (mod_scope, d, last);
13370 last = d;
13371 }
13372 else
13373 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13374 if (mod_type_die)
13375 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13376 mod_type_die = d;
13377 first_quals |= dwarf_qual_info[i].q;
13378 }
13379 }
13380 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13381 {
13382 dwarf_tag tag = DW_TAG_pointer_type;
13383 if (code == REFERENCE_TYPE)
13384 {
13385 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13386 tag = DW_TAG_rvalue_reference_type;
13387 else
13388 tag = DW_TAG_reference_type;
13389 }
13390 mod_type_die = new_die (tag, mod_scope, type);
13391
13392 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13393 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13394 add_alignment_attribute (mod_type_die, type);
13395 item_type = TREE_TYPE (type);
13396
13397 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13398 if (!ADDR_SPACE_GENERIC_P (as))
13399 {
13400 int action = targetm.addr_space.debug (as);
13401 if (action >= 0)
13402 {
13403 /* Positive values indicate an address_class. */
13404 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13405 }
13406 else
13407 {
13408 /* Negative values indicate an (inverted) segment base reg. */
13409 dw_loc_descr_ref d
13410 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13411 add_AT_loc (mod_type_die, DW_AT_segment, d);
13412 }
13413 }
13414 }
13415 else if (code == INTEGER_TYPE
13416 && TREE_TYPE (type) != NULL_TREE
13417 && subrange_type_for_debug_p (type, &low, &high))
13418 {
13419 tree bias = NULL_TREE;
13420 if (lang_hooks.types.get_type_bias)
13421 bias = lang_hooks.types.get_type_bias (type);
13422 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13423 item_type = TREE_TYPE (type);
13424 }
13425 else if (is_base_type (type))
13426 {
13427 mod_type_die = base_type_die (type, reverse);
13428
13429 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13430 if (reverse_base_type)
13431 {
13432 dw_die_ref after_die
13433 = modified_type_die (type, cv_quals, false, context_die);
13434 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13435 }
13436 else
13437 add_child_die (comp_unit_die (), mod_type_die);
13438
13439 add_pubtype (type, mod_type_die);
13440 }
13441 else
13442 {
13443 gen_type_die (type, context_die);
13444
13445 /* We have to get the type_main_variant here (and pass that to the
13446 `lookup_type_die' routine) because the ..._TYPE node we have
13447 might simply be a *copy* of some original type node (where the
13448 copy was created to help us keep track of typedef names) and
13449 that copy might have a different TYPE_UID from the original
13450 ..._TYPE node. */
13451 if (TREE_CODE (type) == FUNCTION_TYPE
13452 || TREE_CODE (type) == METHOD_TYPE)
13453 {
13454 /* For function/method types, we can't just use type_main_variant here,
13455 because that can have different ref-qualifiers for C++;
13456 try to canonicalize instead. */
13457 tree main = TYPE_MAIN_VARIANT (type);
13458 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13459 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13460 && check_base_type (t, main)
13461 && check_lang_type (t, type))
13462 return lookup_type_die (t);
13463 return lookup_type_die (type);
13464 }
13465 else if (TREE_CODE (type) != VECTOR_TYPE
13466 && TREE_CODE (type) != ARRAY_TYPE)
13467 return lookup_type_die (type_main_variant (type));
13468 else
13469 /* Vectors have the debugging information in the type,
13470 not the main variant. */
13471 return lookup_type_die (type);
13472 }
13473
13474 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13475 don't output a DW_TAG_typedef, since there isn't one in the
13476 user's program; just attach a DW_AT_name to the type.
13477 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13478 if the base type already has the same name. */
13479 if (name
13480 && ((TREE_CODE (name) != TYPE_DECL
13481 && (qualified_type == TYPE_MAIN_VARIANT (type)
13482 || (cv_quals == TYPE_UNQUALIFIED)))
13483 || (TREE_CODE (name) == TYPE_DECL
13484 && TREE_TYPE (name) == qualified_type
13485 && DECL_NAME (name))))
13486 {
13487 if (TREE_CODE (name) == TYPE_DECL)
13488 /* Could just call add_name_and_src_coords_attributes here,
13489 but since this is a builtin type it doesn't have any
13490 useful source coordinates anyway. */
13491 name = DECL_NAME (name);
13492 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13493 }
13494 /* This probably indicates a bug. */
13495 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13496 {
13497 name = TYPE_IDENTIFIER (type);
13498 add_name_attribute (mod_type_die,
13499 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13500 }
13501
13502 if (qualified_type && !reverse_base_type)
13503 equate_type_number_to_die (qualified_type, mod_type_die);
13504
13505 if (item_type)
13506 /* We must do this after the equate_type_number_to_die call, in case
13507 this is a recursive type. This ensures that the modified_type_die
13508 recursion will terminate even if the type is recursive. Recursive
13509 types are possible in Ada. */
13510 sub_die = modified_type_die (item_type,
13511 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13512 reverse,
13513 context_die);
13514
13515 if (sub_die != NULL)
13516 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13517
13518 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13519 if (TYPE_ARTIFICIAL (type))
13520 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13521
13522 return mod_type_die;
13523 }
13524
13525 /* Generate DIEs for the generic parameters of T.
13526 T must be either a generic type or a generic function.
13527 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13528
13529 static void
13530 gen_generic_params_dies (tree t)
13531 {
13532 tree parms, args;
13533 int parms_num, i;
13534 dw_die_ref die = NULL;
13535 int non_default;
13536
13537 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13538 return;
13539
13540 if (TYPE_P (t))
13541 die = lookup_type_die (t);
13542 else if (DECL_P (t))
13543 die = lookup_decl_die (t);
13544
13545 gcc_assert (die);
13546
13547 parms = lang_hooks.get_innermost_generic_parms (t);
13548 if (!parms)
13549 /* T has no generic parameter. It means T is neither a generic type
13550 nor a generic function. End of story. */
13551 return;
13552
13553 parms_num = TREE_VEC_LENGTH (parms);
13554 args = lang_hooks.get_innermost_generic_args (t);
13555 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13556 non_default = int_cst_value (TREE_CHAIN (args));
13557 else
13558 non_default = TREE_VEC_LENGTH (args);
13559 for (i = 0; i < parms_num; i++)
13560 {
13561 tree parm, arg, arg_pack_elems;
13562 dw_die_ref parm_die;
13563
13564 parm = TREE_VEC_ELT (parms, i);
13565 arg = TREE_VEC_ELT (args, i);
13566 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13567 gcc_assert (parm && TREE_VALUE (parm) && arg);
13568
13569 if (parm && TREE_VALUE (parm) && arg)
13570 {
13571 /* If PARM represents a template parameter pack,
13572 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13573 by DW_TAG_template_*_parameter DIEs for the argument
13574 pack elements of ARG. Note that ARG would then be
13575 an argument pack. */
13576 if (arg_pack_elems)
13577 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13578 arg_pack_elems,
13579 die);
13580 else
13581 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13582 true /* emit name */, die);
13583 if (i >= non_default)
13584 add_AT_flag (parm_die, DW_AT_default_value, 1);
13585 }
13586 }
13587 }
13588
13589 /* Create and return a DIE for PARM which should be
13590 the representation of a generic type parameter.
13591 For instance, in the C++ front end, PARM would be a template parameter.
13592 ARG is the argument to PARM.
13593 EMIT_NAME_P if true, the DIE will have a DW_AT_name attribute set to the
13594 name of the PARM.
13595 PARENT_DIE is the parent DIE which the newly created DIE should be added to,
13596 as a child node. */
13597
13598 static dw_die_ref
13599 generic_parameter_die (tree parm, tree arg,
13600 bool emit_name_p,
13601 dw_die_ref parent_die)
13602 {
13603 dw_die_ref tmpl_die = NULL;
13604 const char *name = NULL;
13605
13606 /* C++2a accepts class literals as template parameters, and var
13607 decls with initializers represent them. The VAR_DECLs would be
13608 rejected, but we can take the DECL_INITIAL constructor and
13609 attempt to expand it. */
13610 if (arg && VAR_P (arg))
13611 arg = DECL_INITIAL (arg);
13612
13613 if (!parm || !DECL_NAME (parm) || !arg)
13614 return NULL;
13615
13616 /* We support non-type generic parameters and arguments,
13617 type generic parameters and arguments, as well as
13618 generic generic parameters (a.k.a. template template parameters in C++)
13619 and arguments. */
13620 if (TREE_CODE (parm) == PARM_DECL)
13621 /* PARM is a nontype generic parameter */
13622 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13623 else if (TREE_CODE (parm) == TYPE_DECL)
13624 /* PARM is a type generic parameter. */
13625 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13626 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13627 /* PARM is a generic generic parameter.
13628 Its DIE is a GNU extension. It shall have a
13629 DW_AT_name attribute to represent the name of the template template
13630 parameter, and a DW_AT_GNU_template_name attribute to represent the
13631 name of the template template argument. */
13632 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13633 parent_die, parm);
13634 else
13635 gcc_unreachable ();
13636
13637 if (tmpl_die)
13638 {
13639 tree tmpl_type;
13640
13641 /* If PARM is a generic parameter pack, it means we are
13642 emitting debug info for a template argument pack element.
13643 In other words, ARG is a template argument pack element.
13644 In that case, we don't emit any DW_AT_name attribute for
13645 the die. */
13646 if (emit_name_p)
13647 {
13648 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13649 gcc_assert (name);
13650 add_AT_string (tmpl_die, DW_AT_name, name);
13651 }
13652
13653 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13654 {
13655 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13656 TMPL_DIE should have a child DW_AT_type attribute that is set
13657 to the type of the argument to PARM, which is ARG.
13658 If PARM is a type generic parameter, TMPL_DIE should have a
13659 child DW_AT_type that is set to ARG. */
13660 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13661 add_type_attribute (tmpl_die, tmpl_type,
13662 (TREE_THIS_VOLATILE (tmpl_type)
13663 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13664 false, parent_die);
13665 }
13666 else
13667 {
13668 /* So TMPL_DIE is a DIE representing a generic generic template
13669 parameter, a.k.a. a template template parameter in C++,
13670 and ARG is a template. */
13671
13672 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13673 to the name of the argument. */
13674 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13675 if (name)
13676 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13677 }
13678
13679 if (TREE_CODE (parm) == PARM_DECL)
13680 /* So PARM is a non-type generic parameter.
13681 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13682 attribute of TMPL_DIE whose value represents the value
13683 of ARG.
13684 We must be careful here:
13685 The value of ARG might reference some function decls.
13686 We might currently be emitting debug info for a generic
13687 type; since types are emitted before function decls, we don't
13688 know if the function decls referenced by ARG will actually be
13689 emitted after cgraph computations.
13690 So we must defer the generation of the DW_AT_const_value to
13691 after cgraph is ready. */
13692 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13693 }
13694
13695 return tmpl_die;
13696 }
13697
13698 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing PARM_PACK.
13699 PARM_PACK must be a template parameter pack. The returned DIE
13700 will be child DIE of PARENT_DIE. */
13701
13702 static dw_die_ref
13703 template_parameter_pack_die (tree parm_pack,
13704 tree parm_pack_args,
13705 dw_die_ref parent_die)
13706 {
13707 dw_die_ref die;
13708 int j;
13709
13710 gcc_assert (parent_die && parm_pack);
13711
13712 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13713 add_name_and_src_coords_attributes (die, parm_pack);
13714 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13715 generic_parameter_die (parm_pack,
13716 TREE_VEC_ELT (parm_pack_args, j),
13717 false /* Don't emit DW_AT_name */,
13718 die);
13719 return die;
13720 }
13721
13722 /* Return the DBX register number described by a given RTL node. */
13723
13724 static unsigned int
13725 dbx_reg_number (const_rtx rtl)
13726 {
13727 unsigned regno = REGNO (rtl);
13728
13729 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13730
13731 #ifdef LEAF_REG_REMAP
13732 if (crtl->uses_only_leaf_regs)
13733 {
13734 int leaf_reg = LEAF_REG_REMAP (regno);
13735 if (leaf_reg != -1)
13736 regno = (unsigned) leaf_reg;
13737 }
13738 #endif
13739
13740 regno = DBX_REGISTER_NUMBER (regno);
13741 gcc_assert (regno != INVALID_REGNUM);
13742 return regno;
13743 }
13744
13745 /* Optionally add a DW_OP_piece term to a location description expression.
13746 DW_OP_piece is only added if the location description expression
13747 doesn't already end with DW_OP_piece. */
13748
13749 static void
13750 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13751 {
13752 dw_loc_descr_ref loc;
13753
13754 if (*list_head != NULL)
13755 {
13756 /* Find the end of the chain. */
13757 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13758 ;
13759
13760 if (loc->dw_loc_opc != DW_OP_piece)
13761 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13762 }
13763 }
13764
13765 /* Return a location descriptor that designates a machine register or
13766 zero if there is none. */
13767
13768 static dw_loc_descr_ref
13769 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13770 {
13771 rtx regs;
13772
13773 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13774 return 0;
13775
13776 /* We only use "frame base" when we're sure we're talking about the
13777 post-prologue local stack frame. We do this by *not* running
13778 register elimination until this point, and recognizing the special
13779 argument pointer and soft frame pointer rtx's.
13780 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13781 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13782 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13783 {
13784 dw_loc_descr_ref result = NULL;
13785
13786 if (dwarf_version >= 4 || !dwarf_strict)
13787 {
13788 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13789 initialized);
13790 if (result)
13791 add_loc_descr (&result,
13792 new_loc_descr (DW_OP_stack_value, 0, 0));
13793 }
13794 return result;
13795 }
13796
13797 regs = targetm.dwarf_register_span (rtl);
13798
13799 if (REG_NREGS (rtl) > 1 || regs)
13800 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13801 else
13802 {
13803 unsigned int dbx_regnum = dbx_reg_number (rtl);
13804 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13805 return 0;
13806 return one_reg_loc_descriptor (dbx_regnum, initialized);
13807 }
13808 }
13809
13810 /* Return a location descriptor that designates a machine register for
13811 a given hard register number. */
13812
13813 static dw_loc_descr_ref
13814 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13815 {
13816 dw_loc_descr_ref reg_loc_descr;
13817
13818 if (regno <= 31)
13819 reg_loc_descr
13820 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13821 else
13822 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13823
13824 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13825 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13826
13827 return reg_loc_descr;
13828 }
13829
13830 /* Given an RTL of a register, return a location descriptor that
13831 designates a value that spans more than one register. */
13832
13833 static dw_loc_descr_ref
13834 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13835 enum var_init_status initialized)
13836 {
13837 int size, i;
13838 dw_loc_descr_ref loc_result = NULL;
13839
13840 /* Simple, contiguous registers. */
13841 if (regs == NULL_RTX)
13842 {
13843 unsigned reg = REGNO (rtl);
13844 int nregs;
13845
13846 #ifdef LEAF_REG_REMAP
13847 if (crtl->uses_only_leaf_regs)
13848 {
13849 int leaf_reg = LEAF_REG_REMAP (reg);
13850 if (leaf_reg != -1)
13851 reg = (unsigned) leaf_reg;
13852 }
13853 #endif
13854
13855 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13856 nregs = REG_NREGS (rtl);
13857
13858 /* At present we only track constant-sized pieces. */
13859 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13860 return NULL;
13861 size /= nregs;
13862
13863 loc_result = NULL;
13864 while (nregs--)
13865 {
13866 dw_loc_descr_ref t;
13867
13868 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13869 VAR_INIT_STATUS_INITIALIZED);
13870 add_loc_descr (&loc_result, t);
13871 add_loc_descr_op_piece (&loc_result, size);
13872 ++reg;
13873 }
13874 return loc_result;
13875 }
13876
13877 /* Now onto stupid register sets in non-contiguous locations. */
13878
13879 gcc_assert (GET_CODE (regs) == PARALLEL);
13880
13881 /* At present we only track constant-sized pieces. */
13882 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13883 return NULL;
13884 loc_result = NULL;
13885
13886 for (i = 0; i < XVECLEN (regs, 0); ++i)
13887 {
13888 dw_loc_descr_ref t;
13889
13890 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13891 VAR_INIT_STATUS_INITIALIZED);
13892 add_loc_descr (&loc_result, t);
13893 add_loc_descr_op_piece (&loc_result, size);
13894 }
13895
13896 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13897 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13898 return loc_result;
13899 }
13900
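/* Illustration only, hypothetical helper: the per-register piece size used
   in the contiguous case above is just the object size divided by the
   number of covering hard registers, and each register op is followed by
   DW_OP_piece of that size.  For an 8-byte value in two 4-byte registers
   the resulting expression is:
     DW_OP_reg0; DW_OP_piece 4; DW_OP_reg1; DW_OP_piece 4  */

static int
example_piece_size (int object_size_in_bytes, int nregs)
{
  return nregs > 0 ? object_size_in_bytes / nregs : 0;
}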
13901 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13902
13903 /* Return a location descriptor that designates a constant i,
13904 as a compound operation from constant (i >> shift), constant shift
13905 and DW_OP_shl. */
13906
13907 static dw_loc_descr_ref
13908 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13909 {
13910 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13911 add_loc_descr (&ret, int_loc_descriptor (shift));
13912 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13913 return ret;
13914 }
13915
13916 /* Return a location descriptor that designates constant POLY_I. */
13917
13918 static dw_loc_descr_ref
13919 int_loc_descriptor (poly_int64 poly_i)
13920 {
13921 enum dwarf_location_atom op;
13922
13923 HOST_WIDE_INT i;
13924 if (!poly_i.is_constant (&i))
13925 {
13926 /* Create location descriptions for the non-constant part and
13927 add any constant offset at the end. */
13928 dw_loc_descr_ref ret = NULL;
13929 HOST_WIDE_INT constant = poly_i.coeffs[0];
13930 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13931 {
13932 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13933 if (coeff != 0)
13934 {
13935 dw_loc_descr_ref start = ret;
13936 unsigned int factor;
13937 int bias;
13938 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13939 (j, &factor, &bias);
13940
13941 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13942 add COEFF * (REGNO / FACTOR) now and subtract
13943 COEFF * BIAS from the final constant part. */
13944 constant -= coeff * bias;
13945 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13946 if (coeff % factor == 0)
13947 coeff /= factor;
13948 else
13949 {
13950 int amount = exact_log2 (factor);
13951 gcc_assert (amount >= 0);
13952 add_loc_descr (&ret, int_loc_descriptor (amount));
13953 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13954 }
13955 if (coeff != 1)
13956 {
13957 add_loc_descr (&ret, int_loc_descriptor (coeff));
13958 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13959 }
13960 if (start)
13961 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13962 }
13963 }
13964 loc_descr_plus_const (&ret, constant);
13965 return ret;
13966 }
13967
13968 /* Pick the smallest representation of a constant, rather than just
13969 defaulting to the LEB encoding. */
13970 if (i >= 0)
13971 {
13972 int clz = clz_hwi (i);
13973 int ctz = ctz_hwi (i);
13974 if (i <= 31)
13975 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13976 else if (i <= 0xff)
13977 op = DW_OP_const1u;
13978 else if (i <= 0xffff)
13979 op = DW_OP_const2u;
13980 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13981 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13982 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13983 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13984 while DW_OP_const4u is 5 bytes. */
13985 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13986 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13987 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13988 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13989 while DW_OP_const4u is 5 bytes. */
13990 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13991
13992 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13993 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13994 <= 4)
13995 {
13996 /* As i >= 2**31, the double cast above will yield a negative number.
13997 Since wrapping is defined in DWARF expressions we can output big
13998 positive integers as small negative ones, regardless of the size
13999 of host wide ints.
14000
14001 Here, since the evaluator will handle 32-bit values and since i >=
14002 2**31, we know it's going to be interpreted as a negative literal:
14003 store it this way if doing so takes fewer than 5 bytes. */
14004 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14005 }
14006 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14007 op = DW_OP_const4u;
14008
14009 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14010 least 6 bytes: see if we can do better before falling back to it. */
14011 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14012 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14013 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14014 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14015 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14016 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14017 >= HOST_BITS_PER_WIDE_INT)
14018 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14019 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14020 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14021 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14022 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14023 && size_of_uleb128 (i) > 6)
14024 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14025 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14026 else
14027 op = DW_OP_constu;
14028 }
14029 else
14030 {
14031 if (i >= -0x80)
14032 op = DW_OP_const1s;
14033 else if (i >= -0x8000)
14034 op = DW_OP_const2s;
14035 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14036 {
14037 if (size_of_int_loc_descriptor (i) < 5)
14038 {
14039 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14040 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14041 return ret;
14042 }
14043 op = DW_OP_const4s;
14044 }
14045 else
14046 {
14047 if (size_of_int_loc_descriptor (i)
14048 < (unsigned long) 1 + size_of_sleb128 (i))
14049 {
14050 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14051 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14052 return ret;
14053 }
14054 op = DW_OP_consts;
14055 }
14056 }
14057
14058 return new_loc_descr (op, i, 0);
14059 }
14060
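/* Worked example, illustration only, assuming a 64-bit HOST_WIDE_INT: why
   the clz/ctz shortcut above can beat DW_OP_const4u.  The hypothetical
   helper just evaluates DW_OP_litX DW_OP_litY DW_OP_shl the way a DWARF
   consumer would.  */

static unsigned long long
example_lit_shl (unsigned int base, unsigned int shift)
{
  return (unsigned long long) base << shift;
  /* For i = 0x30000000, clz = 34 and ctz = 28, so shift = 64 - 34 - 5 = 25
     and i >> 25 = 24: the constant is emitted as
     DW_OP_lit24 DW_OP_lit25 DW_OP_shl (3 bytes) rather than
     DW_OP_const4u 0x30000000 (5 bytes), and example_lit_shl (24, 25)
     reconstructs 0x30000000.  */
}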
14061 /* Likewise, for unsigned constants. */
14062
14063 static dw_loc_descr_ref
14064 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14065 {
14066 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14067 const unsigned HOST_WIDE_INT max_uint
14068 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14069
14070 /* If possible, use the clever signed constants handling. */
14071 if (i <= max_int)
14072 return int_loc_descriptor ((HOST_WIDE_INT) i);
14073
14074 /* Here, we are left with positive numbers that cannot be represented as
14075 HOST_WIDE_INT, i.e.:
14076 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14077
14078 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes
14079 a lot of bytes, whereas it may be better to output a negative integer:
14080 thanks to integer wrapping, we know that:
14081 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14082 = x - 2 * (max (HOST_WIDE_INT) + 1)
14083 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14084 small negative integers. Let's try that in cases where it will clearly
14085 improve the encoding: there is no gain turning DW_OP_const4u into
14086 DW_OP_const4s. */
14087 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14088 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14089 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14090 {
14091 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14092
14093 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14094 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14095 const HOST_WIDE_INT second_shift
14096 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14097
14098 /* So we finally have:
14099 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14100 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14101 return int_loc_descriptor (second_shift);
14102 }
14103
14104 /* Last chance: fallback to a simple constant operation. */
14105 return new_loc_descr
14106 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14107 ? DW_OP_const4u
14108 : DW_OP_const8u,
14109 i, 0);
14110 }
14111
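/* Worked example, illustration only: the integer-wrapping rewrite used by
   the 32-bit branch of int_loc_descriptor and by uint_loc_descriptor above.
   On a target where DWARF constants are evaluated modulo 2**32 (hypothetical
   4-byte DWARF2_ADDR_SIZE, 32-bit unsigned int assumed), a large unsigned
   value and its small negative counterpart denote the same thing.  */

static unsigned int
example_wrapped_constant (int small_negative)
{
  return (unsigned int) small_negative;
  /* example_wrapped_constant (-16) == 0xfffffff0, so DW_OP_const1s -16
     (2 bytes) denotes the same value as DW_OP_const4u 0xfffffff0
     (5 bytes) on such a target.  */
}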
14112 /* Generate and return a location description that computes the unsigned
14113 comparison of the two stack top entries (a OP b where b is the top-most
14114 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14115 LE_EXPR, GT_EXPR or GE_EXPR. */
14116
14117 static dw_loc_descr_ref
14118 uint_comparison_loc_list (enum tree_code kind)
14119 {
14120 enum dwarf_location_atom op, flip_op;
14121 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14122
14123 switch (kind)
14124 {
14125 case LT_EXPR:
14126 op = DW_OP_lt;
14127 break;
14128 case LE_EXPR:
14129 op = DW_OP_le;
14130 break;
14131 case GT_EXPR:
14132 op = DW_OP_gt;
14133 break;
14134 case GE_EXPR:
14135 op = DW_OP_ge;
14136 break;
14137 default:
14138 gcc_unreachable ();
14139 }
14140
14141 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14142 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14143
14144 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14145 possible to perform unsigned comparisons: we just have to distinguish
14146 three cases:
14147
14148 1. when a and b have the same sign (as signed integers); then we should
14149 return: a OP(signed) b;
14150
14151 2. when a is a negative signed integer while b is a positive one, then a
14152 is a greater unsigned integer than b; likewise when a and b's roles
14153 are flipped.
14154
14155 So first, compare the sign of the two operands. */
14156 ret = new_loc_descr (DW_OP_over, 0, 0);
14157 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14158 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14159 /* If they have different signs (i.e. they have different sign bits), then
14160 the stack top value has now the sign bit set and thus it's smaller than
14161 zero. */
14162 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14163 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14164 add_loc_descr (&ret, bra_node);
14165
14166 /* We are in case 1. At this point, we know both operands have the same
14167      sign, so it's safe to use the built-in signed comparison. */
14168 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14169 add_loc_descr (&ret, jmp_node);
14170
14171 /* We are in case 2. Here, we know both operands do not have the same sign,
14172 so we have to flip the signed comparison. */
14173 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14174 tmp = new_loc_descr (flip_op, 0, 0);
14175 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14176 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14177 add_loc_descr (&ret, tmp);
14178
14179 /* This dummy operation is necessary to make the two branches join. */
14180 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14181 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14182 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14183 add_loc_descr (&ret, tmp);
14184
14185 return ret;
14186 }
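/* An illustrative trace of the expression above, for KIND == LT_EXPR on a
   target with 4-byte stack entries and a == 0x80000000, b == 1:
   DW_OP_over DW_OP_over DW_OP_xor leaves a ^ b == 0x80000001 on top; its
   sign bit is set, so DW_OP_lit0 DW_OP_lt pushes 1 and DW_OP_bra branches
   to the flipped operator DW_OP_gt, which evaluates the signed a > b,
   i.e. -2**31 > 1, and pushes 0.  That matches the unsigned result, since
   0x80000000 <u 1 is indeed false.  */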
14187
14188 /* Likewise, but takes the location description lists (might be destructive on
14189 them). Return NULL if either is NULL or if concatenation fails. */
14190
14191 static dw_loc_list_ref
14192 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14193 enum tree_code kind)
14194 {
14195 if (left == NULL || right == NULL)
14196 return NULL;
14197
14198 add_loc_list (&left, right);
14199 if (left == NULL)
14200 return NULL;
14201
14202 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14203 return left;
14204 }
14205
14206 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14207 without actually allocating it. */
14208
14209 static unsigned long
14210 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14211 {
14212 return size_of_int_loc_descriptor (i >> shift)
14213 + size_of_int_loc_descriptor (shift)
14214 + 1;
14215 }
14216
14217 /* Return size_of_locs (int_loc_descriptor (i)) without
14218 actually allocating it. */
14219
14220 static unsigned long
14221 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14222 {
14223 unsigned long s;
14224
14225 if (i >= 0)
14226 {
14227 int clz, ctz;
14228 if (i <= 31)
14229 return 1;
14230 else if (i <= 0xff)
14231 return 2;
14232 else if (i <= 0xffff)
14233 return 3;
14234 clz = clz_hwi (i);
14235 ctz = ctz_hwi (i);
14236 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14237 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14238 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14239 - clz - 5);
14240 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14241 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14242 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14243 - clz - 8);
14244 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14245 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14246 <= 4)
14247 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14248 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14249 return 5;
14250 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14251 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14252 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14253 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14254 - clz - 8);
14255 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14256 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14257 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14258 - clz - 16);
14259 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14260 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14261 && s > 6)
14262 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14263 - clz - 32);
14264 else
14265 return 1 + s;
14266 }
14267 else
14268 {
14269 if (i >= -0x80)
14270 return 2;
14271 else if (i >= -0x8000)
14272 return 3;
14273 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14274 {
14275 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14276 {
14277 s = size_of_int_loc_descriptor (-i) + 1;
14278 if (s < 5)
14279 return s;
14280 }
14281 return 5;
14282 }
14283 else
14284 {
14285 unsigned long r = 1 + size_of_sleb128 (i);
14286 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14287 {
14288 s = size_of_int_loc_descriptor (-i) + 1;
14289 if (s < r)
14290 return s;
14291 }
14292 return r;
14293 }
14294 }
14295 }
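/* A small example of the shift-based encoding this size computation accounts
   for (illustrative only; it assumes HOST_BITS_PER_WIDE_INT == 64): for
   i == HOST_WIDE_INT_1 << 39 we have clz == 24 and ctz == 39, so
   int_shift_loc_descriptor (i, 35) amounts to
     DW_OP_lit16 DW_OP_const1u 35 DW_OP_shl
   and size_of_int_loc_descriptor returns 4, whereas a plain DW_OP_constu
   would take 1 + 6 == 7 bytes for the ULEB128 encoding.  */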
14296
14297 /* Return a location description representing the "address" of an integer
14298    value.  This can appear only as a toplevel expression. */
14299
14300 static dw_loc_descr_ref
14301 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14302 {
14303 int litsize;
14304 dw_loc_descr_ref loc_result = NULL;
14305
14306 if (!(dwarf_version >= 4 || !dwarf_strict))
14307 return NULL;
14308
14309 litsize = size_of_int_loc_descriptor (i);
14310 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14311 is more compact. For DW_OP_stack_value we need:
14312 litsize + 1 (DW_OP_stack_value)
14313 and for DW_OP_implicit_value:
14314 1 (DW_OP_implicit_value) + 1 (length) + size. */
14315 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14316 {
14317 loc_result = int_loc_descriptor (i);
14318 add_loc_descr (&loc_result,
14319 new_loc_descr (DW_OP_stack_value, 0, 0));
14320 return loc_result;
14321 }
14322
14323 loc_result = new_loc_descr (DW_OP_implicit_value,
14324 size, 0);
14325 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14326 loc_result->dw_loc_oprnd2.v.val_int = i;
14327 return loc_result;
14328 }
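/* For instance (illustrative only), with SIZE == 4 and I == 5 the
   DW_OP_stack_value form wins: DW_OP_lit5 DW_OP_stack_value is 2 bytes
   against 1 + 1 + 4 == 6 bytes for DW_OP_implicit_value 4 <4-byte block>.
   DW_OP_implicit_value remains the choice for values wider than
   DWARF2_ADDR_SIZE or whose literal encoding would be longer than the
   block form.  */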
14329
14330 /* Return a location descriptor that designates a base+offset location. */
14331
14332 static dw_loc_descr_ref
14333 based_loc_descr (rtx reg, poly_int64 offset,
14334 enum var_init_status initialized)
14335 {
14336 unsigned int regno;
14337 dw_loc_descr_ref result;
14338 dw_fde_ref fde = cfun->fde;
14339
14340 /* We only use "frame base" when we're sure we're talking about the
14341 post-prologue local stack frame. We do this by *not* running
14342 register elimination until this point, and recognizing the special
14343 argument pointer and soft frame pointer rtx's. */
14344 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14345 {
14346 rtx elim = (ira_use_lra_p
14347 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14348 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14349
14350 if (elim != reg)
14351 {
14352 /* Allow hard frame pointer here even if frame pointer
14353 isn't used since hard frame pointer is encoded with
14354 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14355 not hard frame pointer directly. */
14356 elim = strip_offset_and_add (elim, &offset);
14357 gcc_assert (elim == hard_frame_pointer_rtx
14358 || elim == stack_pointer_rtx);
14359
14360 /* If drap register is used to align stack, use frame
14361 pointer + offset to access stack variables. If stack
14362 is aligned without drap, use stack pointer + offset to
14363 access stack variables. */
14364 if (crtl->stack_realign_tried
14365 && reg == frame_pointer_rtx)
14366 {
14367 int base_reg
14368 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14369 ? HARD_FRAME_POINTER_REGNUM
14370 : REGNO (elim));
14371 return new_reg_loc_descr (base_reg, offset);
14372 }
14373
14374 gcc_assert (frame_pointer_fb_offset_valid);
14375 offset += frame_pointer_fb_offset;
14376 HOST_WIDE_INT const_offset;
14377 if (offset.is_constant (&const_offset))
14378 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14379 else
14380 {
14381 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14382 loc_descr_plus_const (&ret, offset);
14383 return ret;
14384 }
14385 }
14386 }
14387
14388 regno = REGNO (reg);
14389 #ifdef LEAF_REG_REMAP
14390 if (crtl->uses_only_leaf_regs)
14391 {
14392 int leaf_reg = LEAF_REG_REMAP (regno);
14393 if (leaf_reg != -1)
14394 regno = (unsigned) leaf_reg;
14395 }
14396 #endif
14397 regno = DWARF_FRAME_REGNUM (regno);
14398
14399 HOST_WIDE_INT const_offset;
14400 if (!optimize && fde
14401 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14402 && offset.is_constant (&const_offset))
14403 {
14404 /* Use cfa+offset to represent the location of arguments passed
14405 on the stack when drap is used to align stack.
14406 Only do this when not optimizing; for optimized code var-tracking
14407 is supposed to track where the arguments live, and the register
14408 used as vdrap or drap in some spot might be used for something
14409 else in another part of the routine. */
14410 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14411 }
14412
14413 result = new_reg_loc_descr (regno, offset);
14414
14415 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14416 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14417
14418 return result;
14419 }
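/* Typical shapes of the result (the numbers are only examples): a variable
   at frame-base offset -16 in the post-prologue frame becomes
   DW_OP_fbreg -16, relying on the subprogram's DW_AT_frame_base, while a
   location based on an ordinary hard register with DWARF number R and
   offset 8 becomes the DW_OP_breg<R> 8 (or DW_OP_bregx R 8) emitted by
   new_reg_loc_descr.  */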
14420
14421 /* Return true if this RTL expression describes a base+offset calculation. */
14422
14423 static inline int
14424 is_based_loc (const_rtx rtl)
14425 {
14426 return (GET_CODE (rtl) == PLUS
14427 && ((REG_P (XEXP (rtl, 0))
14428 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14429 && CONST_INT_P (XEXP (rtl, 1)))));
14430 }
14431
14432 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14433 failed. */
14434
14435 static dw_loc_descr_ref
14436 tls_mem_loc_descriptor (rtx mem)
14437 {
14438 tree base;
14439 dw_loc_descr_ref loc_result;
14440
14441 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14442 return NULL;
14443
14444 base = get_base_address (MEM_EXPR (mem));
14445 if (base == NULL
14446 || !VAR_P (base)
14447 || !DECL_THREAD_LOCAL_P (base))
14448 return NULL;
14449
14450 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14451 if (loc_result == NULL)
14452 return NULL;
14453
14454 if (maybe_ne (MEM_OFFSET (mem), 0))
14455 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14456
14457 return loc_result;
14458 }
14459
14460 /* Output debug info about the reason why we failed to expand the expression
14461    as a DWARF expression. */
14462
14463 static void
14464 expansion_failed (tree expr, rtx rtl, char const *reason)
14465 {
14466 if (dump_file && (dump_flags & TDF_DETAILS))
14467 {
14468 fprintf (dump_file, "Failed to expand as dwarf: ");
14469 if (expr)
14470 print_generic_expr (dump_file, expr, dump_flags);
14471 if (rtl)
14472 {
14473 fprintf (dump_file, "\n");
14474 print_rtl (dump_file, rtl);
14475 }
14476 fprintf (dump_file, "\nReason: %s\n", reason);
14477 }
14478 }
14479
14480 /* Helper function for const_ok_for_output. */
14481
14482 static bool
14483 const_ok_for_output_1 (rtx rtl)
14484 {
14485 if (targetm.const_not_ok_for_debug_p (rtl))
14486 {
14487 if (GET_CODE (rtl) != UNSPEC)
14488 {
14489 expansion_failed (NULL_TREE, rtl,
14490 "Expression rejected for debug by the backend.\n");
14491 return false;
14492 }
14493
14494 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14495 the target hook doesn't explicitly allow it in debug info, assume
14496 we can't express it in the debug info. */
14497 /* Don't complain about TLS UNSPECs, those are just too hard to
14498 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14499 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14500 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14501 if (flag_checking
14502 && (XVECLEN (rtl, 0) == 0
14503 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14504 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14505 inform (current_function_decl
14506 ? DECL_SOURCE_LOCATION (current_function_decl)
14507 : UNKNOWN_LOCATION,
14508 #if NUM_UNSPEC_VALUES > 0
14509 "non-delegitimized UNSPEC %s (%d) found in variable location",
14510 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14511 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14512 XINT (rtl, 1));
14513 #else
14514 "non-delegitimized UNSPEC %d found in variable location",
14515 XINT (rtl, 1));
14516 #endif
14517 expansion_failed (NULL_TREE, rtl,
14518 "UNSPEC hasn't been delegitimized.\n");
14519 return false;
14520 }
14521
14522 if (CONST_POLY_INT_P (rtl))
14523 return false;
14524
14525 /* FIXME: Refer to PR60655. It is possible for simplification
14526 of rtl expressions in var tracking to produce such expressions.
14527 We should really identify / validate expressions
14528 enclosed in CONST that can be handled by assemblers on various
14529 targets and only handle legitimate cases here. */
14530 switch (GET_CODE (rtl))
14531 {
14532 case SYMBOL_REF:
14533 break;
14534 case NOT:
14535 case NEG:
14536 return false;
14537 case PLUS:
14538 {
14539 /* Make sure SYMBOL_REFs/UNSPECs appear in at most one of the
14540 operands. */
14541 subrtx_var_iterator::array_type array;
14542 bool first = false;
14543 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14544 if (SYMBOL_REF_P (*iter)
14545 || LABEL_P (*iter)
14546 || GET_CODE (*iter) == UNSPEC)
14547 {
14548 first = true;
14549 break;
14550 }
14551 if (!first)
14552 return true;
14553 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14554 if (SYMBOL_REF_P (*iter)
14555 || LABEL_P (*iter)
14556 || GET_CODE (*iter) == UNSPEC)
14557 return false;
14558 return true;
14559 }
14560 case MINUS:
14561 {
14562 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14563 appear in the second operand of MINUS. */
14564 subrtx_var_iterator::array_type array;
14565 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14566 if (SYMBOL_REF_P (*iter)
14567 || LABEL_P (*iter)
14568 || GET_CODE (*iter) == UNSPEC)
14569 return false;
14570 return true;
14571 }
14572 default:
14573 return true;
14574 }
14575
14576 if (CONSTANT_POOL_ADDRESS_P (rtl))
14577 {
14578 bool marked;
14579 get_pool_constant_mark (rtl, &marked);
14580 /* If all references to this pool constant were optimized away,
14581 it was not output and thus we can't represent it. */
14582 if (!marked)
14583 {
14584 expansion_failed (NULL_TREE, rtl,
14585 "Constant was removed from constant pool.\n");
14586 return false;
14587 }
14588 }
14589
14590 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14591 return false;
14592
14593 /* Avoid references to external symbols in debug info: on several targets
14594 the linker might even refuse to link when linking a shared library,
14595 and in many other cases the relocations for .debug_info/.debug_loc are
14596 dropped, so the address becomes zero anyway. Hidden symbols, which are
14597 guaranteed to be defined within the same shared library or executable, are fine. */
14598 if (SYMBOL_REF_EXTERNAL_P (rtl))
14599 {
14600 tree decl = SYMBOL_REF_DECL (rtl);
14601
14602 if (decl == NULL || !targetm.binds_local_p (decl))
14603 {
14604 expansion_failed (NULL_TREE, rtl,
14605 "Symbol not defined in current TU.\n");
14606 return false;
14607 }
14608 }
14609
14610 return true;
14611 }
14612
14613 /* Return true if constant RTL can be emitted in DW_OP_addr or
14614 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14615 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14616
14617 static bool
14618 const_ok_for_output (rtx rtl)
14619 {
14620 if (GET_CODE (rtl) == SYMBOL_REF)
14621 return const_ok_for_output_1 (rtl);
14622
14623 if (GET_CODE (rtl) == CONST)
14624 {
14625 subrtx_var_iterator::array_type array;
14626 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14627 if (!const_ok_for_output_1 (*iter))
14628 return false;
14629 return true;
14630 }
14631
14632 return true;
14633 }
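/* A few examples of how the checks above classify constants (illustrative
   only; "x" stands for an ordinary non-TLS symbol defined in this TU):
     (symbol_ref "x")                                accepted
     (const (plus (symbol_ref "x") (const_int 8)))   accepted - the symbol
        appears in only one operand of the PLUS
     (const (minus (const_int 8) (symbol_ref "x")))  rejected - a symbol in
        the second operand of MINUS would have to be negated
     (const (neg (symbol_ref "x")))                  rejected
   TLS symbols, external symbols not known to bind locally and constant
   pool entries whose references were all optimized away are rejected
   as well.  */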
14634
14635 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14636 if possible, NULL otherwise. */
14637
14638 static dw_die_ref
14639 base_type_for_mode (machine_mode mode, bool unsignedp)
14640 {
14641 dw_die_ref type_die;
14642 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14643
14644 if (type == NULL)
14645 return NULL;
14646 switch (TREE_CODE (type))
14647 {
14648 case INTEGER_TYPE:
14649 case REAL_TYPE:
14650 break;
14651 default:
14652 return NULL;
14653 }
14654 type_die = lookup_type_die (type);
14655 if (!type_die)
14656 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14657 comp_unit_die ());
14658 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14659 return NULL;
14660 return type_die;
14661 }
14662
14663 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14664 type matching MODE, or, if MODE is narrower than or as wide as
14665 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14666 possible. */
14667
14668 static dw_loc_descr_ref
14669 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14670 {
14671 machine_mode outer_mode = mode;
14672 dw_die_ref type_die;
14673 dw_loc_descr_ref cvt;
14674
14675 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14676 {
14677 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14678 return op;
14679 }
14680 type_die = base_type_for_mode (outer_mode, 1);
14681 if (type_die == NULL)
14682 return NULL;
14683 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14684 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14685 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14686 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14687 add_loc_descr (&op, cvt);
14688 return op;
14689 }
14690
14691 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14692
14693 static dw_loc_descr_ref
14694 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14695 dw_loc_descr_ref op1)
14696 {
14697 dw_loc_descr_ref ret = op0;
14698 add_loc_descr (&ret, op1);
14699 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14700 if (STORE_FLAG_VALUE != 1)
14701 {
14702 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14703 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14704 }
14705 return ret;
14706 }
14707
14708 /* Subroutine of scompare_loc_descriptor for the case in which we're
14709 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14710 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14711
14712 static dw_loc_descr_ref
14713 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14714 scalar_int_mode op_mode,
14715 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14716 {
14717 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14718 dw_loc_descr_ref cvt;
14719
14720 if (type_die == NULL)
14721 return NULL;
14722 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14723 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14724 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14725 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14726 add_loc_descr (&op0, cvt);
14727 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14728 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14729 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14730 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14731 add_loc_descr (&op1, cvt);
14732 return compare_loc_descriptor (op, op0, op1);
14733 }
14734
14735 /* Subroutine of scompare_loc_descriptor for the case in which we're
14736 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14737 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14738
14739 static dw_loc_descr_ref
14740 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14741 scalar_int_mode op_mode,
14742 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14743 {
14744 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14745 /* For eq/ne, if the operands are known to be zero-extended,
14746 there is no need to do the fancy shifting up. */
14747 if (op == DW_OP_eq || op == DW_OP_ne)
14748 {
14749 dw_loc_descr_ref last0, last1;
14750 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14751 ;
14752 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14753 ;
14754 /* deref_size zero extends, and for constants we can check
14755 whether they are zero extended or not. */
14756 if (((last0->dw_loc_opc == DW_OP_deref_size
14757 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14758 || (CONST_INT_P (XEXP (rtl, 0))
14759 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14760 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14761 && ((last1->dw_loc_opc == DW_OP_deref_size
14762 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14763 || (CONST_INT_P (XEXP (rtl, 1))
14764 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14765 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14766 return compare_loc_descriptor (op, op0, op1);
14767
14768 /* EQ/NE comparison against constant in narrower type than
14769 DWARF2_ADDR_SIZE can be performed either as
14770 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14771 DW_OP_{eq,ne}
14772 or
14773 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14774 DW_OP_{eq,ne}. Pick whatever is shorter. */
14775 if (CONST_INT_P (XEXP (rtl, 1))
14776 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14777 && (size_of_int_loc_descriptor (shift) + 1
14778 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14779 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14780 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14781 & GET_MODE_MASK (op_mode))))
14782 {
14783 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14784 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14785 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14786 & GET_MODE_MASK (op_mode));
14787 return compare_loc_descriptor (op, op0, op1);
14788 }
14789 }
14790 add_loc_descr (&op0, int_loc_descriptor (shift));
14791 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14792 if (CONST_INT_P (XEXP (rtl, 1)))
14793 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14794 else
14795 {
14796 add_loc_descr (&op1, int_loc_descriptor (shift));
14797 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14798 }
14799 return compare_loc_descriptor (op, op0, op1);
14800 }
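/* As an illustration (not an additional code path): comparing two HImode
   values with DWARF2_ADDR_SIZE == 4, say a == -1 and b == 2 with
   op == DW_OP_lt, uses shift == 16.  Shifting both operands left by 16
   turns a into 0xffff0000 (negative as a 32-bit value) and b into
   0x00020000, so the plain signed DW_OP_lt yields 1, the correct HImode
   result regardless of how the upper 16 bits of the stack entries were
   originally extended.  */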
14801
14802 /* Return location descriptor for signed comparison OP RTL. */
14803
14804 static dw_loc_descr_ref
14805 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14806 machine_mode mem_mode)
14807 {
14808 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14809 dw_loc_descr_ref op0, op1;
14810
14811 if (op_mode == VOIDmode)
14812 op_mode = GET_MODE (XEXP (rtl, 1));
14813 if (op_mode == VOIDmode)
14814 return NULL;
14815
14816 scalar_int_mode int_op_mode;
14817 if (dwarf_strict
14818 && dwarf_version < 5
14819 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14820 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14821 return NULL;
14822
14823 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14824 VAR_INIT_STATUS_INITIALIZED);
14825 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827
14828 if (op0 == NULL || op1 == NULL)
14829 return NULL;
14830
14831 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14832 {
14833 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14834 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14835
14836 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14837 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14838 }
14839 return compare_loc_descriptor (op, op0, op1);
14840 }
14841
14842 /* Return location descriptor for unsigned comparison OP RTL. */
14843
14844 static dw_loc_descr_ref
14845 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14846 machine_mode mem_mode)
14847 {
14848 dw_loc_descr_ref op0, op1;
14849
14850 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14851 if (test_op_mode == VOIDmode)
14852 test_op_mode = GET_MODE (XEXP (rtl, 1));
14853
14854 scalar_int_mode op_mode;
14855 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14856 return NULL;
14857
14858 if (dwarf_strict
14859 && dwarf_version < 5
14860 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14861 return NULL;
14862
14863 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14864 VAR_INIT_STATUS_INITIALIZED);
14865 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14866 VAR_INIT_STATUS_INITIALIZED);
14867
14868 if (op0 == NULL || op1 == NULL)
14869 return NULL;
14870
14871 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14874 dw_loc_descr_ref last0, last1;
14875 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14876 ;
14877 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14878 ;
14879 if (CONST_INT_P (XEXP (rtl, 0)))
14880 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14881 /* deref_size zero extends, so no need to mask it again. */
14882 else if (last0->dw_loc_opc != DW_OP_deref_size
14883 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14884 {
14885 add_loc_descr (&op0, int_loc_descriptor (mask));
14886 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14887 }
14888 if (CONST_INT_P (XEXP (rtl, 1)))
14889 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14890 /* deref_size zero extends, so no need to mask it again. */
14891 else if (last1->dw_loc_opc != DW_OP_deref_size
14892 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14893 {
14894 add_loc_descr (&op1, int_loc_descriptor (mask));
14895 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14896 }
14897 }
14898 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14899 {
14900 HOST_WIDE_INT bias = 1;
14901 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14902 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14903 if (CONST_INT_P (XEXP (rtl, 1)))
14904 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14905 + INTVAL (XEXP (rtl, 1)));
14906 else
14907 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14908 bias, 0));
14909 }
14910 return compare_loc_descriptor (op, op0, op1);
14911 }
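/* The bias trick above, illustrated for op_mode as wide as a 4-byte
   DWARF2_ADDR_SIZE: bias == 0x80000000, and for a == 0xffffffff, b == 1
   the biased values wrap to a' == 0x7fffffff and b' == 0x80000001.  As
   signed numbers a' > b', which matches the unsigned ordering of the
   original operands, so the ordinary signed comparison operators can be
   reused after DW_OP_plus_uconst <bias>.  */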
14912
14913 /* Return location descriptor for {U,S}{MIN,MAX}. */
14914
14915 static dw_loc_descr_ref
14916 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14917 machine_mode mem_mode)
14918 {
14919 enum dwarf_location_atom op;
14920 dw_loc_descr_ref op0, op1, ret;
14921 dw_loc_descr_ref bra_node, drop_node;
14922
14923 scalar_int_mode int_mode;
14924 if (dwarf_strict
14925 && dwarf_version < 5
14926 && (!is_a <scalar_int_mode> (mode, &int_mode)
14927 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14928 return NULL;
14929
14930 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14931 VAR_INIT_STATUS_INITIALIZED);
14932 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14933 VAR_INIT_STATUS_INITIALIZED);
14934
14935 if (op0 == NULL || op1 == NULL)
14936 return NULL;
14937
14938 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14939 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14940 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14941 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14942 {
14943 /* Checked by the caller. */
14944 int_mode = as_a <scalar_int_mode> (mode);
14945 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14946 {
14947 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14948 add_loc_descr (&op0, int_loc_descriptor (mask));
14949 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14950 add_loc_descr (&op1, int_loc_descriptor (mask));
14951 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14952 }
14953 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14954 {
14955 HOST_WIDE_INT bias = 1;
14956 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14957 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14958 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14959 }
14960 }
14961 else if (is_a <scalar_int_mode> (mode, &int_mode)
14962 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14963 {
14964 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14965 add_loc_descr (&op0, int_loc_descriptor (shift));
14966 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14967 add_loc_descr (&op1, int_loc_descriptor (shift));
14968 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14969 }
14970 else if (is_a <scalar_int_mode> (mode, &int_mode)
14971 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14972 {
14973 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14974 dw_loc_descr_ref cvt;
14975 if (type_die == NULL)
14976 return NULL;
14977 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14978 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14979 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14980 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14981 add_loc_descr (&op0, cvt);
14982 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14983 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14984 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14985 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14986 add_loc_descr (&op1, cvt);
14987 }
14988
14989 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14990 op = DW_OP_lt;
14991 else
14992 op = DW_OP_gt;
14993 ret = op0;
14994 add_loc_descr (&ret, op1);
14995 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14996 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14997 add_loc_descr (&ret, bra_node);
14998 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14999 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15000 add_loc_descr (&ret, drop_node);
15001 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15002 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15003 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15004 && is_a <scalar_int_mode> (mode, &int_mode)
15005 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15006 ret = convert_descriptor_to_mode (int_mode, ret);
15007 return ret;
15008 }
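/* A short trace of the tail of this expression for SMIN with a == 2 and
   b == 5 (illustrative only): after the setup the stack holds a b a b,
   DW_OP_lt pushes a < b == 1, and DW_OP_bra therefore branches straight to
   the DW_OP_drop, which discards b and leaves a == 2, the minimum.  Had
   the comparison been false, the fall-through DW_OP_swap would have
   brought a to the top so that the drop discards it and leaves b.  */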
15009
15010 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15011 after converting both arguments to TYPE_DIE, then convert the result
15012 back to MODE's unsigned (or untyped) representation. */
15013
15014 static dw_loc_descr_ref
15015 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15016 scalar_int_mode mode, machine_mode mem_mode)
15017 {
15018 dw_loc_descr_ref cvt, op0, op1;
15019
15020 if (type_die == NULL)
15021 return NULL;
15022 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15023 VAR_INIT_STATUS_INITIALIZED);
15024 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 if (op0 == NULL || op1 == NULL)
15027 return NULL;
15028 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15029 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15030 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15031 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15032 add_loc_descr (&op0, cvt);
15033 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15034 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15035 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15036 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15037 add_loc_descr (&op1, cvt);
15038 add_loc_descr (&op0, op1);
15039 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15040 return convert_descriptor_to_mode (mode, op0);
15041 }
15042
15043 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15044 const0 is DW_OP_lit0 or corresponding typed constant,
15045 const1 is DW_OP_lit1 or corresponding typed constant
15046 and constMSB is a constant with just the MSB bit set
15047 for the mode):
15048 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15049 L1: const0 DW_OP_swap
15050 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15051 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15052 L3: DW_OP_drop
15053 L4: DW_OP_nop
15054
15055 CTZ is similar:
15056 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15057 L1: const0 DW_OP_swap
15058 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15059 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15060 L3: DW_OP_drop
15061 L4: DW_OP_nop
15062
15063 FFS is similar:
15064 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15065 L1: const1 DW_OP_swap
15066 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15067 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15068 L3: DW_OP_drop
15069 L4: DW_OP_nop */
15070
15071 static dw_loc_descr_ref
15072 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15073 machine_mode mem_mode)
15074 {
15075 dw_loc_descr_ref op0, ret, tmp;
15076 HOST_WIDE_INT valv;
15077 dw_loc_descr_ref l1jump, l1label;
15078 dw_loc_descr_ref l2jump, l2label;
15079 dw_loc_descr_ref l3jump, l3label;
15080 dw_loc_descr_ref l4jump, l4label;
15081 rtx msb;
15082
15083 if (GET_MODE (XEXP (rtl, 0)) != mode)
15084 return NULL;
15085
15086 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15087 VAR_INIT_STATUS_INITIALIZED);
15088 if (op0 == NULL)
15089 return NULL;
15090 ret = op0;
15091 if (GET_CODE (rtl) == CLZ)
15092 {
15093 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15094 valv = GET_MODE_BITSIZE (mode);
15095 }
15096 else if (GET_CODE (rtl) == FFS)
15097 valv = 0;
15098 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15099 valv = GET_MODE_BITSIZE (mode);
15100 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15101 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15102 add_loc_descr (&ret, l1jump);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15104 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15105 VAR_INIT_STATUS_INITIALIZED);
15106 if (tmp == NULL)
15107 return NULL;
15108 add_loc_descr (&ret, tmp);
15109 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15110 add_loc_descr (&ret, l4jump);
15111 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15112 ? const1_rtx : const0_rtx,
15113 mode, mem_mode,
15114 VAR_INIT_STATUS_INITIALIZED);
15115 if (l1label == NULL)
15116 return NULL;
15117 add_loc_descr (&ret, l1label);
15118 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15119 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15120 add_loc_descr (&ret, l2label);
15121 if (GET_CODE (rtl) != CLZ)
15122 msb = const1_rtx;
15123 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15124 msb = GEN_INT (HOST_WIDE_INT_1U
15125 << (GET_MODE_BITSIZE (mode) - 1));
15126 else
15127 msb = immed_wide_int_const
15128 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15129 GET_MODE_PRECISION (mode)), mode);
15130 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15131 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15132 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15133 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15134 else
15135 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15136 VAR_INIT_STATUS_INITIALIZED);
15137 if (tmp == NULL)
15138 return NULL;
15139 add_loc_descr (&ret, tmp);
15140 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15141 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15142 add_loc_descr (&ret, l3jump);
15143 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (tmp == NULL)
15146 return NULL;
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15149 ? DW_OP_shl : DW_OP_shr, 0, 0));
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15151 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15152 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15153 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15154 add_loc_descr (&ret, l2jump);
15155 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15156 add_loc_descr (&ret, l3label);
15157 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15158 add_loc_descr (&ret, l4label);
15159 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15160 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15161 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15162 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15163 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15164 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15165 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15166 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15167 return ret;
15168 }
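/* A brief trace of the CLZ expression above (illustrative only), for a
   32-bit mode and the value 0x00010000: the operand is non-zero, so the
   first DW_OP_bra branches to L1; a counter starting at const0 is kept
   below the value, and the L2 loop shifts the value left by one and bumps
   the counter until the DW_OP_and against constMSB (0x80000000) becomes
   non-zero.  That takes 15 iterations here, so the result left on the
   stack is 15, matching __builtin_clz (0x00010000).  */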
15169
15170 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15171 const1 is DW_OP_lit1 or corresponding typed constant):
15172 const0 DW_OP_swap
15173 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15174 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15175 L2: DW_OP_drop
15176
15177 PARITY is similar:
15178 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15179 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15180 L2: DW_OP_drop */
15181
15182 static dw_loc_descr_ref
15183 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15184 machine_mode mem_mode)
15185 {
15186 dw_loc_descr_ref op0, ret, tmp;
15187 dw_loc_descr_ref l1jump, l1label;
15188 dw_loc_descr_ref l2jump, l2label;
15189
15190 if (GET_MODE (XEXP (rtl, 0)) != mode)
15191 return NULL;
15192
15193 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (op0 == NULL)
15196 return NULL;
15197 ret = op0;
15198 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15199 VAR_INIT_STATUS_INITIALIZED);
15200 if (tmp == NULL)
15201 return NULL;
15202 add_loc_descr (&ret, tmp);
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15204 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15205 add_loc_descr (&ret, l1label);
15206 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15207 add_loc_descr (&ret, l2jump);
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15209 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15210 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15211 VAR_INIT_STATUS_INITIALIZED);
15212 if (tmp == NULL)
15213 return NULL;
15214 add_loc_descr (&ret, tmp);
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15217 ? DW_OP_plus : DW_OP_xor, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15219 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15220 VAR_INIT_STATUS_INITIALIZED);
15221 add_loc_descr (&ret, tmp);
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15223 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15224 add_loc_descr (&ret, l1jump);
15225 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15226 add_loc_descr (&ret, l2label);
15227 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15228 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15229 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15230 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15231 return ret;
15232 }
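/* For reference (illustrative only): POPCOUNT counts the set bits of its
   operand and PARITY reduces that count modulo 2, so for the value 0b1011
   the expected results are 3 and 1 respectively.  The loop bodies above
   accumulate the low-order bit of the value with DW_OP_plus or DW_OP_xor
   and then shift the value right by one bit per iteration.  */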
15233
15234 /* BSWAP (constS is initial shift count, either 56 or 24):
15235 constS const0
15236 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15237 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15238 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15239 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15240 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15241
15242 static dw_loc_descr_ref
15243 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15244 machine_mode mem_mode)
15245 {
15246 dw_loc_descr_ref op0, ret, tmp;
15247 dw_loc_descr_ref l1jump, l1label;
15248 dw_loc_descr_ref l2jump, l2label;
15249
15250 if (BITS_PER_UNIT != 8
15251 || (GET_MODE_BITSIZE (mode) != 32
15252 && GET_MODE_BITSIZE (mode) != 64))
15253 return NULL;
15254
15255 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15256 VAR_INIT_STATUS_INITIALIZED);
15257 if (op0 == NULL)
15258 return NULL;
15259
15260 ret = op0;
15261 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15262 mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 if (tmp == NULL)
15265 return NULL;
15266 add_loc_descr (&ret, tmp);
15267 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15268 VAR_INIT_STATUS_INITIALIZED);
15269 if (tmp == NULL)
15270 return NULL;
15271 add_loc_descr (&ret, tmp);
15272 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15273 add_loc_descr (&ret, l1label);
15274 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15275 mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 add_loc_descr (&ret, tmp);
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15281 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15282 VAR_INIT_STATUS_INITIALIZED);
15283 if (tmp == NULL)
15284 return NULL;
15285 add_loc_descr (&ret, tmp);
15286 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15292 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15293 VAR_INIT_STATUS_INITIALIZED);
15294 add_loc_descr (&ret, tmp);
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15296 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15297 add_loc_descr (&ret, l2jump);
15298 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15299 VAR_INIT_STATUS_INITIALIZED);
15300 add_loc_descr (&ret, tmp);
15301 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15303 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15304 add_loc_descr (&ret, l1jump);
15305 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15306 add_loc_descr (&ret, l2label);
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15309 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15310 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15311 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15312 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15313 return ret;
15314 }
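/* Illustration only: for a 32-bit value 0x11223344 the expression above
   starts with shift count 24 and accumulator 0, extracts one byte per
   iteration as (x >> (24 - count)) & 0xff, ORs it back in shifted left by
   count, and steps count down by 8, so the accumulator ends up holding
   0x44332211 once the count has reached zero.  */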
15315
15316 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15317 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15318 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15319 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15320
15321 ROTATERT is similar:
15322 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15323 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15324 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15325
15326 static dw_loc_descr_ref
15327 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15328 machine_mode mem_mode)
15329 {
15330 rtx rtlop1 = XEXP (rtl, 1);
15331 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15332 int i;
15333
15334 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15335 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15336 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15337 VAR_INIT_STATUS_INITIALIZED);
15338 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15339 VAR_INIT_STATUS_INITIALIZED);
15340 if (op0 == NULL || op1 == NULL)
15341 return NULL;
15342 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15343 for (i = 0; i < 2; i++)
15344 {
15345 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15346 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15347 mode, mem_mode,
15348 VAR_INIT_STATUS_INITIALIZED);
15349 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15350 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15351 ? DW_OP_const4u
15352 : HOST_BITS_PER_WIDE_INT == 64
15353 ? DW_OP_const8u : DW_OP_constu,
15354 GET_MODE_MASK (mode), 0);
15355 else
15356 mask[i] = NULL;
15357 if (mask[i] == NULL)
15358 return NULL;
15359 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15360 }
15361 ret = op0;
15362 add_loc_descr (&ret, op1);
15363 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15364 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15365 if (GET_CODE (rtl) == ROTATERT)
15366 {
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15368 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15369 GET_MODE_BITSIZE (mode), 0));
15370 }
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15372 if (mask[0] != NULL)
15373 add_loc_descr (&ret, mask[0]);
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15375 if (mask[1] != NULL)
15376 {
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15378 add_loc_descr (&ret, mask[1]);
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15380 }
15381 if (GET_CODE (rtl) == ROTATE)
15382 {
15383 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15384 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15385 GET_MODE_BITSIZE (mode), 0));
15386 }
15387 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15388 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15389 return ret;
15390 }
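/* Illustration only, for an 8-bit (QImode) left rotate with
   DWARF2_ADDR_SIZE == 4: with x == 0xb4 and n == 4 the expression computes
   ((x << n) & 0xff) | ((x & 0xff) >> (8 - n)) == 0x40 | 0x0b == 0x4b,
   with the constMASK operations keeping the intermediate values confined
   to the 8-bit mode even though the arithmetic is done on wider stack
   entries.  */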
15391
15392 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15393 for DEBUG_PARAMETER_REF RTL. */
15394
15395 static dw_loc_descr_ref
15396 parameter_ref_descriptor (rtx rtl)
15397 {
15398 dw_loc_descr_ref ret;
15399 dw_die_ref ref;
15400
15401 if (dwarf_strict)
15402 return NULL;
15403 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15404 /* With LTO during LTRANS we get the late DIE that refers to the early
15405 DIE, thus we add another indirection here. This seems to confuse
15406 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15407 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15408 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15409 if (ref)
15410 {
15411 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15412 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15413 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15414 }
15415 else
15416 {
15417 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15418 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15419 }
15420 return ret;
15421 }
15422
15423 /* The following routine converts the RTL for a variable or parameter
15424 (resident in memory) into an equivalent Dwarf representation of a
15425 mechanism for getting the address of that same variable onto the top of a
15426 hypothetical "address evaluation" stack.
15427
15428 When creating memory location descriptors, we are effectively transforming
15429 the RTL for a memory-resident object into its Dwarf postfix expression
15430 equivalent. This routine recursively descends an RTL tree, turning
15431 it into Dwarf postfix code as it goes.
15432
15433 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15434
15435 MEM_MODE is the mode of the memory reference, needed to handle some
15436 autoincrement addressing modes.
15437
15438 Return 0 if we can't represent the location. */
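/* For example (illustrative only; R stands for some hard register, on a
   64-bit target where DImode is Pmode): for (mem:SI (reg:DI R)) this
   routine first expands the address, typically to DW_OP_breg<R> 0 via
   based_loc_descr, and then appends DW_OP_deref_size 4 because SImode is
   narrower than DWARF2_ADDR_SIZE; a MEM as wide as the address size would
   get a plain DW_OP_deref instead.  */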
15439
15440 dw_loc_descr_ref
15441 mem_loc_descriptor (rtx rtl, machine_mode mode,
15442 machine_mode mem_mode,
15443 enum var_init_status initialized)
15444 {
15445 dw_loc_descr_ref mem_loc_result = NULL;
15446 enum dwarf_location_atom op;
15447 dw_loc_descr_ref op0, op1;
15448 rtx inner = NULL_RTX;
15449 poly_int64 offset;
15450
15451 if (mode == VOIDmode)
15452 mode = GET_MODE (rtl);
15453
15454 /* Note that for a dynamically sized array, the location we will generate a
15455 description of here will be the lowest numbered location which is
15456 actually within the array. That's *not* necessarily the same as the
15457 zeroth element of the array. */
15458
15459 rtl = targetm.delegitimize_address (rtl);
15460
15461 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15462 return NULL;
15463
15464 scalar_int_mode int_mode, inner_mode, op1_mode;
15465 switch (GET_CODE (rtl))
15466 {
15467 case POST_INC:
15468 case POST_DEC:
15469 case POST_MODIFY:
15470 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15471
15472 case SUBREG:
15473 /* The case of a subreg may arise when we have a local (register)
15474 variable or a formal (register) parameter which doesn't quite fill
15475 up an entire register. For now, just assume that it is
15476 legitimate to make the Dwarf info refer to the whole register which
15477 contains the given subreg. */
15478 if (!subreg_lowpart_p (rtl))
15479 break;
15480 inner = SUBREG_REG (rtl);
15481 /* FALLTHRU */
15482 case TRUNCATE:
15483 if (inner == NULL_RTX)
15484 inner = XEXP (rtl, 0);
15485 if (is_a <scalar_int_mode> (mode, &int_mode)
15486 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15487 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15488 #ifdef POINTERS_EXTEND_UNSIGNED
15489 || (int_mode == Pmode && mem_mode != VOIDmode)
15490 #endif
15491 )
15492 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15493 {
15494 mem_loc_result = mem_loc_descriptor (inner,
15495 inner_mode,
15496 mem_mode, initialized);
15497 break;
15498 }
15499 if (dwarf_strict && dwarf_version < 5)
15500 break;
15501 if (is_a <scalar_int_mode> (mode, &int_mode)
15502 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15503 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15504 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15505 {
15506 dw_die_ref type_die;
15507 dw_loc_descr_ref cvt;
15508
15509 mem_loc_result = mem_loc_descriptor (inner,
15510 GET_MODE (inner),
15511 mem_mode, initialized);
15512 if (mem_loc_result == NULL)
15513 break;
15514 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15515 if (type_die == NULL)
15516 {
15517 mem_loc_result = NULL;
15518 break;
15519 }
15520 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15521 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15522 else
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15524 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15525 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15527 add_loc_descr (&mem_loc_result, cvt);
15528 if (is_a <scalar_int_mode> (mode, &int_mode)
15529 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15530 {
15531 /* Convert it to untyped afterwards. */
15532 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15533 add_loc_descr (&mem_loc_result, cvt);
15534 }
15535 }
15536 break;
15537
15538 case REG:
15539 if (!is_a <scalar_int_mode> (mode, &int_mode)
15540 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15541 && rtl != arg_pointer_rtx
15542 && rtl != frame_pointer_rtx
15543 #ifdef POINTERS_EXTEND_UNSIGNED
15544 && (int_mode != Pmode || mem_mode == VOIDmode)
15545 #endif
15546 ))
15547 {
15548 dw_die_ref type_die;
15549 unsigned int dbx_regnum;
15550
15551 if (dwarf_strict && dwarf_version < 5)
15552 break;
15553 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15554 break;
15555 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15556 if (type_die == NULL)
15557 break;
15558
15559 dbx_regnum = dbx_reg_number (rtl);
15560 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15561 break;
15562 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15563 dbx_regnum, 0);
15564 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15565 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15566 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15567 break;
15568 }
15569 /* Whenever a register number forms a part of the description of the
15570 method for calculating the (dynamic) address of a memory resident
15571 object, DWARF rules require the register number be referred to as
15572 a "base register". This distinction is not based in any way upon
15573 what category of register the hardware believes the given register
15574 belongs to. This is strictly DWARF terminology we're dealing with
15575 here. Note that in cases where the location of a memory-resident
15576 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15577 OP_CONST (0)) the actual DWARF location descriptor that we generate
15578 may just be OP_BASEREG (basereg). This may look deceptively like
15579 the object in question was allocated to a register (rather than in
15580 memory) so DWARF consumers need to be aware of the subtle
15581 distinction between OP_REG and OP_BASEREG. */
15582 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15583 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15584 else if (stack_realign_drap
15585 && crtl->drap_reg
15586 && crtl->args.internal_arg_pointer == rtl
15587 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15588 {
15589 /* If RTL is internal_arg_pointer, which has been optimized
15590 out, use DRAP instead. */
15591 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15592 VAR_INIT_STATUS_INITIALIZED);
15593 }
15594 break;
15595
15596 case SIGN_EXTEND:
15597 case ZERO_EXTEND:
15598 if (!is_a <scalar_int_mode> (mode, &int_mode)
15599 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15600 break;
15601 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15602 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15603 if (op0 == 0)
15604 break;
15605 else if (GET_CODE (rtl) == ZERO_EXTEND
15606 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15607 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15608 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15609 to expand zero extend as two shifts instead of
15610 masking. */
15611 && GET_MODE_SIZE (inner_mode) <= 4)
15612 {
15613 mem_loc_result = op0;
15614 add_loc_descr (&mem_loc_result,
15615 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15616 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15617 }
15618 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15619 {
15620 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15621 shift *= BITS_PER_UNIT;
15622 if (GET_CODE (rtl) == SIGN_EXTEND)
15623 op = DW_OP_shra;
15624 else
15625 op = DW_OP_shr;
15626 mem_loc_result = op0;
15627 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15628 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15629 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15630 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15631 }
15632 else if (!dwarf_strict || dwarf_version >= 5)
15633 {
15634 dw_die_ref type_die1, type_die2;
15635 dw_loc_descr_ref cvt;
15636
15637 type_die1 = base_type_for_mode (inner_mode,
15638 GET_CODE (rtl) == ZERO_EXTEND);
15639 if (type_die1 == NULL)
15640 break;
15641 type_die2 = base_type_for_mode (int_mode, 1);
15642 if (type_die2 == NULL)
15643 break;
15644 mem_loc_result = op0;
15645 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15646 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15647 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15648 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15649 add_loc_descr (&mem_loc_result, cvt);
15650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15654 add_loc_descr (&mem_loc_result, cvt);
15655 }
15656 break;
15657
15658 case MEM:
15659 {
15660 rtx new_rtl = avoid_constant_pool_reference (rtl);
15661 if (new_rtl != rtl)
15662 {
15663 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15664 initialized);
15665 if (mem_loc_result != NULL)
15666 return mem_loc_result;
15667 }
15668 }
15669 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15670 get_address_mode (rtl), mode,
15671 VAR_INIT_STATUS_INITIALIZED);
15672 if (mem_loc_result == NULL)
15673 mem_loc_result = tls_mem_loc_descriptor (rtl);
15674 if (mem_loc_result != NULL)
15675 {
15676 if (!is_a <scalar_int_mode> (mode, &int_mode)
15677 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15678 {
15679 dw_die_ref type_die;
15680 dw_loc_descr_ref deref;
15681 HOST_WIDE_INT size;
15682
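/* The loaded value is wider than a DWARF address (or not a scalar
   integer mode), so a plain DW_OP_deref cannot describe it; emit a
   typed dereference instead, whose operands are the byte size and a
   reference to a base type DIE for MODE.  */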
15683 if (dwarf_strict && dwarf_version < 5)
15684 return NULL;
15685 if (!GET_MODE_SIZE (mode).is_constant (&size))
15686 return NULL;
15687 type_die
15688 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15689 if (type_die == NULL)
15690 return NULL;
15691 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15692 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15693 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15694 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15695 add_loc_descr (&mem_loc_result, deref);
15696 }
15697 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15698 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15699 else
15700 add_loc_descr (&mem_loc_result,
15701 new_loc_descr (DW_OP_deref_size,
15702 GET_MODE_SIZE (int_mode), 0));
15703 }
15704 break;
15705
15706 case LO_SUM:
15707 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15708
15709 case LABEL_REF:
15710 /* Some ports can transform a symbol ref into a label ref, because
15711 the symbol ref is too far away and has to be dumped into a constant
15712 pool. */
15713 case CONST:
15714 case SYMBOL_REF:
15715 case UNSPEC:
15716 if (!is_a <scalar_int_mode> (mode, &int_mode)
15717 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15718 #ifdef POINTERS_EXTEND_UNSIGNED
15719 && (int_mode != Pmode || mem_mode == VOIDmode)
15720 #endif
15721 ))
15722 break;
15723
15724 if (GET_CODE (rtl) == UNSPEC)
15725 {
15726 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15727 can't express it in the debug info. This can happen e.g. with some
15728 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15729 approves. */
15730 bool not_ok = false;
15731 subrtx_var_iterator::array_type array;
15732 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15733 if (*iter != rtl && !CONSTANT_P (*iter))
15734 {
15735 not_ok = true;
15736 break;
15737 }
15738
15739 if (not_ok)
15740 break;
15741
15742 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15743 if (!const_ok_for_output_1 (*iter))
15744 {
15745 not_ok = true;
15746 break;
15747 }
15748
15749 if (not_ok)
15750 break;
15751
15752 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15753 goto symref;
15754 }
15755
15756 if (GET_CODE (rtl) == SYMBOL_REF
15757 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15758 {
15759 dw_loc_descr_ref temp;
15760
15761 /* If this is not defined, we have no way to emit the data. */
15762 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15763 break;
15764
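/* What follows is a sketch of the usual TLS pattern: a DW_OP_addr whose
   operand is emitted dtp-relative via targetm.asm_out.output_dwarf_dtprel,
   followed by DW_OP_form_tls_address (or the older
   DW_OP_GNU_push_tls_address), which the consumer turns into an address
   in the current thread's TLS block.  */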
15765 temp = new_addr_loc_descr (rtl, dtprel_true);
15766
15767 /* We check for DWARF 5 here because gdb did not implement
15768 DW_OP_form_tls_address until after 7.12. */
15769 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15770 ? DW_OP_form_tls_address
15771 : DW_OP_GNU_push_tls_address),
15772 0, 0);
15773 add_loc_descr (&mem_loc_result, temp);
15774
15775 break;
15776 }
15777
15778 if (!const_ok_for_output (rtl))
15779 {
15780 if (GET_CODE (rtl) == CONST)
15781 switch (GET_CODE (XEXP (rtl, 0)))
15782 {
15783 case NOT:
15784 op = DW_OP_not;
15785 goto try_const_unop;
15786 case NEG:
15787 op = DW_OP_neg;
15788 goto try_const_unop;
15789 try_const_unop:
15790 rtx arg;
15791 arg = XEXP (XEXP (rtl, 0), 0);
15792 if (!CONSTANT_P (arg))
15793 arg = gen_rtx_CONST (int_mode, arg);
15794 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15795 initialized);
15796 if (op0)
15797 {
15798 mem_loc_result = op0;
15799 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15800 }
15801 break;
15802 default:
15803 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15804 mem_mode, initialized);
15805 break;
15806 }
15807 break;
15808 }
15809
15810 symref:
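/* A plain DW_OP_addr whose operand is the symbol itself; the rtx is also
   recorded in used_rtx_array so that it is kept alive for the debug
   info.  */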
15811 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15812 vec_safe_push (used_rtx_array, rtl);
15813 break;
15814
15815 case CONCAT:
15816 case CONCATN:
15817 case VAR_LOCATION:
15818 case DEBUG_IMPLICIT_PTR:
15819 expansion_failed (NULL_TREE, rtl,
15820 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15821 return 0;
15822
15823 case ENTRY_VALUE:
15824 if (dwarf_strict && dwarf_version < 5)
15825 return NULL;
15826 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15827 {
15828 if (!is_a <scalar_int_mode> (mode, &int_mode)
15829 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15830 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15831 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15832 else
15833 {
15834 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15835 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15836 return NULL;
15837 op0 = one_reg_loc_descriptor (dbx_regnum,
15838 VAR_INIT_STATUS_INITIALIZED);
15839 }
15840 }
15841 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15842 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15843 {
15844 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15845 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15846 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15847 return NULL;
15848 }
15849 else
15850 gcc_unreachable ();
15851 if (op0 == NULL)
15852 return NULL;
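/* Wrap the expression computed above as the single operand of
   DW_OP_entry_value (DW_OP_GNU_entry_value before DWARF 5), telling the
   consumer to evaluate it as it would have been at the moment the
   containing function was entered.  */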
15853 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15854 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15855 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15856 break;
15857
15858 case DEBUG_PARAMETER_REF:
15859 mem_loc_result = parameter_ref_descriptor (rtl);
15860 break;
15861
15862 case PRE_MODIFY:
15863 /* Extract the PLUS expression nested inside and fall into
15864 PLUS code below. */
15865 rtl = XEXP (rtl, 1);
15866 goto plus;
15867
15868 case PRE_INC:
15869 case PRE_DEC:
15870 /* Turn these into a PLUS expression and fall into the PLUS code
15871 below. */
15872 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15873 gen_int_mode (GET_CODE (rtl) == PRE_INC
15874 ? GET_MODE_UNIT_SIZE (mem_mode)
15875 : -GET_MODE_UNIT_SIZE (mem_mode),
15876 mode));
15877
15878 /* fall through */
15879
15880 case PLUS:
15881 plus:
15882 if (is_based_loc (rtl)
15883 && is_a <scalar_int_mode> (mode, &int_mode)
15884 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15885 || XEXP (rtl, 0) == arg_pointer_rtx
15886 || XEXP (rtl, 0) == frame_pointer_rtx))
15887 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15888 INTVAL (XEXP (rtl, 1)),
15889 VAR_INIT_STATUS_INITIALIZED);
15890 else
15891 {
15892 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15893 VAR_INIT_STATUS_INITIALIZED);
15894 if (mem_loc_result == 0)
15895 break;
15896
15897 if (CONST_INT_P (XEXP (rtl, 1))
15898 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15899 <= DWARF2_ADDR_SIZE))
15900 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15901 else
15902 {
15903 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15904 VAR_INIT_STATUS_INITIALIZED);
15905 if (op1 == 0)
15906 return NULL;
15907 add_loc_descr (&mem_loc_result, op1);
15908 add_loc_descr (&mem_loc_result,
15909 new_loc_descr (DW_OP_plus, 0, 0));
15910 }
15911 }
15912 break;
15913
15914 /* If a pseudo-reg is optimized away, it is possible for it to
15915 be replaced with a MEM containing a multiply or shift. */
15916 case MINUS:
15917 op = DW_OP_minus;
15918 goto do_binop;
15919
15920 case MULT:
15921 op = DW_OP_mul;
15922 goto do_binop;
15923
15924 case DIV:
15925 if ((!dwarf_strict || dwarf_version >= 5)
15926 && is_a <scalar_int_mode> (mode, &int_mode)
15927 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 mem_loc_result = typed_binop (DW_OP_div, rtl,
15930 base_type_for_mode (mode, 0),
15931 int_mode, mem_mode);
15932 break;
15933 }
15934 op = DW_OP_div;
15935 goto do_binop;
15936
15937 case UMOD:
15938 op = DW_OP_mod;
15939 goto do_binop;
15940
15941 case ASHIFT:
15942 op = DW_OP_shl;
15943 goto do_shift;
15944
15945 case ASHIFTRT:
15946 op = DW_OP_shra;
15947 goto do_shift;
15948
15949 case LSHIFTRT:
15950 op = DW_OP_shr;
15951 goto do_shift;
15952
15953 do_shift:
15954 if (!is_a <scalar_int_mode> (mode, &int_mode))
15955 break;
15956 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15957 VAR_INIT_STATUS_INITIALIZED);
15958 {
15959 rtx rtlop1 = XEXP (rtl, 1);
15960 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15961 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15962 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15963 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15964 VAR_INIT_STATUS_INITIALIZED);
15965 }
15966
15967 if (op0 == 0 || op1 == 0)
15968 break;
15969
15970 mem_loc_result = op0;
15971 add_loc_descr (&mem_loc_result, op1);
15972 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15973 break;
15974
15975 case AND:
15976 op = DW_OP_and;
15977 goto do_binop;
15978
15979 case IOR:
15980 op = DW_OP_or;
15981 goto do_binop;
15982
15983 case XOR:
15984 op = DW_OP_xor;
15985 goto do_binop;
15986
15987 do_binop:
15988 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15989 VAR_INIT_STATUS_INITIALIZED);
15990 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15991 VAR_INIT_STATUS_INITIALIZED);
15992
15993 if (op0 == 0 || op1 == 0)
15994 break;
15995
15996 mem_loc_result = op0;
15997 add_loc_descr (&mem_loc_result, op1);
15998 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15999 break;
16000
16001 case MOD:
16002 if ((!dwarf_strict || dwarf_version >= 5)
16003 && is_a <scalar_int_mode> (mode, &int_mode)
16004 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16005 {
16006 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16007 base_type_for_mode (mode, 0),
16008 int_mode, mem_mode);
16009 break;
16010 }
16011
16012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16013 VAR_INIT_STATUS_INITIALIZED);
16014 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16015 VAR_INIT_STATUS_INITIALIZED);
16016
16017 if (op0 == 0 || op1 == 0)
16018 break;
16019
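/* With op0 and op1 on the stack, DW_OP_over DW_OP_over duplicates both,
   DW_OP_div computes op0 / op1, DW_OP_mul turns that into
   (op0 / op1) * op1, and DW_OP_minus leaves op0 - (op0 / op1) * op1,
   i.e. the remainder; signed MOD is open-coded this way, presumably
   because DW_OP_mod is defined as an unsigned modulus.  */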
16020 mem_loc_result = op0;
16021 add_loc_descr (&mem_loc_result, op1);
16022 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16023 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16024 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16027 break;
16028
16029 case UDIV:
16030 if ((!dwarf_strict || dwarf_version >= 5)
16031 && is_a <scalar_int_mode> (mode, &int_mode))
16032 {
16033 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16034 {
16035 op = DW_OP_div;
16036 goto do_binop;
16037 }
16038 mem_loc_result = typed_binop (DW_OP_div, rtl,
16039 base_type_for_mode (int_mode, 1),
16040 int_mode, mem_mode);
16041 }
16042 break;
16043
16044 case NOT:
16045 op = DW_OP_not;
16046 goto do_unop;
16047
16048 case ABS:
16049 op = DW_OP_abs;
16050 goto do_unop;
16051
16052 case NEG:
16053 op = DW_OP_neg;
16054 goto do_unop;
16055
16056 do_unop:
16057 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16058 VAR_INIT_STATUS_INITIALIZED);
16059
16060 if (op0 == 0)
16061 break;
16062
16063 mem_loc_result = op0;
16064 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16065 break;
16066
16067 case CONST_INT:
16068 if (!is_a <scalar_int_mode> (mode, &int_mode)
16069 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16070 #ifdef POINTERS_EXTEND_UNSIGNED
16071 || (int_mode == Pmode
16072 && mem_mode != VOIDmode
16073 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16074 #endif
16075 )
16076 {
16077 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16078 break;
16079 }
16080 if ((!dwarf_strict || dwarf_version >= 5)
16081 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16082 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16083 {
16084 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16085 scalar_int_mode amode;
16086 if (type_die == NULL)
16087 return NULL;
16088 if (INTVAL (rtl) >= 0
16089 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16090 .exists (&amode))
16091 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16092 /* const DW_OP_convert <XXX> vs.
16093 DW_OP_const_type <XXX, 1, const>. */
16094 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16095 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16096 {
16097 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16098 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16099 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16100 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16101 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16102 add_loc_descr (&mem_loc_result, op0);
16103 return mem_loc_result;
16104 }
16105 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16106 INTVAL (rtl));
16107 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16108 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16109 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16110 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16111 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16112 else
16113 {
16114 mem_loc_result->dw_loc_oprnd2.val_class
16115 = dw_val_class_const_double;
16116 mem_loc_result->dw_loc_oprnd2.v.val_double
16117 = double_int::from_shwi (INTVAL (rtl));
16118 }
16119 }
16120 break;
16121
16122 case CONST_DOUBLE:
16123 if (!dwarf_strict || dwarf_version >= 5)
16124 {
16125 dw_die_ref type_die;
16126
16127 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16128 CONST_DOUBLE rtx could represent either a large integer
16129 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16130 the value is always a floating point constant.
16131
16132 When it is an integer, a CONST_DOUBLE is used whenever
16133 the constant requires 2 HWIs to be adequately represented.
16134 We output CONST_DOUBLEs as blocks. */
16135 if (mode == VOIDmode
16136 || (GET_MODE (rtl) == VOIDmode
16137 && maybe_ne (GET_MODE_BITSIZE (mode),
16138 HOST_BITS_PER_DOUBLE_INT)))
16139 break;
16140 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16141 if (type_die == NULL)
16142 return NULL;
16143 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16144 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16145 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16146 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16147 #if TARGET_SUPPORTS_WIDE_INT == 0
16148 if (!SCALAR_FLOAT_MODE_P (mode))
16149 {
16150 mem_loc_result->dw_loc_oprnd2.val_class
16151 = dw_val_class_const_double;
16152 mem_loc_result->dw_loc_oprnd2.v.val_double
16153 = rtx_to_double_int (rtl);
16154 }
16155 else
16156 #endif
16157 {
16158 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16159 unsigned int length = GET_MODE_SIZE (float_mode);
16160 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16161
16162 insert_float (rtl, array);
16163 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16164 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16165 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16166 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16167 }
16168 }
16169 break;
16170
16171 case CONST_WIDE_INT:
16172 if (!dwarf_strict || dwarf_version >= 5)
16173 {
16174 dw_die_ref type_die;
16175
16176 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16177 if (type_die == NULL)
16178 return NULL;
16179 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16180 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16181 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16182 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16183 mem_loc_result->dw_loc_oprnd2.val_class
16184 = dw_val_class_wide_int;
16185 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16186 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16187 }
16188 break;
16189
16190 case CONST_POLY_INT:
16191 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16192 break;
16193
16194 case EQ:
16195 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16196 break;
16197
16198 case GE:
16199 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16200 break;
16201
16202 case GT:
16203 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16204 break;
16205
16206 case LE:
16207 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16208 break;
16209
16210 case LT:
16211 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16212 break;
16213
16214 case NE:
16215 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16216 break;
16217
16218 case GEU:
16219 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16220 break;
16221
16222 case GTU:
16223 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16224 break;
16225
16226 case LEU:
16227 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16228 break;
16229
16230 case LTU:
16231 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16232 break;
16233
16234 case UMIN:
16235 case UMAX:
16236 if (!SCALAR_INT_MODE_P (mode))
16237 break;
16238 /* FALLTHRU */
16239 case SMIN:
16240 case SMAX:
16241 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16242 break;
16243
16244 case ZERO_EXTRACT:
16245 case SIGN_EXTRACT:
16246 if (CONST_INT_P (XEXP (rtl, 1))
16247 && CONST_INT_P (XEXP (rtl, 2))
16248 && is_a <scalar_int_mode> (mode, &int_mode)
16249 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16250 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16251 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16252 && ((unsigned) INTVAL (XEXP (rtl, 1))
16253 + (unsigned) INTVAL (XEXP (rtl, 2))
16254 <= GET_MODE_BITSIZE (int_mode)))
16255 {
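/* A bit-field extract is expanded as a pair of shifts: first shift the
   field up so that it ends at the top of a DWARF-address-sized value,
   then shift back down, arithmetically for SIGN_EXTRACT or logically for
   ZERO_EXTRACT, which drops the low-order bits and extends the field at
   the same time.  */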
16256 int shift, size;
16257 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16258 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16259 if (op0 == 0)
16260 break;
16261 if (GET_CODE (rtl) == SIGN_EXTRACT)
16262 op = DW_OP_shra;
16263 else
16264 op = DW_OP_shr;
16265 mem_loc_result = op0;
16266 size = INTVAL (XEXP (rtl, 1));
16267 shift = INTVAL (XEXP (rtl, 2));
16268 if (BITS_BIG_ENDIAN)
16269 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16270 if (shift + size != (int) DWARF2_ADDR_SIZE)
16271 {
16272 add_loc_descr (&mem_loc_result,
16273 int_loc_descriptor (DWARF2_ADDR_SIZE
16274 - shift - size));
16275 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16276 }
16277 if (size != (int) DWARF2_ADDR_SIZE)
16278 {
16279 add_loc_descr (&mem_loc_result,
16280 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16281 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16282 }
16283 }
16284 break;
16285
16286 case IF_THEN_ELSE:
16287 {
16288 dw_loc_descr_ref op2, bra_node, drop_node;
16289 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16290 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16291 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16292 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16293 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16294 VAR_INIT_STATUS_INITIALIZED);
16295 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16296 VAR_INIT_STATUS_INITIALIZED);
16297 if (op0 == NULL || op1 == NULL || op2 == NULL)
16298 break;
16299
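/* The sequence built below pushes the "then" value (op1), the "else"
   value (op2) and the condition (op0).  DW_OP_bra pops the condition
   and, when it is nonzero, jumps straight to the final DW_OP_drop, which
   discards op2 and leaves op1; otherwise DW_OP_swap followed by
   DW_OP_drop discards op1 and leaves op2.  */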
16300 mem_loc_result = op1;
16301 add_loc_descr (&mem_loc_result, op2);
16302 add_loc_descr (&mem_loc_result, op0);
16303 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16304 add_loc_descr (&mem_loc_result, bra_node);
16305 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16306 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16307 add_loc_descr (&mem_loc_result, drop_node);
16308 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16309 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16310 }
16311 break;
16312
16313 case FLOAT_EXTEND:
16314 case FLOAT_TRUNCATE:
16315 case FLOAT:
16316 case UNSIGNED_FLOAT:
16317 case FIX:
16318 case UNSIGNED_FIX:
16319 if (!dwarf_strict || dwarf_version >= 5)
16320 {
16321 dw_die_ref type_die;
16322 dw_loc_descr_ref cvt;
16323
16324 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16325 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16326 if (op0 == NULL)
16327 break;
16328 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16329 && (GET_CODE (rtl) == FLOAT
16330 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16331 {
16332 type_die = base_type_for_mode (int_mode,
16333 GET_CODE (rtl) == UNSIGNED_FLOAT);
16334 if (type_die == NULL)
16335 break;
16336 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16337 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16338 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16339 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16340 add_loc_descr (&op0, cvt);
16341 }
16342 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16343 if (type_die == NULL)
16344 break;
16345 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16346 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16347 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16348 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16349 add_loc_descr (&op0, cvt);
16350 if (is_a <scalar_int_mode> (mode, &int_mode)
16351 && (GET_CODE (rtl) == FIX
16352 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16353 {
16354 op0 = convert_descriptor_to_mode (int_mode, op0);
16355 if (op0 == NULL)
16356 break;
16357 }
16358 mem_loc_result = op0;
16359 }
16360 break;
16361
16362 case CLZ:
16363 case CTZ:
16364 case FFS:
16365 if (is_a <scalar_int_mode> (mode, &int_mode))
16366 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16367 break;
16368
16369 case POPCOUNT:
16370 case PARITY:
16371 if (is_a <scalar_int_mode> (mode, &int_mode))
16372 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16373 break;
16374
16375 case BSWAP:
16376 if (is_a <scalar_int_mode> (mode, &int_mode))
16377 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16378 break;
16379
16380 case ROTATE:
16381 case ROTATERT:
16382 if (is_a <scalar_int_mode> (mode, &int_mode))
16383 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16384 break;
16385
16386 case COMPARE:
16387 /* In theory, we could implement the above. */
16388 /* DWARF cannot represent the unsigned compare operations
16389 natively. */
16390 case SS_MULT:
16391 case US_MULT:
16392 case SS_DIV:
16393 case US_DIV:
16394 case SS_PLUS:
16395 case US_PLUS:
16396 case SS_MINUS:
16397 case US_MINUS:
16398 case SS_NEG:
16399 case US_NEG:
16400 case SS_ABS:
16401 case SS_ASHIFT:
16402 case US_ASHIFT:
16403 case SS_TRUNCATE:
16404 case US_TRUNCATE:
16405 case UNORDERED:
16406 case ORDERED:
16407 case UNEQ:
16408 case UNGE:
16409 case UNGT:
16410 case UNLE:
16411 case UNLT:
16412 case LTGT:
16413 case FRACT_CONVERT:
16414 case UNSIGNED_FRACT_CONVERT:
16415 case SAT_FRACT:
16416 case UNSIGNED_SAT_FRACT:
16417 case SQRT:
16418 case ASM_OPERANDS:
16419 case VEC_MERGE:
16420 case VEC_SELECT:
16421 case VEC_CONCAT:
16422 case VEC_DUPLICATE:
16423 case VEC_SERIES:
16424 case HIGH:
16425 case FMA:
16426 case STRICT_LOW_PART:
16427 case CONST_VECTOR:
16428 case CONST_FIXED:
16429 case CLRSB:
16430 case CLOBBER:
16431 case CLOBBER_HIGH:
16432 break;
16433
16434 case CONST_STRING:
16435 resolve_one_addr (&rtl);
16436 goto symref;
16437
16438 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16439 the expression. An UNSPEC rtx represents a raw DWARF operation, for
16440 which new_loc_descr is called to build the operation directly.
16441 Otherwise mem_loc_descriptor is called recursively. */
16442 case PARALLEL:
16443 {
16444 int index = 0;
16445 dw_loc_descr_ref exp_result = NULL;
16446
16447 for (; index < XVECLEN (rtl, 0); index++)
16448 {
16449 rtx elem = XVECEXP (rtl, 0, index);
16450 if (GET_CODE (elem) == UNSPEC)
16451 {
16452 /* Each DWARF operation UNSPEC contains two operands; if an
16453 operand is not used for the operation, const0_rtx is
16454 passed. */
16455 gcc_assert (XVECLEN (elem, 0) == 2);
16456
16457 HOST_WIDE_INT dw_op = XINT (elem, 1);
16458 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16459 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16460 exp_result
16461 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16462 oprnd2);
16463 }
16464 else
16465 exp_result
16466 = mem_loc_descriptor (elem, mode, mem_mode,
16467 VAR_INIT_STATUS_INITIALIZED);
16468
16469 if (!mem_loc_result)
16470 mem_loc_result = exp_result;
16471 else
16472 add_loc_descr (&mem_loc_result, exp_result);
16473 }
16474
16475 break;
16476 }
16477
16478 default:
16479 if (flag_checking)
16480 {
16481 print_rtl (stderr, rtl);
16482 gcc_unreachable ();
16483 }
16484 break;
16485 }
16486
16487 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16488 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16489
16490 return mem_loc_result;
16491 }
16492
16493 /* Return a descriptor that describes the concatenation of two locations.
16494 This is typically a complex variable. */
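/* The result is simply the two location expressions glued together with
   DW_OP_piece operators, e.g. (register numbers purely illustrative)
   DW_OP_reg17 DW_OP_piece 8 DW_OP_reg18 DW_OP_piece 8 for a complex
   double whose real and imaginary parts live in two registers.  */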
16495
16496 static dw_loc_descr_ref
16497 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16498 {
16499 /* At present we only track constant-sized pieces. */
16500 unsigned int size0, size1;
16501 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16502 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16503 return 0;
16504
16505 dw_loc_descr_ref cc_loc_result = NULL;
16506 dw_loc_descr_ref x0_ref
16507 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16508 dw_loc_descr_ref x1_ref
16509 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510
16511 if (x0_ref == 0 || x1_ref == 0)
16512 return 0;
16513
16514 cc_loc_result = x0_ref;
16515 add_loc_descr_op_piece (&cc_loc_result, size0);
16516
16517 add_loc_descr (&cc_loc_result, x1_ref);
16518 add_loc_descr_op_piece (&cc_loc_result, size1);
16519
16520 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16521 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16522
16523 return cc_loc_result;
16524 }
16525
16526 /* Return a descriptor that describes the concatenation of N
16527 locations. */
16528
16529 static dw_loc_descr_ref
16530 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16531 {
16532 unsigned int i;
16533 dw_loc_descr_ref cc_loc_result = NULL;
16534 unsigned int n = XVECLEN (concatn, 0);
16535 unsigned int size;
16536
16537 for (i = 0; i < n; ++i)
16538 {
16539 dw_loc_descr_ref ref;
16540 rtx x = XVECEXP (concatn, 0, i);
16541
16542 /* At present we only track constant-sized pieces. */
16543 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16544 return NULL;
16545
16546 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16547 if (ref == NULL)
16548 return NULL;
16549
16550 add_loc_descr (&cc_loc_result, ref);
16551 add_loc_descr_op_piece (&cc_loc_result, size);
16552 }
16553
16554 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16555 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16556
16557 return cc_loc_result;
16558 }
16559
16560 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16561 for DEBUG_IMPLICIT_PTR RTL. */
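/* DW_OP_implicit_pointer (or the GNU extension before DWARF 5) takes two
   operands: a reference to the DIE of the pointed-to variable and a byte
   offset into that variable's value; while the DIE does not exist yet,
   the decl itself is stored and filled in later.  */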
16562
16563 static dw_loc_descr_ref
16564 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16565 {
16566 dw_loc_descr_ref ret;
16567 dw_die_ref ref;
16568
16569 if (dwarf_strict && dwarf_version < 5)
16570 return NULL;
16571 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16572 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16573 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16574 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16575 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16576 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16577 if (ref)
16578 {
16579 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16580 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16581 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16582 }
16583 else
16584 {
16585 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16586 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16587 }
16588 return ret;
16589 }
16590
16591 /* Output a proper Dwarf location descriptor for a variable or parameter
16592 which is either allocated in a register or in a memory location. For a
16593 register, we just generate an OP_REG and the register number. For a
16594 memory location we provide a Dwarf postfix expression describing how to
16595 generate the (dynamic) address of the object onto the address stack.
16596
16597 MODE is mode of the decl if this loc_descriptor is going to be used in
16598 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16599 allowed, VOIDmode otherwise.
16600
16601 If we don't know how to describe it, return 0. */
16602
16603 static dw_loc_descr_ref
16604 loc_descriptor (rtx rtl, machine_mode mode,
16605 enum var_init_status initialized)
16606 {
16607 dw_loc_descr_ref loc_result = NULL;
16608 scalar_int_mode int_mode;
16609
16610 switch (GET_CODE (rtl))
16611 {
16612 case SUBREG:
16613 /* The case of a subreg may arise when we have a local (register)
16614 variable or a formal (register) parameter which doesn't quite fill
16615 up an entire register. For now, just assume that it is
16616 legitimate to make the Dwarf info refer to the whole register which
16617 contains the given subreg. */
16618 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16619 loc_result = loc_descriptor (SUBREG_REG (rtl),
16620 GET_MODE (SUBREG_REG (rtl)), initialized);
16621 else
16622 goto do_default;
16623 break;
16624
16625 case REG:
16626 loc_result = reg_loc_descriptor (rtl, initialized);
16627 break;
16628
16629 case MEM:
16630 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16631 GET_MODE (rtl), initialized);
16632 if (loc_result == NULL)
16633 loc_result = tls_mem_loc_descriptor (rtl);
16634 if (loc_result == NULL)
16635 {
16636 rtx new_rtl = avoid_constant_pool_reference (rtl);
16637 if (new_rtl != rtl)
16638 loc_result = loc_descriptor (new_rtl, mode, initialized);
16639 }
16640 break;
16641
16642 case CONCAT:
16643 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16644 initialized);
16645 break;
16646
16647 case CONCATN:
16648 loc_result = concatn_loc_descriptor (rtl, initialized);
16649 break;
16650
16651 case VAR_LOCATION:
16652 /* Single part. */
16653 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16654 {
16655 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16656 if (GET_CODE (loc) == EXPR_LIST)
16657 loc = XEXP (loc, 0);
16658 loc_result = loc_descriptor (loc, mode, initialized);
16659 break;
16660 }
16661
16662 rtl = XEXP (rtl, 1);
16663 /* FALLTHRU */
16664
16665 case PARALLEL:
16666 {
16667 rtvec par_elems = XVEC (rtl, 0);
16668 int num_elem = GET_NUM_ELEM (par_elems);
16669 machine_mode mode;
16670 int i, size;
16671
16672 /* Create the first one, so we have something to add to. */
16673 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16674 VOIDmode, initialized);
16675 if (loc_result == NULL)
16676 return NULL;
16677 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16678 /* At present we only track constant-sized pieces. */
16679 if (!GET_MODE_SIZE (mode).is_constant (&size))
16680 return NULL;
16681 add_loc_descr_op_piece (&loc_result, size);
16682 for (i = 1; i < num_elem; i++)
16683 {
16684 dw_loc_descr_ref temp;
16685
16686 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16687 VOIDmode, initialized);
16688 if (temp == NULL)
16689 return NULL;
16690 add_loc_descr (&loc_result, temp);
16691 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16692 /* At present we only track constant-sized pieces. */
16693 if (!GET_MODE_SIZE (mode).is_constant (&size))
16694 return NULL;
16695 add_loc_descr_op_piece (&loc_result, size);
16696 }
16697 }
16698 break;
16699
16700 case CONST_INT:
16701 if (mode != VOIDmode && mode != BLKmode)
16702 {
16703 int_mode = as_a <scalar_int_mode> (mode);
16704 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16705 INTVAL (rtl));
16706 }
16707 break;
16708
16709 case CONST_DOUBLE:
16710 if (mode == VOIDmode)
16711 mode = GET_MODE (rtl);
16712
16713 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16714 {
16715 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16716
16717 /* Note that a CONST_DOUBLE rtx could represent either an integer
16718 or a floating-point constant. A CONST_DOUBLE is used whenever
16719 the constant requires more than one word in order to be
16720 adequately represented. We output CONST_DOUBLEs as blocks. */
16721 scalar_mode smode = as_a <scalar_mode> (mode);
16722 loc_result = new_loc_descr (DW_OP_implicit_value,
16723 GET_MODE_SIZE (smode), 0);
16724 #if TARGET_SUPPORTS_WIDE_INT == 0
16725 if (!SCALAR_FLOAT_MODE_P (smode))
16726 {
16727 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16728 loc_result->dw_loc_oprnd2.v.val_double
16729 = rtx_to_double_int (rtl);
16730 }
16731 else
16732 #endif
16733 {
16734 unsigned int length = GET_MODE_SIZE (smode);
16735 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16736
16737 insert_float (rtl, array);
16738 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16739 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16740 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16741 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16742 }
16743 }
16744 break;
16745
16746 case CONST_WIDE_INT:
16747 if (mode == VOIDmode)
16748 mode = GET_MODE (rtl);
16749
16750 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16751 {
16752 int_mode = as_a <scalar_int_mode> (mode);
16753 loc_result = new_loc_descr (DW_OP_implicit_value,
16754 GET_MODE_SIZE (int_mode), 0);
16755 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16756 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16757 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16758 }
16759 break;
16760
16761 case CONST_VECTOR:
16762 if (mode == VOIDmode)
16763 mode = GET_MODE (rtl);
16764
16765 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16766 {
16767 unsigned int length;
16768 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16769 return NULL;
16770
16771 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16772 unsigned char *array
16773 = ggc_vec_alloc<unsigned char> (length * elt_size);
16774 unsigned int i;
16775 unsigned char *p;
16776 machine_mode imode = GET_MODE_INNER (mode);
16777
16778 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16779 switch (GET_MODE_CLASS (mode))
16780 {
16781 case MODE_VECTOR_INT:
16782 for (i = 0, p = array; i < length; i++, p += elt_size)
16783 {
16784 rtx elt = CONST_VECTOR_ELT (rtl, i);
16785 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16786 }
16787 break;
16788
16789 case MODE_VECTOR_FLOAT:
16790 for (i = 0, p = array; i < length; i++, p += elt_size)
16791 {
16792 rtx elt = CONST_VECTOR_ELT (rtl, i);
16793 insert_float (elt, p);
16794 }
16795 break;
16796
16797 default:
16798 gcc_unreachable ();
16799 }
16800
16801 loc_result = new_loc_descr (DW_OP_implicit_value,
16802 length * elt_size, 0);
16803 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16804 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16805 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16806 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16807 }
16808 break;
16809
16810 case CONST:
16811 if (mode == VOIDmode
16812 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16813 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16814 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16815 {
16816 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16817 break;
16818 }
16819 /* FALLTHROUGH */
16820 case SYMBOL_REF:
16821 if (!const_ok_for_output (rtl))
16822 break;
16823 /* FALLTHROUGH */
16824 case LABEL_REF:
16825 if (is_a <scalar_int_mode> (mode, &int_mode)
16826 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16827 && (dwarf_version >= 4 || !dwarf_strict))
16828 {
16829 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16830 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16831 vec_safe_push (used_rtx_array, rtl);
16832 }
16833 break;
16834
16835 case DEBUG_IMPLICIT_PTR:
16836 loc_result = implicit_ptr_descriptor (rtl, 0);
16837 break;
16838
16839 case PLUS:
16840 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16841 && CONST_INT_P (XEXP (rtl, 1)))
16842 {
16843 loc_result
16844 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16845 break;
16846 }
16847 /* FALLTHRU */
16848 do_default:
16849 default:
16850 if ((is_a <scalar_int_mode> (mode, &int_mode)
16851 && GET_MODE (rtl) == int_mode
16852 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16853 && dwarf_version >= 4)
16854 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16855 {
16856 /* Value expression. */
16857 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16858 if (loc_result)
16859 add_loc_descr (&loc_result,
16860 new_loc_descr (DW_OP_stack_value, 0, 0));
16861 }
16862 break;
16863 }
16864
16865 return loc_result;
16866 }
16867
16868 /* We need to figure out what section we should use as the base for the
16869 address ranges where a given location is valid.
16870 1. If this particular DECL has a section associated with it, use that.
16871 2. If this function has a section associated with it, use that.
16872 3. Otherwise, use the text section.
16873 XXX: If you split a variable across multiple sections, we won't notice. */
16874
16875 static const char *
16876 secname_for_decl (const_tree decl)
16877 {
16878 const char *secname;
16879
16880 if (VAR_OR_FUNCTION_DECL_P (decl)
16881 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16882 && DECL_SECTION_NAME (decl))
16883 secname = DECL_SECTION_NAME (decl);
16884 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16885 {
16886 if (in_cold_section_p)
16887 {
16888 section *sec = current_function_section ();
16889 if (sec->common.flags & SECTION_NAMED)
16890 return sec->named.name;
16891 }
16892 secname = DECL_SECTION_NAME (current_function_decl);
16893 }
16894 else if (cfun && in_cold_section_p)
16895 secname = crtl->subsections.cold_section_label;
16896 else
16897 secname = text_section_label;
16898
16899 return secname;
16900 }
16901
16902 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16903
16904 static bool
16905 decl_by_reference_p (tree decl)
16906 {
16907 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16908 || VAR_P (decl))
16909 && DECL_BY_REFERENCE (decl));
16910 }
16911
16912 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16913 for VARLOC. */
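/* As used below, WANT_ADDRESS selects what the caller needs: 0 means the
   value itself (an address gets a trailing deref), nonzero means the
   address, and 2 additionally allows falling back to a DW_OP_stack_value
   expression when only the value can be computed.  */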
16914
16915 static dw_loc_descr_ref
16916 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16917 enum var_init_status initialized)
16918 {
16919 int have_address = 0;
16920 dw_loc_descr_ref descr;
16921 machine_mode mode;
16922
16923 if (want_address != 2)
16924 {
16925 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16926 /* Single part. */
16927 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16928 {
16929 varloc = PAT_VAR_LOCATION_LOC (varloc);
16930 if (GET_CODE (varloc) == EXPR_LIST)
16931 varloc = XEXP (varloc, 0);
16932 mode = GET_MODE (varloc);
16933 if (MEM_P (varloc))
16934 {
16935 rtx addr = XEXP (varloc, 0);
16936 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16937 mode, initialized);
16938 if (descr)
16939 have_address = 1;
16940 else
16941 {
16942 rtx x = avoid_constant_pool_reference (varloc);
16943 if (x != varloc)
16944 descr = mem_loc_descriptor (x, mode, VOIDmode,
16945 initialized);
16946 }
16947 }
16948 else
16949 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16950 }
16951 else
16952 return 0;
16953 }
16954 else
16955 {
16956 if (GET_CODE (varloc) == VAR_LOCATION)
16957 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16958 else
16959 mode = DECL_MODE (loc);
16960 descr = loc_descriptor (varloc, mode, initialized);
16961 have_address = 1;
16962 }
16963
16964 if (!descr)
16965 return 0;
16966
16967 if (want_address == 2 && !have_address
16968 && (dwarf_version >= 4 || !dwarf_strict))
16969 {
16970 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16971 {
16972 expansion_failed (loc, NULL_RTX,
16973 "DWARF address size mismatch");
16974 return 0;
16975 }
16976 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16977 have_address = 1;
16978 }
16979 /* Show if we can't fill the request for an address. */
16980 if (want_address && !have_address)
16981 {
16982 expansion_failed (loc, NULL_RTX,
16983 "Want address and only have value");
16984 return 0;
16985 }
16986
16987 /* If we've got an address and don't want one, dereference. */
16988 if (!want_address && have_address)
16989 {
16990 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16991 enum dwarf_location_atom op;
16992
16993 if (size > DWARF2_ADDR_SIZE || size == -1)
16994 {
16995 expansion_failed (loc, NULL_RTX,
16996 "DWARF address size mismatch");
16997 return 0;
16998 }
16999 else if (size == DWARF2_ADDR_SIZE)
17000 op = DW_OP_deref;
17001 else
17002 op = DW_OP_deref_size;
17003
17004 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17005 }
17006
17007 return descr;
17008 }
17009
17010 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17011 if it is not possible. */
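/* For instance, a byte-aligned 24-bit piece at offset zero becomes
   DW_OP_piece 3, while a 5-bit piece needs DW_OP_bit_piece 5 <offset>,
   which this function only emits for DWARF 3 or later, or when strict
   DWARF is not requested.  */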
17012
17013 static dw_loc_descr_ref
17014 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17015 {
17016 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17017 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17018 else if (dwarf_version >= 3 || !dwarf_strict)
17019 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17020 else
17021 return NULL;
17022 }
17023
17024 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17025 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
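/* The result is a piece-wise description of DECL: each fragment that
   still has a location contributes its expression followed by a
   DW_OP_*piece, and fragments that were optimized out contribute a
   DW_OP_*piece preceded by no expression at all, e.g. (sizes
   illustrative) DW_OP_reg0 DW_OP_piece 4 DW_OP_piece 4 for a variable
   whose upper half is gone.  */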
17026
17027 static dw_loc_descr_ref
17028 dw_sra_loc_expr (tree decl, rtx loc)
17029 {
17030 rtx p;
17031 unsigned HOST_WIDE_INT padsize = 0;
17032 dw_loc_descr_ref descr, *descr_tail;
17033 unsigned HOST_WIDE_INT decl_size;
17034 rtx varloc;
17035 enum var_init_status initialized;
17036
17037 if (DECL_SIZE (decl) == NULL
17038 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17039 return NULL;
17040
17041 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17042 descr = NULL;
17043 descr_tail = &descr;
17044
17045 for (p = loc; p; p = XEXP (p, 1))
17046 {
17047 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17048 rtx loc_note = *decl_piece_varloc_ptr (p);
17049 dw_loc_descr_ref cur_descr;
17050 dw_loc_descr_ref *tail, last = NULL;
17051 unsigned HOST_WIDE_INT opsize = 0;
17052
17053 if (loc_note == NULL_RTX
17054 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17055 {
17056 padsize += bitsize;
17057 continue;
17058 }
17059 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17060 varloc = NOTE_VAR_LOCATION (loc_note);
17061 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17062 if (cur_descr == NULL)
17063 {
17064 padsize += bitsize;
17065 continue;
17066 }
17067
17068 /* Check that cur_descr either doesn't use
17069 DW_OP_*piece operations, or their sum is equal
17070 to bitsize. Otherwise we can't embed it. */
17071 for (tail = &cur_descr; *tail != NULL;
17072 tail = &(*tail)->dw_loc_next)
17073 if ((*tail)->dw_loc_opc == DW_OP_piece)
17074 {
17075 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17076 * BITS_PER_UNIT;
17077 last = *tail;
17078 }
17079 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17080 {
17081 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17082 last = *tail;
17083 }
17084
17085 if (last != NULL && opsize != bitsize)
17086 {
17087 padsize += bitsize;
17088 /* Discard the current piece of the descriptor and release any
17089 addr_table entries it uses. */
17090 remove_loc_list_addr_table_entries (cur_descr);
17091 continue;
17092 }
17093
17094 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17095 expression, which means that those bits are optimized out. */
17096 if (padsize)
17097 {
17098 if (padsize > decl_size)
17099 {
17100 remove_loc_list_addr_table_entries (cur_descr);
17101 goto discard_descr;
17102 }
17103 decl_size -= padsize;
17104 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17105 if (*descr_tail == NULL)
17106 {
17107 remove_loc_list_addr_table_entries (cur_descr);
17108 goto discard_descr;
17109 }
17110 descr_tail = &(*descr_tail)->dw_loc_next;
17111 padsize = 0;
17112 }
17113 *descr_tail = cur_descr;
17114 descr_tail = tail;
17115 if (bitsize > decl_size)
17116 goto discard_descr;
17117 decl_size -= bitsize;
17118 if (last == NULL)
17119 {
17120 HOST_WIDE_INT offset = 0;
17121 if (GET_CODE (varloc) == VAR_LOCATION
17122 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17123 {
17124 varloc = PAT_VAR_LOCATION_LOC (varloc);
17125 if (GET_CODE (varloc) == EXPR_LIST)
17126 varloc = XEXP (varloc, 0);
17127 }
17128 do
17129 {
17130 if (GET_CODE (varloc) == CONST
17131 || GET_CODE (varloc) == SIGN_EXTEND
17132 || GET_CODE (varloc) == ZERO_EXTEND)
17133 varloc = XEXP (varloc, 0);
17134 else if (GET_CODE (varloc) == SUBREG)
17135 varloc = SUBREG_REG (varloc);
17136 else
17137 break;
17138 }
17139 while (1);
17140 /* The DW_OP_bit_piece offset should be zero for register
17141 or implicit location descriptions and empty location
17142 descriptions, but for memory addresses it needs big-endian
17143 adjustment. */
17144 if (MEM_P (varloc))
17145 {
17146 unsigned HOST_WIDE_INT memsize;
17147 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17148 goto discard_descr;
17149 memsize *= BITS_PER_UNIT;
17150 if (memsize != bitsize)
17151 {
17152 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17153 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17154 goto discard_descr;
17155 if (memsize < bitsize)
17156 goto discard_descr;
17157 if (BITS_BIG_ENDIAN)
17158 offset = memsize - bitsize;
17159 }
17160 }
17161
17162 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17163 if (*descr_tail == NULL)
17164 goto discard_descr;
17165 descr_tail = &(*descr_tail)->dw_loc_next;
17166 }
17167 }
17168
17169 /* If there were any non-empty expressions, add padding till the end of
17170 the decl. */
17171 if (descr != NULL && decl_size != 0)
17172 {
17173 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17174 if (*descr_tail == NULL)
17175 goto discard_descr;
17176 }
17177 return descr;
17178
17179 discard_descr:
17180 /* Discard the descriptor and release any addr_table entries it uses. */
17181 remove_loc_list_addr_table_entries (descr);
17182 return NULL;
17183 }
17184
17185 /* Return the dwarf representation of the location list LOC_LIST of
17186 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17187 function. */
17188
17189 static dw_loc_list_ref
17190 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17191 {
17192 const char *endname, *secname;
17193 var_loc_view endview;
17194 rtx varloc;
17195 enum var_init_status initialized;
17196 struct var_loc_node *node;
17197 dw_loc_descr_ref descr;
17198 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17199 dw_loc_list_ref list = NULL;
17200 dw_loc_list_ref *listp = &list;
17201
17202 /* Now that we know what section we are using for a base,
17203 actually construct the list of locations.
17204 The first location information is what is passed to the
17205 function that creates the location list, and the remaining
17206 locations just get added on to that list.
17207 Note that we only know the start address for a location
17208 (i.e. location changes), so to build the range, we use
17209 the range [current location start, next location start].
17210 This means we have to special case the last node, and generate
17211 a range of [last location start, end of function label]. */
17212
17213 if (cfun && crtl->has_bb_partition)
17214 {
17215 bool save_in_cold_section_p = in_cold_section_p;
17216 in_cold_section_p = first_function_block_is_cold;
17217 if (loc_list->last_before_switch == NULL)
17218 in_cold_section_p = !in_cold_section_p;
17219 secname = secname_for_decl (decl);
17220 in_cold_section_p = save_in_cold_section_p;
17221 }
17222 else
17223 secname = secname_for_decl (decl);
17224
17225 for (node = loc_list->first; node; node = node->next)
17226 {
17227 bool range_across_switch = false;
17228 if (GET_CODE (node->loc) == EXPR_LIST
17229 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17230 {
17231 if (GET_CODE (node->loc) == EXPR_LIST)
17232 {
17233 descr = NULL;
17234 /* This requires DW_OP_{,bit_}piece, which is not usable
17235 inside DWARF expressions. */
17236 if (want_address == 2)
17237 descr = dw_sra_loc_expr (decl, node->loc);
17238 }
17239 else
17240 {
17241 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17242 varloc = NOTE_VAR_LOCATION (node->loc);
17243 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17244 }
17245 if (descr)
17246 {
17247 /* If a section switch happens in between node->label
17248 and node->next->label (or the end of function) and
17249 we can't emit it as a single entry list,
17250 emit two ranges, the first one ending at the end
17251 of the first partition and the second one starting at the
17252 beginning of the second partition. */
17253 if (node == loc_list->last_before_switch
17254 && (node != loc_list->first || loc_list->first->next
17255 /* If we are to emit a view number, we will emit
17256 a loclist rather than a single location
17257 expression for the entire function (see
17258 loc_list_has_views), so we have to split the
17259 range that straddles across partitions. */
17260 || !ZERO_VIEW_P (node->view))
17261 && current_function_decl)
17262 {
17263 endname = cfun->fde->dw_fde_end;
17264 endview = 0;
17265 range_across_switch = true;
17266 }
17267 /* The variable has a location between NODE->LABEL and
17268 NODE->NEXT->LABEL. */
17269 else if (node->next)
17270 endname = node->next->label, endview = node->next->view;
17271 /* If the variable has a location at the last label
17272 it keeps its location until the end of function. */
17273 else if (!current_function_decl)
17274 endname = text_end_label, endview = 0;
17275 else
17276 {
17277 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17278 current_function_funcdef_no);
17279 endname = ggc_strdup (label_id);
17280 endview = 0;
17281 }
17282
17283 *listp = new_loc_list (descr, node->label, node->view,
17284 endname, endview, secname);
17285 if (TREE_CODE (decl) == PARM_DECL
17286 && node == loc_list->first
17287 && NOTE_P (node->loc)
17288 && strcmp (node->label, endname) == 0)
17289 (*listp)->force = true;
17290 listp = &(*listp)->dw_loc_next;
17291 }
17292 }
17293
17294 if (cfun
17295 && crtl->has_bb_partition
17296 && node == loc_list->last_before_switch)
17297 {
17298 bool save_in_cold_section_p = in_cold_section_p;
17299 in_cold_section_p = !first_function_block_is_cold;
17300 secname = secname_for_decl (decl);
17301 in_cold_section_p = save_in_cold_section_p;
17302 }
17303
17304 if (range_across_switch)
17305 {
17306 if (GET_CODE (node->loc) == EXPR_LIST)
17307 descr = dw_sra_loc_expr (decl, node->loc);
17308 else
17309 {
17310 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17311 varloc = NOTE_VAR_LOCATION (node->loc);
17312 descr = dw_loc_list_1 (decl, varloc, want_address,
17313 initialized);
17314 }
17315 gcc_assert (descr);
17316 /* The variable has a location between NODE->LABEL and
17317 NODE->NEXT->LABEL. */
17318 if (node->next)
17319 endname = node->next->label, endview = node->next->view;
17320 else
17321 endname = cfun->fde->dw_fde_second_end, endview = 0;
17322 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17323 endname, endview, secname);
17324 listp = &(*listp)->dw_loc_next;
17325 }
17326 }
17327
17328 /* Try to avoid the overhead of a location list by emitting a location
17329 expression instead, but only if we didn't have more than one
17330 location entry in the first place. If some entries were not
17331 representable, we don't want to pretend that a single entry that was
17332 representable applies to the entire scope in which the variable is
17333 available. */
17334 if (list && loc_list->first->next)
17335 gen_llsym (list);
17336 else
17337 maybe_gen_llsym (list);
17338
17339 return list;
17340 }
17341
17342 /* Return true if the loc_list has only a single element and thus can be
17343 represented as a location description. */
17344
17345 static bool
17346 single_element_loc_list_p (dw_loc_list_ref list)
17347 {
17348 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17349 return !list->ll_symbol;
17350 }
17351
17352 /* Duplicate a single element of location list. */
17353
17354 static inline dw_loc_descr_ref
17355 copy_loc_descr (dw_loc_descr_ref ref)
17356 {
17357 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17358 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17359 return copy;
17360 }
17361
17362 /* To each location in list LIST append loc descr REF. */
17363
17364 static void
17365 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17366 {
17367 dw_loc_descr_ref copy;
17368 add_loc_descr (&list->expr, ref);
17369 list = list->dw_loc_next;
17370 while (list)
17371 {
17372 copy = copy_loc_descr (ref);
17373 add_loc_descr (&list->expr, copy);
17374 while (copy->dw_loc_next)
17375 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17376 list = list->dw_loc_next;
17377 }
17378 }
17379
17380 /* To each location in list LIST prepend loc descr REF. */
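/* A subtle point in the loop below: REF is spliced in front of the first
   location's expression, and for every later location a fresh copy of
   the REF prefix (up to the remembered REF_END) is made and re-linked
   onto that location's original expression, so no two locations share
   descriptor nodes.  */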
17381
17382 static void
17383 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17384 {
17385 dw_loc_descr_ref copy;
17386 dw_loc_descr_ref ref_end = list->expr;
17387 add_loc_descr (&ref, list->expr);
17388 list->expr = ref;
17389 list = list->dw_loc_next;
17390 while (list)
17391 {
17392 dw_loc_descr_ref end = list->expr;
17393 list->expr = copy = copy_loc_descr (ref);
17394 while (copy->dw_loc_next != ref_end)
17395 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17396 copy->dw_loc_next = end;
17397 list = list->dw_loc_next;
17398 }
17399 }
17400
17401 /* Given two lists RET and LIST,
17402 produce a location list that is the result of adding the expression in LIST
17403 to the expression in RET at each position in the program.
17404 Might be destructive on both RET and LIST.
17405
17406 TODO: We handle only the simple cases of RET or LIST having at most one
17407 element. The general case would involve sorting the lists in program order
17408 and merging them, which will need some additional work.
17409 Adding that will improve the quality of debug info, especially for SRA-ed
17410 structures. */
17411
17412 static void
17413 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17414 {
17415 if (!list)
17416 return;
17417 if (!*ret)
17418 {
17419 *ret = list;
17420 return;
17421 }
17422 if (!list->dw_loc_next)
17423 {
17424 add_loc_descr_to_each (*ret, list->expr);
17425 return;
17426 }
17427 if (!(*ret)->dw_loc_next)
17428 {
17429 prepend_loc_descr_to_each (list, (*ret)->expr);
17430 *ret = list;
17431 return;
17432 }
17433 expansion_failed (NULL_TREE, NULL_RTX,
17434 "Don't know how to merge two non-trivial"
17435 " location lists.\n");
17436 *ret = NULL;
17437 return;
17438 }
17439
17440 /* LOC is a constant expression. Try our luck: look it up in the constant
17441 pool and return a loc_descr of its address. */
17442
17443 static dw_loc_descr_ref
17444 cst_pool_loc_descr (tree loc)
17445 {
17446 /* Get an RTL for this, if something has been emitted. */
17447 rtx rtl = lookup_constant_def (loc);
17448
17449 if (!rtl || !MEM_P (rtl))
17450 {
17451 gcc_assert (!rtl);
17452 return 0;
17453 }
17454 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17455
17456 /* TODO: We might get more coverage if we were actually delaying expansion
17457 of all expressions till the end of compilation, when constant pools are fully
17458 populated. */
17459 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17460 {
17461 expansion_failed (loc, NULL_RTX,
17462 "CST value in contant pool but not marked.");
17463 return 0;
17464 }
17465 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17466 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17467 }
17468
17469 /* Return a dw_loc_list representing the address of addr_expr LOC
17470 by looking for an inner INDIRECT_REF expression and turning
17471 it into simple arithmetic.
17472
17473 See loc_list_from_tree for the meaning of CONTEXT. */
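/* For example (offset illustrative), the address &ptr->field, where
   FIELD sits at byte offset 8, is rewritten into the value of PTR plus
   the offset:  <loc of ptr> DW_OP_plus_uconst 8 DW_OP_stack_value.  */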
17474
17475 static dw_loc_list_ref
17476 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17477 loc_descr_context *context)
17478 {
17479 tree obj, offset;
17480 poly_int64 bitsize, bitpos, bytepos;
17481 machine_mode mode;
17482 int unsignedp, reversep, volatilep = 0;
17483 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17484
17485 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17486 &bitsize, &bitpos, &offset, &mode,
17487 &unsignedp, &reversep, &volatilep);
17488 STRIP_NOPS (obj);
17489 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17490 {
17491 expansion_failed (loc, NULL_RTX, "bitfield access");
17492 return 0;
17493 }
17494 if (!INDIRECT_REF_P (obj))
17495 {
17496 expansion_failed (obj,
17497 NULL_RTX, "no indirect ref in inner reference");
17498 return 0;
17499 }
17500 if (!offset && known_eq (bitpos, 0))
17501 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17502 context);
17503 else if (toplev
17504 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17505 && (dwarf_version >= 4 || !dwarf_strict))
17506 {
17507 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17508 if (!list_ret)
17509 return 0;
17510 if (offset)
17511 {
17512 /* Variable offset. */
17513 list_ret1 = loc_list_from_tree (offset, 0, context);
17514 if (list_ret1 == 0)
17515 return 0;
17516 add_loc_list (&list_ret, list_ret1);
17517 if (!list_ret)
17518 return 0;
17519 add_loc_descr_to_each (list_ret,
17520 new_loc_descr (DW_OP_plus, 0, 0));
17521 }
17522 HOST_WIDE_INT value;
17523 if (bytepos.is_constant (&value) && value > 0)
17524 add_loc_descr_to_each (list_ret,
17525 new_loc_descr (DW_OP_plus_uconst, value, 0));
17526 else if (maybe_ne (bytepos, 0))
17527 loc_list_plus_const (list_ret, bytepos);
17528 add_loc_descr_to_each (list_ret,
17529 new_loc_descr (DW_OP_stack_value, 0, 0));
17530 }
17531 return list_ret;
17532 }
17533
17534 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17535 all operations from LOC are nops, move to the last one. Insert into NOPS all
17536 operations that are skipped. */
17537
17538 static void
17539 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17540 hash_set<dw_loc_descr_ref> &nops)
17541 {
17542 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17543 {
17544 nops.add (loc);
17545 loc = loc->dw_loc_next;
17546 }
17547 }
17548
17549 /* Helper for loc_descr_without_nops: free the location description operation
17550 P. */
17551
17552 bool
17553 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17554 {
17555 ggc_free (loc);
17556 return true;
17557 }
17558
17559 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17560 finishes LOC. */
17561
17562 static void
17563 loc_descr_without_nops (dw_loc_descr_ref &loc)
17564 {
17565 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17566 return;
17567
17568 /* Set of all DW_OP_nop operations we remove. */
17569 hash_set<dw_loc_descr_ref> nops;
17570
17571 /* First, strip all prefix NOP operations in order to keep the head of the
17572 operations list. */
17573 loc_descr_to_next_no_nop (loc, nops);
17574
17575 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17576 {
17577 /* For control flow operations: strip "prefix" nops in destination
17578 labels. */
17579 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17580 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17581 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17582 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17583
17584 /* Do the same for the operations that follow, then move to the next
17585 iteration. */
17586 if (cur->dw_loc_next != NULL)
17587 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17588 cur = cur->dw_loc_next;
17589 }
17590
17591 nops.traverse<void *, free_loc_descr> (NULL);
17592 }
17593
17594
17595 struct dwarf_procedure_info;
17596
17597 /* Helper structure for location descriptions generation. */
17598 struct loc_descr_context
17599 {
17600 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17601 NULL_TREE if DW_OP_push_object_address is invalid for this location
17602 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17603 tree context_type;
17604 /* The ..._DECL node that should be translated as a
17605 DW_OP_push_object_address operation. */
17606 tree base_decl;
17607 /* Information about the DWARF procedure we are currently generating. NULL if
17608 we are not generating a DWARF procedure. */
17609 struct dwarf_procedure_info *dpi;
17610 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17611 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17612 bool placeholder_arg;
17613 /* True if PLACEHOLDER_EXPR has been seen. */
17614 bool placeholder_seen;
17615 };
17616
17617 /* DWARF procedures generation
17618
17619 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17620 quantities such as sizes or offsets. Such computations can have redundant parts
17621 that can be factorized in order to reduce the size of the output debug
17622 information. This is the whole point of DWARF procedures.
17623
17624 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17625 already factorized into functions ("size functions") in order to handle very
17626 big and complex types. Such functions are quite simple: they have integral
17627 arguments, they return an integral result and their body contains only a
17628 return statement with arithmetic expressions. This is the only kind of
17629 function we are interested in translating into DWARF procedures, here.
17630
17631 DWARF expressions and DWARF procedures are executed using a stack, so we have
17632 to define some calling convention for them to interact. Let's say that:
17633
17634 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17635 all arguments in reverse order (right-to-left) so that when the DWARF
17636 procedure execution starts, the first argument is the top of the stack.
17637
17638 - Then, when returning, the DWARF procedure must have consumed all arguments
17639 on the stack, must have pushed the result and touched nothing else.
17640
17641 - Each integral argument and the result are of integral type and can be held
17642 in a single stack slot.
17643
17644 - We call "frame offset" the number of stack slots that are "under DWARF
17645 procedure control": it includes the argument slots, the temporaries and
17646 the result slot. Thus, it is equal to the number of arguments when the
17647 procedure execution starts and must be equal to one (the result) when it
17648 returns. */
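/* A worked illustration of this convention (a hand-written sketch, not actual
   compiler output): for a hypothetical size function computing A * B with two
   integral arguments, a call site would emit

     <push B> <push A> DW_OP_call4 <DIE of the procedure>

   so that A is on top of the stack when the procedure starts, and the
   procedure body would look like

     DW_OP_dup DW_OP_pick 2 DW_OP_mul              (compute A * B)
     DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop   (epilogue: pop A and B)

   leaving only the result on the stack.  The net stack effect of the call is
   thus 1 - 2 = -1 slots, which is what dwarf_proc_stack_usage_map records for
   the procedure (see function_to_dwarf_procedure below). */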
17649
17650 /* Helper structure used when generating operations for a DWARF procedure. */
17651 struct dwarf_procedure_info
17652 {
17653 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17654 currently translated. */
17655 tree fndecl;
17656 /* The number of arguments FNDECL takes. */
17657 unsigned args_count;
17658 };
17659
17660 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17661 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17662 equate it to this DIE. */
17663
17664 static dw_die_ref
17665 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17666 dw_die_ref parent_die)
17667 {
17668 dw_die_ref dwarf_proc_die;
17669
17670 if ((dwarf_version < 3 && dwarf_strict)
17671 || location == NULL)
17672 return NULL;
17673
17674 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17675 if (fndecl)
17676 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17677 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17678 return dwarf_proc_die;
17679 }
17680
17681 /* Return whether TYPE is a supported type as a DWARF procedure argument
17682 type or return type (we handle only scalar types and pointer types that
17683 aren't wider than the DWARF expression evaluation stack). */
17684
17685 static bool
17686 is_handled_procedure_type (tree type)
17687 {
17688 return ((INTEGRAL_TYPE_P (type)
17689 || TREE_CODE (type) == OFFSET_TYPE
17690 || TREE_CODE (type) == POINTER_TYPE)
17691 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17692 }
17693
17694 /* Helper for resolve_args_picking: do the same but stop when coming across
17695 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17696 offset *before* evaluating the corresponding operation. */
17697
17698 static bool
17699 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17700 struct dwarf_procedure_info *dpi,
17701 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17702 {
17703 /* The "frame_offset" identifier is already used to name a macro... */
17704 unsigned frame_offset_ = initial_frame_offset;
17705 dw_loc_descr_ref l;
17706
17707 for (l = loc; l != NULL;)
17708 {
17709 bool existed;
17710 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17711
17712 /* If we already met this node, there is nothing to compute anymore. */
17713 if (existed)
17714 {
17715 /* Make sure that the stack size is consistent wherever the execution
17716 flow comes from. */
17717 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17718 break;
17719 }
17720 l_frame_offset = frame_offset_;
17721
17722 /* If needed, relocate the picking offset with respect to the frame
17723 offset. */
17724 if (l->frame_offset_rel)
17725 {
17726 unsigned HOST_WIDE_INT off;
17727 switch (l->dw_loc_opc)
17728 {
17729 case DW_OP_pick:
17730 off = l->dw_loc_oprnd1.v.val_unsigned;
17731 break;
17732 case DW_OP_dup:
17733 off = 0;
17734 break;
17735 case DW_OP_over:
17736 off = 1;
17737 break;
17738 default:
17739 gcc_unreachable ();
17740 }
17741 /* frame_offset_ is the size of the current stack frame, including
17742 incoming arguments. Besides, the arguments are pushed
17743 right-to-left. Thus, in order to access the Nth argument from
17744 this operation node, the picking has to skip temporaries *plus*
17745 one stack slot per argument (0 for the first one, 1 for the second
17746 one, etc.).
17747
17748 The targeted argument number (N) is already set as the operand,
17749 and the number of temporaries can be computed with:
17750 frame_offset_ - dpi->args_count */
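/* A worked example (hand-computed, for illustration): in a procedure with
   2 arguments, suppose frame_offset_ is 3 at this operation (the two
   argument slots plus one temporary), so the stack is, from top to bottom:
   temporary, argument 0, argument 1.  For N == 0 the computation below
   yields off = 0 + (3 - 2) = 1, i.e. DW_OP_over, which indeed fetches
   argument 0; for N == 1 it yields off = 2, i.e. DW_OP_pick 2, which
   fetches argument 1. */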
17751 off += frame_offset_ - dpi->args_count;
17752
17753 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17754 if (off > 255)
17755 return false;
17756
17757 if (off == 0)
17758 {
17759 l->dw_loc_opc = DW_OP_dup;
17760 l->dw_loc_oprnd1.v.val_unsigned = 0;
17761 }
17762 else if (off == 1)
17763 {
17764 l->dw_loc_opc = DW_OP_over;
17765 l->dw_loc_oprnd1.v.val_unsigned = 0;
17766 }
17767 else
17768 {
17769 l->dw_loc_opc = DW_OP_pick;
17770 l->dw_loc_oprnd1.v.val_unsigned = off;
17771 }
17772 }
17773
17774 /* Update frame_offset according to the effect the current operation has
17775 on the stack. */
17776 switch (l->dw_loc_opc)
17777 {
17778 case DW_OP_deref:
17779 case DW_OP_swap:
17780 case DW_OP_rot:
17781 case DW_OP_abs:
17782 case DW_OP_neg:
17783 case DW_OP_not:
17784 case DW_OP_plus_uconst:
17785 case DW_OP_skip:
17786 case DW_OP_reg0:
17787 case DW_OP_reg1:
17788 case DW_OP_reg2:
17789 case DW_OP_reg3:
17790 case DW_OP_reg4:
17791 case DW_OP_reg5:
17792 case DW_OP_reg6:
17793 case DW_OP_reg7:
17794 case DW_OP_reg8:
17795 case DW_OP_reg9:
17796 case DW_OP_reg10:
17797 case DW_OP_reg11:
17798 case DW_OP_reg12:
17799 case DW_OP_reg13:
17800 case DW_OP_reg14:
17801 case DW_OP_reg15:
17802 case DW_OP_reg16:
17803 case DW_OP_reg17:
17804 case DW_OP_reg18:
17805 case DW_OP_reg19:
17806 case DW_OP_reg20:
17807 case DW_OP_reg21:
17808 case DW_OP_reg22:
17809 case DW_OP_reg23:
17810 case DW_OP_reg24:
17811 case DW_OP_reg25:
17812 case DW_OP_reg26:
17813 case DW_OP_reg27:
17814 case DW_OP_reg28:
17815 case DW_OP_reg29:
17816 case DW_OP_reg30:
17817 case DW_OP_reg31:
17818 case DW_OP_bregx:
17819 case DW_OP_piece:
17820 case DW_OP_deref_size:
17821 case DW_OP_nop:
17822 case DW_OP_bit_piece:
17823 case DW_OP_implicit_value:
17824 case DW_OP_stack_value:
17825 break;
17826
17827 case DW_OP_addr:
17828 case DW_OP_const1u:
17829 case DW_OP_const1s:
17830 case DW_OP_const2u:
17831 case DW_OP_const2s:
17832 case DW_OP_const4u:
17833 case DW_OP_const4s:
17834 case DW_OP_const8u:
17835 case DW_OP_const8s:
17836 case DW_OP_constu:
17837 case DW_OP_consts:
17838 case DW_OP_dup:
17839 case DW_OP_over:
17840 case DW_OP_pick:
17841 case DW_OP_lit0:
17842 case DW_OP_lit1:
17843 case DW_OP_lit2:
17844 case DW_OP_lit3:
17845 case DW_OP_lit4:
17846 case DW_OP_lit5:
17847 case DW_OP_lit6:
17848 case DW_OP_lit7:
17849 case DW_OP_lit8:
17850 case DW_OP_lit9:
17851 case DW_OP_lit10:
17852 case DW_OP_lit11:
17853 case DW_OP_lit12:
17854 case DW_OP_lit13:
17855 case DW_OP_lit14:
17856 case DW_OP_lit15:
17857 case DW_OP_lit16:
17858 case DW_OP_lit17:
17859 case DW_OP_lit18:
17860 case DW_OP_lit19:
17861 case DW_OP_lit20:
17862 case DW_OP_lit21:
17863 case DW_OP_lit22:
17864 case DW_OP_lit23:
17865 case DW_OP_lit24:
17866 case DW_OP_lit25:
17867 case DW_OP_lit26:
17868 case DW_OP_lit27:
17869 case DW_OP_lit28:
17870 case DW_OP_lit29:
17871 case DW_OP_lit30:
17872 case DW_OP_lit31:
17873 case DW_OP_breg0:
17874 case DW_OP_breg1:
17875 case DW_OP_breg2:
17876 case DW_OP_breg3:
17877 case DW_OP_breg4:
17878 case DW_OP_breg5:
17879 case DW_OP_breg6:
17880 case DW_OP_breg7:
17881 case DW_OP_breg8:
17882 case DW_OP_breg9:
17883 case DW_OP_breg10:
17884 case DW_OP_breg11:
17885 case DW_OP_breg12:
17886 case DW_OP_breg13:
17887 case DW_OP_breg14:
17888 case DW_OP_breg15:
17889 case DW_OP_breg16:
17890 case DW_OP_breg17:
17891 case DW_OP_breg18:
17892 case DW_OP_breg19:
17893 case DW_OP_breg20:
17894 case DW_OP_breg21:
17895 case DW_OP_breg22:
17896 case DW_OP_breg23:
17897 case DW_OP_breg24:
17898 case DW_OP_breg25:
17899 case DW_OP_breg26:
17900 case DW_OP_breg27:
17901 case DW_OP_breg28:
17902 case DW_OP_breg29:
17903 case DW_OP_breg30:
17904 case DW_OP_breg31:
17905 case DW_OP_fbreg:
17906 case DW_OP_push_object_address:
17907 case DW_OP_call_frame_cfa:
17908 case DW_OP_GNU_variable_value:
17909 ++frame_offset_;
17910 break;
17911
17912 case DW_OP_drop:
17913 case DW_OP_xderef:
17914 case DW_OP_and:
17915 case DW_OP_div:
17916 case DW_OP_minus:
17917 case DW_OP_mod:
17918 case DW_OP_mul:
17919 case DW_OP_or:
17920 case DW_OP_plus:
17921 case DW_OP_shl:
17922 case DW_OP_shr:
17923 case DW_OP_shra:
17924 case DW_OP_xor:
17925 case DW_OP_bra:
17926 case DW_OP_eq:
17927 case DW_OP_ge:
17928 case DW_OP_gt:
17929 case DW_OP_le:
17930 case DW_OP_lt:
17931 case DW_OP_ne:
17932 case DW_OP_regx:
17933 case DW_OP_xderef_size:
17934 --frame_offset_;
17935 break;
17936
17937 case DW_OP_call2:
17938 case DW_OP_call4:
17939 case DW_OP_call_ref:
17940 {
17941 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17942 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17943
17944 if (stack_usage == NULL)
17945 return false;
17946 frame_offset_ += *stack_usage;
17947 break;
17948 }
17949
17950 case DW_OP_implicit_pointer:
17951 case DW_OP_entry_value:
17952 case DW_OP_const_type:
17953 case DW_OP_regval_type:
17954 case DW_OP_deref_type:
17955 case DW_OP_convert:
17956 case DW_OP_reinterpret:
17957 case DW_OP_form_tls_address:
17958 case DW_OP_GNU_push_tls_address:
17959 case DW_OP_GNU_uninit:
17960 case DW_OP_GNU_encoded_addr:
17961 case DW_OP_GNU_implicit_pointer:
17962 case DW_OP_GNU_entry_value:
17963 case DW_OP_GNU_const_type:
17964 case DW_OP_GNU_regval_type:
17965 case DW_OP_GNU_deref_type:
17966 case DW_OP_GNU_convert:
17967 case DW_OP_GNU_reinterpret:
17968 case DW_OP_GNU_parameter_ref:
17969 /* loc_list_from_tree will probably not output these operations for
17970 size functions, so assume they will not appear here. */
17971 /* Fall through... */
17972
17973 default:
17974 gcc_unreachable ();
17975 }
17976
17977 /* Now, follow the control flow (except subroutine calls). */
17978 switch (l->dw_loc_opc)
17979 {
17980 case DW_OP_bra:
17981 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17982 frame_offsets))
17983 return false;
17984 /* Fall through. */
17985
17986 case DW_OP_skip:
17987 l = l->dw_loc_oprnd1.v.val_loc;
17988 break;
17989
17990 case DW_OP_stack_value:
17991 return true;
17992
17993 default:
17994 l = l->dw_loc_next;
17995 break;
17996 }
17997 }
17998
17999 return true;
18000 }
18001
18002 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18003 operations) in order to resolve the operand of DW_OP_pick operations that
18004 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18005 offset *before* LOC is executed. Return whether all relocations were
18006 successful. */
18007
18008 static bool
18009 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18010 struct dwarf_procedure_info *dpi)
18011 {
18012 /* Associate to all visited operations the frame offset *before* evaluating
18013 this operation. */
18014 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18015
18016 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18017 frame_offsets);
18018 }
18019
18020 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18021 Return NULL if it is not possible. */
18022
18023 static dw_die_ref
18024 function_to_dwarf_procedure (tree fndecl)
18025 {
18026 struct loc_descr_context ctx;
18027 struct dwarf_procedure_info dpi;
18028 dw_die_ref dwarf_proc_die;
18029 tree tree_body = DECL_SAVED_TREE (fndecl);
18030 dw_loc_descr_ref loc_body, epilogue;
18031
18032 tree cursor;
18033 unsigned i;
18034
18035 /* Do not generate multiple DWARF procedures for the same function
18036 declaration. */
18037 dwarf_proc_die = lookup_decl_die (fndecl);
18038 if (dwarf_proc_die != NULL)
18039 return dwarf_proc_die;
18040
18041 /* DWARF procedures are available starting with the DWARFv3 standard. */
18042 if (dwarf_version < 3 && dwarf_strict)
18043 return NULL;
18044
18045 /* We handle only functions for which we still have a body, that return a
18046 supported type and that take arguments with supported types. Note that
18047 there is no point translating functions that return nothing. */
18048 if (tree_body == NULL_TREE
18049 || DECL_RESULT (fndecl) == NULL_TREE
18050 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18051 return NULL;
18052
18053 for (cursor = DECL_ARGUMENTS (fndecl);
18054 cursor != NULL_TREE;
18055 cursor = TREE_CHAIN (cursor))
18056 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18057 return NULL;
18058
18059 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18060 if (TREE_CODE (tree_body) != RETURN_EXPR)
18061 return NULL;
18062 tree_body = TREE_OPERAND (tree_body, 0);
18063 if (TREE_CODE (tree_body) != MODIFY_EXPR
18064 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18065 return NULL;
18066 tree_body = TREE_OPERAND (tree_body, 1);
18067
18068 /* Try to translate the body expression itself. Note that this will probably
18069 cause an infinite recursion if its call graph has a cycle. This is very
18070 unlikely for size functions, however, so don't bother with such things at
18071 the moment. */
18072 ctx.context_type = NULL_TREE;
18073 ctx.base_decl = NULL_TREE;
18074 ctx.dpi = &dpi;
18075 ctx.placeholder_arg = false;
18076 ctx.placeholder_seen = false;
18077 dpi.fndecl = fndecl;
18078 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18079 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18080 if (!loc_body)
18081 return NULL;
18082
18083 /* After evaluating all operands in "loc_body", we should still have on the
18084 stack all arguments plus the desired function result (top of the stack).
18085 Generate code in order to keep only the result in our stack frame. */
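/* For instance (an illustrative sketch), with two arguments the epilogue
   built below is DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop: starting from
   a stack of (top first) result, arg0, arg1, each swap/drop pair discards
   the argument sitting just below the result, leaving only the result. */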
18086 epilogue = NULL;
18087 for (i = 0; i < dpi.args_count; ++i)
18088 {
18089 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18090 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18091 op_couple->dw_loc_next->dw_loc_next = epilogue;
18092 epilogue = op_couple;
18093 }
18094 add_loc_descr (&loc_body, epilogue);
18095 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18096 return NULL;
18097
18098 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18099 there because they are considered useful. Now that there is an epilogue,
18100 they are not useful anymore, so give it another try. */
18101 loc_descr_without_nops (loc_body);
18102
18103 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18104 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18105 though, given that size functions do not come from source, so they should
18106 not have a dedicated DW_TAG_subprogram DIE. */
18107 dwarf_proc_die
18108 = new_dwarf_proc_die (loc_body, fndecl,
18109 get_context_die (DECL_CONTEXT (fndecl)));
18110
18111 /* The called DWARF procedure consumes one stack slot per argument and
18112 returns one stack slot. */
18113 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18114
18115 return dwarf_proc_die;
18116 }
18117
18118
18119 /* Generate a DWARF location list representing LOC.
18120 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18121 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18122 If WANT_ADDRESS is 2, an expression computing an address usable in a
18123 location description is returned (i.e. DW_OP_reg can be used
18124 to refer to register values).
18125
18126 CONTEXT provides information to customize the location descriptions
18127 generation. Its context_type field specifies what type is implicitly
18128 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18129 will not be generated.
18130
18131 Its DPI field determines whether we are generating a DWARF expression for a
18132 DWARF procedure, in which case PARM_DECL references are processed specially.
18133
18134 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18135 and dpi fields were null. */
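/* For example (illustration only, not a normative description): for a
   variable living at a fixed memory address, WANT_ADDRESS == 1 would
   typically yield a DW_OP_addr operation, while WANT_ADDRESS == 0 would
   yield that same address followed by a dereference; see the
   DW_OP_deref/DW_OP_deref_size handling at the end of this function. */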
18136
18137 static dw_loc_list_ref
18138 loc_list_from_tree_1 (tree loc, int want_address,
18139 struct loc_descr_context *context)
18140 {
18141 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18142 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18143 int have_address = 0;
18144 enum dwarf_location_atom op;
18145
18146 /* ??? Most of the time we do not take proper care to sign/zero
18147 extend the values. Hopefully this won't be a real
18148 problem... */
18149
18150 if (context != NULL
18151 && context->base_decl == loc
18152 && want_address == 0)
18153 {
18154 if (dwarf_version >= 3 || !dwarf_strict)
18155 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18156 NULL, 0, NULL, 0, NULL);
18157 else
18158 return NULL;
18159 }
18160
18161 switch (TREE_CODE (loc))
18162 {
18163 case ERROR_MARK:
18164 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18165 return 0;
18166
18167 case PLACEHOLDER_EXPR:
18168 /* This case involves extracting fields from an object to determine the
18169 position of other fields. It is supposed to appear only as the first
18170 operand of COMPONENT_REF nodes and to reference precisely the type
18171 that the context allows. */
18172 if (context != NULL
18173 && TREE_TYPE (loc) == context->context_type
18174 && want_address >= 1)
18175 {
18176 if (dwarf_version >= 3 || !dwarf_strict)
18177 {
18178 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18179 have_address = 1;
18180 break;
18181 }
18182 else
18183 return NULL;
18184 }
18185 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18186 the single argument passed by the consumer. */
18187 else if (context != NULL
18188 && context->placeholder_arg
18189 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18190 && want_address == 0)
18191 {
18192 ret = new_loc_descr (DW_OP_pick, 0, 0);
18193 ret->frame_offset_rel = 1;
18194 context->placeholder_seen = true;
18195 break;
18196 }
18197 else
18198 expansion_failed (loc, NULL_RTX,
18199 "PLACEHOLDER_EXPR for an unexpected type");
18200 break;
18201
18202 case CALL_EXPR:
18203 {
18204 const int nargs = call_expr_nargs (loc);
18205 tree callee = get_callee_fndecl (loc);
18206 int i;
18207 dw_die_ref dwarf_proc;
18208
18209 if (callee == NULL_TREE)
18210 goto call_expansion_failed;
18211
18212 /* We handle only functions that return an integer. */
18213 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18214 goto call_expansion_failed;
18215
18216 dwarf_proc = function_to_dwarf_procedure (callee);
18217 if (dwarf_proc == NULL)
18218 goto call_expansion_failed;
18219
18220 /* Evaluate arguments right-to-left so that the first argument will
18221 be the top-most one on the stack. */
18222 for (i = nargs - 1; i >= 0; --i)
18223 {
18224 dw_loc_descr_ref loc_descr
18225 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18226 context);
18227
18228 if (loc_descr == NULL)
18229 goto call_expansion_failed;
18230
18231 add_loc_descr (&ret, loc_descr);
18232 }
18233
18234 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18235 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18236 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18237 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18238 add_loc_descr (&ret, ret1);
18239 break;
18240
18241 call_expansion_failed:
18242 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18243 /* We could not translate this call into a DWARF procedure call. */
18244 return 0;
18245 }
18246
18247 case PREINCREMENT_EXPR:
18248 case PREDECREMENT_EXPR:
18249 case POSTINCREMENT_EXPR:
18250 case POSTDECREMENT_EXPR:
18251 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18252 /* There are no opcodes for these operations. */
18253 return 0;
18254
18255 case ADDR_EXPR:
18256 /* If we already want an address, see if there is an INDIRECT_REF inside,
18257 e.g. for &this->field. */
18258 if (want_address)
18259 {
18260 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18261 (loc, want_address == 2, context);
18262 if (list_ret)
18263 have_address = 1;
18264 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18265 && (ret = cst_pool_loc_descr (loc)))
18266 have_address = 1;
18267 }
18268 /* Otherwise, process the argument and look for the address. */
18269 if (!list_ret && !ret)
18270 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18271 else
18272 {
18273 if (want_address)
18274 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18275 return NULL;
18276 }
18277 break;
18278
18279 case VAR_DECL:
18280 if (DECL_THREAD_LOCAL_P (loc))
18281 {
18282 rtx rtl;
18283 enum dwarf_location_atom tls_op;
18284 enum dtprel_bool dtprel = dtprel_false;
18285
18286 if (targetm.have_tls)
18287 {
18288 /* If this is not defined, we have no way to emit the
18289 data. */
18290 if (!targetm.asm_out.output_dwarf_dtprel)
18291 return 0;
18292
18293 /* The way DW_OP_GNU_push_tls_address is specified, we
18294 can only look up addresses of objects in the current
18295 module. We used DW_OP_addr as first op, but that's
18296 wrong, because DW_OP_addr is relocated by the debug
18297 info consumer, while DW_OP_GNU_push_tls_address
18298 operand shouldn't be. */
18299 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18300 return 0;
18301 dtprel = dtprel_true;
18302 /* We check for DWARF 5 here because gdb did not implement
18303 DW_OP_form_tls_address until after 7.12. */
18304 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18305 : DW_OP_GNU_push_tls_address);
18306 }
18307 else
18308 {
18309 if (!targetm.emutls.debug_form_tls_address
18310 || !(dwarf_version >= 3 || !dwarf_strict))
18311 return 0;
18312 /* We stuffed the control variable into the DECL_VALUE_EXPR
18313 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18314 no longer appear in gimple code. We used the control
18315 variable specifically so that we could pick it up here. */
18316 loc = DECL_VALUE_EXPR (loc);
18317 tls_op = DW_OP_form_tls_address;
18318 }
18319
18320 rtl = rtl_for_decl_location (loc);
18321 if (rtl == NULL_RTX)
18322 return 0;
18323
18324 if (!MEM_P (rtl))
18325 return 0;
18326 rtl = XEXP (rtl, 0);
18327 if (! CONSTANT_P (rtl))
18328 return 0;
18329
18330 ret = new_addr_loc_descr (rtl, dtprel);
18331 ret1 = new_loc_descr (tls_op, 0, 0);
18332 add_loc_descr (&ret, ret1);
18333
18334 have_address = 1;
18335 break;
18336 }
18337 /* FALLTHRU */
18338
18339 case PARM_DECL:
18340 if (context != NULL && context->dpi != NULL
18341 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18342 {
18343 /* We are generating code for a DWARF procedure and we want to access
18344 one of its arguments: find the appropriate argument offset and let
18345 the resolve_args_picking pass compute the offset that complies
18346 with the stack frame size. */
18347 unsigned i = 0;
18348 tree cursor;
18349
18350 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18351 cursor != NULL_TREE && cursor != loc;
18352 cursor = TREE_CHAIN (cursor), ++i)
18353 ;
18354 /* If we are translating a DWARF procedure, all referenced parameters
18355 must belong to the current function. */
18356 gcc_assert (cursor != NULL_TREE);
18357
18358 ret = new_loc_descr (DW_OP_pick, i, 0);
18359 ret->frame_offset_rel = 1;
18360 break;
18361 }
18362 /* FALLTHRU */
18363
18364 case RESULT_DECL:
18365 if (DECL_HAS_VALUE_EXPR_P (loc))
18366 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18367 want_address, context);
18368 /* FALLTHRU */
18369
18370 case FUNCTION_DECL:
18371 {
18372 rtx rtl;
18373 var_loc_list *loc_list = lookup_decl_loc (loc);
18374
18375 if (loc_list && loc_list->first)
18376 {
18377 list_ret = dw_loc_list (loc_list, loc, want_address);
18378 have_address = want_address != 0;
18379 break;
18380 }
18381 rtl = rtl_for_decl_location (loc);
18382 if (rtl == NULL_RTX)
18383 {
18384 if (TREE_CODE (loc) != FUNCTION_DECL
18385 && early_dwarf
18386 && current_function_decl
18387 && want_address != 1
18388 && ! DECL_IGNORED_P (loc)
18389 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18390 || POINTER_TYPE_P (TREE_TYPE (loc)))
18391 && DECL_CONTEXT (loc) == current_function_decl
18392 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18393 <= DWARF2_ADDR_SIZE))
18394 {
18395 dw_die_ref ref = lookup_decl_die (loc);
18396 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18397 if (ref)
18398 {
18399 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18400 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18401 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18402 }
18403 else
18404 {
18405 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18406 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18407 }
18408 break;
18409 }
18410 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18411 return 0;
18412 }
18413 else if (CONST_INT_P (rtl))
18414 {
18415 HOST_WIDE_INT val = INTVAL (rtl);
18416 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18417 val &= GET_MODE_MASK (DECL_MODE (loc));
18418 ret = int_loc_descriptor (val);
18419 }
18420 else if (GET_CODE (rtl) == CONST_STRING)
18421 {
18422 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18423 return 0;
18424 }
18425 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18426 ret = new_addr_loc_descr (rtl, dtprel_false);
18427 else
18428 {
18429 machine_mode mode, mem_mode;
18430
18431 /* Certain constructs can only be represented at top-level. */
18432 if (want_address == 2)
18433 {
18434 ret = loc_descriptor (rtl, VOIDmode,
18435 VAR_INIT_STATUS_INITIALIZED);
18436 have_address = 1;
18437 }
18438 else
18439 {
18440 mode = GET_MODE (rtl);
18441 mem_mode = VOIDmode;
18442 if (MEM_P (rtl))
18443 {
18444 mem_mode = mode;
18445 mode = get_address_mode (rtl);
18446 rtl = XEXP (rtl, 0);
18447 have_address = 1;
18448 }
18449 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18450 VAR_INIT_STATUS_INITIALIZED);
18451 }
18452 if (!ret)
18453 expansion_failed (loc, rtl,
18454 "failed to produce loc descriptor for rtl");
18455 }
18456 }
18457 break;
18458
18459 case MEM_REF:
18460 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18461 {
18462 have_address = 1;
18463 goto do_plus;
18464 }
18465 /* Fallthru. */
18466 case INDIRECT_REF:
18467 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18468 have_address = 1;
18469 break;
18470
18471 case TARGET_MEM_REF:
18472 case SSA_NAME:
18473 case DEBUG_EXPR_DECL:
18474 return NULL;
18475
18476 case COMPOUND_EXPR:
18477 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18478 context);
18479
18480 CASE_CONVERT:
18481 case VIEW_CONVERT_EXPR:
18482 case SAVE_EXPR:
18483 case MODIFY_EXPR:
18484 case NON_LVALUE_EXPR:
18485 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18486 context);
18487
18488 case COMPONENT_REF:
18489 case BIT_FIELD_REF:
18490 case ARRAY_REF:
18491 case ARRAY_RANGE_REF:
18492 case REALPART_EXPR:
18493 case IMAGPART_EXPR:
18494 {
18495 tree obj, offset;
18496 poly_int64 bitsize, bitpos, bytepos;
18497 machine_mode mode;
18498 int unsignedp, reversep, volatilep = 0;
18499
18500 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18501 &unsignedp, &reversep, &volatilep);
18502
18503 gcc_assert (obj != loc);
18504
18505 list_ret = loc_list_from_tree_1 (obj,
18506 want_address == 2
18507 && known_eq (bitpos, 0)
18508 && !offset ? 2 : 1,
18509 context);
18510 /* TODO: We could extract the value of a small expression via shifting even
18511 for a nonzero bitpos. */
18512 if (list_ret == 0)
18513 return 0;
18514 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18515 || !multiple_p (bitsize, BITS_PER_UNIT))
18516 {
18517 expansion_failed (loc, NULL_RTX,
18518 "bitfield access");
18519 return 0;
18520 }
18521
18522 if (offset != NULL_TREE)
18523 {
18524 /* Variable offset. */
18525 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18526 if (list_ret1 == 0)
18527 return 0;
18528 add_loc_list (&list_ret, list_ret1);
18529 if (!list_ret)
18530 return 0;
18531 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18532 }
18533
18534 HOST_WIDE_INT value;
18535 if (bytepos.is_constant (&value) && value > 0)
18536 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18537 value, 0));
18538 else if (maybe_ne (bytepos, 0))
18539 loc_list_plus_const (list_ret, bytepos);
18540
18541 have_address = 1;
18542 break;
18543 }
18544
18545 case INTEGER_CST:
18546 if ((want_address || !tree_fits_shwi_p (loc))
18547 && (ret = cst_pool_loc_descr (loc)))
18548 have_address = 1;
18549 else if (want_address == 2
18550 && tree_fits_shwi_p (loc)
18551 && (ret = address_of_int_loc_descriptor
18552 (int_size_in_bytes (TREE_TYPE (loc)),
18553 tree_to_shwi (loc))))
18554 have_address = 1;
18555 else if (tree_fits_shwi_p (loc))
18556 ret = int_loc_descriptor (tree_to_shwi (loc));
18557 else if (tree_fits_uhwi_p (loc))
18558 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18559 else
18560 {
18561 expansion_failed (loc, NULL_RTX,
18562 "Integer operand is not host integer");
18563 return 0;
18564 }
18565 break;
18566
18567 case CONSTRUCTOR:
18568 case REAL_CST:
18569 case STRING_CST:
18570 case COMPLEX_CST:
18571 if ((ret = cst_pool_loc_descr (loc)))
18572 have_address = 1;
18573 else if (TREE_CODE (loc) == CONSTRUCTOR)
18574 {
18575 tree type = TREE_TYPE (loc);
18576 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18577 unsigned HOST_WIDE_INT offset = 0;
18578 unsigned HOST_WIDE_INT cnt;
18579 constructor_elt *ce;
18580
18581 if (TREE_CODE (type) == RECORD_TYPE)
18582 {
18583 /* This is very limited, but it's enough to output
18584 pointers to member functions, as long as the
18585 referenced function is defined in the current
18586 translation unit. */
18587 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18588 {
18589 tree val = ce->value;
18590
18591 tree field = ce->index;
18592
18593 if (val)
18594 STRIP_NOPS (val);
18595
18596 if (!field || DECL_BIT_FIELD (field))
18597 {
18598 expansion_failed (loc, NULL_RTX,
18599 "bitfield in record type constructor");
18600 size = offset = (unsigned HOST_WIDE_INT)-1;
18601 ret = NULL;
18602 break;
18603 }
18604
18605 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18606 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18607 gcc_assert (pos + fieldsize <= size);
18608 if (pos < offset)
18609 {
18610 expansion_failed (loc, NULL_RTX,
18611 "out-of-order fields in record constructor");
18612 size = offset = (unsigned HOST_WIDE_INT)-1;
18613 ret = NULL;
18614 break;
18615 }
18616 if (pos > offset)
18617 {
18618 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18619 add_loc_descr (&ret, ret1);
18620 offset = pos;
18621 }
18622 if (val && fieldsize != 0)
18623 {
18624 ret1 = loc_descriptor_from_tree (val, want_address, context);
18625 if (!ret1)
18626 {
18627 expansion_failed (loc, NULL_RTX,
18628 "unsupported expression in field");
18629 size = offset = (unsigned HOST_WIDE_INT)-1;
18630 ret = NULL;
18631 break;
18632 }
18633 add_loc_descr (&ret, ret1);
18634 }
18635 if (fieldsize)
18636 {
18637 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18638 add_loc_descr (&ret, ret1);
18639 offset = pos + fieldsize;
18640 }
18641 }
18642
18643 if (offset != size)
18644 {
18645 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18646 add_loc_descr (&ret, ret1);
18647 offset = size;
18648 }
18649
18650 have_address = !!want_address;
18651 }
18652 else
18653 expansion_failed (loc, NULL_RTX,
18654 "constructor of non-record type");
18655 }
18656 else
18657 /* We can construct small constants here using int_loc_descriptor. */
18658 expansion_failed (loc, NULL_RTX,
18659 "constructor or constant not in constant pool");
18660 break;
18661
18662 case TRUTH_AND_EXPR:
18663 case TRUTH_ANDIF_EXPR:
18664 case BIT_AND_EXPR:
18665 op = DW_OP_and;
18666 goto do_binop;
18667
18668 case TRUTH_XOR_EXPR:
18669 case BIT_XOR_EXPR:
18670 op = DW_OP_xor;
18671 goto do_binop;
18672
18673 case TRUTH_OR_EXPR:
18674 case TRUTH_ORIF_EXPR:
18675 case BIT_IOR_EXPR:
18676 op = DW_OP_or;
18677 goto do_binop;
18678
18679 case FLOOR_DIV_EXPR:
18680 case CEIL_DIV_EXPR:
18681 case ROUND_DIV_EXPR:
18682 case TRUNC_DIV_EXPR:
18683 case EXACT_DIV_EXPR:
18684 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18685 return 0;
18686 op = DW_OP_div;
18687 goto do_binop;
18688
18689 case MINUS_EXPR:
18690 op = DW_OP_minus;
18691 goto do_binop;
18692
18693 case FLOOR_MOD_EXPR:
18694 case CEIL_MOD_EXPR:
18695 case ROUND_MOD_EXPR:
18696 case TRUNC_MOD_EXPR:
18697 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18698 {
18699 op = DW_OP_mod;
18700 goto do_binop;
18701 }
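      /* DW_OP_mod is used above only for the unsigned case; for signed
	 operands, lower a % b to a - (a / b) * b instead.  With a pushed
	 first and b on top, the sequence built below evaluates as
	 (sketch, stack shown top first):
	   over over  =>  b a b a
	   div        =>  a/b b a
	   mul        =>  (a/b)*b a
	   minus      =>  a - (a/b)*b  */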
18702 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18703 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18704 if (list_ret == 0 || list_ret1 == 0)
18705 return 0;
18706
18707 add_loc_list (&list_ret, list_ret1);
18708 if (list_ret == 0)
18709 return 0;
18710 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18711 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18712 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18713 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18714 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18715 break;
18716
18717 case MULT_EXPR:
18718 op = DW_OP_mul;
18719 goto do_binop;
18720
18721 case LSHIFT_EXPR:
18722 op = DW_OP_shl;
18723 goto do_binop;
18724
18725 case RSHIFT_EXPR:
18726 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18727 goto do_binop;
18728
18729 case POINTER_PLUS_EXPR:
18730 case PLUS_EXPR:
18731 do_plus:
18732 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18733 {
18734 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18735 smarter to encode their opposite. The DW_OP_plus_uconst operation
18736 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18737 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18738 bytes, Y being the size of the operation that pushes the opposite
18739 of the addend. So let's choose the smallest representation. */
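	  /* A concrete illustration (hand-computed, assuming a 64-bit address
	     size): for an addend whose bit pattern is all ones, the
	     sign-extended value is -1 and its opposite is 1.  DW_OP_plus_uconst
	     would need a 10-byte ULEB128 operand (11 bytes in total), whereas
	     "DW_OP_lit1; DW_OP_minus" takes only 2 bytes, so the subtraction
	     form wins below. */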
18740 const tree tree_addend = TREE_OPERAND (loc, 1);
18741 offset_int wi_addend;
18742 HOST_WIDE_INT shwi_addend;
18743 dw_loc_descr_ref loc_naddend;
18744
18745 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18746 if (list_ret == 0)
18747 return 0;
18748
18749 /* Try to get the literal to push. It is the opposite of the addend,
18750 so as we rely on wrapping during DWARF evaluation, first decode
18751 the literal as a "DWARF-sized" signed number. */
18752 wi_addend = wi::to_offset (tree_addend);
18753 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18754 shwi_addend = wi_addend.to_shwi ();
18755 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18756 ? int_loc_descriptor (-shwi_addend)
18757 : NULL;
18758
18759 if (loc_naddend != NULL
18760 && ((unsigned) size_of_uleb128 (shwi_addend)
18761 > size_of_loc_descr (loc_naddend)))
18762 {
18763 add_loc_descr_to_each (list_ret, loc_naddend);
18764 add_loc_descr_to_each (list_ret,
18765 new_loc_descr (DW_OP_minus, 0, 0));
18766 }
18767 else
18768 {
18769 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18770 {
18771 loc_naddend = loc_cur;
18772 loc_cur = loc_cur->dw_loc_next;
18773 ggc_free (loc_naddend);
18774 }
18775 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18776 }
18777 break;
18778 }
18779
18780 op = DW_OP_plus;
18781 goto do_binop;
18782
18783 case LE_EXPR:
18784 op = DW_OP_le;
18785 goto do_comp_binop;
18786
18787 case GE_EXPR:
18788 op = DW_OP_ge;
18789 goto do_comp_binop;
18790
18791 case LT_EXPR:
18792 op = DW_OP_lt;
18793 goto do_comp_binop;
18794
18795 case GT_EXPR:
18796 op = DW_OP_gt;
18797 goto do_comp_binop;
18798
18799 do_comp_binop:
18800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18801 {
18802 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18803 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18804 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18805 TREE_CODE (loc));
18806 break;
18807 }
18808 else
18809 goto do_binop;
18810
18811 case EQ_EXPR:
18812 op = DW_OP_eq;
18813 goto do_binop;
18814
18815 case NE_EXPR:
18816 op = DW_OP_ne;
18817 goto do_binop;
18818
18819 do_binop:
18820 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18821 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18822 if (list_ret == 0 || list_ret1 == 0)
18823 return 0;
18824
18825 add_loc_list (&list_ret, list_ret1);
18826 if (list_ret == 0)
18827 return 0;
18828 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18829 break;
18830
18831 case TRUTH_NOT_EXPR:
18832 case BIT_NOT_EXPR:
18833 op = DW_OP_not;
18834 goto do_unop;
18835
18836 case ABS_EXPR:
18837 op = DW_OP_abs;
18838 goto do_unop;
18839
18840 case NEGATE_EXPR:
18841 op = DW_OP_neg;
18842 goto do_unop;
18843
18844 do_unop:
18845 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18846 if (list_ret == 0)
18847 return 0;
18848
18849 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18850 break;
18851
18852 case MIN_EXPR:
18853 case MAX_EXPR:
18854 {
18855 const enum tree_code code =
18856 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18857
18858 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18859 build2 (code, integer_type_node,
18860 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18861 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18862 }
18863
18864 /* fall through */
18865
18866 case COND_EXPR:
18867 {
18868 dw_loc_descr_ref lhs
18869 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18870 dw_loc_list_ref rhs
18871 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18872 dw_loc_descr_ref bra_node, jump_node, tmp;
18873
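	/* The expression built below has the following shape (sketch; L1 and
	   L2 are just labels for this comment):
	     <condition> DW_OP_bra L1 <else-value> DW_OP_skip L2
	     L1: <then-value> L2: DW_OP_nop
	   DW_OP_bra branches when the popped condition is non-zero, so the
	   then-value (operand 1) is evaluated exactly when the condition
	   holds. */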
18874 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18875 if (list_ret == 0 || lhs == 0 || rhs == 0)
18876 return 0;
18877
18878 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18879 add_loc_descr_to_each (list_ret, bra_node);
18880
18881 add_loc_list (&list_ret, rhs);
18882 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18883 add_loc_descr_to_each (list_ret, jump_node);
18884
18885 add_loc_descr_to_each (list_ret, lhs);
18886 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18887 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18888
18889 /* ??? Need a node to point the skip at. Use a nop. */
18890 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18891 add_loc_descr_to_each (list_ret, tmp);
18892 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18893 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18894 }
18895 break;
18896
18897 case FIX_TRUNC_EXPR:
18898 return 0;
18899
18900 default:
18901 /* Leave front-end specific codes as simply unknown. This comes
18902 up, for instance, with the C STMT_EXPR. */
18903 if ((unsigned int) TREE_CODE (loc)
18904 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18905 {
18906 expansion_failed (loc, NULL_RTX,
18907 "language specific tree node");
18908 return 0;
18909 }
18910
18911 /* Otherwise this is a generic code; we should just list all of
18912 these explicitly. We forgot one. */
18913 if (flag_checking)
18914 gcc_unreachable ();
18915
18916 /* In a release build, we want to degrade gracefully: better to
18917 generate incomplete debugging information than to crash. */
18918 return NULL;
18919 }
18920
18921 if (!ret && !list_ret)
18922 return 0;
18923
18924 if (want_address == 2 && !have_address
18925 && (dwarf_version >= 4 || !dwarf_strict))
18926 {
18927 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18928 {
18929 expansion_failed (loc, NULL_RTX,
18930 "DWARF address size mismatch");
18931 return 0;
18932 }
18933 if (ret)
18934 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18935 else
18936 add_loc_descr_to_each (list_ret,
18937 new_loc_descr (DW_OP_stack_value, 0, 0));
18938 have_address = 1;
18939 }
18940 /* Report it if we can't fulfill the request for an address. */
18941 if (want_address && !have_address)
18942 {
18943 expansion_failed (loc, NULL_RTX,
18944 "Want address and only have value");
18945 return 0;
18946 }
18947
18948 gcc_assert (!ret || !list_ret);
18949
18950 /* If we've got an address and don't want one, dereference. */
18951 if (!want_address && have_address)
18952 {
18953 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18954
18955 if (size > DWARF2_ADDR_SIZE || size == -1)
18956 {
18957 expansion_failed (loc, NULL_RTX,
18958 "DWARF address size mismatch");
18959 return 0;
18960 }
18961 else if (size == DWARF2_ADDR_SIZE)
18962 op = DW_OP_deref;
18963 else
18964 op = DW_OP_deref_size;
18965
18966 if (ret)
18967 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18968 else
18969 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18970 }
18971 if (ret)
18972 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18973
18974 return list_ret;
18975 }
18976
18977 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18978 expressions. */
18979
18980 static dw_loc_list_ref
18981 loc_list_from_tree (tree loc, int want_address,
18982 struct loc_descr_context *context)
18983 {
18984 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18985
18986 for (dw_loc_list_ref loc_cur = result;
18987 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18988 loc_descr_without_nops (loc_cur->expr);
18989 return result;
18990 }
18991
18992 /* Same as above, but return only a single location expression. */
18993 static dw_loc_descr_ref
18994 loc_descriptor_from_tree (tree loc, int want_address,
18995 struct loc_descr_context *context)
18996 {
18997 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18998 if (!ret)
18999 return NULL;
19000 if (ret->dw_loc_next)
19001 {
19002 expansion_failed (loc, NULL_RTX,
19003 "Location list where only loc descriptor needed");
19004 return NULL;
19005 }
19006 return ret->expr;
19007 }
19008
19009 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19010 pointer to the declared type for the relevant field variable, or return
19011 `integer_type_node' if the given node turns out to be an
19012 ERROR_MARK node. */
19013
19014 static inline tree
19015 field_type (const_tree decl)
19016 {
19017 tree type;
19018
19019 if (TREE_CODE (decl) == ERROR_MARK)
19020 return integer_type_node;
19021
19022 type = DECL_BIT_FIELD_TYPE (decl);
19023 if (type == NULL_TREE)
19024 type = TREE_TYPE (decl);
19025
19026 return type;
19027 }
19028
19029 /* Given a pointer to a tree node, return the alignment in bits for
19030 it, or else return BITS_PER_WORD if the node actually turns out to
19031 be an ERROR_MARK node. */
19032
19033 static inline unsigned
19034 simple_type_align_in_bits (const_tree type)
19035 {
19036 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19037 }
19038
19039 static inline unsigned
19040 simple_decl_align_in_bits (const_tree decl)
19041 {
19042 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19043 }
19044
19045 /* Return the result of rounding T up to ALIGN. */
19046
19047 static inline offset_int
19048 round_up_to_align (const offset_int &t, unsigned int align)
19049 {
19050 return wi::udiv_trunc (t + align - 1, align) * align;
19051 }
19052
19053 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19054 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19055 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19056 if we fail to return the size in one of these two forms. */
19057
19058 static dw_loc_descr_ref
19059 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19060 {
19061 tree tree_size;
19062 struct loc_descr_context ctx;
19063
19064 /* By preference, return a constant integer if possible. */
19065 *cst_size = int_size_in_bytes (type);
19066 if (*cst_size != -1)
19067 return NULL;
19068
19069 ctx.context_type = const_cast<tree> (type);
19070 ctx.base_decl = NULL_TREE;
19071 ctx.dpi = NULL;
19072 ctx.placeholder_arg = false;
19073 ctx.placeholder_seen = false;
19074
19075 type = TYPE_MAIN_VARIANT (type);
19076 tree_size = TYPE_SIZE_UNIT (type);
19077 return ((tree_size != NULL_TREE)
19078 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19079 : NULL);
19080 }
19081
19082 /* Helper structure for RECORD_TYPE processing. */
19083 struct vlr_context
19084 {
19085 /* Root RECORD_TYPE. It is needed to generate data member location
19086 descriptions in variable-length records (VLR), but also to cope with
19087 variants, which are composed of nested structures multiplexed with
19088 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19089 function processing a FIELD_DECL, it is required to be non null. */
19090 tree struct_type;
19091 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19092 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19093 this variant part as part of the root record (in storage units). For
19094 regular records, it must be NULL_TREE. */
19095 tree variant_part_offset;
19096 };
19097
19098 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19099 addressed byte of the "containing object" for the given FIELD_DECL. If
19100 possible, return a native constant through CST_OFFSET (in which case NULL is
19101 returned); otherwise return a DWARF expression that computes the offset.
19102
19103 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19104 that offset is, either because the argument turns out to be a pointer to an
19105 ERROR_MARK node, or because the offset expression is too complex for us.
19106
19107 CTX is required: see the comment for VLR_CONTEXT. */
19108
19109 static dw_loc_descr_ref
19110 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19111 HOST_WIDE_INT *cst_offset)
19112 {
19113 tree tree_result;
19114 dw_loc_list_ref loc_result;
19115
19116 *cst_offset = 0;
19117
19118 if (TREE_CODE (decl) == ERROR_MARK)
19119 return NULL;
19120 else
19121 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19122
19123 /* We cannot handle variable bit offsets at the moment, so give up if that
19124 is the case. */
19125 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19126 return NULL;
19127
19128 /* We used to handle only constant offsets in all cases. Now, we properly
19129 handle dynamic byte offsets only when the PCC bitfield layout rules do
19130 not matter. */
19131 if (PCC_BITFIELD_TYPE_MATTERS
19132 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19133 {
19134 offset_int object_offset_in_bits;
19135 offset_int object_offset_in_bytes;
19136 offset_int bitpos_int;
19137 tree type;
19138 tree field_size_tree;
19139 offset_int deepest_bitpos;
19140 offset_int field_size_in_bits;
19141 unsigned int type_align_in_bits;
19142 unsigned int decl_align_in_bits;
19143 offset_int type_size_in_bits;
19144
19145 bitpos_int = wi::to_offset (bit_position (decl));
19146 type = field_type (decl);
19147 type_size_in_bits = offset_int_type_size_in_bits (type);
19148 type_align_in_bits = simple_type_align_in_bits (type);
19149
19150 field_size_tree = DECL_SIZE (decl);
19151
19152 /* The size could be unspecified if there was an error, or for
19153 a flexible array member. */
19154 if (!field_size_tree)
19155 field_size_tree = bitsize_zero_node;
19156
19157 /* If the size of the field is not constant, use the type size. */
19158 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19159 field_size_in_bits = wi::to_offset (field_size_tree);
19160 else
19161 field_size_in_bits = type_size_in_bits;
19162
19163 decl_align_in_bits = simple_decl_align_in_bits (decl);
19164
19165 /* The GCC front-end doesn't make any attempt to keep track of the
19166 starting bit offset (relative to the start of the containing
19167 structure type) of the hypothetical "containing object" for a
19168 bit-field. Thus, when computing the byte offset value for the
19169 start of the "containing object" of a bit-field, we must deduce
19170 this information on our own. This can be rather tricky to do in
19171 some cases. For example, handling the following structure type
19172 definition when compiling for an i386/i486 target (which only
19173 aligns long long's to 32-bit boundaries) can be very tricky:
19174
19175 struct S { int field1; long long field2:31; };
19176
19177 Fortunately, there is a simple rule-of-thumb which can be used
19178 in such cases. When compiling for an i386/i486, GCC will
19179 allocate 8 bytes for the structure shown above. It decides to
19180 do this based upon one simple rule for bit-field allocation.
19181 GCC allocates each "containing object" for each bit-field at
19182 the first (i.e. lowest addressed) legitimate alignment boundary
19183 (based upon the required minimum alignment for the declared
19184 type of the field) which it can possibly use, subject to the
19185 condition that there is still enough available space remaining
19186 in the containing object (when allocated at the selected point)
19187 to fully accommodate all of the bits of the bit-field itself.
19188
19189 This simple rule makes it obvious why GCC allocates 8 bytes for
19190 each object of the structure type shown above. When looking
19191 for a place to allocate the "containing object" for `field2',
19192 the compiler simply tries to allocate a 64-bit "containing
19193 object" at each successive 32-bit boundary (starting at zero)
19194 until it finds a place to allocate that 64-bit field such that
19195 at least 31 contiguous (and previously unallocated) bits remain
19196 within that selected 64 bit field. (As it turns out, for the
19197 example above, the compiler finds it is OK to allocate the
19198 "containing object" 64-bit field at bit-offset zero within the
19199 structure type.)
19200
19201 Here we attempt to work backwards from the limited set of facts
19202 we're given, and we try to deduce from those facts, where GCC
19203 must have believed that the containing object started (within
19204 the structure type). The value we deduce is then used (by the
19205 callers of this routine) to generate DW_AT_location and
19206 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19207 the case of DW_AT_location, regular fields as well). */
19208
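/* Working through the struct S example above (hand-computed, assuming
   field2 is laid out at bit offset 32 with a declared size of 31 bits and
   a 64-bit type aligned to 32 bits): deepest_bitpos is 32 + 31 = 63, the
   initial guess for the object offset is 63 - 64 = -1 bits, and rounding
   up to the 32-bit type alignment gives 0, i.e. the containing 64-bit
   object starts at byte offset 0, as claimed above. */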
19209 /* Figure out the bit-distance from the start of the structure to
19210 the "deepest" bit of the bit-field. */
19211 deepest_bitpos = bitpos_int + field_size_in_bits;
19212
19213 /* This is the tricky part. Use some fancy footwork to deduce
19214 where the lowest addressed bit of the containing object must
19215 be. */
19216 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19217
19218 /* Round up to type_align by default. This works best for
19219 bitfields. */
19220 object_offset_in_bits
19221 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19222
19223 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19224 {
19225 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19226
19227 /* Round up to decl_align instead. */
19228 object_offset_in_bits
19229 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19230 }
19231
19232 object_offset_in_bytes
19233 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19234 if (ctx->variant_part_offset == NULL_TREE)
19235 {
19236 *cst_offset = object_offset_in_bytes.to_shwi ();
19237 return NULL;
19238 }
19239 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19240 }
19241 else
19242 tree_result = byte_position (decl);
19243
19244 if (ctx->variant_part_offset != NULL_TREE)
19245 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19246 ctx->variant_part_offset, tree_result);
19247
19248 /* If the byte offset is a constant, it's simpler to handle a native
19249 constant rather than a DWARF expression. */
19250 if (TREE_CODE (tree_result) == INTEGER_CST)
19251 {
19252 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19253 return NULL;
19254 }
19255 struct loc_descr_context loc_ctx = {
19256 ctx->struct_type, /* context_type */
19257 NULL_TREE, /* base_decl */
19258 NULL, /* dpi */
19259 false, /* placeholder_arg */
19260 false /* placeholder_seen */
19261 };
19262 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19263
19264 /* We want a DWARF expression: abort if we only have a location list with
19265 multiple elements. */
19266 if (!loc_result || !single_element_loc_list_p (loc_result))
19267 return NULL;
19268 else
19269 return loc_result->expr;
19270 }
19271 \f
19272 /* The following routines define various Dwarf attributes and any data
19273 associated with them. */
19274
19275 /* Add a location description attribute value to a DIE.
19276
19277 This emits location attributes suitable for whole variables and
19278 whole parameters. Note that the location attributes for struct fields are
19279 generated by the routine `data_member_location_attribute' below. */
19280
19281 static inline void
19282 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19283 dw_loc_list_ref descr)
19284 {
19285 bool check_no_locviews = true;
19286 if (descr == 0)
19287 return;
19288 if (single_element_loc_list_p (descr))
19289 add_AT_loc (die, attr_kind, descr->expr);
19290 else
19291 {
19292 add_AT_loc_list (die, attr_kind, descr);
19293 gcc_assert (descr->ll_symbol);
19294 if (attr_kind == DW_AT_location && descr->vl_symbol
19295 && dwarf2out_locviews_in_attribute ())
19296 {
19297 add_AT_view_list (die, DW_AT_GNU_locviews);
19298 check_no_locviews = false;
19299 }
19300 }
19301
19302 if (check_no_locviews)
19303 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19304 }
19305
19306 /* Add DW_AT_accessibility attribute to DIE if needed. */
19307
19308 static void
19309 add_accessibility_attribute (dw_die_ref die, tree decl)
19310 {
19311 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19312 children, otherwise the default is DW_ACCESS_public. In DWARF2
19313 the default has always been DW_ACCESS_public. */
19314 if (TREE_PROTECTED (decl))
19315 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19316 else if (TREE_PRIVATE (decl))
19317 {
19318 if (dwarf_version == 2
19319 || die->die_parent == NULL
19320 || die->die_parent->die_tag != DW_TAG_class_type)
19321 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19322 }
19323 else if (dwarf_version > 2
19324 && die->die_parent
19325 && die->die_parent->die_tag == DW_TAG_class_type)
19326 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19327 }
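/* Illustrative example (not part of the original source): for the C++ class

     class C { int a; protected: int b; public: int c; };

   with DWARF 3 or later the member DIEs under the DW_TAG_class_type get
   attributes as follows: `a' gets no DW_AT_accessibility (private is the
   default inside a class), `b' gets DW_ACCESS_protected, and `c' gets an
   explicit DW_ACCESS_public.  With plain DWARF 2, where the default is
   always public, `a' would instead get an explicit DW_ACCESS_private and
   `c' nothing.  */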
19328
19329 /* Attach the specialized form of location attribute used for data members of
19330 struct and union types. In the special case of a FIELD_DECL node which
19331 represents a bit-field, the "offset" part of this special location
19332 descriptor must indicate the distance in bytes from the lowest-addressed
19333 byte of the containing struct or union type to the lowest-addressed byte of
19334 the "containing object" for the bit-field. (See the `field_byte_offset'
19335 function above).
19336
19337 For any given bit-field, the "containing object" is a hypothetical object
19338 (of some integral or enum type) within which the given bit-field lives. The
19339 type of this hypothetical "containing object" is always the same as the
19340 declared type of the individual bit-field itself (for GCC anyway... the
19341 DWARF spec doesn't actually mandate this). Note that it is the size (in
19342 bytes) of the hypothetical "containing object" which will be given in the
19343 DW_AT_byte_size attribute for this bit-field. (See the
19344 `byte_size_attribute' function below.) It is also used when calculating the
19345 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19346 function below.)
19347
19348 CTX is required: see the comment for VLR_CONTEXT. */
19349
19350 static void
19351 add_data_member_location_attribute (dw_die_ref die,
19352 tree decl,
19353 struct vlr_context *ctx)
19354 {
19355 HOST_WIDE_INT offset;
19356 dw_loc_descr_ref loc_descr = 0;
19357
19358 if (TREE_CODE (decl) == TREE_BINFO)
19359 {
19360 /* We're working on the TAG_inheritance for a base class. */
19361 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19362 {
19363 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19364 aren't at a fixed offset from all (sub)objects of the same
19365 type. We need to extract the appropriate offset from our
19366 vtable. The following dwarf expression means
19367
19368 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19369
19370 This is specific to the V3 ABI, of course. */
19371
19372 dw_loc_descr_ref tmp;
19373
19374 /* Make a copy of the object address. */
19375 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19376 add_loc_descr (&loc_descr, tmp);
19377
19378 /* Extract the vtable address. */
19379 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19380 add_loc_descr (&loc_descr, tmp);
19381
19382 /* Calculate the address of the offset. */
19383 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19384 gcc_assert (offset < 0);
19385
19386 tmp = int_loc_descriptor (-offset);
19387 add_loc_descr (&loc_descr, tmp);
19388 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19389 add_loc_descr (&loc_descr, tmp);
19390
19391 /* Extract the offset. */
19392 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19393 add_loc_descr (&loc_descr, tmp);
19394
19395 /* Add it to the object address. */
19396 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19397 add_loc_descr (&loc_descr, tmp);
19398 }
19399 else
19400 offset = tree_to_shwi (BINFO_OFFSET (decl));
19401 }
19402 else
19403 {
19404 loc_descr = field_byte_offset (decl, ctx, &offset);
19405
19406 /* If loc_descr is available then we know the field offset is dynamic.
19407 However, GDB does not handle dynamic field offsets very well at the
19408 moment. */
19409 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19410 {
19411 loc_descr = NULL;
19412 offset = 0;
19413 }
19414
19415 /* Data member location evaluation starts with the base address on the
19416 stack. Compute the field offset and add it to this base address. */
19417 else if (loc_descr != NULL)
19418 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19419 }
19420
19421 if (! loc_descr)
19422 {
19423 /* While DW_AT_data_bit_offset was already added in DWARF4,
19424 e.g. GDB only added support for it in November 2016. For DWARF5
19425 we need newer debug info consumers anyway. We might change this
19426 to dwarf_version >= 4 once most consumers have caught up. */
19427 if (dwarf_version >= 5
19428 && TREE_CODE (decl) == FIELD_DECL
19429 && DECL_BIT_FIELD_TYPE (decl))
19430 {
19431 tree off = bit_position (decl);
19432 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19433 {
19434 remove_AT (die, DW_AT_byte_size);
19435 remove_AT (die, DW_AT_bit_offset);
19436 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19437 return;
19438 }
19439 }
19440 if (dwarf_version > 2)
19441 {
19442 /* Don't need to output a location expression, just the constant. */
19443 if (offset < 0)
19444 add_AT_int (die, DW_AT_data_member_location, offset);
19445 else
19446 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19447 return;
19448 }
19449 else
19450 {
19451 enum dwarf_location_atom op;
19452
19453 /* The DWARF2 standard says that we should assume that the structure
19454 address is already on the stack, so we can specify a structure
19455 field address by using DW_OP_plus_uconst. */
19456 op = DW_OP_plus_uconst;
19457 loc_descr = new_loc_descr (op, offset, 0);
19458 }
19459 }
19460
19461 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19462 }
19463
19464 /* Writes integer values to dw_vec_const array. */
19465
19466 static void
19467 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19468 {
19469 while (size != 0)
19470 {
19471 *dest++ = val & 0xff;
19472 val >>= 8;
19473 --size;
19474 }
19475 }
19476
19477 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19478
19479 static HOST_WIDE_INT
19480 extract_int (const unsigned char *src, unsigned int size)
19481 {
19482 HOST_WIDE_INT val = 0;
19483
19484 src += size;
19485 while (size != 0)
19486 {
19487 val <<= 8;
19488 val |= *--src & 0xff;
19489 --size;
19490 }
19491 return val;
19492 }
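/* A small usage sketch (not part of the original source): the two helpers
   above store values least-significant byte first, e.g.

     unsigned char buf[2];
     insert_int (0x1234, 2, buf);             => buf[0] == 0x34, buf[1] == 0x12
     HOST_WIDE_INT v = extract_int (buf, 2);  => v == 0x1234

   so extract_int is the exact inverse of insert_int for a given SIZE.  */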
19493
19494 /* Writes wide_int values to dw_vec_const array. */
19495
19496 static void
19497 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19498 {
19499 int i;
19500
19501 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19502 {
19503 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19504 return;
19505 }
19506
19507 /* We'd have to extend this code to support odd sizes. */
19508 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19509
19510 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19511
19512 if (WORDS_BIG_ENDIAN)
19513 for (i = n - 1; i >= 0; i--)
19514 {
19515 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19516 dest += sizeof (HOST_WIDE_INT);
19517 }
19518 else
19519 for (i = 0; i < n; i++)
19520 {
19521 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19522 dest += sizeof (HOST_WIDE_INT);
19523 }
19524 }
19525
19526 /* Writes floating point values to dw_vec_const array. */
19527
19528 static void
19529 insert_float (const_rtx rtl, unsigned char *array)
19530 {
19531 long val[4];
19532 int i;
19533 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19534
19535 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19536
19537 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19538 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19539 {
19540 insert_int (val[i], 4, array);
19541 array += 4;
19542 }
19543 }
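/* For example (illustrative only): a DFmode CONST_DOUBLE has
   GET_MODE_SIZE (mode) == 8, so real_to_target fills val[0] and val[1]
   with two 32-bit pieces and the loop above packs them into 8 bytes of
   the dw_vec_const array.  */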
19544
19545 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19546 does not have a "location" either in memory or in a register. These
19547 things can arise in GNU C when a constant is passed as an actual parameter
19548 to an inlined function. They can also arise in C++ where declared
19549 constants do not necessarily get memory "homes". */
19550
19551 static bool
19552 add_const_value_attribute (dw_die_ref die, rtx rtl)
19553 {
19554 switch (GET_CODE (rtl))
19555 {
19556 case CONST_INT:
19557 {
19558 HOST_WIDE_INT val = INTVAL (rtl);
19559
19560 if (val < 0)
19561 add_AT_int (die, DW_AT_const_value, val);
19562 else
19563 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19564 }
19565 return true;
19566
19567 case CONST_WIDE_INT:
19568 {
19569 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19570 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19571 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19572 wide_int w = wi::zext (w1, prec);
19573 add_AT_wide (die, DW_AT_const_value, w);
19574 }
19575 return true;
19576
19577 case CONST_DOUBLE:
19578 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19579 floating-point constant. A CONST_DOUBLE is used whenever the
19580 constant requires more than one word in order to be adequately
19581 represented. */
19582 if (TARGET_SUPPORTS_WIDE_INT == 0
19583 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19584 add_AT_double (die, DW_AT_const_value,
19585 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19586 else
19587 {
19588 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19589 unsigned int length = GET_MODE_SIZE (mode);
19590 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19591
19592 insert_float (rtl, array);
19593 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19594 }
19595 return true;
19596
19597 case CONST_VECTOR:
19598 {
19599 unsigned int length;
19600 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19601 return false;
19602
19603 machine_mode mode = GET_MODE (rtl);
19604 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19605 unsigned char *array
19606 = ggc_vec_alloc<unsigned char> (length * elt_size);
19607 unsigned int i;
19608 unsigned char *p;
19609 machine_mode imode = GET_MODE_INNER (mode);
19610
19611 switch (GET_MODE_CLASS (mode))
19612 {
19613 case MODE_VECTOR_INT:
19614 for (i = 0, p = array; i < length; i++, p += elt_size)
19615 {
19616 rtx elt = CONST_VECTOR_ELT (rtl, i);
19617 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19618 }
19619 break;
19620
19621 case MODE_VECTOR_FLOAT:
19622 for (i = 0, p = array; i < length; i++, p += elt_size)
19623 {
19624 rtx elt = CONST_VECTOR_ELT (rtl, i);
19625 insert_float (elt, p);
19626 }
19627 break;
19628
19629 default:
19630 gcc_unreachable ();
19631 }
19632
19633 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19634 }
19635 return true;
19636
19637 case CONST_STRING:
19638 if (dwarf_version >= 4 || !dwarf_strict)
19639 {
19640 dw_loc_descr_ref loc_result;
19641 resolve_one_addr (&rtl);
19642 rtl_addr:
19643 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19644 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19645 add_AT_loc (die, DW_AT_location, loc_result);
19646 vec_safe_push (used_rtx_array, rtl);
19647 return true;
19648 }
19649 return false;
19650
19651 case CONST:
19652 if (CONSTANT_P (XEXP (rtl, 0)))
19653 return add_const_value_attribute (die, XEXP (rtl, 0));
19654 /* FALLTHROUGH */
19655 case SYMBOL_REF:
19656 if (!const_ok_for_output (rtl))
19657 return false;
19658 /* FALLTHROUGH */
19659 case LABEL_REF:
19660 if (dwarf_version >= 4 || !dwarf_strict)
19661 goto rtl_addr;
19662 return false;
19663
19664 case PLUS:
19665 /* In cases where an inlined instance of an inline function is passed
19666 the address of an `auto' variable (which is local to the caller) we
19667 can get a situation where the DECL_RTL of the artificial local
19668 variable (for the inlining) which acts as a stand-in for the
19669 corresponding formal parameter (of the inline function) will look
19670 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19671 exactly a compile-time constant expression, but it isn't the address
19672 of the (artificial) local variable either. Rather, it represents the
19673 *value* which the artificial local variable always has during its
19674 lifetime. We currently have no way to represent such quasi-constant
19675 values in Dwarf, so for now we just punt and generate nothing. */
19676 return false;
19677
19678 case HIGH:
19679 case CONST_FIXED:
19680 case MINUS:
19681 case SIGN_EXTEND:
19682 case ZERO_EXTEND:
19683 return false;
19684
19685 case MEM:
19686 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19687 && MEM_READONLY_P (rtl)
19688 && GET_MODE (rtl) == BLKmode)
19689 {
19690 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19691 return true;
19692 }
19693 return false;
19694
19695 default:
19696 /* No other kinds of rtx should be possible here. */
19697 gcc_unreachable ();
19698 }
19699 return false;
19700 }
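/* Illustrative examples (not part of the original source) of what the
   switch above produces:

     (const_int 42)                  => DW_AT_const_value 42 (unsigned form)
     (const_int -7)                  => DW_AT_const_value -7 (signed form)
     (const_double:DF ...)           => DW_AT_const_value as a block of
                                        4-byte elements built by insert_float
     (mem/u:BLK (const_string "hi")) => DW_AT_const_value "hi"

   Anything that amounts to a run-time address computation, such as a PLUS
   of a frame register and an offset, is rejected and the function returns
   false.  */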
19701
19702 /* Determine whether the evaluation of EXPR references any variables
19703 or functions which aren't otherwise used (and therefore may not be
19704 output). */
19705 static tree
19706 reference_to_unused (tree * tp, int * walk_subtrees,
19707 void * data ATTRIBUTE_UNUSED)
19708 {
19709 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19710 *walk_subtrees = 0;
19711
19712 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19713 && ! TREE_ASM_WRITTEN (*tp))
19714 return *tp;
19715 /* ??? The C++ FE emits debug information for using decls, so
19716 putting gcc_unreachable here falls over. See PR31899. For now
19717 be conservative. */
19718 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19719 return *tp;
19720 else if (VAR_P (*tp))
19721 {
19722 varpool_node *node = varpool_node::get (*tp);
19723 if (!node || !node->definition)
19724 return *tp;
19725 }
19726 else if (TREE_CODE (*tp) == FUNCTION_DECL
19727 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19728 {
19729 /* The call graph machinery must have finished analyzing,
19730 optimizing and gimplifying the CU by now.
19731 So if *TP has no call graph node associated
19732 to it, it means *TP will not be emitted. */
19733 if (!cgraph_node::get (*tp))
19734 return *tp;
19735 }
19736 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19737 return *tp;
19738
19739 return NULL_TREE;
19740 }
19741
19742 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19743 for use in a later add_const_value_attribute call. */
19744
19745 static rtx
19746 rtl_for_decl_init (tree init, tree type)
19747 {
19748 rtx rtl = NULL_RTX;
19749
19750 STRIP_NOPS (init);
19751
19752 /* If a variable is initialized with a string constant without embedded
19753 zeros, build CONST_STRING. */
19754 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19755 {
19756 tree enttype = TREE_TYPE (type);
19757 tree domain = TYPE_DOMAIN (type);
19758 scalar_int_mode mode;
19759
19760 if (is_int_mode (TYPE_MODE (enttype), &mode)
19761 && GET_MODE_SIZE (mode) == 1
19762 && domain
19763 && TYPE_MAX_VALUE (domain)
19764 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19765 && integer_zerop (TYPE_MIN_VALUE (domain))
19766 && compare_tree_int (TYPE_MAX_VALUE (domain),
19767 TREE_STRING_LENGTH (init) - 1) == 0
19768 && ((size_t) TREE_STRING_LENGTH (init)
19769 == strlen (TREE_STRING_POINTER (init)) + 1))
19770 {
19771 rtl = gen_rtx_CONST_STRING (VOIDmode,
19772 ggc_strdup (TREE_STRING_POINTER (init)));
19773 rtl = gen_rtx_MEM (BLKmode, rtl);
19774 MEM_READONLY_P (rtl) = 1;
19775 }
19776 }
19777 /* Other aggregates, and complex values, could be represented using
19778 CONCAT: FIXME! */
19779 else if (AGGREGATE_TYPE_P (type)
19780 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19781 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19782 || TREE_CODE (type) == COMPLEX_TYPE)
19783 ;
19784 /* Vectors only work if their mode is supported by the target.
19785 FIXME: generic vectors ought to work too. */
19786 else if (TREE_CODE (type) == VECTOR_TYPE
19787 && !VECTOR_MODE_P (TYPE_MODE (type)))
19788 ;
19789 /* If the initializer is something that we know will expand into an
19790 immediate RTL constant, expand it now. We must be careful not to
19791 reference variables which won't be output. */
19792 else if (initializer_constant_valid_p (init, type)
19793 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19794 {
19795 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19796 possible. */
19797 if (TREE_CODE (type) == VECTOR_TYPE)
19798 switch (TREE_CODE (init))
19799 {
19800 case VECTOR_CST:
19801 break;
19802 case CONSTRUCTOR:
19803 if (TREE_CONSTANT (init))
19804 {
19805 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19806 bool constant_p = true;
19807 tree value;
19808 unsigned HOST_WIDE_INT ix;
19809
19810 /* Even when ctor is constant, it might contain non-*_CST
19811 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19812 belong in VECTOR_CST nodes. */
19813 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19814 if (!CONSTANT_CLASS_P (value))
19815 {
19816 constant_p = false;
19817 break;
19818 }
19819
19820 if (constant_p)
19821 {
19822 init = build_vector_from_ctor (type, elts);
19823 break;
19824 }
19825 }
19826 /* FALLTHRU */
19827
19828 default:
19829 return NULL;
19830 }
19831
19832 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19833
19834 /* If expand_expr returns a MEM, it wasn't immediate. */
19835 gcc_assert (!rtl || !MEM_P (rtl));
19836 }
19837
19838 return rtl;
19839 }
19840
19841 /* Generate RTL for the variable DECL to represent its location. */
19842
19843 static rtx
19844 rtl_for_decl_location (tree decl)
19845 {
19846 rtx rtl;
19847
19848 /* Here we have to decide where we are going to say the parameter "lives"
19849 (as far as the debugger is concerned). We only have a couple of
19850 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19851
19852 DECL_RTL normally indicates where the parameter lives during most of the
19853 activation of the function. If optimization is enabled however, this
19854 could be either NULL or else a pseudo-reg. Both of those cases indicate
19855 that the parameter doesn't really live anywhere (as far as the code
19856 generation parts of GCC are concerned) during most of the function's
19857 activation. That will happen (for example) if the parameter is never
19858 referenced within the function.
19859
19860 We could just generate a location descriptor here for all non-NULL
19861 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19862 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19863 where DECL_RTL is NULL or is a pseudo-reg.
19864
19865 Note however that we can only get away with using DECL_INCOMING_RTL as
19866 a backup substitute for DECL_RTL in certain limited cases. In cases
19867 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19868 we can be sure that the parameter was passed using the same type as it is
19869 declared to have within the function, and that its DECL_INCOMING_RTL
19870 points us to a place where a value of that type is passed.
19871
19872 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19873 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19874 because in these cases DECL_INCOMING_RTL points us to a value of some
19875 type which is *different* from the type of the parameter itself. Thus,
19876 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19877 such cases, the debugger would end up (for example) trying to fetch a
19878 `float' from a place which actually contains the first part of a
19879 `double'. That would lead to really incorrect and confusing
19880 output at debug-time.
19881
19882 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19883 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19884 are a couple of exceptions however. On little-endian machines we can
19885 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19886 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19887 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19888 when (on a little-endian machine) a non-prototyped function has a
19889 parameter declared to be of type `short' or `char'. In such cases,
19890 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19891 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19892 passed `int' value. If the debugger then uses that address to fetch
19893 a `short' or a `char' (on a little-endian machine) the result will be
19894 the correct data, so we allow for such exceptional cases below.
19895
19896 Note that our goal here is to describe the place where the given formal
19897 parameter lives during most of the function's activation (i.e. between the
19898 end of the prologue and the start of the epilogue). We'll do that as best
19899 as we can. Note however that if the given formal parameter is modified
19900 sometime during the execution of the function, then a stack backtrace (at
19901 debug-time) will show the function as having been called with the *new*
19902 value rather than the value which was originally passed in. This happens
19903 rarely enough that it is not a major problem, but it *is* a problem, and
19904 I'd like to fix it.
19905
19906 A future version of dwarf2out.c may generate two additional attributes for
19907 any given DW_TAG_formal_parameter DIE which will describe the "passed
19908 type" and the "passed location" for the given formal parameter in addition
19909 to the attributes we now generate to indicate the "declared type" and the
19910 "active location" for each parameter. This additional set of attributes
19911 could be used by debuggers for stack backtraces. Separately, note that
19912 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19913 This happens (for example) for inlined-instances of inline function formal
19914 parameters which are never referenced. This really shouldn't be
19915 happening. All PARM_DECL nodes should get valid non-NULL
19916 DECL_INCOMING_RTL values. FIXME. */
19917
19918 /* Use DECL_RTL as the "location" unless we find something better. */
19919 rtl = DECL_RTL_IF_SET (decl);
19920
19921 /* When generating abstract instances, ignore everything except
19922 constants, symbols living in memory, and symbols living in
19923 fixed registers. */
19924 if (! reload_completed)
19925 {
19926 if (rtl
19927 && (CONSTANT_P (rtl)
19928 || (MEM_P (rtl)
19929 && CONSTANT_P (XEXP (rtl, 0)))
19930 || (REG_P (rtl)
19931 && VAR_P (decl)
19932 && TREE_STATIC (decl))))
19933 {
19934 rtl = targetm.delegitimize_address (rtl);
19935 return rtl;
19936 }
19937 rtl = NULL_RTX;
19938 }
19939 else if (TREE_CODE (decl) == PARM_DECL)
19940 {
19941 if (rtl == NULL_RTX
19942 || is_pseudo_reg (rtl)
19943 || (MEM_P (rtl)
19944 && is_pseudo_reg (XEXP (rtl, 0))
19945 && DECL_INCOMING_RTL (decl)
19946 && MEM_P (DECL_INCOMING_RTL (decl))
19947 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19948 {
19949 tree declared_type = TREE_TYPE (decl);
19950 tree passed_type = DECL_ARG_TYPE (decl);
19951 machine_mode dmode = TYPE_MODE (declared_type);
19952 machine_mode pmode = TYPE_MODE (passed_type);
19953
19954 /* This decl represents a formal parameter which was optimized out.
19955 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19956 all cases where (rtl == NULL_RTX) just below. */
19957 if (dmode == pmode)
19958 rtl = DECL_INCOMING_RTL (decl);
19959 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19960 && SCALAR_INT_MODE_P (dmode)
19961 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19962 && DECL_INCOMING_RTL (decl))
19963 {
19964 rtx inc = DECL_INCOMING_RTL (decl);
19965 if (REG_P (inc))
19966 rtl = inc;
19967 else if (MEM_P (inc))
19968 {
19969 if (BYTES_BIG_ENDIAN)
19970 rtl = adjust_address_nv (inc, dmode,
19971 GET_MODE_SIZE (pmode)
19972 - GET_MODE_SIZE (dmode));
19973 else
19974 rtl = inc;
19975 }
19976 }
19977 }
19978
19979 /* If the parm was passed in registers, but lives on the stack, then
19980 make a big endian correction if the mode of the type of the
19981 parameter is not the same as the mode of the rtl. */
19982 /* ??? This is the same series of checks that are made in dbxout.c before
19983 we reach the big endian correction code there. It isn't clear if all
19984 of these checks are necessary here, but keeping them all is the safe
19985 thing to do. */
19986 else if (MEM_P (rtl)
19987 && XEXP (rtl, 0) != const0_rtx
19988 && ! CONSTANT_P (XEXP (rtl, 0))
19989 /* Not passed in memory. */
19990 && !MEM_P (DECL_INCOMING_RTL (decl))
19991 /* Not passed by invisible reference. */
19992 && (!REG_P (XEXP (rtl, 0))
19993 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19994 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19995 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19996 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19997 #endif
19998 )
19999 /* Big endian correction check. */
20000 && BYTES_BIG_ENDIAN
20001 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20002 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20003 UNITS_PER_WORD))
20004 {
20005 machine_mode addr_mode = get_address_mode (rtl);
20006 poly_int64 offset = (UNITS_PER_WORD
20007 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20008
20009 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20010 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20011 }
20012 }
20013 else if (VAR_P (decl)
20014 && rtl
20015 && MEM_P (rtl)
20016 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20017 {
20018 machine_mode addr_mode = get_address_mode (rtl);
20019 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20020 GET_MODE (rtl));
20021
20022 /* If a variable is declared "register" yet is smaller than
20023 a register, then if we store the variable to memory, it
20024 looks like we're storing a register-sized value, when in
20025 fact we are not. We need to adjust the offset of the
20026 storage location to reflect the actual value's bytes,
20027 else gdb will not be able to display it. */
20028 if (maybe_ne (offset, 0))
20029 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20030 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20031 }
20032
20033 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20034 and will have been substituted directly into all expressions that use it.
20035 C does not have such a concept, but C++ and other languages do. */
20036 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20037 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20038
20039 if (rtl)
20040 rtl = targetm.delegitimize_address (rtl);
20041
20042 /* If we don't look past the constant pool, we risk emitting a
20043 reference to a constant pool entry that isn't referenced from
20044 code, and thus is not emitted. */
20045 if (rtl)
20046 rtl = avoid_constant_pool_reference (rtl);
20047
20048 /* Try harder to get a rtl. If this symbol ends up not being emitted
20049 in the current CU, resolve_addr will remove the expression referencing
20050 it. */
20051 if (rtl == NULL_RTX
20052 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20053 && VAR_P (decl)
20054 && !DECL_EXTERNAL (decl)
20055 && TREE_STATIC (decl)
20056 && DECL_NAME (decl)
20057 && !DECL_HARD_REGISTER (decl)
20058 && DECL_MODE (decl) != VOIDmode)
20059 {
20060 rtl = make_decl_rtl_for_debug (decl);
20061 if (!MEM_P (rtl)
20062 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20063 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20064 rtl = NULL_RTX;
20065 }
20066
20067 return rtl;
20068 }
20069
20070 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20071 returned. If so, the decl for the COMMON block is returned, and the
20072 value is the offset into the common block for the symbol. */
20073
20074 static tree
20075 fortran_common (tree decl, HOST_WIDE_INT *value)
20076 {
20077 tree val_expr, cvar;
20078 machine_mode mode;
20079 poly_int64 bitsize, bitpos;
20080 tree offset;
20081 HOST_WIDE_INT cbitpos;
20082 int unsignedp, reversep, volatilep = 0;
20083
20084 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20085 it does not have a value (the offset into the common area), or if it
20086 is thread local (as opposed to global) then it isn't common, and shouldn't
20087 be handled as such. */
20088 if (!VAR_P (decl)
20089 || !TREE_STATIC (decl)
20090 || !DECL_HAS_VALUE_EXPR_P (decl)
20091 || !is_fortran ())
20092 return NULL_TREE;
20093
20094 val_expr = DECL_VALUE_EXPR (decl);
20095 if (TREE_CODE (val_expr) != COMPONENT_REF)
20096 return NULL_TREE;
20097
20098 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20099 &unsignedp, &reversep, &volatilep);
20100
20101 if (cvar == NULL_TREE
20102 || !VAR_P (cvar)
20103 || DECL_ARTIFICIAL (cvar)
20104 || !TREE_PUBLIC (cvar)
20105 /* We don't expect to have to cope with variable offsets,
20106 since at present all static data must have a constant size. */
20107 || !bitpos.is_constant (&cbitpos))
20108 return NULL_TREE;
20109
20110 *value = 0;
20111 if (offset != NULL)
20112 {
20113 if (!tree_fits_shwi_p (offset))
20114 return NULL_TREE;
20115 *value = tree_to_shwi (offset);
20116 }
20117 if (cbitpos != 0)
20118 *value += cbitpos / BITS_PER_UNIT;
20119
20120 return cvar;
20121 }
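/* Illustrative example (not part of the original source): for the Fortran
   code

     INTEGER I
     REAL R
     COMMON /BLK/ I, R

   the variable R has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   variable representing /BLK/.  Assuming default 4-byte INTEGER and REAL
   kinds, calling this function on R's decl returns the decl for the common
   block and sets *VALUE to 4, the byte offset of R within the block.  */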
20122
20123 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20124 data attribute for a variable or a parameter. We generate the
20125 DW_AT_const_value attribute only in those cases where the given variable
20126 or parameter does not have a true "location" either in memory or in a
20127 register. This can happen (for example) when a constant is passed as an
20128 actual argument in a call to an inline function. (It's possible that
20129 these things can crop up in other ways also.) Note that one type of
20130 constant value which can be passed into an inlined function is a constant
20131 pointer. This can happen for example if an actual argument in an inlined
20132 function call evaluates to a compile-time constant address.
20133
20134 CACHE_P is true if it is worth caching the location list for DECL,
20135 so that future calls can reuse it rather than regenerate it from scratch.
20136 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20137 since we will need to refer to them each time the function is inlined. */
20138
20139 static bool
20140 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20141 {
20142 rtx rtl;
20143 dw_loc_list_ref list;
20144 var_loc_list *loc_list;
20145 cached_dw_loc_list *cache;
20146
20147 if (early_dwarf)
20148 return false;
20149
20150 if (TREE_CODE (decl) == ERROR_MARK)
20151 return false;
20152
20153 if (get_AT (die, DW_AT_location)
20154 || get_AT (die, DW_AT_const_value))
20155 return true;
20156
20157 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20158 || TREE_CODE (decl) == RESULT_DECL);
20159
20160 /* Try to get some constant RTL for this decl, and use that as the value of
20161 the location. */
20162
20163 rtl = rtl_for_decl_location (decl);
20164 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20165 && add_const_value_attribute (die, rtl))
20166 return true;
20167
20168 /* See if we have a single-element location list that is equivalent to
20169 a constant value. In that case it is better to use add_const_value_attribute
20170 rather than expanding the constant value equivalent. */
20171 loc_list = lookup_decl_loc (decl);
20172 if (loc_list
20173 && loc_list->first
20174 && loc_list->first->next == NULL
20175 && NOTE_P (loc_list->first->loc)
20176 && NOTE_VAR_LOCATION (loc_list->first->loc)
20177 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20178 {
20179 struct var_loc_node *node;
20180
20181 node = loc_list->first;
20182 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20183 if (GET_CODE (rtl) == EXPR_LIST)
20184 rtl = XEXP (rtl, 0);
20185 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20186 && add_const_value_attribute (die, rtl))
20187 return true;
20188 }
20189 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20190 list several times. See if we've already cached the contents. */
20191 list = NULL;
20192 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20193 cache_p = false;
20194 if (cache_p)
20195 {
20196 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20197 if (cache)
20198 list = cache->loc_list;
20199 }
20200 if (list == NULL)
20201 {
20202 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20203 NULL);
20204 /* It is usually worth caching this result if the decl is from
20205 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20206 if (cache_p && list && list->dw_loc_next)
20207 {
20208 cached_dw_loc_list **slot
20209 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20210 DECL_UID (decl),
20211 INSERT);
20212 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20213 cache->decl_id = DECL_UID (decl);
20214 cache->loc_list = list;
20215 *slot = cache;
20216 }
20217 }
20218 if (list)
20219 {
20220 add_AT_location_description (die, DW_AT_location, list);
20221 return true;
20222 }
20223 /* None of that worked, so it must not really have a location;
20224 try adding a constant value attribute from the DECL_INITIAL. */
20225 return tree_add_const_value_attribute_for_decl (die, decl);
20226 }
20227
20228 /* Helper function for tree_add_const_value_attribute. Natively encode
20229 initializer INIT into an array. Return true if successful. */
20230
20231 static bool
20232 native_encode_initializer (tree init, unsigned char *array, int size)
20233 {
20234 tree type;
20235
20236 if (init == NULL_TREE)
20237 return false;
20238
20239 STRIP_NOPS (init);
20240 switch (TREE_CODE (init))
20241 {
20242 case STRING_CST:
20243 type = TREE_TYPE (init);
20244 if (TREE_CODE (type) == ARRAY_TYPE)
20245 {
20246 tree enttype = TREE_TYPE (type);
20247 scalar_int_mode mode;
20248
20249 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20250 || GET_MODE_SIZE (mode) != 1)
20251 return false;
20252 if (int_size_in_bytes (type) != size)
20253 return false;
20254 if (size > TREE_STRING_LENGTH (init))
20255 {
20256 memcpy (array, TREE_STRING_POINTER (init),
20257 TREE_STRING_LENGTH (init));
20258 memset (array + TREE_STRING_LENGTH (init),
20259 '\0', size - TREE_STRING_LENGTH (init));
20260 }
20261 else
20262 memcpy (array, TREE_STRING_POINTER (init), size);
20263 return true;
20264 }
20265 return false;
20266 case CONSTRUCTOR:
20267 type = TREE_TYPE (init);
20268 if (int_size_in_bytes (type) != size)
20269 return false;
20270 if (TREE_CODE (type) == ARRAY_TYPE)
20271 {
20272 HOST_WIDE_INT min_index;
20273 unsigned HOST_WIDE_INT cnt;
20274 int curpos = 0, fieldsize;
20275 constructor_elt *ce;
20276
20277 if (TYPE_DOMAIN (type) == NULL_TREE
20278 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20279 return false;
20280
20281 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20282 if (fieldsize <= 0)
20283 return false;
20284
20285 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20286 memset (array, '\0', size);
20287 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20288 {
20289 tree val = ce->value;
20290 tree index = ce->index;
20291 int pos = curpos;
20292 if (index && TREE_CODE (index) == RANGE_EXPR)
20293 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20294 * fieldsize;
20295 else if (index)
20296 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20297
20298 if (val)
20299 {
20300 STRIP_NOPS (val);
20301 if (!native_encode_initializer (val, array + pos, fieldsize))
20302 return false;
20303 }
20304 curpos = pos + fieldsize;
20305 if (index && TREE_CODE (index) == RANGE_EXPR)
20306 {
20307 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20308 - tree_to_shwi (TREE_OPERAND (index, 0));
20309 while (count-- > 0)
20310 {
20311 if (val)
20312 memcpy (array + curpos, array + pos, fieldsize);
20313 curpos += fieldsize;
20314 }
20315 }
20316 gcc_assert (curpos <= size);
20317 }
20318 return true;
20319 }
20320 else if (TREE_CODE (type) == RECORD_TYPE
20321 || TREE_CODE (type) == UNION_TYPE)
20322 {
20323 tree field = NULL_TREE;
20324 unsigned HOST_WIDE_INT cnt;
20325 constructor_elt *ce;
20326
20327 if (int_size_in_bytes (type) != size)
20328 return false;
20329
20330 if (TREE_CODE (type) == RECORD_TYPE)
20331 field = TYPE_FIELDS (type);
20332
20333 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20334 {
20335 tree val = ce->value;
20336 int pos, fieldsize;
20337
20338 if (ce->index != 0)
20339 field = ce->index;
20340
20341 if (val)
20342 STRIP_NOPS (val);
20343
20344 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20345 return false;
20346
20347 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20348 && TYPE_DOMAIN (TREE_TYPE (field))
20349 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20350 return false;
20351 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20352 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20353 return false;
20354 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20355 pos = int_byte_position (field);
20356 gcc_assert (pos + fieldsize <= size);
20357 if (val && fieldsize != 0
20358 && !native_encode_initializer (val, array + pos, fieldsize))
20359 return false;
20360 }
20361 return true;
20362 }
20363 return false;
20364 case VIEW_CONVERT_EXPR:
20365 case NON_LVALUE_EXPR:
20366 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20367 default:
20368 return native_encode_expr (init, array, size) == size;
20369 }
20370 }
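/* Illustrative example (not part of the original source): for

     static const int v[3] = { 1, 2, 3 };

   on a target with 32-bit little-endian int, native_encode_initializer
   fills a 12-byte array with

     01 00 00 00  02 00 00 00  03 00 00 00

   which tree_add_const_value_attribute below then attaches as a
   DW_AT_const_value data block via add_AT_vec.  */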
20371
20372 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20373 attribute is the const value T. */
20374
20375 static bool
20376 tree_add_const_value_attribute (dw_die_ref die, tree t)
20377 {
20378 tree init;
20379 tree type = TREE_TYPE (t);
20380 rtx rtl;
20381
20382 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20383 return false;
20384
20385 init = t;
20386 gcc_assert (!DECL_P (init));
20387
20388 if (TREE_CODE (init) == INTEGER_CST)
20389 {
20390 if (tree_fits_uhwi_p (init))
20391 {
20392 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20393 return true;
20394 }
20395 if (tree_fits_shwi_p (init))
20396 {
20397 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20398 return true;
20399 }
20400 }
20401 if (! early_dwarf)
20402 {
20403 rtl = rtl_for_decl_init (init, type);
20404 if (rtl)
20405 return add_const_value_attribute (die, rtl);
20406 }
20407 /* If the host and target are sane, try harder. */
20408 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20409 && initializer_constant_valid_p (init, type))
20410 {
20411 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20412 if (size > 0 && (int) size == size)
20413 {
20414 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20415
20416 if (native_encode_initializer (init, array, size))
20417 {
20418 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20419 return true;
20420 }
20421 ggc_free (array);
20422 }
20423 }
20424 return false;
20425 }
20426
20427 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20428 attribute is the const value of T, where T is an integral constant
20429 variable with static storage duration
20430 (so it can't be a PARM_DECL or a RESULT_DECL). */
20431
20432 static bool
20433 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20434 {
20435
20436 if (!decl
20437 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20438 || (VAR_P (decl) && !TREE_STATIC (decl)))
20439 return false;
20440
20441 if (TREE_READONLY (decl)
20442 && ! TREE_THIS_VOLATILE (decl)
20443 && DECL_INITIAL (decl))
20444 /* OK */;
20445 else
20446 return false;
20447
20448 /* Don't add DW_AT_const_value if abstract origin already has one. */
20449 if (get_AT (var_die, DW_AT_const_value))
20450 return false;
20451
20452 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20453 }
20454
20455 /* Convert the CFI instructions for the current function into a
20456 location list. This is used for DW_AT_frame_base when we are targeting
20457 a dwarf2 consumer that does not support the dwarf3
20458 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20459 expressions. */
20460
20461 static dw_loc_list_ref
20462 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20463 {
20464 int ix;
20465 dw_fde_ref fde;
20466 dw_loc_list_ref list, *list_tail;
20467 dw_cfi_ref cfi;
20468 dw_cfa_location last_cfa, next_cfa;
20469 const char *start_label, *last_label, *section;
20470 dw_cfa_location remember;
20471
20472 fde = cfun->fde;
20473 gcc_assert (fde != NULL);
20474
20475 section = secname_for_decl (current_function_decl);
20476 list_tail = &list;
20477 list = NULL;
20478
20479 memset (&next_cfa, 0, sizeof (next_cfa));
20480 next_cfa.reg = INVALID_REGNUM;
20481 remember = next_cfa;
20482
20483 start_label = fde->dw_fde_begin;
20484
20485 /* ??? Bald assumption that the CIE opcode list does not contain
20486 advance opcodes. */
20487 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20488 lookup_cfa_1 (cfi, &next_cfa, &remember);
20489
20490 last_cfa = next_cfa;
20491 last_label = start_label;
20492
20493 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20494 {
20495 /* If the first partition contained no CFI adjustments, the
20496 CIE opcodes apply to the whole first partition. */
20497 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20498 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20499 list_tail = &(*list_tail)->dw_loc_next;
20500 start_label = last_label = fde->dw_fde_second_begin;
20501 }
20502
20503 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20504 {
20505 switch (cfi->dw_cfi_opc)
20506 {
20507 case DW_CFA_set_loc:
20508 case DW_CFA_advance_loc1:
20509 case DW_CFA_advance_loc2:
20510 case DW_CFA_advance_loc4:
20511 if (!cfa_equal_p (&last_cfa, &next_cfa))
20512 {
20513 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20514 start_label, 0, last_label, 0, section);
20515
20516 list_tail = &(*list_tail)->dw_loc_next;
20517 last_cfa = next_cfa;
20518 start_label = last_label;
20519 }
20520 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20521 break;
20522
20523 case DW_CFA_advance_loc:
20524 /* The encoding is complex enough that we should never emit this. */
20525 gcc_unreachable ();
20526
20527 default:
20528 lookup_cfa_1 (cfi, &next_cfa, &remember);
20529 break;
20530 }
20531 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20532 {
20533 if (!cfa_equal_p (&last_cfa, &next_cfa))
20534 {
20535 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20536 start_label, 0, last_label, 0, section);
20537
20538 list_tail = &(*list_tail)->dw_loc_next;
20539 last_cfa = next_cfa;
20540 start_label = last_label;
20541 }
20542 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20543 start_label, 0, fde->dw_fde_end, 0, section);
20544 list_tail = &(*list_tail)->dw_loc_next;
20545 start_label = last_label = fde->dw_fde_second_begin;
20546 }
20547 }
20548
20549 if (!cfa_equal_p (&last_cfa, &next_cfa))
20550 {
20551 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20552 start_label, 0, last_label, 0, section);
20553 list_tail = &(*list_tail)->dw_loc_next;
20554 start_label = last_label;
20555 }
20556
20557 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20558 start_label, 0,
20559 fde->dw_fde_second_begin
20560 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20561 section);
20562
20563 maybe_gen_llsym (list);
20564
20565 return list;
20566 }
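/* A rough sketch of the result (illustrative, assuming a typical x86-64
   prologue that pushes %rbp and then copies %rsp into it): the returned
   location list would have three nodes covering

     [function start, after the push)   CFA = %rsp + 8
     [after the push, after the move)   CFA = %rsp + 16
     [after the move, function end)     CFA = %rbp + 16

   each built with build_cfa_loc and shifted by OFFSET, so a DWARF 2
   consumer can evaluate DW_AT_frame_base without DW_OP_call_frame_cfa.  */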
20567
20568 /* Compute a displacement from the "steady-state frame pointer" to the
20569 frame base (often the same as the CFA), and store it in
20570 frame_pointer_fb_offset. OFFSET is added to the displacement
20571 before the latter is negated. */
20572
20573 static void
20574 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20575 {
20576 rtx reg, elim;
20577
20578 #ifdef FRAME_POINTER_CFA_OFFSET
20579 reg = frame_pointer_rtx;
20580 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20581 #else
20582 reg = arg_pointer_rtx;
20583 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20584 #endif
20585
20586 elim = (ira_use_lra_p
20587 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20588 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20589 elim = strip_offset_and_add (elim, &offset);
20590
20591 frame_pointer_fb_offset = -offset;
20592
20593 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20594 in which to eliminate. This is because its stack pointer isn't
20595 directly accessible as a register within the ISA. To work around
20596 this, assume that while we cannot provide a proper value for
20597 frame_pointer_fb_offset, we won't need one either. We can use the
20598 hard frame pointer in debug info even if the frame pointer isn't used,
20599 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20600 which uses the DW_AT_frame_base attribute, not the hard frame pointer
20601 directly. */
20602 frame_pointer_fb_offset_valid
20603 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20604 }
20605
20606 /* Generate a DW_AT_name attribute given some string value to be included as
20607 the value of the attribute. */
20608
20609 static void
20610 add_name_attribute (dw_die_ref die, const char *name_string)
20611 {
20612 if (name_string != NULL && *name_string != 0)
20613 {
20614 if (demangle_name_func)
20615 name_string = (*demangle_name_func) (name_string);
20616
20617 add_AT_string (die, DW_AT_name, name_string);
20618 }
20619 }
20620
20621 /* Generate a DW_AT_description attribute given some string value to be included
20622 as the value of the attribute. */
20623
20624 static void
20625 add_desc_attribute (dw_die_ref die, const char *name_string)
20626 {
20627 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20628 return;
20629
20630 if (name_string == NULL || *name_string == 0)
20631 return;
20632
20633 if (demangle_name_func)
20634 name_string = (*demangle_name_func) (name_string);
20635
20636 add_AT_string (die, DW_AT_description, name_string);
20637 }
20638
20639 /* Generate a DW_AT_description attribute given some decl to be included
20640 as the value of the attribute. */
20641
20642 static void
20643 add_desc_attribute (dw_die_ref die, tree decl)
20644 {
20645 tree decl_name;
20646
20647 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20648 return;
20649
20650 if (decl == NULL_TREE || !DECL_P (decl))
20651 return;
20652 decl_name = DECL_NAME (decl);
20653
20654 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20655 {
20656 const char *name = dwarf2_name (decl, 0);
20657 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20658 }
20659 else
20660 {
20661 char *desc = print_generic_expr_to_str (decl);
20662 add_desc_attribute (die, desc);
20663 free (desc);
20664 }
20665 }
20666
20667 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20668 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20669 of TYPE accordingly.
20670
20671 ??? This is a temporary measure until after we're able to generate
20672 regular DWARF for the complex Ada type system. */
20673
20674 static void
20675 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20676 dw_die_ref context_die)
20677 {
20678 tree dtype;
20679 dw_die_ref dtype_die;
20680
20681 if (!lang_hooks.types.descriptive_type)
20682 return;
20683
20684 dtype = lang_hooks.types.descriptive_type (type);
20685 if (!dtype)
20686 return;
20687
20688 dtype_die = lookup_type_die (dtype);
20689 if (!dtype_die)
20690 {
20691 gen_type_die (dtype, context_die);
20692 dtype_die = lookup_type_die (dtype);
20693 gcc_assert (dtype_die);
20694 }
20695
20696 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20697 }
20698
20699 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20700
20701 static const char *
20702 comp_dir_string (void)
20703 {
20704 const char *wd;
20705 char *wd_plus_sep = NULL;
20706 static const char *cached_wd = NULL;
20707
20708 if (cached_wd != NULL)
20709 return cached_wd;
20710
20711 wd = get_src_pwd ();
20712 if (wd == NULL)
20713 return NULL;
20714
20715 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20716 {
20717 size_t wdlen = strlen (wd);
20718 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20719 strcpy (wd_plus_sep, wd);
20720 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20721 wd_plus_sep [wdlen + 1] = 0;
20722 wd = wd_plus_sep;
20723 }
20724
20725 cached_wd = remap_debug_filename (wd);
20726
20727 /* remap_debug_filename can just pass through wd or return a new gc string.
20728 These two types can't both be stored in a GTY(())-tagged string, but since
20729 the cached value lives forever, just copy it if needed. */
20730 if (cached_wd != wd)
20731 {
20732 cached_wd = xstrdup (cached_wd);
20733 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20734 free (wd_plus_sep);
20735 }
20736
20737 return cached_wd;
20738 }
20739
20740 /* Generate a DW_AT_comp_dir attribute for DIE. */
20741
20742 static void
20743 add_comp_dir_attribute (dw_die_ref die)
20744 {
20745 const char * wd = comp_dir_string ();
20746 if (wd != NULL)
20747 add_AT_string (die, DW_AT_comp_dir, wd);
20748 }
20749
20750 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20751 pointer computation, ...), output a representation for that bound according
20752 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20753 loc_list_from_tree for the meaning of CONTEXT. */
20754
20755 static void
20756 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20757 int forms, struct loc_descr_context *context)
20758 {
20759 dw_die_ref context_die, decl_die = NULL;
20760 dw_loc_list_ref list;
20761 bool strip_conversions = true;
20762 bool placeholder_seen = false;
20763
20764 while (strip_conversions)
20765 switch (TREE_CODE (value))
20766 {
20767 case ERROR_MARK:
20768 case SAVE_EXPR:
20769 return;
20770
20771 CASE_CONVERT:
20772 case VIEW_CONVERT_EXPR:
20773 value = TREE_OPERAND (value, 0);
20774 break;
20775
20776 default:
20777 strip_conversions = false;
20778 break;
20779 }
20780
20781 /* If possible and permitted, output the attribute as a constant. */
20782 if ((forms & dw_scalar_form_constant) != 0
20783 && TREE_CODE (value) == INTEGER_CST)
20784 {
20785 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20786
20787 /* If HOST_WIDE_INT is big enough then represent the bound as
20788 a constant value. We need to choose a form based on
20789 whether the type is signed or unsigned. We cannot just
20790 call add_AT_unsigned if the value itself is positive
20791 (add_AT_unsigned might add the unsigned value encoded as
20792 DW_FORM_data[1248]). Some DWARF consumers will look up the
20793 bounds type and then sign extend any unsigned values found
20794 for signed types. This is needed only for
20795 DW_AT_{lower,upper}_bound, since for most other attributes,
20796 consumers will treat DW_FORM_data[1248] as unsigned values,
20797 regardless of the underlying type. */
20798 if (prec <= HOST_BITS_PER_WIDE_INT
20799 || tree_fits_uhwi_p (value))
20800 {
20801 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20802 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20803 else
20804 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20805 }
20806 else
20807 /* Otherwise represent the bound as an unsigned value with
20808 the precision of its type. The precision and signedness
20809 of the type will be necessary to re-interpret it
20810 unambiguously. */
20811 add_AT_wide (die, attr, wi::to_wide (value));
20812 return;
20813 }
20814
20815 /* Otherwise, if it's possible and permitted too, output a reference to
20816 another DIE. */
20817 if ((forms & dw_scalar_form_reference) != 0)
20818 {
20819 tree decl = NULL_TREE;
20820
20821 /* Some type attributes reference an outer type. For instance, the upper
20822 bound of an array may reference an embedding record (this happens in
20823 Ada). */
20824 if (TREE_CODE (value) == COMPONENT_REF
20825 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20826 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20827 decl = TREE_OPERAND (value, 1);
20828
20829 else if (VAR_P (value)
20830 || TREE_CODE (value) == PARM_DECL
20831 || TREE_CODE (value) == RESULT_DECL)
20832 decl = value;
20833
20834 if (decl != NULL_TREE)
20835 {
20836 decl_die = lookup_decl_die (decl);
20837
20838 /* ??? Can this happen, or should the variable have been bound
20839 first? Probably it can, since I imagine that we try to create
20840 the types of parameters in the order in which they exist in
20841 the list, and won't have created a forward reference to a
20842 later parameter. */
20843 if (decl_die != NULL)
20844 {
20845 if (get_AT (decl_die, DW_AT_location)
20846 || get_AT (decl_die, DW_AT_const_value))
20847 {
20848 add_AT_die_ref (die, attr, decl_die);
20849 return;
20850 }
20851 }
20852 }
20853 }
20854
20855 /* Last chance: try to create a stack operation procedure to evaluate the
20856 value. Do nothing if even that is not possible or permitted. */
20857 if ((forms & dw_scalar_form_exprloc) == 0)
20858 return;
20859
20860 list = loc_list_from_tree (value, 2, context);
20861 if (context && context->placeholder_arg)
20862 {
20863 placeholder_seen = context->placeholder_seen;
20864 context->placeholder_seen = false;
20865 }
20866 if (list == NULL || single_element_loc_list_p (list))
20867 {
20868 /* If this attribute is neither a reference nor a constant, it is
20869 a DWARF expression rather than a location description. For that,
20870 loc_list_from_tree (value, 0, &context) is needed. */
20871 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20872 if (list2 && single_element_loc_list_p (list2))
20873 {
20874 if (placeholder_seen)
20875 {
20876 struct dwarf_procedure_info dpi;
20877 dpi.fndecl = NULL_TREE;
20878 dpi.args_count = 1;
20879 if (!resolve_args_picking (list2->expr, 1, &dpi))
20880 return;
20881 }
20882 add_AT_loc (die, attr, list2->expr);
20883 return;
20884 }
20885 }
20886
20887 /* If that failed to give a single element location list, fall back to
20888 outputting this as a reference, if that is still permitted. */
20889 if (list == NULL
20890 || (forms & dw_scalar_form_reference) == 0
20891 || placeholder_seen)
20892 return;
20893
20894 if (!decl_die)
20895 {
20896 if (current_function_decl == 0)
20897 context_die = comp_unit_die ();
20898 else
20899 context_die = lookup_decl_die (current_function_decl);
20900
20901 decl_die = new_die (DW_TAG_variable, context_die, value);
20902 add_AT_flag (decl_die, DW_AT_artificial, 1);
20903 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20904 context_die);
20905 }
20906
20907 add_AT_location_description (decl_die, DW_AT_location, list);
20908 add_AT_die_ref (die, attr, decl_die);
20909 }
20910
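/* A rough illustration of the three forms handled above, assuming a C99
   VLA such as

     void f (int n) { int a[n]; }

   For a fixed-size array the bound is an INTEGER_CST and is emitted as a
   constant form directly.  For the VLA, the upper bound is not a
   compile-time constant, so it typically ends up either as a single
   DWARF expression (dw_scalar_form_exprloc) or, failing that, as a
   reference to an artificial DW_TAG_variable DIE whose DW_AT_location
   describes where the bound lives at run time.  */
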
20911 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20912 default. */
20913
20914 static int
20915 lower_bound_default (void)
20916 {
20917 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20918 {
20919 case DW_LANG_C:
20920 case DW_LANG_C89:
20921 case DW_LANG_C99:
20922 case DW_LANG_C11:
20923 case DW_LANG_C_plus_plus:
20924 case DW_LANG_C_plus_plus_11:
20925 case DW_LANG_C_plus_plus_14:
20926 case DW_LANG_ObjC:
20927 case DW_LANG_ObjC_plus_plus:
20928 return 0;
20929 case DW_LANG_Fortran77:
20930 case DW_LANG_Fortran90:
20931 case DW_LANG_Fortran95:
20932 case DW_LANG_Fortran03:
20933 case DW_LANG_Fortran08:
20934 return 1;
20935 case DW_LANG_UPC:
20936 case DW_LANG_D:
20937 case DW_LANG_Python:
20938 return dwarf_version >= 4 ? 0 : -1;
20939 case DW_LANG_Ada95:
20940 case DW_LANG_Ada83:
20941 case DW_LANG_Cobol74:
20942 case DW_LANG_Cobol85:
20943 case DW_LANG_Modula2:
20944 case DW_LANG_PLI:
20945 return dwarf_version >= 4 ? 1 : -1;
20946 default:
20947 return -1;
20948 }
20949 }
20950
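/* For example, with the defaults returned above, a C array

     int a[5];

   gets no DW_AT_lower_bound at all (the C default is 0), while a Fortran

     integer :: a(0:4)

   does get an explicit DW_AT_lower_bound of 0, since the Fortran default
   is 1.  See add_bound_info below.  */
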
20951 /* Given a tree node describing an array bound (either lower or upper) output
20952 a representation for that bound. */
20953
20954 static void
20955 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20956 tree bound, struct loc_descr_context *context)
20957 {
20958 int dflt;
20959
20960 while (1)
20961 switch (TREE_CODE (bound))
20962 {
20963 /* Strip all conversions. */
20964 CASE_CONVERT:
20965 case VIEW_CONVERT_EXPR:
20966 bound = TREE_OPERAND (bound, 0);
20967 break;
20968
20969 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20970 are even omitted when they are the default. */
20971 case INTEGER_CST:
20972 /* If the value for this bound is the default one, we can even omit the
20973 attribute. */
20974 if (bound_attr == DW_AT_lower_bound
20975 && tree_fits_shwi_p (bound)
20976 && (dflt = lower_bound_default ()) != -1
20977 && tree_to_shwi (bound) == dflt)
20978 return;
20979
20980 /* FALLTHRU */
20981
20982 default:
20983 /* Because of the complex interaction with other GNAT encodings, GDB
20984 isn't ready yet to handle a proper DWARF description for
20985 self-referential subrange bounds: let GNAT encodings do the
20986 magic in such a case. */
20987 if (is_ada ()
20988 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20989 && contains_placeholder_p (bound))
20990 return;
20991
20992 add_scalar_info (subrange_die, bound_attr, bound,
20993 dw_scalar_form_constant
20994 | dw_scalar_form_exprloc
20995 | dw_scalar_form_reference,
20996 context);
20997 return;
20998 }
20999 }
21000
21001 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
21002 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
21003 Note that the block of subscript information for an array type also
21004 includes information about the element type of the given array type.
21005
21006 This function reuses previously set type and bound information if
21007 available. */
21008
21009 static void
21010 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
21011 {
21012 unsigned dimension_number;
21013 tree lower, upper;
21014 dw_die_ref child = type_die->die_child;
21015
21016 for (dimension_number = 0;
21017 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21018 type = TREE_TYPE (type), dimension_number++)
21019 {
21020 tree domain = TYPE_DOMAIN (type);
21021
21022 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21023 break;
21024
21025 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21026 and (in GNU C only) variable bounds. Handle all three forms
21027 here. */
21028
21029 /* Find and reuse a previously generated DW_TAG_subrange_type if
21030 available.
21031
21032 For multi-dimensional arrays, as we iterate through the
21033 various dimensions in the enclosing for loop above, we also
21034 iterate through the DIE children and pick at each
21035 DW_TAG_subrange_type previously generated (if available).
21036 Each child DW_TAG_subrange_type DIE describes the range of
21037 the current dimension. At this point we should have as many
21038 DW_TAG_subrange_type's as we have dimensions in the
21039 array. */
21040 dw_die_ref subrange_die = NULL;
21041 if (child)
21042 while (1)
21043 {
21044 child = child->die_sib;
21045 if (child->die_tag == DW_TAG_subrange_type)
21046 subrange_die = child;
21047 if (child == type_die->die_child)
21048 {
21049 /* If we wrapped around, stop looking next time. */
21050 child = NULL;
21051 break;
21052 }
21053 if (child->die_tag == DW_TAG_subrange_type)
21054 break;
21055 }
21056 if (!subrange_die)
21057 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21058
21059 if (domain)
21060 {
21061 /* We have an array type with specified bounds. */
21062 lower = TYPE_MIN_VALUE (domain);
21063 upper = TYPE_MAX_VALUE (domain);
21064
21065 /* Define the index type. */
21066 if (TREE_TYPE (domain)
21067 && !get_AT (subrange_die, DW_AT_type))
21068 {
21069 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21070 TREE_TYPE field. We can't emit debug info for this
21071 because it is an unnamed integral type. */
21072 if (TREE_CODE (domain) == INTEGER_TYPE
21073 && TYPE_NAME (domain) == NULL_TREE
21074 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21075 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21076 ;
21077 else
21078 add_type_attribute (subrange_die, TREE_TYPE (domain),
21079 TYPE_UNQUALIFIED, false, type_die);
21080 }
21081
21082 /* ??? If upper is NULL, the array has unspecified length,
21083 but it does have a lower bound. This happens with Fortran
21084 dimension arr(N:*)
21085 Since the debugger is definitely going to need to know N
21086 to produce useful results, go ahead and output the lower
21087 bound solo, and hope the debugger can cope. */
21088
21089 if (!get_AT (subrange_die, DW_AT_lower_bound))
21090 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21091 if (!get_AT (subrange_die, DW_AT_upper_bound)
21092 && !get_AT (subrange_die, DW_AT_count))
21093 {
21094 if (upper)
21095 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21096 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21097 /* Zero-length array. */
21098 add_bound_info (subrange_die, DW_AT_count,
21099 build_int_cst (TREE_TYPE (lower), 0), NULL);
21100 }
21101 }
21102
21103 /* Otherwise we have an array type with an unspecified length. The
21104 DWARF-2 spec does not say how to handle this; let's just leave out the
21105 bounds. */
21106 }
21107 }
21108
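/* As an illustration, for the C declaration

     int a[2][3];

   the nested array types are collapsed (COLLAPSE_P is true for C), so a
   single DW_TAG_array_type DIE is produced with two DW_TAG_subrange_type
   children, with DW_AT_upper_bound 1 and 2 respectively.  For Ada each
   dimension is instead kept as its own array type.  */
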
21109 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21110
21111 static void
21112 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21113 {
21114 dw_die_ref decl_die;
21115 HOST_WIDE_INT size;
21116 dw_loc_descr_ref size_expr = NULL;
21117
21118 switch (TREE_CODE (tree_node))
21119 {
21120 case ERROR_MARK:
21121 size = 0;
21122 break;
21123 case ENUMERAL_TYPE:
21124 case RECORD_TYPE:
21125 case UNION_TYPE:
21126 case QUAL_UNION_TYPE:
21127 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21128 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21129 {
21130 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21131 return;
21132 }
21133 size_expr = type_byte_size (tree_node, &size);
21134 break;
21135 case FIELD_DECL:
21136 /* For a data member of a struct or union, the DW_AT_byte_size is
21137 generally given as the number of bytes normally allocated for an
21138 object of the *declared* type of the member itself. This is true
21139 even for bit-fields. */
21140 size = int_size_in_bytes (field_type (tree_node));
21141 break;
21142 default:
21143 gcc_unreachable ();
21144 }
21145
21146 /* Support for dynamically-sized objects was introduced by DWARFv3.
21147 At the moment, GDB does not handle variable byte sizes very well,
21148 though. */
21149 if ((dwarf_version >= 3 || !dwarf_strict)
21150 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21151 && size_expr != NULL)
21152 add_AT_loc (die, DW_AT_byte_size, size_expr);
21153
21154 /* Note that `size' might be -1 when we get to this point. If it is, that
21155 indicates that the byte size of the entity in question is variable and
21156 that we could not generate a DWARF expression that computes it. */
21157 if (size >= 0)
21158 add_AT_unsigned (die, DW_AT_byte_size, size);
21159 }
21160
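/* For instance, on a typical target with 4-byte int and 4-byte alignment,

     struct s { char c; int i; };

   has a constant size, so type_byte_size yields 8 and we emit
   DW_AT_byte_size 8.  A discriminated Ada record whose size depends on a
   field may instead get its DW_AT_byte_size emitted as a location
   expression, or as a reference to the artificial variable that holds
   the size.  */
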
21161 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21162 alignment. */
21163
21164 static void
21165 add_alignment_attribute (dw_die_ref die, tree tree_node)
21166 {
21167 if (dwarf_version < 5 && dwarf_strict)
21168 return;
21169
21170 unsigned align;
21171
21172 if (DECL_P (tree_node))
21173 {
21174 if (!DECL_USER_ALIGN (tree_node))
21175 return;
21176
21177 align = DECL_ALIGN_UNIT (tree_node);
21178 }
21179 else if (TYPE_P (tree_node))
21180 {
21181 if (!TYPE_USER_ALIGN (tree_node))
21182 return;
21183
21184 align = TYPE_ALIGN_UNIT (tree_node);
21185 }
21186 else
21187 gcc_unreachable ();
21188
21189 add_AT_unsigned (die, DW_AT_alignment, align);
21190 }
21191
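/* Only user-specified alignments are described.  For example,

     int x __attribute__ ((aligned (16)));

   gets DW_AT_alignment 16 (when DWARF 5 or non-strict DWARF is in use),
   whereas a plain `int x;' with its default ABI alignment gets no
   DW_AT_alignment attribute at all.  */
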
21192 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21193 which specifies the distance in bits from the highest order bit of the
21194 "containing object" for the bit-field to the highest order bit of the
21195 bit-field itself.
21196
21197 For any given bit-field, the "containing object" is a hypothetical object
21198 (of some integral or enum type) within which the given bit-field lives. The
21199 type of this hypothetical "containing object" is always the same as the
21200 declared type of the individual bit-field itself. The determination of the
21201 exact location of the "containing object" for a bit-field is rather
21202 complicated. It's handled by the `field_byte_offset' function (above).
21203
21204 CTX is required: see the comment for VLR_CONTEXT.
21205
21206 Note that it is the size (in bytes) of the hypothetical "containing object"
21207 which will be given in the DW_AT_byte_size attribute for this bit-field.
21208 (See `add_byte_size_attribute' above). */
21209
21210 static inline void
21211 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21212 {
21213 HOST_WIDE_INT object_offset_in_bytes;
21214 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21215 HOST_WIDE_INT bitpos_int;
21216 HOST_WIDE_INT highest_order_object_bit_offset;
21217 HOST_WIDE_INT highest_order_field_bit_offset;
21218 HOST_WIDE_INT bit_offset;
21219
21220 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21221
21222 /* Must be a field and a bit field. */
21223 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21224
21225 /* We can't yet handle bit-fields whose offsets are variable, so if we
21226 encounter such things, just return without generating any attribute
21227 whatsoever. Likewise for variable or too large size. */
21228 if (! tree_fits_shwi_p (bit_position (decl))
21229 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21230 return;
21231
21232 bitpos_int = int_bit_position (decl);
21233
21234 /* Note that the bit offset is always the distance (in bits) from the
21235 highest-order bit of the "containing object" to the highest-order bit of
21236 the bit-field itself. Since the "high-order end" of any object or field
21237 is different on big-endian and little-endian machines, the computation
21238 below must take account of these differences. */
21239 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21240 highest_order_field_bit_offset = bitpos_int;
21241
21242 if (! BYTES_BIG_ENDIAN)
21243 {
21244 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21245 highest_order_object_bit_offset +=
21246 simple_type_size_in_bits (original_type);
21247 }
21248
21249 bit_offset
21250 = (! BYTES_BIG_ENDIAN
21251 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21252 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21253
21254 if (bit_offset < 0)
21255 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21256 else
21257 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21258 }
21259
21260 /* For a FIELD_DECL node which represents a bit field, output an attribute
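/* A worked example of the computation above, assuming a little-endian
   target with 32-bit int:

     struct s { unsigned a : 3; unsigned b : 5; };

   For `b', bit_position is 3 and DECL_SIZE is 5, and the containing
   32-bit object starts at byte offset 0, so

     highest_order_object_bit_offset = 0 * 8 + 32 = 32
     highest_order_field_bit_offset  = 3 + 5      = 8
     bit_offset                      = 32 - 8     = 24

   i.e. the high-order bit of `b' is 24 bits below the high-order bit of
   its containing int, and DW_AT_bit_offset 24 is emitted.  */
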
21261 which specifies the length in bits of the given field. */
21262
21263 static inline void
21264 add_bit_size_attribute (dw_die_ref die, tree decl)
21265 {
21266 /* Must be a field and a bit field. */
21267 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21268 && DECL_BIT_FIELD_TYPE (decl));
21269
21270 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21271 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21272 }
21273
21274 /* If the compiled language is ANSI C, add a DW_AT_prototyped attribute
21275 if argument types are given for the parameters of a function. */
21276
21277 static inline void
21278 add_prototyped_attribute (dw_die_ref die, tree func_type)
21279 {
21280 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21281 {
21282 case DW_LANG_C:
21283 case DW_LANG_C89:
21284 case DW_LANG_C99:
21285 case DW_LANG_C11:
21286 case DW_LANG_ObjC:
21287 if (prototype_p (func_type))
21288 add_AT_flag (die, DW_AT_prototyped, 1);
21289 break;
21290 default:
21291 break;
21292 }
21293 }
21294
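/* So, for C, a declaration like `int f (void);' is prototyped and gets
   DW_AT_prototyped 1, while an old-style `int g ();' gets no attribute.
   C++ is absent from the switch above, as every C++ function is
   prototyped and the attribute would carry no information there.  */
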
21295 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21296 by looking in the type declaration, the object declaration equate table or
21297 the block mapping. */
21298
21299 static inline void
21300 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21301 {
21302 dw_die_ref origin_die = NULL;
21303
21304 /* For late LTO debug output we want to refer directly to the abstract
21305 DIE in the early debug rather than to the possibly existing concrete
21306 instance, and avoid creating that just for this purpose. */
21307 sym_off_pair *desc;
21308 if (in_lto_p
21309 && external_die_map
21310 && (desc = external_die_map->get (origin)))
21311 {
21312 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21313 desc->sym, desc->off);
21314 return;
21315 }
21316
21317 if (DECL_P (origin))
21318 origin_die = lookup_decl_die (origin);
21319 else if (TYPE_P (origin))
21320 origin_die = lookup_type_die (origin);
21321 else if (TREE_CODE (origin) == BLOCK)
21322 origin_die = lookup_block_die (origin);
21323
21324 /* XXX: Functions that are never lowered don't always have correct block
21325 trees (in the case of Java, they simply have no block tree; so too in some other
21326 languages). For these functions, there is nothing we can really do to
21327 output correct debug info for inlined functions in all cases. Rather
21328 than die, we'll just produce deficient debug info now, in that we will
21329 have variables without a proper abstract origin. In the future, when all
21330 functions are lowered, we should re-add a gcc_assert (origin_die)
21331 here. */
21332
21333 if (origin_die)
21334 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21335 }
21336
21337 /* We do not currently support the pure_virtual attribute. */
21338
21339 static inline void
21340 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21341 {
21342 if (DECL_VINDEX (func_decl))
21343 {
21344 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21345
21346 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21347 add_AT_loc (die, DW_AT_vtable_elem_location,
21348 new_loc_descr (DW_OP_constu,
21349 tree_to_shwi (DECL_VINDEX (func_decl)),
21350 0));
21351
21352 /* GNU extension: Record what type this method came from originally. */
21353 if (debug_info_level > DINFO_LEVEL_TERSE
21354 && DECL_CONTEXT (func_decl))
21355 add_AT_die_ref (die, DW_AT_containing_type,
21356 lookup_type_die (DECL_CONTEXT (func_decl)));
21357 }
21358 }
21359 \f
21360 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21361 given decl. This used to be a vendor extension until DWARF 4
21362 standardized it. */
21363
21364 static void
21365 add_linkage_attr (dw_die_ref die, tree decl)
21366 {
21367 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21368
21369 /* Mimic what assemble_name_raw does with a leading '*'. */
21370 if (name[0] == '*')
21371 name = &name[1];
21372
21373 if (dwarf_version >= 4)
21374 add_AT_string (die, DW_AT_linkage_name, name);
21375 else
21376 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21377 }
21378
21379 /* Add source coordinate attributes for the given decl. */
21380
21381 static void
21382 add_src_coords_attributes (dw_die_ref die, tree decl)
21383 {
21384 expanded_location s;
21385
21386 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21387 return;
21388 s = expand_location (DECL_SOURCE_LOCATION (decl));
21389 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21390 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21391 if (debug_column_info && s.column)
21392 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21393 }
21394
21395 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21396
21397 static void
21398 add_linkage_name_raw (dw_die_ref die, tree decl)
21399 {
21400 /* Defer until we have an assembler name set. */
21401 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21402 {
21403 limbo_die_node *asm_name;
21404
21405 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21406 asm_name->die = die;
21407 asm_name->created_for = decl;
21408 asm_name->next = deferred_asm_name;
21409 deferred_asm_name = asm_name;
21410 }
21411 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21412 add_linkage_attr (die, decl);
21413 }
21414
21415 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21416
21417 static void
21418 add_linkage_name (dw_die_ref die, tree decl)
21419 {
21420 if (debug_info_level > DINFO_LEVEL_NONE
21421 && VAR_OR_FUNCTION_DECL_P (decl)
21422 && TREE_PUBLIC (decl)
21423 && !(VAR_P (decl) && DECL_REGISTER (decl))
21424 && die->die_tag != DW_TAG_member)
21425 add_linkage_name_raw (die, decl);
21426 }
21427
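/* For example, a C++ function `int foo (int)' with external linkage has
   DECL_ASSEMBLER_NAME "_Z3fooi", which differs from its DECL_NAME, so a
   DW_AT_linkage_name (or DW_AT_MIPS_linkage_name before DWARF 4) carrying
   that mangled string is attached to its DIE.  A plain C function, whose
   assembler name matches its source name, typically gets no linkage
   name.  */
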
21428 /* Add a DW_AT_name attribute and source coordinate attribute for the
21429 given decl, but only if it actually has a name. */
21430
21431 static void
21432 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21433 bool no_linkage_name)
21434 {
21435 tree decl_name;
21436
21437 decl_name = DECL_NAME (decl);
21438 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21439 {
21440 const char *name = dwarf2_name (decl, 0);
21441 if (name)
21442 add_name_attribute (die, name);
21443 else
21444 add_desc_attribute (die, decl);
21445
21446 if (! DECL_ARTIFICIAL (decl))
21447 add_src_coords_attributes (die, decl);
21448
21449 if (!no_linkage_name)
21450 add_linkage_name (die, decl);
21451 }
21452 else
21453 add_desc_attribute (die, decl);
21454
21455 #ifdef VMS_DEBUGGING_INFO
21456 /* Get the function's name, as described by its RTL. This may be different
21457 from the DECL_NAME name used in the source file. */
21458 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21459 {
21460 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21461 XEXP (DECL_RTL (decl), 0), false);
21462 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21463 }
21464 #endif /* VMS_DEBUGGING_INFO */
21465 }
21466
21467 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21468
21469 static void
21470 add_discr_value (dw_die_ref die, dw_discr_value *value)
21471 {
21472 dw_attr_node attr;
21473
21474 attr.dw_attr = DW_AT_discr_value;
21475 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21476 attr.dw_attr_val.val_entry = NULL;
21477 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21478 if (value->pos)
21479 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21480 else
21481 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21482 add_dwarf_attr (die, &attr);
21483 }
21484
21485 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21486
21487 static void
21488 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21489 {
21490 dw_attr_node attr;
21491
21492 attr.dw_attr = DW_AT_discr_list;
21493 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21494 attr.dw_attr_val.val_entry = NULL;
21495 attr.dw_attr_val.v.val_discr_list = discr_list;
21496 add_dwarf_attr (die, &attr);
21497 }
21498
21499 static inline dw_discr_list_ref
21500 AT_discr_list (dw_attr_node *attr)
21501 {
21502 return attr->dw_attr_val.v.val_discr_list;
21503 }
21504
21505 #ifdef VMS_DEBUGGING_INFO
21506 /* Output the debug main pointer DIE for VMS. */
21507
21508 void
21509 dwarf2out_vms_debug_main_pointer (void)
21510 {
21511 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21512 dw_die_ref die;
21513
21514 /* Allocate the VMS debug main subprogram die. */
21515 die = new_die_raw (DW_TAG_subprogram);
21516 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21517 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21518 current_function_funcdef_no);
21519 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21520
21521 /* Make it the first child of comp_unit_die (). */
21522 die->die_parent = comp_unit_die ();
21523 if (comp_unit_die ()->die_child)
21524 {
21525 die->die_sib = comp_unit_die ()->die_child->die_sib;
21526 comp_unit_die ()->die_child->die_sib = die;
21527 }
21528 else
21529 {
21530 die->die_sib = die;
21531 comp_unit_die ()->die_child = die;
21532 }
21533 }
21534 #endif /* VMS_DEBUGGING_INFO */
21535
21536 /* walk_tree helper function for uses_local_type, below. */
21537
21538 static tree
21539 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21540 {
21541 if (!TYPE_P (*tp))
21542 *walk_subtrees = 0;
21543 else
21544 {
21545 tree name = TYPE_NAME (*tp);
21546 if (name && DECL_P (name) && decl_function_context (name))
21547 return *tp;
21548 }
21549 return NULL_TREE;
21550 }
21551
21552 /* If TYPE involves a function-local type (including a local typedef to a
21553 non-local type), returns that type; otherwise returns NULL_TREE. */
21554
21555 static tree
21556 uses_local_type (tree type)
21557 {
21558 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21559 return used;
21560 }
21561
21562 /* Return the DIE for the scope that immediately contains this type.
21563 Non-named types that do not involve a function-local type get global
21564 scope. Named types nested in namespaces or other types get their
21565 containing scope. All other types (i.e. function-local named types) get
21566 the current active scope. */
21567
21568 static dw_die_ref
21569 scope_die_for (tree t, dw_die_ref context_die)
21570 {
21571 dw_die_ref scope_die = NULL;
21572 tree containing_scope;
21573
21574 /* Non-types always go in the current scope. */
21575 gcc_assert (TYPE_P (t));
21576
21577 /* Use the scope of the typedef, rather than the scope of the type
21578 it refers to. */
21579 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21580 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21581 else
21582 containing_scope = TYPE_CONTEXT (t);
21583
21584 /* Use the containing namespace if there is one. */
21585 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21586 {
21587 if (context_die == lookup_decl_die (containing_scope))
21588 /* OK */;
21589 else if (debug_info_level > DINFO_LEVEL_TERSE)
21590 context_die = get_context_die (containing_scope);
21591 else
21592 containing_scope = NULL_TREE;
21593 }
21594
21595 /* Ignore function type "scopes" from the C frontend. They mean that
21596 a tagged type is local to a parmlist of a function declarator, but
21597 that isn't useful to DWARF. */
21598 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21599 containing_scope = NULL_TREE;
21600
21601 if (SCOPE_FILE_SCOPE_P (containing_scope))
21602 {
21603 /* If T uses a local type, keep it local as well, to avoid references
21604 to function-local DIEs from outside the function. */
21605 if (current_function_decl && uses_local_type (t))
21606 scope_die = context_die;
21607 else
21608 scope_die = comp_unit_die ();
21609 }
21610 else if (TYPE_P (containing_scope))
21611 {
21612 /* For types, we can just look up the appropriate DIE. */
21613 if (debug_info_level > DINFO_LEVEL_TERSE)
21614 scope_die = get_context_die (containing_scope);
21615 else
21616 {
21617 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21618 if (scope_die == NULL)
21619 scope_die = comp_unit_die ();
21620 }
21621 }
21622 else
21623 scope_die = context_die;
21624
21625 return scope_die;
21626 }
21627
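/* For instance, given

     namespace N { struct A { struct B { }; }; }

   the DIE for B is placed under the DIE for A (its containing type), and
   the DIE for A under the DW_TAG_namespace DIE for N, whereas a type
   declared at file scope simply hangs off comp_unit_die ().  */
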
21628 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21629
21630 static inline int
21631 local_scope_p (dw_die_ref context_die)
21632 {
21633 for (; context_die; context_die = context_die->die_parent)
21634 if (context_die->die_tag == DW_TAG_inlined_subroutine
21635 || context_die->die_tag == DW_TAG_subprogram)
21636 return 1;
21637
21638 return 0;
21639 }
21640
21641 /* Returns nonzero if CONTEXT_DIE is a class. */
21642
21643 static inline int
21644 class_scope_p (dw_die_ref context_die)
21645 {
21646 return (context_die
21647 && (context_die->die_tag == DW_TAG_structure_type
21648 || context_die->die_tag == DW_TAG_class_type
21649 || context_die->die_tag == DW_TAG_interface_type
21650 || context_die->die_tag == DW_TAG_union_type));
21651 }
21652
21653 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21654 whether or not to treat a DIE in this context as a declaration. */
21655
21656 static inline int
21657 class_or_namespace_scope_p (dw_die_ref context_die)
21658 {
21659 return (class_scope_p (context_die)
21660 || (context_die && context_die->die_tag == DW_TAG_namespace));
21661 }
21662
21663 /* Many forms of DIEs require a "type description" attribute. This
21664 routine locates the proper "type descriptor" die for the type given
21665 by 'type' plus any additional qualifiers given by 'cv_quals', and
21666 adds a DW_AT_type attribute below the given die. */
21667
21668 static void
21669 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21670 bool reverse, dw_die_ref context_die)
21671 {
21672 enum tree_code code = TREE_CODE (type);
21673 dw_die_ref type_die = NULL;
21674
21675 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21676 or fixed-point type, use the inner type. This is because we have no
21677 support for unnamed types in base_type_die. This can happen if this is
21678 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21679 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21680 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21681 type = TREE_TYPE (type), code = TREE_CODE (type);
21682
21683 if (code == ERROR_MARK
21684 /* Handle a special case. For functions whose return type is void, we
21685 generate *no* type attribute. (Note that no object may have type
21686 `void', so this only applies to function return types). */
21687 || code == VOID_TYPE)
21688 return;
21689
21690 type_die = modified_type_die (type,
21691 cv_quals | TYPE_QUALS (type),
21692 reverse,
21693 context_die);
21694
21695 if (type_die != NULL)
21696 add_AT_die_ref (object_die, DW_AT_type, type_die);
21697 }
21698
21699 /* Given an object die, add the calling convention attribute for the
21700 function call type. */
21701 static void
21702 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21703 {
21704 enum dwarf_calling_convention value = DW_CC_normal;
21705
21706 value = ((enum dwarf_calling_convention)
21707 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21708
21709 if (is_fortran ()
21710 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21711 {
21712 /* DWARF 2 doesn't provide a way to identify a program's source-level
21713 entry point. DW_AT_calling_convention attributes are only meant
21714 to describe functions' calling conventions. However, lacking a
21715 better way to signal the Fortran main program, we used this for
21716 a long time, following existing custom. Now, DWARF 4 has
21717 DW_AT_main_subprogram, which we add below, but some tools still
21718 rely on the old way, which we thus keep. */
21719 value = DW_CC_program;
21720
21721 if (dwarf_version >= 4 || !dwarf_strict)
21722 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21723 }
21724
21725 /* Only add the attribute if the backend requests it, and it
21726 is not DW_CC_normal. */
21727 if (value && (value != DW_CC_normal))
21728 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21729 }
21730
21731 /* Given a tree pointer to a struct, class, union, or enum type node, return
21732 a pointer to the (string) tag name for the given type, or zero if the type
21733 was declared without a tag. */
21734
21735 static const char *
21736 type_tag (const_tree type)
21737 {
21738 const char *name = 0;
21739
21740 if (TYPE_NAME (type) != 0)
21741 {
21742 tree t = 0;
21743
21744 /* Find the IDENTIFIER_NODE for the type name. */
21745 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21746 && !TYPE_NAMELESS (type))
21747 t = TYPE_NAME (type);
21748
21749 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21750 a TYPE_DECL node, regardless of whether or not a `typedef' was
21751 involved. */
21752 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21753 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21754 {
21755 /* We want to be extra verbose. Don't call dwarf_name if
21756 DECL_NAME isn't set. The default hook for decl_printable_name
21757 doesn't like that, and in this context it's correct to return
21758 0, instead of "<anonymous>" or the like. */
21759 if (DECL_NAME (TYPE_NAME (type))
21760 && !DECL_NAMELESS (TYPE_NAME (type)))
21761 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21762 }
21763
21764 /* Now get the name as a string, or invent one. */
21765 if (!name && t != 0)
21766 name = IDENTIFIER_POINTER (t);
21767 }
21768
21769 return (name == 0 || *name == '\0') ? 0 : name;
21770 }
21771
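/* E.g. `struct point { int x, y; };' yields the tag name "point", while
   an anonymous `struct { int x; }' (or a TYPE_NAMELESS type) yields a
   null pointer, so no DW_AT_name will be emitted for it.  */
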
21772 /* Return the type associated with a data member, making a special check
21773 for bit-field types. */
21774
21775 static inline tree
21776 member_declared_type (const_tree member)
21777 {
21778 return (DECL_BIT_FIELD_TYPE (member)
21779 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21780 }
21781
21782 /* Get the decl's label, as described by its RTL. This may be different
21783 from the DECL_NAME name used in the source file. */
21784
21785 #if 0
21786 static const char *
21787 decl_start_label (tree decl)
21788 {
21789 rtx x;
21790 const char *fnname;
21791
21792 x = DECL_RTL (decl);
21793 gcc_assert (MEM_P (x));
21794
21795 x = XEXP (x, 0);
21796 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21797
21798 fnname = XSTR (x, 0);
21799 return fnname;
21800 }
21801 #endif
21802 \f
21803 /* For variable-length arrays that have been previously generated, but
21804 may be incomplete due to missing subscript info, fill the subscript
21805 info. Return TRUE if this is one of those cases. */
21806 static bool
21807 fill_variable_array_bounds (tree type)
21808 {
21809 if (TREE_ASM_WRITTEN (type)
21810 && TREE_CODE (type) == ARRAY_TYPE
21811 && variably_modified_type_p (type, NULL))
21812 {
21813 dw_die_ref array_die = lookup_type_die (type);
21814 if (!array_die)
21815 return false;
21816 add_subscript_info (array_die, type, !is_ada ());
21817 return true;
21818 }
21819 return false;
21820 }
21821
21822 /* These routines generate the internal representation of the DIE's for
21823 the compilation unit. Debugging information is collected by walking
21824 the declaration trees passed in from dwarf2out_decl(). */
21825
21826 static void
21827 gen_array_type_die (tree type, dw_die_ref context_die)
21828 {
21829 dw_die_ref array_die;
21830
21831 /* GNU compilers represent multidimensional array types as sequences of
21832 one-dimensional array types whose element types are themselves array types.
21833 We sometimes squish that down to a single array_type DIE with multiple
21834 subscripts in the DWARF debugging info. The draft DWARF specification
21835 says that we are allowed to do this kind of compression in C, because
21836 there is no difference between an array of arrays and a multidimensional
21837 array. We don't do this for Ada, to remain as close as possible to the
21838 actual representation, which is especially important given the language's
21839 flexibility with respect to arrays of variable size. */
21840
21841 bool collapse_nested_arrays = !is_ada ();
21842
21843 if (fill_variable_array_bounds (type))
21844 return;
21845
21846 dw_die_ref scope_die = scope_die_for (type, context_die);
21847 tree element_type;
21848
21849 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21850 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21851 if (TYPE_STRING_FLAG (type)
21852 && TREE_CODE (type) == ARRAY_TYPE
21853 && is_fortran ()
21854 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21855 {
21856 HOST_WIDE_INT size;
21857
21858 array_die = new_die (DW_TAG_string_type, scope_die, type);
21859 add_name_attribute (array_die, type_tag (type));
21860 equate_type_number_to_die (type, array_die);
21861 size = int_size_in_bytes (type);
21862 if (size >= 0)
21863 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21864 /* ??? We can't annotate types late, but for LTO we may not
21865 generate a location early either (gfortran.dg/save_6.f90). */
21866 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21867 && TYPE_DOMAIN (type) != NULL_TREE
21868 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21869 {
21870 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21871 tree rszdecl = szdecl;
21872
21873 size = int_size_in_bytes (TREE_TYPE (szdecl));
21874 if (!DECL_P (szdecl))
21875 {
21876 if (TREE_CODE (szdecl) == INDIRECT_REF
21877 && DECL_P (TREE_OPERAND (szdecl, 0)))
21878 {
21879 rszdecl = TREE_OPERAND (szdecl, 0);
21880 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21881 != DWARF2_ADDR_SIZE)
21882 size = 0;
21883 }
21884 else
21885 size = 0;
21886 }
21887 if (size > 0)
21888 {
21889 dw_loc_list_ref loc
21890 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21891 NULL);
21892 if (loc)
21893 {
21894 add_AT_location_description (array_die, DW_AT_string_length,
21895 loc);
21896 if (size != DWARF2_ADDR_SIZE)
21897 add_AT_unsigned (array_die, dwarf_version >= 5
21898 ? DW_AT_string_length_byte_size
21899 : DW_AT_byte_size, size);
21900 }
21901 }
21902 }
21903 return;
21904 }
21905
21906 array_die = new_die (DW_TAG_array_type, scope_die, type);
21907 add_name_attribute (array_die, type_tag (type));
21908 equate_type_number_to_die (type, array_die);
21909
21910 if (TREE_CODE (type) == VECTOR_TYPE)
21911 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21912
21913 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21914 if (is_fortran ()
21915 && TREE_CODE (type) == ARRAY_TYPE
21916 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21917 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21918 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21919
21920 #if 0
21921 /* We default the array ordering. Debuggers will probably do the right
21922 things even if DW_AT_ordering is not present. It's not even an issue
21923 until we start to get into multidimensional arrays anyway. If a debugger
21924 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21925 then we'll have to put the DW_AT_ordering attribute back in. (But if
21926 and when we find out that we need to put these in, we will only do so
21927 for multidimensional arrays. */
21928 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21929 #endif
21930
21931 if (TREE_CODE (type) == VECTOR_TYPE)
21932 {
21933 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21934 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21935 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21936 add_bound_info (subrange_die, DW_AT_upper_bound,
21937 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21938 }
21939 else
21940 add_subscript_info (array_die, type, collapse_nested_arrays);
21941
21942 /* Add representation of the type of the elements of this array type and
21943 emit the corresponding DIE if we haven't done it already. */
21944 element_type = TREE_TYPE (type);
21945 if (collapse_nested_arrays)
21946 while (TREE_CODE (element_type) == ARRAY_TYPE)
21947 {
21948 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21949 break;
21950 element_type = TREE_TYPE (element_type);
21951 }
21952
21953 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21954 TREE_CODE (type) == ARRAY_TYPE
21955 && TYPE_REVERSE_STORAGE_ORDER (type),
21956 context_die);
21957
21958 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21959 if (TYPE_ARTIFICIAL (type))
21960 add_AT_flag (array_die, DW_AT_artificial, 1);
21961
21962 if (get_AT (array_die, DW_AT_name))
21963 add_pubtype (type, array_die);
21964
21965 add_alignment_attribute (array_die, type);
21966 }
21967
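/* As a sketch of the two special paths above: a Fortran

     character(len=10) :: s

   is described with a DW_TAG_string_type DIE carrying DW_AT_byte_size 10,
   while a GNU C vector such as

     typedef int v4si __attribute__ ((vector_size (16)));

   becomes a DW_TAG_array_type with the DW_AT_GNU_vector flag and a single
   subrange with bounds 0 .. 3.  */
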
21968 /* This routine generates the DIE for an array with a hidden descriptor;
21969 details are filled into *info by a langhook. */
21970
21971 static void
21972 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21973 dw_die_ref context_die)
21974 {
21975 const dw_die_ref scope_die = scope_die_for (type, context_die);
21976 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21977 struct loc_descr_context context = { type, info->base_decl, NULL,
21978 false, false };
21979 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21980 int dim;
21981
21982 add_name_attribute (array_die, type_tag (type));
21983 equate_type_number_to_die (type, array_die);
21984
21985 if (info->ndimensions > 1)
21986 switch (info->ordering)
21987 {
21988 case array_descr_ordering_row_major:
21989 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21990 break;
21991 case array_descr_ordering_column_major:
21992 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21993 break;
21994 default:
21995 break;
21996 }
21997
21998 if (dwarf_version >= 3 || !dwarf_strict)
21999 {
22000 if (info->data_location)
22001 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22002 dw_scalar_form_exprloc, &context);
22003 if (info->associated)
22004 add_scalar_info (array_die, DW_AT_associated, info->associated,
22005 dw_scalar_form_constant
22006 | dw_scalar_form_exprloc
22007 | dw_scalar_form_reference, &context);
22008 if (info->allocated)
22009 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22010 dw_scalar_form_constant
22011 | dw_scalar_form_exprloc
22012 | dw_scalar_form_reference, &context);
22013 if (info->stride)
22014 {
22015 const enum dwarf_attribute attr
22016 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22017 const int forms
22018 = (info->stride_in_bits)
22019 ? dw_scalar_form_constant
22020 : (dw_scalar_form_constant
22021 | dw_scalar_form_exprloc
22022 | dw_scalar_form_reference);
22023
22024 add_scalar_info (array_die, attr, info->stride, forms, &context);
22025 }
22026 }
22027 if (dwarf_version >= 5)
22028 {
22029 if (info->rank)
22030 {
22031 add_scalar_info (array_die, DW_AT_rank, info->rank,
22032 dw_scalar_form_constant
22033 | dw_scalar_form_exprloc, &context);
22034 subrange_tag = DW_TAG_generic_subrange;
22035 context.placeholder_arg = true;
22036 }
22037 }
22038
22039 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22040
22041 for (dim = 0; dim < info->ndimensions; dim++)
22042 {
22043 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22044
22045 if (info->dimen[dim].bounds_type)
22046 add_type_attribute (subrange_die,
22047 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22048 false, context_die);
22049 if (info->dimen[dim].lower_bound)
22050 add_bound_info (subrange_die, DW_AT_lower_bound,
22051 info->dimen[dim].lower_bound, &context);
22052 if (info->dimen[dim].upper_bound)
22053 add_bound_info (subrange_die, DW_AT_upper_bound,
22054 info->dimen[dim].upper_bound, &context);
22055 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22056 add_scalar_info (subrange_die, DW_AT_byte_stride,
22057 info->dimen[dim].stride,
22058 dw_scalar_form_constant
22059 | dw_scalar_form_exprloc
22060 | dw_scalar_form_reference,
22061 &context);
22062 }
22063
22064 gen_type_die (info->element_type, context_die);
22065 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22066 TREE_CODE (type) == ARRAY_TYPE
22067 && TYPE_REVERSE_STORAGE_ORDER (type),
22068 context_die);
22069
22070 if (get_AT (array_die, DW_AT_name))
22071 add_pubtype (type, array_die);
22072
22073 add_alignment_attribute (array_die, type);
22074 }
22075
22076 #if 0
22077 static void
22078 gen_entry_point_die (tree decl, dw_die_ref context_die)
22079 {
22080 tree origin = decl_ultimate_origin (decl);
22081 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22082
22083 if (origin != NULL)
22084 add_abstract_origin_attribute (decl_die, origin);
22085 else
22086 {
22087 add_name_and_src_coords_attributes (decl_die, decl);
22088 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22089 TYPE_UNQUALIFIED, false, context_die);
22090 }
22091
22092 if (DECL_ABSTRACT_P (decl))
22093 equate_decl_number_to_die (decl, decl_die);
22094 else
22095 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22096 }
22097 #endif
22098
22099 /* Walk through the list of incomplete types again, trying once more to
22100 emit full debugging info for them. */
22101
22102 static void
22103 retry_incomplete_types (void)
22104 {
22105 set_early_dwarf s;
22106 int i;
22107
22108 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22109 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22110 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22111 vec_safe_truncate (incomplete_types, 0);
22112 }
22113
22114 /* Determine what tag to use for a record type. */
22115
22116 static enum dwarf_tag
22117 record_type_tag (tree type)
22118 {
22119 if (! lang_hooks.types.classify_record)
22120 return DW_TAG_structure_type;
22121
22122 switch (lang_hooks.types.classify_record (type))
22123 {
22124 case RECORD_IS_STRUCT:
22125 return DW_TAG_structure_type;
22126
22127 case RECORD_IS_CLASS:
22128 return DW_TAG_class_type;
22129
22130 case RECORD_IS_INTERFACE:
22131 if (dwarf_version >= 3 || !dwarf_strict)
22132 return DW_TAG_interface_type;
22133 return DW_TAG_structure_type;
22134
22135 default:
22136 gcc_unreachable ();
22137 }
22138 }
22139
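/* For example, a C `struct' always maps to DW_TAG_structure_type here,
   since the C front end provides no classify_record hook.  The C++ hook
   typically classifies types declared with `class' as RECORD_IS_CLASS,
   yielding DW_TAG_class_type, and those declared with `struct' as
   RECORD_IS_STRUCT.  */
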
22140 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22141 include all of the information about the enumeration values also. Each
22142 enumerated type name/value is listed as a child of the enumerated type
22143 DIE. */
22144
22145 static dw_die_ref
22146 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22147 {
22148 dw_die_ref type_die = lookup_type_die (type);
22149 dw_die_ref orig_type_die = type_die;
22150
22151 if (type_die == NULL)
22152 {
22153 type_die = new_die (DW_TAG_enumeration_type,
22154 scope_die_for (type, context_die), type);
22155 equate_type_number_to_die (type, type_die);
22156 add_name_attribute (type_die, type_tag (type));
22157 if ((dwarf_version >= 4 || !dwarf_strict)
22158 && ENUM_IS_SCOPED (type))
22159 add_AT_flag (type_die, DW_AT_enum_class, 1);
22160 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22161 add_AT_flag (type_die, DW_AT_declaration, 1);
22162 if (!dwarf_strict)
22163 add_AT_unsigned (type_die, DW_AT_encoding,
22164 TYPE_UNSIGNED (type)
22165 ? DW_ATE_unsigned
22166 : DW_ATE_signed);
22167 }
22168 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22169 return type_die;
22170 else
22171 remove_AT (type_die, DW_AT_declaration);
22172
22173 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22174 given enum type is incomplete, do not generate the DW_AT_byte_size
22175 attribute or the DW_AT_element_list attribute. */
22176 if (TYPE_SIZE (type))
22177 {
22178 tree link;
22179
22180 if (!ENUM_IS_OPAQUE (type))
22181 TREE_ASM_WRITTEN (type) = 1;
22182 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22183 add_byte_size_attribute (type_die, type);
22184 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22185 add_alignment_attribute (type_die, type);
22186 if ((dwarf_version >= 3 || !dwarf_strict)
22187 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22188 {
22189 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22190 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22191 context_die);
22192 }
22193 if (TYPE_STUB_DECL (type) != NULL_TREE)
22194 {
22195 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22196 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22197 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22198 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22199 }
22200
22201 /* If the first reference to this type was as the return type of an
22202 inline function, then it may not have a parent. Fix this now. */
22203 if (type_die->die_parent == NULL)
22204 add_child_die (scope_die_for (type, context_die), type_die);
22205
22206 for (link = TYPE_VALUES (type);
22207 link != NULL; link = TREE_CHAIN (link))
22208 {
22209 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22210 tree value = TREE_VALUE (link);
22211
22212 gcc_assert (!ENUM_IS_OPAQUE (type));
22213 add_name_attribute (enum_die,
22214 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22215
22216 if (TREE_CODE (value) == CONST_DECL)
22217 value = DECL_INITIAL (value);
22218
22219 if (simple_type_size_in_bits (TREE_TYPE (value))
22220 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22221 {
22222 /* For constant forms created by add_AT_unsigned, DWARF
22223 consumers (GDB, elfutils, etc.) always zero-extend
22224 the value. Only when the actual value is negative
22225 do we need to use add_AT_int to generate a constant
22226 form that can represent negative values. */
22227 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22228 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22229 add_AT_unsigned (enum_die, DW_AT_const_value,
22230 (unsigned HOST_WIDE_INT) val);
22231 else
22232 add_AT_int (enum_die, DW_AT_const_value, val);
22233 }
22234 else
22235 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22236 that here. TODO: This should be re-worked to use correct
22237 signed/unsigned double tags for all cases. */
22238 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22239 }
22240
22241 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22242 if (TYPE_ARTIFICIAL (type)
22243 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22244 add_AT_flag (type_die, DW_AT_artificial, 1);
22245 }
22246 else
22247 add_AT_flag (type_die, DW_AT_declaration, 1);
22248
22249 add_pubtype (type, type_die);
22250
22251 return type_die;
22252 }
22253
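/* A small example of the DIEs produced above: for

     enum color { RED, GREEN = 5 };

   a DW_TAG_enumeration_type DIE named "color" is created with two
   DW_TAG_enumerator children, "RED" with DW_AT_const_value 0 and "GREEN"
   with DW_AT_const_value 5, plus DW_AT_byte_size and (for non-strict
   DWARF) DW_AT_encoding on the enumeration itself.  */
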
22254 /* Generate a DIE to represent either a real live formal parameter decl or to
22255 represent just the type of some formal parameter position in some function
22256 type.
22257
22258 Note that this routine is a bit unusual because its argument may be a
22259 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22260 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22261 node. If it's the former then this function is being called to output a
22262 DIE to represent a formal parameter object (or some inlining thereof). If
22263 it's the latter, then this function is only being called to output a
22264 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22265 argument type of some subprogram type.
22266 If EMIT_NAME_P is true, name and source coordinate attributes
22267 are emitted. */
22268
22269 static dw_die_ref
22270 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22271 dw_die_ref context_die)
22272 {
22273 tree node_or_origin = node ? node : origin;
22274 tree ultimate_origin;
22275 dw_die_ref parm_die = NULL;
22276
22277 if (DECL_P (node_or_origin))
22278 {
22279 parm_die = lookup_decl_die (node);
22280
22281 /* If the contexts differ, we may not be talking about the same
22282 thing.
22283 ??? When in LTO, the DIE parent is the "abstract" copy and the
22284 context_die is the specification "copy". But this whole block
22285 should eventually no longer be needed. */
22286 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22287 {
22288 if (!DECL_ABSTRACT_P (node))
22289 {
22290 /* This can happen when creating an inlined instance, in
22291 which case we need to create a new DIE that will get
22292 annotated with DW_AT_abstract_origin. */
22293 parm_die = NULL;
22294 }
22295 else
22296 gcc_unreachable ();
22297 }
22298
22299 if (parm_die && parm_die->die_parent == NULL)
22300 {
22301 /* Check that parm_die already has the right attributes that
22302 we would have added below. If any attributes are
22303 missing, fall through to add them. */
22304 if (! DECL_ABSTRACT_P (node_or_origin)
22305 && !get_AT (parm_die, DW_AT_location)
22306 && !get_AT (parm_die, DW_AT_const_value))
22307 /* We are missing location info, and are about to add it. */
22308 ;
22309 else
22310 {
22311 add_child_die (context_die, parm_die);
22312 return parm_die;
22313 }
22314 }
22315 }
22316
22317 /* If we have a previously generated DIE, use it, unless this is a
22318 concrete instance (origin != NULL), in which case we need a new
22319 DIE with a corresponding DW_AT_abstract_origin. */
22320 bool reusing_die;
22321 if (parm_die && origin == NULL)
22322 reusing_die = true;
22323 else
22324 {
22325 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22326 reusing_die = false;
22327 }
22328
22329 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22330 {
22331 case tcc_declaration:
22332 ultimate_origin = decl_ultimate_origin (node_or_origin);
22333 if (node || ultimate_origin)
22334 origin = ultimate_origin;
22335
22336 if (reusing_die)
22337 goto add_location;
22338
22339 if (origin != NULL)
22340 add_abstract_origin_attribute (parm_die, origin);
22341 else if (emit_name_p)
22342 add_name_and_src_coords_attributes (parm_die, node);
22343 if (origin == NULL
22344 || (! DECL_ABSTRACT_P (node_or_origin)
22345 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22346 decl_function_context
22347 (node_or_origin))))
22348 {
22349 tree type = TREE_TYPE (node_or_origin);
22350 if (decl_by_reference_p (node_or_origin))
22351 add_type_attribute (parm_die, TREE_TYPE (type),
22352 TYPE_UNQUALIFIED,
22353 false, context_die);
22354 else
22355 add_type_attribute (parm_die, type,
22356 decl_quals (node_or_origin),
22357 false, context_die);
22358 }
22359 if (origin == NULL && DECL_ARTIFICIAL (node))
22360 add_AT_flag (parm_die, DW_AT_artificial, 1);
22361 add_location:
22362 if (node && node != origin)
22363 equate_decl_number_to_die (node, parm_die);
22364 if (! DECL_ABSTRACT_P (node_or_origin))
22365 add_location_or_const_value_attribute (parm_die, node_or_origin,
22366 node == NULL);
22367
22368 break;
22369
22370 case tcc_type:
22371 /* We were called with some kind of a ..._TYPE node. */
22372 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22373 context_die);
22374 break;
22375
22376 default:
22377 gcc_unreachable ();
22378 }
22379
22380 return parm_die;
22381 }
22382
22383 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22384 children DW_TAG_formal_parameter DIEs representing the arguments of the
22385 parameter pack.
22386
22387 PARM_PACK must be a function parameter pack.
22388 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22389 must point to the subsequent arguments of the function PACK_ARG belongs to.
22390 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22391 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22392 following the last one for which a DIE was generated. */
22393
22394 static dw_die_ref
22395 gen_formal_parameter_pack_die (tree parm_pack,
22396 tree pack_arg,
22397 dw_die_ref subr_die,
22398 tree *next_arg)
22399 {
22400 tree arg;
22401 dw_die_ref parm_pack_die;
22402
22403 gcc_assert (parm_pack
22404 && lang_hooks.function_parameter_pack_p (parm_pack)
22405 && subr_die);
22406
22407 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22408 add_src_coords_attributes (parm_pack_die, parm_pack);
22409
22410 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22411 {
22412 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22413 parm_pack))
22414 break;
22415 gen_formal_parameter_die (arg, NULL,
22416 false /* Don't emit name attribute. */,
22417 parm_pack_die);
22418 }
22419 if (next_arg)
22420 *next_arg = arg;
22421 return parm_pack_die;
22422 }
22423
22424 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22425 at the end of an (ANSI prototyped) formal parameter list. */
22426
22427 static void
22428 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22429 {
22430 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22431 }
22432
22433 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22434 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22435 parameters as specified in some function type specification (except for
22436 those which appear as part of a function *definition*). */
22437
22438 static void
22439 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22440 {
22441 tree link;
22442 tree formal_type = NULL;
22443 tree first_parm_type;
22444 tree arg;
22445
22446 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22447 {
22448 arg = DECL_ARGUMENTS (function_or_method_type);
22449 function_or_method_type = TREE_TYPE (function_or_method_type);
22450 }
22451 else
22452 arg = NULL_TREE;
22453
22454 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22455
22456 /* Make our first pass over the list of formal parameter types and output a
22457 DW_TAG_formal_parameter DIE for each one. */
22458 for (link = first_parm_type; link; )
22459 {
22460 dw_die_ref parm_die;
22461
22462 formal_type = TREE_VALUE (link);
22463 if (formal_type == void_type_node)
22464 break;
22465
22466 /* Output a (nameless) DIE to represent the formal parameter itself. */
22467 parm_die = gen_formal_parameter_die (formal_type, NULL,
22468 true /* Emit name attribute. */,
22469 context_die);
22470 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22471 && link == first_parm_type)
22472 {
22473 add_AT_flag (parm_die, DW_AT_artificial, 1);
22474 if (dwarf_version >= 3 || !dwarf_strict)
22475 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22476 }
22477 else if (arg && DECL_ARTIFICIAL (arg))
22478 add_AT_flag (parm_die, DW_AT_artificial, 1);
22479
22480 link = TREE_CHAIN (link);
22481 if (arg)
22482 arg = DECL_CHAIN (arg);
22483 }
22484
22485 /* If this function type has an ellipsis, add a
22486 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22487 if (formal_type != void_type_node)
22488 gen_unspecified_parameters_die (function_or_method_type, context_die);
22489
22490 /* Make our second (and final) pass over the list of formal parameter types
22491 and output DIEs to represent those types (as necessary). */
22492 for (link = TYPE_ARG_TYPES (function_or_method_type);
22493 link && TREE_VALUE (link);
22494 link = TREE_CHAIN (link))
22495 gen_type_die (TREE_VALUE (link), context_die);
22496 }
22497
22498 /* We want to generate the DIE for TYPE so that we can generate the
22499 DIE for MEMBER, which has been defined; we will need to refer back
22500 to the member declaration nested within TYPE. If we're trying to
22501 generate minimal debug info for TYPE, processing TYPE won't do the
22502 trick; we need to attach the member declaration by hand. */
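/* A hedged sketch of the situation (class name made up): if the debug
   info for a class has been suppressed locally, e.g. because it is
   expected to be emitted elsewhere,

       struct C { void m (); };   // TYPE_DECL_SUPPRESS_DEBUG assumed set on C
       void C::m () { }

   then emitting the DIE for C::m still needs a declaration DIE for "m"
   nested inside the DIE for "C"; that is what we attach by hand below.  */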
22503
22504 static void
22505 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22506 {
22507 gen_type_die (type, context_die);
22508
22509 /* If we're trying to avoid duplicate debug info, we may not have
22510 emitted the member decl for this function. Emit it now. */
22511 if (TYPE_STUB_DECL (type)
22512 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22513 && ! lookup_decl_die (member))
22514 {
22515 dw_die_ref type_die;
22516 gcc_assert (!decl_ultimate_origin (member));
22517
22518 type_die = lookup_type_die_strip_naming_typedef (type);
22519 if (TREE_CODE (member) == FUNCTION_DECL)
22520 gen_subprogram_die (member, type_die);
22521 else if (TREE_CODE (member) == FIELD_DECL)
22522 {
22523 /* Ignore the nameless fields that are used to skip bits but handle
22524 C++ anonymous unions and structs. */
22525 if (DECL_NAME (member) != NULL_TREE
22526 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22527 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22528 {
22529 struct vlr_context vlr_ctx = {
22530 DECL_CONTEXT (member), /* struct_type */
22531 NULL_TREE /* variant_part_offset */
22532 };
22533 gen_type_die (member_declared_type (member), type_die);
22534 gen_field_die (member, &vlr_ctx, type_die);
22535 }
22536 }
22537 else
22538 gen_variable_die (member, NULL_TREE, type_die);
22539 }
22540 }
22541 \f
22542 /* Forward declare this function, because it is mutually recursive
22543 with set_block_origin_self. */
22544 static void set_decl_origin_self (tree);
22545
22546 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22547 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22548 that it points to the node itself, thus indicating that the node is its
22549 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22550 the given node is NULL, recursively descend the decl/block tree which
22551 it is the root of, and for each other ..._DECL or BLOCK node contained
22552 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22553 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22554 values to point to themselves. */
22555
22556 static void
22557 set_block_origin_self (tree stmt)
22558 {
22559 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22560 {
22561 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22562
22563 {
22564 tree local_decl;
22565
22566 for (local_decl = BLOCK_VARS (stmt);
22567 local_decl != NULL_TREE;
22568 local_decl = DECL_CHAIN (local_decl))
22569 /* Do not recurse on nested functions since the inlining status
22570 of parent and child can be different as per the DWARF spec. */
22571 if (TREE_CODE (local_decl) != FUNCTION_DECL
22572 && !DECL_EXTERNAL (local_decl))
22573 set_decl_origin_self (local_decl);
22574 }
22575
22576 {
22577 tree subblock;
22578
22579 for (subblock = BLOCK_SUBBLOCKS (stmt);
22580 subblock != NULL_TREE;
22581 subblock = BLOCK_CHAIN (subblock))
22582 set_block_origin_self (subblock); /* Recurse. */
22583 }
22584 }
22585 }
22586
22587 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22588 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22589 node so that it points to the node itself, thus indicating that the
22590 node represents its own (abstract) origin. Additionally, if the
22591 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22592 the decl/block tree of which the given node is the root, and for
22593 each other ..._DECL or BLOCK node contained therein whose
22594 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22595 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22596 point to themselves. */
22597
22598 static void
22599 set_decl_origin_self (tree decl)
22600 {
22601 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22602 {
22603 DECL_ABSTRACT_ORIGIN (decl) = decl;
22604 if (TREE_CODE (decl) == FUNCTION_DECL)
22605 {
22606 tree arg;
22607
22608 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22609 DECL_ABSTRACT_ORIGIN (arg) = arg;
22610 if (DECL_INITIAL (decl) != NULL_TREE
22611 && DECL_INITIAL (decl) != error_mark_node)
22612 set_block_origin_self (DECL_INITIAL (decl));
22613 }
22614 }
22615 }
22616 \f
22617 /* Mark the early DIE for DECL as the abstract instance. */
22618
22619 static void
22620 dwarf2out_abstract_function (tree decl)
22621 {
22622 dw_die_ref old_die;
22623
22624 /* Make sure we have the actual abstract inline, not a clone. */
22625 decl = DECL_ORIGIN (decl);
22626
22627 if (DECL_IGNORED_P (decl))
22628 return;
22629
22630 /* In LTO we're all set. We already created abstract instances
22631 early, and we want to avoid creating a concrete instance of it
22632 if we don't output it. */
22633 if (in_lto_p)
22634 return;
22635
22636 old_die = lookup_decl_die (decl);
22637 gcc_assert (old_die != NULL);
22638 if (get_AT (old_die, DW_AT_inline))
22639 /* We've already generated the abstract instance. */
22640 return;
22641
22642 /* Go ahead and put DW_AT_inline on the DIE. */
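  /* The value chosen below reflects both whether the function was
     declared inline and whether cgraph considered inlining it:

         declared inline, possibly inlined        DW_INL_declared_inlined
         declared inline, not inlined             DW_INL_declared_not_inlined
         not declared inline, possibly inlined    DW_INL_inlined
         not declared inline, not inlined         DW_INL_not_inlined  */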
22643 if (DECL_DECLARED_INLINE_P (decl))
22644 {
22645 if (cgraph_function_possibly_inlined_p (decl))
22646 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22647 else
22648 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22649 }
22650 else
22651 {
22652 if (cgraph_function_possibly_inlined_p (decl))
22653 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22654 else
22655 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22656 }
22657
22658 if (DECL_DECLARED_INLINE_P (decl)
22659 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22660 add_AT_flag (old_die, DW_AT_artificial, 1);
22661
22662 set_decl_origin_self (decl);
22663 }
22664
22665 /* Helper function of premark_used_types() which gets called through
22666 htab_traverse.
22667
22668 Marks the DIE of the given TYPE as perennial, so it never gets
22669 marked as unused by prune_unused_types. */
22670
22671 bool
22672 premark_used_types_helper (tree const &type, void *)
22673 {
22674 dw_die_ref die;
22675
22676 die = lookup_type_die (type);
22677 if (die != NULL)
22678 die->die_perennial_p = 1;
22679 return true;
22680 }
22681
22682 /* Helper function of premark_types_used_by_global_vars which gets called
22683 through htab_traverse.
22684
22685 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22686 marked as unused by prune_unused_types. The DIE of the type is marked
22687 only if the global variable using the type will actually be emitted. */
22688
22689 int
22690 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22691 void *)
22692 {
22693 struct types_used_by_vars_entry *entry;
22694 dw_die_ref die;
22695
22696 entry = (struct types_used_by_vars_entry *) *slot;
22697 gcc_assert (entry->type != NULL
22698 && entry->var_decl != NULL);
22699 die = lookup_type_die (entry->type);
22700 if (die)
22701 {
22702 /* Ask cgraph if the global variable really is to be emitted.
22703 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22704 varpool_node *node = varpool_node::get (entry->var_decl);
22705 if (node && node->definition)
22706 {
22707 die->die_perennial_p = 1;
22708 /* Keep the parent DIEs as well. */
22709 while ((die = die->die_parent) && die->die_perennial_p == 0)
22710 die->die_perennial_p = 1;
22711 }
22712 }
22713 return 1;
22714 }
22715
22716 /* Mark all members of used_types_hash as perennial. */
22717
22718 static void
22719 premark_used_types (struct function *fun)
22720 {
22721 if (fun && fun->used_types_hash)
22722 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22723 }
22724
22725 /* Mark all members of types_used_by_vars_entry as perennial. */
22726
22727 static void
22728 premark_types_used_by_global_vars (void)
22729 {
22730 if (types_used_by_vars_hash)
22731 types_used_by_vars_hash
22732 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22733 }
22734
22735 /* Mark all variables used by the symtab as perennial. */
22736
22737 static void
22738 premark_used_variables (void)
22739 {
22740 /* Mark DIEs in the symtab as used. */
22741 varpool_node *var;
22742 FOR_EACH_VARIABLE (var)
22743 {
22744 dw_die_ref die = lookup_decl_die (var->decl);
22745 if (die)
22746 die->die_perennial_p = 1;
22747 }
22748 }
22749
22750 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22751 for CA_LOC call arg loc node. */
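/* Roughly, the DIE produced looks like this (DWARF 5 spelling, purely
   illustrative):

       DW_TAG_call_site
         DW_AT_call_return_pc   <label recorded for the call>
         DW_AT_call_tail_call   <flag, only for tail calls>
         DW_AT_call_origin      <reference to, or address of, the callee>  */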
22752
22753 static dw_die_ref
22754 gen_call_site_die (tree decl, dw_die_ref subr_die,
22755 struct call_arg_loc_node *ca_loc)
22756 {
22757 dw_die_ref stmt_die = NULL, die;
22758 tree block = ca_loc->block;
22759
22760 while (block
22761 && block != DECL_INITIAL (decl)
22762 && TREE_CODE (block) == BLOCK)
22763 {
22764 stmt_die = lookup_block_die (block);
22765 if (stmt_die)
22766 break;
22767 block = BLOCK_SUPERCONTEXT (block);
22768 }
22769 if (stmt_die == NULL)
22770 stmt_die = subr_die;
22771 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22772 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22773 if (ca_loc->tail_call_p)
22774 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22775 if (ca_loc->symbol_ref)
22776 {
22777 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22778 if (tdie)
22779 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22780 else
22781 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22782 false);
22783 }
22784 return die;
22785 }
22786
22787 /* Generate a DIE to represent a declared function (either file-scope or
22788 block-local). */
22789
22790 static void
22791 gen_subprogram_die (tree decl, dw_die_ref context_die)
22792 {
22793 tree origin = decl_ultimate_origin (decl);
22794 dw_die_ref subr_die;
22795 dw_die_ref old_die = lookup_decl_die (decl);
22796
22797 /* This function gets called multiple times for different stages of
22798 the debug process. For example, for func() in this code:
22799
22800 namespace S
22801 {
22802 void func() { ... }
22803 }
22804
22805 ...we get called 4 times. Twice in early debug and twice in
22806 late debug:
22807
22808 Early debug
22809 -----------
22810
22811 1. Once while generating func() within the namespace. This is
22812 the declaration. The declaration bit below is set, as the
22813 context is the namespace.
22814
22815 A new DIE will be generated with DW_AT_declaration set.
22816
22817 2. Once for func() itself. This is the specification. The
22818 declaration bit below is clear as the context is the CU.
22819
22820 We will use the cached DIE from (1) to create a new DIE with
22821 DW_AT_specification pointing to the declaration in (1).
22822
22823 Late debug via rest_of_handle_final()
22824 -------------------------------------
22825
22826 3. Once while generating func() within the namespace. This is also the
22827 declaration, as in (1), but this time we will early exit below
22828 as we have a cached DIE and a declaration needs no additional
22829 annotations (no locations), as the source declaration line
22830 info is enough.
22831
22832 4. Once for func() itself. As in (2), this is the specification,
22833 but this time we will re-use the cached DIE, and just annotate
22834 it with the location information that should now be available.
22835
22836 For something without namespaces, but with abstract instances, we
22837 are also called multiple times:
22838
22839 class Base
22840 {
22841 public:
22842 Base (); // constructor declaration (1)
22843 };
22844
22845 Base::Base () { } // constructor specification (2)
22846
22847 Early debug
22848 -----------
22849
22850 1. Once for the Base() constructor by virtue of it being a
22851 member of the Base class. This is done via
22852 rest_of_type_compilation.
22853
22854 This is a declaration, so a new DIE will be created with
22855 DW_AT_declaration.
22856
22857 2. Once for the Base() constructor definition, but this time
22858 while generating the abstract instance of the base
22859 constructor (__base_ctor) which is being generated via early
22860 debug of reachable functions.
22861
22862 Even though we have a cached version of the declaration (1),
22863 we will create a DW_AT_specification of the declaration DIE
22864 in (1).
22865
22866 3. Once for the __base_ctor itself, but this time, we generate
22867 a DW_AT_abstract_origin version of the DW_AT_specification in
22868 (2).
22869
22870 Late debug via rest_of_handle_final
22871 -----------------------------------
22872
22873 4. One final time for the __base_ctor (which will have a cached
22874 DIE with DW_AT_abstract_origin created in (3)). This time,
22875 we will just annotate it with the location information now
22876 available.
22877 */
22878 int declaration = (current_function_decl != decl
22879 || class_or_namespace_scope_p (context_die));
22880
22881 /* A declaration that has been previously dumped needs no
22882 additional information. */
22883 if (old_die && declaration)
22884 return;
22885
22886 /* Now that the C++ front end lazily declares artificial member fns, we
22887 might need to retrofit the declaration into its class. */
22888 if (!declaration && !origin && !old_die
22889 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22890 && !class_or_namespace_scope_p (context_die)
22891 && debug_info_level > DINFO_LEVEL_TERSE)
22892 old_die = force_decl_die (decl);
22893
22894 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22895 if (origin != NULL)
22896 {
22897 gcc_assert (!declaration || local_scope_p (context_die));
22898
22899 /* Fixup die_parent for the abstract instance of a nested
22900 inline function. */
22901 if (old_die && old_die->die_parent == NULL)
22902 add_child_die (context_die, old_die);
22903
22904 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22905 {
22906 /* If we have a DW_AT_abstract_origin we have a working
22907 cached version. */
22908 subr_die = old_die;
22909 }
22910 else
22911 {
22912 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22913 add_abstract_origin_attribute (subr_die, origin);
22914 /* This is where the actual code for a cloned function is.
22915 Let's emit a linkage name attribute for it. This helps
22916 debuggers to, e.g., set breakpoints in
22917 constructors/destructors when the user asks "break
22918 K::K". */
22919 add_linkage_name (subr_die, decl);
22920 }
22921 }
22922 /* A cached copy, possibly from early dwarf generation. Reuse as
22923 much as possible. */
22924 else if (old_die)
22925 {
22926 if (!get_AT_flag (old_die, DW_AT_declaration)
22927 /* We can have a normal definition following an inline one in the
22928 case of redefinition of GNU C extern inlines.
22929 It seems reasonable to use AT_specification in this case. */
22930 && !get_AT (old_die, DW_AT_inline))
22931 {
22932 /* Detect and ignore this case, where we are trying to output
22933 something we have already output. */
22934 if (get_AT (old_die, DW_AT_low_pc)
22935 || get_AT (old_die, DW_AT_ranges))
22936 return;
22937
22938 /* If we have no location information, this must be a
22939 partially generated DIE from early dwarf generation.
22940 Fall through and generate it. */
22941 }
22942
22943 /* If the definition comes from the same place as the declaration,
22944 maybe use the old DIE. We always want the DIE for this function
22945 that has the *_pc attributes to be under comp_unit_die so the
22946 debugger can find it. We also need to do this for abstract
22947 instances of inlines, since the spec requires the out-of-line copy
22948 to have the same parent. For local class methods, this doesn't
22949 apply; we just use the old DIE. */
22950 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22951 struct dwarf_file_data * file_index = lookup_filename (s.file);
22952 if (((is_unit_die (old_die->die_parent)
22953 /* This condition fixes the inconsistency/ICE with the
22954 following Fortran test (or some derivative thereof) while
22955 building libgfortran:
22956
22957 module some_m
22958 contains
22959 logical function funky (FLAG)
22960 funky = .true.
22961 end function
22962 end module
22963 */
22964 || (old_die->die_parent
22965 && old_die->die_parent->die_tag == DW_TAG_module)
22966 || local_scope_p (old_die->die_parent)
22967 || context_die == NULL)
22968 && (DECL_ARTIFICIAL (decl)
22969 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22970 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22971 == (unsigned) s.line)
22972 && (!debug_column_info
22973 || s.column == 0
22974 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22975 == (unsigned) s.column)))))
22976 /* With LTO if there's an abstract instance for
22977 the old DIE, this is a concrete instance and
22978 thus re-use the DIE. */
22979 || get_AT (old_die, DW_AT_abstract_origin))
22980 {
22981 subr_die = old_die;
22982
22983 /* Clear out the declaration attribute, but leave the
22984 parameters so they can be augmented with location
22985 information later. If this was a declaration, however, we
22986 wipe out the nameless parameters and recreate
22987 them further down. */
22988 if (remove_AT (subr_die, DW_AT_declaration))
22989 {
22990
22991 remove_AT (subr_die, DW_AT_object_pointer);
22992 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22993 }
22994 }
22995 /* Make a specification pointing to the previously built
22996 declaration. */
22997 else
22998 {
22999 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23000 add_AT_specification (subr_die, old_die);
23001 add_pubname (decl, subr_die);
23002 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23003 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23004 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23005 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23006 if (debug_column_info
23007 && s.column
23008 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23009 != (unsigned) s.column))
23010 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23011
23012 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
23013 emit the real type on the definition die. */
23014 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23015 {
23016 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23017 if (die == auto_die || die == decltype_auto_die)
23018 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23019 TYPE_UNQUALIFIED, false, context_die);
23020 }
23021
23022 /* When we process the method declaration, we haven't seen
23023 the out-of-class defaulted definition yet, so we have to
23024 recheck now. */
23025 if ((dwarf_version >= 5 || ! dwarf_strict)
23026 && !get_AT (subr_die, DW_AT_defaulted))
23027 {
23028 int defaulted
23029 = lang_hooks.decls.decl_dwarf_attribute (decl,
23030 DW_AT_defaulted);
23031 if (defaulted != -1)
23032 {
23033 /* Other values must have been handled before. */
23034 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23035 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23036 }
23037 }
23038 }
23039 }
23040 /* Create a fresh DIE for anything else. */
23041 else
23042 {
23043 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23044
23045 if (TREE_PUBLIC (decl))
23046 add_AT_flag (subr_die, DW_AT_external, 1);
23047
23048 add_name_and_src_coords_attributes (subr_die, decl);
23049 add_pubname (decl, subr_die);
23050 if (debug_info_level > DINFO_LEVEL_TERSE)
23051 {
23052 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23053 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23054 TYPE_UNQUALIFIED, false, context_die);
23055 }
23056
23057 add_pure_or_virtual_attribute (subr_die, decl);
23058 if (DECL_ARTIFICIAL (decl))
23059 add_AT_flag (subr_die, DW_AT_artificial, 1);
23060
23061 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23062 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23063
23064 add_alignment_attribute (subr_die, decl);
23065
23066 add_accessibility_attribute (subr_die, decl);
23067 }
23068
23069 /* Unless we have an existing non-declaration DIE, equate the new
23070 DIE. */
23071 if (!old_die || is_declaration_die (old_die))
23072 equate_decl_number_to_die (decl, subr_die);
23073
23074 if (declaration)
23075 {
23076 if (!old_die || !get_AT (old_die, DW_AT_inline))
23077 {
23078 add_AT_flag (subr_die, DW_AT_declaration, 1);
23079
23080 /* If this is an explicit function declaration then generate
23081 a DW_AT_explicit attribute. */
23082 if ((dwarf_version >= 3 || !dwarf_strict)
23083 && lang_hooks.decls.decl_dwarf_attribute (decl,
23084 DW_AT_explicit) == 1)
23085 add_AT_flag (subr_die, DW_AT_explicit, 1);
23086
23087 /* If this is a C++11 deleted special function member then generate
23088 a DW_AT_deleted attribute. */
23089 if ((dwarf_version >= 5 || !dwarf_strict)
23090 && lang_hooks.decls.decl_dwarf_attribute (decl,
23091 DW_AT_deleted) == 1)
23092 add_AT_flag (subr_die, DW_AT_deleted, 1);
23093
23094 /* If this is a C++11 defaulted special function member then
23095 generate a DW_AT_defaulted attribute. */
23096 if (dwarf_version >= 5 || !dwarf_strict)
23097 {
23098 int defaulted
23099 = lang_hooks.decls.decl_dwarf_attribute (decl,
23100 DW_AT_defaulted);
23101 if (defaulted != -1)
23102 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23103 }
23104
23105 /* If this is a C++11 non-static member function with & ref-qualifier
23106 then generate a DW_AT_reference attribute. */
23107 if ((dwarf_version >= 5 || !dwarf_strict)
23108 && lang_hooks.decls.decl_dwarf_attribute (decl,
23109 DW_AT_reference) == 1)
23110 add_AT_flag (subr_die, DW_AT_reference, 1);
23111
23112 /* If this is a C++11 non-static member function with &&
23113 ref-qualifier then generate a DW_AT_reference attribute. */
23114 if ((dwarf_version >= 5 || !dwarf_strict)
23115 && lang_hooks.decls.decl_dwarf_attribute (decl,
23116 DW_AT_rvalue_reference)
23117 == 1)
23118 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23119 }
23120 }
23121 /* For non-DECL_EXTERNALs, if range information is available, fill
23122 the DIE with it. */
23123 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23124 {
23125 HOST_WIDE_INT cfa_fb_offset;
23126
23127 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23128
23129 if (!crtl->has_bb_partition)
23130 {
23131 dw_fde_ref fde = fun->fde;
23132 if (fde->dw_fde_begin)
23133 {
23134 /* We have already generated the labels. */
23135 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23136 fde->dw_fde_end, false);
23137 }
23138 else
23139 {
23140 /* Create start/end labels and add the range. */
23141 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23142 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23143 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23144 current_function_funcdef_no);
23145 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23146 current_function_funcdef_no);
23147 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23148 false);
23149 }
23150
23151 #if VMS_DEBUGGING_INFO
23152 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23153 Section 2.3 Prologue and Epilogue Attributes:
23154 When a breakpoint is set on entry to a function, it is generally
23155 desirable for execution to be suspended, not on the very first
23156 instruction of the function, but rather at a point after the
23157 function's frame has been set up, after any language defined local
23158 declaration processing has been completed, and before execution of
23159 the first statement of the function begins. Debuggers generally
23160 cannot properly determine where this point is. Similarly for a
23161 breakpoint set on exit from a function. The prologue and epilogue
23162 attributes allow a compiler to communicate the location(s) to use. */
23163
23164 {
23165 if (fde->dw_fde_vms_end_prologue)
23166 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23167 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23168
23169 if (fde->dw_fde_vms_begin_epilogue)
23170 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23171 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23172 }
23173 #endif
23174
23175 }
23176 else
23177 {
23178 /* Generate pubnames entries for the split function code ranges. */
23179 dw_fde_ref fde = fun->fde;
23180
23181 if (fde->dw_fde_second_begin)
23182 {
23183 if (dwarf_version >= 3 || !dwarf_strict)
23184 {
23185 /* We should use ranges for non-contiguous code section
23186 addresses. Use the actual code range for the initial
23187 section, since the HOT/COLD labels might precede an
23188 alignment offset. */
23189 bool range_list_added = false;
23190 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23191 fde->dw_fde_end, &range_list_added,
23192 false);
23193 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23194 fde->dw_fde_second_end,
23195 &range_list_added, false);
23196 if (range_list_added)
23197 add_ranges (NULL);
23198 }
23199 else
23200 {
23201 /* There is no real support in DWARF 2 for this, so we make
23202 a workaround. First, emit the pubname for the segment
23203 containing the function label. Then make and emit a
23204 simplified subprogram DIE for the second segment with the
23205 name prefixed by __second_sect_of_. We use the same
23206 linkage name for the second DIE so that gdb will find both
23207 sections when given "b foo". */
23208 const char *name = NULL;
23209 tree decl_name = DECL_NAME (decl);
23210 dw_die_ref seg_die;
23211
23212 /* Do the 'primary' section. */
23213 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23214 fde->dw_fde_end, false);
23215
23216 /* Build a minimal DIE for the secondary section. */
23217 seg_die = new_die (DW_TAG_subprogram,
23218 subr_die->die_parent, decl);
23219
23220 if (TREE_PUBLIC (decl))
23221 add_AT_flag (seg_die, DW_AT_external, 1);
23222
23223 if (decl_name != NULL
23224 && IDENTIFIER_POINTER (decl_name) != NULL)
23225 {
23226 name = dwarf2_name (decl, 1);
23227 if (! DECL_ARTIFICIAL (decl))
23228 add_src_coords_attributes (seg_die, decl);
23229
23230 add_linkage_name (seg_die, decl);
23231 }
23232 gcc_assert (name != NULL);
23233 add_pure_or_virtual_attribute (seg_die, decl);
23234 if (DECL_ARTIFICIAL (decl))
23235 add_AT_flag (seg_die, DW_AT_artificial, 1);
23236
23237 name = concat ("__second_sect_of_", name, NULL);
23238 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23239 fde->dw_fde_second_end, false);
23240 add_name_attribute (seg_die, name);
23241 if (want_pubnames ())
23242 add_pubname_string (name, seg_die);
23243 }
23244 }
23245 else
23246 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23247 false);
23248 }
23249
23250 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23251
23252 /* We define the "frame base" as the function's CFA. This is more
23253 convenient for several reasons: (1) It's stable across the prologue
23254 and epilogue, which makes it better than just a frame pointer,
23255 (2) With dwarf3, there exists a one-byte encoding that allows us
23256 to reference the .debug_frame data by proxy, but failing that,
23257 (3) We can at least reuse the code inspection and interpretation
23258 code that determines the CFA position at various points in the
23259 function. */
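	/* In the common DWARF 3+ case this boils down to the single
	   one-byte opcode added just below; a readelf-style dump of the
	   attribute would show something like (illustrative):

	       DW_AT_frame_base : 1 byte block: 9c  (DW_OP_call_frame_cfa)  */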
23260 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23261 {
23262 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23263 add_AT_loc (subr_die, DW_AT_frame_base, op);
23264 }
23265 else
23266 {
23267 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23268 if (list->dw_loc_next)
23269 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23270 else
23271 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23272 }
23273
23274 /* Compute a displacement from the "steady-state frame pointer" to
23275 the CFA. The former is what all stack slots and argument slots
23276 will reference in the rtl; the latter is what we've told the
23277 debugger about. We'll need to adjust all frame_base references
23278 by this displacement. */
23279 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23280
23281 if (fun->static_chain_decl)
23282 {
23283 /* DWARF requires here a location expression that computes the
23284 address of the enclosing subprogram's frame base. The machinery
23285 in tree-nested.c is supposed to store this specific address in the
23286 last field of the FRAME record. */
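	  /* In C-like pseudo code the expression built below is roughly
	     (type and field names are made up):

		 ((struct FRAME *) __static_chain)->__parent_frame_base

	     i.e. dereference the static chain pointer and load the last
	     field of the FRAME record.  */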
23287 const tree frame_type
23288 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23289 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23290
23291 tree fb_expr
23292 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23293 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23294 fb_expr, fb_decl, NULL_TREE);
23295
23296 add_AT_location_description (subr_die, DW_AT_static_link,
23297 loc_list_from_tree (fb_expr, 0, NULL));
23298 }
23299
23300 resolve_variable_values ();
23301 }
23302
23303 /* Generate child DIEs for template parameters. */
23304 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23305 gen_generic_params_dies (decl);
23306
23307 /* Now output descriptions of the arguments for this function. This gets
23308 (unnecessarily?) complex because the DECL_ARGUMENTS list
23309 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23310 `...' at the end of the formal parameter list. In order to find out if
23311 there was a trailing ellipsis or not, we must instead look at the type
23312 associated with the FUNCTION_DECL. This will be a node of type
23313 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23314 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23315 an ellipsis at the end. */
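  /* For C, illustratively:

         int f (int);       TYPE_ARG_TYPES: int, void_type_node  => no ellipsis
         int g (int, ...);  TYPE_ARG_TYPES: int                  => ellipsis
         int h ();          TYPE_ARG_TYPES: NULL                 => unprototyped  */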
23316
23317 /* In the case where we are describing a mere function declaration, all we
23318 need to do here (and all we *can* do here) is to describe the *types* of
23319 its formal parameters. */
23320 if (debug_info_level <= DINFO_LEVEL_TERSE)
23321 ;
23322 else if (declaration)
23323 gen_formal_types_die (decl, subr_die);
23324 else
23325 {
23326 /* Generate DIEs to represent all known formal parameters. */
23327 tree parm = DECL_ARGUMENTS (decl);
23328 tree generic_decl = early_dwarf
23329 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23330 tree generic_decl_parm = generic_decl
23331 ? DECL_ARGUMENTS (generic_decl)
23332 : NULL;
23333
23334 /* Now we want to walk the list of parameters of the function and
23335 emit their relevant DIEs.
23336
23337 We consider the case of DECL being an instance of a generic function
23338 as well as it being a normal function.
23339
23340 If DECL is an instance of a generic function we walk the
23341 parameters of the generic function declaration _and_ the parameters of
23342 DECL itself. This is useful because we want to emit specific DIEs for
23343 function parameter packs and those are declared as part of the
23344 generic function declaration. In that particular case,
23345 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23346 That DIE has children DIEs representing the set of arguments
23347 of the pack. Note that the set of pack arguments can be empty.
23348 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23349 child DIEs.
23350
23351 Otherwise, we just consider the parameters of DECL. */
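      /* As a hedged C++ illustration (names made up): for

             template <typename... T> void f (T... args);
             template void f<int, long> (int, long);

         the pack "args" produces one DW_TAG_GNU_formal_parameter_pack DIE
         whose children are the DW_TAG_formal_parameter DIEs for the
         instantiated int and long parameters.  */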
23352 while (generic_decl_parm || parm)
23353 {
23354 if (generic_decl_parm
23355 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23356 gen_formal_parameter_pack_die (generic_decl_parm,
23357 parm, subr_die,
23358 &parm);
23359 else if (parm)
23360 {
23361 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23362
23363 if (early_dwarf
23364 && parm == DECL_ARGUMENTS (decl)
23365 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23366 && parm_die
23367 && (dwarf_version >= 3 || !dwarf_strict))
23368 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23369
23370 parm = DECL_CHAIN (parm);
23371 }
23372
23373 if (generic_decl_parm)
23374 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23375 }
23376
23377 /* Decide whether we need an unspecified_parameters DIE at the end.
23378 There are two cases to do this for: 1) the ANSI `...' declaration,
23379 which is detectable when the end of the arg list is not a
23380 void_type_node; and 2) an unprototyped function declaration (not a
23381 definition), which just means that we have no info about the
23382 parameters at all. */
23383 if (early_dwarf)
23384 {
23385 if (prototype_p (TREE_TYPE (decl)))
23386 {
23387 /* This is the prototyped case; check for a trailing ellipsis. */
23388 if (stdarg_p (TREE_TYPE (decl)))
23389 gen_unspecified_parameters_die (decl, subr_die);
23390 }
23391 else if (DECL_INITIAL (decl) == NULL_TREE)
23392 gen_unspecified_parameters_die (decl, subr_die);
23393 }
23394 }
23395
23396 if (subr_die != old_die)
23397 /* Add the calling convention attribute if requested. */
23398 add_calling_convention_attribute (subr_die, decl);
23399
23400 /* Output Dwarf info for all of the stuff within the body of the function
23401 (if it has one - it may be just a declaration).
23402
23403 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23404 a function. This BLOCK actually represents the outermost binding contour
23405 for the function, i.e. the contour in which the function's formal
23406 parameters and labels get declared. Curiously, it appears that the front
23407 end doesn't actually put the PARM_DECL nodes for the current function onto
23408 the BLOCK_VARS list for this outer scope; they are strung off the
23409 DECL_ARGUMENTS list for the function instead.
23410
23411 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23412 the LABEL_DECL nodes for the function however, and we output DWARF info
23413 for those in decls_for_scope. Just within the `outer_scope' there will be
23414 a BLOCK node representing the function's outermost pair of curly braces,
23415 and any blocks used for the base and member initializers of a C++
23416 constructor function. */
23417 tree outer_scope = DECL_INITIAL (decl);
23418 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23419 {
23420 int call_site_note_count = 0;
23421 int tail_call_site_note_count = 0;
23422
23423 /* Emit a DW_TAG_variable DIE for a named return value. */
23424 if (DECL_NAME (DECL_RESULT (decl)))
23425 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23426
23427 /* The first time through decls_for_scope we will generate the
23428 DIEs for the locals. The second time, we fill in the
23429 location info. */
23430 decls_for_scope (outer_scope, subr_die);
23431
23432 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23433 {
23434 struct call_arg_loc_node *ca_loc;
23435 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23436 {
23437 dw_die_ref die = NULL;
23438 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23439 rtx arg, next_arg;
23440 tree arg_decl = NULL_TREE;
23441
23442 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23443 ? XEXP (ca_loc->call_arg_loc_note, 0)
23444 : NULL_RTX);
23445 arg; arg = next_arg)
23446 {
23447 dw_loc_descr_ref reg, val;
23448 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23449 dw_die_ref cdie, tdie = NULL;
23450
23451 next_arg = XEXP (arg, 1);
23452 if (REG_P (XEXP (XEXP (arg, 0), 0))
23453 && next_arg
23454 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23455 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23456 && REGNO (XEXP (XEXP (arg, 0), 0))
23457 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23458 next_arg = XEXP (next_arg, 1);
23459 if (mode == VOIDmode)
23460 {
23461 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23462 if (mode == VOIDmode)
23463 mode = GET_MODE (XEXP (arg, 0));
23464 }
23465 if (mode == VOIDmode || mode == BLKmode)
23466 continue;
23467 /* Get dynamic information about call target only if we
23468 have no static information: we cannot generate both
23469 DW_AT_call_origin and DW_AT_call_target
23470 attributes. */
23471 if (ca_loc->symbol_ref == NULL_RTX)
23472 {
23473 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23474 {
23475 tloc = XEXP (XEXP (arg, 0), 1);
23476 continue;
23477 }
23478 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23479 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23480 {
23481 tlocc = XEXP (XEXP (arg, 0), 1);
23482 continue;
23483 }
23484 }
23485 reg = NULL;
23486 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23487 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23488 VAR_INIT_STATUS_INITIALIZED);
23489 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23490 {
23491 rtx mem = XEXP (XEXP (arg, 0), 0);
23492 reg = mem_loc_descriptor (XEXP (mem, 0),
23493 get_address_mode (mem),
23494 GET_MODE (mem),
23495 VAR_INIT_STATUS_INITIALIZED);
23496 }
23497 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23498 == DEBUG_PARAMETER_REF)
23499 {
23500 tree tdecl
23501 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23502 tdie = lookup_decl_die (tdecl);
23503 if (tdie == NULL)
23504 continue;
23505 arg_decl = tdecl;
23506 }
23507 else
23508 continue;
23509 if (reg == NULL
23510 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23511 != DEBUG_PARAMETER_REF)
23512 continue;
23513 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23514 VOIDmode,
23515 VAR_INIT_STATUS_INITIALIZED);
23516 if (val == NULL)
23517 continue;
23518 if (die == NULL)
23519 die = gen_call_site_die (decl, subr_die, ca_loc);
23520 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23521 NULL_TREE);
23522 add_desc_attribute (cdie, arg_decl);
23523 if (reg != NULL)
23524 add_AT_loc (cdie, DW_AT_location, reg);
23525 else if (tdie != NULL)
23526 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23527 tdie);
23528 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23529 if (next_arg != XEXP (arg, 1))
23530 {
23531 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23532 if (mode == VOIDmode)
23533 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23534 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23535 0), 1),
23536 mode, VOIDmode,
23537 VAR_INIT_STATUS_INITIALIZED);
23538 if (val != NULL)
23539 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23540 val);
23541 }
23542 }
23543 if (die == NULL
23544 && (ca_loc->symbol_ref || tloc))
23545 die = gen_call_site_die (decl, subr_die, ca_loc);
23546 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23547 {
23548 dw_loc_descr_ref tval = NULL;
23549
23550 if (tloc != NULL_RTX)
23551 tval = mem_loc_descriptor (tloc,
23552 GET_MODE (tloc) == VOIDmode
23553 ? Pmode : GET_MODE (tloc),
23554 VOIDmode,
23555 VAR_INIT_STATUS_INITIALIZED);
23556 if (tval)
23557 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23558 else if (tlocc != NULL_RTX)
23559 {
23560 tval = mem_loc_descriptor (tlocc,
23561 GET_MODE (tlocc) == VOIDmode
23562 ? Pmode : GET_MODE (tlocc),
23563 VOIDmode,
23564 VAR_INIT_STATUS_INITIALIZED);
23565 if (tval)
23566 add_AT_loc (die,
23567 dwarf_AT (DW_AT_call_target_clobbered),
23568 tval);
23569 }
23570 }
23571 if (die != NULL)
23572 {
23573 call_site_note_count++;
23574 if (ca_loc->tail_call_p)
23575 tail_call_site_note_count++;
23576 }
23577 }
23578 }
23579 call_arg_locations = NULL;
23580 call_arg_loc_last = NULL;
23581 if (tail_call_site_count >= 0
23582 && tail_call_site_count == tail_call_site_note_count
23583 && (!dwarf_strict || dwarf_version >= 5))
23584 {
23585 if (call_site_count >= 0
23586 && call_site_count == call_site_note_count)
23587 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23588 else
23589 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23590 }
23591 call_site_count = -1;
23592 tail_call_site_count = -1;
23593 }
23594
23595 /* Mark used types after we have created DIEs for the function's scopes. */
23596 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23597 }
23598
23599 /* Returns a hash value for X (which really is a die_struct). */
23600
23601 hashval_t
23602 block_die_hasher::hash (die_struct *d)
23603 {
23604 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23605 }
23606
23607 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23608 as decl_id and die_parent of die_struct Y. */
23609
23610 bool
23611 block_die_hasher::equal (die_struct *x, die_struct *y)
23612 {
23613 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23614 }
23615
23616 /* Hold information about markers for inlined entry points. */
23617 struct GTY ((for_user)) inline_entry_data
23618 {
23619 /* The block that's the inlined_function_outer_scope for an inlined
23620 function. */
23621 tree block;
23622
23623 /* The label at the inlined entry point. */
23624 const char *label_pfx;
23625 unsigned int label_num;
23626
23627 /* The view number to be used as the inlined entry point. */
23628 var_loc_view view;
23629 };
23630
23631 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23632 {
23633 typedef tree compare_type;
23634 static inline hashval_t hash (const inline_entry_data *);
23635 static inline bool equal (const inline_entry_data *, const_tree);
23636 };
23637
23638 /* Hash table routines for inline_entry_data. */
23639
23640 inline hashval_t
23641 inline_entry_data_hasher::hash (const inline_entry_data *data)
23642 {
23643 return htab_hash_pointer (data->block);
23644 }
23645
23646 inline bool
23647 inline_entry_data_hasher::equal (const inline_entry_data *data,
23648 const_tree block)
23649 {
23650 return data->block == block;
23651 }
23652
23653 /* Inlined entry points pending DIE creation in this compilation unit. */
23654
23655 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23656
23657
23658 /* Return TRUE if DECL, which may have been previously generated as
23659 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23660 true if decl (or its origin) is either an extern declaration or a
23661 class/namespace scoped declaration.
23662
23663 The declare_in_namespace support causes us to get two DIEs for one
23664 variable, both of which are declarations. We want to avoid
23665 considering one to be a specification, so we must test for
23666 DECLARATION and DW_AT_declaration. */
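/* For example (illustrative), for a C++ class-level static

       struct S { static int x; };   // declaration, carries DW_AT_declaration
       int S::x;                     // definition

   the DIE for the definition is expected to point back at the in-class
   declaration DIE via DW_AT_specification.  */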
23667 static inline bool
23668 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23669 {
23670 return (old_die && TREE_STATIC (decl) && !declaration
23671 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23672 }
23673
23674 /* Return true if DECL is a local static. */
23675
23676 static inline bool
23677 local_function_static (tree decl)
23678 {
23679 gcc_assert (VAR_P (decl));
23680 return TREE_STATIC (decl)
23681 && DECL_CONTEXT (decl)
23682 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23683 }
23684
23685 /* Generate a DIE to represent a declared data object.
23686 Either DECL or ORIGIN must be non-null. */
23687
23688 static void
23689 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23690 {
23691 HOST_WIDE_INT off = 0;
23692 tree com_decl;
23693 tree decl_or_origin = decl ? decl : origin;
23694 tree ultimate_origin;
23695 dw_die_ref var_die;
23696 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23697 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23698 || class_or_namespace_scope_p (context_die));
23699 bool specialization_p = false;
23700 bool no_linkage_name = false;
23701
23702 /* While C++ inline static data members have definitions inside the
23703 class, force the first DIE to be a declaration, then let gen_member_die
23704 reparent it to the class context and call gen_variable_die again
23705 to create the outside of the class DIE for the definition. */
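  /* A hedged example (C++17, illustrative):

         struct S { static inline int x = 42; };

     first yields a declaration DIE for "x" inside the DIE for "S"; a
     second DW_TAG_variable DIE outside the class then describes the
     definition and refers back to the declaration.  */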
23706 if (!declaration
23707 && old_die == NULL
23708 && decl
23709 && DECL_CONTEXT (decl)
23710 && TYPE_P (DECL_CONTEXT (decl))
23711 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23712 {
23713 declaration = true;
23714 if (dwarf_version < 5)
23715 no_linkage_name = true;
23716 }
23717
23718 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23719 if (decl || ultimate_origin)
23720 origin = ultimate_origin;
23721 com_decl = fortran_common (decl_or_origin, &off);
23722
23723 /* Symbol in common gets emitted as a child of the common block, in the form
23724 of a data member. */
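  /* Illustrative shape of the output for a Fortran "common /blk/ a, b"
     (offsets made up):

         DW_TAG_common_block "blk"    DW_AT_location: address of blk
           DW_TAG_variable "a"        DW_AT_location: blk + 0
           DW_TAG_variable "b"        DW_AT_location: blk + 4  */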
23725 if (com_decl)
23726 {
23727 dw_die_ref com_die;
23728 dw_loc_list_ref loc = NULL;
23729 die_node com_die_arg;
23730
23731 var_die = lookup_decl_die (decl_or_origin);
23732 if (var_die)
23733 {
23734 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23735 {
23736 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23737 if (loc)
23738 {
23739 if (off)
23740 {
23741 /* Optimize the common case. */
23742 if (single_element_loc_list_p (loc)
23743 && loc->expr->dw_loc_opc == DW_OP_addr
23744 && loc->expr->dw_loc_next == NULL
23745 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23746 == SYMBOL_REF)
23747 {
23748 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23749 loc->expr->dw_loc_oprnd1.v.val_addr
23750 = plus_constant (GET_MODE (x), x , off);
23751 }
23752 else
23753 loc_list_plus_const (loc, off);
23754 }
23755 add_AT_location_description (var_die, DW_AT_location, loc);
23756 remove_AT (var_die, DW_AT_declaration);
23757 }
23758 }
23759 return;
23760 }
23761
23762 if (common_block_die_table == NULL)
23763 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23764
23765 com_die_arg.decl_id = DECL_UID (com_decl);
23766 com_die_arg.die_parent = context_die;
23767 com_die = common_block_die_table->find (&com_die_arg);
23768 if (! early_dwarf)
23769 loc = loc_list_from_tree (com_decl, 2, NULL);
23770 if (com_die == NULL)
23771 {
23772 const char *cnam
23773 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23774 die_node **slot;
23775
23776 com_die = new_die (DW_TAG_common_block, context_die, decl);
23777 add_name_and_src_coords_attributes (com_die, com_decl);
23778 if (loc)
23779 {
23780 add_AT_location_description (com_die, DW_AT_location, loc);
23781 /* Avoid sharing the same loc descriptor between
23782 DW_TAG_common_block and DW_TAG_variable. */
23783 loc = loc_list_from_tree (com_decl, 2, NULL);
23784 }
23785 else if (DECL_EXTERNAL (decl_or_origin))
23786 add_AT_flag (com_die, DW_AT_declaration, 1);
23787 if (want_pubnames ())
23788 add_pubname_string (cnam, com_die); /* ??? needed? */
23789 com_die->decl_id = DECL_UID (com_decl);
23790 slot = common_block_die_table->find_slot (com_die, INSERT);
23791 *slot = com_die;
23792 }
23793 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23794 {
23795 add_AT_location_description (com_die, DW_AT_location, loc);
23796 loc = loc_list_from_tree (com_decl, 2, NULL);
23797 remove_AT (com_die, DW_AT_declaration);
23798 }
23799 var_die = new_die (DW_TAG_variable, com_die, decl);
23800 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23801 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23802 decl_quals (decl_or_origin), false,
23803 context_die);
23804 add_alignment_attribute (var_die, decl);
23805 add_AT_flag (var_die, DW_AT_external, 1);
23806 if (loc)
23807 {
23808 if (off)
23809 {
23810 /* Optimize the common case. */
23811 if (single_element_loc_list_p (loc)
23812 && loc->expr->dw_loc_opc == DW_OP_addr
23813 && loc->expr->dw_loc_next == NULL
23814 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23815 {
23816 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23817 loc->expr->dw_loc_oprnd1.v.val_addr
23818 = plus_constant (GET_MODE (x), x, off);
23819 }
23820 else
23821 loc_list_plus_const (loc, off);
23822 }
23823 add_AT_location_description (var_die, DW_AT_location, loc);
23824 }
23825 else if (DECL_EXTERNAL (decl_or_origin))
23826 add_AT_flag (var_die, DW_AT_declaration, 1);
23827 if (decl)
23828 equate_decl_number_to_die (decl, var_die);
23829 return;
23830 }
23831
23832 if (old_die)
23833 {
23834 if (declaration)
23835 {
23836 /* A declaration that has been previously dumped needs no
23837 further annotations, since it doesn't need location info on
23838 the second pass. */
23839 return;
23840 }
23841 else if (decl_will_get_specification_p (old_die, decl, declaration)
23842 && !get_AT (old_die, DW_AT_specification))
23843 {
23844 /* Fall-thru so we can make a new variable die along with a
23845 DW_AT_specification. */
23846 }
23847 else if (origin && old_die->die_parent != context_die)
23848 {
23849 /* If we will be creating an inlined instance, we need a
23850 new DIE that will get annotated with
23851 DW_AT_abstract_origin. */
23852 gcc_assert (!DECL_ABSTRACT_P (decl));
23853 }
23854 else
23855 {
23856 /* If a DIE was dumped early, it still needs location info.
23857 Skip to where we fill the location bits. */
23858 var_die = old_die;
23859
23860 /* ??? In LTRANS we cannot annotate early created variably
23861 modified type DIEs without copying them and adjusting all
23862 references to them. Thus we dumped them again. Also add a
23863 reference to them, but beware of a -g0 compile and -g link,
23864 in which case the reference will already be present. */
23865 tree type = TREE_TYPE (decl_or_origin);
23866 if (in_lto_p
23867 && ! get_AT (var_die, DW_AT_type)
23868 && variably_modified_type_p
23869 (type, decl_function_context (decl_or_origin)))
23870 {
23871 if (decl_by_reference_p (decl_or_origin))
23872 add_type_attribute (var_die, TREE_TYPE (type),
23873 TYPE_UNQUALIFIED, false, context_die);
23874 else
23875 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23876 false, context_die);
23877 }
23878
23879 goto gen_variable_die_location;
23880 }
23881 }
23882
23883 /* For static data members, the declaration in the class is supposed
23884 to have the DW_TAG_member tag in DWARF 3 and 4, and for compatibility we
23885 emit it in DWARF 2 as well; the specification should still be DW_TAG_variable
23886 referencing the DW_TAG_member DIE. */
23887 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23888 var_die = new_die (DW_TAG_member, context_die, decl);
23889 else
23890 var_die = new_die (DW_TAG_variable, context_die, decl);
23891
23892 if (origin != NULL)
23893 add_abstract_origin_attribute (var_die, origin);
23894
23895 /* Loop unrolling can create multiple blocks that refer to the same
23896 static variable, so we must test for the DW_AT_declaration flag.
23897
23898 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23899 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23900 sharing them.
23901
23902 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23903 else if (decl_will_get_specification_p (old_die, decl, declaration))
23904 {
23905 /* This is a definition of a C++ class level static. */
23906 add_AT_specification (var_die, old_die);
23907 specialization_p = true;
23908 if (DECL_NAME (decl))
23909 {
23910 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23911 struct dwarf_file_data * file_index = lookup_filename (s.file);
23912
23913 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23914 add_AT_file (var_die, DW_AT_decl_file, file_index);
23915
23916 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23917 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23918
23919 if (debug_column_info
23920 && s.column
23921 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23922 != (unsigned) s.column))
23923 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23924
23925 if (old_die->die_tag == DW_TAG_member)
23926 add_linkage_name (var_die, decl);
23927 }
23928 }
23929 else
23930 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23931
23932 if ((origin == NULL && !specialization_p)
23933 || (origin != NULL
23934 && !DECL_ABSTRACT_P (decl_or_origin)
23935 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23936 decl_function_context
23937 (decl_or_origin))))
23938 {
23939 tree type = TREE_TYPE (decl_or_origin);
23940
23941 if (decl_by_reference_p (decl_or_origin))
23942 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23943 context_die);
23944 else
23945 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23946 context_die);
23947 }
23948
23949 if (origin == NULL && !specialization_p)
23950 {
23951 if (TREE_PUBLIC (decl))
23952 add_AT_flag (var_die, DW_AT_external, 1);
23953
23954 if (DECL_ARTIFICIAL (decl))
23955 add_AT_flag (var_die, DW_AT_artificial, 1);
23956
23957 add_alignment_attribute (var_die, decl);
23958
23959 add_accessibility_attribute (var_die, decl);
23960 }
23961
23962 if (declaration)
23963 add_AT_flag (var_die, DW_AT_declaration, 1);
23964
23965 if (decl && (DECL_ABSTRACT_P (decl)
23966 || !old_die || is_declaration_die (old_die)))
23967 equate_decl_number_to_die (decl, var_die);
23968
23969 gen_variable_die_location:
23970 if (! declaration
23971 && (! DECL_ABSTRACT_P (decl_or_origin)
23972 /* Local static vars are shared between all clones/inlines,
23973 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23974 already set. */
23975 || (VAR_P (decl_or_origin)
23976 && TREE_STATIC (decl_or_origin)
23977 && DECL_RTL_SET_P (decl_or_origin))))
23978 {
23979 if (early_dwarf)
23980 add_pubname (decl_or_origin, var_die);
23981 else
23982 add_location_or_const_value_attribute (var_die, decl_or_origin,
23983 decl == NULL);
23984 }
23985 else
23986 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23987
23988 if ((dwarf_version >= 4 || !dwarf_strict)
23989 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23990 DW_AT_const_expr) == 1
23991 && !get_AT (var_die, DW_AT_const_expr)
23992 && !specialization_p)
23993 add_AT_flag (var_die, DW_AT_const_expr, 1);
23994
23995 if (!dwarf_strict)
23996 {
23997 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23998 DW_AT_inline);
23999 if (inl != -1
24000 && !get_AT (var_die, DW_AT_inline)
24001 && !specialization_p)
24002 add_AT_unsigned (var_die, DW_AT_inline, inl);
24003 }
24004 }
24005
24006 /* Generate a DIE to represent a named constant. */
24007
24008 static void
24009 gen_const_die (tree decl, dw_die_ref context_die)
24010 {
24011 dw_die_ref const_die;
24012 tree type = TREE_TYPE (decl);
24013
24014 const_die = lookup_decl_die (decl);
24015 if (const_die)
24016 return;
24017
24018 const_die = new_die (DW_TAG_constant, context_die, decl);
24019 equate_decl_number_to_die (decl, const_die);
24020 add_name_and_src_coords_attributes (const_die, decl);
24021 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24022 if (TREE_PUBLIC (decl))
24023 add_AT_flag (const_die, DW_AT_external, 1);
24024 if (DECL_ARTIFICIAL (decl))
24025 add_AT_flag (const_die, DW_AT_artificial, 1);
24026 tree_add_const_value_attribute_for_decl (const_die, decl);
24027 }
24028
24029 /* Generate a DIE to represent a label identifier. */
24030
24031 static void
24032 gen_label_die (tree decl, dw_die_ref context_die)
24033 {
24034 tree origin = decl_ultimate_origin (decl);
24035 dw_die_ref lbl_die = lookup_decl_die (decl);
24036 rtx insn;
24037 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24038
24039 if (!lbl_die)
24040 {
24041 lbl_die = new_die (DW_TAG_label, context_die, decl);
24042 equate_decl_number_to_die (decl, lbl_die);
24043
24044 if (origin != NULL)
24045 add_abstract_origin_attribute (lbl_die, origin);
24046 else
24047 add_name_and_src_coords_attributes (lbl_die, decl);
24048 }
24049
24050 if (DECL_ABSTRACT_P (decl))
24051 equate_decl_number_to_die (decl, lbl_die);
24052 else if (! early_dwarf)
24053 {
24054 insn = DECL_RTL_IF_SET (decl);
24055
24056 /* Deleted labels are programmer-specified labels which have been
24057 eliminated because of various optimizations. We still emit them
24058 here so that it is possible to put breakpoints on them. */
24059 if (insn
24060 && (LABEL_P (insn)
24061 || ((NOTE_P (insn)
24062 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24063 {
24064 /* When optimization is enabled (via -O) some parts of the compiler
24065 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns that
24066 represent source-level labels explicitly declared by the user.
24067 This really shouldn't be happening, though, so catch it if it
24068 ever does. */
24069 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24070
24071 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24072 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24073 }
24074 else if (insn
24075 && NOTE_P (insn)
24076 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24077 && CODE_LABEL_NUMBER (insn) != -1)
24078 {
24079 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24080 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24081 }
24082 }
24083 }
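/* A quick recap of the cases above (a sketch, not new behavior): a label
   whose CODE_LABEL insn is still live, or one demoted to
   NOTE_INSN_DELETED_LABEL, gets an "L<n>" symbol as its DW_AT_low_pc; a
   label demoted to NOTE_INSN_DELETED_DEBUG_LABEL gets an "LDL<n>" symbol
   instead.  Either way the DW_TAG_label DIE keeps an address, so a debugger
   can still place a breakpoint on the (possibly optimized-away) label.  */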
24084
24085 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24086 attributes to the DIE for a block STMT, to describe where the inlined
24087 function was called from. This is similar to add_src_coords_attributes. */
24088
24089 static inline void
24090 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24091 {
24092 /* We can end up with BUILTINS_LOCATION here. */
24093 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24094 return;
24095
24096 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24097
24098 if (dwarf_version >= 3 || !dwarf_strict)
24099 {
24100 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24101 add_AT_unsigned (die, DW_AT_call_line, s.line);
24102 if (debug_column_info && s.column)
24103 add_AT_unsigned (die, DW_AT_call_column, s.column);
24104 }
24105 }
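/* For example (source location is hypothetical): if a call to an inlined
   function appears at foo.c line 42, column 7, the DW_TAG_inlined_subroutine
   DIE for that inlining gets roughly:
       DW_AT_call_file   : <file table index for "foo.c">
       DW_AT_call_line   : 42
       DW_AT_call_column : 7   (only with -gcolumn-info)
   provided we target DWARF 3+ or -gno-strict-dwarf, as checked above.  */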
24106
24107
24108 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24109 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24110
24111 static inline void
24112 add_high_low_attributes (tree stmt, dw_die_ref die)
24113 {
24114 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24115
24116 if (inline_entry_data **iedp
24117 = !inline_entry_data_table ? NULL
24118 : inline_entry_data_table->find_slot_with_hash (stmt,
24119 htab_hash_pointer (stmt),
24120 NO_INSERT))
24121 {
24122 inline_entry_data *ied = *iedp;
24123 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24124 gcc_assert (debug_inline_points);
24125 gcc_assert (inlined_function_outer_scope_p (stmt));
24126
24127 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24128 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24129
24130 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24131 && !dwarf_strict)
24132 {
24133 if (!output_asm_line_debug_info ())
24134 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24135 else
24136 {
24137 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24138 /* FIXME: this will resolve to a small number. Could we
24139 possibly emit smaller data? Ideally we'd emit a
24140 uleb128, but that would make the size of DIEs
24141 impossible for the compiler to compute, since it's
24142 the assembler that computes the value of the view
24143 label in this case. Ideally, we'd have a single form
24144 encompassing both the address and the view, and
24145 indirecting them through a table might make things
24146 easier, but even that would be more wasteful,
24147 space-wise, than what we have now. */
24148 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24149 }
24150 }
24151
24152 inline_entry_data_table->clear_slot (iedp);
24153 }
24154
24155 if (BLOCK_FRAGMENT_CHAIN (stmt)
24156 && (dwarf_version >= 3 || !dwarf_strict))
24157 {
24158 tree chain, superblock = NULL_TREE;
24159 dw_die_ref pdie;
24160 dw_attr_node *attr = NULL;
24161
24162 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24163 {
24164 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24165 BLOCK_NUMBER (stmt));
24166 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24167 }
24168
24169 /* Optimize duplicate .debug_ranges lists or even tails of
24170 lists. If this BLOCK has the same ranges as its supercontext,
24171 look up the DW_AT_ranges attribute in the supercontext (and
24172 recursively so), verify that the ranges_table contains the
24173 right values and reuse it instead of adding a new .debug_ranges list. */
24174 for (chain = stmt, pdie = die;
24175 BLOCK_SAME_RANGE (chain);
24176 chain = BLOCK_SUPERCONTEXT (chain))
24177 {
24178 dw_attr_node *new_attr;
24179
24180 pdie = pdie->die_parent;
24181 if (pdie == NULL)
24182 break;
24183 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24184 break;
24185 new_attr = get_AT (pdie, DW_AT_ranges);
24186 if (new_attr == NULL
24187 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24188 break;
24189 attr = new_attr;
24190 superblock = BLOCK_SUPERCONTEXT (chain);
24191 }
24192 if (attr != NULL
24193 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24194 == (int)BLOCK_NUMBER (superblock))
24195 && BLOCK_FRAGMENT_CHAIN (superblock))
24196 {
24197 unsigned long off = attr->dw_attr_val.v.val_offset;
24198 unsigned long supercnt = 0, thiscnt = 0;
24199 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24200 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24201 {
24202 ++supercnt;
24203 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24204 == (int)BLOCK_NUMBER (chain));
24205 }
24206 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24207 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24208 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24209 ++thiscnt;
24210 gcc_assert (supercnt >= thiscnt);
24211 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24212 false);
24213 note_rnglist_head (off + supercnt - thiscnt);
24214 return;
24215 }
24216
24217 unsigned int offset = add_ranges (stmt, true);
24218 add_AT_range_list (die, DW_AT_ranges, offset, false);
24219 note_rnglist_head (offset);
24220
24221 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24222 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24223 do
24224 {
24225 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24226 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24227 chain = BLOCK_FRAGMENT_CHAIN (chain);
24228 }
24229 while (chain);
24230 add_ranges (NULL);
24231 }
24232 else
24233 {
24234 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24235 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24236 BLOCK_NUMBER (stmt));
24237 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24238 BLOCK_NUMBER (stmt));
24239 add_AT_low_high_pc (die, label, label_high, false);
24240 }
24241 }
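/* In other words (an illustrative sketch): when this BLOCK covers exactly
   the trailing fragments of its supercontext (a common result of hot/cold
   partitioning), we reuse the supercontext's already-emitted .debug_ranges
   entries by pointing DW_AT_ranges at an offset inside that list, instead
   of emitting a duplicate list; the assertions above verify that the shared
   tail really matches.  */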
24242
24243 /* Generate a DIE for a lexical block. */
24244
24245 static void
24246 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24247 {
24248 dw_die_ref old_die = lookup_block_die (stmt);
24249 dw_die_ref stmt_die = NULL;
24250 if (!old_die)
24251 {
24252 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24253 equate_block_to_die (stmt, stmt_die);
24254 }
24255
24256 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24257 {
24258 /* If this is an inlined or concrete instance, create a new lexical
24259 block DIE for anything below to attach DW_AT_abstract_origin to. */
24260 if (old_die)
24261 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24262
24263 tree origin = block_ultimate_origin (stmt);
24264 if (origin != NULL_TREE && (origin != stmt || old_die))
24265 add_abstract_origin_attribute (stmt_die, origin);
24266
24267 old_die = NULL;
24268 }
24269
24270 if (old_die)
24271 stmt_die = old_die;
24272
24273 /* A non-abstract block whose blocks have already been reordered
24274 should have the instruction range for this block. If so, set the
24275 high/low attributes. */
24276 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24277 {
24278 gcc_assert (stmt_die);
24279 add_high_low_attributes (stmt, stmt_die);
24280 }
24281
24282 decls_for_scope (stmt, stmt_die);
24283 }
24284
24285 /* Generate a DIE for an inlined subprogram. */
24286
24287 static void
24288 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24289 {
24290 tree decl = block_ultimate_origin (stmt);
24291
24292 /* Make sure any inlined functions are known to be inlineable. */
24293 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24294 || cgraph_function_possibly_inlined_p (decl));
24295
24296 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24297
24298 if (call_arg_locations || debug_inline_points)
24299 equate_block_to_die (stmt, subr_die);
24300 add_abstract_origin_attribute (subr_die, decl);
24301 if (TREE_ASM_WRITTEN (stmt))
24302 add_high_low_attributes (stmt, subr_die);
24303 add_call_src_coords_attributes (stmt, subr_die);
24304
24305 /* The inliner creates an extra BLOCK for the parameter setup,
24306 we want to merge that with the actual outermost BLOCK of the
24307 inlined function to avoid duplicate locals in consumers.
24308 Do that by doing the recursion to subblocks on the single subblock
24309 of STMT. */
24310 bool unwrap_one = false;
24311 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24312 {
24313 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24314 if (origin
24315 && TREE_CODE (origin) == BLOCK
24316 && BLOCK_SUPERCONTEXT (origin) == decl)
24317 unwrap_one = true;
24318 }
24319 decls_for_scope (stmt, subr_die, !unwrap_one);
24320 if (unwrap_one)
24321 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24322 }
24323
24324 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24325 the comment for VLR_CONTEXT. */
24326
24327 static void
24328 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24329 {
24330 dw_die_ref decl_die;
24331
24332 if (TREE_TYPE (decl) == error_mark_node)
24333 return;
24334
24335 decl_die = new_die (DW_TAG_member, context_die, decl);
24336 add_name_and_src_coords_attributes (decl_die, decl);
24337 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24338 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24339 context_die);
24340
24341 if (DECL_BIT_FIELD_TYPE (decl))
24342 {
24343 add_byte_size_attribute (decl_die, decl);
24344 add_bit_size_attribute (decl_die, decl);
24345 add_bit_offset_attribute (decl_die, decl, ctx);
24346 }
24347
24348 add_alignment_attribute (decl_die, decl);
24349
24350 /* If we have a variant part offset, then we are supposed to process a member
24351 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24352 trees. */
24353 gcc_assert (ctx->variant_part_offset == NULL_TREE
24354 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24355 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24356 add_data_member_location_attribute (decl_die, decl, ctx);
24357
24358 if (DECL_ARTIFICIAL (decl))
24359 add_AT_flag (decl_die, DW_AT_artificial, 1);
24360
24361 add_accessibility_attribute (decl_die, decl);
24362
24363 /* Equate decl number to die, so that we can look up this decl later on. */
24364 equate_decl_number_to_die (decl, decl_die);
24365 }
24366
24367 /* Generate a DIE for a pointer to a member type. TYPE can be an
24368 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24369 pointer to member function. */
24370
24371 static void
24372 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24373 {
24374 if (lookup_type_die (type))
24375 return;
24376
24377 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24378 scope_die_for (type, context_die), type);
24379
24380 equate_type_number_to_die (type, ptr_die);
24381 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24382 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24383 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24384 context_die);
24385 add_alignment_attribute (ptr_die, type);
24386
24387 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24388 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24389 {
24390 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24391 add_AT_loc (ptr_die, DW_AT_use_location, op);
24392 }
24393 }
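/* Illustration (hypothetical C++ source): for a pointer to data member such
   as "int S::*pd;" TYPE is an OFFSET_TYPE whose TREE_TYPE is "int", so the
   DIE built above gets DW_AT_containing_type pointing at S's DIE plus the
   one-operation DW_OP_plus DW_AT_use_location, which lets a consumer add
   the stored offset to an object address when dereferencing PD.  */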
24394
24395 static char *producer_string;
24396
24397 /* Return a heap-allocated producer string, including the command-line
24398 options when -grecord-gcc-switches is enabled. */
24399
24400 static char *
24401 gen_producer_string (void)
24402 {
24403 size_t j;
24404 auto_vec<const char *> switches;
24405 const char *language_string = lang_hooks.name;
24406 char *producer, *tail;
24407 const char *p;
24408 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24409 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24410
24411 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24412 switch (save_decoded_options[j].opt_index)
24413 {
24414 case OPT_o:
24415 case OPT_d:
24416 case OPT_dumpbase:
24417 case OPT_dumpdir:
24418 case OPT_auxbase:
24419 case OPT_auxbase_strip:
24420 case OPT_quiet:
24421 case OPT_version:
24422 case OPT_v:
24423 case OPT_w:
24424 case OPT_L:
24425 case OPT_D:
24426 case OPT_I:
24427 case OPT_U:
24428 case OPT_SPECIAL_unknown:
24429 case OPT_SPECIAL_ignore:
24430 case OPT_SPECIAL_deprecated:
24431 case OPT_SPECIAL_program_name:
24432 case OPT_SPECIAL_input_file:
24433 case OPT_grecord_gcc_switches:
24434 case OPT__output_pch_:
24435 case OPT_fdiagnostics_show_location_:
24436 case OPT_fdiagnostics_show_option:
24437 case OPT_fdiagnostics_show_caret:
24438 case OPT_fdiagnostics_show_labels:
24439 case OPT_fdiagnostics_show_line_numbers:
24440 case OPT_fdiagnostics_color_:
24441 case OPT_fdiagnostics_format_:
24442 case OPT_fverbose_asm:
24443 case OPT____:
24444 case OPT__sysroot_:
24445 case OPT_nostdinc:
24446 case OPT_nostdinc__:
24447 case OPT_fpreprocessed:
24448 case OPT_fltrans_output_list_:
24449 case OPT_fresolution_:
24450 case OPT_fdebug_prefix_map_:
24451 case OPT_fmacro_prefix_map_:
24452 case OPT_ffile_prefix_map_:
24453 case OPT_fcompare_debug:
24454 case OPT_fchecking:
24455 case OPT_fchecking_:
24456 /* Ignore these. */
24457 continue;
24458 default:
24459 if (cl_options[save_decoded_options[j].opt_index].flags
24460 & CL_NO_DWARF_RECORD)
24461 continue;
24462 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24463 == '-');
24464 switch (save_decoded_options[j].canonical_option[0][1])
24465 {
24466 case 'M':
24467 case 'i':
24468 case 'W':
24469 continue;
24470 case 'f':
24471 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24472 "dump", 4) == 0)
24473 continue;
24474 break;
24475 default:
24476 break;
24477 }
24478 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24479 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24480 break;
24481 }
24482
24483 producer = XNEWVEC (char, plen + 1 + len + 1);
24484 tail = producer;
24485 sprintf (tail, "%s %s", language_string, version_string);
24486 tail += plen;
24487
24488 FOR_EACH_VEC_ELT (switches, j, p)
24489 {
24490 len = strlen (p);
24491 *tail = ' ';
24492 memcpy (tail + 1, p, len);
24493 tail += len + 1;
24494 }
24495
24496 *tail = '\0';
24497 return producer;
24498 }
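/* The result starts with the language and version strings and, when
   -grecord-gcc-switches is enabled (it is by default), appends the
   recorded options, e.g. something like
       "GNU C17 9.0.1 -mtune=generic -march=x86-64 -g -O2"
   (the version number and switches here are purely illustrative).  */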
24499
24500 /* Given two C and/or C++ language/version strings, return the "highest".
24501 C++ is assumed to be "higher" than C in this case. Used for merging
24502 LTO translation unit languages. */
24503 static const char *
24504 highest_c_language (const char *lang1, const char *lang2)
24505 {
24506 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24507 return "GNU C++17";
24508 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24509 return "GNU C++14";
24510 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24511 return "GNU C++11";
24512 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24513 return "GNU C++98";
24514
24515 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24516 return "GNU C2X";
24517 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24518 return "GNU C17";
24519 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24520 return "GNU C11";
24521 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24522 return "GNU C99";
24523 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24524 return "GNU C89";
24525
24526 gcc_unreachable ();
24527 }
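/* For instance (purely illustrative inputs):
     highest_c_language ("GNU C++14", "GNU C99") returns "GNU C++14"
     highest_c_language ("GNU C11", "GNU C89") returns "GNU C11"
   If neither argument is one of the known strings we reach the
   gcc_unreachable above.  */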
24528
24529
24530 /* Generate the DIE for the compilation unit. */
24531
24532 static dw_die_ref
24533 gen_compile_unit_die (const char *filename)
24534 {
24535 dw_die_ref die;
24536 const char *language_string = lang_hooks.name;
24537 int language;
24538
24539 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24540
24541 if (filename)
24542 {
24543 add_name_attribute (die, filename);
24544 /* Don't add cwd for <built-in>. */
24545 if (filename[0] != '<')
24546 add_comp_dir_attribute (die);
24547 }
24548
24549 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24550
24551 /* If our producer is LTO try to figure out a common language to use
24552 from the global list of translation units. */
24553 if (strcmp (language_string, "GNU GIMPLE") == 0)
24554 {
24555 unsigned i;
24556 tree t;
24557 const char *common_lang = NULL;
24558
24559 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24560 {
24561 if (!TRANSLATION_UNIT_LANGUAGE (t))
24562 continue;
24563 if (!common_lang)
24564 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24565 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24566 ;
24567 else if (strncmp (common_lang, "GNU C", 5) == 0
24568 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24569 /* Mixing C and C++ is ok, use C++ in that case. */
24570 common_lang = highest_c_language (common_lang,
24571 TRANSLATION_UNIT_LANGUAGE (t));
24572 else
24573 {
24574 /* Fall back to C. */
24575 common_lang = NULL;
24576 break;
24577 }
24578 }
24579
24580 if (common_lang)
24581 language_string = common_lang;
24582 }
24583
24584 language = DW_LANG_C;
24585 if (strncmp (language_string, "GNU C", 5) == 0
24586 && ISDIGIT (language_string[5]))
24587 {
24588 language = DW_LANG_C89;
24589 if (dwarf_version >= 3 || !dwarf_strict)
24590 {
24591 if (strcmp (language_string, "GNU C89") != 0)
24592 language = DW_LANG_C99;
24593
24594 if (dwarf_version >= 5 /* || !dwarf_strict */)
24595 if (strcmp (language_string, "GNU C11") == 0
24596 || strcmp (language_string, "GNU C17") == 0
24597 || strcmp (language_string, "GNU C2X"))
24598 language = DW_LANG_C11;
24599 }
24600 }
24601 else if (strncmp (language_string, "GNU C++", 7) == 0)
24602 {
24603 language = DW_LANG_C_plus_plus;
24604 if (dwarf_version >= 5 /* || !dwarf_strict */)
24605 {
24606 if (strcmp (language_string, "GNU C++11") == 0)
24607 language = DW_LANG_C_plus_plus_11;
24608 else if (strcmp (language_string, "GNU C++14") == 0)
24609 language = DW_LANG_C_plus_plus_14;
24610 else if (strcmp (language_string, "GNU C++17") == 0)
24611 /* For now. */
24612 language = DW_LANG_C_plus_plus_14;
24613 }
24614 }
24615 else if (strcmp (language_string, "GNU F77") == 0)
24616 language = DW_LANG_Fortran77;
24617 else if (dwarf_version >= 3 || !dwarf_strict)
24618 {
24619 if (strcmp (language_string, "GNU Ada") == 0)
24620 language = DW_LANG_Ada95;
24621 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24622 {
24623 language = DW_LANG_Fortran95;
24624 if (dwarf_version >= 5 /* || !dwarf_strict */)
24625 {
24626 if (strcmp (language_string, "GNU Fortran2003") == 0)
24627 language = DW_LANG_Fortran03;
24628 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24629 language = DW_LANG_Fortran08;
24630 }
24631 }
24632 else if (strcmp (language_string, "GNU Objective-C") == 0)
24633 language = DW_LANG_ObjC;
24634 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24635 language = DW_LANG_ObjC_plus_plus;
24636 else if (strcmp (language_string, "GNU D") == 0)
24637 language = DW_LANG_D;
24638 else if (dwarf_version >= 5 || !dwarf_strict)
24639 {
24640 if (strcmp (language_string, "GNU Go") == 0)
24641 language = DW_LANG_Go;
24642 }
24643 }
24644 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24645 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24646 language = DW_LANG_Fortran90;
24647 /* Likewise for Ada. */
24648 else if (strcmp (language_string, "GNU Ada") == 0)
24649 language = DW_LANG_Ada83;
24650
24651 add_AT_unsigned (die, DW_AT_language, language);
24652
24653 switch (language)
24654 {
24655 case DW_LANG_Fortran77:
24656 case DW_LANG_Fortran90:
24657 case DW_LANG_Fortran95:
24658 case DW_LANG_Fortran03:
24659 case DW_LANG_Fortran08:
24660 /* Fortran has case-insensitive identifiers and the front-end
24661 lowercases everything. */
24662 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24663 break;
24664 default:
24665 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24666 break;
24667 }
24668 return die;
24669 }
24670
24671 /* Generate the DIE for a base class. */
24672
24673 static void
24674 gen_inheritance_die (tree binfo, tree access, tree type,
24675 dw_die_ref context_die)
24676 {
24677 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24678 struct vlr_context ctx = { type, NULL };
24679
24680 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24681 context_die);
24682 add_data_member_location_attribute (die, binfo, &ctx);
24683
24684 if (BINFO_VIRTUAL_P (binfo))
24685 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24686
24687 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24688 children, otherwise the default is DW_ACCESS_public. In DWARF2
24689 the default has always been DW_ACCESS_private. */
24690 if (access == access_public_node)
24691 {
24692 if (dwarf_version == 2
24693 || context_die->die_tag == DW_TAG_class_type)
24694 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24695 }
24696 else if (access == access_protected_node)
24697 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24698 else if (dwarf_version > 2
24699 && context_die->die_tag != DW_TAG_class_type)
24700 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24701 }
24702
24703 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24704 structure. */
24705
24706 static bool
24707 is_variant_part (tree decl)
24708 {
24709 return (TREE_CODE (decl) == FIELD_DECL
24710 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24711 }
24712
24713 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24714 return the FIELD_DECL. Return NULL_TREE otherwise. */
24715
24716 static tree
24717 analyze_discr_in_predicate (tree operand, tree struct_type)
24718 {
24719 while (CONVERT_EXPR_P (operand))
24720 operand = TREE_OPERAND (operand, 0);
24721
24722 /* Match field access to members of struct_type only. */
24723 if (TREE_CODE (operand) == COMPONENT_REF
24724 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24725 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24726 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24727 return TREE_OPERAND (operand, 1);
24728 else
24729 return NULL_TREE;
24730 }
24731
24732 /* Check that SRC is a constant integer that can be represented as a native
24733 integer constant (either signed or unsigned). If so, store it into DEST and
24734 return true. Return false otherwise. */
24735
24736 static bool
24737 get_discr_value (tree src, dw_discr_value *dest)
24738 {
24739 tree discr_type = TREE_TYPE (src);
24740
24741 if (lang_hooks.types.get_debug_type)
24742 {
24743 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24744 if (debug_type != NULL)
24745 discr_type = debug_type;
24746 }
24747
24748 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24749 return false;
24750
24751 /* Signedness can vary between the original type and the debug type. This
24752 can happen for character types in Ada for instance: the character type
24753 used for code generation can be signed, to be compatible with the C one,
24754 but from a debugger point of view, it must be unsigned. */
24755 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24756 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24757
24758 if (is_orig_unsigned != is_debug_unsigned)
24759 src = fold_convert (discr_type, src);
24760
24761 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24762 return false;
24763
24764 dest->pos = is_debug_unsigned;
24765 if (is_debug_unsigned)
24766 dest->v.uval = tree_to_uhwi (src);
24767 else
24768 dest->v.sval = tree_to_shwi (src);
24769
24770 return true;
24771 }
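/* Small worked example (hypothetical Ada-like character discriminant): if
   SRC is the INTEGER_CST -56 of a signed 8-bit type whose debug type is
   unsigned, the fold_convert above turns it into 200, DEST->pos records
   that the value is unsigned, and DEST->v.uval becomes 200.  */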
24772
24773 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24774 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24775 store NULL_TREE in DISCR_DECL. Otherwise:
24776
24777 - store the discriminant field in STRUCT_TYPE that controls the variant
24778 part to *DISCR_DECL
24779
24780 - put in *DISCR_LISTS_P an array where for each variant, the item
24781 represents the corresponding matching list of discriminant values.
24782
24783 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24784 the above array.
24785
24786 Note that when the array is allocated (i.e. when the analysis is
24787 successful), it is up to the caller to free the array. */
24788
24789 static void
24790 analyze_variants_discr (tree variant_part_decl,
24791 tree struct_type,
24792 tree *discr_decl,
24793 dw_discr_list_ref **discr_lists_p,
24794 unsigned *discr_lists_length)
24795 {
24796 tree variant_part_type = TREE_TYPE (variant_part_decl);
24797 tree variant;
24798 dw_discr_list_ref *discr_lists;
24799 unsigned i;
24800
24801 /* Compute how many variants there are in this variant part. */
24802 *discr_lists_length = 0;
24803 for (variant = TYPE_FIELDS (variant_part_type);
24804 variant != NULL_TREE;
24805 variant = DECL_CHAIN (variant))
24806 ++*discr_lists_length;
24807
24808 *discr_decl = NULL_TREE;
24809 *discr_lists_p
24810 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24811 sizeof (**discr_lists_p));
24812 discr_lists = *discr_lists_p;
24813
24814 /* And then analyze all variants to extract discriminant information for all
24815 of them. This analysis is conservative: as soon as we detect something we
24816 do not support, abort everything and pretend we found nothing. */
24817 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24818 variant != NULL_TREE;
24819 variant = DECL_CHAIN (variant), ++i)
24820 {
24821 tree match_expr = DECL_QUALIFIER (variant);
24822
24823 /* Now, try to analyze the predicate and deduce a discriminant for
24824 it. */
24825 if (match_expr == boolean_true_node)
24826 /* Typically happens for the default variant: it matches all cases that
24827 previous variants rejected. Don't output any matching value for
24828 this one. */
24829 continue;
24830
24831 /* The following loop tries to iterate over each discriminant
24832 possibility: single values or ranges. */
24833 while (match_expr != NULL_TREE)
24834 {
24835 tree next_round_match_expr;
24836 tree candidate_discr = NULL_TREE;
24837 dw_discr_list_ref new_node = NULL;
24838
24839 /* Possibilities are matched one after the other by nested
24840 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24841 continue with the rest at next iteration. */
24842 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24843 {
24844 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24845 match_expr = TREE_OPERAND (match_expr, 1);
24846 }
24847 else
24848 next_round_match_expr = NULL_TREE;
24849
24850 if (match_expr == boolean_false_node)
24851 /* This sub-expression matches nothing: just wait for the next
24852 one. */
24853 ;
24854
24855 else if (TREE_CODE (match_expr) == EQ_EXPR)
24856 {
24857 /* We are matching: <discr_field> == <integer_cst>
24858 This sub-expression matches a single value. */
24859 tree integer_cst = TREE_OPERAND (match_expr, 1);
24860
24861 candidate_discr
24862 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24863 struct_type);
24864
24865 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24866 if (!get_discr_value (integer_cst,
24867 &new_node->dw_discr_lower_bound))
24868 goto abort;
24869 new_node->dw_discr_range = false;
24870 }
24871
24872 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24873 {
24874 /* We are matching:
24875 <discr_field> > <integer_cst>
24876 && <discr_field> < <integer_cst>.
24877 This sub-expression matches the range of values between the
24878 two matched integer constants. Note that comparisons can be
24879 inclusive or exclusive. */
24880 tree candidate_discr_1, candidate_discr_2;
24881 tree lower_cst, upper_cst;
24882 bool lower_cst_included, upper_cst_included;
24883 tree lower_op = TREE_OPERAND (match_expr, 0);
24884 tree upper_op = TREE_OPERAND (match_expr, 1);
24885
24886 /* When the comparison is exclusive, the integer constant is not
24887 the discriminant range bound we are looking for: we will have
24888 to increment or decrement it. */
24889 if (TREE_CODE (lower_op) == GE_EXPR)
24890 lower_cst_included = true;
24891 else if (TREE_CODE (lower_op) == GT_EXPR)
24892 lower_cst_included = false;
24893 else
24894 goto abort;
24895
24896 if (TREE_CODE (upper_op) == LE_EXPR)
24897 upper_cst_included = true;
24898 else if (TREE_CODE (upper_op) == LT_EXPR)
24899 upper_cst_included = false;
24900 else
24901 goto abort;
24902
24903 /* Extract the discriminant from the first operand and check it
24904 is consistent with the same analysis in the second
24905 operand. */
24906 candidate_discr_1
24907 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24908 struct_type);
24909 candidate_discr_2
24910 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24911 struct_type);
24912 if (candidate_discr_1 == candidate_discr_2)
24913 candidate_discr = candidate_discr_1;
24914 else
24915 goto abort;
24916
24917 /* Extract bounds from both. */
24918 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24919 lower_cst = TREE_OPERAND (lower_op, 1);
24920 upper_cst = TREE_OPERAND (upper_op, 1);
24921
24922 if (!lower_cst_included)
24923 lower_cst
24924 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24925 build_int_cst (TREE_TYPE (lower_cst), 1));
24926 if (!upper_cst_included)
24927 upper_cst
24928 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24929 build_int_cst (TREE_TYPE (upper_cst), 1));
24930
24931 if (!get_discr_value (lower_cst,
24932 &new_node->dw_discr_lower_bound)
24933 || !get_discr_value (upper_cst,
24934 &new_node->dw_discr_upper_bound))
24935 goto abort;
24936
24937 new_node->dw_discr_range = true;
24938 }
24939
24940 else if ((candidate_discr
24941 = analyze_discr_in_predicate (match_expr, struct_type))
24942 && TREE_TYPE (candidate_discr) == boolean_type_node)
24943 {
24944 /* We are matching: <discr_field> for a boolean discriminant.
24945 This sub-expression matches boolean_true_node. */
24946 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24947 if (!get_discr_value (boolean_true_node,
24948 &new_node->dw_discr_lower_bound))
24949 goto abort;
24950 new_node->dw_discr_range = false;
24951 }
24952
24953 else
24954 /* Unsupported sub-expression: we cannot determine the set of
24955 matching discriminant values. Abort everything. */
24956 goto abort;
24957
24958 /* If the discriminant info is not consistent with what we saw so
24959 far, consider the analysis failed and abort everything. */
24960 if (candidate_discr == NULL_TREE
24961 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24962 goto abort;
24963 else
24964 *discr_decl = candidate_discr;
24965
24966 if (new_node != NULL)
24967 {
24968 new_node->dw_discr_next = discr_lists[i];
24969 discr_lists[i] = new_node;
24970 }
24971 match_expr = next_round_match_expr;
24972 }
24973 }
24974
24975 /* If we reach this point, we could match everything we were interested
24976 in. */
24977 return;
24978
24979 abort:
24980 /* Clean all data structure and return no result. */
24981 free (*discr_lists_p);
24982 *discr_lists_p = NULL;
24983 *discr_decl = NULL_TREE;
24984 }
24985
24986 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24987 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24988 under CONTEXT_DIE.
24989
24990 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24991 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24992 this type, which are record types, represent the available variants and each
24993 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24994 values are inferred from these attributes.
24995
24996 In trees, the offsets for the fields inside these sub-records are relative
24997 to the variant part itself, whereas the corresponding DIEs should have
24998 offset attributes that are relative to the embedding record base address.
24999 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
25000 must be an expression that computes the offset of the variant part to
25001 describe in DWARF. */
25002
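/* A sketch of the intended output (Ada-like source, names hypothetical):

     type Rec (K : Integer) is record
        case K is
           when 1 .. 4 => A : Integer;
           when others => B : Float;
        end case;
     end record;

   should roughly produce:

     DW_TAG_structure_type "rec"
       DW_TAG_member "k"                    <- the discriminant
       DW_TAG_variant_part
         DW_AT_discr -> DIE of "k"
         DW_TAG_variant
           DW_AT_discr_list (range 1 .. 4)
           DW_TAG_member "a"
         DW_TAG_variant                     <- default: no discr value/list
           DW_TAG_member "b"

   with all member offsets expressed relative to the enclosing record's base
   address, as described above.  */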
25003 static void
25004 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
25005 dw_die_ref context_die)
25006 {
25007 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25008 tree variant_part_offset = vlr_ctx->variant_part_offset;
25009 struct loc_descr_context ctx = {
25010 vlr_ctx->struct_type, /* context_type */
25011 NULL_TREE, /* base_decl */
25012 NULL, /* dpi */
25013 false, /* placeholder_arg */
25014 false /* placeholder_seen */
25015 };
25016
25017 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25018 NULL_TREE if there is no such field. */
25019 tree discr_decl = NULL_TREE;
25020 dw_discr_list_ref *discr_lists;
25021 unsigned discr_lists_length = 0;
25022 unsigned i;
25023
25024 dw_die_ref dwarf_proc_die = NULL;
25025 dw_die_ref variant_part_die
25026 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25027
25028 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25029
25030 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25031 &discr_decl, &discr_lists, &discr_lists_length);
25032
25033 if (discr_decl != NULL_TREE)
25034 {
25035 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25036
25037 if (discr_die)
25038 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25039 else
25040 /* We have no DIE for the discriminant, so just discard all
25041 discriminant information in the output. */
25042 discr_decl = NULL_TREE;
25043 }
25044
25045 /* If the offset for this variant part is more complex than a constant,
25046 create a DWARF procedure for it so that we will not have to generate DWARF
25047 expressions for it for each member. */
25048 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25049 && (dwarf_version >= 3 || !dwarf_strict))
25050 {
25051 const tree dwarf_proc_fndecl
25052 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25053 build_function_type (TREE_TYPE (variant_part_offset),
25054 NULL_TREE));
25055 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25056 const dw_loc_descr_ref dwarf_proc_body
25057 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25058
25059 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25060 dwarf_proc_fndecl, context_die);
25061 if (dwarf_proc_die != NULL)
25062 variant_part_offset = dwarf_proc_call;
25063 }
25064
25065 /* Output DIEs for all variants. */
25066 i = 0;
25067 for (tree variant = TYPE_FIELDS (variant_part_type);
25068 variant != NULL_TREE;
25069 variant = DECL_CHAIN (variant), ++i)
25070 {
25071 tree variant_type = TREE_TYPE (variant);
25072 dw_die_ref variant_die;
25073
25074 /* All variants (i.e. members of a variant part) are supposed to be
25075 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25076 under these records. */
25077 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25078
25079 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25080 equate_decl_number_to_die (variant, variant_die);
25081
25082 /* Output discriminant values this variant matches, if any. */
25083 if (discr_decl == NULL || discr_lists[i] == NULL)
25084 /* Either there is no discriminant information at all, or this is
25085 probably the default variant: as the standard says, don't output
25086 any discriminant value/list attribute. */
25087 ;
25088 else if (discr_lists[i]->dw_discr_next == NULL
25089 && !discr_lists[i]->dw_discr_range)
25090 /* If there is only one accepted value, don't bother outputting a
25091 list. */
25092 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25093 else
25094 add_discr_list (variant_die, discr_lists[i]);
25095
25096 for (tree member = TYPE_FIELDS (variant_type);
25097 member != NULL_TREE;
25098 member = DECL_CHAIN (member))
25099 {
25100 struct vlr_context vlr_sub_ctx = {
25101 vlr_ctx->struct_type, /* struct_type */
25102 NULL /* variant_part_offset */
25103 };
25104 if (is_variant_part (member))
25105 {
25106 /* All offsets for fields inside variant parts are relative to
25107 the top-level embedding RECORD_TYPE's base address. On the
25108 other hand, offsets in GCC's types are relative to the
25109 most deeply nested variant part. So we have to sum offsets each time
25110 we recurse. */
25111
25112 vlr_sub_ctx.variant_part_offset
25113 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25114 variant_part_offset, byte_position (member));
25115 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25116 }
25117 else
25118 {
25119 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25120 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25121 }
25122 }
25123 }
25124
25125 free (discr_lists);
25126 }
25127
25128 /* Generate a DIE for a class member. */
25129
25130 static void
25131 gen_member_die (tree type, dw_die_ref context_die)
25132 {
25133 tree member;
25134 tree binfo = TYPE_BINFO (type);
25135
25136 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25137
25138 /* If this is not an incomplete type, output descriptions of each of its
25139 members. Note that as we output the DIEs necessary to represent the
25140 members of this record or union type, we will also be trying to output
25141 DIEs to represent the *types* of those members. However the `type'
25142 function (above) will specifically avoid generating type DIEs for member
25143 types *within* the list of member DIEs for this (containing) type except
25144 for those types (of members) which are explicitly marked as also being
25145 members of this (containing) type themselves. The g++ front end can
25146 force any given type to be treated as a member of some other (containing)
25147 type by setting the TYPE_CONTEXT of the given (member) type to point to
25148 the TREE node representing the appropriate (containing) type. */
25149
25150 /* First output info about the base classes. */
25151 if (binfo && early_dwarf)
25152 {
25153 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25154 int i;
25155 tree base;
25156
25157 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25158 gen_inheritance_die (base,
25159 (accesses ? (*accesses)[i] : access_public_node),
25160 type,
25161 context_die);
25162 }
25163
25164 /* Now output info about the data members and type members. */
25165 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25166 {
25167 struct vlr_context vlr_ctx = { type, NULL_TREE };
25168 bool static_inline_p
25169 = (TREE_STATIC (member)
25170 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25171 != -1));
25172
25173 /* Ignore clones. */
25174 if (DECL_ABSTRACT_ORIGIN (member))
25175 continue;
25176
25177 /* If we thought we were generating minimal debug info for TYPE
25178 and then changed our minds, some of the member declarations
25179 may have already been defined. Don't define them again, but
25180 do put them in the right order. */
25181
25182 if (dw_die_ref child = lookup_decl_die (member))
25183 {
25184 /* Handle inline static data members, which only have in-class
25185 declarations. */
25186 dw_die_ref ref = NULL;
25187 if (child->die_tag == DW_TAG_variable
25188 && child->die_parent == comp_unit_die ())
25189 {
25190 ref = get_AT_ref (child, DW_AT_specification);
25191 /* For a C++17 inline static data member followed by a redundant
25192 out-of-class redeclaration, we might get here with
25193 child being the DIE created for the out-of-class
25194 redeclaration and with its DW_AT_specification being
25195 the DIE created for the in-class definition. We want to
25196 reparent the latter, and don't want to create another
25197 DIE with DW_AT_specification in that case, because
25198 we already have one. */
25199 if (ref
25200 && static_inline_p
25201 && ref->die_tag == DW_TAG_variable
25202 && ref->die_parent == comp_unit_die ()
25203 && get_AT (ref, DW_AT_specification) == NULL)
25204 {
25205 child = ref;
25206 ref = NULL;
25207 static_inline_p = false;
25208 }
25209 }
25210
25211 if (child->die_tag == DW_TAG_variable
25212 && child->die_parent == comp_unit_die ()
25213 && ref == NULL)
25214 {
25215 reparent_child (child, context_die);
25216 if (dwarf_version < 5)
25217 child->die_tag = DW_TAG_member;
25218 }
25219 else
25220 splice_child_die (context_die, child);
25221 }
25222
25223 /* Do not generate standard DWARF for variant parts if we are generating
25224 the corresponding GNAT encodings: DIEs generated for both would
25225 conflict in our mappings. */
25226 else if (is_variant_part (member)
25227 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25228 {
25229 vlr_ctx.variant_part_offset = byte_position (member);
25230 gen_variant_part (member, &vlr_ctx, context_die);
25231 }
25232 else
25233 {
25234 vlr_ctx.variant_part_offset = NULL_TREE;
25235 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25236 }
25237
25238 /* For C++ inline static data members emit immediately a DW_TAG_variable
25239 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25240 DW_AT_specification. */
25241 if (static_inline_p)
25242 {
25243 int old_extern = DECL_EXTERNAL (member);
25244 DECL_EXTERNAL (member) = 0;
25245 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25246 DECL_EXTERNAL (member) = old_extern;
25247 }
25248 }
25249 }
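/* Illustration of the inline-static handling above (hypothetical C++17
   source):

     struct S { static inline int x = 1; };

   produces a declaration DIE for "x" as a child of S's DIE and, via the
   static_inline_p path, an additional DW_TAG_variable at the compile-unit
   level that refers back to it through DW_AT_specification and carries the
   definition/location.  */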
25250
25251 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25252 is set, we pretend that the type was never defined, so we only get the
25253 member DIEs needed by later specification DIEs. */
25254
25255 static void
25256 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25257 enum debug_info_usage usage)
25258 {
25259 if (TREE_ASM_WRITTEN (type))
25260 {
25261 /* Fill in the bound of variable-length fields in late dwarf if
25262 still incomplete. */
25263 if (!early_dwarf && variably_modified_type_p (type, NULL))
25264 for (tree member = TYPE_FIELDS (type);
25265 member;
25266 member = DECL_CHAIN (member))
25267 fill_variable_array_bounds (TREE_TYPE (member));
25268 return;
25269 }
25270
25271 dw_die_ref type_die = lookup_type_die (type);
25272 dw_die_ref scope_die = 0;
25273 int nested = 0;
25274 int complete = (TYPE_SIZE (type)
25275 && (! TYPE_STUB_DECL (type)
25276 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25277 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25278 complete = complete && should_emit_struct_debug (type, usage);
25279
25280 if (type_die && ! complete)
25281 return;
25282
25283 if (TYPE_CONTEXT (type) != NULL_TREE
25284 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25285 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25286 nested = 1;
25287
25288 scope_die = scope_die_for (type, context_die);
25289
25290 /* Generate child DIEs for template parameters. */
25291 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25292 schedule_generic_params_dies_gen (type);
25293
25294 if (! type_die || (nested && is_cu_die (scope_die)))
25295 /* First occurrence of type or toplevel definition of nested class. */
25296 {
25297 dw_die_ref old_die = type_die;
25298
25299 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25300 ? record_type_tag (type) : DW_TAG_union_type,
25301 scope_die, type);
25302 equate_type_number_to_die (type, type_die);
25303 if (old_die)
25304 add_AT_specification (type_die, old_die);
25305 else
25306 add_name_attribute (type_die, type_tag (type));
25307 }
25308 else
25309 remove_AT (type_die, DW_AT_declaration);
25310
25311 /* If this type has been completed, then give it a byte_size attribute and
25312 then give a list of members. */
25313 if (complete && !ns_decl)
25314 {
25315 /* Prevent infinite recursion in cases where the type of some member of
25316 this type is expressed in terms of this type itself. */
25317 TREE_ASM_WRITTEN (type) = 1;
25318 add_byte_size_attribute (type_die, type);
25319 add_alignment_attribute (type_die, type);
25320 if (TYPE_STUB_DECL (type) != NULL_TREE)
25321 {
25322 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25323 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25324 }
25325
25326 /* If the first reference to this type was as the return type of an
25327 inline function, then it may not have a parent. Fix this now. */
25328 if (type_die->die_parent == NULL)
25329 add_child_die (scope_die, type_die);
25330
25331 gen_member_die (type, type_die);
25332
25333 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25334 if (TYPE_ARTIFICIAL (type))
25335 add_AT_flag (type_die, DW_AT_artificial, 1);
25336
25337 /* GNU extension: Record what type our vtable lives in. */
25338 if (TYPE_VFIELD (type))
25339 {
25340 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25341
25342 gen_type_die (vtype, context_die);
25343 add_AT_die_ref (type_die, DW_AT_containing_type,
25344 lookup_type_die (vtype));
25345 }
25346 }
25347 else
25348 {
25349 add_AT_flag (type_die, DW_AT_declaration, 1);
25350
25351 /* We don't need to do this for function-local types. */
25352 if (TYPE_STUB_DECL (type)
25353 && ! decl_function_context (TYPE_STUB_DECL (type)))
25354 vec_safe_push (incomplete_types, type);
25355 }
25356
25357 if (get_AT (type_die, DW_AT_name))
25358 add_pubtype (type, type_die);
25359 }
25360
25361 /* Generate a DIE for a subroutine _type_. */
25362
25363 static void
25364 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25365 {
25366 tree return_type = TREE_TYPE (type);
25367 dw_die_ref subr_die
25368 = new_die (DW_TAG_subroutine_type,
25369 scope_die_for (type, context_die), type);
25370
25371 equate_type_number_to_die (type, subr_die);
25372 add_prototyped_attribute (subr_die, type);
25373 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25374 context_die);
25375 add_alignment_attribute (subr_die, type);
25376 gen_formal_types_die (type, subr_die);
25377
25378 if (get_AT (subr_die, DW_AT_name))
25379 add_pubtype (type, subr_die);
25380 if ((dwarf_version >= 5 || !dwarf_strict)
25381 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25382 add_AT_flag (subr_die, DW_AT_reference, 1);
25383 if ((dwarf_version >= 5 || !dwarf_strict)
25384 && lang_hooks.types.type_dwarf_attribute (type,
25385 DW_AT_rvalue_reference) != -1)
25386 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25387 }
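/* For example, the C type "int (*)(char, double)" ends up as a
   DW_TAG_pointer_type whose DW_AT_type refers to a DW_TAG_subroutine_type
   with DW_AT_prototyped, DW_AT_type = int and one DW_TAG_formal_parameter
   child per parameter; the DW_AT_reference/DW_AT_rvalue_reference flags
   above apply only to ref-qualified C++ member function types.  */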
25388
25389 /* Generate a DIE for a type definition. */
25390
25391 static void
25392 gen_typedef_die (tree decl, dw_die_ref context_die)
25393 {
25394 dw_die_ref type_die;
25395 tree type;
25396
25397 if (TREE_ASM_WRITTEN (decl))
25398 {
25399 if (DECL_ORIGINAL_TYPE (decl))
25400 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25401 return;
25402 }
25403
25404 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25405 checks in process_scope_var and modified_type_die), this should be called
25406 only for original types. */
25407 gcc_assert (decl_ultimate_origin (decl) == NULL
25408 || decl_ultimate_origin (decl) == decl);
25409
25410 TREE_ASM_WRITTEN (decl) = 1;
25411 type_die = new_die (DW_TAG_typedef, context_die, decl);
25412
25413 add_name_and_src_coords_attributes (type_die, decl);
25414 if (DECL_ORIGINAL_TYPE (decl))
25415 {
25416 type = DECL_ORIGINAL_TYPE (decl);
25417 if (type == error_mark_node)
25418 return;
25419
25420 gcc_assert (type != TREE_TYPE (decl));
25421 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25422 }
25423 else
25424 {
25425 type = TREE_TYPE (decl);
25426 if (type == error_mark_node)
25427 return;
25428
25429 if (is_naming_typedef_decl (TYPE_NAME (type)))
25430 {
25431 /* Here, we are in the case of decl being a typedef naming
25432 an anonymous type, e.g:
25433 typedef struct {...} foo;
25434 In that case TREE_TYPE (decl) is not a typedef variant
25435 type and TYPE_NAME of the anonymous type is set to the
25436 TYPE_DECL of the typedef. This construct is emitted by
25437 the C++ FE.
25438
25439 TYPE is the anonymous struct named by the typedef
25440 DECL. As we need the DW_AT_type attribute of the
25441 DW_TAG_typedef to point to the DIE of TYPE, let's
25442 generate that DIE right away. add_type_attribute
25443 called below will then pick (via lookup_type_die) that
25444 anonymous struct DIE. */
25445 if (!TREE_ASM_WRITTEN (type))
25446 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25447
25448 /* This is a GNU Extension. We are adding a
25449 DW_AT_linkage_name attribute to the DIE of the
25450 anonymous struct TYPE. The value of that attribute
25451 is the name of the typedef decl naming the anonymous
25452 struct. This greatly eases the work of consumers of
25453 this debug info. */
25454 add_linkage_name_raw (lookup_type_die (type), decl);
25455 }
25456 }
25457
25458 add_type_attribute (type_die, type, decl_quals (decl), false,
25459 context_die);
25460
25461 if (is_naming_typedef_decl (decl))
25462 /* We want that all subsequent calls to lookup_type_die with
25463 TYPE in argument yield the DW_TAG_typedef we have just
25464 created. */
25465 equate_type_number_to_die (type, type_die);
25466
25467 add_alignment_attribute (type_die, TREE_TYPE (decl));
25468
25469 add_accessibility_attribute (type_die, decl);
25470
25471 if (DECL_ABSTRACT_P (decl))
25472 equate_decl_number_to_die (decl, type_die);
25473
25474 if (get_AT (type_die, DW_AT_name))
25475 add_pubtype (decl, type_die);
25476 }
25477
25478 /* Generate a DIE for a struct, class, enum or union type. */
25479
25480 static void
25481 gen_tagged_type_die (tree type,
25482 dw_die_ref context_die,
25483 enum debug_info_usage usage)
25484 {
25485 if (type == NULL_TREE
25486 || !is_tagged_type (type))
25487 return;
25488
25489 if (TREE_ASM_WRITTEN (type))
25490 ;
25491 /* If this is a nested type whose containing class hasn't been written
25492 out yet, writing it out will cover this one, too. This does not apply
25493 to instantiations of member class templates; they need to be added to
25494 the containing class as they are generated. FIXME: This hurts the
25495 idea of combining type decls from multiple TUs, since we can't predict
25496 what set of template instantiations we'll get. */
25497 else if (TYPE_CONTEXT (type)
25498 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25499 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25500 {
25501 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25502
25503 if (TREE_ASM_WRITTEN (type))
25504 return;
25505
25506 /* If that failed, attach ourselves to the stub. */
25507 context_die = lookup_type_die (TYPE_CONTEXT (type));
25508 }
25509 else if (TYPE_CONTEXT (type) != NULL_TREE
25510 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25511 {
25512 /* If this type is local to a function that hasn't been written
25513 out yet, use a NULL context for now; it will be fixed up in
25514 decls_for_scope. */
25515 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25516 /* A declaration DIE doesn't count; nested types need to go in the
25517 specification. */
25518 if (context_die && is_declaration_die (context_die))
25519 context_die = NULL;
25520 }
25521 else
25522 context_die = declare_in_namespace (type, context_die);
25523
25524 if (TREE_CODE (type) == ENUMERAL_TYPE)
25525 {
25526 /* This might have been written out by the call to
25527 declare_in_namespace. */
25528 if (!TREE_ASM_WRITTEN (type))
25529 gen_enumeration_type_die (type, context_die);
25530 }
25531 else
25532 gen_struct_or_union_type_die (type, context_die, usage);
25533
25534 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25535 it up if it is ever completed. gen_*_type_die will set it for us
25536 when appropriate. */
25537 }
25538
25539 /* Generate a type description DIE. */
25540
25541 static void
25542 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25543 enum debug_info_usage usage)
25544 {
25545 struct array_descr_info info;
25546
25547 if (type == NULL_TREE || type == error_mark_node)
25548 return;
25549
25550 if (flag_checking && type)
25551 verify_type (type);
25552
25553 if (TYPE_NAME (type) != NULL_TREE
25554 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25555 && is_redundant_typedef (TYPE_NAME (type))
25556 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25557 /* The DECL of this type is a typedef we don't want to emit debug
25558 info for but we want debug info for its underlying typedef.
25559 This can happen, e.g., for the injected-class-name of a C++
25560 type. */
25561 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25562
25563 /* If TYPE is a typedef type variant, let's generate debug info
25564 for the parent typedef which TYPE is a type of. */
25565 if (typedef_variant_p (type))
25566 {
25567 if (TREE_ASM_WRITTEN (type))
25568 return;
25569
25570 tree name = TYPE_NAME (type);
25571 tree origin = decl_ultimate_origin (name);
25572 if (origin != NULL && origin != name)
25573 {
25574 gen_decl_die (origin, NULL, NULL, context_die);
25575 return;
25576 }
25577
25578 /* Prevent broken recursion; we can't hand off to the same type. */
25579 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25580
25581 /* Give typedefs the right scope. */
25582 context_die = scope_die_for (type, context_die);
25583
25584 TREE_ASM_WRITTEN (type) = 1;
25585
25586 gen_decl_die (name, NULL, NULL, context_die);
25587 return;
25588 }
25589
25590 /* If type is an anonymous tagged type named by a typedef, let's
25591 generate debug info for the typedef. */
25592 if (is_naming_typedef_decl (TYPE_NAME (type)))
25593 {
25594 /* Give typedefs the right scope. */
25595 context_die = scope_die_for (type, context_die);
25596
25597 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25598 return;
25599 }
25600
25601 if (lang_hooks.types.get_debug_type)
25602 {
25603 tree debug_type = lang_hooks.types.get_debug_type (type);
25604
25605 if (debug_type != NULL_TREE && debug_type != type)
25606 {
25607 gen_type_die_with_usage (debug_type, context_die, usage);
25608 return;
25609 }
25610 }
25611
25612 /* We are going to output a DIE to represent the unqualified version
25613 of this type (i.e. without any const or volatile qualifiers) so
25614 get the main variant (i.e. the unqualified version) of this type
25615 now. (Vectors and arrays are special because the debugging info is in the
25616 cloned type itself. Similarly function/method types can contain extra
25617 ref-qualification). */
25618 if (TREE_CODE (type) == FUNCTION_TYPE
25619 || TREE_CODE (type) == METHOD_TYPE)
25620 {
25621 /* For function/method types, can't use type_main_variant here,
25622 because that can have different ref-qualifiers for C++,
25623 but try to canonicalize. */
25624 tree main = TYPE_MAIN_VARIANT (type);
25625 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25626 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25627 && check_base_type (t, main)
25628 && check_lang_type (t, type))
25629 {
25630 type = t;
25631 break;
25632 }
25633 }
25634 else if (TREE_CODE (type) != VECTOR_TYPE
25635 && TREE_CODE (type) != ARRAY_TYPE)
25636 type = type_main_variant (type);
25637
25638 /* If this is an array type with hidden descriptor, handle it first. */
25639 if (!TREE_ASM_WRITTEN (type)
25640 && lang_hooks.types.get_array_descr_info)
25641 {
25642 memset (&info, 0, sizeof (info));
25643 if (lang_hooks.types.get_array_descr_info (type, &info))
25644 {
25645 /* Fortran sometimes emits array types with no dimension. */
25646 gcc_assert (info.ndimensions >= 0
25647 && (info.ndimensions
25648 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25649 gen_descr_array_type_die (type, &info, context_die);
25650 TREE_ASM_WRITTEN (type) = 1;
25651 return;
25652 }
25653 }
25654
25655 if (TREE_ASM_WRITTEN (type))
25656 {
25657 /* Variable-length types may be incomplete even if
25658 TREE_ASM_WRITTEN. For such types, fall through to
25659 gen_array_type_die() and possibly fill in
25660 DW_AT_{upper,lower}_bound attributes. */
25661 if ((TREE_CODE (type) != ARRAY_TYPE
25662 && TREE_CODE (type) != RECORD_TYPE
25663 && TREE_CODE (type) != UNION_TYPE
25664 && TREE_CODE (type) != QUAL_UNION_TYPE)
25665 || !variably_modified_type_p (type, NULL))
25666 return;
25667 }
25668
25669 switch (TREE_CODE (type))
25670 {
25671 case ERROR_MARK:
25672 break;
25673
25674 case POINTER_TYPE:
25675 case REFERENCE_TYPE:
25676 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25677 ensures that the gen_type_die recursion will terminate even if the
25678 type is recursive. Recursive types are possible in Ada. */
25679 /* ??? We could perhaps do this for all types before the switch
25680 statement. */
25681 TREE_ASM_WRITTEN (type) = 1;
25682
25683 /* For these types, all that is required is that we output a DIE (or a
25684 set of DIEs) to represent the "basis" type. */
25685 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25686 DINFO_USAGE_IND_USE);
25687 break;
25688
25689 case OFFSET_TYPE:
25690 /* This code is used for C++ pointer-to-data-member types.
25691 Output a description of the relevant class type. */
25692 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25693 DINFO_USAGE_IND_USE);
25694
25695 /* Output a description of the type of the object pointed to. */
25696 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25697 DINFO_USAGE_IND_USE);
25698
25699 /* Now output a DIE to represent this pointer-to-data-member type
25700 itself. */
25701 gen_ptr_to_mbr_type_die (type, context_die);
25702 break;
25703
25704 case FUNCTION_TYPE:
25705 /* Force out return type (in case it wasn't forced out already). */
25706 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25707 DINFO_USAGE_DIR_USE);
25708 gen_subroutine_type_die (type, context_die);
25709 break;
25710
25711 case METHOD_TYPE:
25712 /* Force out return type (in case it wasn't forced out already). */
25713 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25714 DINFO_USAGE_DIR_USE);
25715 gen_subroutine_type_die (type, context_die);
25716 break;
25717
25718 case ARRAY_TYPE:
25719 case VECTOR_TYPE:
25720 gen_array_type_die (type, context_die);
25721 break;
25722
25723 case ENUMERAL_TYPE:
25724 case RECORD_TYPE:
25725 case UNION_TYPE:
25726 case QUAL_UNION_TYPE:
25727 gen_tagged_type_die (type, context_die, usage);
25728 return;
25729
25730 case VOID_TYPE:
25731 case INTEGER_TYPE:
25732 case REAL_TYPE:
25733 case FIXED_POINT_TYPE:
25734 case COMPLEX_TYPE:
25735 case BOOLEAN_TYPE:
25736 /* No DIEs needed for fundamental types. */
25737 break;
25738
25739 case NULLPTR_TYPE:
25740 case LANG_TYPE:
25741 /* Just use DW_TAG_unspecified_type. */
25742 {
25743 dw_die_ref type_die = lookup_type_die (type);
25744 if (type_die == NULL)
25745 {
25746 tree name = TYPE_IDENTIFIER (type);
25747 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25748 type);
25749 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25750 equate_type_number_to_die (type, type_die);
25751 }
25752 }
25753 break;
25754
25755 default:
25756 if (is_cxx_auto (type))
25757 {
25758 tree name = TYPE_IDENTIFIER (type);
25759 dw_die_ref *die = (name == get_identifier ("auto")
25760 ? &auto_die : &decltype_auto_die);
25761 if (!*die)
25762 {
25763 *die = new_die (DW_TAG_unspecified_type,
25764 comp_unit_die (), NULL_TREE);
25765 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25766 }
25767 equate_type_number_to_die (type, *die);
25768 break;
25769 }
25770 gcc_unreachable ();
25771 }
25772
25773 TREE_ASM_WRITTEN (type) = 1;
25774 }
25775
25776 static void
25777 gen_type_die (tree type, dw_die_ref context_die)
25778 {
25779 if (type != error_mark_node)
25780 {
25781 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25782 if (flag_checking)
25783 {
25784 dw_die_ref die = lookup_type_die (type);
25785 if (die)
25786 check_die (die);
25787 }
25788 }
25789 }
25790
25791 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25792 things which are local to the given block. */
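/* A rough sketch of the intended output (simplified, not verbatim
   DWARF): for

     void f (void) { int a; { int b; } }

   we get approximately

     DW_TAG_subprogram "f"
       DW_TAG_variable "a"
       DW_TAG_lexical_block
         DW_TAG_variable "b"  */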
25793
25794 static void
25795 gen_block_die (tree stmt, dw_die_ref context_die)
25796 {
25797 int must_output_die = 0;
25798 bool inlined_func;
25799
25800 /* Ignore blocks that are NULL. */
25801 if (stmt == NULL_TREE)
25802 return;
25803
25804 inlined_func = inlined_function_outer_scope_p (stmt);
25805
25806 /* If the block is one fragment of a non-contiguous block, do not
25807 process the variables, since they will have been done by the
25808 origin block. Do process subblocks. */
25809 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25810 {
25811 tree sub;
25812
25813 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25814 gen_block_die (sub, context_die);
25815
25816 return;
25817 }
25818
25819 /* Determine if we need to output any Dwarf DIEs at all to represent this
25820 block. */
25821 if (inlined_func)
25822 /* The outer scopes for inlinings *must* always be represented. We
25823 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25824 must_output_die = 1;
25825 else if (lookup_block_die (stmt))
25826 /* If we already have a DIE then it was filled early. Meanwhile
25827 we might have pruned all BLOCK_VARS as optimized out but we
25828 still want to generate high/low PC attributes so output it. */
25829 must_output_die = 1;
25830 else if (TREE_USED (stmt)
25831 || TREE_ASM_WRITTEN (stmt))
25832 {
25833 /* Determine if this block directly contains any "significant"
25834 local declarations which we will need to output DIEs for. */
25835 if (debug_info_level > DINFO_LEVEL_TERSE)
25836 {
25837 /* We are not in terse mode so any local declaration that
25838 is not ignored for debug purposes counts as being a
25839 "significant" one. */
25840 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25841 must_output_die = 1;
25842 else
25843 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25844 if (!DECL_IGNORED_P (var))
25845 {
25846 must_output_die = 1;
25847 break;
25848 }
25849 }
25850 else if (!dwarf2out_ignore_block (stmt))
25851 must_output_die = 1;
25852 }
25853
25854 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25855 DIE for any block which contains no significant local declarations at
25856 all. Rather, in such cases we just call `decls_for_scope' so that any
25857 needed Dwarf info for any sub-blocks will get properly generated. Note
25858 that in terse mode, our definition of what constitutes a "significant"
25859 local declaration gets restricted to include only inlined function
25860 instances and local (nested) function definitions. */
25861 if (must_output_die)
25862 {
25863 if (inlined_func)
25864 gen_inlined_subroutine_die (stmt, context_die);
25865 else
25866 gen_lexical_block_die (stmt, context_die);
25867 }
25868 else
25869 decls_for_scope (stmt, context_die);
25870 }
25871
25872 /* Process variable DECL (or variable with origin ORIGIN) within
25873 block STMT and add it to CONTEXT_DIE. */
25874 static void
25875 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25876 {
25877 dw_die_ref die;
25878 tree decl_or_origin = decl ? decl : origin;
25879
25880 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25881 die = lookup_decl_die (decl_or_origin);
25882 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25883 {
25884 if (TYPE_DECL_IS_STUB (decl_or_origin))
25885 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25886 else
25887 die = lookup_decl_die (decl_or_origin);
25888 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25889 if (! die && ! early_dwarf)
25890 return;
25891 }
25892 else
25893 die = NULL;
25894
25895 /* Avoid creating DIEs for local typedefs and concrete static variables that
25896 will only be pruned later. */
25897 if ((origin || decl_ultimate_origin (decl))
25898 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25899 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25900 {
25901 origin = decl_ultimate_origin (decl_or_origin);
25902 if (decl && VAR_P (decl) && die != NULL)
25903 {
25904 die = lookup_decl_die (origin);
25905 if (die != NULL)
25906 equate_decl_number_to_die (decl, die);
25907 }
25908 return;
25909 }
25910
25911 if (die != NULL && die->die_parent == NULL)
25912 add_child_die (context_die, die);
25913 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25914 {
25915 if (early_dwarf)
25916 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25917 stmt, context_die);
25918 }
25919 else
25920 {
25921 if (decl && DECL_P (decl))
25922 {
25923 die = lookup_decl_die (decl);
25924
25925 /* Early created DIEs do not have a parent as the decls refer
25926 to the function as DECL_CONTEXT rather than the BLOCK. */
25927 if (die && die->die_parent == NULL)
25928 {
25929 gcc_assert (in_lto_p);
25930 add_child_die (context_die, die);
25931 }
25932 }
25933
25934 gen_decl_die (decl, origin, NULL, context_die);
25935 }
25936 }
25937
25938 /* Generate all of the decls declared within a given scope and (recursively)
25939 all of its sub-blocks. */
25940
25941 static void
25942 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25943 {
25944 tree decl;
25945 unsigned int i;
25946 tree subblocks;
25947
25948 /* Ignore NULL blocks. */
25949 if (stmt == NULL_TREE)
25950 return;
25951
25952 /* Output the DIEs to represent all of the data objects and typedefs
25953 declared directly within this block but not within any nested
25954 sub-blocks. Also, nested function and tag DIEs have been
25955 generated with a parent of NULL; fix that up now. We don't
25956 have to do this if we're at -g1. */
25957 if (debug_info_level > DINFO_LEVEL_TERSE)
25958 {
25959 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25960 process_scope_var (stmt, decl, NULL_TREE, context_die);
25961 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25962 origin - avoid doing this twice as we have no good way to see
25963 if we've done it once already. */
25964 if (! early_dwarf)
25965 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25966 {
25967 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25968 if (decl == current_function_decl)
25969 /* Ignore declarations of the current function: even though they
25970 are declarations, gen_subprogram_die would treat them as
25971 definitions again because they are equal to
25972 current_function_decl and would endlessly recurse. */;
25973 else if (TREE_CODE (decl) == FUNCTION_DECL)
25974 process_scope_var (stmt, decl, NULL_TREE, context_die);
25975 else
25976 process_scope_var (stmt, NULL_TREE, decl, context_die);
25977 }
25978 }
25979
25980 /* Even if we're at -g1, we need to process the subblocks in order to get
25981 inlined call information. */
25982
25983 /* Output the DIEs to represent all sub-blocks (and the items declared
25984 therein) of this block. */
25985 if (recurse)
25986 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25987 subblocks != NULL;
25988 subblocks = BLOCK_CHAIN (subblocks))
25989 gen_block_die (subblocks, context_die);
25990 }
25991
25992 /* Is this a typedef we can avoid emitting? */
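/* For illustration: given

     struct S { };

   the C++ front end adds an artificial member typedef for the class
   name (the injected-class-name), roughly as if the user had written

     struct S { typedef S S; };

   and the DECL_ARTIFICIAL check below is meant to skip exactly that.  */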
25993
25994 static bool
25995 is_redundant_typedef (const_tree decl)
25996 {
25997 if (TYPE_DECL_IS_STUB (decl))
25998 return true;
25999
26000 if (DECL_ARTIFICIAL (decl)
26001 && DECL_CONTEXT (decl)
26002 && is_tagged_type (DECL_CONTEXT (decl))
26003 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26004 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26005 /* Also ignore the artificial member typedef for the class name. */
26006 return true;
26007
26008 return false;
26009 }
26010
26011 /* Return TRUE if TYPE is a typedef that names a type for linkage
26012 purposes. This kind of typedef is produced by the C++ FE for
26013 constructs like:
26014
26015 typedef struct {...} foo;
26016
26017 In that case, there is no typedef variant type produced for foo.
26018 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26019 struct type. */
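/* For illustration (simplified): for

     typedef struct { int i; } foo;

   the TYPE_DECL for foo is a naming typedef because the struct has no
   tag of its own, whereas for

     typedef struct bar { int i; } foo;

   it is not: the struct is already named by its tag "bar" and foo is an
   ordinary typedef with a DECL_ORIGINAL_TYPE.  */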
26020
26021 static bool
26022 is_naming_typedef_decl (const_tree decl)
26023 {
26024 if (decl == NULL_TREE
26025 || TREE_CODE (decl) != TYPE_DECL
26026 || DECL_NAMELESS (decl)
26027 || !is_tagged_type (TREE_TYPE (decl))
26028 || DECL_IS_BUILTIN (decl)
26029 || is_redundant_typedef (decl)
26030 /* It looks like Ada produces TYPE_DECLs that are very similar
26031 to C++ naming typedefs but that have different
26032 semantics. Let's be specific to C++ for now. */
26033 || !is_cxx (decl))
26034 return FALSE;
26035
26036 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26037 && TYPE_NAME (TREE_TYPE (decl)) == decl
26038 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26039 != TYPE_NAME (TREE_TYPE (decl))));
26040 }
26041
26042 /* Looks up the DIE for a context. */
26043
26044 static inline dw_die_ref
26045 lookup_context_die (tree context)
26046 {
26047 if (context)
26048 {
26049 /* Find die that represents this context. */
26050 if (TYPE_P (context))
26051 {
26052 context = TYPE_MAIN_VARIANT (context);
26053 dw_die_ref ctx = lookup_type_die (context);
26054 if (!ctx)
26055 return NULL;
26056 return strip_naming_typedef (context, ctx);
26057 }
26058 else
26059 return lookup_decl_die (context);
26060 }
26061 return comp_unit_die ();
26062 }
26063
26064 /* Returns the DIE for a context. */
26065
26066 static inline dw_die_ref
26067 get_context_die (tree context)
26068 {
26069 if (context)
26070 {
26071 /* Find die that represents this context. */
26072 if (TYPE_P (context))
26073 {
26074 context = TYPE_MAIN_VARIANT (context);
26075 return strip_naming_typedef (context, force_type_die (context));
26076 }
26077 else
26078 return force_decl_die (context);
26079 }
26080 return comp_unit_die ();
26081 }
26082
26083 /* Returns the DIE for decl. A DIE will always be returned. */
26084
26085 static dw_die_ref
26086 force_decl_die (tree decl)
26087 {
26088 dw_die_ref decl_die;
26089 unsigned saved_external_flag;
26090 tree save_fn = NULL_TREE;
26091 decl_die = lookup_decl_die (decl);
26092 if (!decl_die)
26093 {
26094 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26095
26096 decl_die = lookup_decl_die (decl);
26097 if (decl_die)
26098 return decl_die;
26099
26100 switch (TREE_CODE (decl))
26101 {
26102 case FUNCTION_DECL:
26103 /* Clear current_function_decl, so that gen_subprogram_die thinks
26104 that this is a declaration. At this point, we just want to force
26105 declaration die. */
26106 save_fn = current_function_decl;
26107 current_function_decl = NULL_TREE;
26108 gen_subprogram_die (decl, context_die);
26109 current_function_decl = save_fn;
26110 break;
26111
26112 case VAR_DECL:
26113 /* Set external flag to force declaration die. Restore it after
26114 gen_decl_die() call. */
26115 saved_external_flag = DECL_EXTERNAL (decl);
26116 DECL_EXTERNAL (decl) = 1;
26117 gen_decl_die (decl, NULL, NULL, context_die);
26118 DECL_EXTERNAL (decl) = saved_external_flag;
26119 break;
26120
26121 case NAMESPACE_DECL:
26122 if (dwarf_version >= 3 || !dwarf_strict)
26123 dwarf2out_decl (decl);
26124 else
26125 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26126 decl_die = comp_unit_die ();
26127 break;
26128
26129 case TRANSLATION_UNIT_DECL:
26130 decl_die = comp_unit_die ();
26131 break;
26132
26133 default:
26134 gcc_unreachable ();
26135 }
26136
26137 /* We should be able to find the DIE now. */
26138 if (!decl_die)
26139 decl_die = lookup_decl_die (decl);
26140 gcc_assert (decl_die);
26141 }
26142
26143 return decl_die;
26144 }
26145
26146 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26147 always returned. */
26148
26149 static dw_die_ref
26150 force_type_die (tree type)
26151 {
26152 dw_die_ref type_die;
26153
26154 type_die = lookup_type_die (type);
26155 if (!type_die)
26156 {
26157 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26158
26159 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26160 false, context_die);
26161 gcc_assert (type_die);
26162 }
26163 return type_die;
26164 }
26165
26166 /* Force out any required namespaces to be able to output DECL,
26167 and return the new context_die for it, if it's changed. */
26168
26169 static dw_die_ref
26170 setup_namespace_context (tree thing, dw_die_ref context_die)
26171 {
26172 tree context = (DECL_P (thing)
26173 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26174 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26175 /* Force out the namespace. */
26176 context_die = force_decl_die (context);
26177
26178 return context_die;
26179 }
26180
26181 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26182 type) within its namespace, if appropriate.
26183
26184 For compatibility with older debuggers, namespace DIEs only contain
26185 declarations; all definitions are emitted at CU scope, with
26186 DW_AT_specification pointing to the declaration (like with class
26187 members). */
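/* A rough sketch of the resulting shape (simplified, not verbatim
   DWARF):

     DW_TAG_namespace "N"
       DW_TAG_variable "v"           <-- declaration only
         DW_AT_declaration 1
     DW_TAG_variable                 <-- definition at CU scope
       DW_AT_specification -> the declaration of "v" above
       DW_AT_location ...  */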
26188
26189 static dw_die_ref
26190 declare_in_namespace (tree thing, dw_die_ref context_die)
26191 {
26192 dw_die_ref ns_context;
26193
26194 if (debug_info_level <= DINFO_LEVEL_TERSE)
26195 return context_die;
26196
26197 /* External declarations in the local scope only need to be emitted
26198 once, not once in the namespace and once in the scope.
26199
26200 This avoids declaring the `extern' below in the
26201 namespace DIE as well as in the innermost scope:
26202
26203 namespace S
26204 {
26205 int i=5;
26206 int foo()
26207 {
26208 int i=8;
26209 extern int i;
26210 return i;
26211 }
26212 }
26213 */
26214 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26215 return context_die;
26216
26217 /* If this decl is from an inlined function, then don't try to emit it in its
26218 namespace, as we will get confused. It would have already been emitted
26219 when the abstract instance of the inline function was emitted anyway. */
26220 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26221 return context_die;
26222
26223 ns_context = setup_namespace_context (thing, context_die);
26224
26225 if (ns_context != context_die)
26226 {
26227 if (is_fortran () || is_dlang ())
26228 return ns_context;
26229 if (DECL_P (thing))
26230 gen_decl_die (thing, NULL, NULL, ns_context);
26231 else
26232 gen_type_die (thing, ns_context);
26233 }
26234 return context_die;
26235 }
26236
26237 /* Generate a DIE for a namespace or namespace alias. */
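/* For illustration: a real namespace becomes a DW_TAG_namespace DIE (or
   DW_TAG_module for Fortran/D), while a C++ namespace alias such as

     namespace N = Some::Other::Name;

   becomes a DW_TAG_imported_declaration whose DW_AT_import refers to
   the DIE of the aliased namespace; see the DECL_ABSTRACT_ORIGIN branch
   below.  */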
26238
26239 static void
26240 gen_namespace_die (tree decl, dw_die_ref context_die)
26241 {
26242 dw_die_ref namespace_die;
26243
26244 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26245 they are an alias of. */
26246 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26247 {
26248 /* Output a real namespace or module. */
26249 context_die = setup_namespace_context (decl, comp_unit_die ());
26250 namespace_die = new_die (is_fortran () || is_dlang ()
26251 ? DW_TAG_module : DW_TAG_namespace,
26252 context_die, decl);
26253 /* For Fortran modules defined in a different CU don't add src coords. */
26254 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26255 {
26256 const char *name = dwarf2_name (decl, 0);
26257 if (name)
26258 add_name_attribute (namespace_die, name);
26259 }
26260 else
26261 add_name_and_src_coords_attributes (namespace_die, decl);
26262 if (DECL_EXTERNAL (decl))
26263 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26264 equate_decl_number_to_die (decl, namespace_die);
26265 }
26266 else
26267 {
26268 /* Output a namespace alias. */
26269
26270 /* Force out the namespace we are an alias of, if necessary. */
26271 dw_die_ref origin_die
26272 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26273
26274 if (DECL_FILE_SCOPE_P (decl)
26275 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26276 context_die = setup_namespace_context (decl, comp_unit_die ());
26277 /* Now create the namespace alias DIE. */
26278 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26279 add_name_and_src_coords_attributes (namespace_die, decl);
26280 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26281 equate_decl_number_to_die (decl, namespace_die);
26282 }
26283 if ((dwarf_version >= 5 || !dwarf_strict)
26284 && lang_hooks.decls.decl_dwarf_attribute (decl,
26285 DW_AT_export_symbols) == 1)
26286 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26287
26288 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26289 if (want_pubnames ())
26290 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26291 }
26292
26293 /* Generate Dwarf debug information for a decl described by DECL.
26294 The return value is currently only meaningful for PARM_DECLs,
26295 for all other decls it returns NULL.
26296
26297 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26298 It can be NULL otherwise. */
26299
26300 static dw_die_ref
26301 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26302 dw_die_ref context_die)
26303 {
26304 tree decl_or_origin = decl ? decl : origin;
26305 tree class_origin = NULL, ultimate_origin;
26306
26307 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26308 return NULL;
26309
26310 switch (TREE_CODE (decl_or_origin))
26311 {
26312 case ERROR_MARK:
26313 break;
26314
26315 case CONST_DECL:
26316 if (!is_fortran () && !is_ada () && !is_dlang ())
26317 {
26318 /* The individual enumerators of an enum type get output when we output
26319 the Dwarf representation of the relevant enum type itself. */
26320 break;
26321 }
26322
26323 /* Emit its type. */
26324 gen_type_die (TREE_TYPE (decl), context_die);
26325
26326 /* And its containing namespace. */
26327 context_die = declare_in_namespace (decl, context_die);
26328
26329 gen_const_die (decl, context_die);
26330 break;
26331
26332 case FUNCTION_DECL:
26333 #if 0
26334 /* FIXME */
26335 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26336 on local redeclarations of global functions. That seems broken. */
26337 if (current_function_decl != decl)
26338 /* This is only a declaration. */;
26339 #endif
26340
26341 /* We should have abstract copies already and should not generate
26342 stray type DIEs in late LTO dumping. */
26343 if (! early_dwarf)
26344 ;
26345
26346 /* If we're emitting a clone, emit info for the abstract instance. */
26347 else if (origin || DECL_ORIGIN (decl) != decl)
26348 dwarf2out_abstract_function (origin
26349 ? DECL_ORIGIN (origin)
26350 : DECL_ABSTRACT_ORIGIN (decl));
26351
26352 /* If we're emitting a possibly inlined function emit it as
26353 abstract instance. */
26354 else if (cgraph_function_possibly_inlined_p (decl)
26355 && ! DECL_ABSTRACT_P (decl)
26356 && ! class_or_namespace_scope_p (context_die)
26357 /* dwarf2out_abstract_function won't emit a die if this is just
26358 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26359 that case, because that works only if we have a die. */
26360 && DECL_INITIAL (decl) != NULL_TREE)
26361 dwarf2out_abstract_function (decl);
26362
26363 /* Otherwise we're emitting the primary DIE for this decl. */
26364 else if (debug_info_level > DINFO_LEVEL_TERSE)
26365 {
26366 /* Before we describe the FUNCTION_DECL itself, make sure that we
26367 have its containing type. */
26368 if (!origin)
26369 origin = decl_class_context (decl);
26370 if (origin != NULL_TREE)
26371 gen_type_die (origin, context_die);
26372
26373 /* And its return type. */
26374 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26375
26376 /* And its virtual context. */
26377 if (DECL_VINDEX (decl) != NULL_TREE)
26378 gen_type_die (DECL_CONTEXT (decl), context_die);
26379
26380 /* Make sure we have a member DIE for decl. */
26381 if (origin != NULL_TREE)
26382 gen_type_die_for_member (origin, decl, context_die);
26383
26384 /* And its containing namespace. */
26385 context_die = declare_in_namespace (decl, context_die);
26386 }
26387
26388 /* Now output a DIE to represent the function itself. */
26389 if (decl)
26390 gen_subprogram_die (decl, context_die);
26391 break;
26392
26393 case TYPE_DECL:
26394 /* If we are in terse mode, don't generate any DIEs to represent any
26395 actual typedefs. */
26396 if (debug_info_level <= DINFO_LEVEL_TERSE)
26397 break;
26398
26399 /* In the special case of a TYPE_DECL node representing the declaration
26400 of some type tag, if the given TYPE_DECL is marked as having been
26401 instantiated from some other (original) TYPE_DECL node (e.g. one which
26402 was generated within the original definition of an inline function) we
26403 used to generate a special (abbreviated) DW_TAG_structure_type,
26404 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26405 should actually be referencing those DIEs, as variable DIEs with that
26406 type would be emitted already in the abstract origin, so it was always
26407 removed during unused type pruning. Don't add anything in this
26408 case. */
26409 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26410 break;
26411
26412 if (is_redundant_typedef (decl))
26413 gen_type_die (TREE_TYPE (decl), context_die);
26414 else
26415 /* Output a DIE to represent the typedef itself. */
26416 gen_typedef_die (decl, context_die);
26417 break;
26418
26419 case LABEL_DECL:
26420 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26421 gen_label_die (decl, context_die);
26422 break;
26423
26424 case VAR_DECL:
26425 case RESULT_DECL:
26426 /* If we are in terse mode, don't generate any DIEs to represent any
26427 variable declarations or definitions. */
26428 if (debug_info_level <= DINFO_LEVEL_TERSE)
26429 break;
26430
26431 /* Avoid generating stray type DIEs during late dwarf dumping.
26432 All types have been dumped early. */
26433 if (early_dwarf
26434 /* ??? But in LTRANS we cannot annotate early created variably
26435 modified type DIEs without copying them and adjusting all
26436 references to them. Dump them again as happens for inlining
26437 which copies both the decl and the types. */
26438 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26439 in VLA bound information for example. */
26440 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26441 current_function_decl)))
26442 {
26443 /* Output any DIEs that are needed to specify the type of this data
26444 object. */
26445 if (decl_by_reference_p (decl_or_origin))
26446 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26447 else
26448 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26449 }
26450
26451 if (early_dwarf)
26452 {
26453 /* And its containing type. */
26454 class_origin = decl_class_context (decl_or_origin);
26455 if (class_origin != NULL_TREE)
26456 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26457
26458 /* And its containing namespace. */
26459 context_die = declare_in_namespace (decl_or_origin, context_die);
26460 }
26461
26462 /* Now output the DIE to represent the data object itself. This gets
26463 complicated because of the possibility that the VAR_DECL really
26464 represents an inlined instance of a formal parameter for an inline
26465 function. */
26466 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26467 if (ultimate_origin != NULL_TREE
26468 && TREE_CODE (ultimate_origin) == PARM_DECL)
26469 gen_formal_parameter_die (decl, origin,
26470 true /* Emit name attribute. */,
26471 context_die);
26472 else
26473 gen_variable_die (decl, origin, context_die);
26474 break;
26475
26476 case FIELD_DECL:
26477 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26478 /* Ignore the nameless fields that are used to skip bits but handle C++
26479 anonymous unions and structs. */
26480 if (DECL_NAME (decl) != NULL_TREE
26481 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26482 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26483 {
26484 gen_type_die (member_declared_type (decl), context_die);
26485 gen_field_die (decl, ctx, context_die);
26486 }
26487 break;
26488
26489 case PARM_DECL:
26490 /* Avoid generating stray type DIEs during late dwarf dumping.
26491 All types have been dumped early. */
26492 if (early_dwarf
26493 /* ??? But in LTRANS we cannot annotate early created variably
26494 modified type DIEs without copying them and adjusting all
26495 references to them. Dump them again as happens for inlining
26496 which copies both the decl and the types. */
26497 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26498 in VLA bound information for example. */
26499 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26500 current_function_decl)))
26501 {
26502 if (DECL_BY_REFERENCE (decl_or_origin))
26503 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26504 else
26505 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26506 }
26507 return gen_formal_parameter_die (decl, origin,
26508 true /* Emit name attribute. */,
26509 context_die);
26510
26511 case NAMESPACE_DECL:
26512 if (dwarf_version >= 3 || !dwarf_strict)
26513 gen_namespace_die (decl, context_die);
26514 break;
26515
26516 case IMPORTED_DECL:
26517 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26518 DECL_CONTEXT (decl), context_die);
26519 break;
26520
26521 case NAMELIST_DECL:
26522 gen_namelist_decl (DECL_NAME (decl), context_die,
26523 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26524 break;
26525
26526 default:
26527 /* Probably some frontend-internal decl. Assume we don't care. */
26528 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26529 break;
26530 }
26531
26532 return NULL;
26533 }
26534 \f
26535 /* Output initial debug information for global DECL. Called at the
26536 end of the parsing process.
26537
26538 This is the initial debug generation process. As such, the DIEs
26539 generated may be incomplete. A later debug generation pass
26540 (dwarf2out_late_global_decl) will augment the information generated
26541 in this pass (e.g., with complete location info). */
26542
26543 static void
26544 dwarf2out_early_global_decl (tree decl)
26545 {
26546 set_early_dwarf s;
26547
26548 /* gen_decl_die() will set DECL_ABSTRACT because
26549 cgraph_function_possibly_inlined_p() returns true. This in
26550 turn will cause DW_AT_inline attributes to be set.
26551
26552 This happens because at early dwarf generation, there is no
26553 cgraph information, causing cgraph_function_possibly_inlined_p()
26554 to return true. Trick cgraph_function_possibly_inlined_p()
26555 while we generate dwarf early. */
26556 bool save = symtab->global_info_ready;
26557 symtab->global_info_ready = true;
26558
26559 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26560 other DECLs and they can point to template types or other things
26561 that dwarf2out can't handle when done via dwarf2out_decl. */
26562 if (TREE_CODE (decl) != TYPE_DECL
26563 && TREE_CODE (decl) != PARM_DECL)
26564 {
26565 if (TREE_CODE (decl) == FUNCTION_DECL)
26566 {
26567 tree save_fndecl = current_function_decl;
26568
26569 /* For nested functions, make sure we have DIEs for the parents first
26570 so that all nested DIEs are generated at the proper scope in the
26571 first shot. */
26572 tree context = decl_function_context (decl);
26573 if (context != NULL)
26574 {
26575 dw_die_ref context_die = lookup_decl_die (context);
26576 current_function_decl = context;
26577
26578 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26579 enough so that it lands in its own context. This avoids type
26580 pruning issues later on. */
26581 if (context_die == NULL || is_declaration_die (context_die))
26582 dwarf2out_early_global_decl (context);
26583 }
26584
26585 /* Emit an abstract origin of a function first. This happens
26586 with C++ constructor clones for example and makes
26587 dwarf2out_abstract_function happy which requires the early
26588 DIE of the abstract instance to be present. */
26589 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26590 dw_die_ref origin_die;
26591 if (origin != NULL
26592 /* Do not emit the DIE multiple times but make sure to
26593 process it fully here in case we just saw a declaration. */
26594 && ((origin_die = lookup_decl_die (origin)) == NULL
26595 || is_declaration_die (origin_die)))
26596 {
26597 current_function_decl = origin;
26598 dwarf2out_decl (origin);
26599 }
26600
26601 /* Emit the DIE for decl but avoid doing that multiple times. */
26602 dw_die_ref old_die;
26603 if ((old_die = lookup_decl_die (decl)) == NULL
26604 || is_declaration_die (old_die))
26605 {
26606 current_function_decl = decl;
26607 dwarf2out_decl (decl);
26608 }
26609
26610 current_function_decl = save_fndecl;
26611 }
26612 else
26613 dwarf2out_decl (decl);
26614 }
26615 symtab->global_info_ready = save;
26616 }
26617
26618 /* Return whether EXPR is an expression with the following pattern:
26619 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
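/* For illustration: this matches a dereference of an integer constant
   converted to a pointer type, e.g. a DECL_VALUE_EXPR of roughly the
   form

     *(some_type *) 0x1234

   as long as it is still represented as
   INDIRECT_REF (NOP_EXPR (INTEGER_CST)) and has not been folded into
   some other shape.  */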
26620
26621 static bool
26622 is_trivial_indirect_ref (tree expr)
26623 {
26624 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26625 return false;
26626
26627 tree nop = TREE_OPERAND (expr, 0);
26628 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26629 return false;
26630
26631 tree int_cst = TREE_OPERAND (nop, 0);
26632 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26633 }
26634
26635 /* Output debug information for global decl DECL. Called from
26636 toplev.c after compilation proper has finished. */
26637
26638 static void
26639 dwarf2out_late_global_decl (tree decl)
26640 {
26641 /* Fill-in any location information we were unable to determine
26642 on the first pass. */
26643 if (VAR_P (decl))
26644 {
26645 dw_die_ref die = lookup_decl_die (decl);
26646
26647 /* We may have to generate early debug late for LTO in case debug
26648 was not enabled at compile-time or the target doesn't support
26649 the LTO early debug scheme. */
26650 if (! die && in_lto_p)
26651 {
26652 dwarf2out_decl (decl);
26653 die = lookup_decl_die (decl);
26654 }
26655
26656 if (die)
26657 {
26658 /* We get called via the symtab code invoking late_global_decl
26659 for symbols that are optimized out.
26660
26661 Do not add locations for those, except if they have a
26662 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26663 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26664 INDIRECT_REF expression, as this could generate relocations to
26665 text symbols in LTO object files, which is invalid. */
26666 varpool_node *node = varpool_node::get (decl);
26667 if ((! node || ! node->definition)
26668 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26669 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26670 tree_add_const_value_attribute_for_decl (die, decl);
26671 else
26672 add_location_or_const_value_attribute (die, decl, false);
26673 }
26674 }
26675 }
26676
26677 /* Output debug information for type decl DECL. Called from toplev.c
26678 and from language front ends (to record built-in types). */
26679 static void
26680 dwarf2out_type_decl (tree decl, int local)
26681 {
26682 if (!local)
26683 {
26684 set_early_dwarf s;
26685 dwarf2out_decl (decl);
26686 }
26687 }
26688
26689 /* Output debug information for imported module or decl DECL.
26690 NAME is non-NULL name in the lexical block if the decl has been renamed.
26691 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26692 that DECL belongs to.
26693 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26694 static void
26695 dwarf2out_imported_module_or_decl_1 (tree decl,
26696 tree name,
26697 tree lexical_block,
26698 dw_die_ref lexical_block_die)
26699 {
26700 expanded_location xloc;
26701 dw_die_ref imported_die = NULL;
26702 dw_die_ref at_import_die;
26703
26704 if (TREE_CODE (decl) == IMPORTED_DECL)
26705 {
26706 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26707 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26708 gcc_assert (decl);
26709 }
26710 else
26711 xloc = expand_location (input_location);
26712
26713 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26714 {
26715 at_import_die = force_type_die (TREE_TYPE (decl));
26716 /* For namespace N { typedef void T; } using N::T; base_type_die
26717 returns NULL, but DW_TAG_imported_declaration requires
26718 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26719 if (!at_import_die)
26720 {
26721 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26722 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26723 at_import_die = lookup_type_die (TREE_TYPE (decl));
26724 gcc_assert (at_import_die);
26725 }
26726 }
26727 else
26728 {
26729 at_import_die = lookup_decl_die (decl);
26730 if (!at_import_die)
26731 {
26732 /* If we're trying to avoid duplicate debug info, we may not have
26733 emitted the member decl for this field. Emit it now. */
26734 if (TREE_CODE (decl) == FIELD_DECL)
26735 {
26736 tree type = DECL_CONTEXT (decl);
26737
26738 if (TYPE_CONTEXT (type)
26739 && TYPE_P (TYPE_CONTEXT (type))
26740 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26741 DINFO_USAGE_DIR_USE))
26742 return;
26743 gen_type_die_for_member (type, decl,
26744 get_context_die (TYPE_CONTEXT (type)));
26745 }
26746 if (TREE_CODE (decl) == NAMELIST_DECL)
26747 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26748 get_context_die (DECL_CONTEXT (decl)),
26749 NULL_TREE);
26750 else
26751 at_import_die = force_decl_die (decl);
26752 }
26753 }
26754
26755 if (TREE_CODE (decl) == NAMESPACE_DECL)
26756 {
26757 if (dwarf_version >= 3 || !dwarf_strict)
26758 imported_die = new_die (DW_TAG_imported_module,
26759 lexical_block_die,
26760 lexical_block);
26761 else
26762 return;
26763 }
26764 else
26765 imported_die = new_die (DW_TAG_imported_declaration,
26766 lexical_block_die,
26767 lexical_block);
26768
26769 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26770 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26771 if (debug_column_info && xloc.column)
26772 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26773 if (name)
26774 add_AT_string (imported_die, DW_AT_name,
26775 IDENTIFIER_POINTER (name));
26776 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26777 }
26778
26779 /* Output debug information for imported module or decl DECL.
26780 NAME is non-NULL name in context if the decl has been renamed.
26781 CHILD is true if decl is one of the renamed decls as part of
26782 importing whole module.
26783 IMPLICIT is set if this hook is called for an implicit import
26784 such as inline namespace. */
26785
26786 static void
26787 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26788 bool child, bool implicit)
26789 {
26790 /* dw_die_ref at_import_die; */
26791 dw_die_ref scope_die;
26792
26793 if (debug_info_level <= DINFO_LEVEL_TERSE)
26794 return;
26795
26796 gcc_assert (decl);
26797
26798 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
26799 be enough. For DWARF4 and older, even if we emit DW_AT_export_symbols
26800 as an extension, add the implicit DW_TAG_imported_module anyway for
26801 the benefit of consumers unaware of DW_AT_export_symbols. */
26802 if (implicit
26803 && dwarf_version >= 5
26804 && lang_hooks.decls.decl_dwarf_attribute (decl,
26805 DW_AT_export_symbols) == 1)
26806 return;
26807
26808 set_early_dwarf s;
26809
26810 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26811 two DIEs: the DIE of the imported decl (for the DW_AT_import reference)
26812 and the DIE of the scope it is imported into. */
26813
26814 /* Get the scope die for decl context. Use comp_unit_die for global module
26815 or decl. If a DIE is not found for non-globals, force a new DIE. */
26816 if (context
26817 && TYPE_P (context)
26818 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26819 return;
26820
26821 scope_die = get_context_die (context);
26822
26823 if (child)
26824 {
26825 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26826 there is nothing we can do here. */
26827 if (dwarf_version < 3 && dwarf_strict)
26828 return;
26829
26830 gcc_assert (scope_die->die_child);
26831 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26832 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26833 scope_die = scope_die->die_child;
26834 }
26835
26836 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26837 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26838 }
26839
26840 /* Output debug information for namelists. */
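/* For illustration: a Fortran namelist such as

     NAMELIST /mylist/ a, b

   is emitted as a DW_TAG_namelist DIE named "mylist" with one
   DW_TAG_namelist_item child per member, each referring to the DIE of
   the corresponding variable; a namelist known only through USE
   association gets just a DW_AT_declaration flag instead.  */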
26841
26842 static dw_die_ref
26843 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26844 {
26845 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26846 tree value;
26847 unsigned i;
26848
26849 if (debug_info_level <= DINFO_LEVEL_TERSE)
26850 return NULL;
26851
26852 gcc_assert (scope_die != NULL);
26853 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26854 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26855
26856 /* If there are no item_decls, we have a nondefining namelist, e.g.
26857 with USE association; hence, set DW_AT_declaration. */
26858 if (item_decls == NULL_TREE)
26859 {
26860 add_AT_flag (nml_die, DW_AT_declaration, 1);
26861 return nml_die;
26862 }
26863
26864 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26865 {
26866 nml_item_ref_die = lookup_decl_die (value);
26867 if (!nml_item_ref_die)
26868 nml_item_ref_die = force_decl_die (value);
26869
26870 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26871 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26872 }
26873 return nml_die;
26874 }
26875
26876
26877 /* Write the debugging output for DECL. */
26878
26879 static void
26880 dwarf2out_decl (tree decl)
26881 {
26882 dw_die_ref context_die = comp_unit_die ();
26883
26884 switch (TREE_CODE (decl))
26885 {
26886 case ERROR_MARK:
26887 return;
26888
26889 case FUNCTION_DECL:
26890 /* If we're a nested function, initially use a parent of NULL; if we're
26891 a plain function, this will be fixed up in decls_for_scope. If
26892 we're a method, it will be ignored, since we already have a DIE.
26893 Avoid doing this late though since clones of class methods may
26894 otherwise end up in limbo and create type DIEs late. */
26895 if (early_dwarf
26896 && decl_function_context (decl)
26897 /* But if we're in terse mode, we don't care about scope. */
26898 && debug_info_level > DINFO_LEVEL_TERSE)
26899 context_die = NULL;
26900 break;
26901
26902 case VAR_DECL:
26903 /* For local statics lookup proper context die. */
26904 if (local_function_static (decl))
26905 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26906
26907 /* If we are in terse mode, don't generate any DIEs to represent any
26908 variable declarations or definitions. */
26909 if (debug_info_level <= DINFO_LEVEL_TERSE)
26910 return;
26911 break;
26912
26913 case CONST_DECL:
26914 if (debug_info_level <= DINFO_LEVEL_TERSE)
26915 return;
26916 if (!is_fortran () && !is_ada () && !is_dlang ())
26917 return;
26918 if (TREE_STATIC (decl) && decl_function_context (decl))
26919 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26920 break;
26921
26922 case NAMESPACE_DECL:
26923 case IMPORTED_DECL:
26924 if (debug_info_level <= DINFO_LEVEL_TERSE)
26925 return;
26926 if (lookup_decl_die (decl) != NULL)
26927 return;
26928 break;
26929
26930 case TYPE_DECL:
26931 /* Don't emit stubs for types unless they are needed by other DIEs. */
26932 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26933 return;
26934
26935 /* Don't bother trying to generate any DIEs to represent any of the
26936 normal built-in types for the language we are compiling. */
26937 if (DECL_IS_BUILTIN (decl))
26938 return;
26939
26940 /* If we are in terse mode, don't generate any DIEs for types. */
26941 if (debug_info_level <= DINFO_LEVEL_TERSE)
26942 return;
26943
26944 /* If we're a function-scope tag, initially use a parent of NULL;
26945 this will be fixed up in decls_for_scope. */
26946 if (decl_function_context (decl))
26947 context_die = NULL;
26948
26949 break;
26950
26951 case NAMELIST_DECL:
26952 break;
26953
26954 default:
26955 return;
26956 }
26957
26958 gen_decl_die (decl, NULL, NULL, context_die);
26959
26960 if (flag_checking)
26961 {
26962 dw_die_ref die = lookup_decl_die (decl);
26963 if (die)
26964 check_die (die);
26965 }
26966 }
26967
26968 /* Write the debugging output for DECL. */
26969
26970 static void
26971 dwarf2out_function_decl (tree decl)
26972 {
26973 dwarf2out_decl (decl);
26974 call_arg_locations = NULL;
26975 call_arg_loc_last = NULL;
26976 call_site_count = -1;
26977 tail_call_site_count = -1;
26978 decl_loc_table->empty ();
26979 cached_dw_loc_list_table->empty ();
26980 }
26981
26982 /* Output a marker (i.e. a label) for the beginning of the generated code for
26983 a lexical block. */
26984
26985 static void
26986 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26987 unsigned int blocknum)
26988 {
26989 switch_to_section (current_function_section ());
26990 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26991 }
26992
26993 /* Output a marker (i.e. a label) for the end of the generated code for a
26994 lexical block. */
26995
26996 static void
26997 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26998 {
26999 switch_to_section (current_function_section ());
27000 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27001 }
27002
27003 /* Returns nonzero if it is appropriate not to emit any debugging
27004 information for BLOCK, because it doesn't contain any instructions.
27005
27006 Don't allow this for blocks with nested functions or local classes
27007 as we would end up with orphans, and in the presence of scheduling
27008 we may end up calling them anyway. */
27009
27010 static bool
27011 dwarf2out_ignore_block (const_tree block)
27012 {
27013 tree decl;
27014 unsigned int i;
27015
27016 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27017 if (TREE_CODE (decl) == FUNCTION_DECL
27018 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27019 return false;
27020 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27021 {
27022 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27023 if (TREE_CODE (decl) == FUNCTION_DECL
27024 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27025 return false;
27026 }
27027
27028 return true;
27029 }
27030
27031 /* Hash table routines for file_hash. */
27032
27033 bool
27034 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27035 {
27036 return filename_cmp (p1->filename, p2) == 0;
27037 }
27038
27039 hashval_t
27040 dwarf_file_hasher::hash (dwarf_file_data *p)
27041 {
27042 return htab_hash_string (p->filename);
27043 }
27044
27045 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27046 dwarf2out.c) and return its "index". The index of each (known) filename is
27047 just a unique number which is associated with only that one filename. We
27048 need such numbers for the sake of generating labels (in the .debug_sfnames
27049 section) and references to those files numbers (in the .debug_srcinfo
27050 and .debug_macinfo sections). If the filename given as an argument is not
27051 found in our current list, add it to the list and assign it the next
27052 available unique index number. */
27053
27054 static struct dwarf_file_data *
27055 lookup_filename (const char *file_name)
27056 {
27057 struct dwarf_file_data * created;
27058
27059 if (!file_name)
27060 return NULL;
27061
27062 dwarf_file_data **slot
27063 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27064 INSERT);
27065 if (*slot)
27066 return *slot;
27067
27068 created = ggc_alloc<dwarf_file_data> ();
27069 created->filename = file_name;
27070 created->emitted_number = 0;
27071 *slot = created;
27072 return created;
27073 }
27074
27075 /* If the assembler will construct the file table, then translate the compiler
27076 internal file table number into the assembler file table number, and emit
27077 a .file directive if we haven't already emitted one yet. The file table
27078 numbers are different because we prune debug info for unused variables and
27079 types, which may include filenames. */
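/* For illustration: when the assembler builds the line table, this ends
   up emitting directives along the lines of

     .file 1 "foo.c"
     .file 2 "include/bar.h"

   where the numbers are the emitted_number values assigned below.  */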
27080
27081 static int
27082 maybe_emit_file (struct dwarf_file_data * fd)
27083 {
27084 if (! fd->emitted_number)
27085 {
27086 if (last_emitted_file)
27087 fd->emitted_number = last_emitted_file->emitted_number + 1;
27088 else
27089 fd->emitted_number = 1;
27090 last_emitted_file = fd;
27091
27092 if (output_asm_line_debug_info ())
27093 {
27094 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27095 output_quoted_string (asm_out_file,
27096 remap_debug_filename (fd->filename));
27097 fputc ('\n', asm_out_file);
27098 }
27099 }
27100
27101 return fd->emitted_number;
27102 }
27103
27104 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27105 That generation should happen after function debug info has been
27106 generated. The value of the attribute is the constant value of ARG. */
27107
27108 static void
27109 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27110 {
27111 die_arg_entry entry;
27112
27113 if (!die || !arg)
27114 return;
27115
27116 gcc_assert (early_dwarf);
27117
27118 if (!tmpl_value_parm_die_table)
27119 vec_alloc (tmpl_value_parm_die_table, 32);
27120
27121 entry.die = die;
27122 entry.arg = arg;
27123 vec_safe_push (tmpl_value_parm_die_table, entry);
27124 }
27125
27126 /* Return TRUE if T is an instance of a generic type, FALSE
27127 otherwise. */
27128
27129 static bool
27130 generic_type_p (tree t)
27131 {
27132 if (t == NULL_TREE || !TYPE_P (t))
27133 return false;
27134 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27135 }
27136
27137 /* Schedule the generation of the generic parameter dies for the
27138 instance of generic type T. The proper generation itself is later
27139 done by gen_scheduled_generic_parms_dies. */
27140
27141 static void
27142 schedule_generic_params_dies_gen (tree t)
27143 {
27144 if (!generic_type_p (t))
27145 return;
27146
27147 gcc_assert (early_dwarf);
27148
27149 if (!generic_type_instances)
27150 vec_alloc (generic_type_instances, 256);
27151
27152 vec_safe_push (generic_type_instances, t);
27153 }
27154
27155 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27156 by append_entry_to_tmpl_value_parm_die_table. This function must
27157 be called after function DIEs have been generated. */
27158
27159 static void
27160 gen_remaining_tmpl_value_param_die_attribute (void)
27161 {
27162 if (tmpl_value_parm_die_table)
27163 {
27164 unsigned i, j;
27165 die_arg_entry *e;
27166
27167 /* We do this in two phases - first get the cases we can
27168 handle during early-finish, preserving those we cannot
27169 (containing symbolic constants where we don't yet know
27170 whether we are going to output the referenced symbols).
27171 For those we try again at late-finish. */
27172 j = 0;
27173 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27174 {
27175 if (!e->die->removed
27176 && !tree_add_const_value_attribute (e->die, e->arg))
27177 {
27178 dw_loc_descr_ref loc = NULL;
27179 if (! early_dwarf
27180 && (dwarf_version >= 5 || !dwarf_strict))
27181 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27182 if (loc)
27183 add_AT_loc (e->die, DW_AT_location, loc);
27184 else
27185 (*tmpl_value_parm_die_table)[j++] = *e;
27186 }
27187 }
27188 tmpl_value_parm_die_table->truncate (j);
27189 }
27190 }
27191
27192 /* Generate generic parameters DIEs for instances of generic types
27193 that have been previously scheduled by
27194 schedule_generic_params_dies_gen. This function must be called
27195 after all the types of the CU have been laid out. */
27196
27197 static void
27198 gen_scheduled_generic_parms_dies (void)
27199 {
27200 unsigned i;
27201 tree t;
27202
27203 if (!generic_type_instances)
27204 return;
27205
27206 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27207 if (COMPLETE_TYPE_P (t))
27208 gen_generic_params_dies (t);
27209
27210 generic_type_instances = NULL;
27211 }
27212
27213
27214 /* Replace DW_AT_name for the decl with name. */
27215
27216 static void
27217 dwarf2out_set_name (tree decl, tree name)
27218 {
27219 dw_die_ref die;
27220 dw_attr_node *attr;
27221 const char *dname;
27222
27223 die = TYPE_SYMTAB_DIE (decl);
27224 if (!die)
27225 return;
27226
27227 dname = dwarf2_name (name, 0);
27228 if (!dname)
27229 return;
27230
27231 attr = get_AT (die, DW_AT_name);
27232 if (attr)
27233 {
27234 struct indirect_string_node *node;
27235
27236 node = find_AT_string (dname);
27237 /* Replace the string. */
27238 attr->dw_attr_val.v.val_str = node;
27239 }
27240
27241 else
27242 add_name_attribute (die, dname);
27243 }
27244
27245 /* True if before or during processing of the first function being emitted. */
27246 static bool in_first_function_p = true;
27247 /* True if loc_note during dwarf2out_var_location call might still be
27248 before first real instruction at address equal to .Ltext0. */
27249 static bool maybe_at_text_label_p = true;
27250 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27251 static unsigned int first_loclabel_num_not_at_text_label;
27252
27253 /* Look ahead for a real insn, or for a begin stmt marker. */
27254
27255 static rtx_insn *
27256 dwarf2out_next_real_insn (rtx_insn *loc_note)
27257 {
27258 rtx_insn *next_real = NEXT_INSN (loc_note);
27259
27260 while (next_real)
27261 if (INSN_P (next_real))
27262 break;
27263 else
27264 next_real = NEXT_INSN (next_real);
27265
27266 return next_real;
27267 }
27268
27269 /* Called by the final INSN scan whenever we see a var location. We
27270 use it to drop labels in the right places, and throw the location in
27271 our lookup table. */
27272
27273 static void
27274 dwarf2out_var_location (rtx_insn *loc_note)
27275 {
27276 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27277 struct var_loc_node *newloc;
27278 rtx_insn *next_real, *next_note;
27279 rtx_insn *call_insn = NULL;
27280 static const char *last_label;
27281 static const char *last_postcall_label;
27282 static bool last_in_cold_section_p;
27283 static rtx_insn *expected_next_loc_note;
27284 tree decl;
27285 bool var_loc_p;
27286 var_loc_view view = 0;
27287
27288 if (!NOTE_P (loc_note))
27289 {
27290 if (CALL_P (loc_note))
27291 {
27292 maybe_reset_location_view (loc_note, cur_line_info_table);
27293 call_site_count++;
27294 if (SIBLING_CALL_P (loc_note))
27295 tail_call_site_count++;
27296 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27297 {
27298 call_insn = loc_note;
27299 loc_note = NULL;
27300 var_loc_p = false;
27301
27302 next_real = dwarf2out_next_real_insn (call_insn);
27303 next_note = NULL;
27304 cached_next_real_insn = NULL;
27305 goto create_label;
27306 }
27307 if (optimize == 0 && !flag_var_tracking)
27308 {
27309 /* When the var-tracking pass is not running, there is no note
27310 for indirect calls whose target is compile-time known. In this
27311 case, process such calls specifically so that we generate call
27312 sites for them anyway. */
27313 rtx x = PATTERN (loc_note);
27314 if (GET_CODE (x) == PARALLEL)
27315 x = XVECEXP (x, 0, 0);
27316 if (GET_CODE (x) == SET)
27317 x = SET_SRC (x);
27318 if (GET_CODE (x) == CALL)
27319 x = XEXP (x, 0);
27320 if (!MEM_P (x)
27321 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27322 || !SYMBOL_REF_DECL (XEXP (x, 0))
27323 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27324 != FUNCTION_DECL))
27325 {
27326 call_insn = loc_note;
27327 loc_note = NULL;
27328 var_loc_p = false;
27329
27330 next_real = dwarf2out_next_real_insn (call_insn);
27331 next_note = NULL;
27332 cached_next_real_insn = NULL;
27333 goto create_label;
27334 }
27335 }
27336 }
27337 else if (!debug_variable_location_views)
27338 gcc_unreachable ();
27339 else
27340 maybe_reset_location_view (loc_note, cur_line_info_table);
27341
27342 return;
27343 }
27344
27345 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27346 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27347 return;
27348
27349 /* Optimize processing a large consecutive sequence of location
27350 notes so we don't spend too much time in next_real_insn. If the
27351 next insn is another location note, remember the next_real_insn
27352 calculation for next time. */
27353 next_real = cached_next_real_insn;
27354 if (next_real)
27355 {
27356 if (expected_next_loc_note != loc_note)
27357 next_real = NULL;
27358 }
27359
27360 next_note = NEXT_INSN (loc_note);
27361 if (! next_note
27362 || next_note->deleted ()
27363 || ! NOTE_P (next_note)
27364 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27365 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27366 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27367 next_note = NULL;
27368
27369 if (! next_real)
27370 next_real = dwarf2out_next_real_insn (loc_note);
27371
27372 if (next_note)
27373 {
27374 expected_next_loc_note = next_note;
27375 cached_next_real_insn = next_real;
27376 }
27377 else
27378 cached_next_real_insn = NULL;
27379
27380 /* If there are no instructions which would be affected by this note,
27381 don't do anything. */
27382 if (var_loc_p
27383 && next_real == NULL_RTX
27384 && !NOTE_DURING_CALL_P (loc_note))
27385 return;
27386
27387 create_label:
27388
27389 if (next_real == NULL_RTX)
27390 next_real = get_last_insn ();
27391
27392 /* If there were any real insns between the note we processed last time
27393 and this note (or if it is the first note), clear
27394 last_{,postcall_}label so that they are not reused this time. */
27395 if (last_var_location_insn == NULL_RTX
27396 || last_var_location_insn != next_real
27397 || last_in_cold_section_p != in_cold_section_p)
27398 {
27399 last_label = NULL;
27400 last_postcall_label = NULL;
27401 }
27402
27403 if (var_loc_p)
27404 {
27405 const char *label
27406 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27407 view = cur_line_info_table->view;
27408 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27409 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27410 if (newloc == NULL)
27411 return;
27412 }
27413 else
27414 {
27415 decl = NULL_TREE;
27416 newloc = NULL;
27417 }
27418
27419 /* If there were no real insns between the note we processed last time
27420 and this note, reuse the label we emitted last time. Otherwise
27421 create a new label and emit it. */
27422 if (last_label == NULL)
27423 {
27424 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27425 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27426 loclabel_num++;
27427 last_label = ggc_strdup (loclabel);
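/* For illustration: on a typical ELF target the labels generated here
read ".LVL0", ".LVL1", ... (the exact spelling is target-defined via
ASM_GENERATE_INTERNAL_LABEL); they anchor the addresses used by the
location lists and call-site entries built below.  */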
27428 /* See if loclabel might be equal to .Ltext0. If yes,
27429 bump first_loclabel_num_not_at_text_label. */
27430 if (!have_multiple_function_sections
27431 && in_first_function_p
27432 && maybe_at_text_label_p)
27433 {
27434 static rtx_insn *last_start;
27435 rtx_insn *insn;
27436 for (insn = loc_note; insn; insn = previous_insn (insn))
27437 if (insn == last_start)
27438 break;
27439 else if (!NONDEBUG_INSN_P (insn))
27440 continue;
27441 else
27442 {
27443 rtx body = PATTERN (insn);
27444 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27445 continue;
27446 /* Inline asm could occupy zero bytes. */
27447 else if (GET_CODE (body) == ASM_INPUT
27448 || asm_noperands (body) >= 0)
27449 continue;
27450 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27451 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27452 continue;
27453 #endif
27454 else
27455 {
27456 /* Assume insn has non-zero length. */
27457 maybe_at_text_label_p = false;
27458 break;
27459 }
27460 }
27461 if (maybe_at_text_label_p)
27462 {
27463 last_start = loc_note;
27464 first_loclabel_num_not_at_text_label = loclabel_num;
27465 }
27466 }
27467 }
27468
27469 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27470 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27471
27472 if (!var_loc_p)
27473 {
27474 struct call_arg_loc_node *ca_loc
27475 = ggc_cleared_alloc<call_arg_loc_node> ();
27476 rtx_insn *prev = call_insn;
27477
27478 ca_loc->call_arg_loc_note
27479 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27480 ca_loc->next = NULL;
27481 ca_loc->label = last_label;
27482 gcc_assert (prev
27483 && (CALL_P (prev)
27484 || (NONJUMP_INSN_P (prev)
27485 && GET_CODE (PATTERN (prev)) == SEQUENCE
27486 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27487 if (!CALL_P (prev))
27488 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27489 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27490
27491 /* Look for a SYMBOL_REF in the "prev" instruction. */
27492 rtx x = get_call_rtx_from (PATTERN (prev));
27493 if (x)
27494 {
27495 /* Try to get the call symbol, if any. */
27496 if (MEM_P (XEXP (x, 0)))
27497 x = XEXP (x, 0);
27498 /* First, look for a memory access to a symbol_ref. */
27499 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27500 && SYMBOL_REF_DECL (XEXP (x, 0))
27501 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27502 ca_loc->symbol_ref = XEXP (x, 0);
27503 /* Otherwise, look at a compile-time known user-level function
27504 declaration. */
27505 else if (MEM_P (x)
27506 && MEM_EXPR (x)
27507 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27508 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27509 }
27510
27511 ca_loc->block = insn_scope (prev);
27512 if (call_arg_locations)
27513 call_arg_loc_last->next = ca_loc;
27514 else
27515 call_arg_locations = ca_loc;
27516 call_arg_loc_last = ca_loc;
27517 }
27518 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27519 {
27520 newloc->label = last_label;
27521 newloc->view = view;
27522 }
27523 else
27524 {
27525 if (!last_postcall_label)
27526 {
27527 sprintf (loclabel, "%s-1", last_label);
27528 last_postcall_label = ggc_strdup (loclabel);
27529 }
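/* Hypothetical example: if last_label is ".LVL5", the post-call label
becomes the assembler expression ".LVL5-1", one byte before the label
and therefore still inside the call insn, as discussed below.  */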
27530 newloc->label = last_postcall_label;
27531 /* ??? This view is at last_label, not last_label-1, but we
27532 could only assume view at last_label-1 is zero if we could
27533 assume calls always have length greater than one. This is
27534 probably true in general, though there might be a rare
27535 exception to this rule, e.g. if a call insn is optimized out
27536 by target magic. Then, even the -1 in the label will be
27537 wrong, which might invalidate the range. Anyway, using view,
27538 though technically possibly incorrect, will work as far as
27539 ranges go: since L-1 is in the middle of the call insn,
27540 (L-1).0 and (L-1).V shouldn't make any difference, and having
27541 the loclist entry refer to the .loc entry might be useful, so
27542 leave it like this. */
27543 newloc->view = view;
27544 }
27545
27546 if (var_loc_p && flag_debug_asm)
27547 {
27548 const char *name, *sep, *patstr;
27549 if (decl && DECL_NAME (decl))
27550 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27551 else
27552 name = "";
27553 if (NOTE_VAR_LOCATION_LOC (loc_note))
27554 {
27555 sep = " => ";
27556 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27557 }
27558 else
27559 {
27560 sep = " ";
27561 patstr = "RESET";
27562 }
27563 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27564 name, sep, patstr);
27565 }
27566
27567 last_var_location_insn = next_real;
27568 last_in_cold_section_p = in_cold_section_p;
27569 }
27570
27571 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27572 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27573 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27574 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27575 BLOCK_FRAGMENT_ORIGIN links. */
27576 static bool
27577 block_within_block_p (tree block, tree outer, bool bothways)
27578 {
27579 if (block == outer)
27580 return true;
27581
27582 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27583 for (tree context = BLOCK_SUPERCONTEXT (block);
27584 context != outer;
27585 context = BLOCK_SUPERCONTEXT (context))
27586 if (!context || TREE_CODE (context) != BLOCK)
27587 return false;
27588
27589 if (!bothways)
27590 return true;
27591
27592 /* Now check that each block is actually referenced by its
27593 parent. */
27594 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27595 context = BLOCK_SUPERCONTEXT (context))
27596 {
27597 if (BLOCK_FRAGMENT_ORIGIN (context))
27598 {
27599 gcc_assert (!BLOCK_SUBBLOCKS (context));
27600 context = BLOCK_FRAGMENT_ORIGIN (context);
27601 }
27602 for (tree sub = BLOCK_SUBBLOCKS (context);
27603 sub != block;
27604 sub = BLOCK_CHAIN (sub))
27605 if (!sub)
27606 return false;
27607 if (context == outer)
27608 return true;
27609 else
27610 block = context;
27611 }
27612 }
27613
27614 /* Called during final while assembling the marker of the entry point
27615 for an inlined function. */
27616
27617 static void
27618 dwarf2out_inline_entry (tree block)
27619 {
27620 gcc_assert (debug_inline_points);
27621
27622 /* If we can't represent it, don't bother. */
27623 if (!(dwarf_version >= 3 || !dwarf_strict))
27624 return;
27625
27626 gcc_assert (DECL_P (block_ultimate_origin (block)));
27627
27628 /* Sanity check the block tree. This would catch a case in which
27629 BLOCK got removed from the tree reachable from the outermost
27630 lexical block, but got retained in markers. It would still link
27631 back to its parents, but some ancestor would be missing a link
27632 down the path to the sub BLOCK. If the block got removed, its
27633 BLOCK_NUMBER will not be a usable value. */
27634 if (flag_checking)
27635 gcc_assert (block_within_block_p (block,
27636 DECL_INITIAL (current_function_decl),
27637 true));
27638
27639 gcc_assert (inlined_function_outer_scope_p (block));
27640 gcc_assert (!lookup_block_die (block));
27641
27642 if (BLOCK_FRAGMENT_ORIGIN (block))
27643 block = BLOCK_FRAGMENT_ORIGIN (block);
27644 /* Can the entry point ever not be at the beginning of an
27645 unfragmented lexical block? */
27646 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27647 || (cur_line_info_table
27648 && !ZERO_VIEW_P (cur_line_info_table->view))))
27649 return;
27650
27651 if (!inline_entry_data_table)
27652 inline_entry_data_table
27653 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27654
27655
27656 inline_entry_data **iedp
27657 = inline_entry_data_table->find_slot_with_hash (block,
27658 htab_hash_pointer (block),
27659 INSERT);
27660 if (*iedp)
27661 /* ??? Ideally, we'd record all entry points for the same inlined
27662 function (some may have been duplicated by e.g. unrolling), but
27663 we have no way to represent that ATM. */
27664 return;
27665
27666 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27667 ied->block = block;
27668 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27669 ied->label_num = BLOCK_NUMBER (block);
27670 if (cur_line_info_table)
27671 ied->view = cur_line_info_table->view;
27672
27673 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27674
27675 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27676 BLOCK_NUMBER (block));
27677 ASM_OUTPUT_LABEL (asm_out_file, label);
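/* For illustration, with a hypothetical block number of 7 and the usual
"LBI" prefix for BLOCK_INLINE_ENTRY_LABEL, the label emitted here reads
".LBI7" on ELF-style targets; the inline_entry_data recorded above is
what later lets the inlined-subroutine DIE carry an entry pc for it.  */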
27678 }
27679
27680 /* Called from finalize_size_functions for size functions so that their body
27681 can be encoded in the debug info to describe the layout of variable-length
27682 structures. */
27683
27684 static void
27685 dwarf2out_size_function (tree decl)
27686 {
27687 function_to_dwarf_procedure (decl);
27688 }
27689
27690 /* Note in one location list that the text section has changed. */
27691
27692 int
27693 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27694 {
27695 var_loc_list *list = *slot;
27696 if (list->first)
27697 list->last_before_switch
27698 = list->last->next ? list->last->next : list->last;
27699 return 1;
27700 }
27701
27702 /* Note in all location lists that the text section has changed. */
27703
27704 static void
27705 var_location_switch_text_section (void)
27706 {
27707 if (decl_loc_table == NULL)
27708 return;
27709
27710 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27711 }
27712
27713 /* Create a new line number table. */
27714
27715 static dw_line_info_table *
27716 new_line_info_table (void)
27717 {
27718 dw_line_info_table *table;
27719
27720 table = ggc_cleared_alloc<dw_line_info_table> ();
27721 table->file_num = 1;
27722 table->line_num = 1;
27723 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27724 FORCE_RESET_NEXT_VIEW (table->view);
27725 table->symviews_since_reset = 0;
27726
27727 return table;
27728 }
27729
27730 /* Lookup the "current" table into which we emit line info, so
27731 that we don't have to do it for every source line. */
27732
27733 static void
27734 set_cur_line_info_table (section *sec)
27735 {
27736 dw_line_info_table *table;
27737
27738 if (sec == text_section)
27739 table = text_section_line_info;
27740 else if (sec == cold_text_section)
27741 {
27742 table = cold_text_section_line_info;
27743 if (!table)
27744 {
27745 cold_text_section_line_info = table = new_line_info_table ();
27746 table->end_label = cold_end_label;
27747 }
27748 }
27749 else
27750 {
27751 const char *end_label;
27752
27753 if (crtl->has_bb_partition)
27754 {
27755 if (in_cold_section_p)
27756 end_label = crtl->subsections.cold_section_end_label;
27757 else
27758 end_label = crtl->subsections.hot_section_end_label;
27759 }
27760 else
27761 {
27762 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27763 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27764 current_function_funcdef_no);
27765 end_label = ggc_strdup (label);
27766 }
27767
27768 table = new_line_info_table ();
27769 table->end_label = end_label;
27770
27771 vec_safe_push (separate_line_info, table);
27772 }
27773
27774 if (output_asm_line_debug_info ())
27775 table->is_stmt = (cur_line_info_table
27776 ? cur_line_info_table->is_stmt
27777 : DWARF_LINE_DEFAULT_IS_STMT_START);
27778 cur_line_info_table = table;
27779 }
27780
27781
27782 /* We need to reset the locations at the beginning of each
27783 function. We can't do this in the end_function hook, because the
27784 declarations that use the locations won't have been output when
27785 that hook is called. Also compute have_multiple_function_sections here. */
27786
27787 static void
27788 dwarf2out_begin_function (tree fun)
27789 {
27790 section *sec = function_section (fun);
27791
27792 if (sec != text_section)
27793 have_multiple_function_sections = true;
27794
27795 if (crtl->has_bb_partition && !cold_text_section)
27796 {
27797 gcc_assert (current_function_decl == fun);
27798 cold_text_section = unlikely_text_section ();
27799 switch_to_section (cold_text_section);
27800 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27801 switch_to_section (sec);
27802 }
27803
27804 dwarf2out_note_section_used ();
27805 call_site_count = 0;
27806 tail_call_site_count = 0;
27807
27808 set_cur_line_info_table (sec);
27809 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27810 }
27811
27812 /* Helper function of dwarf2out_end_function, called only after emitting
27813 the very first function into assembly. Check if some .debug_loc range
27814 might end with a .LVL* label that could be equal to .Ltext0.
27815 In that case we must force using absolute addresses in .debug_loc ranges,
27816 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27817 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27818 list terminator.
27819 Set have_multiple_function_sections to true in that case and
27820 terminate htab traversal. */
27821
27822 int
27823 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27824 {
27825 var_loc_list *entry = *slot;
27826 struct var_loc_node *node;
27827
27828 node = entry->first;
27829 if (node && node->next && node->next->label)
27830 {
27831 unsigned int i;
27832 const char *label = node->next->label;
27833 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27834
27835 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27836 {
27837 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27838 if (strcmp (label, loclabel) == 0)
27839 {
27840 have_multiple_function_sections = true;
27841 return 0;
27842 }
27843 }
27844 }
27845 return 1;
27846 }
27847
27848 /* Hook called after emitting a function into assembly.
27849 This does something only for the very first function emitted. */
27850
27851 static void
27852 dwarf2out_end_function (unsigned int)
27853 {
27854 if (in_first_function_p
27855 && !have_multiple_function_sections
27856 && first_loclabel_num_not_at_text_label
27857 && decl_loc_table)
27858 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27859 in_first_function_p = false;
27860 maybe_at_text_label_p = false;
27861 }
27862
27863 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27864 front-ends register a translation unit even before dwarf2out_init is
27865 called. */
27866 static tree main_translation_unit = NULL_TREE;
27867
27868 /* Hook called by front-ends after they built their main translation unit.
27869 Associate comp_unit_die to UNIT. */
27870
27871 static void
27872 dwarf2out_register_main_translation_unit (tree unit)
27873 {
27874 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27875 && main_translation_unit == NULL_TREE);
27876 main_translation_unit = unit;
27877 /* If dwarf2out_init has not been called yet, it will perform the association
27878 itself looking at main_translation_unit. */
27879 if (decl_die_table != NULL)
27880 equate_decl_number_to_die (unit, comp_unit_die ());
27881 }
27882
27883 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27884
27885 static void
27886 push_dw_line_info_entry (dw_line_info_table *table,
27887 enum dw_line_info_opcode opcode, unsigned int val)
27888 {
27889 dw_line_info_entry e;
27890 e.opcode = opcode;
27891 e.val = val;
27892 vec_safe_push (table->entries, e);
27893 }
27894
27895 /* Output a label to mark the beginning of a source code line entry
27896 and record information relating to this source line, in
27897 'line_info_table' for later output of the .debug_line section. */
27898 /* ??? The discriminator parameter ought to be unsigned. */
27899
27900 static void
27901 dwarf2out_source_line (unsigned int line, unsigned int column,
27902 const char *filename,
27903 int discriminator, bool is_stmt)
27904 {
27905 unsigned int file_num;
27906 dw_line_info_table *table;
27907 static var_loc_view lvugid;
27908
27909 if (debug_info_level < DINFO_LEVEL_TERSE)
27910 return;
27911
27912 table = cur_line_info_table;
27913
27914 if (line == 0)
27915 {
27916 if (debug_variable_location_views
27917 && output_asm_line_debug_info ()
27918 && table && !RESETTING_VIEW_P (table->view))
27919 {
27920 /* If we're using the assembler to compute view numbers, we
27921 can't issue a .loc directive for line zero, so we can't
27922 get a view number at this point. We might attempt to
27923 compute it from the previous view, or equate it to a
27924 subsequent view (though it might not be there!), but
27925 since we're omitting the line number entry, we might as
27926 well omit the view number as well. That means pretending
27927 it's a view number zero, which might very well turn out
27928 to be correct. ??? Extend the assembler so that the
27929 compiler could emit e.g. ".locview .LVU#", to output a
27930 view without changing line number information. We'd then
27931 have to count it in symviews_since_reset; when it's omitted,
27932 it doesn't count. */
27933 if (!zero_view_p)
27934 zero_view_p = BITMAP_GGC_ALLOC ();
27935 bitmap_set_bit (zero_view_p, table->view);
27936 if (flag_debug_asm)
27937 {
27938 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27939 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27940 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27941 ASM_COMMENT_START);
27942 assemble_name (asm_out_file, label);
27943 putc ('\n', asm_out_file);
27944 }
27945 table->view = ++lvugid;
27946 }
27947 return;
27948 }
27949
27950 /* The discriminator column was added in dwarf4. Simplify the code
27951 below by removing it if we're not supposed to output it. */
27952 if (dwarf_version < 4 && dwarf_strict)
27953 discriminator = 0;
27954
27955 if (!debug_column_info)
27956 column = 0;
27957
27958 file_num = maybe_emit_file (lookup_filename (filename));
27959
27960 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27961 the debugger has used the second (possibly duplicate) line number
27962 at the beginning of the function to mark the end of the prologue.
27963 We could eliminate any other duplicates within the function. For
27964 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27965 that second line number entry. */
27966 /* Recall that this end-of-prologue indication is *not* the same thing
27967 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27968 to which the hook corresponds, follows the last insn that was
27969 emitted by gen_prologue. What we need is to precede the first insn
27970 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27971 insn that corresponds to something the user wrote. These may be
27972 very different locations once scheduling is enabled. */
27973
27974 if (0 && file_num == table->file_num
27975 && line == table->line_num
27976 && column == table->column_num
27977 && discriminator == table->discrim_num
27978 && is_stmt == table->is_stmt)
27979 return;
27980
27981 switch_to_section (current_function_section ());
27982
27983 /* If requested, emit something human-readable. */
27984 if (flag_debug_asm)
27985 {
27986 if (debug_column_info)
27987 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27988 filename, line, column);
27989 else
27990 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27991 filename, line);
27992 }
27993
27994 if (output_asm_line_debug_info ())
27995 {
27996 /* Emit the .loc directive understood by GNU as. */
27997 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27998 file_num, line, is_stmt, discriminator */
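/* A hypothetical example of the directive assembled piecemeal below,
assuming file 1, line 42, column 7, a statement boundary, discriminator
2 and symbolic view .LVU9:
.loc 1 42 7 is_stmt 1 discriminator 2 view .LVU9  */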
27999 fputs ("\t.loc ", asm_out_file);
28000 fprint_ul (asm_out_file, file_num);
28001 putc (' ', asm_out_file);
28002 fprint_ul (asm_out_file, line);
28003 putc (' ', asm_out_file);
28004 fprint_ul (asm_out_file, column);
28005
28006 if (is_stmt != table->is_stmt)
28007 {
28008 #if HAVE_GAS_LOC_STMT
28009 fputs (" is_stmt ", asm_out_file);
28010 putc (is_stmt ? '1' : '0', asm_out_file);
28011 #endif
28012 }
28013 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28014 {
28015 gcc_assert (discriminator > 0);
28016 fputs (" discriminator ", asm_out_file);
28017 fprint_ul (asm_out_file, (unsigned long) discriminator);
28018 }
28019 if (debug_variable_location_views)
28020 {
28021 if (!RESETTING_VIEW_P (table->view))
28022 {
28023 table->symviews_since_reset++;
28024 if (table->symviews_since_reset > symview_upper_bound)
28025 symview_upper_bound = table->symviews_since_reset;
28026 /* When we're using the assembler to compute view
28027 numbers, we output symbolic labels after "view" in
28028 .loc directives, and the assembler will set them for
28029 us, so that we can refer to the view numbers in
28030 location lists. The only exceptions are when we know
28031 a view will be zero: "-0" is a forced reset, used
28032 e.g. in the beginning of functions, whereas "0" tells
28033 the assembler to check that there was a PC change
28034 since the previous view, in a way that implicitly
28035 resets the next view. */
28036 fputs (" view ", asm_out_file);
28037 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28038 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28039 assemble_name (asm_out_file, label);
28040 table->view = ++lvugid;
28041 }
28042 else
28043 {
28044 table->symviews_since_reset = 0;
28045 if (FORCE_RESETTING_VIEW_P (table->view))
28046 fputs (" view -0", asm_out_file);
28047 else
28048 fputs (" view 0", asm_out_file);
28049 /* Mark the present view as a zero view. Earlier debug
28050 binds may have already added its id to loclists to be
28051 emitted later, so we can't reuse the id for something
28052 else. However, it's good to know whether a view is
28053 known to be zero, because then we may be able to
28054 optimize out locviews that are all zeros, so take
28055 note of it in zero_view_p. */
28056 if (!zero_view_p)
28057 zero_view_p = BITMAP_GGC_ALLOC ();
28058 bitmap_set_bit (zero_view_p, lvugid);
28059 table->view = ++lvugid;
28060 }
28061 }
28062 putc ('\n', asm_out_file);
28063 }
28064 else
28065 {
28066 unsigned int label_num = ++line_info_label_num;
28067
28068 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28069
28070 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28071 push_dw_line_info_entry (table, LI_adv_address, label_num);
28072 else
28073 push_dw_line_info_entry (table, LI_set_address, label_num);
28074 if (debug_variable_location_views)
28075 {
28076 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28077 if (resetting)
28078 table->view = 0;
28079
28080 if (flag_debug_asm)
28081 fprintf (asm_out_file, "\t%s view %s%d\n",
28082 ASM_COMMENT_START,
28083 resetting ? "-" : "",
28084 table->view);
28085
28086 table->view++;
28087 }
28088 if (file_num != table->file_num)
28089 push_dw_line_info_entry (table, LI_set_file, file_num);
28090 if (discriminator != table->discrim_num)
28091 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28092 if (is_stmt != table->is_stmt)
28093 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28094 push_dw_line_info_entry (table, LI_set_line, line);
28095 if (debug_column_info)
28096 push_dw_line_info_entry (table, LI_set_column, column);
28097 }
28098
28099 table->file_num = file_num;
28100 table->line_num = line;
28101 table->column_num = column;
28102 table->discrim_num = discriminator;
28103 table->is_stmt = is_stmt;
28104 table->in_use = true;
28105 }
28106
28107 /* Record the beginning of a new source file. */
28108
28109 static void
28110 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28111 {
28112 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28113 {
28114 macinfo_entry e;
28115 e.code = DW_MACINFO_start_file;
28116 e.lineno = lineno;
28117 e.info = ggc_strdup (filename);
28118 vec_safe_push (macinfo_table, e);
28119 }
28120 }
28121
28122 /* Record the end of a source file. */
28123
28124 static void
28125 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28126 {
28127 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28128 {
28129 macinfo_entry e;
28130 e.code = DW_MACINFO_end_file;
28131 e.lineno = lineno;
28132 e.info = NULL;
28133 vec_safe_push (macinfo_table, e);
28134 }
28135 }
28136
28137 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28138 the tail part of the directive line, i.e. the part which is past the
28139 initial whitespace, #, whitespace, directive-name, whitespace part. */
28140
28141 static void
28142 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28143 const char *buffer ATTRIBUTE_UNUSED)
28144 {
28145 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28146 {
28147 macinfo_entry e;
28148 /* Insert a dummy first entry to be able to optimize the whole
28149 predefined macro block using DW_MACRO_import. */
28150 if (macinfo_table->is_empty () && lineno <= 1)
28151 {
28152 e.code = 0;
28153 e.lineno = 0;
28154 e.info = NULL;
28155 vec_safe_push (macinfo_table, e);
28156 }
28157 e.code = DW_MACINFO_define;
28158 e.lineno = lineno;
28159 e.info = ggc_strdup (buffer);
28160 vec_safe_push (macinfo_table, e);
28161 }
28162 }
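/* Hypothetical example: for "#define FOO 1" seen on line 10, debug_define
passes BUFFER as "FOO 1", so the entry pushed above is effectively
{ DW_MACINFO_define, 10, "FOO 1" }, the macro-name-plus-definition string
form used for macro define entries.  */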
28163
28164 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28165 the tail part of the directive line, i.e. the part which is past the
28166 initial whitespace, #, whitespace, directive-name, whitespace part. */
28167
28168 static void
28169 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28170 const char *buffer ATTRIBUTE_UNUSED)
28171 {
28172 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28173 {
28174 macinfo_entry e;
28175 /* Insert a dummy first entry to be able to optimize the whole
28176 predefined macro block using DW_MACRO_import. */
28177 if (macinfo_table->is_empty () && lineno <= 1)
28178 {
28179 e.code = 0;
28180 e.lineno = 0;
28181 e.info = NULL;
28182 vec_safe_push (macinfo_table, e);
28183 }
28184 e.code = DW_MACINFO_undef;
28185 e.lineno = lineno;
28186 e.info = ggc_strdup (buffer);
28187 vec_safe_push (macinfo_table, e);
28188 }
28189 }
28190
28191 /* Helpers to manipulate the hash table of macinfo entries. */
28192
28193 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28194 {
28195 static inline hashval_t hash (const macinfo_entry *);
28196 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28197 };
28198
28199 inline hashval_t
28200 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28201 {
28202 return htab_hash_string (entry->info);
28203 }
28204
28205 inline bool
28206 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28207 const macinfo_entry *entry2)
28208 {
28209 return !strcmp (entry1->info, entry2->info);
28210 }
28211
28212 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28213
28214 /* Output a single .debug_macinfo entry. */
28215
28216 static void
28217 output_macinfo_op (macinfo_entry *ref)
28218 {
28219 int file_num;
28220 size_t len;
28221 struct indirect_string_node *node;
28222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28223 struct dwarf_file_data *fd;
28224
28225 switch (ref->code)
28226 {
28227 case DW_MACINFO_start_file:
28228 fd = lookup_filename (ref->info);
28229 file_num = maybe_emit_file (fd);
28230 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28231 dw2_asm_output_data_uleb128 (ref->lineno,
28232 "Included from line number %lu",
28233 (unsigned long) ref->lineno);
28234 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28235 break;
28236 case DW_MACINFO_end_file:
28237 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28238 break;
28239 case DW_MACINFO_define:
28240 case DW_MACINFO_undef:
28241 len = strlen (ref->info) + 1;
28242 if (!dwarf_strict
28243 && len > DWARF_OFFSET_SIZE
28244 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28245 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28246 {
28247 ref->code = ref->code == DW_MACINFO_define
28248 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28249 output_macinfo_op (ref);
28250 return;
28251 }
28252 dw2_asm_output_data (1, ref->code,
28253 ref->code == DW_MACINFO_define
28254 ? "Define macro" : "Undefine macro");
28255 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28256 (unsigned long) ref->lineno);
28257 dw2_asm_output_nstring (ref->info, -1, "The macro");
28258 break;
28259 case DW_MACRO_define_strp:
28260 case DW_MACRO_undef_strp:
28261 /* NB: dwarf2out_finish performs:
28262 1. save_macinfo_strings
28263 2. hash table traverse of index_string
28264 3. output_macinfo -> output_macinfo_op
28265 4. output_indirect_strings
28266 -> hash table traverse of output_index_string
28267
28268 When output_macinfo_op is called, all index strings have been
28269 added to hash table by save_macinfo_strings and we can't pass
28270 INSERT to find_slot_with_hash which may expand hash table, even
28271 if no insertion is needed, and change hash table traverse order
28272 between index_string and output_index_string. */
28273 node = find_AT_string (ref->info, NO_INSERT);
28274 gcc_assert (node
28275 && (node->form == DW_FORM_strp
28276 || node->form == dwarf_FORM (DW_FORM_strx)));
28277 dw2_asm_output_data (1, ref->code,
28278 ref->code == DW_MACRO_define_strp
28279 ? "Define macro strp"
28280 : "Undefine macro strp");
28281 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28282 (unsigned long) ref->lineno);
28283 if (node->form == DW_FORM_strp)
28284 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28285 debug_str_section, "The macro: \"%s\"",
28286 ref->info);
28287 else
28288 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28289 ref->info);
28290 break;
28291 case DW_MACRO_import:
28292 dw2_asm_output_data (1, ref->code, "Import");
28293 ASM_GENERATE_INTERNAL_LABEL (label,
28294 DEBUG_MACRO_SECTION_LABEL,
28295 ref->lineno + macinfo_label_base);
28296 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28297 break;
28298 default:
28299 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28300 ASM_COMMENT_START, (unsigned long) ref->code);
28301 break;
28302 }
28303 }
28304
28305 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28306 the .debug_macinfo sections of other compilation units. IDX is the
28307 index of the first define/undef; return the number of ops that should
28308 be emitted in a comdat .debug_macinfo section and emit
28309 a DW_MACRO_import entry referencing it.
28310 If the define/undef entries should be emitted normally, return 0. */
28311
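/* Because the comdat group name computed here is derived entirely from
the run's contents (the basename of the including file, the starting
line number and an md5 of the ops), identical runs produced by the same
header in other translation units get the same group name, and the
linker then keeps a single copy of the shared fragment.  */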
28312 static unsigned
28313 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28314 macinfo_hash_type **macinfo_htab)
28315 {
28316 macinfo_entry *first, *second, *cur, *inc;
28317 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28318 unsigned char checksum[16];
28319 struct md5_ctx ctx;
28320 char *grp_name, *tail;
28321 const char *base;
28322 unsigned int i, count, encoded_filename_len, linebuf_len;
28323 macinfo_entry **slot;
28324
28325 first = &(*macinfo_table)[idx];
28326 second = &(*macinfo_table)[idx + 1];
28327
28328 /* Optimize only if there are at least two consecutive define/undef ops,
28329 and either all of them are before first DW_MACINFO_start_file
28330 with lineno {0,1} (i.e. predefined macro block), or all of them are
28331 in some included header file. */
28332 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28333 return 0;
28334 if (vec_safe_is_empty (files))
28335 {
28336 if (first->lineno > 1 || second->lineno > 1)
28337 return 0;
28338 }
28339 else if (first->lineno == 0)
28340 return 0;
28341
28342 /* Find the last define/undef entry that can be grouped together
28343 with first and at the same time compute md5 checksum of their
28344 codes, linenumbers and strings. */
28345 md5_init_ctx (&ctx);
28346 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28347 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28348 break;
28349 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28350 break;
28351 else
28352 {
28353 unsigned char code = cur->code;
28354 md5_process_bytes (&code, 1, &ctx);
28355 checksum_uleb128 (cur->lineno, &ctx);
28356 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28357 }
28358 md5_finish_ctx (&ctx, checksum);
28359 count = i - idx;
28360
28361 /* From the containing include filename (if any) pick up just
28362 usable characters from its basename. */
28363 if (vec_safe_is_empty (files))
28364 base = "";
28365 else
28366 base = lbasename (files->last ().info);
28367 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28368 if (ISIDNUM (base[i]) || base[i] == '.')
28369 encoded_filename_len++;
28370 /* Count . at the end. */
28371 if (encoded_filename_len)
28372 encoded_filename_len++;
28373
28374 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28375 linebuf_len = strlen (linebuf);
28376
28377 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28378 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28379 + 16 * 2 + 1);
28380 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28381 tail = grp_name + 4;
28382 if (encoded_filename_len)
28383 {
28384 for (i = 0; base[i]; i++)
28385 if (ISIDNUM (base[i]) || base[i] == '.')
28386 *tail++ = base[i];
28387 *tail++ = '.';
28388 }
28389 memcpy (tail, linebuf, linebuf_len);
28390 tail += linebuf_len;
28391 *tail++ = '.';
28392 for (i = 0; i < 16; i++)
28393 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
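/* At this point grp_name might read, hypothetically, something like
"wm4.stdio.h.34.0f87c2d1..." (offset-size prefix, sanitized basename of
the including header, starting line number, md5 checksum of the run),
i.e. the format noted above.  */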
28394
28395 /* Construct a macinfo_entry for DW_MACRO_import
28396 in the empty vector entry before the first define/undef. */
28397 inc = &(*macinfo_table)[idx - 1];
28398 inc->code = DW_MACRO_import;
28399 inc->lineno = 0;
28400 inc->info = ggc_strdup (grp_name);
28401 if (!*macinfo_htab)
28402 *macinfo_htab = new macinfo_hash_type (10);
28403 /* Avoid emitting duplicates. */
28404 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28405 if (*slot != NULL)
28406 {
28407 inc->code = 0;
28408 inc->info = NULL;
28409 /* If such an entry has been used before, just emit
28410 a DW_MACRO_import op. */
28411 inc = *slot;
28412 output_macinfo_op (inc);
28413 /* And clear all macinfo_entry in the range to avoid emitting them
28414 in the second pass. */
28415 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28416 {
28417 cur->code = 0;
28418 cur->info = NULL;
28419 }
28420 }
28421 else
28422 {
28423 *slot = inc;
28424 inc->lineno = (*macinfo_htab)->elements ();
28425 output_macinfo_op (inc);
28426 }
28427 return count;
28428 }
28429
28430 /* Save any strings needed by the macinfo table in the debug str
28431 table. All strings must be collected into the table by the time
28432 index_string is called. */
28433
28434 static void
28435 save_macinfo_strings (void)
28436 {
28437 unsigned len;
28438 unsigned i;
28439 macinfo_entry *ref;
28440
28441 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28442 {
28443 switch (ref->code)
28444 {
28445 /* Match the logic in output_macinfo_op to decide on
28446 indirect strings. */
28447 case DW_MACINFO_define:
28448 case DW_MACINFO_undef:
28449 len = strlen (ref->info) + 1;
28450 if (!dwarf_strict
28451 && len > DWARF_OFFSET_SIZE
28452 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28453 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28454 set_indirect_string (find_AT_string (ref->info));
28455 break;
28456 case DW_MACINFO_start_file:
28457 /* -gsplit-dwarf -g3 will also output filename as indirect
28458 string. */
28459 if (!dwarf_split_debug_info)
28460 break;
28461 /* Fall through. */
28462 case DW_MACRO_define_strp:
28463 case DW_MACRO_undef_strp:
28464 set_indirect_string (find_AT_string (ref->info));
28465 break;
28466 default:
28467 break;
28468 }
28469 }
28470 }
28471
28472 /* Output macinfo section(s). */
28473
28474 static void
28475 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28476 {
28477 unsigned i;
28478 unsigned long length = vec_safe_length (macinfo_table);
28479 macinfo_entry *ref;
28480 vec<macinfo_entry, va_gc> *files = NULL;
28481 macinfo_hash_type *macinfo_htab = NULL;
28482 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28483
28484 if (! length)
28485 return;
28486
28487 /* output_macinfo* uses these interchangeably. */
28488 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28489 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28490 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28491 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28492
28493 /* AIX Assembler inserts the length, so adjust the reference to match the
28494 offset expected by debuggers. */
28495 strcpy (dl_section_ref, debug_line_label);
28496 if (XCOFF_DEBUGGING_INFO)
28497 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28498
28499 /* For .debug_macro emit the section header. */
28500 if (!dwarf_strict || dwarf_version >= 5)
28501 {
28502 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28503 "DWARF macro version number");
28504 if (DWARF_OFFSET_SIZE == 8)
28505 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28506 else
28507 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
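/* In this flags byte, bit 0 selects 64-bit section offsets and bit 1
says a debug_line offset follows, hence 3 (binary 11) and 2 (binary 10)
here versus 1 and 0 for the comdat sections emitted later, which carry
no line-table pointer.  */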
28508 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28509 debug_line_section, NULL);
28510 }
28511
28512 /* The first loop emits the primary .debug_macinfo section and clears
28513 each macinfo_entry once its op has been emitted.
28514 If a longer range of define/undef ops can be optimized using
28515 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28516 the vector entry before the first define/undef of the range, while
28517 the define/undef ops themselves are kept unemitted for the second pass. */
28518 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28519 {
28520 switch (ref->code)
28521 {
28522 case DW_MACINFO_start_file:
28523 vec_safe_push (files, *ref);
28524 break;
28525 case DW_MACINFO_end_file:
28526 if (!vec_safe_is_empty (files))
28527 files->pop ();
28528 break;
28529 case DW_MACINFO_define:
28530 case DW_MACINFO_undef:
28531 if ((!dwarf_strict || dwarf_version >= 5)
28532 && HAVE_COMDAT_GROUP
28533 && vec_safe_length (files) != 1
28534 && i > 0
28535 && i + 1 < length
28536 && (*macinfo_table)[i - 1].code == 0)
28537 {
28538 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28539 if (count)
28540 {
28541 i += count - 1;
28542 continue;
28543 }
28544 }
28545 break;
28546 case 0:
28547 /* A dummy entry may be inserted at the beginning to be able
28548 to optimize the whole block of predefined macros. */
28549 if (i == 0)
28550 continue;
28551 default:
28552 break;
28553 }
28554 output_macinfo_op (ref);
28555 ref->info = NULL;
28556 ref->code = 0;
28557 }
28558
28559 if (!macinfo_htab)
28560 return;
28561
28562 /* Save the number of transparent includes so we can adjust the
28563 label number for the fat LTO object DWARF. */
28564 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28565
28566 delete macinfo_htab;
28567 macinfo_htab = NULL;
28568
28569 /* If any DW_MACRO_import ops were used, then at each such entry
28570 terminate the current chain, switch to a new comdat .debug_macinfo
28571 section and emit the define/undef entries within it. */
28572 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28573 switch (ref->code)
28574 {
28575 case 0:
28576 continue;
28577 case DW_MACRO_import:
28578 {
28579 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28580 tree comdat_key = get_identifier (ref->info);
28581 /* Terminate the previous .debug_macinfo section. */
28582 dw2_asm_output_data (1, 0, "End compilation unit");
28583 targetm.asm_out.named_section (debug_macinfo_section_name,
28584 SECTION_DEBUG
28585 | SECTION_LINKONCE
28586 | (early_lto_debug
28587 ? SECTION_EXCLUDE : 0),
28588 comdat_key);
28589 ASM_GENERATE_INTERNAL_LABEL (label,
28590 DEBUG_MACRO_SECTION_LABEL,
28591 ref->lineno + macinfo_label_base);
28592 ASM_OUTPUT_LABEL (asm_out_file, label);
28593 ref->code = 0;
28594 ref->info = NULL;
28595 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28596 "DWARF macro version number");
28597 if (DWARF_OFFSET_SIZE == 8)
28598 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28599 else
28600 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28601 }
28602 break;
28603 case DW_MACINFO_define:
28604 case DW_MACINFO_undef:
28605 output_macinfo_op (ref);
28606 ref->code = 0;
28607 ref->info = NULL;
28608 break;
28609 default:
28610 gcc_unreachable ();
28611 }
28612
28613 macinfo_label_base += macinfo_label_base_adj;
28614 }
28615
28616 /* Initialize the various sections and labels for dwarf output, using the
28617 LTO early debug sections if EARLY_LTO_DEBUG. Returns the generation
28618 (zero based number of times the function was called). */
28619
28620 static unsigned
28621 init_sections_and_labels (bool early_lto_debug)
28622 {
28623 /* As we may get called multiple times, keep a generation count for
28624 labels. */
28625 static unsigned generation = 0;
28626
28627 if (early_lto_debug)
28628 {
28629 if (!dwarf_split_debug_info)
28630 {
28631 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28632 SECTION_DEBUG | SECTION_EXCLUDE,
28633 NULL);
28634 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28635 SECTION_DEBUG | SECTION_EXCLUDE,
28636 NULL);
28637 debug_macinfo_section_name
28638 = ((dwarf_strict && dwarf_version < 5)
28639 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28640 debug_macinfo_section = get_section (debug_macinfo_section_name,
28641 SECTION_DEBUG
28642 | SECTION_EXCLUDE, NULL);
28643 }
28644 else
28645 {
28646 /* ??? Which of the following do we need early? */
28647 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28648 SECTION_DEBUG | SECTION_EXCLUDE,
28649 NULL);
28650 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28651 SECTION_DEBUG | SECTION_EXCLUDE,
28652 NULL);
28653 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28654 SECTION_DEBUG
28655 | SECTION_EXCLUDE, NULL);
28656 debug_skeleton_abbrev_section
28657 = get_section (DEBUG_LTO_ABBREV_SECTION,
28658 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28659 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28660 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28661 generation);
28662
28663 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28664 stay in the main .o, but the skeleton_line goes into the split
28665 off dwo. */
28666 debug_skeleton_line_section
28667 = get_section (DEBUG_LTO_LINE_SECTION,
28668 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28669 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28670 DEBUG_SKELETON_LINE_SECTION_LABEL,
28671 generation);
28672 debug_str_offsets_section
28673 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28674 SECTION_DEBUG | SECTION_EXCLUDE,
28675 NULL);
28676 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28677 DEBUG_SKELETON_INFO_SECTION_LABEL,
28678 generation);
28679 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28680 DEBUG_STR_DWO_SECTION_FLAGS,
28681 NULL);
28682 debug_macinfo_section_name
28683 = ((dwarf_strict && dwarf_version < 5)
28684 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28685 debug_macinfo_section = get_section (debug_macinfo_section_name,
28686 SECTION_DEBUG | SECTION_EXCLUDE,
28687 NULL);
28688 }
28689 /* For macro info and the file table we have to refer to a
28690 debug_line section. */
28691 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28692 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28693 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28694 DEBUG_LINE_SECTION_LABEL, generation);
28695
28696 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28697 DEBUG_STR_SECTION_FLAGS
28698 | SECTION_EXCLUDE, NULL);
28699 if (!dwarf_split_debug_info)
28700 debug_line_str_section
28701 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28702 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28703 }
28704 else
28705 {
28706 if (!dwarf_split_debug_info)
28707 {
28708 debug_info_section = get_section (DEBUG_INFO_SECTION,
28709 SECTION_DEBUG, NULL);
28710 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28711 SECTION_DEBUG, NULL);
28712 debug_loc_section = get_section (dwarf_version >= 5
28713 ? DEBUG_LOCLISTS_SECTION
28714 : DEBUG_LOC_SECTION,
28715 SECTION_DEBUG, NULL);
28716 debug_macinfo_section_name
28717 = ((dwarf_strict && dwarf_version < 5)
28718 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28719 debug_macinfo_section = get_section (debug_macinfo_section_name,
28720 SECTION_DEBUG, NULL);
28721 }
28722 else
28723 {
28724 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28725 SECTION_DEBUG | SECTION_EXCLUDE,
28726 NULL);
28727 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28728 SECTION_DEBUG | SECTION_EXCLUDE,
28729 NULL);
28730 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28731 SECTION_DEBUG, NULL);
28732 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28733 SECTION_DEBUG, NULL);
28734 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28735 SECTION_DEBUG, NULL);
28736 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28737 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28738 generation);
28739
28740 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28741 stay in the main .o, but the skeleton_line goes into the
28742 split off dwo. */
28743 debug_skeleton_line_section
28744 = get_section (DEBUG_DWO_LINE_SECTION,
28745 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28746 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28747 DEBUG_SKELETON_LINE_SECTION_LABEL,
28748 generation);
28749 debug_str_offsets_section
28750 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28751 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28752 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28753 DEBUG_SKELETON_INFO_SECTION_LABEL,
28754 generation);
28755 debug_loc_section = get_section (dwarf_version >= 5
28756 ? DEBUG_DWO_LOCLISTS_SECTION
28757 : DEBUG_DWO_LOC_SECTION,
28758 SECTION_DEBUG | SECTION_EXCLUDE,
28759 NULL);
28760 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28761 DEBUG_STR_DWO_SECTION_FLAGS,
28762 NULL);
28763 debug_macinfo_section_name
28764 = ((dwarf_strict && dwarf_version < 5)
28765 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28766 debug_macinfo_section = get_section (debug_macinfo_section_name,
28767 SECTION_DEBUG | SECTION_EXCLUDE,
28768 NULL);
28769 }
28770 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28771 SECTION_DEBUG, NULL);
28772 debug_line_section = get_section (DEBUG_LINE_SECTION,
28773 SECTION_DEBUG, NULL);
28774 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28775 SECTION_DEBUG, NULL);
28776 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28777 SECTION_DEBUG, NULL);
28778 debug_str_section = get_section (DEBUG_STR_SECTION,
28779 DEBUG_STR_SECTION_FLAGS, NULL);
28780 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28781 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28782 DEBUG_STR_SECTION_FLAGS, NULL);
28783
28784 debug_ranges_section = get_section (dwarf_version >= 5
28785 ? DEBUG_RNGLISTS_SECTION
28786 : DEBUG_RANGES_SECTION,
28787 SECTION_DEBUG, NULL);
28788 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28789 SECTION_DEBUG, NULL);
28790 }
28791
28792 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28793 DEBUG_ABBREV_SECTION_LABEL, generation);
28794 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28795 DEBUG_INFO_SECTION_LABEL, generation);
28796 info_section_emitted = false;
28797 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28798 DEBUG_LINE_SECTION_LABEL, generation);
28799 /* There are up to 4 unique ranges labels per generation.
28800 See also output_rnglists. */
28801 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28802 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28803 if (dwarf_version >= 5 && dwarf_split_debug_info)
28804 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28805 DEBUG_RANGES_SECTION_LABEL,
28806 1 + generation * 4);
28807 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28808 DEBUG_ADDR_SECTION_LABEL, generation);
28809 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28810 (dwarf_strict && dwarf_version < 5)
28811 ? DEBUG_MACINFO_SECTION_LABEL
28812 : DEBUG_MACRO_SECTION_LABEL, generation);
28813 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28814 generation);
28815
28816 ++generation;
28817 return generation - 1;
28818 }
28819
28820 /* Set up for Dwarf output at the start of compilation. */
28821
28822 static void
28823 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28824 {
28825 /* Allocate the file_table. */
28826 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28827
28828 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28829 /* Allocate the decl_die_table. */
28830 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28831
28832 /* Allocate the decl_loc_table. */
28833 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28834
28835 /* Allocate the cached_dw_loc_list_table. */
28836 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28837
28838 /* Allocate the initial hunk of the abbrev_die_table. */
28839 vec_alloc (abbrev_die_table, 256);
28840 /* Zero-th entry is allocated, but unused. */
28841 abbrev_die_table->quick_push (NULL);
28842
28843 /* Allocate the dwarf_proc_stack_usage_map. */
28844 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28845
28846 /* Allocate the pubtypes and pubnames vectors. */
28847 vec_alloc (pubname_table, 32);
28848 vec_alloc (pubtype_table, 32);
28849
28850 vec_alloc (incomplete_types, 64);
28851
28852 vec_alloc (used_rtx_array, 32);
28853
28854 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28855 vec_alloc (macinfo_table, 64);
28856 #endif
28857
28858 /* If front-ends already registered a main translation unit but we were not
28859 ready to perform the association, do this now. */
28860 if (main_translation_unit != NULL_TREE)
28861 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28862 }
28863
28864 /* Called before compile () starts outputting functions, variables
28865 and toplevel asms into assembly. */
28866
28867 static void
28868 dwarf2out_assembly_start (void)
28869 {
28870 if (text_section_line_info)
28871 return;
28872
28873 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28874 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28875 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28876 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28877 COLD_TEXT_SECTION_LABEL, 0);
28878 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28879
28880 switch_to_section (text_section);
28881 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28882 #endif
28883
28884 /* Make sure the line number table for .text always exists. */
28885 text_section_line_info = new_line_info_table ();
28886 text_section_line_info->end_label = text_end_label;
28887
28888 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28889 cur_line_info_table = text_section_line_info;
28890 #endif
28891
28892 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28893 && dwarf2out_do_cfi_asm ()
28894 && !dwarf2out_do_eh_frame ())
28895 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
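/* The .cfi_sections directive makes the assembler emit the collected CFI
into .debug_frame instead of the default .eh_frame, which is what we
want when no exception-handling frame data is required.  */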
28896 }
28897
28898 /* A helper function for dwarf2out_finish called through
28899 htab_traverse. Assign a string its index. All strings must be
28900 collected into the table by the time index_string is called,
28901 because the indexing code relies on htab_traverse to traverse nodes
28902 in the same order for each run. */
28903
28904 int
28905 index_string (indirect_string_node **h, unsigned int *index)
28906 {
28907 indirect_string_node *node = *h;
28908
28909 find_string_form (node);
28910 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28911 {
28912 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28913 node->index = *index;
28914 *index += 1;
28915 }
28916 return 1;
28917 }
28918
28919 /* A helper function for output_indirect_strings called through
28920 htab_traverse. Output the offset to a string and update the
28921 current offset. */
28922
28923 int
28924 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28925 {
28926 indirect_string_node *node = *h;
28927
28928 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28929 {
28930 /* Assert that this node has been assigned an index. */
28931 gcc_assert (node->index != NO_INDEX_ASSIGNED
28932 && node->index != NOT_INDEXED);
28933 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28934 "indexed string 0x%x: %s", node->index, node->str);
28935 *offset += strlen (node->str) + 1;
28936 }
28937 return 1;
28938 }
28939
28940 /* A helper function for dwarf2out_finish called through
28941 htab_traverse. Output the indexed string. */
28942
28943 int
28944 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28945 {
28946 struct indirect_string_node *node = *h;
28947
28948 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28949 {
28950 /* Assert that the strings are output in the same order as their
28951 indexes were assigned. */
28952 gcc_assert (*cur_idx == node->index);
28953 assemble_string (node->str, strlen (node->str) + 1);
28954 *cur_idx += 1;
28955 }
28956 return 1;
28957 }
28958
28959 /* A helper function for output_indirect_strings. Counts the number
28960 of indexed string offsets. Must match the logic of the functions
28961 output_index_string[_offsets] above. */
28962 int
28963 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28964 {
28965 struct indirect_string_node *node = *h;
28966
28967 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28968 *last_idx += 1;
28969 return 1;
28970 }
28971
28972 /* A helper function for dwarf2out_finish called through
28973 htab_traverse. Emit one queued .debug_str string. */
28974
28975 int
28976 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28977 {
28978 struct indirect_string_node *node = *h;
28979
28980 node->form = find_string_form (node);
28981 if (node->form == form && node->refcount > 0)
28982 {
28983 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28984 assemble_string (node->str, strlen (node->str) + 1);
28985 }
28986
28987 return 1;
28988 }
28989
28990 /* Output the indexed string table. */
28991
28992 static void
28993 output_indirect_strings (void)
28994 {
28995 switch_to_section (debug_str_section);
28996 if (!dwarf_split_debug_info)
28997 debug_str_hash->traverse<enum dwarf_form,
28998 output_indirect_string> (DW_FORM_strp);
28999 else
29000 {
29001 unsigned int offset = 0;
29002 unsigned int cur_idx = 0;
29003
29004 if (skeleton_debug_str_hash)
29005 skeleton_debug_str_hash->traverse<enum dwarf_form,
29006 output_indirect_string> (DW_FORM_strp);
29007
29008 switch_to_section (debug_str_offsets_section);
29009 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29010 header. Note that we don't need to generate a label to the
29011 actual index table following the header here, because this is
29012 for the split dwarf case only. In a .dwo file there is only
29013 one string offsets table (and one debug info section). But
29014 if we were to start using string offset tables for the main (or
29015 skeleton) unit, we would have to add a DW_AT_str_offsets_base
29016 pointing to the actual index after the header. Split dwarf
29017 units will never have a string offsets base attribute. When
29018 a split unit is moved into a .dwp file the string offsets can
29019 be found through the .debug_cu_index section table. */
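/* A small worked example of the length computed below: with 32-bit
DWARF (DWARF_OFFSET_SIZE == 4) and, hypothetically, three indexed
strings, str_offsets_length is 3 * 4 + 4 == 16, the extra 4 bytes
covering the version and padding half-words that follow the initial
length field.  */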
29020 if (dwarf_version >= 5)
29021 {
29022 unsigned int last_idx = 0;
29023 unsigned long str_offsets_length;
29024
29025 debug_str_hash->traverse_noresize
29026 <unsigned int *, count_index_strings> (&last_idx);
29027 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
29028 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29029 dw2_asm_output_data (4, 0xffffffff,
29030 "Escape value for 64-bit DWARF extension");
29031 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
29032 "Length of string offsets unit");
29033 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29034 dw2_asm_output_data (2, 0, "Header zero padding");
29035 }
29036 debug_str_hash->traverse_noresize
29037 <unsigned int *, output_index_string_offset> (&offset);
29038 switch_to_section (debug_str_dwo_section);
29039 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29040 (&cur_idx);
29041 }
29042 }
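
/* Rough sketch of the .debug_str_offsets.dwo contribution emitted above,
   assuming 32-bit DWARF and N indexed strings:

     unit_length : 4 bytes, N * DWARF_OFFSET_SIZE + 4 (version + padding)
     version     : 2 bytes, value 5
     padding     : 2 bytes, value 0
     offsets     : N entries of DWARF_OFFSET_SIZE bytes into .debug_str.dwo

   With 64-bit DWARF the 0xffffffff escape and an 8-byte unit_length are
   emitted first, and DWARF_OFFSET_SIZE is 8.  */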
29043
29044 /* Callback for htab_traverse to assign an index to an entry in the
29045 table, and to write that entry to the .debug_addr section. */
29046
29047 int
29048 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29049 {
29050 addr_table_entry *entry = *slot;
29051
29052 if (entry->refcount == 0)
29053 {
29054 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29055 || entry->index == NOT_INDEXED);
29056 return 1;
29057 }
29058
29059 gcc_assert (entry->index == *cur_index);
29060 (*cur_index)++;
29061
29062 switch (entry->kind)
29063 {
29064 case ate_kind_rtx:
29065 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29066 "0x%x", entry->index);
29067 break;
29068 case ate_kind_rtx_dtprel:
29069 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29070 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29071 DWARF2_ADDR_SIZE,
29072 entry->addr.rtl);
29073 fputc ('\n', asm_out_file);
29074 break;
29075 case ate_kind_label:
29076 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29077 "0x%x", entry->index);
29078 break;
29079 default:
29080 gcc_unreachable ();
29081 }
29082 return 1;
29083 }
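
/* Sketch of how the entries emitted above are consumed: each one occupies
   DWARF2_ADDR_SIZE bytes in .debug_addr, and a DW_OP_addrx or
   DW_OP_GNU_addr_index operand is just a uleb128 index into this table;
   the consumer reads the address at the table base given by
   DW_AT_[GNU_]addr_base plus index * address size.  */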
29084
29085 /* A helper function for dwarf2out_finish. Counts the number
29086 of indexed addresses. Must match the logic of the function
29087 output_addr_table_entry above. */
29088 int
29089 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29090 {
29091 addr_table_entry *entry = *slot;
29092
29093 if (entry->refcount > 0)
29094 *last_idx += 1;
29095 return 1;
29096 }
29097
29098 /* Produce the .debug_addr section. */
29099
29100 static void
29101 output_addr_table (void)
29102 {
29103 unsigned int index = 0;
29104 if (addr_index_table == NULL || addr_index_table->size () == 0)
29105 return;
29106
29107 switch_to_section (debug_addr_section);
29108 addr_index_table
29109 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29110 }
29111
29112 #if ENABLE_ASSERT_CHECKING
29113 /* Verify that all marks are clear. */
29114
29115 static void
29116 verify_marks_clear (dw_die_ref die)
29117 {
29118 dw_die_ref c;
29119
29120 gcc_assert (! die->die_mark);
29121 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29122 }
29123 #endif /* ENABLE_ASSERT_CHECKING */
29124
29125 /* Clear the marks for a die and its children.
29126 Do nothing special if the mark isn't set. */
29127
29128 static void
29129 prune_unmark_dies (dw_die_ref die)
29130 {
29131 dw_die_ref c;
29132
29133 if (die->die_mark)
29134 die->die_mark = 0;
29135 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29136 }
29137
29138 /* Given LOC that is referenced by a DIE we're marking as used, find all
29139 DWARF procedures it references and mark them as used as well. */
29140
29141 static void
29142 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29143 {
29144 for (; loc != NULL; loc = loc->dw_loc_next)
29145 switch (loc->dw_loc_opc)
29146 {
29147 case DW_OP_implicit_pointer:
29148 case DW_OP_convert:
29149 case DW_OP_reinterpret:
29150 case DW_OP_GNU_implicit_pointer:
29151 case DW_OP_GNU_convert:
29152 case DW_OP_GNU_reinterpret:
29153 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29154 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29155 break;
29156 case DW_OP_GNU_variable_value:
29157 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29158 {
29159 dw_die_ref ref
29160 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29161 if (ref == NULL)
29162 break;
29163 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29164 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29165 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29166 }
29167 /* FALLTHRU */
29168 case DW_OP_call2:
29169 case DW_OP_call4:
29170 case DW_OP_call_ref:
29171 case DW_OP_const_type:
29172 case DW_OP_GNU_const_type:
29173 case DW_OP_GNU_parameter_ref:
29174 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29175 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29176 break;
29177 case DW_OP_regval_type:
29178 case DW_OP_deref_type:
29179 case DW_OP_GNU_regval_type:
29180 case DW_OP_GNU_deref_type:
29181 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29182 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29183 break;
29184 case DW_OP_entry_value:
29185 case DW_OP_GNU_entry_value:
29186 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29187 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29188 break;
29189 default:
29190 break;
29191 }
29192 }
29193
29194 /* Given DIE that we're marking as used, find any other dies
29195 it references as attributes and mark them as used. */
29196
29197 static void
29198 prune_unused_types_walk_attribs (dw_die_ref die)
29199 {
29200 dw_attr_node *a;
29201 unsigned ix;
29202
29203 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29204 {
29205 switch (AT_class (a))
29206 {
29207 /* Make sure DWARF procedures referenced by location descriptions will
29208 get emitted. */
29209 case dw_val_class_loc:
29210 prune_unused_types_walk_loc_descr (AT_loc (a));
29211 break;
29212 case dw_val_class_loc_list:
29213 for (dw_loc_list_ref list = AT_loc_list (a);
29214 list != NULL;
29215 list = list->dw_loc_next)
29216 prune_unused_types_walk_loc_descr (list->expr);
29217 break;
29218
29219 case dw_val_class_view_list:
29220 /* This points to a loc_list in another attribute, so it's
29221 already covered. */
29222 break;
29223
29224 case dw_val_class_die_ref:
29225 /* A reference to another DIE.
29226 Make sure that it will get emitted.
29227 If it was broken out into a comdat group, don't follow it. */
29228 if (! AT_ref (a)->comdat_type_p
29229 || a->dw_attr == DW_AT_specification)
29230 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29231 break;
29232
29233 case dw_val_class_str:
29234 /* Set the string's refcount to 0 so that prune_unused_types_mark
29235 accounts properly for it. */
29236 a->dw_attr_val.v.val_str->refcount = 0;
29237 break;
29238
29239 default:
29240 break;
29241 }
29242 }
29243 }
29244
29245 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29246
29247 static void
29248 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29249 {
29250 dw_die_ref c;
29251
29252 if (die == NULL || die->die_child == NULL)
29253 return;
29254 c = die->die_child;
29255 do
29256 {
29257 if (is_template_parameter (c))
29258 prune_unused_types_mark (c, 1);
29259 c = c->die_sib;
29260 } while (c && c != die->die_child);
29261 }
29262
29263 /* Mark DIE as being used. If DOKIDS is true, then walk down
29264 to DIE's children. */
29265
29266 static void
29267 prune_unused_types_mark (dw_die_ref die, int dokids)
29268 {
29269 dw_die_ref c;
29270
29271 if (die->die_mark == 0)
29272 {
29273 /* We haven't done this node yet. Mark it as used. */
29274 die->die_mark = 1;
29275 /* If this is the DIE of a generic type instantiation,
29276 mark the children DIEs that describe its generic parms and
29277 args. */
29278 prune_unused_types_mark_generic_parms_dies (die);
29279
29280 /* We also have to mark its parents as used.
29281 (But we don't want to mark our parent's kids due to this,
29282 unless it is a class.) */
29283 if (die->die_parent)
29284 prune_unused_types_mark (die->die_parent,
29285 class_scope_p (die->die_parent));
29286
29287 /* Mark any referenced nodes. */
29288 prune_unused_types_walk_attribs (die);
29289
29290 /* If this node is a specification,
29291 also mark the definition, if it exists. */
29292 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29293 prune_unused_types_mark (die->die_definition, 1);
29294 }
29295
29296 if (dokids && die->die_mark != 2)
29297 {
29298 /* We need to walk the children, but haven't done so yet.
29299 Remember that we've walked the kids. */
29300 die->die_mark = 2;
29301
29302 /* If this is an array type, we need to make sure our
29303 kids get marked, even if they're types. If we're
29304 breaking out types into comdat sections, do this
29305 for all type definitions. */
29306 if (die->die_tag == DW_TAG_array_type
29307 || (use_debug_types
29308 && is_type_die (die) && ! is_declaration_die (die)))
29309 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29310 else
29311 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29312 }
29313 }
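
/* Summary of the die_mark values used by the pruning pass above:
   0 - the DIE has not been marked as used yet;
   1 - the DIE is used, but its children have not been walked yet;
   2 - the DIE is used and its children have been walked as well.  */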
29314
29315 /* For local classes, check whether any static member functions were emitted
29316 and, if so, mark them. */
29317
29318 static void
29319 prune_unused_types_walk_local_classes (dw_die_ref die)
29320 {
29321 dw_die_ref c;
29322
29323 if (die->die_mark == 2)
29324 return;
29325
29326 switch (die->die_tag)
29327 {
29328 case DW_TAG_structure_type:
29329 case DW_TAG_union_type:
29330 case DW_TAG_class_type:
29331 case DW_TAG_interface_type:
29332 break;
29333
29334 case DW_TAG_subprogram:
29335 if (!get_AT_flag (die, DW_AT_declaration)
29336 || die->die_definition != NULL)
29337 prune_unused_types_mark (die, 1);
29338 return;
29339
29340 default:
29341 return;
29342 }
29343
29344 /* Mark children. */
29345 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29346 }
29347
29348 /* Walk the tree DIE and mark types that we actually use. */
29349
29350 static void
29351 prune_unused_types_walk (dw_die_ref die)
29352 {
29353 dw_die_ref c;
29354
29355 /* Don't do anything if this node is already marked and
29356 children have been marked as well. */
29357 if (die->die_mark == 2)
29358 return;
29359
29360 switch (die->die_tag)
29361 {
29362 case DW_TAG_structure_type:
29363 case DW_TAG_union_type:
29364 case DW_TAG_class_type:
29365 case DW_TAG_interface_type:
29366 if (die->die_perennial_p)
29367 break;
29368
29369 for (c = die->die_parent; c; c = c->die_parent)
29370 if (c->die_tag == DW_TAG_subprogram)
29371 break;
29372
29373 /* Finding used static member functions inside of classes
29374 is needed just for local classes, because for other classes
29375 static member function DIEs with DW_AT_specification
29376 are emitted outside of the DW_TAG_*_type. If we ever change
29377 it, we'd need to call this even for non-local classes. */
29378 if (c)
29379 prune_unused_types_walk_local_classes (die);
29380
29381 /* It's a type node --- don't mark it. */
29382 return;
29383
29384 case DW_TAG_const_type:
29385 case DW_TAG_packed_type:
29386 case DW_TAG_pointer_type:
29387 case DW_TAG_reference_type:
29388 case DW_TAG_rvalue_reference_type:
29389 case DW_TAG_volatile_type:
29390 case DW_TAG_typedef:
29391 case DW_TAG_array_type:
29392 case DW_TAG_friend:
29393 case DW_TAG_enumeration_type:
29394 case DW_TAG_subroutine_type:
29395 case DW_TAG_string_type:
29396 case DW_TAG_set_type:
29397 case DW_TAG_subrange_type:
29398 case DW_TAG_ptr_to_member_type:
29399 case DW_TAG_file_type:
29400 /* Type nodes are useful only when other DIEs reference them --- don't
29401 mark them. */
29402 /* FALLTHROUGH */
29403
29404 case DW_TAG_dwarf_procedure:
29405 /* Likewise for DWARF procedures. */
29406
29407 if (die->die_perennial_p)
29408 break;
29409
29410 return;
29411
29412 case DW_TAG_variable:
29413 if (flag_debug_only_used_symbols)
29414 {
29415 if (die->die_perennial_p)
29416 break;
29417
29418 /* premark_used_variables marks external variables --- don't mark
29419 them here. */
29420 if (get_AT (die, DW_AT_external))
29421 return;
29422 }
29423 /* FALLTHROUGH */
29424
29425 default:
29426 /* Mark everything else. */
29427 break;
29428 }
29429
29430 if (die->die_mark == 0)
29431 {
29432 die->die_mark = 1;
29433
29434 /* Now, mark any dies referenced from here. */
29435 prune_unused_types_walk_attribs (die);
29436 }
29437
29438 die->die_mark = 2;
29439
29440 /* Mark children. */
29441 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29442 }
29443
29444 /* Increment the string counts on strings referred to from DIE's
29445 attributes. */
29446
29447 static void
29448 prune_unused_types_update_strings (dw_die_ref die)
29449 {
29450 dw_attr_node *a;
29451 unsigned ix;
29452
29453 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29454 if (AT_class (a) == dw_val_class_str)
29455 {
29456 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29457 s->refcount++;
29458 /* Avoid unnecessarily putting strings that are used fewer than
29459 twice into the hash table. */
29460 if (s->refcount
29461 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29462 {
29463 indirect_string_node **slot
29464 = debug_str_hash->find_slot_with_hash (s->str,
29465 htab_hash_string (s->str),
29466 INSERT);
29467 gcc_assert (*slot == NULL);
29468 *slot = s;
29469 }
29470 }
29471 }
29472
29473 /* Mark DIE and its children as removed. */
29474
29475 static void
29476 mark_removed (dw_die_ref die)
29477 {
29478 dw_die_ref c;
29479 die->removed = true;
29480 FOR_EACH_CHILD (die, c, mark_removed (c));
29481 }
29482
29483 /* Remove from the tree DIE any dies that aren't marked. */
29484
29485 static void
29486 prune_unused_types_prune (dw_die_ref die)
29487 {
29488 dw_die_ref c;
29489
29490 gcc_assert (die->die_mark);
29491 prune_unused_types_update_strings (die);
29492
29493 if (! die->die_child)
29494 return;
29495
29496 c = die->die_child;
29497 do {
29498 dw_die_ref prev = c, next;
29499 for (c = c->die_sib; ! c->die_mark; c = next)
29500 if (c == die->die_child)
29501 {
29502 /* No marked children between 'prev' and the end of the list. */
29503 if (prev == c)
29504 /* No marked children at all. */
29505 die->die_child = NULL;
29506 else
29507 {
29508 prev->die_sib = c->die_sib;
29509 die->die_child = prev;
29510 }
29511 c->die_sib = NULL;
29512 mark_removed (c);
29513 return;
29514 }
29515 else
29516 {
29517 next = c->die_sib;
29518 c->die_sib = NULL;
29519 mark_removed (c);
29520 }
29521
29522 if (c != prev->die_sib)
29523 prev->die_sib = c;
29524 prune_unused_types_prune (c);
29525 } while (c != die->die_child);
29526 }
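
/* Note on the list manipulation above: the children of a DIE are kept on a
   circular singly-linked list of die_sib pointers in which die->die_child
   points at the last child, whose die_sib in turn points back at the first
   one.  The pruning loop therefore unlinks runs of unmarked children in
   place and may have to update die->die_child when the last child itself
   is removed.  */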
29527
29528 /* Remove dies representing declarations that we never use. */
29529
29530 static void
29531 prune_unused_types (void)
29532 {
29533 unsigned int i;
29534 limbo_die_node *node;
29535 comdat_type_node *ctnode;
29536 pubname_entry *pub;
29537 dw_die_ref base_type;
29538
29539 #if ENABLE_ASSERT_CHECKING
29540 /* All the marks should already be clear. */
29541 verify_marks_clear (comp_unit_die ());
29542 for (node = limbo_die_list; node; node = node->next)
29543 verify_marks_clear (node->die);
29544 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29545 verify_marks_clear (ctnode->root_die);
29546 #endif /* ENABLE_ASSERT_CHECKING */
29547
29548 /* Mark types that are used in global variables. */
29549 premark_types_used_by_global_vars ();
29550
29551 /* Mark variables used in the symtab. */
29552 if (flag_debug_only_used_symbols)
29553 premark_used_variables ();
29554
29555 /* Set the mark on nodes that are actually used. */
29556 prune_unused_types_walk (comp_unit_die ());
29557 for (node = limbo_die_list; node; node = node->next)
29558 prune_unused_types_walk (node->die);
29559 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29560 {
29561 prune_unused_types_walk (ctnode->root_die);
29562 prune_unused_types_mark (ctnode->type_die, 1);
29563 }
29564
29565 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29566 are unusual in that they are pubnames that are the children of pubtypes.
29567 They should only be marked via their parent DW_TAG_enumeration_type die,
29568 not as roots in themselves. */
29569 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29570 if (pub->die->die_tag != DW_TAG_enumerator)
29571 prune_unused_types_mark (pub->die, 1);
29572 for (i = 0; base_types.iterate (i, &base_type); i++)
29573 prune_unused_types_mark (base_type, 1);
29574
29575 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29576 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29577 callees). */
29578 cgraph_node *cnode;
29579 FOR_EACH_FUNCTION (cnode)
29580 if (cnode->referred_to_p (false))
29581 {
29582 dw_die_ref die = lookup_decl_die (cnode->decl);
29583 if (die == NULL || die->die_mark)
29584 continue;
29585 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29586 if (e->caller != cnode
29587 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29588 {
29589 prune_unused_types_mark (die, 1);
29590 break;
29591 }
29592 }
29593
29594 if (debug_str_hash)
29595 debug_str_hash->empty ();
29596 if (skeleton_debug_str_hash)
29597 skeleton_debug_str_hash->empty ();
29598 prune_unused_types_prune (comp_unit_die ());
29599 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29600 {
29601 node = *pnode;
29602 if (!node->die->die_mark)
29603 *pnode = node->next;
29604 else
29605 {
29606 prune_unused_types_prune (node->die);
29607 pnode = &node->next;
29608 }
29609 }
29610 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29611 prune_unused_types_prune (ctnode->root_die);
29612
29613 /* Leave the marks clear. */
29614 prune_unmark_dies (comp_unit_die ());
29615 for (node = limbo_die_list; node; node = node->next)
29616 prune_unmark_dies (node->die);
29617 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29618 prune_unmark_dies (ctnode->root_die);
29619 }
29620
29621 /* Helpers to manipulate hash table of comdat type units. */
29622
29623 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29624 {
29625 static inline hashval_t hash (const comdat_type_node *);
29626 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29627 };
29628
29629 inline hashval_t
29630 comdat_type_hasher::hash (const comdat_type_node *type_node)
29631 {
29632 hashval_t h;
29633 memcpy (&h, type_node->signature, sizeof (h));
29634 return h;
29635 }
29636
29637 inline bool
29638 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29639 const comdat_type_node *type_node_2)
29640 {
29641 return (! memcmp (type_node_1->signature, type_node_2->signature,
29642 DWARF_TYPE_SIGNATURE_SIZE));
29643 }
29644
29645 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29646 to the location it would have been added at, had we known its
29647 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29648 probably improve compactness of debug info, removing equivalent
29649 abbrevs, and hide any differences caused by deferring the
29650 computation of the assembler name, triggered by e.g. PCH. */
29651
29652 static inline void
29653 move_linkage_attr (dw_die_ref die)
29654 {
29655 unsigned ix = vec_safe_length (die->die_attr);
29656 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29657
29658 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29659 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29660
29661 while (--ix > 0)
29662 {
29663 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29664
29665 if (prev->dw_attr == DW_AT_decl_line
29666 || prev->dw_attr == DW_AT_decl_column
29667 || prev->dw_attr == DW_AT_name)
29668 break;
29669 }
29670
29671 if (ix != vec_safe_length (die->die_attr) - 1)
29672 {
29673 die->die_attr->pop ();
29674 die->die_attr->quick_insert (ix, linkage);
29675 }
29676 }
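
/* For illustration: if a subprogram DIE was first created without its
   assembler name and DW_AT_linkage_name was appended last, the move above
   re-inserts it right after DW_AT_name/DW_AT_decl_line/DW_AT_decl_column,
   so the attribute order matches DIEs whose linkage name was known up
   front and the two can share one abbreviation.  */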
29677
29678 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29679 referenced from typed stack ops and count how often they are used. */
29680
29681 static void
29682 mark_base_types (dw_loc_descr_ref loc)
29683 {
29684 dw_die_ref base_type = NULL;
29685
29686 for (; loc; loc = loc->dw_loc_next)
29687 {
29688 switch (loc->dw_loc_opc)
29689 {
29690 case DW_OP_regval_type:
29691 case DW_OP_deref_type:
29692 case DW_OP_GNU_regval_type:
29693 case DW_OP_GNU_deref_type:
29694 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29695 break;
29696 case DW_OP_convert:
29697 case DW_OP_reinterpret:
29698 case DW_OP_GNU_convert:
29699 case DW_OP_GNU_reinterpret:
29700 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29701 continue;
29702 /* FALLTHRU */
29703 case DW_OP_const_type:
29704 case DW_OP_GNU_const_type:
29705 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29706 break;
29707 case DW_OP_entry_value:
29708 case DW_OP_GNU_entry_value:
29709 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29710 continue;
29711 default:
29712 continue;
29713 }
29714 gcc_assert (base_type->die_parent == comp_unit_die ());
29715 if (base_type->die_mark)
29716 base_type->die_mark++;
29717 else
29718 {
29719 base_types.safe_push (base_type);
29720 base_type->die_mark = 1;
29721 }
29722 }
29723 }
29724
29725 /* Comparison function for sorting marked base types. */
29726
29727 static int
29728 base_type_cmp (const void *x, const void *y)
29729 {
29730 dw_die_ref dx = *(const dw_die_ref *) x;
29731 dw_die_ref dy = *(const dw_die_ref *) y;
29732 unsigned int byte_size1, byte_size2;
29733 unsigned int encoding1, encoding2;
29734 unsigned int align1, align2;
29735 if (dx->die_mark > dy->die_mark)
29736 return -1;
29737 if (dx->die_mark < dy->die_mark)
29738 return 1;
29739 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29740 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29741 if (byte_size1 < byte_size2)
29742 return 1;
29743 if (byte_size1 > byte_size2)
29744 return -1;
29745 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29746 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29747 if (encoding1 < encoding2)
29748 return 1;
29749 if (encoding1 > encoding2)
29750 return -1;
29751 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29752 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29753 if (align1 < align2)
29754 return 1;
29755 if (align1 > align2)
29756 return -1;
29757 return 0;
29758 }
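
/* The comparison above sorts primarily by decreasing die_mark (use count),
   so the most frequently referenced DW_TAG_base_type DIEs end up earliest
   in the CU and get the smallest DIE offsets, keeping the uleb128 operands
   of DW_OP_convert and friends short.  The byte size, encoding and
   alignment comparisons only break ties so the qsort result is
   deterministic.  */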
29759
29760 /* Move base types marked by mark_base_types as early as possible
29761 in the CU, sorted by decreasing usage count both to make the
29762 uleb128 references as small as possible and to make sure they
29763 will have die_offset already computed by calc_die_sizes when
29764 the sizes of typed stack loc ops are computed. */
29765
29766 static void
29767 move_marked_base_types (void)
29768 {
29769 unsigned int i;
29770 dw_die_ref base_type, die, c;
29771
29772 if (base_types.is_empty ())
29773 return;
29774
29775 /* Sort by decreasing usage count, they will be added again in that
29776 order later on. */
29777 base_types.qsort (base_type_cmp);
29778 die = comp_unit_die ();
29779 c = die->die_child;
29780 do
29781 {
29782 dw_die_ref prev = c;
29783 c = c->die_sib;
29784 while (c->die_mark)
29785 {
29786 remove_child_with_prev (c, prev);
29787 /* As base types got marked, there must be at least
29788 one node other than DW_TAG_base_type. */
29789 gcc_assert (die->die_child != NULL);
29790 c = prev->die_sib;
29791 }
29792 }
29793 while (c != die->die_child);
29794 gcc_assert (die->die_child);
29795 c = die->die_child;
29796 for (i = 0; base_types.iterate (i, &base_type); i++)
29797 {
29798 base_type->die_mark = 0;
29799 base_type->die_sib = c->die_sib;
29800 c->die_sib = base_type;
29801 c = base_type;
29802 }
29803 }
29804
29805 /* Helper function for resolve_addr: attempt to resolve
29806 one CONST_STRING and return true if successful. Similarly verify that
29807 SYMBOL_REFs refer to variables emitted in the current CU. */
29808
29809 static bool
29810 resolve_one_addr (rtx *addr)
29811 {
29812 rtx rtl = *addr;
29813
29814 if (GET_CODE (rtl) == CONST_STRING)
29815 {
29816 size_t len = strlen (XSTR (rtl, 0)) + 1;
29817 tree t = build_string (len, XSTR (rtl, 0));
29818 tree tlen = size_int (len - 1);
29819 TREE_TYPE (t)
29820 = build_array_type (char_type_node, build_index_type (tlen));
29821 rtl = lookup_constant_def (t);
29822 if (!rtl || !MEM_P (rtl))
29823 return false;
29824 rtl = XEXP (rtl, 0);
29825 if (GET_CODE (rtl) == SYMBOL_REF
29826 && SYMBOL_REF_DECL (rtl)
29827 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29828 return false;
29829 vec_safe_push (used_rtx_array, rtl);
29830 *addr = rtl;
29831 return true;
29832 }
29833
29834 if (GET_CODE (rtl) == SYMBOL_REF
29835 && SYMBOL_REF_DECL (rtl))
29836 {
29837 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29838 {
29839 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29840 return false;
29841 }
29842 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29843 return false;
29844 }
29845
29846 if (GET_CODE (rtl) == CONST)
29847 {
29848 subrtx_ptr_iterator::array_type array;
29849 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29850 if (!resolve_one_addr (*iter))
29851 return false;
29852 }
29853
29854 return true;
29855 }
29856
29857 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29858 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29859 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29860
29861 static rtx
29862 string_cst_pool_decl (tree t)
29863 {
29864 rtx rtl = output_constant_def (t, 1);
29865 unsigned char *array;
29866 dw_loc_descr_ref l;
29867 tree decl;
29868 size_t len;
29869 dw_die_ref ref;
29870
29871 if (!rtl || !MEM_P (rtl))
29872 return NULL_RTX;
29873 rtl = XEXP (rtl, 0);
29874 if (GET_CODE (rtl) != SYMBOL_REF
29875 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29876 return NULL_RTX;
29877
29878 decl = SYMBOL_REF_DECL (rtl);
29879 if (!lookup_decl_die (decl))
29880 {
29881 len = TREE_STRING_LENGTH (t);
29882 vec_safe_push (used_rtx_array, rtl);
29883 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29884 array = ggc_vec_alloc<unsigned char> (len);
29885 memcpy (array, TREE_STRING_POINTER (t), len);
29886 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29887 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29888 l->dw_loc_oprnd2.v.val_vec.length = len;
29889 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29890 l->dw_loc_oprnd2.v.val_vec.array = array;
29891 add_AT_loc (ref, DW_AT_location, l);
29892 equate_decl_number_to_die (decl, ref);
29893 }
29894 return rtl;
29895 }
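
/* The DW_TAG_dwarf_procedure created above carries a DW_AT_location of
   DW_OP_implicit_value <length> <string bytes>, i.e. the string contents
   themselves, so that a later DW_OP_implicit_pointer can refer to this
   DIE even though the literal lives in the constant pool rather than in
   a named variable.  */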
29896
29897 /* Helper function of resolve_addr_in_expr. LOC is
29898 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29899 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29900 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29901 with DW_OP_implicit_pointer if possible and return true;
29902 if unsuccessful, return false. */
29903
29904 static bool
29905 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29906 {
29907 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29908 HOST_WIDE_INT offset = 0;
29909 dw_die_ref ref = NULL;
29910 tree decl;
29911
29912 if (GET_CODE (rtl) == CONST
29913 && GET_CODE (XEXP (rtl, 0)) == PLUS
29914 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29915 {
29916 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29917 rtl = XEXP (XEXP (rtl, 0), 0);
29918 }
29919 if (GET_CODE (rtl) == CONST_STRING)
29920 {
29921 size_t len = strlen (XSTR (rtl, 0)) + 1;
29922 tree t = build_string (len, XSTR (rtl, 0));
29923 tree tlen = size_int (len - 1);
29924
29925 TREE_TYPE (t)
29926 = build_array_type (char_type_node, build_index_type (tlen));
29927 rtl = string_cst_pool_decl (t);
29928 if (!rtl)
29929 return false;
29930 }
29931 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29932 {
29933 decl = SYMBOL_REF_DECL (rtl);
29934 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29935 {
29936 ref = lookup_decl_die (decl);
29937 if (ref && (get_AT (ref, DW_AT_location)
29938 || get_AT (ref, DW_AT_const_value)))
29939 {
29940 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29941 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29942 loc->dw_loc_oprnd1.val_entry = NULL;
29943 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29944 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29945 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29946 loc->dw_loc_oprnd2.v.val_int = offset;
29947 return true;
29948 }
29949 }
29950 }
29951 return false;
29952 }
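
/* Example of the rewrite performed above (illustrative only):

     DW_OP_addr <&var + offset>  DW_OP_stack_value

   becomes

     DW_OP_implicit_pointer <DIE of var> <offset>

   provided var's DIE already has a DW_AT_location or DW_AT_const_value
   to point at.  */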
29953
29954 /* Helper function for resolve_addr: handle one location
29955 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29956 the location list couldn't be resolved. */
29957
29958 static bool
29959 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29960 {
29961 dw_loc_descr_ref keep = NULL;
29962 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29963 switch (loc->dw_loc_opc)
29964 {
29965 case DW_OP_addr:
29966 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29967 {
29968 if ((prev == NULL
29969 || prev->dw_loc_opc == DW_OP_piece
29970 || prev->dw_loc_opc == DW_OP_bit_piece)
29971 && loc->dw_loc_next
29972 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29973 && (!dwarf_strict || dwarf_version >= 5)
29974 && optimize_one_addr_into_implicit_ptr (loc))
29975 break;
29976 return false;
29977 }
29978 break;
29979 case DW_OP_GNU_addr_index:
29980 case DW_OP_addrx:
29981 case DW_OP_GNU_const_index:
29982 case DW_OP_constx:
29983 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29984 || loc->dw_loc_opc == DW_OP_addrx)
29985 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29986 || loc->dw_loc_opc == DW_OP_constx)
29987 && loc->dtprel))
29988 {
29989 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29990 if (!resolve_one_addr (&rtl))
29991 return false;
29992 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29993 loc->dw_loc_oprnd1.val_entry
29994 = add_addr_table_entry (rtl, ate_kind_rtx);
29995 }
29996 break;
29997 case DW_OP_const4u:
29998 case DW_OP_const8u:
29999 if (loc->dtprel
30000 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30001 return false;
30002 break;
30003 case DW_OP_plus_uconst:
30004 if (size_of_loc_descr (loc)
30005 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30006 + 1
30007 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30008 {
30009 dw_loc_descr_ref repl
30010 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30011 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30012 add_loc_descr (&repl, loc->dw_loc_next);
30013 *loc = *repl;
30014 }
30015 break;
30016 case DW_OP_implicit_value:
30017 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30018 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30019 return false;
30020 break;
30021 case DW_OP_implicit_pointer:
30022 case DW_OP_GNU_implicit_pointer:
30023 case DW_OP_GNU_parameter_ref:
30024 case DW_OP_GNU_variable_value:
30025 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30026 {
30027 dw_die_ref ref
30028 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30029 if (ref == NULL)
30030 return false;
30031 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30032 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30033 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30034 }
30035 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30036 {
30037 if (prev == NULL
30038 && loc->dw_loc_next == NULL
30039 && AT_class (a) == dw_val_class_loc)
30040 switch (a->dw_attr)
30041 {
30042 /* The following attributes allow both exprloc and reference
30043 forms, so if the whole expression is DW_OP_GNU_variable_value
30044 alone we can transform it into a reference. */
30045 case DW_AT_byte_size:
30046 case DW_AT_bit_size:
30047 case DW_AT_lower_bound:
30048 case DW_AT_upper_bound:
30049 case DW_AT_bit_stride:
30050 case DW_AT_count:
30051 case DW_AT_allocated:
30052 case DW_AT_associated:
30053 case DW_AT_byte_stride:
30054 a->dw_attr_val.val_class = dw_val_class_die_ref;
30055 a->dw_attr_val.val_entry = NULL;
30056 a->dw_attr_val.v.val_die_ref.die
30057 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30058 a->dw_attr_val.v.val_die_ref.external = 0;
30059 return true;
30060 default:
30061 break;
30062 }
30063 if (dwarf_strict)
30064 return false;
30065 }
30066 break;
30067 case DW_OP_const_type:
30068 case DW_OP_regval_type:
30069 case DW_OP_deref_type:
30070 case DW_OP_convert:
30071 case DW_OP_reinterpret:
30072 case DW_OP_GNU_const_type:
30073 case DW_OP_GNU_regval_type:
30074 case DW_OP_GNU_deref_type:
30075 case DW_OP_GNU_convert:
30076 case DW_OP_GNU_reinterpret:
30077 while (loc->dw_loc_next
30078 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30079 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30080 {
30081 dw_die_ref base1, base2;
30082 unsigned enc1, enc2, size1, size2;
30083 if (loc->dw_loc_opc == DW_OP_regval_type
30084 || loc->dw_loc_opc == DW_OP_deref_type
30085 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30086 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30087 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30088 else if (loc->dw_loc_oprnd1.val_class
30089 == dw_val_class_unsigned_const)
30090 break;
30091 else
30092 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30093 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30094 == dw_val_class_unsigned_const)
30095 break;
30096 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30097 gcc_assert (base1->die_tag == DW_TAG_base_type
30098 && base2->die_tag == DW_TAG_base_type);
30099 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30100 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30101 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30102 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30103 if (size1 == size2
30104 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30105 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30106 && loc != keep)
30107 || enc1 == enc2))
30108 {
30109 /* Optimize away next DW_OP_convert after
30110 adjusting LOC's base type die reference. */
30111 if (loc->dw_loc_opc == DW_OP_regval_type
30112 || loc->dw_loc_opc == DW_OP_deref_type
30113 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30114 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30115 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30116 else
30117 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30118 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30119 continue;
30120 }
30121 /* Don't change integer DW_OP_convert after e.g. floating
30122 point typed stack entry. */
30123 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30124 keep = loc->dw_loc_next;
30125 break;
30126 }
30127 break;
30128 default:
30129 break;
30130 }
30131 return true;
30132 }
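
/* Example of the DW_OP_convert chain optimization above (illustrative):
   for a pair like

     DW_OP_convert <unsigned 4-byte base type>
     DW_OP_convert <signed 4-byte base type>

   the intermediate conversion is dropped and the first op is retargeted
   at the second base type.  The KEEP guard avoids doing this for an
   integer conversion that directly follows e.g. a floating point typed
   stack entry.  */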
30133
30134 /* Helper function of resolve_addr. DIE had a DW_AT_location of
30135 DW_OP_addr alone, whose operand referred to DECL, and the
30136 DW_OP_addr couldn't be resolved. resolve_addr has already
30137 removed the DW_AT_location attribute. This function attempts to
30138 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
30139 or a DW_AT_const_value attribute, if possible. */
30140
30141 static void
30142 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30143 {
30144 if (!VAR_P (decl)
30145 || lookup_decl_die (decl) != die
30146 || DECL_EXTERNAL (decl)
30147 || !TREE_STATIC (decl)
30148 || DECL_INITIAL (decl) == NULL_TREE
30149 || DECL_P (DECL_INITIAL (decl))
30150 || get_AT (die, DW_AT_const_value))
30151 return;
30152
30153 tree init = DECL_INITIAL (decl);
30154 HOST_WIDE_INT offset = 0;
30155 /* For variables that have been optimized away and thus
30156 don't have a memory location, see if we can emit
30157 DW_AT_const_value instead. */
30158 if (tree_add_const_value_attribute (die, init))
30159 return;
30160 if (dwarf_strict && dwarf_version < 5)
30161 return;
30162 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30163 and ADDR_EXPR refers to a decl that has DW_AT_location or
30164 DW_AT_const_value (but isn't addressable, otherwise
30165 resolving the original DW_OP_addr wouldn't fail), see if
30166 we can add DW_OP_implicit_pointer. */
30167 STRIP_NOPS (init);
30168 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30169 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30170 {
30171 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30172 init = TREE_OPERAND (init, 0);
30173 STRIP_NOPS (init);
30174 }
30175 if (TREE_CODE (init) != ADDR_EXPR)
30176 return;
30177 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30178 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30179 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30180 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30181 && TREE_OPERAND (init, 0) != decl))
30182 {
30183 dw_die_ref ref;
30184 dw_loc_descr_ref l;
30185
30186 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30187 {
30188 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30189 if (!rtl)
30190 return;
30191 decl = SYMBOL_REF_DECL (rtl);
30192 }
30193 else
30194 decl = TREE_OPERAND (init, 0);
30195 ref = lookup_decl_die (decl);
30196 if (ref == NULL
30197 || (!get_AT (ref, DW_AT_location)
30198 && !get_AT (ref, DW_AT_const_value)))
30199 return;
30200 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30201 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30202 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30203 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30204 add_AT_loc (die, DW_AT_location, l);
30205 }
30206 }
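
/* Illustrative example (hypothetical declarations):

     static int x = 17;
     static int *p = &x;   -- p itself optimized away

   If p's address can't be resolved, its DIE can still get a
   DW_AT_location of DW_OP_implicit_pointer <DIE of x> <0>, or, for an
   initializer that is a plain constant, a DW_AT_const_value instead.  */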
30207
30208 /* Return NULL if L is a valid DWARF expression, or the first op that is
30209 not a valid DWARF expression. */
30210
30211 static dw_loc_descr_ref
30212 non_dwarf_expression (dw_loc_descr_ref l)
30213 {
30214 while (l)
30215 {
30216 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30217 return l;
30218 switch (l->dw_loc_opc)
30219 {
30220 case DW_OP_regx:
30221 case DW_OP_implicit_value:
30222 case DW_OP_stack_value:
30223 case DW_OP_implicit_pointer:
30224 case DW_OP_GNU_implicit_pointer:
30225 case DW_OP_GNU_parameter_ref:
30226 case DW_OP_piece:
30227 case DW_OP_bit_piece:
30228 return l;
30229 default:
30230 break;
30231 }
30232 l = l->dw_loc_next;
30233 }
30234 return NULL;
30235 }
30236
30237 /* Return an adjusted copy of EXPR:
30238 If it is an empty DWARF expression, return it.
30239 If it is a valid non-empty DWARF expression,
30240 return a copy of EXPR with DW_OP_deref appended to it.
30241 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30242 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30243 If it is a DWARF expression followed by DW_OP_stack_value, return a
30244 copy of the DWARF expression without anything appended.
30245 Otherwise, return NULL. */
30246
30247 static dw_loc_descr_ref
30248 copy_deref_exprloc (dw_loc_descr_ref expr)
30249 {
30250 dw_loc_descr_ref tail = NULL;
30251
30252 if (expr == NULL)
30253 return NULL;
30254
30255 dw_loc_descr_ref l = non_dwarf_expression (expr);
30256 if (l && l->dw_loc_next)
30257 return NULL;
30258
30259 if (l)
30260 {
30261 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30262 tail = new_loc_descr ((enum dwarf_location_atom)
30263 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30264 0, 0);
30265 else
30266 switch (l->dw_loc_opc)
30267 {
30268 case DW_OP_regx:
30269 tail = new_loc_descr (DW_OP_bregx,
30270 l->dw_loc_oprnd1.v.val_unsigned, 0);
30271 break;
30272 case DW_OP_stack_value:
30273 break;
30274 default:
30275 return NULL;
30276 }
30277 }
30278 else
30279 tail = new_loc_descr (DW_OP_deref, 0, 0);
30280
30281 dw_loc_descr_ref ret = NULL, *p = &ret;
30282 while (expr != l)
30283 {
30284 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30285 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30286 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30287 p = &(*p)->dw_loc_next;
30288 expr = expr->dw_loc_next;
30289 }
30290 *p = tail;
30291 return ret;
30292 }
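
/* Examples of the adjustment above (illustrative): DW_OP_fbreg <-16>
   becomes DW_OP_fbreg <-16> DW_OP_deref; an expression ending in
   DW_OP_reg3 has that op replaced by DW_OP_breg3 <0>; and an expression
   ending in DW_OP_stack_value has the DW_OP_stack_value dropped, because
   the value it computed is itself the address to dereference.  */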
30293
30294 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30295 reference to a variable or argument, adjust it if needed and return:
30296 -1 if the DW_AT_string_length attribute (and any DW_AT_{string_length_,}byte_size
30297 attribute present) should be removed,
30298 0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30299 1 if the attribute has been successfully adjusted. */
30300
30301 static int
30302 optimize_string_length (dw_attr_node *a)
30303 {
30304 dw_loc_descr_ref l = AT_loc (a), lv;
30305 dw_die_ref die;
30306 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30307 {
30308 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30309 die = lookup_decl_die (decl);
30310 if (die)
30311 {
30312 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30313 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30314 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30315 }
30316 else
30317 return -1;
30318 }
30319 else
30320 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30321
30322 /* DWARF5 allows reference class, so we can then reference the DIE.
30323 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30324 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30325 {
30326 a->dw_attr_val.val_class = dw_val_class_die_ref;
30327 a->dw_attr_val.val_entry = NULL;
30328 a->dw_attr_val.v.val_die_ref.die = die;
30329 a->dw_attr_val.v.val_die_ref.external = 0;
30330 return 0;
30331 }
30332
30333 dw_attr_node *av = get_AT (die, DW_AT_location);
30334 dw_loc_list_ref d;
30335 bool non_dwarf_expr = false;
30336
30337 if (av == NULL)
30338 return dwarf_strict ? -1 : 0;
30339 switch (AT_class (av))
30340 {
30341 case dw_val_class_loc_list:
30342 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30343 if (d->expr && non_dwarf_expression (d->expr))
30344 non_dwarf_expr = true;
30345 break;
30346 case dw_val_class_view_list:
30347 gcc_unreachable ();
30348 case dw_val_class_loc:
30349 lv = AT_loc (av);
30350 if (lv == NULL)
30351 return dwarf_strict ? -1 : 0;
30352 if (non_dwarf_expression (lv))
30353 non_dwarf_expr = true;
30354 break;
30355 default:
30356 return dwarf_strict ? -1 : 0;
30357 }
30358
30359 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30360 into DW_OP_call4 or DW_OP_GNU_variable_value into
30361 DW_OP_call4 DW_OP_deref, do so. */
30362 if (!non_dwarf_expr
30363 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30364 {
30365 l->dw_loc_opc = DW_OP_call4;
30366 if (l->dw_loc_next)
30367 l->dw_loc_next = NULL;
30368 else
30369 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30370 return 0;
30371 }
30372
30373 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30374 copy over the DW_AT_location attribute from die to a. */
30375 if (l->dw_loc_next != NULL)
30376 {
30377 a->dw_attr_val = av->dw_attr_val;
30378 return 1;
30379 }
30380
30381 dw_loc_list_ref list, *p;
30382 switch (AT_class (av))
30383 {
30384 case dw_val_class_loc_list:
30385 p = &list;
30386 list = NULL;
30387 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30388 {
30389 lv = copy_deref_exprloc (d->expr);
30390 if (lv)
30391 {
30392 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30393 p = &(*p)->dw_loc_next;
30394 }
30395 else if (!dwarf_strict && d->expr)
30396 return 0;
30397 }
30398 if (list == NULL)
30399 return dwarf_strict ? -1 : 0;
30400 a->dw_attr_val.val_class = dw_val_class_loc_list;
30401 gen_llsym (list);
30402 *AT_loc_list_ptr (a) = list;
30403 return 1;
30404 case dw_val_class_loc:
30405 lv = copy_deref_exprloc (AT_loc (av));
30406 if (lv == NULL)
30407 return dwarf_strict ? -1 : 0;
30408 a->dw_attr_val.v.val_loc = lv;
30409 return 1;
30410 default:
30411 gcc_unreachable ();
30412 }
30413 }
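
/* Illustrative transformations performed above for DW_AT_string_length
   (typically the length of a Fortran character object kept in an
   artificial variable):

     DW_OP_GNU_variable_value <DIE> DW_OP_stack_value -> DW_OP_call4 <DIE>
     DW_OP_GNU_variable_value <DIE> -> DW_OP_call4 <DIE> DW_OP_deref

   when the referenced DIE's location is a plain DWARF expression; for
   DWARF 5 the whole attribute may instead become a direct DIE reference.  */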
30414
30415 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30416 an address in .rodata section if the string literal is emitted there,
30417 or remove the containing location list or replace DW_AT_const_value
30418 with DW_AT_location and empty location expression, if it isn't found
30419 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30420 to something that has been emitted in the current CU. */
30421
30422 static void
30423 resolve_addr (dw_die_ref die)
30424 {
30425 dw_die_ref c;
30426 dw_attr_node *a;
30427 dw_loc_list_ref *curr, *start, loc;
30428 unsigned ix;
30429 bool remove_AT_byte_size = false;
30430
30431 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30432 switch (AT_class (a))
30433 {
30434 case dw_val_class_loc_list:
30435 start = curr = AT_loc_list_ptr (a);
30436 loc = *curr;
30437 gcc_assert (loc);
30438 /* The same list can be referenced more than once. See if we have
30439 already recorded the result from a previous pass. */
30440 if (loc->replaced)
30441 *curr = loc->dw_loc_next;
30442 else if (!loc->resolved_addr)
30443 {
30444 /* As things stand, we do not expect or allow one die to
30445 reference a suffix of another die's location list chain.
30446 References must be identical or completely separate.
30447 There is therefore no need to cache the result of this
30448 pass on any list other than the first; doing so
30449 would lead to unnecessary writes. */
30450 while (*curr)
30451 {
30452 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30453 if (!resolve_addr_in_expr (a, (*curr)->expr))
30454 {
30455 dw_loc_list_ref next = (*curr)->dw_loc_next;
30456 dw_loc_descr_ref l = (*curr)->expr;
30457
30458 if (next && (*curr)->ll_symbol)
30459 {
30460 gcc_assert (!next->ll_symbol);
30461 next->ll_symbol = (*curr)->ll_symbol;
30462 next->vl_symbol = (*curr)->vl_symbol;
30463 }
30464 if (dwarf_split_debug_info)
30465 remove_loc_list_addr_table_entries (l);
30466 *curr = next;
30467 }
30468 else
30469 {
30470 mark_base_types ((*curr)->expr);
30471 curr = &(*curr)->dw_loc_next;
30472 }
30473 }
30474 if (loc == *start)
30475 loc->resolved_addr = 1;
30476 else
30477 {
30478 loc->replaced = 1;
30479 loc->dw_loc_next = *start;
30480 }
30481 }
30482 if (!*start)
30483 {
30484 remove_AT (die, a->dw_attr);
30485 ix--;
30486 }
30487 break;
30488 case dw_val_class_view_list:
30489 {
30490 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30491 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30492 dw_val_node *llnode
30493 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30494 /* If we no longer have a loclist, or it no longer needs
30495 views, drop this attribute. */
30496 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30497 {
30498 remove_AT (die, a->dw_attr);
30499 ix--;
30500 }
30501 break;
30502 }
30503 case dw_val_class_loc:
30504 {
30505 dw_loc_descr_ref l = AT_loc (a);
30506 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30507 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30508 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30509 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30510 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30511 with DW_FORM_ref referencing the same DIE as
30512 DW_OP_GNU_variable_value used to reference. */
30513 if (a->dw_attr == DW_AT_string_length
30514 && l
30515 && l->dw_loc_opc == DW_OP_GNU_variable_value
30516 && (l->dw_loc_next == NULL
30517 || (l->dw_loc_next->dw_loc_next == NULL
30518 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30519 {
30520 switch (optimize_string_length (a))
30521 {
30522 case -1:
30523 remove_AT (die, a->dw_attr);
30524 ix--;
30525 /* If we drop DW_AT_string_length, we need to drop also
30526 DW_AT_{string_length_,}byte_size. */
30527 remove_AT_byte_size = true;
30528 continue;
30529 default:
30530 break;
30531 case 1:
30532 /* Even if we keep the optimized DW_AT_string_length,
30533 it might have changed AT_class, so process it again. */
30534 ix--;
30535 continue;
30536 }
30537 }
30538 /* For -gdwarf-2 don't attempt to optimize
30539 DW_AT_data_member_location containing
30540 DW_OP_plus_uconst - older consumers might
30541 rely on it being that op instead of a more complex,
30542 but shorter, location description. */
30543 if ((dwarf_version > 2
30544 || a->dw_attr != DW_AT_data_member_location
30545 || l == NULL
30546 || l->dw_loc_opc != DW_OP_plus_uconst
30547 || l->dw_loc_next != NULL)
30548 && !resolve_addr_in_expr (a, l))
30549 {
30550 if (dwarf_split_debug_info)
30551 remove_loc_list_addr_table_entries (l);
30552 if (l != NULL
30553 && l->dw_loc_next == NULL
30554 && l->dw_loc_opc == DW_OP_addr
30555 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30556 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30557 && a->dw_attr == DW_AT_location)
30558 {
30559 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30560 remove_AT (die, a->dw_attr);
30561 ix--;
30562 optimize_location_into_implicit_ptr (die, decl);
30563 break;
30564 }
30565 if (a->dw_attr == DW_AT_string_length)
30566 /* If we drop DW_AT_string_length, we need to drop also
30567 DW_AT_{string_length_,}byte_size. */
30568 remove_AT_byte_size = true;
30569 remove_AT (die, a->dw_attr);
30570 ix--;
30571 }
30572 else
30573 mark_base_types (l);
30574 }
30575 break;
30576 case dw_val_class_addr:
30577 if (a->dw_attr == DW_AT_const_value
30578 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30579 {
30580 if (AT_index (a) != NOT_INDEXED)
30581 remove_addr_table_entry (a->dw_attr_val.val_entry);
30582 remove_AT (die, a->dw_attr);
30583 ix--;
30584 }
30585 if ((die->die_tag == DW_TAG_call_site
30586 && a->dw_attr == DW_AT_call_origin)
30587 || (die->die_tag == DW_TAG_GNU_call_site
30588 && a->dw_attr == DW_AT_abstract_origin))
30589 {
30590 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30591 dw_die_ref tdie = lookup_decl_die (tdecl);
30592 dw_die_ref cdie;
30593 if (tdie == NULL
30594 && DECL_EXTERNAL (tdecl)
30595 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30596 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30597 {
30598 dw_die_ref pdie = cdie;
30599 /* Make sure we don't add these DIEs into type units.
30600 We could emit skeleton DIEs for context (namespaces,
30601 outer structs/classes) and a skeleton DIE for the
30602 innermost context with DW_AT_signature pointing to the
30603 type unit. See PR78835. */
30604 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30605 pdie = pdie->die_parent;
30606 if (pdie == NULL)
30607 {
30608 /* Creating a full DIE for tdecl is overly expensive and
30609 at this point even wrong when in the LTO phase
30610 as it can end up generating new type DIEs we didn't
30611 output and thus optimize_external_refs will crash. */
30612 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30613 add_AT_flag (tdie, DW_AT_external, 1);
30614 add_AT_flag (tdie, DW_AT_declaration, 1);
30615 add_linkage_attr (tdie, tdecl);
30616 add_name_and_src_coords_attributes (tdie, tdecl, true);
30617 equate_decl_number_to_die (tdecl, tdie);
30618 }
30619 }
30620 if (tdie)
30621 {
30622 a->dw_attr_val.val_class = dw_val_class_die_ref;
30623 a->dw_attr_val.v.val_die_ref.die = tdie;
30624 a->dw_attr_val.v.val_die_ref.external = 0;
30625 }
30626 else
30627 {
30628 if (AT_index (a) != NOT_INDEXED)
30629 remove_addr_table_entry (a->dw_attr_val.val_entry);
30630 remove_AT (die, a->dw_attr);
30631 ix--;
30632 }
30633 }
30634 break;
30635 default:
30636 break;
30637 }
30638
30639 if (remove_AT_byte_size)
30640 remove_AT (die, dwarf_version >= 5
30641 ? DW_AT_string_length_byte_size
30642 : DW_AT_byte_size);
30643
30644 FOR_EACH_CHILD (die, c, resolve_addr (c));
30645 }
30646 \f
30647 /* Helper routines for optimize_location_lists.
30648 This pass tries to share identical local lists in .debug_loc
30649 section. */
30650
30651 /* Iteratively hash operands of LOC opcode into HSTATE. */
30652
30653 static void
30654 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30655 {
30656 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30657 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30658
30659 switch (loc->dw_loc_opc)
30660 {
30661 case DW_OP_const4u:
30662 case DW_OP_const8u:
30663 if (loc->dtprel)
30664 goto hash_addr;
30665 /* FALLTHRU */
30666 case DW_OP_const1u:
30667 case DW_OP_const1s:
30668 case DW_OP_const2u:
30669 case DW_OP_const2s:
30670 case DW_OP_const4s:
30671 case DW_OP_const8s:
30672 case DW_OP_constu:
30673 case DW_OP_consts:
30674 case DW_OP_pick:
30675 case DW_OP_plus_uconst:
30676 case DW_OP_breg0:
30677 case DW_OP_breg1:
30678 case DW_OP_breg2:
30679 case DW_OP_breg3:
30680 case DW_OP_breg4:
30681 case DW_OP_breg5:
30682 case DW_OP_breg6:
30683 case DW_OP_breg7:
30684 case DW_OP_breg8:
30685 case DW_OP_breg9:
30686 case DW_OP_breg10:
30687 case DW_OP_breg11:
30688 case DW_OP_breg12:
30689 case DW_OP_breg13:
30690 case DW_OP_breg14:
30691 case DW_OP_breg15:
30692 case DW_OP_breg16:
30693 case DW_OP_breg17:
30694 case DW_OP_breg18:
30695 case DW_OP_breg19:
30696 case DW_OP_breg20:
30697 case DW_OP_breg21:
30698 case DW_OP_breg22:
30699 case DW_OP_breg23:
30700 case DW_OP_breg24:
30701 case DW_OP_breg25:
30702 case DW_OP_breg26:
30703 case DW_OP_breg27:
30704 case DW_OP_breg28:
30705 case DW_OP_breg29:
30706 case DW_OP_breg30:
30707 case DW_OP_breg31:
30708 case DW_OP_regx:
30709 case DW_OP_fbreg:
30710 case DW_OP_piece:
30711 case DW_OP_deref_size:
30712 case DW_OP_xderef_size:
30713 hstate.add_object (val1->v.val_int);
30714 break;
30715 case DW_OP_skip:
30716 case DW_OP_bra:
30717 {
30718 int offset;
30719
30720 gcc_assert (val1->val_class == dw_val_class_loc);
30721 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30722 hstate.add_object (offset);
30723 }
30724 break;
30725 case DW_OP_implicit_value:
30726 hstate.add_object (val1->v.val_unsigned);
30727 switch (val2->val_class)
30728 {
30729 case dw_val_class_const:
30730 hstate.add_object (val2->v.val_int);
30731 break;
30732 case dw_val_class_vec:
30733 {
30734 unsigned int elt_size = val2->v.val_vec.elt_size;
30735 unsigned int len = val2->v.val_vec.length;
30736
30737 hstate.add_int (elt_size);
30738 hstate.add_int (len);
30739 hstate.add (val2->v.val_vec.array, len * elt_size);
30740 }
30741 break;
30742 case dw_val_class_const_double:
30743 hstate.add_object (val2->v.val_double.low);
30744 hstate.add_object (val2->v.val_double.high);
30745 break;
30746 case dw_val_class_wide_int:
30747 hstate.add (val2->v.val_wide->get_val (),
30748 get_full_len (*val2->v.val_wide)
30749 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30750 break;
30751 case dw_val_class_addr:
30752 inchash::add_rtx (val2->v.val_addr, hstate);
30753 break;
30754 default:
30755 gcc_unreachable ();
30756 }
30757 break;
30758 case DW_OP_bregx:
30759 case DW_OP_bit_piece:
30760 hstate.add_object (val1->v.val_int);
30761 hstate.add_object (val2->v.val_int);
30762 break;
30763 case DW_OP_addr:
30764 hash_addr:
30765 if (loc->dtprel)
30766 {
30767 unsigned char dtprel = 0xd1;
30768 hstate.add_object (dtprel);
30769 }
30770 inchash::add_rtx (val1->v.val_addr, hstate);
30771 break;
30772 case DW_OP_GNU_addr_index:
30773 case DW_OP_addrx:
30774 case DW_OP_GNU_const_index:
30775 case DW_OP_constx:
30776 {
30777 if (loc->dtprel)
30778 {
30779 unsigned char dtprel = 0xd1;
30780 hstate.add_object (dtprel);
30781 }
30782 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30783 }
30784 break;
30785 case DW_OP_implicit_pointer:
30786 case DW_OP_GNU_implicit_pointer:
30787 hstate.add_int (val2->v.val_int);
30788 break;
30789 case DW_OP_entry_value:
30790 case DW_OP_GNU_entry_value:
30791 hstate.add_object (val1->v.val_loc);
30792 break;
30793 case DW_OP_regval_type:
30794 case DW_OP_deref_type:
30795 case DW_OP_GNU_regval_type:
30796 case DW_OP_GNU_deref_type:
30797 {
30798 unsigned int byte_size
30799 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30800 unsigned int encoding
30801 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30802 hstate.add_object (val1->v.val_int);
30803 hstate.add_object (byte_size);
30804 hstate.add_object (encoding);
30805 }
30806 break;
30807 case DW_OP_convert:
30808 case DW_OP_reinterpret:
30809 case DW_OP_GNU_convert:
30810 case DW_OP_GNU_reinterpret:
30811 if (val1->val_class == dw_val_class_unsigned_const)
30812 {
30813 hstate.add_object (val1->v.val_unsigned);
30814 break;
30815 }
30816 /* FALLTHRU */
30817 case DW_OP_const_type:
30818 case DW_OP_GNU_const_type:
30819 {
30820 unsigned int byte_size
30821 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30822 unsigned int encoding
30823 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30824 hstate.add_object (byte_size);
30825 hstate.add_object (encoding);
30826 if (loc->dw_loc_opc != DW_OP_const_type
30827 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30828 break;
30829 hstate.add_object (val2->val_class);
30830 switch (val2->val_class)
30831 {
30832 case dw_val_class_const:
30833 hstate.add_object (val2->v.val_int);
30834 break;
30835 case dw_val_class_vec:
30836 {
30837 unsigned int elt_size = val2->v.val_vec.elt_size;
30838 unsigned int len = val2->v.val_vec.length;
30839
30840 hstate.add_object (elt_size);
30841 hstate.add_object (len);
30842 hstate.add (val2->v.val_vec.array, len * elt_size);
30843 }
30844 break;
30845 case dw_val_class_const_double:
30846 hstate.add_object (val2->v.val_double.low);
30847 hstate.add_object (val2->v.val_double.high);
30848 break;
30849 case dw_val_class_wide_int:
30850 hstate.add (val2->v.val_wide->get_val (),
30851 get_full_len (*val2->v.val_wide)
30852 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30853 break;
30854 default:
30855 gcc_unreachable ();
30856 }
30857 }
30858 break;
30859
30860 default:
30861 /* Other codes have no operands. */
30862 break;
30863 }
30864 }
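
/* Illustrative sketch, not part of dwarf2out.c: hash_loc_operands above
   checksums each operand by feeding the discriminating class tag into the
   hash before the class-specific payload, so identical bit patterns of
   different value classes cannot collide.  Everything below (the toy_val
   type, the tag names and the FNV-1a helper) is invented for the example
   and only assumes the C standard library.  */

#include <stddef.h>
#include <stdint.h>
#include <string.h>

enum toy_val_class { TOY_CONST, TOY_STRING, TOY_VEC };

struct toy_val
{
  enum toy_val_class klass;
  union
  {
    long icst;
    const char *str;
    struct { const unsigned char *data; size_t len; } vec;
  } v;
};

/* FNV-1a over LEN bytes at P, continuing from the running state H
   (callers seed H with the FNV offset basis, 14695981039346656037).  */

static uint64_t
toy_fnv1a (uint64_t h, const void *p, size_t len)
{
  const unsigned char *c = (const unsigned char *) p;
  for (size_t i = 0; i < len; i++)
    h = (h ^ c[i]) * 1099511628211u;
  return h;
}

/* Mix the tag first, then only the active member of the union.  */

static uint64_t
toy_hash_val (uint64_t h, const struct toy_val *val)
{
  h = toy_fnv1a (h, &val->klass, sizeof (val->klass));
  switch (val->klass)
    {
    case TOY_CONST:
      return toy_fnv1a (h, &val->v.icst, sizeof (val->v.icst));
    case TOY_STRING:
      /* Include the terminating NUL, as hash_loc_list below does for
         its label strings.  */
      return toy_fnv1a (h, val->v.str, strlen (val->v.str) + 1);
    case TOY_VEC:
      h = toy_fnv1a (h, &val->v.vec.len, sizeof (val->v.vec.len));
      return toy_fnv1a (h, val->v.vec.data, val->v.vec.len);
    }
  return h;
}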
30865
30866 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30867
30868 static inline void
30869 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30870 {
30871 dw_loc_descr_ref l;
30872 bool sizes_computed = false;
30873 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30874 size_of_locs (loc);
30875
30876 for (l = loc; l != NULL; l = l->dw_loc_next)
30877 {
30878 enum dwarf_location_atom opc = l->dw_loc_opc;
30879 hstate.add_object (opc);
30880 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30881 {
30882 size_of_locs (loc);
30883 sizes_computed = true;
30884 }
30885 hash_loc_operands (l, hstate);
30886 }
30887 }
30888
30889 /* Compute hash of the whole location list LIST_HEAD. */
30890
30891 static inline void
30892 hash_loc_list (dw_loc_list_ref list_head)
30893 {
30894 dw_loc_list_ref curr = list_head;
30895 inchash::hash hstate;
30896
30897 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30898 {
30899 hstate.add (curr->begin, strlen (curr->begin) + 1);
30900 hstate.add (curr->end, strlen (curr->end) + 1);
30901 hstate.add_object (curr->vbegin);
30902 hstate.add_object (curr->vend);
30903 if (curr->section)
30904 hstate.add (curr->section, strlen (curr->section) + 1);
30905 hash_locs (curr->expr, hstate);
30906 }
30907 list_head->hash = hstate.end ();
30908 }
30909
30910 /* Return true if X and Y opcodes have the same operands. */
30911
30912 static inline bool
30913 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30914 {
30915 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30916 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30917 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30918 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30919
30920 switch (x->dw_loc_opc)
30921 {
30922 case DW_OP_const4u:
30923 case DW_OP_const8u:
30924 if (x->dtprel)
30925 goto hash_addr;
30926 /* FALLTHRU */
30927 case DW_OP_const1u:
30928 case DW_OP_const1s:
30929 case DW_OP_const2u:
30930 case DW_OP_const2s:
30931 case DW_OP_const4s:
30932 case DW_OP_const8s:
30933 case DW_OP_constu:
30934 case DW_OP_consts:
30935 case DW_OP_pick:
30936 case DW_OP_plus_uconst:
30937 case DW_OP_breg0:
30938 case DW_OP_breg1:
30939 case DW_OP_breg2:
30940 case DW_OP_breg3:
30941 case DW_OP_breg4:
30942 case DW_OP_breg5:
30943 case DW_OP_breg6:
30944 case DW_OP_breg7:
30945 case DW_OP_breg8:
30946 case DW_OP_breg9:
30947 case DW_OP_breg10:
30948 case DW_OP_breg11:
30949 case DW_OP_breg12:
30950 case DW_OP_breg13:
30951 case DW_OP_breg14:
30952 case DW_OP_breg15:
30953 case DW_OP_breg16:
30954 case DW_OP_breg17:
30955 case DW_OP_breg18:
30956 case DW_OP_breg19:
30957 case DW_OP_breg20:
30958 case DW_OP_breg21:
30959 case DW_OP_breg22:
30960 case DW_OP_breg23:
30961 case DW_OP_breg24:
30962 case DW_OP_breg25:
30963 case DW_OP_breg26:
30964 case DW_OP_breg27:
30965 case DW_OP_breg28:
30966 case DW_OP_breg29:
30967 case DW_OP_breg30:
30968 case DW_OP_breg31:
30969 case DW_OP_regx:
30970 case DW_OP_fbreg:
30971 case DW_OP_piece:
30972 case DW_OP_deref_size:
30973 case DW_OP_xderef_size:
30974 return valx1->v.val_int == valy1->v.val_int;
30975 case DW_OP_skip:
30976 case DW_OP_bra:
30977 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30978 can cause irrelevant differences in dw_loc_addr. */
30979 gcc_assert (valx1->val_class == dw_val_class_loc
30980 && valy1->val_class == dw_val_class_loc
30981 && (dwarf_split_debug_info
30982 || x->dw_loc_addr == y->dw_loc_addr));
30983 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30984 case DW_OP_implicit_value:
30985 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30986 || valx2->val_class != valy2->val_class)
30987 return false;
30988 switch (valx2->val_class)
30989 {
30990 case dw_val_class_const:
30991 return valx2->v.val_int == valy2->v.val_int;
30992 case dw_val_class_vec:
30993 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30994 && valx2->v.val_vec.length == valy2->v.val_vec.length
30995 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30996 valx2->v.val_vec.elt_size
30997 * valx2->v.val_vec.length) == 0;
30998 case dw_val_class_const_double:
30999 return valx2->v.val_double.low == valy2->v.val_double.low
31000 && valx2->v.val_double.high == valy2->v.val_double.high;
31001 case dw_val_class_wide_int:
31002 return *valx2->v.val_wide == *valy2->v.val_wide;
31003 case dw_val_class_addr:
31004 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31005 default:
31006 gcc_unreachable ();
31007 }
31008 case DW_OP_bregx:
31009 case DW_OP_bit_piece:
31010 return valx1->v.val_int == valy1->v.val_int
31011 && valx2->v.val_int == valy2->v.val_int;
31012 case DW_OP_addr:
31013 hash_addr:
31014 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31015 case DW_OP_GNU_addr_index:
31016 case DW_OP_addrx:
31017 case DW_OP_GNU_const_index:
31018 case DW_OP_constx:
31019 {
31020 rtx ax1 = valx1->val_entry->addr.rtl;
31021 rtx ay1 = valy1->val_entry->addr.rtl;
31022 return rtx_equal_p (ax1, ay1);
31023 }
31024 case DW_OP_implicit_pointer:
31025 case DW_OP_GNU_implicit_pointer:
31026 return valx1->val_class == dw_val_class_die_ref
31027 && valx1->val_class == valy1->val_class
31028 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31029 && valx2->v.val_int == valy2->v.val_int;
31030 case DW_OP_entry_value:
31031 case DW_OP_GNU_entry_value:
31032 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31033 case DW_OP_const_type:
31034 case DW_OP_GNU_const_type:
31035 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31036 || valx2->val_class != valy2->val_class)
31037 return false;
31038 switch (valx2->val_class)
31039 {
31040 case dw_val_class_const:
31041 return valx2->v.val_int == valy2->v.val_int;
31042 case dw_val_class_vec:
31043 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31044 && valx2->v.val_vec.length == valy2->v.val_vec.length
31045 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31046 valx2->v.val_vec.elt_size
31047 * valx2->v.val_vec.length) == 0;
31048 case dw_val_class_const_double:
31049 return valx2->v.val_double.low == valy2->v.val_double.low
31050 && valx2->v.val_double.high == valy2->v.val_double.high;
31051 case dw_val_class_wide_int:
31052 return *valx2->v.val_wide == *valy2->v.val_wide;
31053 default:
31054 gcc_unreachable ();
31055 }
31056 case DW_OP_regval_type:
31057 case DW_OP_deref_type:
31058 case DW_OP_GNU_regval_type:
31059 case DW_OP_GNU_deref_type:
31060 return valx1->v.val_int == valy1->v.val_int
31061 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31062 case DW_OP_convert:
31063 case DW_OP_reinterpret:
31064 case DW_OP_GNU_convert:
31065 case DW_OP_GNU_reinterpret:
31066 if (valx1->val_class != valy1->val_class)
31067 return false;
31068 if (valx1->val_class == dw_val_class_unsigned_const)
31069 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31070 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31071 case DW_OP_GNU_parameter_ref:
31072 return valx1->val_class == dw_val_class_die_ref
31073 && valx1->val_class == valy1->val_class
31074 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31075 default:
31076 /* Other codes have no operands. */
31077 return true;
31078 }
31079 }
31080
31081 /* Return true if DWARF location expressions X and Y are the same. */
31082
31083 static inline bool
31084 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31085 {
31086 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31087 if (x->dw_loc_opc != y->dw_loc_opc
31088 || x->dtprel != y->dtprel
31089 || !compare_loc_operands (x, y))
31090 break;
31091 return x == NULL && y == NULL;
31092 }
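
/* Illustrative sketch, not part of dwarf2out.c: compare_locs above is the
   usual lock-step walk over two singly linked lists -- break at the first
   node-level mismatch, then report equality only if both cursors reached
   the end together.  The toy node type below is invented for the example.  */

#include <stdbool.h>
#include <stddef.h>

struct toy_node
{
  int op;
  struct toy_node *next;
};

static bool
toy_lists_equal (const struct toy_node *x, const struct toy_node *y)
{
  for (; x != NULL && y != NULL; x = x->next, y = y->next)
    if (x->op != y->op)
      break;
  /* Equal only if neither list has nodes left over.  */
  return x == NULL && y == NULL;
}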
31093
31094 /* Hashtable helpers. */
31095
31096 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31097 {
31098 static inline hashval_t hash (const dw_loc_list_struct *);
31099 static inline bool equal (const dw_loc_list_struct *,
31100 const dw_loc_list_struct *);
31101 };
31102
31103 /* Return precomputed hash of location list X. */
31104
31105 inline hashval_t
31106 loc_list_hasher::hash (const dw_loc_list_struct *x)
31107 {
31108 return x->hash;
31109 }
31110
31111 /* Return true if location lists A and B are the same. */
31112
31113 inline bool
31114 loc_list_hasher::equal (const dw_loc_list_struct *a,
31115 const dw_loc_list_struct *b)
31116 {
31117 if (a == b)
31118 return 1;
31119 if (a->hash != b->hash)
31120 return 0;
31121 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31122 if (strcmp (a->begin, b->begin) != 0
31123 || strcmp (a->end, b->end) != 0
31124 || (a->section == NULL) != (b->section == NULL)
31125 || (a->section && strcmp (a->section, b->section) != 0)
31126 || a->vbegin != b->vbegin || a->vend != b->vend
31127 || !compare_locs (a->expr, b->expr))
31128 break;
31129 return a == NULL && b == NULL;
31130 }
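
/* Illustrative sketch, not part of dwarf2out.c: loc_list_hasher above
   returns the hash that hash_loc_list cached in the list head, and its
   equality test puts two cheap rejects (pointer identity and differing
   cached hashes) in front of the expensive element-wise walk.  The toy
   type and helper below are invented for the example; A and B are assumed
   to be non-null list heads, as they are for hash table entries.  */

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

struct toy_range
{
  const char *begin;           /* Label names, as in dw_loc_list_struct.  */
  const char *end;
  unsigned hash;               /* Cached by the analogue of hash_loc_list.  */
  struct toy_range *next;
};

static bool
toy_ranges_equal (const struct toy_range *a, const struct toy_range *b)
{
  if (a == b)
    return true;               /* Same object: trivially equal.  */
  if (a->hash != b->hash)
    return false;              /* Cheap reject on the cached hashes.  */
  for (; a != NULL && b != NULL; a = a->next, b = b->next)
    if (strcmp (a->begin, b->begin) != 0 || strcmp (a->end, b->end) != 0)
      break;
  return a == NULL && b == NULL;
}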
31131
31132 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31133
31134
31135 /* Recursively optimize location lists referenced from DIE
31136 children and share them whenever possible. */
31137
31138 static void
31139 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31140 {
31141 dw_die_ref c;
31142 dw_attr_node *a;
31143 unsigned ix;
31144 dw_loc_list_struct **slot;
31145 bool drop_locviews = false;
31146 bool has_locviews = false;
31147
31148 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31149 if (AT_class (a) == dw_val_class_loc_list)
31150 {
31151 dw_loc_list_ref list = AT_loc_list (a);
31152 /* TODO: perform some optimizations here, before hashing
31153 it and storing it into the hash table. */
31154 hash_loc_list (list);
31155 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31156 if (*slot == NULL)
31157 {
31158 *slot = list;
31159 if (loc_list_has_views (list))
31160 gcc_assert (list->vl_symbol);
31161 else if (list->vl_symbol)
31162 {
31163 drop_locviews = true;
31164 list->vl_symbol = NULL;
31165 }
31166 }
31167 else
31168 {
31169 if (list->vl_symbol && !(*slot)->vl_symbol)
31170 drop_locviews = true;
31171 a->dw_attr_val.v.val_loc_list = *slot;
31172 }
31173 }
31174 else if (AT_class (a) == dw_val_class_view_list)
31175 {
31176 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31177 has_locviews = true;
31178 }
31179
31180
31181 if (drop_locviews && has_locviews)
31182 remove_AT (die, DW_AT_GNU_locviews);
31183
31184 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31185 }
31186
31187
31188 /* Recursively assign each location list a unique index into the debug_addr
31189 section. */
31190
31191 static void
31192 index_location_lists (dw_die_ref die)
31193 {
31194 dw_die_ref c;
31195 dw_attr_node *a;
31196 unsigned ix;
31197
31198 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31199 if (AT_class (a) == dw_val_class_loc_list)
31200 {
31201 dw_loc_list_ref list = AT_loc_list (a);
31202 dw_loc_list_ref curr;
31203 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31204 {
31205 /* Don't index an entry that has already been indexed
31206 or won't be output. Make sure skip_loc_list_entry doesn't
31207 call size_of_locs, because that might cause a circular dependency:
31208 index_location_lists would then require address table indexes to be
31209 computed, yet it adds new indexes through add_addr_table_entry,
31210 and address table index computation requires that no new additions
31211 be made to the hash table. In the rare case of a DWARF[234]
31212 location expression >= 64KB, we'll just waste an unused address
31213 table entry for it. */
31214 if (curr->begin_entry != NULL
31215 || skip_loc_list_entry (curr))
31216 continue;
31217
31218 curr->begin_entry
31219 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31220 }
31221 }
31222
31223 FOR_EACH_CHILD (die, c, index_location_lists (c));
31224 }
31225
31226 /* Optimize location lists referenced from DIE
31227 children and share them whenever possible. */
31228
31229 static void
31230 optimize_location_lists (dw_die_ref die)
31231 {
31232 loc_list_hash_type htab (500);
31233 optimize_location_lists_1 (die, &htab);
31234 }
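
/* Illustrative sketch, not part of dwarf2out.c: the sharing performed by
   optimize_location_lists_1 above is hash-consing -- look an object up by
   content and, if an equal one is already in the table, drop the new copy
   and reuse the canonical one.  The fixed-size, linear-probing intern
   table below is invented for the example (and is never allowed to fill
   up); GCC's hash_table<> grows dynamically and uses find_slot_with_hash
   with INSERT semantics instead.  */

#include <stddef.h>
#include <string.h>

#define TOY_TABLE_SIZE 64                /* Power of two.  */

static const char *toy_interned[TOY_TABLE_SIZE];

static size_t
toy_string_hash (const char *s)
{
  size_t h = 5381;
  while (*s)
    h = h * 33 + (unsigned char) *s++;
  return h;
}

/* Return the canonical pointer for S, inserting S if it is new.  */

static const char *
toy_intern (const char *s)
{
  size_t slot = toy_string_hash (s) & (TOY_TABLE_SIZE - 1);
  while (toy_interned[slot] != NULL)
    {
      if (strcmp (toy_interned[slot], s) == 0)
        return toy_interned[slot];       /* Reuse the existing copy.  */
      slot = (slot + 1) & (TOY_TABLE_SIZE - 1);
    }
  toy_interned[slot] = s;                /* First occurrence is canonical.  */
  return s;
}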
31235 \f
31236 /* Traverse the limbo die list, and add parent/child links. The only
31237 dies without parents that should be here are concrete instances of
31238 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31239 For concrete instances, we can get the parent die from the abstract
31240 instance. */
31241
31242 static void
31243 flush_limbo_die_list (void)
31244 {
31245 limbo_die_node *node;
31246
31247 /* get_context_die calls force_decl_die, which can put new DIEs on the
31248 limbo list in LTO mode when nested functions are put in a different
31249 partition than that of their parent function. */
31250 while ((node = limbo_die_list))
31251 {
31252 dw_die_ref die = node->die;
31253 limbo_die_list = node->next;
31254
31255 if (die->die_parent == NULL)
31256 {
31257 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31258
31259 if (origin && origin->die_parent)
31260 add_child_die (origin->die_parent, die);
31261 else if (is_cu_die (die))
31262 ;
31263 else if (seen_error ())
31264 /* It's OK to be confused by errors in the input. */
31265 add_child_die (comp_unit_die (), die);
31266 else
31267 {
31268 /* In certain situations, the lexical block containing a
31269 nested function can be optimized away, which results
31270 in the nested function die being orphaned. Likewise
31271 with the return type of that nested function. Force
31272 this to be a child of the containing function.
31273
31274 It may happen that even the containing function got fully
31275 inlined and optimized out. In that case we are lost and
31276 fall back to the compile-unit DIE. This should not be a big
31277 issue as the function is likely unreachable too. */
31278 gcc_assert (node->created_for);
31279
31280 if (DECL_P (node->created_for))
31281 origin = get_context_die (DECL_CONTEXT (node->created_for));
31282 else if (TYPE_P (node->created_for))
31283 origin = scope_die_for (node->created_for, comp_unit_die ());
31284 else
31285 origin = comp_unit_die ();
31286
31287 add_child_die (origin, die);
31288 }
31289 }
31290 }
31291 }
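
/* Illustrative sketch, not part of dwarf2out.c: the drain loop above is
   written as "while ((node = limbo_die_list))" rather than a one-shot
   for-loop precisely so that entries pushed onto the list while earlier
   entries are being processed (here by get_context_die/force_decl_die in
   LTO mode) are still picked up.  The toy work list below is invented for
   the example.  */

#include <stddef.h>

struct toy_work
{
  struct toy_work *next;
  void (*run) (struct toy_work *);
};

static struct toy_work *toy_worklist;

static void
toy_drain_worklist (void)
{
  struct toy_work *w;
  /* Re-read the list head on every iteration: RUN may push new work.  */
  while ((w = toy_worklist) != NULL)
    {
      toy_worklist = w->next;
      w->run (w);
    }
}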
31292
31293 /* Reset DIEs so we can output them again. */
31294
31295 static void
31296 reset_dies (dw_die_ref die)
31297 {
31298 dw_die_ref c;
31299
31300 /* Remove stuff we re-generate. */
31301 die->die_mark = 0;
31302 die->die_offset = 0;
31303 die->die_abbrev = 0;
31304 remove_AT (die, DW_AT_sibling);
31305
31306 FOR_EACH_CHILD (die, c, reset_dies (c));
31307 }
31308
31309 /* Output stuff that dwarf requires at the end of every file,
31310 and generate the DWARF-2 debugging info. */
31311
31312 static void
31313 dwarf2out_finish (const char *filename)
31314 {
31315 comdat_type_node *ctnode;
31316 dw_die_ref main_comp_unit_die;
31317 unsigned char checksum[16];
31318 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31319
31320 /* Flush out any latecomers to the limbo party. */
31321 flush_limbo_die_list ();
31322
31323 if (inline_entry_data_table)
31324 gcc_assert (inline_entry_data_table->elements () == 0);
31325
31326 if (flag_checking)
31327 {
31328 verify_die (comp_unit_die ());
31329 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31330 verify_die (node->die);
31331 }
31332
31333 /* We shouldn't have any symbols with delayed asm names for
31334 DIEs generated after early finish. */
31335 gcc_assert (deferred_asm_name == NULL);
31336
31337 gen_remaining_tmpl_value_param_die_attribute ();
31338
31339 if (flag_generate_lto || flag_generate_offload)
31340 {
31341 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31342
31343 /* Prune stuff so that dwarf2out_finish runs successfully
31344 for the fat part of the object. */
31345 reset_dies (comp_unit_die ());
31346 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31347 reset_dies (node->die);
31348
31349 hash_table<comdat_type_hasher> comdat_type_table (100);
31350 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31351 {
31352 comdat_type_node **slot
31353 = comdat_type_table.find_slot (ctnode, INSERT);
31354
31355 /* Don't reset types twice. */
31356 if (*slot != HTAB_EMPTY_ENTRY)
31357 continue;
31358
31359 /* Remove the pointer to the line table. */
31360 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31361
31362 if (debug_info_level >= DINFO_LEVEL_TERSE)
31363 reset_dies (ctnode->root_die);
31364
31365 *slot = ctnode;
31366 }
31367
31368 /* Reset die CU symbol so we don't output it twice. */
31369 comp_unit_die ()->die_id.die_symbol = NULL;
31370
31371 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31372 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31373 if (have_macinfo)
31374 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31375
31376 /* Remove indirect string decisions. */
31377 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31378 if (debug_line_str_hash)
31379 {
31380 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31381 debug_line_str_hash = NULL;
31382 }
31383 }
31384
31385 #if ENABLE_ASSERT_CHECKING
31386 {
31387 dw_die_ref die = comp_unit_die (), c;
31388 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31389 }
31390 #endif
31391 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31392 resolve_addr (ctnode->root_die);
31393 resolve_addr (comp_unit_die ());
31394 move_marked_base_types ();
31395
31396 if (dump_file)
31397 {
31398 fprintf (dump_file, "DWARF for %s\n", filename);
31399 print_die (comp_unit_die (), dump_file);
31400 }
31401
31402 /* Initialize sections and labels used for actual assembler output. */
31403 unsigned generation = init_sections_and_labels (false);
31404
31405 /* Traverse the DIEs and add sibling attributes to those DIEs that
31406 have children. */
31407 add_sibling_attributes (comp_unit_die ());
31408 limbo_die_node *node;
31409 for (node = cu_die_list; node; node = node->next)
31410 add_sibling_attributes (node->die);
31411 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31412 add_sibling_attributes (ctnode->root_die);
31413
31414 /* When splitting DWARF info, we put some attributes in the
31415 skeleton compile_unit DIE that remains in the .o, while
31416 most attributes go in the DWO compile_unit_die. */
31417 if (dwarf_split_debug_info)
31418 {
31419 limbo_die_node *cu;
31420 main_comp_unit_die = gen_compile_unit_die (NULL);
31421 if (dwarf_version >= 5)
31422 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31423 cu = limbo_die_list;
31424 gcc_assert (cu->die == main_comp_unit_die);
31425 limbo_die_list = limbo_die_list->next;
31426 cu->next = cu_die_list;
31427 cu_die_list = cu;
31428 }
31429 else
31430 main_comp_unit_die = comp_unit_die ();
31431
31432 /* Output a terminator label for the .text section. */
31433 switch_to_section (text_section);
31434 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31435 if (cold_text_section)
31436 {
31437 switch_to_section (cold_text_section);
31438 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31439 }
31440
31441 /* We can only use the low/high_pc attributes if all of the code was
31442 in .text. */
31443 if (!have_multiple_function_sections
31444 || (dwarf_version < 3 && dwarf_strict))
31445 {
31446 /* Don't add if the CU has no associated code. */
31447 if (text_section_used)
31448 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31449 text_end_label, true);
31450 }
31451 else
31452 {
31453 unsigned fde_idx;
31454 dw_fde_ref fde;
31455 bool range_list_added = false;
31456
31457 if (text_section_used)
31458 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31459 text_end_label, &range_list_added, true);
31460 if (cold_text_section_used)
31461 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31462 cold_end_label, &range_list_added, true);
31463
31464 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31465 {
31466 if (DECL_IGNORED_P (fde->decl))
31467 continue;
31468 if (!fde->in_std_section)
31469 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31470 fde->dw_fde_end, &range_list_added,
31471 true);
31472 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31473 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31474 fde->dw_fde_second_end, &range_list_added,
31475 true);
31476 }
31477
31478 if (range_list_added)
31479 {
31480 /* We need to give .debug_loc and .debug_ranges an appropriate
31481 "base address". Use zero so that these addresses become
31482 absolute. Historically, we've emitted the unexpected
31483 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31484 Emit both to give time for other tools to adapt. */
31485 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31486 if (! dwarf_strict && dwarf_version < 4)
31487 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31488
31489 add_ranges (NULL);
31490 }
31491 }
31492
31493 /* AIX Assembler inserts the length, so adjust the reference to match the
31494 offset expected by debuggers. */
31495 strcpy (dl_section_ref, debug_line_section_label);
31496 if (XCOFF_DEBUGGING_INFO)
31497 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31498
31499 if (debug_info_level >= DINFO_LEVEL_TERSE)
31500 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31501 dl_section_ref);
31502
31503 if (have_macinfo)
31504 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31505 macinfo_section_label);
31506
31507 if (dwarf_split_debug_info)
31508 {
31509 if (have_location_lists)
31510 {
31511 /* Since we generate the loclists in the split DWARF .dwo
31512 file itself, we don't need to generate a loclists_base
31513 attribute for the split compile unit DIE. That attribute
31514 (and using relocatable sec_offset FORMs) isn't allowed
31515 for a split compile unit. Only if the .debug_loclists
31516 section was in the main file would we need to generate a
31517 loclists_base attribute here (for the full or skeleton
31518 unit DIE). */
31519
31520 /* optimize_location_lists calculates the size of the lists,
31521 so index them first, and assign indices to the entries.
31522 Although optimize_location_lists will remove entries from
31523 the table, it only does so for duplicates, and therefore
31524 only reduces ref_counts to 1. */
31525 index_location_lists (comp_unit_die ());
31526 }
31527
31528 if (addr_index_table != NULL)
31529 {
31530 unsigned int index = 0;
31531 addr_index_table
31532 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31533 (&index);
31534 }
31535 }
31536
31537 loc_list_idx = 0;
31538 if (have_location_lists)
31539 {
31540 optimize_location_lists (comp_unit_die ());
31541 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31542 if (dwarf_version >= 5 && dwarf_split_debug_info)
31543 assign_location_list_indexes (comp_unit_die ());
31544 }
31545
31546 save_macinfo_strings ();
31547
31548 if (dwarf_split_debug_info)
31549 {
31550 unsigned int index = 0;
31551
31552 /* Add attributes common to skeleton compile_units and
31553 type_units. Because these attributes include strings, it
31554 must be done before freezing the string table. Top-level
31555 skeleton die attrs are added when the skeleton type unit is
31556 created, so ensure it is created by this point. */
31557 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31558 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31559 }
31560
31561 /* Output all of the compilation units. We put the main one last so that
31562 the offsets are available to output_pubnames. */
31563 for (node = cu_die_list; node; node = node->next)
31564 output_comp_unit (node->die, 0, NULL);
31565
31566 hash_table<comdat_type_hasher> comdat_type_table (100);
31567 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31568 {
31569 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31570
31571 /* Don't output duplicate types. */
31572 if (*slot != HTAB_EMPTY_ENTRY)
31573 continue;
31574
31575 /* Add a pointer to the line table for the main compilation unit
31576 so that the debugger can make sense of DW_AT_decl_file
31577 attributes. */
31578 if (debug_info_level >= DINFO_LEVEL_TERSE)
31579 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31580 (!dwarf_split_debug_info
31581 ? dl_section_ref
31582 : debug_skeleton_line_section_label));
31583
31584 output_comdat_type_unit (ctnode, false);
31585 *slot = ctnode;
31586 }
31587
31588 if (dwarf_split_debug_info)
31589 {
31590 int mark;
31591 struct md5_ctx ctx;
31592
31593 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31594 index_rnglists ();
31595
31596 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31597 md5_init_ctx (&ctx);
31598 mark = 0;
31599 die_checksum (comp_unit_die (), &ctx, &mark);
31600 unmark_all_dies (comp_unit_die ());
31601 md5_finish_ctx (&ctx, checksum);
31602
31603 if (dwarf_version < 5)
31604 {
31605 /* Use the first 8 bytes of the checksum as the dwo_id,
31606 and add it to both comp-unit DIEs. */
31607 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31608 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31609 }
31610
31611 /* Add the base offset of the ranges table to the skeleton
31612 comp-unit DIE. */
31613 if (!vec_safe_is_empty (ranges_table))
31614 {
31615 if (dwarf_version >= 5)
31616 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31617 ranges_base_label);
31618 else
31619 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31620 ranges_section_label);
31621 }
31622
31623 switch_to_section (debug_addr_section);
31624 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31625 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31626 before DWARF5, didn't have a header for .debug_addr units.
31627 DWARF5 specifies a small header when address tables are used. */
31628 if (dwarf_version >= 5)
31629 {
31630 unsigned int last_idx = 0;
31631 unsigned long addrs_length;
31632
31633 addr_index_table->traverse_noresize
31634 <unsigned int *, count_index_addrs> (&last_idx);
31635 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31636
31637 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31638 dw2_asm_output_data (4, 0xffffffff,
31639 "Escape value for 64-bit DWARF extension");
31640 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31641 "Length of Address Unit");
31642 dw2_asm_output_data (2, 5, "DWARF addr version");
31643 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31644 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31645 }
31646 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31647 output_addr_table ();
31648 }
31649
31650 /* Output the main compilation unit if non-empty or if .debug_macinfo
31651 or .debug_macro will be emitted. */
31652 output_comp_unit (comp_unit_die (), have_macinfo,
31653 dwarf_split_debug_info ? checksum : NULL);
31654
31655 if (dwarf_split_debug_info && info_section_emitted)
31656 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31657
31658 /* Output the abbreviation table. */
31659 if (vec_safe_length (abbrev_die_table) != 1)
31660 {
31661 switch_to_section (debug_abbrev_section);
31662 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31663 output_abbrev_section ();
31664 }
31665
31666 /* Output location list section if necessary. */
31667 if (have_location_lists)
31668 {
31669 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31670 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31671 /* Output the location lists info. */
31672 switch_to_section (debug_loc_section);
31673 if (dwarf_version >= 5)
31674 {
31675 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31676 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31677 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31678 dw2_asm_output_data (4, 0xffffffff,
31679 "Initial length escape value indicating "
31680 "64-bit DWARF extension");
31681 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31682 "Length of Location Lists");
31683 ASM_OUTPUT_LABEL (asm_out_file, l1);
31684 output_dwarf_version ();
31685 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31686 dw2_asm_output_data (1, 0, "Segment Size");
31687 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31688 "Offset Entry Count");
31689 }
31690 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31691 if (dwarf_version >= 5 && dwarf_split_debug_info)
31692 {
31693 unsigned int save_loc_list_idx = loc_list_idx;
31694 loc_list_idx = 0;
31695 output_loclists_offsets (comp_unit_die ());
31696 gcc_assert (save_loc_list_idx == loc_list_idx);
31697 }
31698 output_location_lists (comp_unit_die ());
31699 if (dwarf_version >= 5)
31700 ASM_OUTPUT_LABEL (asm_out_file, l2);
31701 }
31702
31703 output_pubtables ();
31704
31705 /* Output the address range information if a CU (.debug_info section)
31706 was emitted. We output an empty table even if we had no functions
31707 to put in it. This is because the consumer has no way to tell the
31708 difference between an empty table that we omitted and failure to
31709 generate a table that would have contained data. */
31710 if (info_section_emitted)
31711 {
31712 switch_to_section (debug_aranges_section);
31713 output_aranges ();
31714 }
31715
31716 /* Output ranges section if necessary. */
31717 if (!vec_safe_is_empty (ranges_table))
31718 {
31719 if (dwarf_version >= 5)
31720 output_rnglists (generation);
31721 else
31722 output_ranges ();
31723 }
31724
31725 /* Have to end the macro section. */
31726 if (have_macinfo)
31727 {
31728 switch_to_section (debug_macinfo_section);
31729 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31730 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31731 : debug_skeleton_line_section_label, false);
31732 dw2_asm_output_data (1, 0, "End compilation unit");
31733 }
31734
31735 /* Output the source line correspondence table. We must do this
31736 even if there is no line information. Otherwise, on an empty
31737 translation unit, we will generate a present, but empty,
31738 .debug_info section. IRIX 6.5 `nm' will then complain when
31739 examining the file. This is done late so that any filenames
31740 used by the debug_info section are marked as 'used'. */
31741 switch_to_section (debug_line_section);
31742 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31743 if (! output_asm_line_debug_info ())
31744 output_line_info (false);
31745
31746 if (dwarf_split_debug_info && info_section_emitted)
31747 {
31748 switch_to_section (debug_skeleton_line_section);
31749 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31750 output_line_info (true);
31751 }
31752
31753 /* If we emitted any indirect strings, output the string table too. */
31754 if (debug_str_hash || skeleton_debug_str_hash)
31755 output_indirect_strings ();
31756 if (debug_line_str_hash)
31757 {
31758 switch_to_section (debug_line_str_section);
31759 const enum dwarf_form form = DW_FORM_line_strp;
31760 debug_line_str_hash->traverse<enum dwarf_form,
31761 output_indirect_string> (form);
31762 }
31763
31764 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31765 symview_upper_bound = 0;
31766 if (zero_view_p)
31767 bitmap_clear (zero_view_p);
31768 }
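
/* Illustrative sketch, not part of dwarf2out.c: the DWARF 5 .debug_addr
   header emitted above is a unit length (prefixed by the 0xffffffff escape
   only when 64-bit DWARF offsets are in use), a 2-byte version of 5, a
   1-byte address size and a 1-byte segment selector size; the length
   covers those last four bytes plus one address per table entry, which is
   the "last_idx * DWARF2_ADDR_SIZE + 4" computation above.  The constants
   and the assembler-style output below are placeholders for the example.  */

#include <stdio.h>

#define TOY_ADDR_SIZE 8          /* Stand-in for DWARF2_ADDR_SIZE.  */
#define TOY_OFFSET_SIZE 4        /* 4 for 32-bit DWARF, 8 for 64-bit DWARF.  */

static void
toy_print_debug_addr_header (unsigned long n_entries)
{
  /* Version (2) + address size (1) + segment selector size (1) = 4 bytes,
     then one address per entry.  */
  unsigned long length = n_entries * TOY_ADDR_SIZE + 4;

  if (TOY_OFFSET_SIZE == 8)
    printf ("\t.long\t0xffffffff\t# 64-bit DWARF escape\n");
  printf ("\t.%s\t%lu\t# unit length\n",
          TOY_OFFSET_SIZE == 8 ? "quad" : "long", length);
  printf ("\t.short\t5\t# DWARF addr version\n");
  printf ("\t.byte\t%d\t# address size\n", TOY_ADDR_SIZE);
  printf ("\t.byte\t0\t# segment selector size\n");
}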
31769
31770 /* Returns a hash value for X (which really is a variable_value_struct). */
31771
31772 inline hashval_t
31773 variable_value_hasher::hash (variable_value_struct *x)
31774 {
31775 return (hashval_t) x->decl_id;
31776 }
31777
31778 /* Return nonzero if decl_id of variable_value_struct X is the same as
31779 UID of decl Y. */
31780
31781 inline bool
31782 variable_value_hasher::equal (variable_value_struct *x, tree y)
31783 {
31784 return x->decl_id == DECL_UID (y);
31785 }
31786
31787 /* Helper function for resolve_variable_value, handle
31788 DW_OP_GNU_variable_value in one location expression.
31789 Return true if exprloc has been changed into loclist. */
31790
31791 static bool
31792 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31793 {
31794 dw_loc_descr_ref next;
31795 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31796 {
31797 next = loc->dw_loc_next;
31798 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31799 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31800 continue;
31801
31802 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31803 if (DECL_CONTEXT (decl) != current_function_decl)
31804 continue;
31805
31806 dw_die_ref ref = lookup_decl_die (decl);
31807 if (ref)
31808 {
31809 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31810 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31811 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31812 continue;
31813 }
31814 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31815 if (l == NULL)
31816 continue;
31817 if (l->dw_loc_next)
31818 {
31819 if (AT_class (a) != dw_val_class_loc)
31820 continue;
31821 switch (a->dw_attr)
31822 {
31823 /* The following attributes allow both exprloc and loclist
31824 classes, so we can change them into a loclist. */
31825 case DW_AT_location:
31826 case DW_AT_string_length:
31827 case DW_AT_return_addr:
31828 case DW_AT_data_member_location:
31829 case DW_AT_frame_base:
31830 case DW_AT_segment:
31831 case DW_AT_static_link:
31832 case DW_AT_use_location:
31833 case DW_AT_vtable_elem_location:
31834 if (prev)
31835 {
31836 prev->dw_loc_next = NULL;
31837 prepend_loc_descr_to_each (l, AT_loc (a));
31838 }
31839 if (next)
31840 add_loc_descr_to_each (l, next);
31841 a->dw_attr_val.val_class = dw_val_class_loc_list;
31842 a->dw_attr_val.val_entry = NULL;
31843 a->dw_attr_val.v.val_loc_list = l;
31844 have_location_lists = true;
31845 return true;
31846 /* The following attributes allow both exprloc and reference,
31847 so if the whole expression is DW_OP_GNU_variable_value alone
31848 we could transform it into a reference. */
31849 case DW_AT_byte_size:
31850 case DW_AT_bit_size:
31851 case DW_AT_lower_bound:
31852 case DW_AT_upper_bound:
31853 case DW_AT_bit_stride:
31854 case DW_AT_count:
31855 case DW_AT_allocated:
31856 case DW_AT_associated:
31857 case DW_AT_byte_stride:
31858 if (prev == NULL && next == NULL)
31859 break;
31860 /* FALLTHRU */
31861 default:
31862 if (dwarf_strict)
31863 continue;
31864 break;
31865 }
31866 /* Create DW_TAG_variable that we can refer to. */
31867 gen_decl_die (decl, NULL_TREE, NULL,
31868 lookup_decl_die (current_function_decl));
31869 ref = lookup_decl_die (decl);
31870 if (ref)
31871 {
31872 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31873 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31874 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31875 }
31876 continue;
31877 }
31878 if (prev)
31879 {
31880 prev->dw_loc_next = l->expr;
31881 add_loc_descr (&prev->dw_loc_next, next);
31882 free_loc_descr (loc, NULL);
31883 next = prev->dw_loc_next;
31884 }
31885 else
31886 {
31887 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31888 add_loc_descr (&loc, next);
31889 next = loc;
31890 }
31891 loc = prev;
31892 }
31893 return false;
31894 }
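
/* Illustrative sketch, not part of dwarf2out.c: replacing one node of a
   singly linked expression with a whole sub-list, as done just above, has
   two cases -- an interior node is spliced out by redirecting its
   predecessor, while the head node is overwritten in place (the memcpy
   case) so callers that already hold the head pointer keep seeing a valid
   list.  The toy node type and helper below are invented for the example;
   NODE equals HEAD exactly when PREV is NULL, and REPL must be non-empty.  */

#include <stddef.h>
#include <string.h>

struct toy_op
{
  int opc;
  struct toy_op *next;
};

static void
toy_splice (struct toy_op *head, struct toy_op *prev,
            struct toy_op *node, struct toy_op *repl)
{
  struct toy_op *tail = node->next;
  struct toy_op *last = repl;

  while (last->next != NULL)
    last = last->next;

  if (prev != NULL)
    /* Interior node: redirect the predecessor; NODE itself is now unused
       (dwarf2out.c frees it via free_loc_descr).  */
    prev->next = repl;
  else
    {
      /* Head node: copy the first replacement element over it in place.  */
      memcpy (head, repl, sizeof (*head));
      if (last == repl)
        last = head;
    }
  last->next = tail;             /* Re-attach whatever followed NODE.  */
}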
31895
31896 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31897
31898 static void
31899 resolve_variable_value (dw_die_ref die)
31900 {
31901 dw_attr_node *a;
31902 dw_loc_list_ref loc;
31903 unsigned ix;
31904
31905 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31906 switch (AT_class (a))
31907 {
31908 case dw_val_class_loc:
31909 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31910 break;
31911 /* FALLTHRU */
31912 case dw_val_class_loc_list:
31913 loc = AT_loc_list (a);
31914 gcc_assert (loc);
31915 for (; loc; loc = loc->dw_loc_next)
31916 resolve_variable_value_in_expr (a, loc->expr);
31917 break;
31918 default:
31919 break;
31920 }
31921 }
31922
31923 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31924 temporaries in the current function. */
31925
31926 static void
31927 resolve_variable_values (void)
31928 {
31929 if (!variable_value_hash || !current_function_decl)
31930 return;
31931
31932 struct variable_value_struct *node
31933 = variable_value_hash->find_with_hash (current_function_decl,
31934 DECL_UID (current_function_decl));
31935
31936 if (node == NULL)
31937 return;
31938
31939 unsigned int i;
31940 dw_die_ref die;
31941 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31942 resolve_variable_value (die);
31943 }
31944
31945 /* Helper function for note_variable_value, handle one location
31946 expression. */
31947
31948 static void
31949 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31950 {
31951 for (; loc; loc = loc->dw_loc_next)
31952 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31953 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31954 {
31955 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31956 dw_die_ref ref = lookup_decl_die (decl);
31957 if (! ref && (flag_generate_lto || flag_generate_offload))
31958 {
31959 /* ??? This is somewhat of a hack because we do not create DIEs
31960 for variables not in BLOCK trees early but when generating
31961 early LTO output we need the dw_val_class_decl_ref to be
31962 fully resolved. For fat LTO objects we'd also like to
31963 undo this after LTO dwarf output. */
31964 gcc_assert (DECL_CONTEXT (decl));
31965 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31966 gcc_assert (ctx != NULL);
31967 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31968 ref = lookup_decl_die (decl);
31969 gcc_assert (ref != NULL);
31970 }
31971 if (ref)
31972 {
31973 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31974 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31975 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31976 continue;
31977 }
31978 if (VAR_P (decl)
31979 && DECL_CONTEXT (decl)
31980 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31981 && lookup_decl_die (DECL_CONTEXT (decl)))
31982 {
31983 if (!variable_value_hash)
31984 variable_value_hash
31985 = hash_table<variable_value_hasher>::create_ggc (10);
31986
31987 tree fndecl = DECL_CONTEXT (decl);
31988 struct variable_value_struct *node;
31989 struct variable_value_struct **slot
31990 = variable_value_hash->find_slot_with_hash (fndecl,
31991 DECL_UID (fndecl),
31992 INSERT);
31993 if (*slot == NULL)
31994 {
31995 node = ggc_cleared_alloc<variable_value_struct> ();
31996 node->decl_id = DECL_UID (fndecl);
31997 *slot = node;
31998 }
31999 else
32000 node = *slot;
32001
32002 vec_safe_push (node->dies, die);
32003 }
32004 }
32005 }
32006
32007 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32008 with dw_val_class_decl_ref operand. */
32009
32010 static void
32011 note_variable_value (dw_die_ref die)
32012 {
32013 dw_die_ref c;
32014 dw_attr_node *a;
32015 dw_loc_list_ref loc;
32016 unsigned ix;
32017
32018 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32019 switch (AT_class (a))
32020 {
32021 case dw_val_class_loc_list:
32022 loc = AT_loc_list (a);
32023 gcc_assert (loc);
32024 if (!loc->noted_variable_value)
32025 {
32026 loc->noted_variable_value = 1;
32027 for (; loc; loc = loc->dw_loc_next)
32028 note_variable_value_in_expr (die, loc->expr);
32029 }
32030 break;
32031 case dw_val_class_loc:
32032 note_variable_value_in_expr (die, AT_loc (a));
32033 break;
32034 default:
32035 break;
32036 }
32037
32038 /* Mark children. */
32039 FOR_EACH_CHILD (die, c, note_variable_value (c));
32040 }
32041
32042 /* Perform any cleanups needed after the early debug generation pass
32043 has run. */
32044
32045 static void
32046 dwarf2out_early_finish (const char *filename)
32047 {
32048 set_early_dwarf s;
32049 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32050
32051 /* PCH might result in the DW_AT_producer string being restored from the
32052 header compilation, so always fill it with an empty string initially
32053 and overwrite it only here. */
32054 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32055 producer_string = gen_producer_string ();
32056 producer->dw_attr_val.v.val_str->refcount--;
32057 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32058
32059 /* Add the name for the main input file now. We delayed this from
32060 dwarf2out_init to avoid complications with PCH. */
32061 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32062 add_comp_dir_attribute (comp_unit_die ());
32063
32064 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32065 DW_AT_comp_dir into .debug_line_str section. */
32066 if (!output_asm_line_debug_info ()
32067 && dwarf_version >= 5
32068 && DWARF5_USE_DEBUG_LINE_STR)
32069 {
32070 for (int i = 0; i < 2; i++)
32071 {
32072 dw_attr_node *a = get_AT (comp_unit_die (),
32073 i ? DW_AT_comp_dir : DW_AT_name);
32074 if (a == NULL
32075 || AT_class (a) != dw_val_class_str
32076 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32077 continue;
32078
32079 if (! debug_line_str_hash)
32080 debug_line_str_hash
32081 = hash_table<indirect_string_hasher>::create_ggc (10);
32082
32083 struct indirect_string_node *node
32084 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32085 set_indirect_string (node);
32086 node->form = DW_FORM_line_strp;
32087 a->dw_attr_val.v.val_str->refcount--;
32088 a->dw_attr_val.v.val_str = node;
32089 }
32090 }
32091
32092 /* With LTO early dwarf was really finished at compile-time, so make
32093 sure to adjust the phase after annotating the LTRANS CU DIE. */
32094 if (in_lto_p)
32095 {
32096 /* Force DW_TAG_imported_unit to be created now, otherwise
32097 we might end up without it, or with it ordered after a
32098 DW_TAG_inlined_subroutine that references DIEs from it. */
32099 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32100 {
32101 unsigned i;
32102 tree tu;
32103 if (external_die_map)
32104 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32105 if (sym_off_pair *desc = external_die_map->get (tu))
32106 {
32107 dw_die_ref import = new_die (DW_TAG_imported_unit,
32108 comp_unit_die (), NULL_TREE);
32109 add_AT_external_die_ref (import, DW_AT_import,
32110 desc->sym, desc->off);
32111 }
32112 }
32113
32114 early_dwarf_finished = true;
32115 if (dump_file)
32116 {
32117 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32118 print_die (comp_unit_die (), dump_file);
32119 }
32120 return;
32121 }
32122
32123 /* Walk through the list of incomplete types again, trying once more to
32124 emit full debugging info for them. */
32125 retry_incomplete_types ();
32126
32127 /* The point here is to flush out the limbo list so that it is empty
32128 and we don't need to stream it for LTO. */
32129 flush_limbo_die_list ();
32130
32131 gen_scheduled_generic_parms_dies ();
32132 gen_remaining_tmpl_value_param_die_attribute ();
32133
32134 /* Add DW_AT_linkage_name for all deferred DIEs. */
32135 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32136 {
32137 tree decl = node->created_for;
32138 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32139 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32140 ended up in deferred_asm_name before we knew it was
32141 constant and never written to disk. */
32142 && DECL_ASSEMBLER_NAME (decl))
32143 {
32144 add_linkage_attr (node->die, decl);
32145 move_linkage_attr (node->die);
32146 }
32147 }
32148 deferred_asm_name = NULL;
32149
32150 if (flag_eliminate_unused_debug_types)
32151 prune_unused_types ();
32152
32153 /* Generate separate COMDAT sections for type DIEs. */
32154 if (use_debug_types)
32155 {
32156 break_out_comdat_types (comp_unit_die ());
32157
32158 /* Each new type_unit DIE was added to the limbo die list when created.
32159 Since these have all been added to comdat_type_list, clear the
32160 limbo die list. */
32161 limbo_die_list = NULL;
32162
32163 /* For each new comdat type unit, copy declarations for incomplete
32164 types to make the new unit self-contained (i.e., no direct
32165 references to the main compile unit). */
32166 for (comdat_type_node *ctnode = comdat_type_list;
32167 ctnode != NULL; ctnode = ctnode->next)
32168 copy_decls_for_unworthy_types (ctnode->root_die);
32169 copy_decls_for_unworthy_types (comp_unit_die ());
32170
32171 /* In the process of copying declarations from one unit to another,
32172 we may have left some declarations behind that are no longer
32173 referenced. Prune them. */
32174 prune_unused_types ();
32175 }
32176
32177 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
32178 with dw_val_class_decl_ref operand. */
32179 note_variable_value (comp_unit_die ());
32180 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32181 note_variable_value (node->die);
32182 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32183 ctnode = ctnode->next)
32184 note_variable_value (ctnode->root_die);
32185 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32186 note_variable_value (node->die);
32187
32188 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32189 both the main_cu and all skeleton TUs. Making this call unconditional
32190 would end up either adding a second copy of the AT_pubnames attribute, or
32191 requiring a special case in add_top_level_skeleton_die_attrs. */
32192 if (!dwarf_split_debug_info)
32193 add_AT_pubnames (comp_unit_die ());
32194
32195 /* The early debug phase is now finished. */
32196 early_dwarf_finished = true;
32197 if (dump_file)
32198 {
32199 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32200 print_die (comp_unit_die (), dump_file);
32201 }
32202
32203 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32204 if ((!flag_generate_lto && !flag_generate_offload)
32205 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32206 copy_lto_debug_sections operation of the simple object support in
32207 libiberty is not implemented for them yet. */
32208 || TARGET_PECOFF || TARGET_COFF)
32209 return;
32210
32211 /* Now that we are going to output for LTO, initialize sections and labels
32212 to the LTO variants. We don't need a random-seed postfix as other
32213 LTO sections do, since linking the LTO debug sections into one in a
32214 partial link is fine. */
32215 init_sections_and_labels (true);
32216
32217 /* The output below is modeled after dwarf2out_finish with all
32218 location related output removed and some LTO specific changes.
32219 Some refactoring might make both smaller and easier to match up. */
32220
32221 /* Traverse the DIEs and add sibling attributes to those DIEs
32222 that have children. */
32223 add_sibling_attributes (comp_unit_die ());
32224 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32225 add_sibling_attributes (node->die);
32226 for (comdat_type_node *ctnode = comdat_type_list;
32227 ctnode != NULL; ctnode = ctnode->next)
32228 add_sibling_attributes (ctnode->root_die);
32229
32230 /* AIX Assembler inserts the length, so adjust the reference to match the
32231 offset expected by debuggers. */
32232 strcpy (dl_section_ref, debug_line_section_label);
32233 if (XCOFF_DEBUGGING_INFO)
32234 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32235
32236 if (debug_info_level >= DINFO_LEVEL_TERSE)
32237 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32238
32239 if (have_macinfo)
32240 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32241 macinfo_section_label);
32242
32243 save_macinfo_strings ();
32244
32245 if (dwarf_split_debug_info)
32246 {
32247 unsigned int index = 0;
32248 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32249 }
32250
32251 /* Output all of the compilation units. We put the main one last so that
32252 the offsets are available to output_pubnames. */
32253 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32254 output_comp_unit (node->die, 0, NULL);
32255
32256 hash_table<comdat_type_hasher> comdat_type_table (100);
32257 for (comdat_type_node *ctnode = comdat_type_list;
32258 ctnode != NULL; ctnode = ctnode->next)
32259 {
32260 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32261
32262 /* Don't output duplicate types. */
32263 if (*slot != HTAB_EMPTY_ENTRY)
32264 continue;
32265
32266 /* Add a pointer to the line table for the main compilation unit
32267 so that the debugger can make sense of DW_AT_decl_file
32268 attributes. */
32269 if (debug_info_level >= DINFO_LEVEL_TERSE)
32270 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32271 (!dwarf_split_debug_info
32272 ? debug_line_section_label
32273 : debug_skeleton_line_section_label));
32274
32275 output_comdat_type_unit (ctnode, true);
32276 *slot = ctnode;
32277 }
32278
32279 /* Stick a unique symbol to the main debuginfo section. */
32280 compute_comp_unit_symbol (comp_unit_die ());
32281
32282 /* Output the main compilation unit. We always need it if only for
32283 the CU symbol. */
32284 output_comp_unit (comp_unit_die (), true, NULL);
32285
32286 /* Output the abbreviation table. */
32287 if (vec_safe_length (abbrev_die_table) != 1)
32288 {
32289 switch_to_section (debug_abbrev_section);
32290 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32291 output_abbrev_section ();
32292 }
32293
32294 /* Have to end the macro section. */
32295 if (have_macinfo)
32296 {
32297 /* We have to save macinfo state if we need to output it again
32298 for the FAT part of the object. */
32299 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32300 if (flag_fat_lto_objects)
32301 macinfo_table = macinfo_table->copy ();
32302
32303 switch_to_section (debug_macinfo_section);
32304 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32305 output_macinfo (debug_line_section_label, true);
32306 dw2_asm_output_data (1, 0, "End compilation unit");
32307
32308 if (flag_fat_lto_objects)
32309 {
32310 vec_free (macinfo_table);
32311 macinfo_table = saved_macinfo_table;
32312 }
32313 }
32314
32315 /* Emit a skeleton debug_line section. */
32316 switch_to_section (debug_line_section);
32317 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32318 output_line_info (true);
32319
32320 /* If we emitted any indirect strings, output the string table too. */
32321 if (debug_str_hash || skeleton_debug_str_hash)
32322 output_indirect_strings ();
32323 if (debug_line_str_hash)
32324 {
32325 switch_to_section (debug_line_str_section);
32326 const enum dwarf_form form = DW_FORM_line_strp;
32327 debug_line_str_hash->traverse<enum dwarf_form,
32328 output_indirect_string> (form);
32329 }
32330
32331 /* Switch back to the text section. */
32332 switch_to_section (text_section);
32333 }
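
/* Illustrative sketch, not part of dwarf2out.c: the DW_FORM_line_strp
   conversion near the top of dwarf2out_early_finish only pays off when a
   string's inline encoding (its bytes plus the terminating NUL) is larger
   than the section offset that would replace it, which is what the
   "strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE" test above checks.
   The constant and helper below are placeholders for the example.  */

#include <stdbool.h>
#include <string.h>

#define TOY_OFFSET_SIZE 4        /* 4 for 32-bit DWARF, 8 for 64-bit DWARF.  */

/* Return true if moving S into .debug_line_str would shrink .debug_info.  */

static bool
toy_worth_line_strp (const char *s)
{
  return strlen (s) + 1 > TOY_OFFSET_SIZE;
}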
32334
32335 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32336 within the same process. For use by toplev::finalize. */
32337
32338 void
32339 dwarf2out_c_finalize (void)
32340 {
32341 last_var_location_insn = NULL;
32342 cached_next_real_insn = NULL;
32343 used_rtx_array = NULL;
32344 incomplete_types = NULL;
32345 debug_info_section = NULL;
32346 debug_skeleton_info_section = NULL;
32347 debug_abbrev_section = NULL;
32348 debug_skeleton_abbrev_section = NULL;
32349 debug_aranges_section = NULL;
32350 debug_addr_section = NULL;
32351 debug_macinfo_section = NULL;
32352 debug_line_section = NULL;
32353 debug_skeleton_line_section = NULL;
32354 debug_loc_section = NULL;
32355 debug_pubnames_section = NULL;
32356 debug_pubtypes_section = NULL;
32357 debug_str_section = NULL;
32358 debug_line_str_section = NULL;
32359 debug_str_dwo_section = NULL;
32360 debug_str_offsets_section = NULL;
32361 debug_ranges_section = NULL;
32362 debug_frame_section = NULL;
32363 fde_vec = NULL;
32364 debug_str_hash = NULL;
32365 debug_line_str_hash = NULL;
32366 skeleton_debug_str_hash = NULL;
32367 dw2_string_counter = 0;
32368 have_multiple_function_sections = false;
32369 text_section_used = false;
32370 cold_text_section_used = false;
32371 cold_text_section = NULL;
32372 current_unit_personality = NULL;
32373
32374 early_dwarf = false;
32375 early_dwarf_finished = false;
32376
32377 next_die_offset = 0;
32378 single_comp_unit_die = NULL;
32379 comdat_type_list = NULL;
32380 limbo_die_list = NULL;
32381 file_table = NULL;
32382 decl_die_table = NULL;
32383 common_block_die_table = NULL;
32384 decl_loc_table = NULL;
32385 call_arg_locations = NULL;
32386 call_arg_loc_last = NULL;
32387 call_site_count = -1;
32388 tail_call_site_count = -1;
32389 cached_dw_loc_list_table = NULL;
32390 abbrev_die_table = NULL;
32391 delete dwarf_proc_stack_usage_map;
32392 dwarf_proc_stack_usage_map = NULL;
32393 line_info_label_num = 0;
32394 cur_line_info_table = NULL;
32395 text_section_line_info = NULL;
32396 cold_text_section_line_info = NULL;
32397 separate_line_info = NULL;
32398 info_section_emitted = false;
32399 pubname_table = NULL;
32400 pubtype_table = NULL;
32401 macinfo_table = NULL;
32402 ranges_table = NULL;
32403 ranges_by_label = NULL;
32404 rnglist_idx = 0;
32405 have_location_lists = false;
32406 loclabel_num = 0;
32407 poc_label_num = 0;
32408 last_emitted_file = NULL;
32409 label_num = 0;
32410 tmpl_value_parm_die_table = NULL;
32411 generic_type_instances = NULL;
32412 frame_pointer_fb_offset = 0;
32413 frame_pointer_fb_offset_valid = false;
32414 base_types.release ();
32415 XDELETEVEC (producer_string);
32416 producer_string = NULL;
32417 }
32418
32419 #include "gt-dwarf2out.h"