1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIEs.  */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
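
/* For example, with DWARF_OFFSET_SIZE == 4 the initial length is a plain
   4-byte count, while 64-bit DWARF emits the 4-byte 0xffffffff escape
   followed by an 8-byte length, i.e. the 12 bytes defined above.  */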
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
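
/* For instance, DWARF_ROUND (9, 4) yields 12 and DWARF_ROUND (8, 4)
   yields 8.  */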
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
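
/* Note: the one-entry cache in matches_main_base means the common case of
   repeated queries for the same path (e.g. many decls coming from a single
   file) costs only a pointer comparison.  */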
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
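
/* For instance, with a 64-bit HOST_WIDE_INT a value whose minimum precision
   is 65 bits needs (65 + 63) / 64 == 2 HOST_WIDE_INTs, while small values
   need just one.  */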
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
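
/* For example, DW_CFA_offset takes a register number as its first operand
   and an offset as its second, so it is classified as dw_cfi_oprnd_reg_num
   here and as dw_cfi_oprnd_offset in dw_cfi_oprnd2_desc below.  */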
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged with them and not discarded in
699 garbage collecting links. We need to do this on a per function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information, which records how the frame
740 pointer is calculated throughout each routine and where its saved
741 registers are located.  */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
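
/* For instance, with a personality routine, LSDA references and a
   non-default FDE encoding, the string built above is "zPLR"; if none of
   the optional data is needed it stays empty and no augmentation data is
   emitted for this CIE.  */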
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDEs.  */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for
1133 this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the epilogue code generated
1156 for this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 /* Location lists are ranges + location descriptions for that range,
1295 so you can track variables that are in different places over
1296 their entire life. */
1297 typedef struct GTY(()) dw_loc_list_struct {
1298 dw_loc_list_ref dw_loc_next;
1299 const char *begin; /* Label and addr_entry for start of range */
1300 addr_table_entry *begin_entry;
1301 const char *end; /* Label for end of range */
1302 char *ll_symbol; /* Label for beginning of location list.
1303 Only on head of list */
1304 const char *section; /* Section this loclist is relative to */
1305 dw_loc_descr_ref expr;
1306 hashval_t hash;
1307 /* True if all addresses in this and subsequent lists are known to be
1308 resolved. */
1309 bool resolved_addr;
1310 /* True if this list has been replaced by dw_loc_next. */
1311 bool replaced;
1312 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1313 section. */
1314 unsigned char emitted : 1;
1315 /* True if hash field is index rather than hash value. */
1316 unsigned char num_assigned : 1;
1317 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1318 unsigned char offset_emitted : 1;
1319 /* True if note_variable_value_in_expr has been called on it. */
1320 unsigned char noted_variable_value : 1;
1321 /* True if the range should be emitted even if begin and end
1322 are the same. */
1323 bool force;
1324 } dw_loc_list_node;
1325
1326 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1327 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1328
1329 /* Convert a DWARF stack opcode into its string name. */
1330
1331 static const char *
1332 dwarf_stack_op_name (unsigned int op)
1333 {
1334 const char *name = get_DW_OP_name (op);
1335
1336 if (name != NULL)
1337 return name;
1338
1339 return "OP_<unknown>";
1340 }
1341
1342 /* Return a pointer to a newly allocated location description. Location
1343 descriptions are simple expression terms that can be strung
1344 together to form more complicated location (address) descriptions. */
1345
1346 static inline dw_loc_descr_ref
1347 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1348 unsigned HOST_WIDE_INT oprnd2)
1349 {
1350 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1351
1352 descr->dw_loc_opc = op;
1353 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1354 descr->dw_loc_oprnd1.val_entry = NULL;
1355 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1356 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1357 descr->dw_loc_oprnd2.val_entry = NULL;
1358 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1359
1360 return descr;
1361 }
1362
1363 /* Add a location description term to a location description expression. */
1364
1365 static inline void
1366 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1367 {
1368 dw_loc_descr_ref *d;
1369
1370 /* Find the end of the chain. */
1371 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1372 ;
1373
1374 *d = descr;
1375 }
1376
1377 /* Compare two location operands for exact equality. */
1378
1379 static bool
1380 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1381 {
1382 if (a->val_class != b->val_class)
1383 return false;
1384 switch (a->val_class)
1385 {
1386 case dw_val_class_none:
1387 return true;
1388 case dw_val_class_addr:
1389 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1390
1391 case dw_val_class_offset:
1392 case dw_val_class_unsigned_const:
1393 case dw_val_class_const:
1394 case dw_val_class_unsigned_const_implicit:
1395 case dw_val_class_const_implicit:
1396 case dw_val_class_range_list:
1397 /* These are all HOST_WIDE_INT, signed or unsigned. */
1398 return a->v.val_unsigned == b->v.val_unsigned;
1399
1400 case dw_val_class_loc:
1401 return a->v.val_loc == b->v.val_loc;
1402 case dw_val_class_loc_list:
1403 return a->v.val_loc_list == b->v.val_loc_list;
1404 case dw_val_class_die_ref:
1405 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1406 case dw_val_class_fde_ref:
1407 return a->v.val_fde_index == b->v.val_fde_index;
1408 case dw_val_class_lbl_id:
1409 case dw_val_class_lineptr:
1410 case dw_val_class_macptr:
1411 case dw_val_class_loclistsptr:
1412 case dw_val_class_high_pc:
1413 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1414 case dw_val_class_str:
1415 return a->v.val_str == b->v.val_str;
1416 case dw_val_class_flag:
1417 return a->v.val_flag == b->v.val_flag;
1418 case dw_val_class_file:
1419 case dw_val_class_file_implicit:
1420 return a->v.val_file == b->v.val_file;
1421 case dw_val_class_decl_ref:
1422 return a->v.val_decl_ref == b->v.val_decl_ref;
1423
1424 case dw_val_class_const_double:
1425 return (a->v.val_double.high == b->v.val_double.high
1426 && a->v.val_double.low == b->v.val_double.low);
1427
1428 case dw_val_class_wide_int:
1429 return *a->v.val_wide == *b->v.val_wide;
1430
1431 case dw_val_class_vec:
1432 {
1433 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1434 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1435
1436 return (a_len == b_len
1437 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1438 }
1439
1440 case dw_val_class_data8:
1441 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1442
1443 case dw_val_class_vms_delta:
1444 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1445 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1446
1447 case dw_val_class_discr_value:
1448 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1449 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1450 case dw_val_class_discr_list:
1451 /* It makes no sense comparing two discriminant value lists. */
1452 return false;
1453 }
1454 gcc_unreachable ();
1455 }
1456
1457 /* Compare two location atoms for exact equality. */
1458
1459 static bool
1460 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1461 {
1462 if (a->dw_loc_opc != b->dw_loc_opc)
1463 return false;
1464
1465 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1466 address size, but since we always allocate cleared storage it
1467 should be zero for other types of locations. */
1468 if (a->dtprel != b->dtprel)
1469 return false;
1470
1471 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1472 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1473 }
1474
1475 /* Compare two complete location expressions for exact equality. */
1476
1477 bool
1478 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1479 {
1480 while (1)
1481 {
1482 if (a == b)
1483 return true;
1484 if (a == NULL || b == NULL)
1485 return false;
1486 if (!loc_descr_equal_p_1 (a, b))
1487 return false;
1488
1489 a = a->dw_loc_next;
1490 b = b->dw_loc_next;
1491 }
1492 }
1493
1494
1495 /* Add a constant POLY_OFFSET to a location expression. */
1496
1497 static void
1498 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1499 {
1500 dw_loc_descr_ref loc;
1501 HOST_WIDE_INT *p;
1502
1503 gcc_assert (*list_head != NULL);
1504
1505 if (known_eq (poly_offset, 0))
1506 return;
1507
1508 /* Find the end of the chain. */
1509 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1510 ;
1511
1512 HOST_WIDE_INT offset;
1513 if (!poly_offset.is_constant (&offset))
1514 {
1515 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1516 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1517 return;
1518 }
1519
1520 p = NULL;
1521 if (loc->dw_loc_opc == DW_OP_fbreg
1522 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1523 p = &loc->dw_loc_oprnd1.v.val_int;
1524 else if (loc->dw_loc_opc == DW_OP_bregx)
1525 p = &loc->dw_loc_oprnd2.v.val_int;
1526
1527 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1528 offset. Don't optimize if a signed integer overflow would happen. */
1529 if (p != NULL
1530 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1531 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1532 *p += offset;
1533
1534 else if (offset > 0)
1535 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1536
1537 else
1538 {
1539 loc->dw_loc_next
1540 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1541 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1542 }
1543 }
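
/* For instance, adding 8 to an expression ending in DW_OP_fbreg -24 simply
   rewrites it to DW_OP_fbreg -16; after any other final op a positive
   offset appends DW_OP_plus_uconst, and a negative one appends its absolute
   value followed by DW_OP_minus.  */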
1544
1545 /* Return a pointer to a newly allocated location description for
1546 REG and OFFSET. */
1547
1548 static inline dw_loc_descr_ref
1549 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1550 {
1551 HOST_WIDE_INT const_offset;
1552 if (offset.is_constant (&const_offset))
1553 {
1554 if (reg <= 31)
1555 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1556 const_offset, 0);
1557 else
1558 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1559 }
1560 else
1561 {
1562 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1563 loc_descr_plus_const (&ret, offset);
1564 return ret;
1565 }
1566 }
1567
1568 /* Add a constant OFFSET to a location list. */
1569
1570 static void
1571 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1572 {
1573 dw_loc_list_ref d;
1574 for (d = list_head; d != NULL; d = d->dw_loc_next)
1575 loc_descr_plus_const (&d->expr, offset);
1576 }
1577
1578 #define DWARF_REF_SIZE \
1579 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1580
1581 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1582 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1583 DW_FORM_data16 with 128 bits. */
1584 #define DWARF_LARGEST_DATA_FORM_BITS \
1585 (dwarf_version >= 5 ? 128 : 64)
1586
1587 /* Utility inline function for construction of ops that were GNU extensions
1588 before DWARF 5. */
1589 static inline enum dwarf_location_atom
1590 dwarf_OP (enum dwarf_location_atom op)
1591 {
1592 switch (op)
1593 {
1594 case DW_OP_implicit_pointer:
1595 if (dwarf_version < 5)
1596 return DW_OP_GNU_implicit_pointer;
1597 break;
1598
1599 case DW_OP_entry_value:
1600 if (dwarf_version < 5)
1601 return DW_OP_GNU_entry_value;
1602 break;
1603
1604 case DW_OP_const_type:
1605 if (dwarf_version < 5)
1606 return DW_OP_GNU_const_type;
1607 break;
1608
1609 case DW_OP_regval_type:
1610 if (dwarf_version < 5)
1611 return DW_OP_GNU_regval_type;
1612 break;
1613
1614 case DW_OP_deref_type:
1615 if (dwarf_version < 5)
1616 return DW_OP_GNU_deref_type;
1617 break;
1618
1619 case DW_OP_convert:
1620 if (dwarf_version < 5)
1621 return DW_OP_GNU_convert;
1622 break;
1623
1624 case DW_OP_reinterpret:
1625 if (dwarf_version < 5)
1626 return DW_OP_GNU_reinterpret;
1627 break;
1628
1629 default:
1630 break;
1631 }
1632 return op;
1633 }
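
/* For example, dwarf_OP (DW_OP_entry_value) yields DW_OP_GNU_entry_value
   when emitting DWARF 2-4 and the standard opcode for DWARF 5 and later.  */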
1634
1635 /* Similarly for attributes. */
1636 static inline enum dwarf_attribute
1637 dwarf_AT (enum dwarf_attribute at)
1638 {
1639 switch (at)
1640 {
1641 case DW_AT_call_return_pc:
1642 if (dwarf_version < 5)
1643 return DW_AT_low_pc;
1644 break;
1645
1646 case DW_AT_call_tail_call:
1647 if (dwarf_version < 5)
1648 return DW_AT_GNU_tail_call;
1649 break;
1650
1651 case DW_AT_call_origin:
1652 if (dwarf_version < 5)
1653 return DW_AT_abstract_origin;
1654 break;
1655
1656 case DW_AT_call_target:
1657 if (dwarf_version < 5)
1658 return DW_AT_GNU_call_site_target;
1659 break;
1660
1661 case DW_AT_call_target_clobbered:
1662 if (dwarf_version < 5)
1663 return DW_AT_GNU_call_site_target_clobbered;
1664 break;
1665
1666 case DW_AT_call_parameter:
1667 if (dwarf_version < 5)
1668 return DW_AT_abstract_origin;
1669 break;
1670
1671 case DW_AT_call_value:
1672 if (dwarf_version < 5)
1673 return DW_AT_GNU_call_site_value;
1674 break;
1675
1676 case DW_AT_call_data_value:
1677 if (dwarf_version < 5)
1678 return DW_AT_GNU_call_site_data_value;
1679 break;
1680
1681 case DW_AT_call_all_calls:
1682 if (dwarf_version < 5)
1683 return DW_AT_GNU_all_call_sites;
1684 break;
1685
1686 case DW_AT_call_all_tail_calls:
1687 if (dwarf_version < 5)
1688 return DW_AT_GNU_all_tail_call_sites;
1689 break;
1690
1691 case DW_AT_dwo_name:
1692 if (dwarf_version < 5)
1693 return DW_AT_GNU_dwo_name;
1694 break;
1695
1696 default:
1697 break;
1698 }
1699 return at;
1700 }
1701
1702 /* And similarly for tags. */
1703 static inline enum dwarf_tag
1704 dwarf_TAG (enum dwarf_tag tag)
1705 {
1706 switch (tag)
1707 {
1708 case DW_TAG_call_site:
1709 if (dwarf_version < 5)
1710 return DW_TAG_GNU_call_site;
1711 break;
1712
1713 case DW_TAG_call_site_parameter:
1714 if (dwarf_version < 5)
1715 return DW_TAG_GNU_call_site_parameter;
1716 break;
1717
1718 default:
1719 break;
1720 }
1721 return tag;
1722 }
1723
1724 static unsigned long int get_base_type_offset (dw_die_ref);
1725
1726 /* Return the size of a location descriptor. */
1727
1728 static unsigned long
1729 size_of_loc_descr (dw_loc_descr_ref loc)
1730 {
1731 unsigned long size = 1;
1732
1733 switch (loc->dw_loc_opc)
1734 {
1735 case DW_OP_addr:
1736 size += DWARF2_ADDR_SIZE;
1737 break;
1738 case DW_OP_GNU_addr_index:
1739 case DW_OP_GNU_const_index:
1740 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1741 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1742 break;
1743 case DW_OP_const1u:
1744 case DW_OP_const1s:
1745 size += 1;
1746 break;
1747 case DW_OP_const2u:
1748 case DW_OP_const2s:
1749 size += 2;
1750 break;
1751 case DW_OP_const4u:
1752 case DW_OP_const4s:
1753 size += 4;
1754 break;
1755 case DW_OP_const8u:
1756 case DW_OP_const8s:
1757 size += 8;
1758 break;
1759 case DW_OP_constu:
1760 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1761 break;
1762 case DW_OP_consts:
1763 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1764 break;
1765 case DW_OP_pick:
1766 size += 1;
1767 break;
1768 case DW_OP_plus_uconst:
1769 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1770 break;
1771 case DW_OP_skip:
1772 case DW_OP_bra:
1773 size += 2;
1774 break;
1775 case DW_OP_breg0:
1776 case DW_OP_breg1:
1777 case DW_OP_breg2:
1778 case DW_OP_breg3:
1779 case DW_OP_breg4:
1780 case DW_OP_breg5:
1781 case DW_OP_breg6:
1782 case DW_OP_breg7:
1783 case DW_OP_breg8:
1784 case DW_OP_breg9:
1785 case DW_OP_breg10:
1786 case DW_OP_breg11:
1787 case DW_OP_breg12:
1788 case DW_OP_breg13:
1789 case DW_OP_breg14:
1790 case DW_OP_breg15:
1791 case DW_OP_breg16:
1792 case DW_OP_breg17:
1793 case DW_OP_breg18:
1794 case DW_OP_breg19:
1795 case DW_OP_breg20:
1796 case DW_OP_breg21:
1797 case DW_OP_breg22:
1798 case DW_OP_breg23:
1799 case DW_OP_breg24:
1800 case DW_OP_breg25:
1801 case DW_OP_breg26:
1802 case DW_OP_breg27:
1803 case DW_OP_breg28:
1804 case DW_OP_breg29:
1805 case DW_OP_breg30:
1806 case DW_OP_breg31:
1807 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1808 break;
1809 case DW_OP_regx:
1810 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1811 break;
1812 case DW_OP_fbreg:
1813 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1814 break;
1815 case DW_OP_bregx:
1816 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1817 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1818 break;
1819 case DW_OP_piece:
1820 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1821 break;
1822 case DW_OP_bit_piece:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1825 break;
1826 case DW_OP_deref_size:
1827 case DW_OP_xderef_size:
1828 size += 1;
1829 break;
1830 case DW_OP_call2:
1831 size += 2;
1832 break;
1833 case DW_OP_call4:
1834 size += 4;
1835 break;
1836 case DW_OP_call_ref:
1837 case DW_OP_GNU_variable_value:
1838 size += DWARF_REF_SIZE;
1839 break;
1840 case DW_OP_implicit_value:
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1842 + loc->dw_loc_oprnd1.v.val_unsigned;
1843 break;
1844 case DW_OP_implicit_pointer:
1845 case DW_OP_GNU_implicit_pointer:
1846 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1847 break;
1848 case DW_OP_entry_value:
1849 case DW_OP_GNU_entry_value:
1850 {
1851 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1852 size += size_of_uleb128 (op_size) + op_size;
1853 break;
1854 }
1855 case DW_OP_const_type:
1856 case DW_OP_GNU_const_type:
1857 {
1858 unsigned long o
1859 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1860 size += size_of_uleb128 (o) + 1;
1861 switch (loc->dw_loc_oprnd2.val_class)
1862 {
1863 case dw_val_class_vec:
1864 size += loc->dw_loc_oprnd2.v.val_vec.length
1865 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1866 break;
1867 case dw_val_class_const:
1868 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1869 break;
1870 case dw_val_class_const_double:
1871 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1872 break;
1873 case dw_val_class_wide_int:
1874 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1875 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1876 break;
1877 default:
1878 gcc_unreachable ();
1879 }
1880 break;
1881 }
1882 case DW_OP_regval_type:
1883 case DW_OP_GNU_regval_type:
1884 {
1885 unsigned long o
1886 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1888 + size_of_uleb128 (o);
1889 }
1890 break;
1891 case DW_OP_deref_type:
1892 case DW_OP_GNU_deref_type:
1893 {
1894 unsigned long o
1895 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1896 size += 1 + size_of_uleb128 (o);
1897 }
1898 break;
1899 case DW_OP_convert:
1900 case DW_OP_reinterpret:
1901 case DW_OP_GNU_convert:
1902 case DW_OP_GNU_reinterpret:
1903 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1905 else
1906 {
1907 unsigned long o
1908 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1909 size += size_of_uleb128 (o);
1910 }
1911 break;
1912 case DW_OP_GNU_parameter_ref:
1913 size += 4;
1914 break;
1915 default:
1916 break;
1917 }
1918
1919 return size;
1920 }
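
/* Illustrative sketch (not part of the original source): the ULEB128 and
   SLEB128 size helpers used throughout size_of_loc_descr count how many
   7-bit groups are needed to encode an operand.  A minimal version of the
   unsigned case, assumed to match the semantics of size_of_uleb128:  */

static inline unsigned long
example_size_of_uleb128 (unsigned HOST_WIDE_INT value)
{
  unsigned long size = 0;
  do
    {
      /* Each output byte carries 7 payload bits plus a continuation bit.  */
      value >>= 7;
      size++;
    }
  while (value != 0);
  return size;
}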
1921
1922 /* Return the size of a series of location descriptors. */
1923
1924 unsigned long
1925 size_of_locs (dw_loc_descr_ref loc)
1926 {
1927 dw_loc_descr_ref l;
1928 unsigned long size;
1929
1930 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1931 field, to avoid writing to a PCH file. */
1932 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1933 {
1934 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1935 break;
1936 size += size_of_loc_descr (l);
1937 }
1938 if (! l)
1939 return size;
1940
1941 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1942 {
1943 l->dw_loc_addr = size;
1944 size += size_of_loc_descr (l);
1945 }
1946
1947 return size;
1948 }
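
/* Worked example (illustrative, not part of the original source): for the
   sequence DW_OP_dup, DW_OP_bra L, DW_OP_lit0, L: DW_OP_nop, the second
   pass above records dw_loc_addr offsets 0, 1, 4 and 5 (DW_OP_bra takes
   one opcode byte plus a 2-byte operand).  output_loc_operands later emits
   the branch operand as 5 - (1 + 3) = 1, i.e. the branch skips the single
   DW_OP_lit0 byte that follows it.  */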
1949
1950 /* Return the size of the value in a DW_AT_discr_value attribute. */
1951
1952 static int
1953 size_of_discr_value (dw_discr_value *discr_value)
1954 {
1955 if (discr_value->pos)
1956 return size_of_uleb128 (discr_value->v.uval);
1957 else
1958 return size_of_sleb128 (discr_value->v.sval);
1959 }
1960
1961 /* Return the size of the value in a DW_AT_discr_list attribute. */
1962
1963 static int
1964 size_of_discr_list (dw_discr_list_ref discr_list)
1965 {
1966 int size = 0;
1967
1968 for (dw_discr_list_ref list = discr_list;
1969 list != NULL;
1970 list = list->dw_discr_next)
1971 {
1972 /* One byte for the discriminant value descriptor, and then one or two
1973 LEB128 numbers, depending on whether it's a single case label or a
1974 range label. */
1975 size += 1;
1976 size += size_of_discr_value (&list->dw_discr_lower_bound);
1977 if (list->dw_discr_range != 0)
1978 size += size_of_discr_value (&list->dw_discr_upper_bound);
1979 }
1980 return size;
1981 }
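
/* Worked example (illustrative, not part of the original source): a
   DW_AT_discr_list holding the single label 4 plus the range 10..300 costs
   (1 + size_of_uleb128 (4)) + (1 + size_of_uleb128 (10)
   + size_of_uleb128 (300)) = 2 + 4 = 6 bytes for unsigned discriminants,
   since 300 needs a two-byte ULEB128 encoding.  */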
1982
1983 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1984 static void get_ref_die_offset_label (char *, dw_die_ref);
1985 static unsigned long int get_ref_die_offset (dw_die_ref);
1986
1987 /* Output location description stack opcode's operands (if any).
1988 The for_eh_or_skip parameter controls whether register numbers are
1989 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1990 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1991 info). This should be suppressed for the cases that have not been converted
1992 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1993
1994 static void
1995 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1996 {
1997 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1998 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1999
2000 switch (loc->dw_loc_opc)
2001 {
2002 #ifdef DWARF2_DEBUGGING_INFO
2003 case DW_OP_const2u:
2004 case DW_OP_const2s:
2005 dw2_asm_output_data (2, val1->v.val_int, NULL);
2006 break;
2007 case DW_OP_const4u:
2008 if (loc->dtprel)
2009 {
2010 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2011 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2012 val1->v.val_addr);
2013 fputc ('\n', asm_out_file);
2014 break;
2015 }
2016 /* FALLTHRU */
2017 case DW_OP_const4s:
2018 dw2_asm_output_data (4, val1->v.val_int, NULL);
2019 break;
2020 case DW_OP_const8u:
2021 if (loc->dtprel)
2022 {
2023 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2024 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2025 val1->v.val_addr);
2026 fputc ('\n', asm_out_file);
2027 break;
2028 }
2029 /* FALLTHRU */
2030 case DW_OP_const8s:
2031 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2032 dw2_asm_output_data (8, val1->v.val_int, NULL);
2033 break;
2034 case DW_OP_skip:
2035 case DW_OP_bra:
2036 {
2037 int offset;
2038
2039 gcc_assert (val1->val_class == dw_val_class_loc);
2040 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2041
2042 dw2_asm_output_data (2, offset, NULL);
2043 }
2044 break;
2045 case DW_OP_implicit_value:
2046 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2047 switch (val2->val_class)
2048 {
2049 case dw_val_class_const:
2050 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2051 break;
2052 case dw_val_class_vec:
2053 {
2054 unsigned int elt_size = val2->v.val_vec.elt_size;
2055 unsigned int len = val2->v.val_vec.length;
2056 unsigned int i;
2057 unsigned char *p;
2058
2059 if (elt_size > sizeof (HOST_WIDE_INT))
2060 {
2061 elt_size /= 2;
2062 len *= 2;
2063 }
2064 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2065 i < len;
2066 i++, p += elt_size)
2067 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2068 "fp or vector constant word %u", i);
2069 }
2070 break;
2071 case dw_val_class_const_double:
2072 {
2073 unsigned HOST_WIDE_INT first, second;
2074
2075 if (WORDS_BIG_ENDIAN)
2076 {
2077 first = val2->v.val_double.high;
2078 second = val2->v.val_double.low;
2079 }
2080 else
2081 {
2082 first = val2->v.val_double.low;
2083 second = val2->v.val_double.high;
2084 }
2085 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2086 first, NULL);
2087 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2088 second, NULL);
2089 }
2090 break;
2091 case dw_val_class_wide_int:
2092 {
2093 int i;
2094 int len = get_full_len (*val2->v.val_wide);
2095 if (WORDS_BIG_ENDIAN)
2096 for (i = len - 1; i >= 0; --i)
2097 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2098 val2->v.val_wide->elt (i), NULL);
2099 else
2100 for (i = 0; i < len; ++i)
2101 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2102 val2->v.val_wide->elt (i), NULL);
2103 }
2104 break;
2105 case dw_val_class_addr:
2106 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2107 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2108 break;
2109 default:
2110 gcc_unreachable ();
2111 }
2112 break;
2113 #else
2114 case DW_OP_const2u:
2115 case DW_OP_const2s:
2116 case DW_OP_const4u:
2117 case DW_OP_const4s:
2118 case DW_OP_const8u:
2119 case DW_OP_const8s:
2120 case DW_OP_skip:
2121 case DW_OP_bra:
2122 case DW_OP_implicit_value:
2123 /* We currently don't make any attempt to make sure these are
2124 aligned properly like we do for the main unwind info, so
2125 don't support emitting things larger than a byte if we're
2126 only doing unwinding. */
2127 gcc_unreachable ();
2128 #endif
2129 case DW_OP_const1u:
2130 case DW_OP_const1s:
2131 dw2_asm_output_data (1, val1->v.val_int, NULL);
2132 break;
2133 case DW_OP_constu:
2134 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2135 break;
2136 case DW_OP_consts:
2137 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2138 break;
2139 case DW_OP_pick:
2140 dw2_asm_output_data (1, val1->v.val_int, NULL);
2141 break;
2142 case DW_OP_plus_uconst:
2143 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2144 break;
2145 case DW_OP_breg0:
2146 case DW_OP_breg1:
2147 case DW_OP_breg2:
2148 case DW_OP_breg3:
2149 case DW_OP_breg4:
2150 case DW_OP_breg5:
2151 case DW_OP_breg6:
2152 case DW_OP_breg7:
2153 case DW_OP_breg8:
2154 case DW_OP_breg9:
2155 case DW_OP_breg10:
2156 case DW_OP_breg11:
2157 case DW_OP_breg12:
2158 case DW_OP_breg13:
2159 case DW_OP_breg14:
2160 case DW_OP_breg15:
2161 case DW_OP_breg16:
2162 case DW_OP_breg17:
2163 case DW_OP_breg18:
2164 case DW_OP_breg19:
2165 case DW_OP_breg20:
2166 case DW_OP_breg21:
2167 case DW_OP_breg22:
2168 case DW_OP_breg23:
2169 case DW_OP_breg24:
2170 case DW_OP_breg25:
2171 case DW_OP_breg26:
2172 case DW_OP_breg27:
2173 case DW_OP_breg28:
2174 case DW_OP_breg29:
2175 case DW_OP_breg30:
2176 case DW_OP_breg31:
2177 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2178 break;
2179 case DW_OP_regx:
2180 {
2181 unsigned r = val1->v.val_unsigned;
2182 if (for_eh_or_skip >= 0)
2183 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2184 gcc_assert (size_of_uleb128 (r)
2185 == size_of_uleb128 (val1->v.val_unsigned));
2186 dw2_asm_output_data_uleb128 (r, NULL);
2187 }
2188 break;
2189 case DW_OP_fbreg:
2190 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2191 break;
2192 case DW_OP_bregx:
2193 {
2194 unsigned r = val1->v.val_unsigned;
2195 if (for_eh_or_skip >= 0)
2196 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2197 gcc_assert (size_of_uleb128 (r)
2198 == size_of_uleb128 (val1->v.val_unsigned));
2199 dw2_asm_output_data_uleb128 (r, NULL);
2200 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2201 }
2202 break;
2203 case DW_OP_piece:
2204 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2205 break;
2206 case DW_OP_bit_piece:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2209 break;
2210 case DW_OP_deref_size:
2211 case DW_OP_xderef_size:
2212 dw2_asm_output_data (1, val1->v.val_int, NULL);
2213 break;
2214
2215 case DW_OP_addr:
2216 if (loc->dtprel)
2217 {
2218 if (targetm.asm_out.output_dwarf_dtprel)
2219 {
2220 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2221 DWARF2_ADDR_SIZE,
2222 val1->v.val_addr);
2223 fputc ('\n', asm_out_file);
2224 }
2225 else
2226 gcc_unreachable ();
2227 }
2228 else
2229 {
2230 #ifdef DWARF2_DEBUGGING_INFO
2231 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2232 #else
2233 gcc_unreachable ();
2234 #endif
2235 }
2236 break;
2237
2238 case DW_OP_GNU_addr_index:
2239 case DW_OP_GNU_const_index:
2240 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2241 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2242 "(index into .debug_addr)");
2243 break;
2244
2245 case DW_OP_call2:
2246 case DW_OP_call4:
2247 {
2248 unsigned long die_offset
2249 = get_ref_die_offset (val1->v.val_die_ref.die);
2250 /* Make sure the offset has been computed and that we can encode it as
2251 an operand. */
2252 gcc_assert (die_offset > 0
2253 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2254 ? 0xffff
2255 : 0xffffffff));
2256 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2257 die_offset, NULL);
2258 }
2259 break;
2260
2261 case DW_OP_call_ref:
2262 case DW_OP_GNU_variable_value:
2263 {
2264 char label[MAX_ARTIFICIAL_LABEL_BYTES
2265 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2266 gcc_assert (val1->val_class == dw_val_class_die_ref);
2267 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2268 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2269 }
2270 break;
2271
2272 case DW_OP_implicit_pointer:
2273 case DW_OP_GNU_implicit_pointer:
2274 {
2275 char label[MAX_ARTIFICIAL_LABEL_BYTES
2276 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2277 gcc_assert (val1->val_class == dw_val_class_die_ref);
2278 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2279 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2280 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2281 }
2282 break;
2283
2284 case DW_OP_entry_value:
2285 case DW_OP_GNU_entry_value:
2286 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2287 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2288 break;
2289
2290 case DW_OP_const_type:
2291 case DW_OP_GNU_const_type:
2292 {
2293 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2294 gcc_assert (o);
2295 dw2_asm_output_data_uleb128 (o, NULL);
2296 switch (val2->val_class)
2297 {
2298 case dw_val_class_const:
2299 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2300 dw2_asm_output_data (1, l, NULL);
2301 dw2_asm_output_data (l, val2->v.val_int, NULL);
2302 break;
2303 case dw_val_class_vec:
2304 {
2305 unsigned int elt_size = val2->v.val_vec.elt_size;
2306 unsigned int len = val2->v.val_vec.length;
2307 unsigned int i;
2308 unsigned char *p;
2309
2310 l = len * elt_size;
2311 dw2_asm_output_data (1, l, NULL);
2312 if (elt_size > sizeof (HOST_WIDE_INT))
2313 {
2314 elt_size /= 2;
2315 len *= 2;
2316 }
2317 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2318 i < len;
2319 i++, p += elt_size)
2320 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2321 "fp or vector constant word %u", i);
2322 }
2323 break;
2324 case dw_val_class_const_double:
2325 {
2326 unsigned HOST_WIDE_INT first, second;
2327 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2328
2329 dw2_asm_output_data (1, 2 * l, NULL);
2330 if (WORDS_BIG_ENDIAN)
2331 {
2332 first = val2->v.val_double.high;
2333 second = val2->v.val_double.low;
2334 }
2335 else
2336 {
2337 first = val2->v.val_double.low;
2338 second = val2->v.val_double.high;
2339 }
2340 dw2_asm_output_data (l, first, NULL);
2341 dw2_asm_output_data (l, second, NULL);
2342 }
2343 break;
2344 case dw_val_class_wide_int:
2345 {
2346 int i;
2347 int len = get_full_len (*val2->v.val_wide);
2348 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2349
2350 dw2_asm_output_data (1, len * l, NULL);
2351 if (WORDS_BIG_ENDIAN)
2352 for (i = len - 1; i >= 0; --i)
2353 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2354 else
2355 for (i = 0; i < len; ++i)
2356 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2357 }
2358 break;
2359 default:
2360 gcc_unreachable ();
2361 }
2362 }
2363 break;
2364 case DW_OP_regval_type:
2365 case DW_OP_GNU_regval_type:
2366 {
2367 unsigned r = val1->v.val_unsigned;
2368 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2369 gcc_assert (o);
2370 if (for_eh_or_skip >= 0)
2371 {
2372 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2373 gcc_assert (size_of_uleb128 (r)
2374 == size_of_uleb128 (val1->v.val_unsigned));
2375 }
2376 dw2_asm_output_data_uleb128 (r, NULL);
2377 dw2_asm_output_data_uleb128 (o, NULL);
2378 }
2379 break;
2380 case DW_OP_deref_type:
2381 case DW_OP_GNU_deref_type:
2382 {
2383 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2384 gcc_assert (o);
2385 dw2_asm_output_data (1, val1->v.val_int, NULL);
2386 dw2_asm_output_data_uleb128 (o, NULL);
2387 }
2388 break;
2389 case DW_OP_convert:
2390 case DW_OP_reinterpret:
2391 case DW_OP_GNU_convert:
2392 case DW_OP_GNU_reinterpret:
2393 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2394 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2395 else
2396 {
2397 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2398 gcc_assert (o);
2399 dw2_asm_output_data_uleb128 (o, NULL);
2400 }
2401 break;
2402
2403 case DW_OP_GNU_parameter_ref:
2404 {
2405 unsigned long o;
2406 gcc_assert (val1->val_class == dw_val_class_die_ref);
2407 o = get_ref_die_offset (val1->v.val_die_ref.die);
2408 dw2_asm_output_data (4, o, NULL);
2409 }
2410 break;
2411
2412 default:
2413 /* Other codes have no operands. */
2414 break;
2415 }
2416 }
2417
2418 /* Output a sequence of location operations.
2419 The for_eh_or_skip parameter controls whether register numbers are
2420 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2421 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2422 info). This should be suppressed for the cases that have not been converted
2423 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2424
2425 void
2426 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2427 {
2428 for (; loc != NULL; loc = loc->dw_loc_next)
2429 {
2430 enum dwarf_location_atom opc = loc->dw_loc_opc;
2431 /* Output the opcode. */
2432 if (for_eh_or_skip >= 0
2433 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2434 {
2435 unsigned r = (opc - DW_OP_breg0);
2436 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2437 gcc_assert (r <= 31);
2438 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2439 }
2440 else if (for_eh_or_skip >= 0
2441 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2442 {
2443 unsigned r = (opc - DW_OP_reg0);
2444 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2445 gcc_assert (r <= 31);
2446 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2447 }
2448
2449 dw2_asm_output_data (1, opc,
2450 "%s", dwarf_stack_op_name (opc));
2451
2452 /* Output the operand(s) (if any). */
2453 output_loc_operands (loc, for_eh_or_skip);
2454 }
2455 }
2456
2457 /* Output location description stack opcode's operands (if any).
2458 The output is single bytes on a line, suitable for .cfi_escape. */
2459
2460 static void
2461 output_loc_operands_raw (dw_loc_descr_ref loc)
2462 {
2463 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2464 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2465
2466 switch (loc->dw_loc_opc)
2467 {
2468 case DW_OP_addr:
2469 case DW_OP_GNU_addr_index:
2470 case DW_OP_GNU_const_index:
2471 case DW_OP_implicit_value:
2472 /* We cannot output addresses in .cfi_escape, only bytes. */
2473 gcc_unreachable ();
2474
2475 case DW_OP_const1u:
2476 case DW_OP_const1s:
2477 case DW_OP_pick:
2478 case DW_OP_deref_size:
2479 case DW_OP_xderef_size:
2480 fputc (',', asm_out_file);
2481 dw2_asm_output_data_raw (1, val1->v.val_int);
2482 break;
2483
2484 case DW_OP_const2u:
2485 case DW_OP_const2s:
2486 fputc (',', asm_out_file);
2487 dw2_asm_output_data_raw (2, val1->v.val_int);
2488 break;
2489
2490 case DW_OP_const4u:
2491 case DW_OP_const4s:
2492 fputc (',', asm_out_file);
2493 dw2_asm_output_data_raw (4, val1->v.val_int);
2494 break;
2495
2496 case DW_OP_const8u:
2497 case DW_OP_const8s:
2498 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2499 fputc (',', asm_out_file);
2500 dw2_asm_output_data_raw (8, val1->v.val_int);
2501 break;
2502
2503 case DW_OP_skip:
2504 case DW_OP_bra:
2505 {
2506 int offset;
2507
2508 gcc_assert (val1->val_class == dw_val_class_loc);
2509 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2510
2511 fputc (',', asm_out_file);
2512 dw2_asm_output_data_raw (2, offset);
2513 }
2514 break;
2515
2516 case DW_OP_regx:
2517 {
2518 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2519 gcc_assert (size_of_uleb128 (r)
2520 == size_of_uleb128 (val1->v.val_unsigned));
2521 fputc (',', asm_out_file);
2522 dw2_asm_output_data_uleb128_raw (r);
2523 }
2524 break;
2525
2526 case DW_OP_constu:
2527 case DW_OP_plus_uconst:
2528 case DW_OP_piece:
2529 fputc (',', asm_out_file);
2530 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2531 break;
2532
2533 case DW_OP_bit_piece:
2534 fputc (',', asm_out_file);
2535 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2536 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2537 break;
2538
2539 case DW_OP_consts:
2540 case DW_OP_breg0:
2541 case DW_OP_breg1:
2542 case DW_OP_breg2:
2543 case DW_OP_breg3:
2544 case DW_OP_breg4:
2545 case DW_OP_breg5:
2546 case DW_OP_breg6:
2547 case DW_OP_breg7:
2548 case DW_OP_breg8:
2549 case DW_OP_breg9:
2550 case DW_OP_breg10:
2551 case DW_OP_breg11:
2552 case DW_OP_breg12:
2553 case DW_OP_breg13:
2554 case DW_OP_breg14:
2555 case DW_OP_breg15:
2556 case DW_OP_breg16:
2557 case DW_OP_breg17:
2558 case DW_OP_breg18:
2559 case DW_OP_breg19:
2560 case DW_OP_breg20:
2561 case DW_OP_breg21:
2562 case DW_OP_breg22:
2563 case DW_OP_breg23:
2564 case DW_OP_breg24:
2565 case DW_OP_breg25:
2566 case DW_OP_breg26:
2567 case DW_OP_breg27:
2568 case DW_OP_breg28:
2569 case DW_OP_breg29:
2570 case DW_OP_breg30:
2571 case DW_OP_breg31:
2572 case DW_OP_fbreg:
2573 fputc (',', asm_out_file);
2574 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2575 break;
2576
2577 case DW_OP_bregx:
2578 {
2579 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2580 gcc_assert (size_of_uleb128 (r)
2581 == size_of_uleb128 (val1->v.val_unsigned));
2582 fputc (',', asm_out_file);
2583 dw2_asm_output_data_uleb128_raw (r);
2584 fputc (',', asm_out_file);
2585 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2586 }
2587 break;
2588
2589 case DW_OP_implicit_pointer:
2590 case DW_OP_entry_value:
2591 case DW_OP_const_type:
2592 case DW_OP_regval_type:
2593 case DW_OP_deref_type:
2594 case DW_OP_convert:
2595 case DW_OP_reinterpret:
2596 case DW_OP_GNU_implicit_pointer:
2597 case DW_OP_GNU_entry_value:
2598 case DW_OP_GNU_const_type:
2599 case DW_OP_GNU_regval_type:
2600 case DW_OP_GNU_deref_type:
2601 case DW_OP_GNU_convert:
2602 case DW_OP_GNU_reinterpret:
2603 case DW_OP_GNU_parameter_ref:
2604 gcc_unreachable ();
2605 break;
2606
2607 default:
2608 /* Other codes have no operands. */
2609 break;
2610 }
2611 }
2612
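/* Like output_loc_sequence, but emit the opcodes and their operands in raw,
   comma-separated form suitable for a .cfi_escape directive.  Register
   numbers are always converted with DWARF2_FRAME_REG_OUT here.  */
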
2613 void
2614 output_loc_sequence_raw (dw_loc_descr_ref loc)
2615 {
2616 while (1)
2617 {
2618 enum dwarf_location_atom opc = loc->dw_loc_opc;
2619 /* Output the opcode. */
2620 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2621 {
2622 unsigned r = (opc - DW_OP_breg0);
2623 r = DWARF2_FRAME_REG_OUT (r, 1);
2624 gcc_assert (r <= 31);
2625 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2626 }
2627 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2628 {
2629 unsigned r = (opc - DW_OP_reg0);
2630 r = DWARF2_FRAME_REG_OUT (r, 1);
2631 gcc_assert (r <= 31);
2632 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2633 }
2634 /* Output the opcode. */
2635 fprintf (asm_out_file, "%#x", opc);
2636 output_loc_operands_raw (loc);
2637
2638 if (!loc->dw_loc_next)
2639 break;
2640 loc = loc->dw_loc_next;
2641
2642 fputc (',', asm_out_file);
2643 }
2644 }
2645
2646 /* This function builds a dwarf location descriptor sequence from a
2647 dw_cfa_location, adding the given OFFSET to the result of the
2648 expression. */
2649
2650 struct dw_loc_descr_node *
2651 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2652 {
2653 struct dw_loc_descr_node *head, *tmp;
2654
2655 offset += cfa->offset;
2656
2657 if (cfa->indirect)
2658 {
2659 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2660 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2661 head->dw_loc_oprnd1.val_entry = NULL;
2662 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2663 add_loc_descr (&head, tmp);
2664 loc_descr_plus_const (&head, offset);
2665 }
2666 else
2667 head = new_reg_loc_descr (cfa->reg, offset);
2668
2669 return head;
2670 }
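
/* Illustrative sketch (not part of the original source): for an indirect
   CFA located at [reg6 + 16], the code above builds
   DW_OP_breg6 16, DW_OP_deref, DW_OP_plus_uconst <offset>
   (for a positive remaining offset), while the common direct case is just
   a single register-relative op such as DW_OP_breg6 <offset>.  */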
2671
2672 /* This function builds a dwarf location descriptor sequence for
2673 the address at OFFSET from the CFA when the stack is aligned to
2674 ALIGNMENT bytes. */
2675
2676 struct dw_loc_descr_node *
2677 build_cfa_aligned_loc (dw_cfa_location *cfa,
2678 poly_int64 offset, HOST_WIDE_INT alignment)
2679 {
2680 struct dw_loc_descr_node *head;
2681 unsigned int dwarf_fp
2682 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2683
2684 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2685 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2686 {
2687 head = new_reg_loc_descr (dwarf_fp, 0);
2688 add_loc_descr (&head, int_loc_descriptor (alignment));
2689 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2690 loc_descr_plus_const (&head, offset);
2691 }
2692 else
2693 head = new_reg_loc_descr (dwarf_fp, offset);
2694 return head;
2695 }
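
/* Illustrative sketch (not part of the original source): in the realigned
   case the expression above reads
   DW_OP_breg<fp> 0, <ALIGNMENT constant>, DW_OP_and, DW_OP_plus_uconst <offset>
   (assuming a positive remaining offset), so consumers first mask the frame
   pointer with the caller-supplied ALIGNMENT constant (typically a negative
   power-of-two mask such as -16) and only then apply the offset.  */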
2696 \f
2697 /* And now, the support for symbolic debugging information. */
2698
2699 /* .debug_str support. */
2700
2701 static void dwarf2out_init (const char *);
2702 static void dwarf2out_finish (const char *);
2703 static void dwarf2out_early_finish (const char *);
2704 static void dwarf2out_assembly_start (void);
2705 static void dwarf2out_define (unsigned int, const char *);
2706 static void dwarf2out_undef (unsigned int, const char *);
2707 static void dwarf2out_start_source_file (unsigned, const char *);
2708 static void dwarf2out_end_source_file (unsigned);
2709 static void dwarf2out_function_decl (tree);
2710 static void dwarf2out_begin_block (unsigned, unsigned);
2711 static void dwarf2out_end_block (unsigned, unsigned);
2712 static bool dwarf2out_ignore_block (const_tree);
2713 static void dwarf2out_early_global_decl (tree);
2714 static void dwarf2out_late_global_decl (tree);
2715 static void dwarf2out_type_decl (tree, int);
2716 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2717 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2718 dw_die_ref);
2719 static void dwarf2out_abstract_function (tree);
2720 static void dwarf2out_var_location (rtx_insn *);
2721 static void dwarf2out_size_function (tree);
2722 static void dwarf2out_begin_function (tree);
2723 static void dwarf2out_end_function (unsigned int);
2724 static void dwarf2out_register_main_translation_unit (tree unit);
2725 static void dwarf2out_set_name (tree, tree);
2726 static void dwarf2out_register_external_die (tree decl, const char *sym,
2727 unsigned HOST_WIDE_INT off);
2728 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2729 unsigned HOST_WIDE_INT *off);
2730
2731 /* The debug hooks structure. */
2732
2733 const struct gcc_debug_hooks dwarf2_debug_hooks =
2734 {
2735 dwarf2out_init,
2736 dwarf2out_finish,
2737 dwarf2out_early_finish,
2738 dwarf2out_assembly_start,
2739 dwarf2out_define,
2740 dwarf2out_undef,
2741 dwarf2out_start_source_file,
2742 dwarf2out_end_source_file,
2743 dwarf2out_begin_block,
2744 dwarf2out_end_block,
2745 dwarf2out_ignore_block,
2746 dwarf2out_source_line,
2747 dwarf2out_begin_prologue,
2748 #if VMS_DEBUGGING_INFO
2749 dwarf2out_vms_end_prologue,
2750 dwarf2out_vms_begin_epilogue,
2751 #else
2752 debug_nothing_int_charstar,
2753 debug_nothing_int_charstar,
2754 #endif
2755 dwarf2out_end_epilogue,
2756 dwarf2out_begin_function,
2757 dwarf2out_end_function, /* end_function */
2758 dwarf2out_register_main_translation_unit,
2759 dwarf2out_function_decl, /* function_decl */
2760 dwarf2out_early_global_decl,
2761 dwarf2out_late_global_decl,
2762 dwarf2out_type_decl, /* type_decl */
2763 dwarf2out_imported_module_or_decl,
2764 dwarf2out_die_ref_for_decl,
2765 dwarf2out_register_external_die,
2766 debug_nothing_tree, /* deferred_inline_function */
2767 /* The DWARF 2 backend tries to reduce debugging bloat by not
2768 emitting the abstract description of inline functions until
2769 something tries to reference them. */
2770 dwarf2out_abstract_function, /* outlining_inline_function */
2771 debug_nothing_rtx_code_label, /* label */
2772 debug_nothing_int, /* handle_pch */
2773 dwarf2out_var_location,
2774 debug_nothing_tree, /* inline_entry */
2775 dwarf2out_size_function, /* size_function */
2776 dwarf2out_switch_text_section,
2777 dwarf2out_set_name,
2778 1, /* start_end_main_source_file */
2779 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2780 };
2781
2782 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2783 {
2784 dwarf2out_init,
2785 debug_nothing_charstar,
2786 debug_nothing_charstar,
2787 dwarf2out_assembly_start,
2788 debug_nothing_int_charstar,
2789 debug_nothing_int_charstar,
2790 debug_nothing_int_charstar,
2791 debug_nothing_int,
2792 debug_nothing_int_int, /* begin_block */
2793 debug_nothing_int_int, /* end_block */
2794 debug_true_const_tree, /* ignore_block */
2795 dwarf2out_source_line, /* source_line */
2796 debug_nothing_int_int_charstar, /* begin_prologue */
2797 debug_nothing_int_charstar, /* end_prologue */
2798 debug_nothing_int_charstar, /* begin_epilogue */
2799 debug_nothing_int_charstar, /* end_epilogue */
2800 debug_nothing_tree, /* begin_function */
2801 debug_nothing_int, /* end_function */
2802 debug_nothing_tree, /* register_main_translation_unit */
2803 debug_nothing_tree, /* function_decl */
2804 debug_nothing_tree, /* early_global_decl */
2805 debug_nothing_tree, /* late_global_decl */
2806 debug_nothing_tree_int, /* type_decl */
2807 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2808 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2809 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2810 debug_nothing_tree, /* deferred_inline_function */
2811 debug_nothing_tree, /* outlining_inline_function */
2812 debug_nothing_rtx_code_label, /* label */
2813 debug_nothing_int, /* handle_pch */
2814 debug_nothing_rtx_insn, /* var_location */
2815 debug_nothing_tree, /* inline_entry */
2816 debug_nothing_tree, /* size_function */
2817 debug_nothing_void, /* switch_text_section */
2818 debug_nothing_tree_tree, /* set_name */
2819 0, /* start_end_main_source_file */
2820 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2821 };
2822 \f
2823 /* NOTE: In the comments in this file, many references are made to
2824 "Debugging Information Entries". This term is abbreviated as `DIE'
2825 throughout the remainder of this file. */
2826
2827 /* An internal representation of the DWARF output is built, and then
2828 walked to generate the DWARF debugging info. The walk of the internal
2829 representation is done after the entire program has been compiled.
2830 The types below are used to describe the internal representation. */
2831
2832 /* Whether to put type DIEs into their own .debug_types section instead
2833 of making them part of the .debug_info section. Only supported for
2834 DWARF v4 or higher, and only if the user didn't disable it through
2835 -fno-debug-types-section. It is more efficient to put them in
2836 separate comdat sections since the linker will then be able to
2837 remove duplicates. But not all tools support .debug_types sections
2838 yet. For DWARF v5 or higher .debug_types no longer exists; type
2839 units are instead emitted as DW_UT_type units in .debug_info. */
2840
2841 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2842
2843 /* Various DIE's use offsets relative to the beginning of the
2844 .debug_info section to refer to each other. */
2845
2846 typedef long int dw_offset;
2847
2848 struct comdat_type_node;
2849
2850 /* The entries in the line_info table more-or-less mirror the opcodes
2851 that are used in the real dwarf line table. Arrays of these entries
2852 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2853 supported. */
2854
2855 enum dw_line_info_opcode {
2856 /* Emit DW_LNE_set_address; the operand is the label index. */
2857 LI_set_address,
2858
2859 /* Emit a row to the matrix with the given line. This may be done
2860 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2861 special opcodes. */
2862 LI_set_line,
2863
2864 /* Emit a DW_LNS_set_file. */
2865 LI_set_file,
2866
2867 /* Emit a DW_LNS_set_column. */
2868 LI_set_column,
2869
2870 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2871 LI_negate_stmt,
2872
2873 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2874 LI_set_prologue_end,
2875 LI_set_epilogue_begin,
2876
2877 /* Emit a DW_LNE_set_discriminator. */
2878 LI_set_discriminator
2879 };
2880
2881 typedef struct GTY(()) dw_line_info_struct {
2882 enum dw_line_info_opcode opcode;
2883 unsigned int val;
2884 } dw_line_info_entry;
2885
2886
2887 struct GTY(()) dw_line_info_table {
2888 /* The label that marks the end of this section. */
2889 const char *end_label;
2890
2891 /* The values for the last row of the matrix, as collected in the table.
2892 These are used to minimize the changes to the next row. */
2893 unsigned int file_num;
2894 unsigned int line_num;
2895 unsigned int column_num;
2896 int discrim_num;
2897 bool is_stmt;
2898 bool in_use;
2899
2900 vec<dw_line_info_entry, va_gc> *entries;
2901 };
2902
2903
2904 /* Each DIE attribute has a field specifying the attribute kind,
2905 a link to the next attribute in the chain, and an attribute value.
2906 Attributes are typically linked below the DIE they modify. */
2907
2908 typedef struct GTY(()) dw_attr_struct {
2909 enum dwarf_attribute dw_attr;
2910 dw_val_node dw_attr_val;
2911 }
2912 dw_attr_node;
2913
2914
2915 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2916 The children of each node form a circular list linked by
2917 die_sib. die_child points to the node *before* the "first" child node. */
2918
2919 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2920 union die_symbol_or_type_node
2921 {
2922 const char * GTY ((tag ("0"))) die_symbol;
2923 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2924 }
2925 GTY ((desc ("%0.comdat_type_p"))) die_id;
2926 vec<dw_attr_node, va_gc> *die_attr;
2927 dw_die_ref die_parent;
2928 dw_die_ref die_child;
2929 dw_die_ref die_sib;
2930 dw_die_ref die_definition; /* ref from a specification to its definition */
2931 dw_offset die_offset;
2932 unsigned long die_abbrev;
2933 int die_mark;
2934 unsigned int decl_id;
2935 enum dwarf_tag die_tag;
2936 /* Die is used and must not be pruned as unused. */
2937 BOOL_BITFIELD die_perennial_p : 1;
2938 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2939 /* For an external ref to die_symbol if die_offset contains an extra
2940 offset to that symbol. */
2941 BOOL_BITFIELD with_offset : 1;
2942 /* Whether this DIE was removed from the DIE tree, for example via
2943 prune_unused_types. We don't consider those present from the
2944 DIE lookup routines. */
2945 BOOL_BITFIELD removed : 1;
2946 /* Lots of spare bits. */
2947 }
2948 die_node;
2949
2950 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2951 static bool early_dwarf;
2952 static bool early_dwarf_finished;
2953 struct set_early_dwarf {
2954 bool saved;
2955 set_early_dwarf () : saved(early_dwarf)
2956 {
2957 gcc_assert (! early_dwarf_finished);
2958 early_dwarf = true;
2959 }
2960 ~set_early_dwarf () { early_dwarf = saved; }
2961 };
2962
2963 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2964 #define FOR_EACH_CHILD(die, c, expr) do { \
2965 c = die->die_child; \
2966 if (c) do { \
2967 c = c->die_sib; \
2968 expr; \
2969 } while (c != die->die_child); \
2970 } while (0)
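
/* Illustrative usage sketch (hypothetical helper, not part of the original
   source): FOR_EACH_CHILD hides the circular die_sib links, so counting the
   direct children of a DIE is simply:  */

static inline unsigned
example_count_children (dw_die_ref die)
{
  dw_die_ref c;
  unsigned count = 0;

  FOR_EACH_CHILD (die, c, count++);
  return count;
}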
2971
2972 /* The pubname structure */
2973
2974 typedef struct GTY(()) pubname_struct {
2975 dw_die_ref die;
2976 const char *name;
2977 }
2978 pubname_entry;
2979
2980
2981 struct GTY(()) dw_ranges {
2982 const char *label;
2983 /* If this is positive, it's a block number, otherwise it's a
2984 bitwise-negated index into dw_ranges_by_label. */
2985 int num;
2986 /* Index for the range list for DW_FORM_rnglistx. */
2987 unsigned int idx : 31;
2988 /* True if this range might possibly be in a different section
2989 from the previous entry. */
2990 unsigned int maybe_new_sec : 1;
2991 };
2992
2993 /* A structure to hold a macinfo entry. */
2994
2995 typedef struct GTY(()) macinfo_struct {
2996 unsigned char code;
2997 unsigned HOST_WIDE_INT lineno;
2998 const char *info;
2999 }
3000 macinfo_entry;
3001
3002
3003 struct GTY(()) dw_ranges_by_label {
3004 const char *begin;
3005 const char *end;
3006 };
3007
3008 /* The comdat type node structure. */
3009 struct GTY(()) comdat_type_node
3010 {
3011 dw_die_ref root_die;
3012 dw_die_ref type_die;
3013 dw_die_ref skeleton_die;
3014 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3015 comdat_type_node *next;
3016 };
3017
3018 /* A list of DIEs for which we can't determine ancestry (parent_die
3019 field) just yet. Later in dwarf2out_finish we will fill in the
3020 missing bits. */
3021 typedef struct GTY(()) limbo_die_struct {
3022 dw_die_ref die;
3023 /* The tree for which this DIE was created. We use this to
3024 determine ancestry later. */
3025 tree created_for;
3026 struct limbo_die_struct *next;
3027 }
3028 limbo_die_node;
3029
3030 typedef struct skeleton_chain_struct
3031 {
3032 dw_die_ref old_die;
3033 dw_die_ref new_die;
3034 struct skeleton_chain_struct *parent;
3035 }
3036 skeleton_chain_node;
3037
3038 /* Define a macro which returns nonzero for a TYPE_DECL which was
3039 implicitly generated for a type.
3040
3041 Note that, unlike the C front-end (which generates a NULL named
3042 TYPE_DECL node for each complete tagged type, each array type,
3043 and each function type node created) the C++ front-end generates
3044 a _named_ TYPE_DECL node for each tagged type node created.
3045 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3046 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3047 front-end, but for each type, tagged or not. */
3048
3049 #define TYPE_DECL_IS_STUB(decl) \
3050 (DECL_NAME (decl) == NULL_TREE \
3051 || (DECL_ARTIFICIAL (decl) \
3052 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3053 /* This is necessary for stub decls that \
3054 appear in nested inline functions. */ \
3055 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3056 && (decl_ultimate_origin (decl) \
3057 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3058
3059 /* Information concerning the compilation unit's programming
3060 language, and compiler version. */
3061
3062 /* Fixed size portion of the DWARF compilation unit header. */
3063 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3064 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3065 + (dwarf_version >= 5 ? 4 : 3))
3066
3067 /* Fixed size portion of the DWARF comdat type unit header. */
3068 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3069 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3070 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3071
3072 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3073 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3074 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3075
3076 /* Fixed size portion of public names info. */
3077 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3078
3079 /* Fixed size portion of the address range info. */
3080 #define DWARF_ARANGES_HEADER_SIZE \
3081 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3082 DWARF2_ADDR_SIZE * 2) \
3083 - DWARF_INITIAL_LENGTH_SIZE)
3084
3085 /* Size of padding portion in the address range info. It must be
3086 aligned to twice the pointer size. */
3087 #define DWARF_ARANGES_PAD_SIZE \
3088 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3089 DWARF2_ADDR_SIZE * 2) \
3090 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
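
/* Worked example (illustrative, not part of the original source): with
   32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE == 4, DWARF_OFFSET_SIZE == 4) and
   8-byte addresses, the unpadded aranges header is 4 + 4 + 4 = 12 bytes;
   rounding up to 2 * DWARF2_ADDR_SIZE == 16 gives DWARF_ARANGES_PAD_SIZE
   == 4 and DWARF_ARANGES_HEADER_SIZE == 16 - 4 = 12.  */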
3091
3092 /* Use assembler line directives if available. */
3093 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3094 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3095 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3096 #else
3097 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3098 #endif
3099 #endif
3100
3101 /* Minimum line offset in a special line info. opcode.
3102 This value was chosen to give a reasonable range of values. */
3103 #define DWARF_LINE_BASE -10
3104
3105 /* First special line opcode - leave room for the standard opcodes. */
3106 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3107
3108 /* Range of line offsets in a special line info. opcode. */
3109 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
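
/* Worked example (illustrative, not part of the original source): with
   DW_LNS_set_isa == 12 the definitions above give DWARF_LINE_OPCODE_BASE
   == 13 and DWARF_LINE_RANGE == 242, so the standard special-opcode
   formula (line_delta - DWARF_LINE_BASE) + (DWARF_LINE_RANGE * addr_advance)
   + DWARF_LINE_OPCODE_BASE encodes "advance line by 1, address by 0" as
   (1 - -10) + 0 + 13 == 24, which fits in the 13..255 special opcode
   space.  */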
3110
3111 /* Flag that indicates the initial value of the is_stmt_start flag.
3112 In the present implementation, we do not mark any lines as
3113 the beginning of a source statement, because that information
3114 is not made available by the GCC front-end. */
3115 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3116
3117 /* Maximum number of operations per instruction bundle. */
3118 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3119 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3120 #endif
3121
3122 /* This location is used by calc_die_sizes() to keep track of
3123 the offset of each DIE within the .debug_info section. */
3124 static unsigned long next_die_offset;
3125
3126 /* Record the root of the DIE's built for the current compilation unit. */
3127 static GTY(()) dw_die_ref single_comp_unit_die;
3128
3129 /* A list of type DIEs that have been separated into comdat sections. */
3130 static GTY(()) comdat_type_node *comdat_type_list;
3131
3132 /* A list of CU DIEs that have been separated. */
3133 static GTY(()) limbo_die_node *cu_die_list;
3134
3135 /* A list of DIEs with a NULL parent waiting to be relocated. */
3136 static GTY(()) limbo_die_node *limbo_die_list;
3137
3138 /* A list of DIEs for which we may have to generate
3139 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3140 static GTY(()) limbo_die_node *deferred_asm_name;
3141
3142 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3143 {
3144 typedef const char *compare_type;
3145
3146 static hashval_t hash (dwarf_file_data *);
3147 static bool equal (dwarf_file_data *, const char *);
3148 };
3149
3150 /* Filenames referenced by this compilation unit. */
3151 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3152
3153 struct decl_die_hasher : ggc_ptr_hash<die_node>
3154 {
3155 typedef tree compare_type;
3156
3157 static hashval_t hash (die_node *);
3158 static bool equal (die_node *, tree);
3159 };
3160 /* A hash table of references to DIE's that describe declarations.
3161 The key is a DECL_UID() which is a unique number identifying each decl. */
3162 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3163
3164 struct GTY ((for_user)) variable_value_struct {
3165 unsigned int decl_id;
3166 vec<dw_die_ref, va_gc> *dies;
3167 };
3168
3169 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3170 {
3171 typedef tree compare_type;
3172
3173 static hashval_t hash (variable_value_struct *);
3174 static bool equal (variable_value_struct *, tree);
3175 };
3176 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3177 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3178 DECL_CONTEXT of the referenced VAR_DECLs. */
3179 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3180
3181 struct block_die_hasher : ggc_ptr_hash<die_struct>
3182 {
3183 static hashval_t hash (die_struct *);
3184 static bool equal (die_struct *, die_struct *);
3185 };
3186
3187 /* A hash table of references to DIE's that describe COMMON blocks.
3188 The key is DECL_UID() ^ die_parent. */
3189 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3190
3191 typedef struct GTY(()) die_arg_entry_struct {
3192 dw_die_ref die;
3193 tree arg;
3194 } die_arg_entry;
3195
3196
3197 /* Node of the variable location list. */
3198 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3199 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3200 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3201 in mode of the EXPR_LIST node and first EXPR_LIST operand
3202 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3203 location or NULL for padding. For larger bitsizes,
3204 mode is 0 and first operand is a CONCAT with bitsize
3205 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3206 NULL as second operand. */
3207 rtx GTY (()) loc;
3208 const char * GTY (()) label;
3209 struct var_loc_node * GTY (()) next;
3210 };
3211
3212 /* Variable location list. */
3213 struct GTY ((for_user)) var_loc_list_def {
3214 struct var_loc_node * GTY (()) first;
3215
3216 /* Pointer to the last or last-but-one element of the
3217 chained list. If the list is empty, both first and
3218 last are NULL; if the list contains just one node,
3219 or the last node is certainly not redundant, it points
3220 to the last node; otherwise it points to the last but one.
3221 Do not mark it for GC because it is marked through the chain. */
3222 struct var_loc_node * GTY ((skip ("%h"))) last;
3223
3224 /* Pointer to the last element before a section switch;
3225 if NULL, either sections weren't switched or first
3226 is after the section switch. */
3227 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3228
3229 /* DECL_UID of the variable decl. */
3230 unsigned int decl_id;
3231 };
3232 typedef struct var_loc_list_def var_loc_list;
3233
3234 /* Call argument location list. */
3235 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3236 rtx GTY (()) call_arg_loc_note;
3237 const char * GTY (()) label;
3238 tree GTY (()) block;
3239 bool tail_call_p;
3240 rtx GTY (()) symbol_ref;
3241 struct call_arg_loc_node * GTY (()) next;
3242 };
3243
3244
3245 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3246 {
3247 typedef const_tree compare_type;
3248
3249 static hashval_t hash (var_loc_list *);
3250 static bool equal (var_loc_list *, const_tree);
3251 };
3252
3253 /* Table of decl location linked lists. */
3254 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3255
3256 /* Head and tail of call_arg_loc chain. */
3257 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3258 static struct call_arg_loc_node *call_arg_loc_last;
3259
3260 /* Number of call sites in the current function. */
3261 static int call_site_count = -1;
3262 /* Number of tail call sites in the current function. */
3263 static int tail_call_site_count = -1;
3264
3265 /* A cached location list. */
3266 struct GTY ((for_user)) cached_dw_loc_list_def {
3267 /* The DECL_UID of the decl that this entry describes. */
3268 unsigned int decl_id;
3269
3270 /* The cached location list. */
3271 dw_loc_list_ref loc_list;
3272 };
3273 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3274
3275 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3276 {
3277
3278 typedef const_tree compare_type;
3279
3280 static hashval_t hash (cached_dw_loc_list *);
3281 static bool equal (cached_dw_loc_list *, const_tree);
3282 };
3283
3284 /* Table of cached location lists. */
3285 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3286
3287 /* A vector of references to DIE's that are uniquely identified by their tag,
3288 presence/absence of children DIE's, and list of attribute/value pairs. */
3289 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3290
3291 /* A hash map to remember the stack usage for DWARF procedures. The value
3292 stored is the difference in stack depth between just before the DWARF
3293 procedure invocation and just after it returns. In other words, for a
3294 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3295 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3296
3297 /* A global counter for generating labels for line number data. */
3298 static unsigned int line_info_label_num;
3299
3300 /* The current table to which we should emit line number information
3301 for the current function. This will be set up at the beginning of
3302 assembly for the function. */
3303 static GTY(()) dw_line_info_table *cur_line_info_table;
3304
3305 /* The two default tables of line number info. */
3306 static GTY(()) dw_line_info_table *text_section_line_info;
3307 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3308
3309 /* The set of all non-default tables of line number info. */
3310 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3311
3312 /* A flag to tell pubnames/types export if there is an info section to
3313 refer to. */
3314 static bool info_section_emitted;
3315
3316 /* A pointer to the base of a table that contains a list of publicly
3317 accessible names. */
3318 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3319
3320 /* A pointer to the base of a table that contains a list of publicly
3321 accessible types. */
3322 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3323
3324 /* A pointer to the base of a table that contains a list of macro
3325 defines/undefines (and file start/end markers). */
3326 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3327
3328 /* True if .debug_macinfo or .debug_macros section is going to be
3329 emitted. */
3330 #define have_macinfo \
3331 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3332 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3333 && !macinfo_table->is_empty ())
3334
3335 /* Vector of dies for which we should generate .debug_ranges info. */
3336 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3337
3338 /* Vector of pairs of labels referenced in ranges_table. */
3339 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3340
3341 /* Whether we have location lists that need outputting. */
3342 static GTY(()) bool have_location_lists;
3343
3344 /* Unique label counter. */
3345 static GTY(()) unsigned int loclabel_num;
3346
3347 /* Unique label counter for point-of-call tables. */
3348 static GTY(()) unsigned int poc_label_num;
3349
3350 /* The last file entry emitted by maybe_emit_file(). */
3351 static GTY(()) struct dwarf_file_data * last_emitted_file;
3352
3353 /* Number of internal labels generated by gen_internal_sym(). */
3354 static GTY(()) int label_num;
3355
3356 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3357
3358 /* Instances of generic types for which we need to generate debug
3359 info that describes their generic parameters and arguments. That
3360 generation needs to happen once all types are properly laid out so
3361 we do it at the end of compilation. */
3362 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3363
3364 /* Offset from the "steady-state frame pointer" to the frame base,
3365 within the current function. */
3366 static poly_int64 frame_pointer_fb_offset;
3367 static bool frame_pointer_fb_offset_valid;
3368
3369 static vec<dw_die_ref> base_types;
3370
3371 /* Flags to represent a set of attribute classes for attributes that represent
3372 a scalar value (bounds, pointers, ...). */
3373 enum dw_scalar_form
3374 {
3375 dw_scalar_form_constant = 0x01,
3376 dw_scalar_form_exprloc = 0x02,
3377 dw_scalar_form_reference = 0x04
3378 };
3379
3380 /* Forward declarations for functions defined in this file. */
3381
3382 static int is_pseudo_reg (const_rtx);
3383 static tree type_main_variant (tree);
3384 static int is_tagged_type (const_tree);
3385 static const char *dwarf_tag_name (unsigned);
3386 static const char *dwarf_attr_name (unsigned);
3387 static const char *dwarf_form_name (unsigned);
3388 static tree decl_ultimate_origin (const_tree);
3389 static tree decl_class_context (tree);
3390 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3391 static inline enum dw_val_class AT_class (dw_attr_node *);
3392 static inline unsigned int AT_index (dw_attr_node *);
3393 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3394 static inline unsigned AT_flag (dw_attr_node *);
3395 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3396 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3397 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3398 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3399 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3400 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3401 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3402 unsigned int, unsigned char *);
3403 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3404 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3405 static inline const char *AT_string (dw_attr_node *);
3406 static enum dwarf_form AT_string_form (dw_attr_node *);
3407 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3408 static void add_AT_specification (dw_die_ref, dw_die_ref);
3409 static inline dw_die_ref AT_ref (dw_attr_node *);
3410 static inline int AT_ref_external (dw_attr_node *);
3411 static inline void set_AT_ref_external (dw_attr_node *, int);
3412 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3413 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3414 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3415 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3416 dw_loc_list_ref);
3417 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3418 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3419 static void remove_addr_table_entry (addr_table_entry *);
3420 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3421 static inline rtx AT_addr (dw_attr_node *);
3422 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3423 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3424 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3425 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3426 const char *);
3427 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3428 unsigned HOST_WIDE_INT);
3429 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3430 unsigned long, bool);
3431 static inline const char *AT_lbl (dw_attr_node *);
3432 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3433 static const char *get_AT_low_pc (dw_die_ref);
3434 static const char *get_AT_hi_pc (dw_die_ref);
3435 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3436 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3437 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3438 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3439 static bool is_cxx (void);
3440 static bool is_cxx (const_tree);
3441 static bool is_fortran (void);
3442 static bool is_ada (void);
3443 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3444 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3445 static void add_child_die (dw_die_ref, dw_die_ref);
3446 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3447 static dw_die_ref lookup_type_die (tree);
3448 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3449 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3450 static void equate_type_number_to_die (tree, dw_die_ref);
3451 static dw_die_ref lookup_decl_die (tree);
3452 static var_loc_list *lookup_decl_loc (const_tree);
3453 static void equate_decl_number_to_die (tree, dw_die_ref);
3454 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3455 static void print_spaces (FILE *);
3456 static void print_die (dw_die_ref, FILE *);
3457 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3458 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3459 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3460 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3461 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3462 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3463 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3464 struct md5_ctx *, int *);
3465 struct checksum_attributes;
3466 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3467 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3468 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3469 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3470 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3471 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3472 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3473 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3474 static int is_type_die (dw_die_ref);
3475 static int is_comdat_die (dw_die_ref);
3476 static inline bool is_template_instantiation (dw_die_ref);
3477 static int is_declaration_die (dw_die_ref);
3478 static int should_move_die_to_comdat (dw_die_ref);
3479 static dw_die_ref clone_as_declaration (dw_die_ref);
3480 static dw_die_ref clone_die (dw_die_ref);
3481 static dw_die_ref clone_tree (dw_die_ref);
3482 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3483 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3484 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3485 static dw_die_ref generate_skeleton (dw_die_ref);
3486 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3487 dw_die_ref,
3488 dw_die_ref);
3489 static void break_out_comdat_types (dw_die_ref);
3490 static void copy_decls_for_unworthy_types (dw_die_ref);
3491
3492 static void add_sibling_attributes (dw_die_ref);
3493 static void output_location_lists (dw_die_ref);
3494 static int constant_size (unsigned HOST_WIDE_INT);
3495 static unsigned long size_of_die (dw_die_ref);
3496 static void calc_die_sizes (dw_die_ref);
3497 static void calc_base_type_die_sizes (void);
3498 static void mark_dies (dw_die_ref);
3499 static void unmark_dies (dw_die_ref);
3500 static void unmark_all_dies (dw_die_ref);
3501 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3502 static unsigned long size_of_aranges (void);
3503 static enum dwarf_form value_format (dw_attr_node *);
3504 static void output_value_format (dw_attr_node *);
3505 static void output_abbrev_section (void);
3506 static void output_die_abbrevs (unsigned long, dw_die_ref);
3507 static void output_die (dw_die_ref);
3508 static void output_compilation_unit_header (enum dwarf_unit_type);
3509 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3510 static void output_comdat_type_unit (comdat_type_node *);
3511 static const char *dwarf2_name (tree, int);
3512 static void add_pubname (tree, dw_die_ref);
3513 static void add_enumerator_pubname (const char *, dw_die_ref);
3514 static void add_pubname_string (const char *, dw_die_ref);
3515 static void add_pubtype (tree, dw_die_ref);
3516 static void output_pubnames (vec<pubname_entry, va_gc> *);
3517 static void output_aranges (void);
3518 static unsigned int add_ranges (const_tree, bool = false);
3519 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3520 bool *, bool);
3521 static void output_ranges (void);
3522 static dw_line_info_table *new_line_info_table (void);
3523 static void output_line_info (bool);
3524 static void output_file_names (void);
3525 static dw_die_ref base_type_die (tree, bool);
3526 static int is_base_type (tree);
3527 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3528 static int decl_quals (const_tree);
3529 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3530 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3531 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3532 static int type_is_enum (const_tree);
3533 static unsigned int dbx_reg_number (const_rtx);
3534 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3535 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3536 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3537 enum var_init_status);
3538 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3539 enum var_init_status);
3540 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3541 enum var_init_status);
3542 static int is_based_loc (const_rtx);
3543 static bool resolve_one_addr (rtx *);
3544 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3545 enum var_init_status);
3546 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3547 enum var_init_status);
3548 struct loc_descr_context;
3549 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3550 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3551 static dw_loc_list_ref loc_list_from_tree (tree, int,
3552 struct loc_descr_context *);
3553 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3554 struct loc_descr_context *);
3555 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3556 static tree field_type (const_tree);
3557 static unsigned int simple_type_align_in_bits (const_tree);
3558 static unsigned int simple_decl_align_in_bits (const_tree);
3559 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3560 struct vlr_context;
3561 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3562 HOST_WIDE_INT *);
3563 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3564 dw_loc_list_ref);
3565 static void add_data_member_location_attribute (dw_die_ref, tree,
3566 struct vlr_context *);
3567 static bool add_const_value_attribute (dw_die_ref, rtx);
3568 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3569 static void insert_wide_int (const wide_int &, unsigned char *, int);
3570 static void insert_float (const_rtx, unsigned char *);
3571 static rtx rtl_for_decl_location (tree);
3572 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3573 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3574 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3575 static void add_name_attribute (dw_die_ref, const char *);
3576 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3577 static void add_comp_dir_attribute (dw_die_ref);
3578 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3579 struct loc_descr_context *);
3580 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3581 struct loc_descr_context *);
3582 static void add_subscript_info (dw_die_ref, tree, bool);
3583 static void add_byte_size_attribute (dw_die_ref, tree);
3584 static void add_alignment_attribute (dw_die_ref, tree);
3585 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3586 struct vlr_context *);
3587 static void add_bit_size_attribute (dw_die_ref, tree);
3588 static void add_prototyped_attribute (dw_die_ref, tree);
3589 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3590 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3591 static void add_src_coords_attributes (dw_die_ref, tree);
3592 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3593 static void add_discr_value (dw_die_ref, dw_discr_value *);
3594 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3595 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3596 static void push_decl_scope (tree);
3597 static void pop_decl_scope (void);
3598 static dw_die_ref scope_die_for (tree, dw_die_ref);
3599 static inline int local_scope_p (dw_die_ref);
3600 static inline int class_scope_p (dw_die_ref);
3601 static inline int class_or_namespace_scope_p (dw_die_ref);
3602 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3603 static void add_calling_convention_attribute (dw_die_ref, tree);
3604 static const char *type_tag (const_tree);
3605 static tree member_declared_type (const_tree);
3606 #if 0
3607 static const char *decl_start_label (tree);
3608 #endif
3609 static void gen_array_type_die (tree, dw_die_ref);
3610 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3611 #if 0
3612 static void gen_entry_point_die (tree, dw_die_ref);
3613 #endif
3614 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3615 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3616 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3617 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3618 static void gen_formal_types_die (tree, dw_die_ref);
3619 static void gen_subprogram_die (tree, dw_die_ref);
3620 static void gen_variable_die (tree, tree, dw_die_ref);
3621 static void gen_const_die (tree, dw_die_ref);
3622 static void gen_label_die (tree, dw_die_ref);
3623 static void gen_lexical_block_die (tree, dw_die_ref);
3624 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3625 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3626 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3627 static dw_die_ref gen_compile_unit_die (const char *);
3628 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3629 static void gen_member_die (tree, dw_die_ref);
3630 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3631 enum debug_info_usage);
3632 static void gen_subroutine_type_die (tree, dw_die_ref);
3633 static void gen_typedef_die (tree, dw_die_ref);
3634 static void gen_type_die (tree, dw_die_ref);
3635 static void gen_block_die (tree, dw_die_ref);
3636 static void decls_for_scope (tree, dw_die_ref);
3637 static bool is_naming_typedef_decl (const_tree);
3638 static inline dw_die_ref get_context_die (tree);
3639 static void gen_namespace_die (tree, dw_die_ref);
3640 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3641 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3642 static dw_die_ref force_decl_die (tree);
3643 static dw_die_ref force_type_die (tree);
3644 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3645 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3646 static struct dwarf_file_data * lookup_filename (const char *);
3647 static void retry_incomplete_types (void);
3648 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3649 static void gen_generic_params_dies (tree);
3650 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3651 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3652 static void splice_child_die (dw_die_ref, dw_die_ref);
3653 static int file_info_cmp (const void *, const void *);
3654 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3655 const char *, const char *);
3656 static void output_loc_list (dw_loc_list_ref);
3657 static char *gen_internal_sym (const char *);
3658 static bool want_pubnames (void);
3659
3660 static void prune_unmark_dies (dw_die_ref);
3661 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3662 static void prune_unused_types_mark (dw_die_ref, int);
3663 static void prune_unused_types_walk (dw_die_ref);
3664 static void prune_unused_types_walk_attribs (dw_die_ref);
3665 static void prune_unused_types_prune (dw_die_ref);
3666 static void prune_unused_types (void);
3667 static int maybe_emit_file (struct dwarf_file_data *fd);
3668 static inline const char *AT_vms_delta1 (dw_attr_node *);
3669 static inline const char *AT_vms_delta2 (dw_attr_node *);
3670 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3671 const char *, const char *);
3672 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3673 static void gen_remaining_tmpl_value_param_die_attribute (void);
3674 static bool generic_type_p (tree);
3675 static void schedule_generic_params_dies_gen (tree t);
3676 static void gen_scheduled_generic_parms_dies (void);
3677 static void resolve_variable_values (void);
3678
3679 static const char *comp_dir_string (void);
3680
3681 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3682
3683 /* enum for tracking thread-local variables whose address is really an offset
3684 relative to the TLS pointer, which will need link-time relocation, but will
3685 not need relocation by the DWARF consumer. */
3686
3687 enum dtprel_bool
3688 {
3689 dtprel_false = 0,
3690 dtprel_true = 1
3691 };
3692
3693 /* Return the operator to use for an address of a variable. For dtprel_true, we
3694 use DW_OP_const*. For regular variables, which need both link-time
3695 relocation and consumer-level relocation (e.g., to account for shared objects
3696 loaded at a random address), we use DW_OP_addr*. */
3697
3698 static inline enum dwarf_location_atom
3699 dw_addr_op (enum dtprel_bool dtprel)
3700 {
3701 if (dtprel == dtprel_true)
3702 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3703 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3704 else
3705 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3706 }
3707
3708 /* Return a pointer to a newly allocated address location description. If
3709 dwarf_split_debug_info is true, also record the address in the address
3710 table so that it can later be referenced by index. */
3711 static inline dw_loc_descr_ref
3712 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3713 {
3714 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3715
3716 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3717 ref->dw_loc_oprnd1.v.val_addr = addr;
3718 ref->dtprel = dtprel;
3719 if (dwarf_split_debug_info)
3720 ref->dw_loc_oprnd1.val_entry
3721 = add_addr_table_entry (addr,
3722 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3723 else
3724 ref->dw_loc_oprnd1.val_entry = NULL;
3725
3726 return ref;
3727 }
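
/* The sketch below is illustrative only and is not part of dwarf2out.c: it
   shows how a caller might use new_addr_loc_descr to build a location
   description for an ordinary (non-TLS) symbol. The helper name
   example_symbol_loc is made up for illustration. With -gsplit-dwarf the
   address is routed through the .debug_addr table and referenced by index;
   otherwise DW_OP_addr embeds the relocatable address directly. */
#if 0
static dw_loc_descr_ref
example_symbol_loc (const char *name)
{
  /* Build a SYMBOL_REF rtx for the symbol and wrap it in an address
     location description; dtprel_false marks it as a regular address that
     needs both link-time and consumer-level relocation. */
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
  return new_addr_loc_descr (sym, dtprel_false);
}
#endif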
3728
3729 /* Section names used to hold DWARF debugging information. */
3730
3731 #ifndef DEBUG_INFO_SECTION
3732 #define DEBUG_INFO_SECTION ".debug_info"
3733 #endif
3734 #ifndef DEBUG_DWO_INFO_SECTION
3735 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3736 #endif
3737 #ifndef DEBUG_LTO_INFO_SECTION
3738 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3739 #endif
3740 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3741 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3742 #endif
3743 #ifndef DEBUG_ABBREV_SECTION
3744 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3745 #endif
3746 #ifndef DEBUG_LTO_ABBREV_SECTION
3747 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3748 #endif
3749 #ifndef DEBUG_DWO_ABBREV_SECTION
3750 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3751 #endif
3752 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3753 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3754 #endif
3755 #ifndef DEBUG_ARANGES_SECTION
3756 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3757 #endif
3758 #ifndef DEBUG_ADDR_SECTION
3759 #define DEBUG_ADDR_SECTION ".debug_addr"
3760 #endif
3761 #ifndef DEBUG_MACINFO_SECTION
3762 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3763 #endif
3764 #ifndef DEBUG_LTO_MACINFO_SECTION
3765 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3766 #endif
3767 #ifndef DEBUG_DWO_MACINFO_SECTION
3768 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3769 #endif
3770 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3771 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3772 #endif
3773 #ifndef DEBUG_MACRO_SECTION
3774 #define DEBUG_MACRO_SECTION ".debug_macro"
3775 #endif
3776 #ifndef DEBUG_LTO_MACRO_SECTION
3777 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3778 #endif
3779 #ifndef DEBUG_DWO_MACRO_SECTION
3780 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3781 #endif
3782 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3783 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3784 #endif
3785 #ifndef DEBUG_LINE_SECTION
3786 #define DEBUG_LINE_SECTION ".debug_line"
3787 #endif
3788 #ifndef DEBUG_LTO_LINE_SECTION
3789 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3790 #endif
3791 #ifndef DEBUG_DWO_LINE_SECTION
3792 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3793 #endif
3794 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3795 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3796 #endif
3797 #ifndef DEBUG_LOC_SECTION
3798 #define DEBUG_LOC_SECTION ".debug_loc"
3799 #endif
3800 #ifndef DEBUG_DWO_LOC_SECTION
3801 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3802 #endif
3803 #ifndef DEBUG_LOCLISTS_SECTION
3804 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3805 #endif
3806 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3807 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3808 #endif
3809 #ifndef DEBUG_PUBNAMES_SECTION
3810 #define DEBUG_PUBNAMES_SECTION \
3811 ((debug_generate_pub_sections == 2) \
3812 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3813 #endif
3814 #ifndef DEBUG_PUBTYPES_SECTION
3815 #define DEBUG_PUBTYPES_SECTION \
3816 ((debug_generate_pub_sections == 2) \
3817 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3818 #endif
3819 #ifndef DEBUG_STR_OFFSETS_SECTION
3820 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3821 #endif
3822 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3823 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3824 #endif
3825 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
3826 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
3827 #endif
3828 #ifndef DEBUG_STR_SECTION
3829 #define DEBUG_STR_SECTION ".debug_str"
3830 #endif
3831 #ifndef DEBUG_LTO_STR_SECTION
3832 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
3833 #endif
3834 #ifndef DEBUG_STR_DWO_SECTION
3835 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3836 #endif
3837 #ifndef DEBUG_LTO_STR_DWO_SECTION
3838 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
3839 #endif
3840 #ifndef DEBUG_RANGES_SECTION
3841 #define DEBUG_RANGES_SECTION ".debug_ranges"
3842 #endif
3843 #ifndef DEBUG_RNGLISTS_SECTION
3844 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3845 #endif
3846 #ifndef DEBUG_LINE_STR_SECTION
3847 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3848 #endif
3849 #ifndef DEBUG_LTO_LINE_STR_SECTION
3850 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
3851 #endif
3852
3853 /* Standard ELF section names for compiled code and data. */
3854 #ifndef TEXT_SECTION_NAME
3855 #define TEXT_SECTION_NAME ".text"
3856 #endif
3857
3858 /* Section flags for .debug_str section. */
3859 #define DEBUG_STR_SECTION_FLAGS \
3860 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3861 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3862 : SECTION_DEBUG)
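
/* In the merge case above, the trailing "| 1" supplies the section entity
   size (one byte) in the low bits of the flags word, which varasm passes
   through to the assembler as the entsize of the mergeable string section
   (SHF_MERGE | SHF_STRINGS). */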
3863
3864 /* Section flags for .debug_str.dwo section. */
3865 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3866
3867 /* Attribute used to refer to the macro section. */
3868 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
3869 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
3870
3871 /* Labels we insert at the beginning of sections, so that we can reference
3872 them instead of the section names themselves. */
3873
3874 #ifndef TEXT_SECTION_LABEL
3875 #define TEXT_SECTION_LABEL "Ltext"
3876 #endif
3877 #ifndef COLD_TEXT_SECTION_LABEL
3878 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3879 #endif
3880 #ifndef DEBUG_LINE_SECTION_LABEL
3881 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3882 #endif
3883 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3884 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3885 #endif
3886 #ifndef DEBUG_INFO_SECTION_LABEL
3887 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3888 #endif
3889 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3890 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3891 #endif
3892 #ifndef DEBUG_ABBREV_SECTION_LABEL
3893 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3894 #endif
3895 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3896 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3897 #endif
3898 #ifndef DEBUG_ADDR_SECTION_LABEL
3899 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3900 #endif
3901 #ifndef DEBUG_LOC_SECTION_LABEL
3902 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3903 #endif
3904 #ifndef DEBUG_RANGES_SECTION_LABEL
3905 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3906 #endif
3907 #ifndef DEBUG_MACINFO_SECTION_LABEL
3908 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3909 #endif
3910 #ifndef DEBUG_MACRO_SECTION_LABEL
3911 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3912 #endif
3913 #define SKELETON_COMP_DIE_ABBREV 1
3914 #define SKELETON_TYPE_DIE_ABBREV 2
3915
3916 /* Definitions of defaults for formats and names of various special
3917 (artificial) labels which may be generated within this file (when the -g
3918 option is used and DWARF2_DEBUGGING_INFO is in effect).
3919 If necessary, these may be overridden from within the tm.h file, but
3920 typically, overriding these defaults is unnecessary. */
3921
3922 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3923 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3924 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3925 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3926 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3927 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3928 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3929 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3930 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3931 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3932 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3933 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3934 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3935 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3936 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3937
3938 #ifndef TEXT_END_LABEL
3939 #define TEXT_END_LABEL "Letext"
3940 #endif
3941 #ifndef COLD_END_LABEL
3942 #define COLD_END_LABEL "Letext_cold"
3943 #endif
3944 #ifndef BLOCK_BEGIN_LABEL
3945 #define BLOCK_BEGIN_LABEL "LBB"
3946 #endif
3947 #ifndef BLOCK_END_LABEL
3948 #define BLOCK_END_LABEL "LBE"
3949 #endif
3950 #ifndef LINE_CODE_LABEL
3951 #define LINE_CODE_LABEL "LM"
3952 #endif
3953
3954 \f
3955 /* Return the root of the DIEs built for the current compilation unit. */
3956 static dw_die_ref
3957 comp_unit_die (void)
3958 {
3959 if (!single_comp_unit_die)
3960 single_comp_unit_die = gen_compile_unit_die (NULL);
3961 return single_comp_unit_die;
3962 }
3963
3964 /* We allow a language front-end to designate a function that is to be
3965 called to "demangle" any name before it is put into a DIE. */
3966
3967 static const char *(*demangle_name_func) (const char *);
3968
3969 void
3970 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3971 {
3972 demangle_name_func = func;
3973 }
3974
3975 /* Test if rtl node points to a pseudo register. */
3976
3977 static inline int
3978 is_pseudo_reg (const_rtx rtl)
3979 {
3980 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3981 || (GET_CODE (rtl) == SUBREG
3982 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3983 }
3984
3985 /* Return a reference to a type, with its const and volatile qualifiers
3986 removed. */
3987
3988 static inline tree
3989 type_main_variant (tree type)
3990 {
3991 type = TYPE_MAIN_VARIANT (type);
3992
3993 /* ??? There really should be only one main variant among any group of
3994 variants of a given type (and all of the MAIN_VARIANT values for all
3995 members of the group should point to that one type) but sometimes the C
3996 front-end messes this up for array types, so we work around that bug
3997 here. */
3998 if (TREE_CODE (type) == ARRAY_TYPE)
3999 while (type != TYPE_MAIN_VARIANT (type))
4000 type = TYPE_MAIN_VARIANT (type);
4001
4002 return type;
4003 }
4004
4005 /* Return nonzero if the given type node represents a tagged type. */
4006
4007 static inline int
4008 is_tagged_type (const_tree type)
4009 {
4010 enum tree_code code = TREE_CODE (type);
4011
4012 return (code == RECORD_TYPE || code == UNION_TYPE
4013 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4014 }
4015
4016 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4017
4018 static void
4019 get_ref_die_offset_label (char *label, dw_die_ref ref)
4020 {
4021 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4022 }
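
/* For instance, a DIE at offset 75 in .debug_info yields a string of the
   form ".Ldebug_info0+75" (the exact spelling of the internal label is
   target-dependent), which the assembler resolves to an offset from the
   start of the section. */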
4023
4024 /* Return die_offset of a DIE reference to a base type. */
4025
4026 static unsigned long int
4027 get_base_type_offset (dw_die_ref ref)
4028 {
4029 if (ref->die_offset)
4030 return ref->die_offset;
4031 if (comp_unit_die ()->die_abbrev)
4032 {
4033 calc_base_type_die_sizes ();
4034 gcc_assert (ref->die_offset);
4035 }
4036 return ref->die_offset;
4037 }
4038
4039 /* Return the die_offset of a DIE reference other than a base type. */
4040
4041 static unsigned long int
4042 get_ref_die_offset (dw_die_ref ref)
4043 {
4044 gcc_assert (ref->die_offset);
4045 return ref->die_offset;
4046 }
4047
4048 /* Convert a DIE tag into its string name. */
4049
4050 static const char *
4051 dwarf_tag_name (unsigned int tag)
4052 {
4053 const char *name = get_DW_TAG_name (tag);
4054
4055 if (name != NULL)
4056 return name;
4057
4058 return "DW_TAG_<unknown>";
4059 }
4060
4061 /* Convert a DWARF attribute code into its string name. */
4062
4063 static const char *
4064 dwarf_attr_name (unsigned int attr)
4065 {
4066 const char *name;
4067
4068 switch (attr)
4069 {
4070 #if VMS_DEBUGGING_INFO
4071 case DW_AT_HP_prologue:
4072 return "DW_AT_HP_prologue";
4073 #else
4074 case DW_AT_MIPS_loop_unroll_factor:
4075 return "DW_AT_MIPS_loop_unroll_factor";
4076 #endif
4077
4078 #if VMS_DEBUGGING_INFO
4079 case DW_AT_HP_epilogue:
4080 return "DW_AT_HP_epilogue";
4081 #else
4082 case DW_AT_MIPS_stride:
4083 return "DW_AT_MIPS_stride";
4084 #endif
4085 }
4086
4087 name = get_DW_AT_name (attr);
4088
4089 if (name != NULL)
4090 return name;
4091
4092 return "DW_AT_<unknown>";
4093 }
4094
4095 /* Convert a DWARF value form code into its string name. */
4096
4097 static const char *
4098 dwarf_form_name (unsigned int form)
4099 {
4100 const char *name = get_DW_FORM_name (form);
4101
4102 if (name != NULL)
4103 return name;
4104
4105 return "DW_FORM_<unknown>";
4106 }
4107 \f
4108 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4109 instance of an inlined instance of a decl which is local to an inline
4110 function, so we have to trace all of the way back through the origin chain
4111 to find out what sort of node actually served as the original seed for the
4112 given block. */
4113
4114 static tree
4115 decl_ultimate_origin (const_tree decl)
4116 {
4117 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4118 return NULL_TREE;
4119
4120 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4121 we're trying to output the abstract instance of this function. */
4122 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4123 return NULL_TREE;
4124
4125 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4126 most distant ancestor, this should never happen. */
4127 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4128
4129 return DECL_ABSTRACT_ORIGIN (decl);
4130 }
4131
4132 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4133 of a virtual function may refer to a base class, so we check the 'this'
4134 parameter. */
4135
4136 static tree
4137 decl_class_context (tree decl)
4138 {
4139 tree context = NULL_TREE;
4140
4141 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4142 context = DECL_CONTEXT (decl);
4143 else
4144 context = TYPE_MAIN_VARIANT
4145 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4146
4147 if (context && !TYPE_P (context))
4148 context = NULL_TREE;
4149
4150 return context;
4151 }
4152 \f
4153 /* Add an attribute/value pair to a DIE. */
4154
4155 static inline void
4156 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4157 {
4158 /* Maybe this should be an assert? */
4159 if (die == NULL)
4160 return;
4161
4162 if (flag_checking)
4163 {
4164 /* Check we do not add duplicate attrs. Can't use get_AT here
4165 because that recurses to the specification/abstract origin DIE. */
4166 dw_attr_node *a;
4167 unsigned ix;
4168 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4169 gcc_assert (a->dw_attr != attr->dw_attr);
4170 }
4171
4172 vec_safe_reserve (die->die_attr, 1);
4173 vec_safe_push (die->die_attr, *attr);
4174 }
4175
4176 static inline enum dw_val_class
4177 AT_class (dw_attr_node *a)
4178 {
4179 return a->dw_attr_val.val_class;
4180 }
4181
4182 /* Return the index for any attribute that will be referenced with a
4183 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4184 are stored in dw_attr_val.v.val_str, which also carries the reference
4185 count used for pruning. */
4186
4187 static inline unsigned int
4188 AT_index (dw_attr_node *a)
4189 {
4190 if (AT_class (a) == dw_val_class_str)
4191 return a->dw_attr_val.v.val_str->index;
4192 else if (a->dw_attr_val.val_entry != NULL)
4193 return a->dw_attr_val.val_entry->index;
4194 return NOT_INDEXED;
4195 }
4196
4197 /* Add a flag value attribute to a DIE. */
4198
4199 static inline void
4200 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4201 {
4202 dw_attr_node attr;
4203
4204 attr.dw_attr = attr_kind;
4205 attr.dw_attr_val.val_class = dw_val_class_flag;
4206 attr.dw_attr_val.val_entry = NULL;
4207 attr.dw_attr_val.v.val_flag = flag;
4208 add_dwarf_attr (die, &attr);
4209 }
4210
4211 static inline unsigned
4212 AT_flag (dw_attr_node *a)
4213 {
4214 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4215 return a->dw_attr_val.v.val_flag;
4216 }
4217
4218 /* Add a signed integer attribute value to a DIE. */
4219
4220 static inline void
4221 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4222 {
4223 dw_attr_node attr;
4224
4225 attr.dw_attr = attr_kind;
4226 attr.dw_attr_val.val_class = dw_val_class_const;
4227 attr.dw_attr_val.val_entry = NULL;
4228 attr.dw_attr_val.v.val_int = int_val;
4229 add_dwarf_attr (die, &attr);
4230 }
4231
4232 static inline HOST_WIDE_INT
4233 AT_int (dw_attr_node *a)
4234 {
4235 gcc_assert (a && (AT_class (a) == dw_val_class_const
4236 || AT_class (a) == dw_val_class_const_implicit));
4237 return a->dw_attr_val.v.val_int;
4238 }
4239
4240 /* Add an unsigned integer attribute value to a DIE. */
4241
4242 static inline void
4243 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4244 unsigned HOST_WIDE_INT unsigned_val)
4245 {
4246 dw_attr_node attr;
4247
4248 attr.dw_attr = attr_kind;
4249 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4250 attr.dw_attr_val.val_entry = NULL;
4251 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4252 add_dwarf_attr (die, &attr);
4253 }
4254
4255 static inline unsigned HOST_WIDE_INT
4256 AT_unsigned (dw_attr_node *a)
4257 {
4258 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4259 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4260 return a->dw_attr_val.v.val_unsigned;
4261 }
4262
4263 /* Add an unsigned wide integer attribute value to a DIE. */
4264
4265 static inline void
4266 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4267 const wide_int& w)
4268 {
4269 dw_attr_node attr;
4270
4271 attr.dw_attr = attr_kind;
4272 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4273 attr.dw_attr_val.val_entry = NULL;
4274 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4275 *attr.dw_attr_val.v.val_wide = w;
4276 add_dwarf_attr (die, &attr);
4277 }
4278
4279 /* Add an unsigned double integer attribute value to a DIE. */
4280
4281 static inline void
4282 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4283 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4284 {
4285 dw_attr_node attr;
4286
4287 attr.dw_attr = attr_kind;
4288 attr.dw_attr_val.val_class = dw_val_class_const_double;
4289 attr.dw_attr_val.val_entry = NULL;
4290 attr.dw_attr_val.v.val_double.high = high;
4291 attr.dw_attr_val.v.val_double.low = low;
4292 add_dwarf_attr (die, &attr);
4293 }
4294
4295 /* Add a block of data (such as a floating-point or vector constant) as an attribute value to a DIE. */
4296
4297 static inline void
4298 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4299 unsigned int length, unsigned int elt_size, unsigned char *array)
4300 {
4301 dw_attr_node attr;
4302
4303 attr.dw_attr = attr_kind;
4304 attr.dw_attr_val.val_class = dw_val_class_vec;
4305 attr.dw_attr_val.val_entry = NULL;
4306 attr.dw_attr_val.v.val_vec.length = length;
4307 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4308 attr.dw_attr_val.v.val_vec.array = array;
4309 add_dwarf_attr (die, &attr);
4310 }
4311
4312 /* Add an 8-byte data attribute value to a DIE. */
4313
4314 static inline void
4315 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4316 unsigned char data8[8])
4317 {
4318 dw_attr_node attr;
4319
4320 attr.dw_attr = attr_kind;
4321 attr.dw_attr_val.val_class = dw_val_class_data8;
4322 attr.dw_attr_val.val_entry = NULL;
4323 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4324 add_dwarf_attr (die, &attr);
4325 }
4326
4327 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4328 dwarf_split_debug_info, address attributes in dies destined for the
4329 final executable have force_direct set to avoid using indexed
4330 references. */
4331
4332 static inline void
4333 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4334 bool force_direct)
4335 {
4336 dw_attr_node attr;
4337 char * lbl_id;
4338
4339 lbl_id = xstrdup (lbl_low);
4340 attr.dw_attr = DW_AT_low_pc;
4341 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4342 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4343 if (dwarf_split_debug_info && !force_direct)
4344 attr.dw_attr_val.val_entry
4345 = add_addr_table_entry (lbl_id, ate_kind_label);
4346 else
4347 attr.dw_attr_val.val_entry = NULL;
4348 add_dwarf_attr (die, &attr);
4349
4350 attr.dw_attr = DW_AT_high_pc;
4351 if (dwarf_version < 4)
4352 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4353 else
4354 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4355 lbl_id = xstrdup (lbl_high);
4356 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4357 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4358 && dwarf_split_debug_info && !force_direct)
4359 attr.dw_attr_val.val_entry
4360 = add_addr_table_entry (lbl_id, ate_kind_label);
4361 else
4362 attr.dw_attr_val.val_entry = NULL;
4363 add_dwarf_attr (die, &attr);
4364 }
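
/* When dw_val_class_high_pc is used (DWARF 4 and later), DW_AT_high_pc is
   emitted as the label difference high minus low, i.e. as an offset from
   DW_AT_low_pc rather than a second address, so it needs no address
   relocation (and no .debug_addr entry) of its own. */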
4365
4366 /* Hash and equality functions for debug_str_hash. */
4367
4368 hashval_t
4369 indirect_string_hasher::hash (indirect_string_node *x)
4370 {
4371 return htab_hash_string (x->str);
4372 }
4373
4374 bool
4375 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4376 {
4377 return strcmp (x1->str, x2) == 0;
4378 }
4379
4380 /* Add STR to the given string hash table, bump its reference count, and return its node. */
4381
4382 static struct indirect_string_node *
4383 find_AT_string_in_table (const char *str,
4384 hash_table<indirect_string_hasher> *table)
4385 {
4386 struct indirect_string_node *node;
4387
4388 indirect_string_node **slot
4389 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4390 if (*slot == NULL)
4391 {
4392 node = ggc_cleared_alloc<indirect_string_node> ();
4393 node->str = ggc_strdup (str);
4394 *slot = node;
4395 }
4396 else
4397 node = *slot;
4398
4399 node->refcount++;
4400 return node;
4401 }
4402
4403 /* Add STR to the indirect string hash table. */
4404
4405 static struct indirect_string_node *
4406 find_AT_string (const char *str)
4407 {
4408 if (! debug_str_hash)
4409 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4410
4411 return find_AT_string_in_table (str, debug_str_hash);
4412 }
4413
4414 /* Add a string attribute value to a DIE. */
4415
4416 static inline void
4417 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4418 {
4419 dw_attr_node attr;
4420 struct indirect_string_node *node;
4421
4422 node = find_AT_string (str);
4423
4424 attr.dw_attr = attr_kind;
4425 attr.dw_attr_val.val_class = dw_val_class_str;
4426 attr.dw_attr_val.val_entry = NULL;
4427 attr.dw_attr_val.v.val_str = node;
4428 add_dwarf_attr (die, &attr);
4429 }
4430
4431 static inline const char *
4432 AT_string (dw_attr_node *a)
4433 {
4434 gcc_assert (a && AT_class (a) == dw_val_class_str);
4435 return a->dw_attr_val.v.val_str->str;
4436 }
4437
4438 /* Call this function directly to force a string to be emitted indirectly,
4439 bypassing AT_string_form's logic that might keep the string inline in the DIE. */
4440
4441 static void
4442 set_indirect_string (struct indirect_string_node *node)
4443 {
4444 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4445 /* If the string is already indirect, this is a no-op. */
4446 if (node->form == DW_FORM_strp
4447 || node->form == DW_FORM_line_strp
4448 || node->form == DW_FORM_GNU_str_index)
4449 {
4450 gcc_assert (node->label);
4451 return;
4452 }
4453 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4454 ++dw2_string_counter;
4455 node->label = xstrdup (label);
4456
4457 if (!dwarf_split_debug_info)
4458 {
4459 node->form = DW_FORM_strp;
4460 node->index = NOT_INDEXED;
4461 }
4462 else
4463 {
4464 node->form = DW_FORM_GNU_str_index;
4465 node->index = NO_INDEX_ASSIGNED;
4466 }
4467 }
4468
4469 /* A helper function for dwarf2out_finish, called to reset the indirect
4470 string decisions made for early LTO DWARF output before the fat object
4471 DWARF output is produced. */
4472
4473 int
4474 reset_indirect_string (indirect_string_node **h, void *)
4475 {
4476 struct indirect_string_node *node = *h;
4477 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4478 {
4479 free (node->label);
4480 node->label = NULL;
4481 node->form = (dwarf_form) 0;
4482 node->index = 0;
4483 }
4484 return 1;
4485 }
4486
4487 /* Find out whether a string should be output inline in the DIE
4488 or out-of-line in the .debug_str section. */
4489
4490 static enum dwarf_form
4491 find_string_form (struct indirect_string_node *node)
4492 {
4493 unsigned int len;
4494
4495 if (node->form)
4496 return node->form;
4497
4498 len = strlen (node->str) + 1;
4499
4500 /* If the string is no longer than the size of the reference, it is
4501 always better to put it inline. */
4502 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4503 return node->form = DW_FORM_string;
4504
4505 /* If we cannot expect the linker to merge strings in the .debug_str
4506 section, only put the string into .debug_str if doing so pays off
4507 even within this single module. */
4508 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4509 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4510 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4511 return node->form = DW_FORM_string;
4512
4513 set_indirect_string (node);
4514
4515 return node->form;
4516 }
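
/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no mergeable .debug_str support:
     - "abc" (4 bytes including the terminating NUL): 4 <= 4, emitted
       inline as DW_FORM_string;
     - a 16-byte string referenced once: (16 - 4) * 1 = 12 <= 16, still
       inline, since a single .debug_str reference would not save space;
     - the same 16-byte string referenced twice: (16 - 4) * 2 = 24 > 16,
       so it is emitted once in .debug_str and referenced indirectly. */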
4517
4518 /* Find out whether the string referenced from the attribute should be
4519 output inline in the DIE or out-of-line in the .debug_str section. */
4520
4521 static enum dwarf_form
4522 AT_string_form (dw_attr_node *a)
4523 {
4524 gcc_assert (a && AT_class (a) == dw_val_class_str);
4525 return find_string_form (a->dw_attr_val.v.val_str);
4526 }
4527
4528 /* Add a DIE reference attribute value to a DIE. */
4529
4530 static inline void
4531 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4532 {
4533 dw_attr_node attr;
4534 gcc_checking_assert (targ_die != NULL);
4535
4536 /* With LTO we can end up trying to reference something we didn't create
4537 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4538 if (targ_die == NULL)
4539 return;
4540
4541 attr.dw_attr = attr_kind;
4542 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4543 attr.dw_attr_val.val_entry = NULL;
4544 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4545 attr.dw_attr_val.v.val_die_ref.external = 0;
4546 add_dwarf_attr (die, &attr);
4547 }
4548
4549 /* Change DIE reference REF to point to NEW_DIE instead. */
4550
4551 static inline void
4552 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4553 {
4554 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4555 ref->dw_attr_val.v.val_die_ref.die = new_die;
4556 ref->dw_attr_val.v.val_die_ref.external = 0;
4557 }
4558
4559 /* Add an AT_specification attribute to a DIE, and also make the back
4560 pointer from the specification to the definition. */
4561
4562 static inline void
4563 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4564 {
4565 add_AT_die_ref (die, DW_AT_specification, targ_die);
4566 gcc_assert (!targ_die->die_definition);
4567 targ_die->die_definition = die;
4568 }
4569
4570 static inline dw_die_ref
4571 AT_ref (dw_attr_node *a)
4572 {
4573 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4574 return a->dw_attr_val.v.val_die_ref.die;
4575 }
4576
4577 static inline int
4578 AT_ref_external (dw_attr_node *a)
4579 {
4580 if (a && AT_class (a) == dw_val_class_die_ref)
4581 return a->dw_attr_val.v.val_die_ref.external;
4582
4583 return 0;
4584 }
4585
4586 static inline void
4587 set_AT_ref_external (dw_attr_node *a, int i)
4588 {
4589 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4590 a->dw_attr_val.v.val_die_ref.external = i;
4591 }
4592
4593 /* Add an FDE reference attribute value to a DIE. */
4594
4595 static inline void
4596 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4597 {
4598 dw_attr_node attr;
4599
4600 attr.dw_attr = attr_kind;
4601 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4602 attr.dw_attr_val.val_entry = NULL;
4603 attr.dw_attr_val.v.val_fde_index = targ_fde;
4604 add_dwarf_attr (die, &attr);
4605 }
4606
4607 /* Add a location description attribute value to a DIE. */
4608
4609 static inline void
4610 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4611 {
4612 dw_attr_node attr;
4613
4614 attr.dw_attr = attr_kind;
4615 attr.dw_attr_val.val_class = dw_val_class_loc;
4616 attr.dw_attr_val.val_entry = NULL;
4617 attr.dw_attr_val.v.val_loc = loc;
4618 add_dwarf_attr (die, &attr);
4619 }
4620
4621 static inline dw_loc_descr_ref
4622 AT_loc (dw_attr_node *a)
4623 {
4624 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4625 return a->dw_attr_val.v.val_loc;
4626 }
4627
4628 static inline void
4629 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4630 {
4631 dw_attr_node attr;
4632
4633 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4634 return;
4635
4636 attr.dw_attr = attr_kind;
4637 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4638 attr.dw_attr_val.val_entry = NULL;
4639 attr.dw_attr_val.v.val_loc_list = loc_list;
4640 add_dwarf_attr (die, &attr);
4641 have_location_lists = true;
4642 }
4643
4644 static inline dw_loc_list_ref
4645 AT_loc_list (dw_attr_node *a)
4646 {
4647 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4648 return a->dw_attr_val.v.val_loc_list;
4649 }
4650
4651 static inline dw_loc_list_ref *
4652 AT_loc_list_ptr (dw_attr_node *a)
4653 {
4654 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4655 return &a->dw_attr_val.v.val_loc_list;
4656 }
4657
4658 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4659 {
4660 static hashval_t hash (addr_table_entry *);
4661 static bool equal (addr_table_entry *, addr_table_entry *);
4662 };
4663
4664 /* Table of entries into the .debug_addr section. */
4665
4666 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4667
4668 /* Hash an addr_table_entry. */
4669
4670 hashval_t
4671 addr_hasher::hash (addr_table_entry *a)
4672 {
4673 inchash::hash hstate;
4674 switch (a->kind)
4675 {
4676 case ate_kind_rtx:
4677 hstate.add_int (0);
4678 break;
4679 case ate_kind_rtx_dtprel:
4680 hstate.add_int (1);
4681 break;
4682 case ate_kind_label:
4683 return htab_hash_string (a->addr.label);
4684 default:
4685 gcc_unreachable ();
4686 }
4687 inchash::add_rtx (a->addr.rtl, hstate);
4688 return hstate.end ();
4689 }
4690
4691 /* Determine equality for two addr_table_entry objects. */
4692
4693 bool
4694 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4695 {
4696 if (a1->kind != a2->kind)
4697 return 0;
4698 switch (a1->kind)
4699 {
4700 case ate_kind_rtx:
4701 case ate_kind_rtx_dtprel:
4702 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4703 case ate_kind_label:
4704 return strcmp (a1->addr.label, a2->addr.label) == 0;
4705 default:
4706 gcc_unreachable ();
4707 }
4708 }
4709
4710 /* Initialize an addr_table_entry. */
4711
4712 void
4713 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4714 {
4715 e->kind = kind;
4716 switch (kind)
4717 {
4718 case ate_kind_rtx:
4719 case ate_kind_rtx_dtprel:
4720 e->addr.rtl = (rtx) addr;
4721 break;
4722 case ate_kind_label:
4723 e->addr.label = (char *) addr;
4724 break;
4725 }
4726 e->refcount = 0;
4727 e->index = NO_INDEX_ASSIGNED;
4728 }
4729
4730 /* Add an entry for ADDR (of kind KIND) to the address table, deferring
4731 index assignment until output time. Return the table entry. */
4732
4733 static addr_table_entry *
4734 add_addr_table_entry (void *addr, enum ate_kind kind)
4735 {
4736 addr_table_entry *node;
4737 addr_table_entry finder;
4738
4739 gcc_assert (dwarf_split_debug_info);
4740 if (! addr_index_table)
4741 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4742 init_addr_table_entry (&finder, kind, addr);
4743 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4744
4745 if (*slot == HTAB_EMPTY_ENTRY)
4746 {
4747 node = ggc_cleared_alloc<addr_table_entry> ();
4748 init_addr_table_entry (node, kind, addr);
4749 *slot = node;
4750 }
4751 else
4752 node = *slot;
4753
4754 node->refcount++;
4755 return node;
4756 }
4757
4758 /* Remove an entry from the addr table by decrementing its refcount.
4759 Strictly, decrementing the refcount would be enough, but the
4760 assertion that the entry is actually in the table has found
4761 bugs. */
4762
4763 static void
4764 remove_addr_table_entry (addr_table_entry *entry)
4765 {
4766 gcc_assert (dwarf_split_debug_info && addr_index_table);
4767 /* After an index is assigned, the table is frozen. */
4768 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4769 entry->refcount--;
4770 }
4771
4772 /* Given a location list, remove all addresses it refers to from the
4773 address_table. */
4774
4775 static void
4776 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4777 {
4778 for (; descr; descr = descr->dw_loc_next)
4779 if (descr->dw_loc_oprnd1.val_entry != NULL)
4780 {
4781 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4782 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4783 }
4784 }
4785
4786 /* A helper function for dwarf2out_finish called through
4787 htab_traverse. Assign an addr_table_entry its index. All entries
4788 must be collected into the table when this function is called,
4789 because the indexing code relies on htab_traverse to traverse nodes
4790 in the same order for each run. */
4791
4792 int
4793 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4794 {
4795 addr_table_entry *node = *h;
4796
4797 /* Don't index unreferenced nodes. */
4798 if (node->refcount == 0)
4799 return 1;
4800
4801 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4802 node->index = *index;
4803 *index += 1;
4804
4805 return 1;
4806 }
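
/* Illustrative sketch only (not part of dwarf2out.c): roughly how the
   callback above is driven when .debug_addr is about to be emitted. The
   helper name example_assign_addr_indexes is made up; the real driver
   lives in dwarf2out_finish. */
#if 0
static unsigned int
example_assign_addr_indexes (void)
{
  unsigned int index = 0;
  if (addr_index_table != NULL)
    /* Walk every entry in a stable order, giving each referenced entry
       the next dense index. */
    addr_index_table->traverse_noresize
      <unsigned int *, index_addr_table_entry> (&index);
  return index; /* Number of entries that received an index. */
}
#endif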
4807
4808 /* Add an address constant attribute value to a DIE. When using
4809 dwarf_split_debug_info, address attributes in dies destined for the
4810 final executable should be direct references--setting the parameter
4811 force_direct ensures this behavior. */
4812
4813 static inline void
4814 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4815 bool force_direct)
4816 {
4817 dw_attr_node attr;
4818
4819 attr.dw_attr = attr_kind;
4820 attr.dw_attr_val.val_class = dw_val_class_addr;
4821 attr.dw_attr_val.v.val_addr = addr;
4822 if (dwarf_split_debug_info && !force_direct)
4823 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4824 else
4825 attr.dw_attr_val.val_entry = NULL;
4826 add_dwarf_attr (die, &attr);
4827 }
4828
4829 /* Get the RTX from an address DIE attribute. */
4830
4831 static inline rtx
4832 AT_addr (dw_attr_node *a)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4835 return a->dw_attr_val.v.val_addr;
4836 }
4837
4838 /* Add a file attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4842 struct dwarf_file_data *fd)
4843 {
4844 dw_attr_node attr;
4845
4846 attr.dw_attr = attr_kind;
4847 attr.dw_attr_val.val_class = dw_val_class_file;
4848 attr.dw_attr_val.val_entry = NULL;
4849 attr.dw_attr_val.v.val_file = fd;
4850 add_dwarf_attr (die, &attr);
4851 }
4852
4853 /* Get the dwarf_file_data from a file DIE attribute. */
4854
4855 static inline struct dwarf_file_data *
4856 AT_file (dw_attr_node *a)
4857 {
4858 gcc_assert (a && (AT_class (a) == dw_val_class_file
4859 || AT_class (a) == dw_val_class_file_implicit));
4860 return a->dw_attr_val.v.val_file;
4861 }
4862
4863 /* Add a vms delta attribute value to a DIE. */
4864
4865 static inline void
4866 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4867 const char *lbl1, const char *lbl2)
4868 {
4869 dw_attr_node attr;
4870
4871 attr.dw_attr = attr_kind;
4872 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4873 attr.dw_attr_val.val_entry = NULL;
4874 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4875 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4876 add_dwarf_attr (die, &attr);
4877 }
4878
4879 /* Add a label identifier attribute value to a DIE. */
4880
4881 static inline void
4882 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4883 const char *lbl_id)
4884 {
4885 dw_attr_node attr;
4886
4887 attr.dw_attr = attr_kind;
4888 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4889 attr.dw_attr_val.val_entry = NULL;
4890 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4891 if (dwarf_split_debug_info)
4892 attr.dw_attr_val.val_entry
4893 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4894 ate_kind_label);
4895 add_dwarf_attr (die, &attr);
4896 }
4897
4898 /* Add a section offset attribute value to a DIE, an offset into the
4899 debug_line section. */
4900
4901 static inline void
4902 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4903 const char *label)
4904 {
4905 dw_attr_node attr;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4911 add_dwarf_attr (die, &attr);
4912 }
4913
4914 /* Add a section offset attribute value to a DIE, an offset into the
4915 debug_loclists section. */
4916
4917 static inline void
4918 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4919 const char *label)
4920 {
4921 dw_attr_node attr;
4922
4923 attr.dw_attr = attr_kind;
4924 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4925 attr.dw_attr_val.val_entry = NULL;
4926 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4927 add_dwarf_attr (die, &attr);
4928 }
4929
4930 /* Add a section offset attribute value to a DIE, an offset into the
4931 debug_macinfo section. */
4932
4933 static inline void
4934 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4935 const char *label)
4936 {
4937 dw_attr_node attr;
4938
4939 attr.dw_attr = attr_kind;
4940 attr.dw_attr_val.val_class = dw_val_class_macptr;
4941 attr.dw_attr_val.val_entry = NULL;
4942 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4943 add_dwarf_attr (die, &attr);
4944 }
4945
4946 /* Add an offset attribute value to a DIE. */
4947
4948 static inline void
4949 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4950 unsigned HOST_WIDE_INT offset)
4951 {
4952 dw_attr_node attr;
4953
4954 attr.dw_attr = attr_kind;
4955 attr.dw_attr_val.val_class = dw_val_class_offset;
4956 attr.dw_attr_val.val_entry = NULL;
4957 attr.dw_attr_val.v.val_offset = offset;
4958 add_dwarf_attr (die, &attr);
4959 }
4960
4961 /* Add a range_list attribute value to a DIE. When using
4962 dwarf_split_debug_info, address attributes in dies destined for the
4963 final executable should be direct references--setting the parameter
4964 force_direct ensures this behavior. */
4965
4966 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4967 #define RELOCATED_OFFSET (NULL)
4968
4969 static void
4970 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4971 long unsigned int offset, bool force_direct)
4972 {
4973 dw_attr_node attr;
4974
4975 attr.dw_attr = attr_kind;
4976 attr.dw_attr_val.val_class = dw_val_class_range_list;
4977 /* For the range_list attribute, use val_entry to store whether the
4978 offset should follow split-debug-info or normal semantics. This
4979 value is read in output_range_list_offset. */
4980 if (dwarf_split_debug_info && !force_direct)
4981 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4982 else
4983 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4984 attr.dw_attr_val.v.val_offset = offset;
4985 add_dwarf_attr (die, &attr);
4986 }
4987
4988 /* Return the start label of a delta attribute. */
4989
4990 static inline const char *
4991 AT_vms_delta1 (dw_attr_node *a)
4992 {
4993 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4994 return a->dw_attr_val.v.val_vms_delta.lbl1;
4995 }
4996
4997 /* Return the end label of a delta attribute. */
4998
4999 static inline const char *
5000 AT_vms_delta2 (dw_attr_node *a)
5001 {
5002 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5003 return a->dw_attr_val.v.val_vms_delta.lbl2;
5004 }
5005
5006 static inline const char *
5007 AT_lbl (dw_attr_node *a)
5008 {
5009 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5010 || AT_class (a) == dw_val_class_lineptr
5011 || AT_class (a) == dw_val_class_macptr
5012 || AT_class (a) == dw_val_class_loclistsptr
5013 || AT_class (a) == dw_val_class_high_pc));
5014 return a->dw_attr_val.v.val_lbl_id;
5015 }
5016
5017 /* Get the attribute of kind ATTR_KIND, following specification/abstract-origin links. */
5018
5019 static dw_attr_node *
5020 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5021 {
5022 dw_attr_node *a;
5023 unsigned ix;
5024 dw_die_ref spec = NULL;
5025
5026 if (! die)
5027 return NULL;
5028
5029 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5030 if (a->dw_attr == attr_kind)
5031 return a;
5032 else if (a->dw_attr == DW_AT_specification
5033 || a->dw_attr == DW_AT_abstract_origin)
5034 spec = AT_ref (a);
5035
5036 if (spec)
5037 return get_AT (spec, attr_kind);
5038
5039 return NULL;
5040 }
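
/* As an example of the indirection above: a member-function definition DIE
   that only carries DW_AT_specification will still answer queries such as
   get_AT (die, DW_AT_name) by following the specification link back to the
   in-class declaration DIE and returning its name attribute. */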
5041
5042 /* Returns the parent of the declaration of DIE. */
5043
5044 static dw_die_ref
5045 get_die_parent (dw_die_ref die)
5046 {
5047 dw_die_ref t;
5048
5049 if (!die)
5050 return NULL;
5051
5052 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5053 || (t = get_AT_ref (die, DW_AT_specification)))
5054 die = t;
5055
5056 return die->die_parent;
5057 }
5058
5059 /* Return the "low pc" attribute value, typically associated with a subprogram
5060 DIE. Return null if the "low pc" attribute is either not present, or if it
5061 cannot be represented as an assembler label identifier. */
5062
5063 static inline const char *
5064 get_AT_low_pc (dw_die_ref die)
5065 {
5066 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5067
5068 return a ? AT_lbl (a) : NULL;
5069 }
5070
5071 /* Return the "high pc" attribute value, typically associated with a subprogram
5072 DIE. Return null if the "high pc" attribute is either not present, or if it
5073 cannot be represented as an assembler label identifier. */
5074
5075 static inline const char *
5076 get_AT_hi_pc (dw_die_ref die)
5077 {
5078 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5079
5080 return a ? AT_lbl (a) : NULL;
5081 }
5082
5083 /* Return the value of the string attribute designated by ATTR_KIND, or
5084 NULL if it is not present. */
5085
5086 static inline const char *
5087 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5088 {
5089 dw_attr_node *a = get_AT (die, attr_kind);
5090
5091 return a ? AT_string (a) : NULL;
5092 }
5093
5094 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5095 if it is not present. */
5096
5097 static inline int
5098 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5099 {
5100 dw_attr_node *a = get_AT (die, attr_kind);
5101
5102 return a ? AT_flag (a) : 0;
5103 }
5104
5105 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5106 if it is not present. */
5107
5108 static inline unsigned
5109 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5110 {
5111 dw_attr_node *a = get_AT (die, attr_kind);
5112
5113 return a ? AT_unsigned (a) : 0;
5114 }
5115
5116 static inline dw_die_ref
5117 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5118 {
5119 dw_attr_node *a = get_AT (die, attr_kind);
5120
5121 return a ? AT_ref (a) : NULL;
5122 }
5123
5124 static inline struct dwarf_file_data *
5125 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5126 {
5127 dw_attr_node *a = get_AT (die, attr_kind);
5128
5129 return a ? AT_file (a) : NULL;
5130 }
5131
5132 /* Return TRUE if the language is C++. */
5133
5134 static inline bool
5135 is_cxx (void)
5136 {
5137 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5138
5139 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5140 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5141 }
5142
5143 /* Return TRUE if DECL was created by the C++ frontend. */
5144
5145 static bool
5146 is_cxx (const_tree decl)
5147 {
5148 if (in_lto_p)
5149 {
5150 const_tree context = get_ultimate_context (decl);
5151 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5152 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5153 }
5154 return is_cxx ();
5155 }
5156
5157 /* Return TRUE if the language is Fortran. */
5158
5159 static inline bool
5160 is_fortran (void)
5161 {
5162 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5163
5164 return (lang == DW_LANG_Fortran77
5165 || lang == DW_LANG_Fortran90
5166 || lang == DW_LANG_Fortran95
5167 || lang == DW_LANG_Fortran03
5168 || lang == DW_LANG_Fortran08);
5169 }
5170
5171 static inline bool
5172 is_fortran (const_tree decl)
5173 {
5174 if (in_lto_p)
5175 {
5176 const_tree context = get_ultimate_context (decl);
5177 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5178 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5179 "GNU Fortran", 11) == 0
5180 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5181 "GNU F77") == 0);
5182 }
5183 return is_fortran ();
5184 }
5185
5186 /* Return TRUE if the language is Ada. */
5187
5188 static inline bool
5189 is_ada (void)
5190 {
5191 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5192
5193 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5194 }
5195
5196 /* Remove the specified attribute if present. Return TRUE if removal
5197 was successful. */
5198
5199 static bool
5200 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5201 {
5202 dw_attr_node *a;
5203 unsigned ix;
5204
5205 if (! die)
5206 return false;
5207
5208 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5209 if (a->dw_attr == attr_kind)
5210 {
5211 if (AT_class (a) == dw_val_class_str)
5212 if (a->dw_attr_val.v.val_str->refcount)
5213 a->dw_attr_val.v.val_str->refcount--;
5214
5215 /* vec::ordered_remove should help reduce the number of abbrevs
5216 that are needed. */
5217 die->die_attr->ordered_remove (ix);
5218 return true;
5219 }
5220 return false;
5221 }
5222
5223 /* Remove CHILD from its parent. PREV must have the property that
5224 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5225
5226 static void
5227 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5228 {
5229 gcc_assert (child->die_parent == prev->die_parent);
5230 gcc_assert (prev->die_sib == child);
5231 if (prev == child)
5232 {
5233 gcc_assert (child->die_parent->die_child == child);
5234 prev = NULL;
5235 }
5236 else
5237 prev->die_sib = child->die_sib;
5238 if (child->die_parent->die_child == child)
5239 child->die_parent->die_child = prev;
5240 child->die_sib = NULL;
5241 }
5242
5243 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5244 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5245
5246 static void
5247 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5248 {
5249 dw_die_ref parent = old_child->die_parent;
5250
5251 gcc_assert (parent == prev->die_parent);
5252 gcc_assert (prev->die_sib == old_child);
5253
5254 new_child->die_parent = parent;
5255 if (prev == old_child)
5256 {
5257 gcc_assert (parent->die_child == old_child);
5258 new_child->die_sib = new_child;
5259 }
5260 else
5261 {
5262 prev->die_sib = new_child;
5263 new_child->die_sib = old_child->die_sib;
5264 }
5265 if (old_child->die_parent->die_child == old_child)
5266 old_child->die_parent->die_child = new_child;
5267 old_child->die_sib = NULL;
5268 }
5269
5270 /* Move all children from OLD_PARENT to NEW_PARENT. */
5271
5272 static void
5273 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5274 {
5275 dw_die_ref c;
5276 new_parent->die_child = old_parent->die_child;
5277 old_parent->die_child = NULL;
5278 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5279 }
5280
5281 /* Remove from DIE all children whose die_tag is TAG. Do nothing if
5282 no child matches TAG. */
5283
5284 static void
5285 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5286 {
5287 dw_die_ref c;
5288
5289 c = die->die_child;
5290 if (c) do {
5291 dw_die_ref prev = c;
5292 c = c->die_sib;
5293 while (c->die_tag == tag)
5294 {
5295 remove_child_with_prev (c, prev);
5296 c->die_parent = NULL;
5297 /* Might have removed every child. */
5298 if (die->die_child == NULL)
5299 return;
5300 c = prev->die_sib;
5301 }
5302 } while (c != die->die_child);
5303 }
5304
5305 /* Add a CHILD_DIE as the last child of DIE. */
5306
5307 static void
5308 add_child_die (dw_die_ref die, dw_die_ref child_die)
5309 {
5310 /* FIXME this should probably be an assert. */
5311 if (! die || ! child_die)
5312 return;
5313 gcc_assert (die != child_die);
5314
5315 child_die->die_parent = die;
5316 if (die->die_child)
5317 {
5318 child_die->die_sib = die->die_child->die_sib;
5319 die->die_child->die_sib = child_die;
5320 }
5321 else
5322 child_die->die_sib = child_die;
5323 die->die_child = child_die;
5324 }
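/* Implementation note (follows from add_child_die above): the children of a
   DIE form a circular singly-linked list threaded through die_sib.
   DIE->die_child points at the most recently added (last) child, and the last
   child's die_sib wraps around to the first child.  For example, after adding
   A, B and C to an empty DIE in that order: die->die_child == C,
   A->die_sib == B, B->die_sib == C and C->die_sib == A.  */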
5325
5326 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5327
5328 static void
5329 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5330 dw_die_ref after_die)
5331 {
5332 gcc_assert (die
5333 && child_die
5334 && after_die
5335 && die->die_child
5336 && die != child_die);
5337
5338 child_die->die_parent = die;
5339 child_die->die_sib = after_die->die_sib;
5340 after_die->die_sib = child_die;
5341 if (die->die_child == after_die)
5342 die->die_child = child_die;
5343 }
5344
5345 /* Unassociate CHILD from its parent, and make its parent be
5346 NEW_PARENT. */
5347
5348 static void
5349 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5350 {
5351 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5352 if (p->die_sib == child)
5353 {
5354 remove_child_with_prev (child, p);
5355 break;
5356 }
5357 add_child_die (new_parent, child);
5358 }
5359
5360 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5361 is the specification, to the end of PARENT's list of children.
5362 This is done by removing and re-adding it. */
5363
5364 static void
5365 splice_child_die (dw_die_ref parent, dw_die_ref child)
5366 {
5367 /* We want the declaration DIE from inside the class, not the
5368 specification DIE at toplevel. */
5369 if (child->die_parent != parent)
5370 {
5371 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5372
5373 if (tmp)
5374 child = tmp;
5375 }
5376
5377 gcc_assert (child->die_parent == parent
5378 || (child->die_parent
5379 == get_AT_ref (parent, DW_AT_specification)));
5380
5381 reparent_child (child, parent);
5382 }
5383
5384 /* Create and return a new die with TAG_VALUE as tag. */
5385
5386 static inline dw_die_ref
5387 new_die_raw (enum dwarf_tag tag_value)
5388 {
5389 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5390 die->die_tag = tag_value;
5391 return die;
5392 }
5393
5394 /* Create and return a new die with a parent of PARENT_DIE. If
5395 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5396 associated tree T must be supplied to determine parenthood
5397 later. */
5398
5399 static inline dw_die_ref
5400 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5401 {
5402 dw_die_ref die = new_die_raw (tag_value);
5403
5404 if (parent_die != NULL)
5405 add_child_die (parent_die, die);
5406 else
5407 {
5408 limbo_die_node *limbo_node;
5409
5410 /* No DIEs created after early dwarf should end up in limbo,
5411 because the limbo list should not persist past LTO
5412 streaming. */
5413 if (tag_value != DW_TAG_compile_unit
5414 /* These are allowed because they're generated while
5415 breaking out COMDAT units late. */
5416 && tag_value != DW_TAG_type_unit
5417 && tag_value != DW_TAG_skeleton_unit
5418 && !early_dwarf
5419 /* Allow nested functions to live in limbo because they will
5420 only temporarily live there, as decls_for_scope will fix
5421 them up. */
5422 && (TREE_CODE (t) != FUNCTION_DECL
5423 || !decl_function_context (t))
5424 /* Same as nested functions above but for types. Types that
5425 are local to a function will be fixed in
5426 decls_for_scope. */
5427 && (!RECORD_OR_UNION_TYPE_P (t)
5428 || !TYPE_CONTEXT (t)
5429 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5430 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5431 especially in the ltrans stage, but once we implement LTO
5432 dwarf streaming, we should remove this exception. */
5433 && !in_lto_p)
5434 {
5435 fprintf (stderr, "symbol ended up in limbo too late:");
5436 debug_generic_stmt (t);
5437 gcc_unreachable ();
5438 }
5439
5440 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5441 limbo_node->die = die;
5442 limbo_node->created_for = t;
5443 limbo_node->next = limbo_die_list;
5444 limbo_die_list = limbo_node;
5445 }
5446
5447 return die;
5448 }
5449
5450 /* Return the DIE associated with the given type specifier. */
5451
5452 static inline dw_die_ref
5453 lookup_type_die (tree type)
5454 {
5455 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5456 if (die && die->removed)
5457 {
5458 TYPE_SYMTAB_DIE (type) = NULL;
5459 return NULL;
5460 }
5461 return die;
5462 }
5463
5464 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5465 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5466 anonymous type instead of the one of the naming typedef. */
5467
5468 static inline dw_die_ref
5469 strip_naming_typedef (tree type, dw_die_ref type_die)
5470 {
5471 if (type
5472 && TREE_CODE (type) == RECORD_TYPE
5473 && type_die
5474 && type_die->die_tag == DW_TAG_typedef
5475 && is_naming_typedef_decl (TYPE_NAME (type)))
5476 type_die = get_AT_ref (type_die, DW_AT_type);
5477 return type_die;
5478 }
5479
5480 /* Like lookup_type_die, but if type is an anonymous type named by a
5481 typedef[1], return the DIE of the anonymous type instead of the
5482 one of the naming typedef. This is because in gen_typedef_die, we
5483 equated the anonymous struct named by the typedef with the DIE of
5484 the naming typedef. So by default, lookup_type_die on an anonymous
5485 struct yields the DIE of the naming typedef.
5486
5487 [1]: Read the comment of is_naming_typedef_decl to learn about what
5488 a naming typedef is. */
5489
5490 static inline dw_die_ref
5491 lookup_type_die_strip_naming_typedef (tree type)
5492 {
5493 dw_die_ref die = lookup_type_die (type);
5494 return strip_naming_typedef (type, die);
5495 }
5496
5497 /* Equate a DIE to a given type specifier. */
5498
5499 static inline void
5500 equate_type_number_to_die (tree type, dw_die_ref type_die)
5501 {
5502 TYPE_SYMTAB_DIE (type) = type_die;
5503 }
5504
5505 /* Returns a hash value for X (which really is a die_struct). */
5506
5507 inline hashval_t
5508 decl_die_hasher::hash (die_node *x)
5509 {
5510 return (hashval_t) x->decl_id;
5511 }
5512
5513 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5514
5515 inline bool
5516 decl_die_hasher::equal (die_node *x, tree y)
5517 {
5518 return (x->decl_id == DECL_UID (y));
5519 }
5520
5521 /* Return the DIE associated with a given declaration. */
5522
5523 static inline dw_die_ref
5524 lookup_decl_die (tree decl)
5525 {
5526 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5527 NO_INSERT);
5528 if (!die)
5529 return NULL;
5530 if ((*die)->removed)
5531 {
5532 decl_die_table->clear_slot (die);
5533 return NULL;
5534 }
5535 return *die;
5536 }
5537
5538
5539 /* For DECL which might have early dwarf output query a SYMBOL + OFFSET
5540 style reference. Return true if we found one referring to a DIE for
5541 DECL, otherwise return false. */
5542
5543 static bool
5544 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5545 unsigned HOST_WIDE_INT *off)
5546 {
5547 dw_die_ref die;
5548
5549 if (flag_wpa && !decl_die_table)
5550 return false;
5551
5552 if (TREE_CODE (decl) == BLOCK)
5553 die = BLOCK_DIE (decl);
5554 else
5555 die = lookup_decl_die (decl);
5556 if (!die)
5557 return false;
5558
5559 /* During WPA stage we currently use DIEs to store the
5560 decl <-> label + offset map. That's quite inefficient but it
5561 works for now. */
5562 if (flag_wpa)
5563 {
5564 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5565 if (!ref)
5566 {
5567 gcc_assert (die == comp_unit_die ());
5568 return false;
5569 }
5570 *off = ref->die_offset;
5571 *sym = ref->die_id.die_symbol;
5572 return true;
5573 }
5574
5575 /* Similar to get_ref_die_offset_label, but using the "correct"
5576 label. */
5577 *off = die->die_offset;
5578 while (die->die_parent)
5579 die = die->die_parent;
5580 /* For the containing CU DIE we compute a die_symbol in
5581 compute_comp_unit_symbol. */
5582 gcc_assert (die->die_tag == DW_TAG_compile_unit
5583 && die->die_id.die_symbol != NULL);
5584 *sym = die->die_id.die_symbol;
5585 return true;
5586 }
5587
5588 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5589
5590 static void
5591 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5592 const char *symbol, HOST_WIDE_INT offset)
5593 {
5594 /* Create a fake DIE that contains the reference. Don't use
5595 new_die because we don't want to end up in the limbo list. */
5596 dw_die_ref ref = new_die_raw (die->die_tag);
5597 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5598 ref->die_offset = offset;
5599 ref->with_offset = 1;
5600 add_AT_die_ref (die, attr_kind, ref);
5601 }
5602
5603 /* Create a DIE for DECL if required and add a reference to a DIE
5604 at SYMBOL + OFFSET which contains attributes dumped early. */
5605
5606 static void
5607 dwarf2out_register_external_die (tree decl, const char *sym,
5608 unsigned HOST_WIDE_INT off)
5609 {
5610 if (debug_info_level == DINFO_LEVEL_NONE)
5611 return;
5612
5613 if (flag_wpa && !decl_die_table)
5614 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5615
5616 dw_die_ref die
5617 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5618 gcc_assert (!die);
5619
5620 tree ctx;
5621 dw_die_ref parent = NULL;
5622 /* Need to look up a DIE for the decl's context - the containing
5623 function or translation unit. */
5624 if (TREE_CODE (decl) == BLOCK)
5625 {
5626 ctx = BLOCK_SUPERCONTEXT (decl);
5627 /* ??? We do not output DIEs for all scopes, thus skip as
5628 many DIEs as needed. */
5629 while (TREE_CODE (ctx) == BLOCK
5630 && !BLOCK_DIE (ctx))
5631 ctx = BLOCK_SUPERCONTEXT (ctx);
5632 }
5633 else
5634 ctx = DECL_CONTEXT (decl);
5635 while (ctx && TYPE_P (ctx))
5636 ctx = TYPE_CONTEXT (ctx);
5637 if (ctx)
5638 {
5639 if (TREE_CODE (ctx) == BLOCK)
5640 parent = BLOCK_DIE (ctx);
5641 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5642 /* Keep the 1:1 association during WPA. */
5643 && !flag_wpa)
5644 /* Otherwise all late annotations go to the main CU which
5645 imports the original CUs. */
5646 parent = comp_unit_die ();
5647 else if (TREE_CODE (ctx) == FUNCTION_DECL
5648 && TREE_CODE (decl) != PARM_DECL
5649 && TREE_CODE (decl) != BLOCK)
5650 /* Leave parent determination for function-local entities to when
5651 we process scope vars. */
5652 ;
5653 else
5654 parent = lookup_decl_die (ctx);
5655 }
5656 else
5657 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5658 Handle this case gracefully by globalizing stuff. */
5659 parent = comp_unit_die ();
5660 /* Create a DIE "stub". */
5661 switch (TREE_CODE (decl))
5662 {
5663 case TRANSLATION_UNIT_DECL:
5664 if (! flag_wpa)
5665 {
5666 die = comp_unit_die ();
5667 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5668 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5669 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5670 to create a DIE for the original CUs. */
5671 return;
5672 }
5673 /* Keep the 1:1 association during WPA. */
5674 die = new_die (DW_TAG_compile_unit, NULL, decl);
5675 break;
5676 case NAMESPACE_DECL:
5677 if (is_fortran (decl))
5678 die = new_die (DW_TAG_module, parent, decl);
5679 else
5680 die = new_die (DW_TAG_namespace, parent, decl);
5681 break;
5682 case FUNCTION_DECL:
5683 die = new_die (DW_TAG_subprogram, parent, decl);
5684 break;
5685 case VAR_DECL:
5686 die = new_die (DW_TAG_variable, parent, decl);
5687 break;
5688 case RESULT_DECL:
5689 die = new_die (DW_TAG_variable, parent, decl);
5690 break;
5691 case PARM_DECL:
5692 die = new_die (DW_TAG_formal_parameter, parent, decl);
5693 break;
5694 case CONST_DECL:
5695 die = new_die (DW_TAG_constant, parent, decl);
5696 break;
5697 case LABEL_DECL:
5698 die = new_die (DW_TAG_label, parent, decl);
5699 break;
5700 case BLOCK:
5701 die = new_die (DW_TAG_lexical_block, parent, decl);
5702 break;
5703 default:
5704 gcc_unreachable ();
5705 }
5706 if (TREE_CODE (decl) == BLOCK)
5707 BLOCK_DIE (decl) = die;
5708 else
5709 equate_decl_number_to_die (decl, die);
5710
5711 /* Add a reference to the DIE providing early debug at $sym + off. */
5712 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5713 }
5714
5715 /* Returns a hash value for X (which really is a var_loc_list). */
5716
5717 inline hashval_t
5718 decl_loc_hasher::hash (var_loc_list *x)
5719 {
5720 return (hashval_t) x->decl_id;
5721 }
5722
5723 /* Return nonzero if decl_id of var_loc_list X is the same as
5724 UID of decl *Y. */
5725
5726 inline bool
5727 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5728 {
5729 return (x->decl_id == DECL_UID (y));
5730 }
5731
5732 /* Return the var_loc list associated with a given declaration. */
5733
5734 static inline var_loc_list *
5735 lookup_decl_loc (const_tree decl)
5736 {
5737 if (!decl_loc_table)
5738 return NULL;
5739 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5740 }
5741
5742 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5743
5744 inline hashval_t
5745 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5746 {
5747 return (hashval_t) x->decl_id;
5748 }
5749
5750 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5751 UID of decl *Y. */
5752
5753 inline bool
5754 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5755 {
5756 return (x->decl_id == DECL_UID (y));
5757 }
5758
5759 /* Equate a DIE to a particular declaration. */
5760
5761 static void
5762 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5763 {
5764 unsigned int decl_id = DECL_UID (decl);
5765
5766 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5767 decl_die->decl_id = decl_id;
5768 }
5769
5770 /* Return how many bits the PIECE EXPR_LIST covers. */
5771
5772 static HOST_WIDE_INT
5773 decl_piece_bitsize (rtx piece)
5774 {
5775 int ret = (int) GET_MODE (piece);
5776 if (ret)
5777 return ret;
5778 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5779 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5780 return INTVAL (XEXP (XEXP (piece, 0), 0));
5781 }
5782
5783 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
5784
5785 static rtx *
5786 decl_piece_varloc_ptr (rtx piece)
5787 {
5788 if ((int) GET_MODE (piece))
5789 return &XEXP (piece, 0);
5790 else
5791 return &XEXP (XEXP (piece, 0), 1);
5792 }
5793
5794 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5795 Next is the chain of following piece nodes. */
5796
5797 static rtx_expr_list *
5798 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5799 {
5800 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5801 return alloc_EXPR_LIST (bitsize, loc_note, next);
5802 else
5803 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5804 GEN_INT (bitsize),
5805 loc_note), next);
5806 }
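/* Implementation note: when BITSIZE fits in the machine mode field
   (1 .. MAX_MACHINE_MODE), it is stashed directly in the EXPR_LIST's mode and
   the note goes in XEXP (piece, 0); otherwise the mode is left as 0 and the
   node wraps a CONCAT of (CONST_INT bitsize, note), which is exactly what
   decl_piece_bitsize and decl_piece_varloc_ptr above decode.  */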
5807
5808 /* Return rtx that should be stored into loc field for
5809 LOC_NOTE and BITPOS/BITSIZE. */
5810
5811 static rtx
5812 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5813 HOST_WIDE_INT bitsize)
5814 {
5815 if (bitsize != -1)
5816 {
5817 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5818 if (bitpos != 0)
5819 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5820 }
5821 return loc_note;
5822 }
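/* For example, for a fragment that lives at bit offset 32 with a width of
   16 bits, construct_piece_list (note, 32, 16) builds a two-node piece list:
   a 32-bit padding piece with no location (NULL_RTX) followed by a 16-bit
   piece holding NOTE.  A BITSIZE of -1 (no fragment information) leaves
   LOC_NOTE unchanged.  */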
5823
5824 /* This function either modifies location piece list *DEST in
5825 place (if SRC and INNER are NULL), or copies location piece list
5826 *SRC to *DEST while modifying it. The location at BITPOS is
5827 changed to contain LOC_NOTE; any pieces overlapping it are removed
5828 (or, when copying, not copied), and if needed some padding around
5829 it is added. When modifying in place, DEST should point to the
5830 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
5831 copying, SRC points to the start of the whole list and INNER points
5832 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
5833
5834 static void
5835 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5836 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5837 HOST_WIDE_INT bitsize, rtx loc_note)
5838 {
5839 HOST_WIDE_INT diff;
5840 bool copy = inner != NULL;
5841
5842 if (copy)
5843 {
5844 /* First copy all nodes preceding the current bitpos. */
5845 while (src != inner)
5846 {
5847 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5848 decl_piece_bitsize (*src), NULL_RTX);
5849 dest = &XEXP (*dest, 1);
5850 src = &XEXP (*src, 1);
5851 }
5852 }
5853 /* Add padding if needed. */
5854 if (bitpos != piece_bitpos)
5855 {
5856 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5857 copy ? NULL_RTX : *dest);
5858 dest = &XEXP (*dest, 1);
5859 }
5860 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5861 {
5862 gcc_assert (!copy);
5863 /* A piece with the correct bitpos and bitsize already exists;
5864 just update the location for it and return. */
5865 *decl_piece_varloc_ptr (*dest) = loc_note;
5866 return;
5867 }
5868 /* Add the piece that changed. */
5869 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5870 dest = &XEXP (*dest, 1);
5871 /* Skip over pieces that overlap it. */
5872 diff = bitpos - piece_bitpos + bitsize;
5873 if (!copy)
5874 src = dest;
5875 while (diff > 0 && *src)
5876 {
5877 rtx piece = *src;
5878 diff -= decl_piece_bitsize (piece);
5879 if (copy)
5880 src = &XEXP (piece, 1);
5881 else
5882 {
5883 *src = XEXP (piece, 1);
5884 free_EXPR_LIST_node (piece);
5885 }
5886 }
5887 /* Add padding if needed. */
5888 if (diff < 0 && *src)
5889 {
5890 if (!copy)
5891 dest = src;
5892 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5893 dest = &XEXP (*dest, 1);
5894 }
5895 if (!copy)
5896 return;
5897 /* Finally copy all nodes following it. */
5898 while (*src)
5899 {
5900 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5901 decl_piece_bitsize (*src), NULL_RTX);
5902 dest = &XEXP (*dest, 1);
5903 src = &XEXP (*src, 1);
5904 }
5905 }
5906
5907 /* Add a variable location node to the linked list for DECL. */
5908
5909 static struct var_loc_node *
5910 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5911 {
5912 unsigned int decl_id;
5913 var_loc_list *temp;
5914 struct var_loc_node *loc = NULL;
5915 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5916
5917 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5918 {
5919 tree realdecl = DECL_DEBUG_EXPR (decl);
5920 if (handled_component_p (realdecl)
5921 || (TREE_CODE (realdecl) == MEM_REF
5922 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5923 {
5924 bool reverse;
5925 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5926 &bitsize, &reverse);
5927 if (!innerdecl
5928 || !DECL_P (innerdecl)
5929 || DECL_IGNORED_P (innerdecl)
5930 || TREE_STATIC (innerdecl)
5931 || bitsize == 0
5932 || bitpos + bitsize > 256)
5933 return NULL;
5934 decl = innerdecl;
5935 }
5936 }
5937
5938 decl_id = DECL_UID (decl);
5939 var_loc_list **slot
5940 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5941 if (*slot == NULL)
5942 {
5943 temp = ggc_cleared_alloc<var_loc_list> ();
5944 temp->decl_id = decl_id;
5945 *slot = temp;
5946 }
5947 else
5948 temp = *slot;
5949
5950 /* For PARM_DECLs try to keep around the original incoming value,
5951 even if that means we'll emit a zero-range .debug_loc entry. */
5952 if (temp->last
5953 && temp->first == temp->last
5954 && TREE_CODE (decl) == PARM_DECL
5955 && NOTE_P (temp->first->loc)
5956 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5957 && DECL_INCOMING_RTL (decl)
5958 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5959 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5960 == GET_CODE (DECL_INCOMING_RTL (decl))
5961 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5962 && (bitsize != -1
5963 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5964 NOTE_VAR_LOCATION_LOC (loc_note))
5965 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5966 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5967 {
5968 loc = ggc_cleared_alloc<var_loc_node> ();
5969 temp->first->next = loc;
5970 temp->last = loc;
5971 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5972 }
5973 else if (temp->last)
5974 {
5975 struct var_loc_node *last = temp->last, *unused = NULL;
5976 rtx *piece_loc = NULL, last_loc_note;
5977 HOST_WIDE_INT piece_bitpos = 0;
5978 if (last->next)
5979 {
5980 last = last->next;
5981 gcc_assert (last->next == NULL);
5982 }
5983 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5984 {
5985 piece_loc = &last->loc;
5986 do
5987 {
5988 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5989 if (piece_bitpos + cur_bitsize > bitpos)
5990 break;
5991 piece_bitpos += cur_bitsize;
5992 piece_loc = &XEXP (*piece_loc, 1);
5993 }
5994 while (*piece_loc);
5995 }
5996 /* TEMP->LAST here is a pointer either to the last-but-one or to
5997 the last element in the chained list; LAST is a pointer to the
5998 last element. */
5999 if (label && strcmp (last->label, label) == 0)
6000 {
6001 /* For SRA-optimized variables, if there weren't any real
6002 insns since the last note, just modify the last node. */
6003 if (piece_loc != NULL)
6004 {
6005 adjust_piece_list (piece_loc, NULL, NULL,
6006 bitpos, piece_bitpos, bitsize, loc_note);
6007 return NULL;
6008 }
6009 /* If the last note doesn't cover any instructions, remove it. */
6010 if (temp->last != last)
6011 {
6012 temp->last->next = NULL;
6013 unused = last;
6014 last = temp->last;
6015 gcc_assert (strcmp (last->label, label) != 0);
6016 }
6017 else
6018 {
6019 gcc_assert (temp->first == temp->last
6020 || (temp->first->next == temp->last
6021 && TREE_CODE (decl) == PARM_DECL));
6022 memset (temp->last, '\0', sizeof (*temp->last));
6023 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6024 return temp->last;
6025 }
6026 }
6027 if (bitsize == -1 && NOTE_P (last->loc))
6028 last_loc_note = last->loc;
6029 else if (piece_loc != NULL
6030 && *piece_loc != NULL_RTX
6031 && piece_bitpos == bitpos
6032 && decl_piece_bitsize (*piece_loc) == bitsize)
6033 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6034 else
6035 last_loc_note = NULL_RTX;
6036 /* If the current location is the same as the end of the list,
6037 and either both or neither of the locations is uninitialized,
6038 we have nothing to do. */
6039 if (last_loc_note == NULL_RTX
6040 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6041 NOTE_VAR_LOCATION_LOC (loc_note)))
6042 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6043 != NOTE_VAR_LOCATION_STATUS (loc_note))
6044 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6045 == VAR_INIT_STATUS_UNINITIALIZED)
6046 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6047 == VAR_INIT_STATUS_UNINITIALIZED))))
6048 {
6049 /* Add LOC to the end of list and update LAST. If the last
6050 element of the list has been removed above, reuse its
6051 memory for the new node, otherwise allocate a new one. */
6052 if (unused)
6053 {
6054 loc = unused;
6055 memset (loc, '\0', sizeof (*loc));
6056 }
6057 else
6058 loc = ggc_cleared_alloc<var_loc_node> ();
6059 if (bitsize == -1 || piece_loc == NULL)
6060 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6061 else
6062 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6063 bitpos, piece_bitpos, bitsize, loc_note);
6064 last->next = loc;
6065 /* Ensure TEMP->LAST will point either to the new last but one
6066 element of the chain, or to the last element in it. */
6067 if (last != temp->last)
6068 temp->last = last;
6069 }
6070 else if (unused)
6071 ggc_free (unused);
6072 }
6073 else
6074 {
6075 loc = ggc_cleared_alloc<var_loc_node> ();
6076 temp->first = loc;
6077 temp->last = loc;
6078 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6079 }
6080 return loc;
6081 }
6082 \f
6083 /* Keep track of the number of spaces used to indent the
6084 output of the debugging routines that print the structure of
6085 the DIE internal representation. */
6086 static int print_indent;
6087
6088 /* Indent the line the number of spaces given by print_indent. */
6089
6090 static inline void
6091 print_spaces (FILE *outfile)
6092 {
6093 fprintf (outfile, "%*s", print_indent, "");
6094 }
6095
6096 /* Print a type signature in hex. */
6097
6098 static inline void
6099 print_signature (FILE *outfile, char *sig)
6100 {
6101 int i;
6102
6103 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6104 fprintf (outfile, "%02x", sig[i] & 0xff);
6105 }
6106
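/* Print the discriminant value DISCR_VALUE to OUTFILE, using an unsigned or
   signed format depending on which member of its value union is valid.  */
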
6107 static inline void
6108 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6109 {
6110 if (discr_value->pos)
6111 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6112 else
6113 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6114 }
6115
6116 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6117
6118 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6119 RECURSE, output location descriptor operations. */
6120
6121 static void
6122 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6123 {
6124 switch (val->val_class)
6125 {
6126 case dw_val_class_addr:
6127 fprintf (outfile, "address");
6128 break;
6129 case dw_val_class_offset:
6130 fprintf (outfile, "offset");
6131 break;
6132 case dw_val_class_loc:
6133 fprintf (outfile, "location descriptor");
6134 if (val->v.val_loc == NULL)
6135 fprintf (outfile, " -> <null>\n");
6136 else if (recurse)
6137 {
6138 fprintf (outfile, ":\n");
6139 print_indent += 4;
6140 print_loc_descr (val->v.val_loc, outfile);
6141 print_indent -= 4;
6142 }
6143 else
6144 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6145 break;
6146 case dw_val_class_loc_list:
6147 fprintf (outfile, "location list -> label:%s",
6148 val->v.val_loc_list->ll_symbol);
6149 break;
6150 case dw_val_class_range_list:
6151 fprintf (outfile, "range list");
6152 break;
6153 case dw_val_class_const:
6154 case dw_val_class_const_implicit:
6155 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6156 break;
6157 case dw_val_class_unsigned_const:
6158 case dw_val_class_unsigned_const_implicit:
6159 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6160 break;
6161 case dw_val_class_const_double:
6162 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6163 HOST_WIDE_INT_PRINT_UNSIGNED")",
6164 val->v.val_double.high,
6165 val->v.val_double.low);
6166 break;
6167 case dw_val_class_wide_int:
6168 {
6169 int i = val->v.val_wide->get_len ();
6170 fprintf (outfile, "constant (");
6171 gcc_assert (i > 0);
6172 if (val->v.val_wide->elt (i - 1) == 0)
6173 fprintf (outfile, "0x");
6174 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6175 val->v.val_wide->elt (--i));
6176 while (--i >= 0)
6177 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6178 val->v.val_wide->elt (i));
6179 fprintf (outfile, ")");
6180 break;
6181 }
6182 case dw_val_class_vec:
6183 fprintf (outfile, "floating-point or vector constant");
6184 break;
6185 case dw_val_class_flag:
6186 fprintf (outfile, "%u", val->v.val_flag);
6187 break;
6188 case dw_val_class_die_ref:
6189 if (val->v.val_die_ref.die != NULL)
6190 {
6191 dw_die_ref die = val->v.val_die_ref.die;
6192
6193 if (die->comdat_type_p)
6194 {
6195 fprintf (outfile, "die -> signature: ");
6196 print_signature (outfile,
6197 die->die_id.die_type_node->signature);
6198 }
6199 else if (die->die_id.die_symbol)
6200 {
6201 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6202 if (die->with_offset)
6203 fprintf (outfile, " + %ld", die->die_offset);
6204 }
6205 else
6206 fprintf (outfile, "die -> %ld", die->die_offset);
6207 fprintf (outfile, " (%p)", (void *) die);
6208 }
6209 else
6210 fprintf (outfile, "die -> <null>");
6211 break;
6212 case dw_val_class_vms_delta:
6213 fprintf (outfile, "delta: @slotcount(%s-%s)",
6214 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6215 break;
6216 case dw_val_class_lbl_id:
6217 case dw_val_class_lineptr:
6218 case dw_val_class_macptr:
6219 case dw_val_class_loclistsptr:
6220 case dw_val_class_high_pc:
6221 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6222 break;
6223 case dw_val_class_str:
6224 if (val->v.val_str->str != NULL)
6225 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6226 else
6227 fprintf (outfile, "<null>");
6228 break;
6229 case dw_val_class_file:
6230 case dw_val_class_file_implicit:
6231 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6232 val->v.val_file->emitted_number);
6233 break;
6234 case dw_val_class_data8:
6235 {
6236 int i;
6237
6238 for (i = 0; i < 8; i++)
6239 fprintf (outfile, "%02x", val->v.val_data8[i]);
6240 break;
6241 }
6242 case dw_val_class_discr_value:
6243 print_discr_value (outfile, &val->v.val_discr_value);
6244 break;
6245 case dw_val_class_discr_list:
6246 for (dw_discr_list_ref node = val->v.val_discr_list;
6247 node != NULL;
6248 node = node->dw_discr_next)
6249 {
6250 if (node->dw_discr_range)
6251 {
6252 print_discr_value (outfile, &node->dw_discr_lower_bound);
6253 fprintf (outfile, " .. ");
6254 print_discr_value (outfile, &node->dw_discr_upper_bound);
6255 }
6256 else
6257 print_discr_value (outfile, &node->dw_discr_lower_bound);
6258
6259 if (node->dw_discr_next != NULL)
6260 fprintf (outfile, " | ");
6261 }
6262 default:
6263 break;
6264 }
6265 }
6266
6267 /* Likewise, for a DIE attribute. */
6268
6269 static void
6270 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6271 {
6272 print_dw_val (&a->dw_attr_val, recurse, outfile);
6273 }
6274
6275
6276 /* Print the list of operands in the LOC location description to OUTFILE. This
6277 routine is a debugging aid only. */
6278
6279 static void
6280 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6281 {
6282 dw_loc_descr_ref l = loc;
6283
6284 if (loc == NULL)
6285 {
6286 print_spaces (outfile);
6287 fprintf (outfile, "<null>\n");
6288 return;
6289 }
6290
6291 for (l = loc; l != NULL; l = l->dw_loc_next)
6292 {
6293 print_spaces (outfile);
6294 fprintf (outfile, "(%p) %s",
6295 (void *) l,
6296 dwarf_stack_op_name (l->dw_loc_opc));
6297 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6298 {
6299 fprintf (outfile, " ");
6300 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6301 }
6302 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6303 {
6304 fprintf (outfile, ", ");
6305 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6306 }
6307 fprintf (outfile, "\n");
6308 }
6309 }
6310
6311 /* Print the information associated with a given DIE, and its children.
6312 This routine is a debugging aid only. */
6313
6314 static void
6315 print_die (dw_die_ref die, FILE *outfile)
6316 {
6317 dw_attr_node *a;
6318 dw_die_ref c;
6319 unsigned ix;
6320
6321 print_spaces (outfile);
6322 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6323 die->die_offset, dwarf_tag_name (die->die_tag),
6324 (void*) die);
6325 print_spaces (outfile);
6326 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6327 fprintf (outfile, " offset: %ld", die->die_offset);
6328 fprintf (outfile, " mark: %d\n", die->die_mark);
6329
6330 if (die->comdat_type_p)
6331 {
6332 print_spaces (outfile);
6333 fprintf (outfile, " signature: ");
6334 print_signature (outfile, die->die_id.die_type_node->signature);
6335 fprintf (outfile, "\n");
6336 }
6337
6338 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6339 {
6340 print_spaces (outfile);
6341 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6342
6343 print_attribute (a, true, outfile);
6344 fprintf (outfile, "\n");
6345 }
6346
6347 if (die->die_child != NULL)
6348 {
6349 print_indent += 4;
6350 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6351 print_indent -= 4;
6352 }
6353 if (print_indent == 0)
6354 fprintf (outfile, "\n");
6355 }
6356
6357 /* Print the list of operations in the LOC location description. */
6358
6359 DEBUG_FUNCTION void
6360 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6361 {
6362 print_loc_descr (loc, stderr);
6363 }
6364
6365 /* Print the information collected for a given DIE. */
6366
6367 DEBUG_FUNCTION void
6368 debug_dwarf_die (dw_die_ref die)
6369 {
6370 print_die (die, stderr);
6371 }
6372
6373 DEBUG_FUNCTION void
6374 debug (die_struct &ref)
6375 {
6376 print_die (&ref, stderr);
6377 }
6378
6379 DEBUG_FUNCTION void
6380 debug (die_struct *ptr)
6381 {
6382 if (ptr)
6383 debug (*ptr);
6384 else
6385 fprintf (stderr, "<nil>\n");
6386 }
6387
6388
6389 /* Print all DWARF information collected for the compilation unit.
6390 This routine is a debugging aid only. */
6391
6392 DEBUG_FUNCTION void
6393 debug_dwarf (void)
6394 {
6395 print_indent = 0;
6396 print_die (comp_unit_die (), stderr);
6397 }
6398
6399 /* Verify the DIE tree structure. */
6400
6401 DEBUG_FUNCTION void
6402 verify_die (dw_die_ref die)
6403 {
6404 gcc_assert (!die->die_mark);
6405 if (die->die_parent == NULL
6406 && die->die_sib == NULL)
6407 return;
6408 /* Verify the die_sib list is cyclic. */
6409 dw_die_ref x = die;
6410 do
6411 {
6412 x->die_mark = 1;
6413 x = x->die_sib;
6414 }
6415 while (x && !x->die_mark);
6416 gcc_assert (x == die);
6417 x = die;
6418 do
6419 {
6420 /* Verify all dies have the same parent. */
6421 gcc_assert (x->die_parent == die->die_parent);
6422 if (x->die_child)
6423 {
6424 /* Verify the child has the proper parent and recurse. */
6425 gcc_assert (x->die_child->die_parent == x);
6426 verify_die (x->die_child);
6427 }
6428 x->die_mark = 0;
6429 x = x->die_sib;
6430 }
6431 while (x && x->die_mark);
6432 }
6433
6434 /* Sanity checks on DIEs. */
6435
6436 static void
6437 check_die (dw_die_ref die)
6438 {
6439 unsigned ix;
6440 dw_attr_node *a;
6441 bool inline_found = false;
6442 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6443 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6444 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6445 {
6446 switch (a->dw_attr)
6447 {
6448 case DW_AT_inline:
6449 if (a->dw_attr_val.v.val_unsigned)
6450 inline_found = true;
6451 break;
6452 case DW_AT_location:
6453 ++n_location;
6454 break;
6455 case DW_AT_low_pc:
6456 ++n_low_pc;
6457 break;
6458 case DW_AT_high_pc:
6459 ++n_high_pc;
6460 break;
6461 case DW_AT_artificial:
6462 ++n_artificial;
6463 break;
6464 case DW_AT_decl_column:
6465 ++n_decl_column;
6466 break;
6467 case DW_AT_decl_line:
6468 ++n_decl_line;
6469 break;
6470 case DW_AT_decl_file:
6471 ++n_decl_file;
6472 break;
6473 default:
6474 break;
6475 }
6476 }
6477 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6478 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6479 {
6480 fprintf (stderr, "Duplicate attributes in DIE:\n");
6481 debug_dwarf_die (die);
6482 gcc_unreachable ();
6483 }
6484 if (inline_found)
6485 {
6486 /* A debugging information entry that is a member of an abstract
6487 instance tree [that has DW_AT_inline] should not contain any
6488 attributes which describe aspects of the subroutine which vary
6489 between distinct inlined expansions or distinct out-of-line
6490 expansions. */
6491 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6492 gcc_assert (a->dw_attr != DW_AT_low_pc
6493 && a->dw_attr != DW_AT_high_pc
6494 && a->dw_attr != DW_AT_location
6495 && a->dw_attr != DW_AT_frame_base
6496 && a->dw_attr != DW_AT_call_all_calls
6497 && a->dw_attr != DW_AT_GNU_all_call_sites);
6498 }
6499 }
6500 \f
6501 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6502 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6503 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6504
6505 /* Calculate the checksum of a location expression. */
6506
6507 static inline void
6508 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6509 {
6510 int tem;
6511 inchash::hash hstate;
6512 hashval_t hash;
6513
6514 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6515 CHECKSUM (tem);
6516 hash_loc_operands (loc, hstate);
6517 hash = hstate.end();
6518 CHECKSUM (hash);
6519 }
6520
6521 /* Calculate the checksum of an attribute. */
6522
6523 static void
6524 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6525 {
6526 dw_loc_descr_ref loc;
6527 rtx r;
6528
6529 CHECKSUM (at->dw_attr);
6530
6531 /* We don't care that this was compiled with a different compiler
6532 snapshot; if the output is the same, that's what matters. */
6533 if (at->dw_attr == DW_AT_producer)
6534 return;
6535
6536 switch (AT_class (at))
6537 {
6538 case dw_val_class_const:
6539 case dw_val_class_const_implicit:
6540 CHECKSUM (at->dw_attr_val.v.val_int);
6541 break;
6542 case dw_val_class_unsigned_const:
6543 case dw_val_class_unsigned_const_implicit:
6544 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6545 break;
6546 case dw_val_class_const_double:
6547 CHECKSUM (at->dw_attr_val.v.val_double);
6548 break;
6549 case dw_val_class_wide_int:
6550 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6551 get_full_len (*at->dw_attr_val.v.val_wide)
6552 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6553 break;
6554 case dw_val_class_vec:
6555 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6556 (at->dw_attr_val.v.val_vec.length
6557 * at->dw_attr_val.v.val_vec.elt_size));
6558 break;
6559 case dw_val_class_flag:
6560 CHECKSUM (at->dw_attr_val.v.val_flag);
6561 break;
6562 case dw_val_class_str:
6563 CHECKSUM_STRING (AT_string (at));
6564 break;
6565
6566 case dw_val_class_addr:
6567 r = AT_addr (at);
6568 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6569 CHECKSUM_STRING (XSTR (r, 0));
6570 break;
6571
6572 case dw_val_class_offset:
6573 CHECKSUM (at->dw_attr_val.v.val_offset);
6574 break;
6575
6576 case dw_val_class_loc:
6577 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6578 loc_checksum (loc, ctx);
6579 break;
6580
6581 case dw_val_class_die_ref:
6582 die_checksum (AT_ref (at), ctx, mark);
6583 break;
6584
6585 case dw_val_class_fde_ref:
6586 case dw_val_class_vms_delta:
6587 case dw_val_class_lbl_id:
6588 case dw_val_class_lineptr:
6589 case dw_val_class_macptr:
6590 case dw_val_class_loclistsptr:
6591 case dw_val_class_high_pc:
6592 break;
6593
6594 case dw_val_class_file:
6595 case dw_val_class_file_implicit:
6596 CHECKSUM_STRING (AT_file (at)->filename);
6597 break;
6598
6599 case dw_val_class_data8:
6600 CHECKSUM (at->dw_attr_val.v.val_data8);
6601 break;
6602
6603 default:
6604 break;
6605 }
6606 }
6607
6608 /* Calculate the checksum of a DIE. */
6609
6610 static void
6611 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6612 {
6613 dw_die_ref c;
6614 dw_attr_node *a;
6615 unsigned ix;
6616
6617 /* To avoid infinite recursion. */
6618 if (die->die_mark)
6619 {
6620 CHECKSUM (die->die_mark);
6621 return;
6622 }
6623 die->die_mark = ++(*mark);
6624
6625 CHECKSUM (die->die_tag);
6626
6627 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6628 attr_checksum (a, ctx, mark);
6629
6630 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6631 }
6632
6633 #undef CHECKSUM
6634 #undef CHECKSUM_BLOCK
6635 #undef CHECKSUM_STRING
6636
6637 /* For DWARF-4 types, include the trailing NUL byte when checksumming strings. */
6638 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6639 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6640 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6641 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6642 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6643 #define CHECKSUM_ATTR(FOO) \
6644 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6645
6646 /* Calculate the checksum of a number in signed LEB128 format. */
6647
6648 static void
6649 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6650 {
6651 unsigned char byte;
6652 bool more;
6653
6654 while (1)
6655 {
6656 byte = (value & 0x7f);
6657 value >>= 7;
6658 more = !((value == 0 && (byte & 0x40) == 0)
6659 || (value == -1 && (byte & 0x40) != 0));
6660 if (more)
6661 byte |= 0x80;
6662 CHECKSUM (byte);
6663 if (!more)
6664 break;
6665 }
6666 }
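/* For instance, checksum_sleb128 (2, ctx) feeds the single byte 0x02 into
   CTX, while checksum_sleb128 (-129, ctx) feeds 0xff followed by 0x7e, the
   standard two-byte signed LEB128 encoding of -129.  */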
6667
6668 /* Calculate the checksum of a number in unsigned LEB128 format. */
6669
6670 static void
6671 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6672 {
6673 while (1)
6674 {
6675 unsigned char byte = (value & 0x7f);
6676 value >>= 7;
6677 if (value != 0)
6678 /* More bytes to follow. */
6679 byte |= 0x80;
6680 CHECKSUM (byte);
6681 if (value == 0)
6682 break;
6683 }
6684 }
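/* For instance, checksum_uleb128 (127, ctx) feeds the single byte 0x7f,
   checksum_uleb128 (128, ctx) feeds 0x80 then 0x01, and
   checksum_uleb128 (624485, ctx) feeds 0xe5 0x8e 0x26, the usual unsigned
   LEB128 encodings.  */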
6685
6686 /* Checksum the context of the DIE. This adds the names of any
6687 surrounding namespaces or structures to the checksum. */
6688
6689 static void
6690 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6691 {
6692 const char *name;
6693 dw_die_ref spec;
6694 int tag = die->die_tag;
6695
6696 if (tag != DW_TAG_namespace
6697 && tag != DW_TAG_structure_type
6698 && tag != DW_TAG_class_type)
6699 return;
6700
6701 name = get_AT_string (die, DW_AT_name);
6702
6703 spec = get_AT_ref (die, DW_AT_specification);
6704 if (spec != NULL)
6705 die = spec;
6706
6707 if (die->die_parent != NULL)
6708 checksum_die_context (die->die_parent, ctx);
6709
6710 CHECKSUM_ULEB128 ('C');
6711 CHECKSUM_ULEB128 (tag);
6712 if (name != NULL)
6713 CHECKSUM_STRING (name);
6714 }
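/* For example, for a member of struct S nested inside namespace N, this
   appends 'C' DW_TAG_namespace "N" and then 'C' DW_TAG_structure_type "S",
   outermost context first, mirroring the DWARF 4 type signature
   computation.  */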
6715
6716 /* Calculate the checksum of a location expression. */
6717
6718 static inline void
6719 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6720 {
6721 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6722 were emitted as a DW_FORM_sdata instead of a location expression. */
6723 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6724 {
6725 CHECKSUM_ULEB128 (DW_FORM_sdata);
6726 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6727 return;
6728 }
6729
6730 /* Otherwise, just checksum the raw location expression. */
6731 while (loc != NULL)
6732 {
6733 inchash::hash hstate;
6734 hashval_t hash;
6735
6736 CHECKSUM_ULEB128 (loc->dtprel);
6737 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6738 hash_loc_operands (loc, hstate);
6739 hash = hstate.end ();
6740 CHECKSUM (hash);
6741 loc = loc->dw_loc_next;
6742 }
6743 }
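/* The special case above keeps DWARF-4 type signatures stable: an attribute
   such as DW_AT_data_member_location may be emitted either as a plain
   constant or as a lone DW_OP_plus_uconst expression, and the two forms
   should checksum identically.  */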
6744
6745 /* Calculate the checksum of an attribute. */
6746
6747 static void
6748 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6749 struct md5_ctx *ctx, int *mark)
6750 {
6751 dw_loc_descr_ref loc;
6752 rtx r;
6753
6754 if (AT_class (at) == dw_val_class_die_ref)
6755 {
6756 dw_die_ref target_die = AT_ref (at);
6757
6758 /* For pointer and reference types, we checksum only the (qualified)
6759 name of the target type (if there is a name). For friend entries,
6760 we checksum only the (qualified) name of the target type or function.
6761 This allows the checksum to remain the same whether the target type
6762 is complete or not. */
6763 if ((at->dw_attr == DW_AT_type
6764 && (tag == DW_TAG_pointer_type
6765 || tag == DW_TAG_reference_type
6766 || tag == DW_TAG_rvalue_reference_type
6767 || tag == DW_TAG_ptr_to_member_type))
6768 || (at->dw_attr == DW_AT_friend
6769 && tag == DW_TAG_friend))
6770 {
6771 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6772
6773 if (name_attr != NULL)
6774 {
6775 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6776
6777 if (decl == NULL)
6778 decl = target_die;
6779 CHECKSUM_ULEB128 ('N');
6780 CHECKSUM_ULEB128 (at->dw_attr);
6781 if (decl->die_parent != NULL)
6782 checksum_die_context (decl->die_parent, ctx);
6783 CHECKSUM_ULEB128 ('E');
6784 CHECKSUM_STRING (AT_string (name_attr));
6785 return;
6786 }
6787 }
6788
6789 /* For all other references to another DIE, we check to see if the
6790 target DIE has already been visited. If it has, we emit a
6791 backward reference; if not, we descend recursively. */
6792 if (target_die->die_mark > 0)
6793 {
6794 CHECKSUM_ULEB128 ('R');
6795 CHECKSUM_ULEB128 (at->dw_attr);
6796 CHECKSUM_ULEB128 (target_die->die_mark);
6797 }
6798 else
6799 {
6800 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6801
6802 if (decl == NULL)
6803 decl = target_die;
6804 target_die->die_mark = ++(*mark);
6805 CHECKSUM_ULEB128 ('T');
6806 CHECKSUM_ULEB128 (at->dw_attr);
6807 if (decl->die_parent != NULL)
6808 checksum_die_context (decl->die_parent, ctx);
6809 die_checksum_ordered (target_die, ctx, mark);
6810 }
6811 return;
6812 }
6813
6814 CHECKSUM_ULEB128 ('A');
6815 CHECKSUM_ULEB128 (at->dw_attr);
6816
6817 switch (AT_class (at))
6818 {
6819 case dw_val_class_const:
6820 case dw_val_class_const_implicit:
6821 CHECKSUM_ULEB128 (DW_FORM_sdata);
6822 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6823 break;
6824
6825 case dw_val_class_unsigned_const:
6826 case dw_val_class_unsigned_const_implicit:
6827 CHECKSUM_ULEB128 (DW_FORM_sdata);
6828 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6829 break;
6830
6831 case dw_val_class_const_double:
6832 CHECKSUM_ULEB128 (DW_FORM_block);
6833 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6834 CHECKSUM (at->dw_attr_val.v.val_double);
6835 break;
6836
6837 case dw_val_class_wide_int:
6838 CHECKSUM_ULEB128 (DW_FORM_block);
6839 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6840 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6841 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6842 get_full_len (*at->dw_attr_val.v.val_wide)
6843 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6844 break;
6845
6846 case dw_val_class_vec:
6847 CHECKSUM_ULEB128 (DW_FORM_block);
6848 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6849 * at->dw_attr_val.v.val_vec.elt_size);
6850 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6851 (at->dw_attr_val.v.val_vec.length
6852 * at->dw_attr_val.v.val_vec.elt_size));
6853 break;
6854
6855 case dw_val_class_flag:
6856 CHECKSUM_ULEB128 (DW_FORM_flag);
6857 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6858 break;
6859
6860 case dw_val_class_str:
6861 CHECKSUM_ULEB128 (DW_FORM_string);
6862 CHECKSUM_STRING (AT_string (at));
6863 break;
6864
6865 case dw_val_class_addr:
6866 r = AT_addr (at);
6867 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6868 CHECKSUM_ULEB128 (DW_FORM_string);
6869 CHECKSUM_STRING (XSTR (r, 0));
6870 break;
6871
6872 case dw_val_class_offset:
6873 CHECKSUM_ULEB128 (DW_FORM_sdata);
6874 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6875 break;
6876
6877 case dw_val_class_loc:
6878 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6879 loc_checksum_ordered (loc, ctx);
6880 break;
6881
6882 case dw_val_class_fde_ref:
6883 case dw_val_class_lbl_id:
6884 case dw_val_class_lineptr:
6885 case dw_val_class_macptr:
6886 case dw_val_class_loclistsptr:
6887 case dw_val_class_high_pc:
6888 break;
6889
6890 case dw_val_class_file:
6891 case dw_val_class_file_implicit:
6892 CHECKSUM_ULEB128 (DW_FORM_string);
6893 CHECKSUM_STRING (AT_file (at)->filename);
6894 break;
6895
6896 case dw_val_class_data8:
6897 CHECKSUM (at->dw_attr_val.v.val_data8);
6898 break;
6899
6900 default:
6901 break;
6902 }
6903 }
6904
6905 struct checksum_attributes
6906 {
6907 dw_attr_node *at_name;
6908 dw_attr_node *at_type;
6909 dw_attr_node *at_friend;
6910 dw_attr_node *at_accessibility;
6911 dw_attr_node *at_address_class;
6912 dw_attr_node *at_alignment;
6913 dw_attr_node *at_allocated;
6914 dw_attr_node *at_artificial;
6915 dw_attr_node *at_associated;
6916 dw_attr_node *at_binary_scale;
6917 dw_attr_node *at_bit_offset;
6918 dw_attr_node *at_bit_size;
6919 dw_attr_node *at_bit_stride;
6920 dw_attr_node *at_byte_size;
6921 dw_attr_node *at_byte_stride;
6922 dw_attr_node *at_const_value;
6923 dw_attr_node *at_containing_type;
6924 dw_attr_node *at_count;
6925 dw_attr_node *at_data_location;
6926 dw_attr_node *at_data_member_location;
6927 dw_attr_node *at_decimal_scale;
6928 dw_attr_node *at_decimal_sign;
6929 dw_attr_node *at_default_value;
6930 dw_attr_node *at_digit_count;
6931 dw_attr_node *at_discr;
6932 dw_attr_node *at_discr_list;
6933 dw_attr_node *at_discr_value;
6934 dw_attr_node *at_encoding;
6935 dw_attr_node *at_endianity;
6936 dw_attr_node *at_explicit;
6937 dw_attr_node *at_is_optional;
6938 dw_attr_node *at_location;
6939 dw_attr_node *at_lower_bound;
6940 dw_attr_node *at_mutable;
6941 dw_attr_node *at_ordering;
6942 dw_attr_node *at_picture_string;
6943 dw_attr_node *at_prototyped;
6944 dw_attr_node *at_small;
6945 dw_attr_node *at_segment;
6946 dw_attr_node *at_string_length;
6947 dw_attr_node *at_string_length_bit_size;
6948 dw_attr_node *at_string_length_byte_size;
6949 dw_attr_node *at_threads_scaled;
6950 dw_attr_node *at_upper_bound;
6951 dw_attr_node *at_use_location;
6952 dw_attr_node *at_use_UTF8;
6953 dw_attr_node *at_variable_parameter;
6954 dw_attr_node *at_virtuality;
6955 dw_attr_node *at_visibility;
6956 dw_attr_node *at_vtable_elem_location;
6957 };
6958
6959 /* Collect the attributes that we will want to use for the checksum. */
6960
6961 static void
6962 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6963 {
6964 dw_attr_node *a;
6965 unsigned ix;
6966
6967 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6968 {
6969 switch (a->dw_attr)
6970 {
6971 case DW_AT_name:
6972 attrs->at_name = a;
6973 break;
6974 case DW_AT_type:
6975 attrs->at_type = a;
6976 break;
6977 case DW_AT_friend:
6978 attrs->at_friend = a;
6979 break;
6980 case DW_AT_accessibility:
6981 attrs->at_accessibility = a;
6982 break;
6983 case DW_AT_address_class:
6984 attrs->at_address_class = a;
6985 break;
6986 case DW_AT_alignment:
6987 attrs->at_alignment = a;
6988 break;
6989 case DW_AT_allocated:
6990 attrs->at_allocated = a;
6991 break;
6992 case DW_AT_artificial:
6993 attrs->at_artificial = a;
6994 break;
6995 case DW_AT_associated:
6996 attrs->at_associated = a;
6997 break;
6998 case DW_AT_binary_scale:
6999 attrs->at_binary_scale = a;
7000 break;
7001 case DW_AT_bit_offset:
7002 attrs->at_bit_offset = a;
7003 break;
7004 case DW_AT_bit_size:
7005 attrs->at_bit_size = a;
7006 break;
7007 case DW_AT_bit_stride:
7008 attrs->at_bit_stride = a;
7009 break;
7010 case DW_AT_byte_size:
7011 attrs->at_byte_size = a;
7012 break;
7013 case DW_AT_byte_stride:
7014 attrs->at_byte_stride = a;
7015 break;
7016 case DW_AT_const_value:
7017 attrs->at_const_value = a;
7018 break;
7019 case DW_AT_containing_type:
7020 attrs->at_containing_type = a;
7021 break;
7022 case DW_AT_count:
7023 attrs->at_count = a;
7024 break;
7025 case DW_AT_data_location:
7026 attrs->at_data_location = a;
7027 break;
7028 case DW_AT_data_member_location:
7029 attrs->at_data_member_location = a;
7030 break;
7031 case DW_AT_decimal_scale:
7032 attrs->at_decimal_scale = a;
7033 break;
7034 case DW_AT_decimal_sign:
7035 attrs->at_decimal_sign = a;
7036 break;
7037 case DW_AT_default_value:
7038 attrs->at_default_value = a;
7039 break;
7040 case DW_AT_digit_count:
7041 attrs->at_digit_count = a;
7042 break;
7043 case DW_AT_discr:
7044 attrs->at_discr = a;
7045 break;
7046 case DW_AT_discr_list:
7047 attrs->at_discr_list = a;
7048 break;
7049 case DW_AT_discr_value:
7050 attrs->at_discr_value = a;
7051 break;
7052 case DW_AT_encoding:
7053 attrs->at_encoding = a;
7054 break;
7055 case DW_AT_endianity:
7056 attrs->at_endianity = a;
7057 break;
7058 case DW_AT_explicit:
7059 attrs->at_explicit = a;
7060 break;
7061 case DW_AT_is_optional:
7062 attrs->at_is_optional = a;
7063 break;
7064 case DW_AT_location:
7065 attrs->at_location = a;
7066 break;
7067 case DW_AT_lower_bound:
7068 attrs->at_lower_bound = a;
7069 break;
7070 case DW_AT_mutable:
7071 attrs->at_mutable = a;
7072 break;
7073 case DW_AT_ordering:
7074 attrs->at_ordering = a;
7075 break;
7076 case DW_AT_picture_string:
7077 attrs->at_picture_string = a;
7078 break;
7079 case DW_AT_prototyped:
7080 attrs->at_prototyped = a;
7081 break;
7082 case DW_AT_small:
7083 attrs->at_small = a;
7084 break;
7085 case DW_AT_segment:
7086 attrs->at_segment = a;
7087 break;
7088 case DW_AT_string_length:
7089 attrs->at_string_length = a;
7090 break;
7091 case DW_AT_string_length_bit_size:
7092 attrs->at_string_length_bit_size = a;
7093 break;
7094 case DW_AT_string_length_byte_size:
7095 attrs->at_string_length_byte_size = a;
7096 break;
7097 case DW_AT_threads_scaled:
7098 attrs->at_threads_scaled = a;
7099 break;
7100 case DW_AT_upper_bound:
7101 attrs->at_upper_bound = a;
7102 break;
7103 case DW_AT_use_location:
7104 attrs->at_use_location = a;
7105 break;
7106 case DW_AT_use_UTF8:
7107 attrs->at_use_UTF8 = a;
7108 break;
7109 case DW_AT_variable_parameter:
7110 attrs->at_variable_parameter = a;
7111 break;
7112 case DW_AT_virtuality:
7113 attrs->at_virtuality = a;
7114 break;
7115 case DW_AT_visibility:
7116 attrs->at_visibility = a;
7117 break;
7118 case DW_AT_vtable_elem_location:
7119 attrs->at_vtable_elem_location = a;
7120 break;
7121 default:
7122 break;
7123 }
7124 }
7125 }
7126
7127 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7128
7129 static void
7130 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7131 {
7132 dw_die_ref c;
7133 dw_die_ref decl;
7134 struct checksum_attributes attrs;
7135
7136 CHECKSUM_ULEB128 ('D');
7137 CHECKSUM_ULEB128 (die->die_tag);
7138
7139 memset (&attrs, 0, sizeof (attrs));
7140
7141 decl = get_AT_ref (die, DW_AT_specification);
7142 if (decl != NULL)
7143 collect_checksum_attributes (&attrs, decl);
7144 collect_checksum_attributes (&attrs, die);
7145
7146 CHECKSUM_ATTR (attrs.at_name);
7147 CHECKSUM_ATTR (attrs.at_accessibility);
7148 CHECKSUM_ATTR (attrs.at_address_class);
7149 CHECKSUM_ATTR (attrs.at_allocated);
7150 CHECKSUM_ATTR (attrs.at_artificial);
7151 CHECKSUM_ATTR (attrs.at_associated);
7152 CHECKSUM_ATTR (attrs.at_binary_scale);
7153 CHECKSUM_ATTR (attrs.at_bit_offset);
7154 CHECKSUM_ATTR (attrs.at_bit_size);
7155 CHECKSUM_ATTR (attrs.at_bit_stride);
7156 CHECKSUM_ATTR (attrs.at_byte_size);
7157 CHECKSUM_ATTR (attrs.at_byte_stride);
7158 CHECKSUM_ATTR (attrs.at_const_value);
7159 CHECKSUM_ATTR (attrs.at_containing_type);
7160 CHECKSUM_ATTR (attrs.at_count);
7161 CHECKSUM_ATTR (attrs.at_data_location);
7162 CHECKSUM_ATTR (attrs.at_data_member_location);
7163 CHECKSUM_ATTR (attrs.at_decimal_scale);
7164 CHECKSUM_ATTR (attrs.at_decimal_sign);
7165 CHECKSUM_ATTR (attrs.at_default_value);
7166 CHECKSUM_ATTR (attrs.at_digit_count);
7167 CHECKSUM_ATTR (attrs.at_discr);
7168 CHECKSUM_ATTR (attrs.at_discr_list);
7169 CHECKSUM_ATTR (attrs.at_discr_value);
7170 CHECKSUM_ATTR (attrs.at_encoding);
7171 CHECKSUM_ATTR (attrs.at_endianity);
7172 CHECKSUM_ATTR (attrs.at_explicit);
7173 CHECKSUM_ATTR (attrs.at_is_optional);
7174 CHECKSUM_ATTR (attrs.at_location);
7175 CHECKSUM_ATTR (attrs.at_lower_bound);
7176 CHECKSUM_ATTR (attrs.at_mutable);
7177 CHECKSUM_ATTR (attrs.at_ordering);
7178 CHECKSUM_ATTR (attrs.at_picture_string);
7179 CHECKSUM_ATTR (attrs.at_prototyped);
7180 CHECKSUM_ATTR (attrs.at_small);
7181 CHECKSUM_ATTR (attrs.at_segment);
7182 CHECKSUM_ATTR (attrs.at_string_length);
7183 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7184 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7185 CHECKSUM_ATTR (attrs.at_threads_scaled);
7186 CHECKSUM_ATTR (attrs.at_upper_bound);
7187 CHECKSUM_ATTR (attrs.at_use_location);
7188 CHECKSUM_ATTR (attrs.at_use_UTF8);
7189 CHECKSUM_ATTR (attrs.at_variable_parameter);
7190 CHECKSUM_ATTR (attrs.at_virtuality);
7191 CHECKSUM_ATTR (attrs.at_visibility);
7192 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7193 CHECKSUM_ATTR (attrs.at_type);
7194 CHECKSUM_ATTR (attrs.at_friend);
7195 CHECKSUM_ATTR (attrs.at_alignment);
7196
7197 /* Checksum the child DIEs. */
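  /* Note: die_child points at the last child of a circular sibling list,
     so advancing C at the top of the loop yields the first child, and the
     loop ends once the last child has been processed.  */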
7198 c = die->die_child;
7199 if (c) do {
7200 dw_attr_node *name_attr;
7201
7202 c = c->die_sib;
7203 name_attr = get_AT (c, DW_AT_name);
7204 if (is_template_instantiation (c))
7205 {
7206 /* Ignore instantiations of member type and function templates. */
7207 }
7208 else if (name_attr != NULL
7209 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7210 {
7211 /* Use a shallow checksum for named nested types and member
7212 functions. */
7213 CHECKSUM_ULEB128 ('S');
7214 CHECKSUM_ULEB128 (c->die_tag);
7215 CHECKSUM_STRING (AT_string (name_attr));
7216 }
7217 else
7218 {
7219 /* Use a deep checksum for other children. */
7220 /* Mark this DIE so it gets processed when unmarking. */
7221 if (c->die_mark == 0)
7222 c->die_mark = -1;
7223 die_checksum_ordered (c, ctx, mark);
7224 }
7225 } while (c != die->die_child);
7226
7227 CHECKSUM_ULEB128 (0);
7228 }
7229
7230 /* Add a type name and tag to a hash. */
7231 static void
7232 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7233 {
7234 CHECKSUM_ULEB128 (tag);
7235 CHECKSUM_STRING (name);
7236 }
7237
7238 #undef CHECKSUM
7239 #undef CHECKSUM_STRING
7240 #undef CHECKSUM_ATTR
7241 #undef CHECKSUM_LEB128
7242 #undef CHECKSUM_ULEB128
7243
7244 /* Generate the type signature for DIE. This is computed by generating an
7245 MD5 checksum over the DIE's tag, its relevant attributes, and its
7246 children. Attributes that are references to other DIEs are processed
7247 by recursion, using the MARK field to prevent infinite recursion.
7248 If the DIE is nested inside a namespace or another type, we also
7249 need to include that context in the signature. The lower 64 bits
7250 of the resulting MD5 checksum comprise the signature. */
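/* (The name-plus-context checksum computed first below feeds only
   DW_AT_GNU_odr_signature; the full signature computed here is what the
   type unit header and DW_FORM_ref_sig8 references carry.)  */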
7251
7252 static void
7253 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7254 {
7255 int mark;
7256 const char *name;
7257 unsigned char checksum[16];
7258 struct md5_ctx ctx;
7259 dw_die_ref decl;
7260 dw_die_ref parent;
7261
7262 name = get_AT_string (die, DW_AT_name);
7263 decl = get_AT_ref (die, DW_AT_specification);
7264 parent = get_die_parent (die);
7265
7266 /* First, compute a signature for just the type name (and its surrounding
7267 context, if any). This is stored in the type unit DIE for link-time
7268 ODR (one-definition rule) checking. */
7269
7270 if (is_cxx () && name != NULL)
7271 {
7272 md5_init_ctx (&ctx);
7273
7274 /* Checksum the names of surrounding namespaces and structures. */
7275 if (parent != NULL)
7276 checksum_die_context (parent, &ctx);
7277
7278 /* Checksum the current DIE. */
7279 die_odr_checksum (die->die_tag, name, &ctx);
7280 md5_finish_ctx (&ctx, checksum);
7281
7282 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7283 }
7284
7285 /* Next, compute the complete type signature. */
7286
7287 md5_init_ctx (&ctx);
7288 mark = 1;
7289 die->die_mark = mark;
7290
7291 /* Checksum the names of surrounding namespaces and structures. */
7292 if (parent != NULL)
7293 checksum_die_context (parent, &ctx);
7294
7295 /* Checksum the DIE and its children. */
7296 die_checksum_ordered (die, &ctx, &mark);
7297 unmark_all_dies (die);
7298 md5_finish_ctx (&ctx, checksum);
7299
7300 /* Store the signature in the type node and link the type DIE and the
7301 type node together. */
7302 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7303 DWARF_TYPE_SIGNATURE_SIZE);
7304 die->comdat_type_p = true;
7305 die->die_id.die_type_node = type_node;
7306 type_node->type_die = die;
7307
7308 /* If the DIE is a specification, link its declaration to the type node
7309 as well. */
7310 if (decl != NULL)
7311 {
7312 decl->comdat_type_p = true;
7313 decl->die_id.die_type_node = type_node;
7314 }
7315 }
7316
7317 /* Do the location expressions look the same? */
7318 static inline int
7319 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7320 {
7321 return loc1->dw_loc_opc == loc2->dw_loc_opc
7322 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7323 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7324 }
7325
7326 /* Do the values look the same? */
7327 static int
7328 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7329 {
7330 dw_loc_descr_ref loc1, loc2;
7331 rtx r1, r2;
7332
7333 if (v1->val_class != v2->val_class)
7334 return 0;
7335
7336 switch (v1->val_class)
7337 {
7338 case dw_val_class_const:
7339 case dw_val_class_const_implicit:
7340 return v1->v.val_int == v2->v.val_int;
7341 case dw_val_class_unsigned_const:
7342 case dw_val_class_unsigned_const_implicit:
7343 return v1->v.val_unsigned == v2->v.val_unsigned;
7344 case dw_val_class_const_double:
7345 return v1->v.val_double.high == v2->v.val_double.high
7346 && v1->v.val_double.low == v2->v.val_double.low;
7347 case dw_val_class_wide_int:
7348 return *v1->v.val_wide == *v2->v.val_wide;
7349 case dw_val_class_vec:
7350 if (v1->v.val_vec.length != v2->v.val_vec.length
7351 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7352 return 0;
7353 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7354 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7355 return 0;
7356 return 1;
7357 case dw_val_class_flag:
7358 return v1->v.val_flag == v2->v.val_flag;
7359 case dw_val_class_str:
7360 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7361
7362 case dw_val_class_addr:
7363 r1 = v1->v.val_addr;
7364 r2 = v2->v.val_addr;
7365 if (GET_CODE (r1) != GET_CODE (r2))
7366 return 0;
7367 return rtx_equal_p (r1, r2);
7368
7369 case dw_val_class_offset:
7370 return v1->v.val_offset == v2->v.val_offset;
7371
7372 case dw_val_class_loc:
7373 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7374 loc1 && loc2;
7375 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7376 if (!same_loc_p (loc1, loc2, mark))
7377 return 0;
7378 return !loc1 && !loc2;
7379
7380 case dw_val_class_die_ref:
7381 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7382
7383 case dw_val_class_fde_ref:
7384 case dw_val_class_vms_delta:
7385 case dw_val_class_lbl_id:
7386 case dw_val_class_lineptr:
7387 case dw_val_class_macptr:
7388 case dw_val_class_loclistsptr:
7389 case dw_val_class_high_pc:
7390 return 1;
7391
7392 case dw_val_class_file:
7393 case dw_val_class_file_implicit:
7394 return v1->v.val_file == v2->v.val_file;
7395
7396 case dw_val_class_data8:
7397 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7398
7399 default:
7400 return 1;
7401 }
7402 }
7403
7404 /* Do the attributes look the same? */
7405
7406 static int
7407 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7408 {
7409 if (at1->dw_attr != at2->dw_attr)
7410 return 0;
7411
7412 /* We don't care that this was compiled with a different compiler
7413 snapshot; if the output is the same, that's what matters. */
7414 if (at1->dw_attr == DW_AT_producer)
7415 return 1;
7416
7417 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7418 }
7419
7420 /* Do the dies look the same? */
7421
7422 static int
7423 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7424 {
7425 dw_die_ref c1, c2;
7426 dw_attr_node *a1;
7427 unsigned ix;
7428
7429 /* To avoid infinite recursion. */
7430 if (die1->die_mark)
7431 return die1->die_mark == die2->die_mark;
7432 die1->die_mark = die2->die_mark = ++(*mark);
7433
7434 if (die1->die_tag != die2->die_tag)
7435 return 0;
7436
7437 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7438 return 0;
7439
7440 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7441 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7442 return 0;
7443
7444 c1 = die1->die_child;
7445 c2 = die2->die_child;
7446 if (! c1)
7447 {
7448 if (c2)
7449 return 0;
7450 }
7451 else
7452 for (;;)
7453 {
7454 if (!same_die_p (c1, c2, mark))
7455 return 0;
7456 c1 = c1->die_sib;
7457 c2 = c2->die_sib;
7458 if (c1 == die1->die_child)
7459 {
7460 if (c2 == die2->die_child)
7461 break;
7462 else
7463 return 0;
7464 }
7465 }
7466
7467 return 1;
7468 }
7469
7470 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7471 children, and set die_symbol. */
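/* As a rough illustration, a unit compiled from foo.c typically ends up
   with a die_symbol along the lines of "foo_c_" followed by eight hex
   digits of the checksum; the exact spelling of the prefix depends on
   what clean_symbol_name does to characters invalid in symbols.  */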
7472
7473 static void
7474 compute_comp_unit_symbol (dw_die_ref unit_die)
7475 {
7476 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7477 const char *base = die_name ? lbasename (die_name) : "anonymous";
7478 char *name = XALLOCAVEC (char, strlen (base) + 64);
7479 char *p;
7480 int i, mark;
7481 unsigned char checksum[16];
7482 struct md5_ctx ctx;
7483
7484 /* Compute the checksum of the DIE, then append part of it as hex digits to
7485 the base filename of the unit. */
7486
7487 md5_init_ctx (&ctx);
7488 mark = 0;
7489 die_checksum (unit_die, &ctx, &mark);
7490 unmark_all_dies (unit_die);
7491 md5_finish_ctx (&ctx, checksum);
7492
7493 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7494 not start with a letter but with any character valid in a filename, and
7495 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7496 character is not a letter. */
7497 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7498 clean_symbol_name (name);
7499
7500 p = name + strlen (name);
7501 for (i = 0; i < 4; i++)
7502 {
7503 sprintf (p, "%.2x", checksum[i]);
7504 p += 2;
7505 }
7506
7507 unit_die->die_id.die_symbol = xstrdup (name);
7508 }
7509
7510 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7511
7512 static int
7513 is_type_die (dw_die_ref die)
7514 {
7515 switch (die->die_tag)
7516 {
7517 case DW_TAG_array_type:
7518 case DW_TAG_class_type:
7519 case DW_TAG_interface_type:
7520 case DW_TAG_enumeration_type:
7521 case DW_TAG_pointer_type:
7522 case DW_TAG_reference_type:
7523 case DW_TAG_rvalue_reference_type:
7524 case DW_TAG_string_type:
7525 case DW_TAG_structure_type:
7526 case DW_TAG_subroutine_type:
7527 case DW_TAG_union_type:
7528 case DW_TAG_ptr_to_member_type:
7529 case DW_TAG_set_type:
7530 case DW_TAG_subrange_type:
7531 case DW_TAG_base_type:
7532 case DW_TAG_const_type:
7533 case DW_TAG_file_type:
7534 case DW_TAG_packed_type:
7535 case DW_TAG_volatile_type:
7536 case DW_TAG_typedef:
7537 return 1;
7538 default:
7539 return 0;
7540 }
7541 }
7542
7543 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7544 Basically, we want to choose the bits that are likely to be shared between
7545 compilations (types) and leave out the bits that are specific to individual
7546 compilations (functions). */
7547
7548 static int
7549 is_comdat_die (dw_die_ref c)
7550 {
7551 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7552 we do for stabs. The advantage is a greater likelihood of sharing between
7553 objects that don't include headers in the same order (and therefore would
7554 put the base types in a different comdat). jason 8/28/00 */
7555
7556 if (c->die_tag == DW_TAG_base_type)
7557 return 0;
7558
7559 if (c->die_tag == DW_TAG_pointer_type
7560 || c->die_tag == DW_TAG_reference_type
7561 || c->die_tag == DW_TAG_rvalue_reference_type
7562 || c->die_tag == DW_TAG_const_type
7563 || c->die_tag == DW_TAG_volatile_type)
7564 {
7565 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7566
7567 return t ? is_comdat_die (t) : 0;
7568 }
7569
7570 return is_type_die (c);
7571 }
7572
7573 /* Returns true iff C is a compile-unit DIE. */
7574
7575 static inline bool
7576 is_cu_die (dw_die_ref c)
7577 {
7578 return c && (c->die_tag == DW_TAG_compile_unit
7579 || c->die_tag == DW_TAG_skeleton_unit);
7580 }
7581
7582 /* Returns true iff C is a unit DIE of some sort. */
7583
7584 static inline bool
7585 is_unit_die (dw_die_ref c)
7586 {
7587 return c && (c->die_tag == DW_TAG_compile_unit
7588 || c->die_tag == DW_TAG_partial_unit
7589 || c->die_tag == DW_TAG_type_unit
7590 || c->die_tag == DW_TAG_skeleton_unit);
7591 }
7592
7593 /* Returns true iff C is a namespace DIE. */
7594
7595 static inline bool
7596 is_namespace_die (dw_die_ref c)
7597 {
7598 return c && c->die_tag == DW_TAG_namespace;
7599 }
7600
7601 /* Returns true iff C is a class or structure DIE. */
7602
7603 static inline bool
7604 is_class_die (dw_die_ref c)
7605 {
7606 return c && (c->die_tag == DW_TAG_class_type
7607 || c->die_tag == DW_TAG_structure_type);
7608 }
7609
7610 /* Return non-zero if this DIE is a template parameter. */
7611
7612 static inline bool
7613 is_template_parameter (dw_die_ref die)
7614 {
7615 switch (die->die_tag)
7616 {
7617 case DW_TAG_template_type_param:
7618 case DW_TAG_template_value_param:
7619 case DW_TAG_GNU_template_template_param:
7620 case DW_TAG_GNU_template_parameter_pack:
7621 return true;
7622 default:
7623 return false;
7624 }
7625 }
7626
7627 /* Return non-zero if this DIE represents a template instantiation. */
7628
7629 static inline bool
7630 is_template_instantiation (dw_die_ref die)
7631 {
7632 dw_die_ref c;
7633
7634 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7635 return false;
7636 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7637 return false;
7638 }
7639
7640 static char *
7641 gen_internal_sym (const char *prefix)
7642 {
7643 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7644
7645 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7646 return xstrdup (buf);
7647 }
7648
7649 /* Return non-zero if this DIE is a declaration. */
7650
7651 static int
7652 is_declaration_die (dw_die_ref die)
7653 {
7654 dw_attr_node *a;
7655 unsigned ix;
7656
7657 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7658 if (a->dw_attr == DW_AT_declaration)
7659 return 1;
7660
7661 return 0;
7662 }
7663
7664 /* Return non-zero if this DIE is nested inside a subprogram. */
7665
7666 static int
7667 is_nested_in_subprogram (dw_die_ref die)
7668 {
7669 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7670
7671 if (decl == NULL)
7672 decl = die;
7673 return local_scope_p (decl);
7674 }
7675
7676 /* Return non-zero if this DIE contains a defining declaration of a
7677 subprogram. */
7678
7679 static int
7680 contains_subprogram_definition (dw_die_ref die)
7681 {
7682 dw_die_ref c;
7683
7684 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7685 return 1;
7686 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7687 return 0;
7688 }
7689
7690 /* Return non-zero if this is a type DIE that should be moved to a
7691 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7692 unit type. */
7693
7694 static int
7695 should_move_die_to_comdat (dw_die_ref die)
7696 {
7697 switch (die->die_tag)
7698 {
7699 case DW_TAG_class_type:
7700 case DW_TAG_structure_type:
7701 case DW_TAG_enumeration_type:
7702 case DW_TAG_union_type:
7703 /* Don't move declarations, inlined instances, types nested in a
7704 subprogram, or types that contain subprogram definitions. */
7705 if (is_declaration_die (die)
7706 || get_AT (die, DW_AT_abstract_origin)
7707 || is_nested_in_subprogram (die)
7708 || contains_subprogram_definition (die))
7709 return 0;
7710 return 1;
7711 case DW_TAG_array_type:
7712 case DW_TAG_interface_type:
7713 case DW_TAG_pointer_type:
7714 case DW_TAG_reference_type:
7715 case DW_TAG_rvalue_reference_type:
7716 case DW_TAG_string_type:
7717 case DW_TAG_subroutine_type:
7718 case DW_TAG_ptr_to_member_type:
7719 case DW_TAG_set_type:
7720 case DW_TAG_subrange_type:
7721 case DW_TAG_base_type:
7722 case DW_TAG_const_type:
7723 case DW_TAG_file_type:
7724 case DW_TAG_packed_type:
7725 case DW_TAG_volatile_type:
7726 case DW_TAG_typedef:
7727 default:
7728 return 0;
7729 }
7730 }
7731
7732 /* Make a clone of DIE. */
7733
7734 static dw_die_ref
7735 clone_die (dw_die_ref die)
7736 {
7737 dw_die_ref clone = new_die_raw (die->die_tag);
7738 dw_attr_node *a;
7739 unsigned ix;
7740
7741 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7742 add_dwarf_attr (clone, a);
7743
7744 return clone;
7745 }
7746
7747 /* Make a clone of the tree rooted at DIE. */
7748
7749 static dw_die_ref
7750 clone_tree (dw_die_ref die)
7751 {
7752 dw_die_ref c;
7753 dw_die_ref clone = clone_die (die);
7754
7755 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7756
7757 return clone;
7758 }
7759
7760 /* Make a clone of DIE as a declaration. */
7761
7762 static dw_die_ref
7763 clone_as_declaration (dw_die_ref die)
7764 {
7765 dw_die_ref clone;
7766 dw_die_ref decl;
7767 dw_attr_node *a;
7768 unsigned ix;
7769
7770 /* If the DIE is already a declaration, just clone it. */
7771 if (is_declaration_die (die))
7772 return clone_die (die);
7773
7774 /* If the DIE is a specification, just clone its declaration DIE. */
7775 decl = get_AT_ref (die, DW_AT_specification);
7776 if (decl != NULL)
7777 {
7778 clone = clone_die (decl);
7779 if (die->comdat_type_p)
7780 add_AT_die_ref (clone, DW_AT_signature, die);
7781 return clone;
7782 }
7783
7784 clone = new_die_raw (die->die_tag);
7785
7786 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7787 {
7788 /* We don't want to copy over all attributes.
7789 For example we don't want DW_AT_byte_size because otherwise we will no
7790 longer have a declaration and GDB will treat it as a definition. */
7791
7792 switch (a->dw_attr)
7793 {
7794 case DW_AT_abstract_origin:
7795 case DW_AT_artificial:
7796 case DW_AT_containing_type:
7797 case DW_AT_external:
7798 case DW_AT_name:
7799 case DW_AT_type:
7800 case DW_AT_virtuality:
7801 case DW_AT_linkage_name:
7802 case DW_AT_MIPS_linkage_name:
7803 add_dwarf_attr (clone, a);
7804 break;
7805 case DW_AT_byte_size:
7806 case DW_AT_alignment:
7807 default:
7808 break;
7809 }
7810 }
7811
7812 if (die->comdat_type_p)
7813 add_AT_die_ref (clone, DW_AT_signature, die);
7814
7815 add_AT_flag (clone, DW_AT_declaration, 1);
7816 return clone;
7817 }
7818
7819
7820 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7821
7822 struct decl_table_entry
7823 {
7824 dw_die_ref orig;
7825 dw_die_ref copy;
7826 };
7827
7828 /* Helpers to manipulate hash table of copied declarations. */
7829
7830 /* Hashtable helpers. */
7831
7832 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7833 {
7834 typedef die_struct *compare_type;
7835 static inline hashval_t hash (const decl_table_entry *);
7836 static inline bool equal (const decl_table_entry *, const die_struct *);
7837 };
7838
7839 inline hashval_t
7840 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7841 {
7842 return htab_hash_pointer (entry->orig);
7843 }
7844
7845 inline bool
7846 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7847 const die_struct *entry2)
7848 {
7849 return entry1->orig == entry2;
7850 }
7851
7852 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7853
7854 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7855 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7856 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7857 to check if the ancestor has already been copied into UNIT. */
7858
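/* For example, for a DIE describing N::C::f nested in namespace N and
   class C, declaration-only clones of N, C and f itself are chained
   under UNIT, and the clone of f is returned.  */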
7859 static dw_die_ref
7860 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7861 decl_hash_type *decl_table)
7862 {
7863 dw_die_ref parent = die->die_parent;
7864 dw_die_ref new_parent = unit;
7865 dw_die_ref copy;
7866 decl_table_entry **slot = NULL;
7867 struct decl_table_entry *entry = NULL;
7868
7869 if (decl_table)
7870 {
7871 /* Check if the entry has already been copied to UNIT. */
7872 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7873 INSERT);
7874 if (*slot != HTAB_EMPTY_ENTRY)
7875 {
7876 entry = *slot;
7877 return entry->copy;
7878 }
7879
7880 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7881 entry = XCNEW (struct decl_table_entry);
7882 entry->orig = die;
7883 entry->copy = NULL;
7884 *slot = entry;
7885 }
7886
7887 if (parent != NULL)
7888 {
7889 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7890 if (spec != NULL)
7891 parent = spec;
7892 if (!is_unit_die (parent))
7893 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7894 }
7895
7896 copy = clone_as_declaration (die);
7897 add_child_die (new_parent, copy);
7898
7899 if (decl_table)
7900 {
7901 /* Record the pointer to the copy. */
7902 entry->copy = copy;
7903 }
7904
7905 return copy;
7906 }
7907 /* Copy the declaration context to the new type unit DIE. This includes
7908 any surrounding namespace or type declarations. If the DIE has an
7909 AT_specification attribute, it also includes attributes and children
7910 attached to the specification, and returns a pointer to the original
7911 parent of the declaration DIE. Returns NULL otherwise. */
7912
7913 static dw_die_ref
7914 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7915 {
7916 dw_die_ref decl;
7917 dw_die_ref new_decl;
7918 dw_die_ref orig_parent = NULL;
7919
7920 decl = get_AT_ref (die, DW_AT_specification);
7921 if (decl == NULL)
7922 decl = die;
7923 else
7924 {
7925 unsigned ix;
7926 dw_die_ref c;
7927 dw_attr_node *a;
7928
7929 /* The original DIE will be changed to a declaration, and must
7930 be moved to be a child of the original declaration DIE. */
7931 orig_parent = decl->die_parent;
7932
7933 /* Copy the type node pointer from the new DIE to the original
7934 declaration DIE so we can forward references later. */
7935 decl->comdat_type_p = true;
7936 decl->die_id.die_type_node = die->die_id.die_type_node;
7937
7938 remove_AT (die, DW_AT_specification);
7939
7940 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7941 {
7942 if (a->dw_attr != DW_AT_name
7943 && a->dw_attr != DW_AT_declaration
7944 && a->dw_attr != DW_AT_external)
7945 add_dwarf_attr (die, a);
7946 }
7947
7948 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7949 }
7950
7951 if (decl->die_parent != NULL
7952 && !is_unit_die (decl->die_parent))
7953 {
7954 new_decl = copy_ancestor_tree (unit, decl, NULL);
7955 if (new_decl != NULL)
7956 {
7957 remove_AT (new_decl, DW_AT_signature);
7958 add_AT_specification (die, new_decl);
7959 }
7960 }
7961
7962 return orig_parent;
7963 }
7964
7965 /* Generate the skeleton ancestor tree for the given NODE, then clone
7966 the DIE and add the clone into the tree. */
7967
7968 static void
7969 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7970 {
7971 if (node->new_die != NULL)
7972 return;
7973
7974 node->new_die = clone_as_declaration (node->old_die);
7975
7976 if (node->parent != NULL)
7977 {
7978 generate_skeleton_ancestor_tree (node->parent);
7979 add_child_die (node->parent->new_die, node->new_die);
7980 }
7981 }
7982
7983 /* Generate a skeleton tree of DIEs containing any declarations that are
7984 found in the original tree. We traverse the tree looking for declaration
7985 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7986
7987 static void
7988 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7989 {
7990 skeleton_chain_node node;
7991 dw_die_ref c;
7992 dw_die_ref first;
7993 dw_die_ref prev = NULL;
7994 dw_die_ref next = NULL;
7995
7996 node.parent = parent;
7997
7998 first = c = parent->old_die->die_child;
7999 if (c)
8000 next = c->die_sib;
8001 if (c) do {
8002 if (prev == NULL || prev->die_sib == c)
8003 prev = c;
8004 c = next;
8005 next = (c == first ? NULL : c->die_sib);
8006 node.old_die = c;
8007 node.new_die = NULL;
8008 if (is_declaration_die (c))
8009 {
8010 if (is_template_instantiation (c))
8011 {
8012 /* Instantiated templates do not need to be cloned into the
8013 type unit. Just move the DIE and its children back to
8014 the skeleton tree (in the main CU). */
8015 remove_child_with_prev (c, prev);
8016 add_child_die (parent->new_die, c);
8017 c = prev;
8018 }
8019 else if (c->comdat_type_p)
8020 {
8021 /* This is the skeleton of a type broken out earlier by
8022 break_out_comdat_types. Clone the existing DIE, but keep the
8023 children under the original (which is in the main CU). */
8024 dw_die_ref clone = clone_die (c);
8025
8026 replace_child (c, clone, prev);
8027 generate_skeleton_ancestor_tree (parent);
8028 add_child_die (parent->new_die, c);
8029 c = clone;
8030 continue;
8031 }
8032 else
8033 {
8034 /* Clone the existing DIE, move the original to the skeleton
8035 tree (which is in the main CU), and put the clone, with
8036 all the original's children, where the original came from
8037 (which is about to be moved to the type unit). */
8038 dw_die_ref clone = clone_die (c);
8039 move_all_children (c, clone);
8040
8041 /* If the original has a DW_AT_object_pointer attribute,
8042 it would now point to a child DIE just moved to the
8043 cloned tree, so we need to remove that attribute from
8044 the original. */
8045 remove_AT (c, DW_AT_object_pointer);
8046
8047 replace_child (c, clone, prev);
8048 generate_skeleton_ancestor_tree (parent);
8049 add_child_die (parent->new_die, c);
8050 node.old_die = clone;
8051 node.new_die = c;
8052 c = clone;
8053 }
8054 }
8055 generate_skeleton_bottom_up (&node);
8056 } while (next != NULL);
8057 }
8058
8059 /* Wrapper function for generate_skeleton_bottom_up. */
8060
8061 static dw_die_ref
8062 generate_skeleton (dw_die_ref die)
8063 {
8064 skeleton_chain_node node;
8065
8066 node.old_die = die;
8067 node.new_die = NULL;
8068 node.parent = NULL;
8069
8070 /* If this type definition is nested inside another type,
8071 and is not an instantiation of a template, always leave
8072 at least a declaration in its place. */
8073 if (die->die_parent != NULL
8074 && is_type_die (die->die_parent)
8075 && !is_template_instantiation (die))
8076 node.new_die = clone_as_declaration (die);
8077
8078 generate_skeleton_bottom_up (&node);
8079 return node.new_die;
8080 }
8081
8082 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8083 declaration. The original DIE is moved to a new compile unit so that
8084 existing references to it follow it to the new location. If any of the
8085 original DIE's descendants is a declaration, we need to replace the
8086 original DIE with a skeleton tree and move the declarations back into the
8087 skeleton tree. */
8088
8089 static dw_die_ref
8090 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8091 dw_die_ref prev)
8092 {
8093 dw_die_ref skeleton, orig_parent;
8094
8095 /* Copy the declaration context to the type unit DIE. If the returned
8096 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8097 that DIE. */
8098 orig_parent = copy_declaration_context (unit, child);
8099
8100 skeleton = generate_skeleton (child);
8101 if (skeleton == NULL)
8102 remove_child_with_prev (child, prev);
8103 else
8104 {
8105 skeleton->comdat_type_p = true;
8106 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8107
8108 /* If the original DIE was a specification, we need to put
8109 the skeleton under the parent DIE of the declaration.
8110 This leaves the original declaration in the tree, but
8111 it will be pruned later since there are no longer any
8112 references to it. */
8113 if (orig_parent != NULL)
8114 {
8115 remove_child_with_prev (child, prev);
8116 add_child_die (orig_parent, skeleton);
8117 }
8118 else
8119 replace_child (child, skeleton, prev);
8120 }
8121
8122 return skeleton;
8123 }
8124
8125 static void
8126 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8127 comdat_type_node *type_node,
8128 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8129
8130 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8131 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8132 DWARF procedure references in the DW_AT_location attribute. */
8133
8134 static dw_die_ref
8135 copy_dwarf_procedure (dw_die_ref die,
8136 comdat_type_node *type_node,
8137 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8138 {
8139 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8140
8141 /* DWARF procedures are not supposed to have children... */
8142 gcc_assert (die->die_child == NULL);
8143
8144 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8145 gcc_assert (vec_safe_length (die->die_attr) == 1
8146 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8147
8148 /* Do not copy DWARF procedures more than once. */
8149 bool existed;
8150 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8151 if (existed)
8152 return die_copy;
8153
8154 die_copy = clone_die (die);
8155 add_child_die (type_node->root_die, die_copy);
8156 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8157 return die_copy;
8158 }
8159
8160 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8161 procedures in DIE's attributes. */
8162
8163 static void
8164 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8165 comdat_type_node *type_node,
8166 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8167 {
8168 dw_attr_node *a;
8169 unsigned i;
8170
8171 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8172 {
8173 dw_loc_descr_ref loc;
8174
8175 if (a->dw_attr_val.val_class != dw_val_class_loc)
8176 continue;
8177
8178 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8179 {
8180 switch (loc->dw_loc_opc)
8181 {
8182 case DW_OP_call2:
8183 case DW_OP_call4:
8184 case DW_OP_call_ref:
8185 gcc_assert (loc->dw_loc_oprnd1.val_class
8186 == dw_val_class_die_ref);
8187 loc->dw_loc_oprnd1.v.val_die_ref.die
8188 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8189 type_node,
8190 copied_dwarf_procs);
8191
8192 default:
8193 break;
8194 }
8195 }
8196 }
8197 }
8198
8199 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8200 rewrite references to point to the copies.
8201
8202 References are looked for in DIE's attributes and, recursively, in the
8203 attributes of all its children that are location descriptions.
8204 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8205 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8206
8207 static void
8208 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8209 comdat_type_node *type_node,
8210 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8211 {
8212 dw_die_ref c;
8213
8214 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8215 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8216 type_node,
8217 copied_dwarf_procs));
8218 }
8219
8220 /* Traverse the DIE and set up additional .debug_types or .debug_info
8221 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8222 section. */
8223
8224 static void
8225 break_out_comdat_types (dw_die_ref die)
8226 {
8227 dw_die_ref c;
8228 dw_die_ref first;
8229 dw_die_ref prev = NULL;
8230 dw_die_ref next = NULL;
8231 dw_die_ref unit = NULL;
8232
8233 first = c = die->die_child;
8234 if (c)
8235 next = c->die_sib;
8236 if (c) do {
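  /* PREV tracks the predecessor of C in the circular sibling list, as
     needed by remove_child_with_prev and replace_child; it is only
     advanced when the node visited in the previous iteration (or its
     replacement) is still linked right after PREV.  */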
8237 if (prev == NULL || prev->die_sib == c)
8238 prev = c;
8239 c = next;
8240 next = (c == first ? NULL : c->die_sib);
8241 if (should_move_die_to_comdat (c))
8242 {
8243 dw_die_ref replacement;
8244 comdat_type_node *type_node;
8245
8246 /* Break out nested types into their own type units. */
8247 break_out_comdat_types (c);
8248
8249 /* Create a new type unit DIE as the root for the new tree, and
8250 add it to the list of comdat types. */
8251 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8252 add_AT_unsigned (unit, DW_AT_language,
8253 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8254 type_node = ggc_cleared_alloc<comdat_type_node> ();
8255 type_node->root_die = unit;
8256 type_node->next = comdat_type_list;
8257 comdat_type_list = type_node;
8258
8259 /* Generate the type signature. */
8260 generate_type_signature (c, type_node);
8261
8262 /* Copy the declaration context, attributes, and children of the
8263 declaration into the new type unit DIE, then remove this DIE
8264 from the main CU (or replace it with a skeleton if necessary). */
8265 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8266 type_node->skeleton_die = replacement;
8267
8268 /* Add the DIE to the new compunit. */
8269 add_child_die (unit, c);
8270
8271 /* Types can reference DWARF procedures for type size or data location
8272 expressions. Calls in DWARF expressions cannot target procedures
8273 that are not in the same section. So we must copy DWARF procedures
8274 along with this type and then rewrite references to them. */
8275 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8276 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8277
8278 if (replacement != NULL)
8279 c = replacement;
8280 }
8281 else if (c->die_tag == DW_TAG_namespace
8282 || c->die_tag == DW_TAG_class_type
8283 || c->die_tag == DW_TAG_structure_type
8284 || c->die_tag == DW_TAG_union_type)
8285 {
8286 /* Look for nested types that can be broken out. */
8287 break_out_comdat_types (c);
8288 }
8289 } while (next != NULL);
8290 }
8291
8292 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8293 Enter all the cloned children into the hash table decl_table. */
8294
8295 static dw_die_ref
8296 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8297 {
8298 dw_die_ref c;
8299 dw_die_ref clone;
8300 struct decl_table_entry *entry;
8301 decl_table_entry **slot;
8302
8303 if (die->die_tag == DW_TAG_subprogram)
8304 clone = clone_as_declaration (die);
8305 else
8306 clone = clone_die (die);
8307
8308 slot = decl_table->find_slot_with_hash (die,
8309 htab_hash_pointer (die), INSERT);
8310
8311 /* Assert that DIE isn't in the hash table yet. If it were already
8312 there, its ancestors would necessarily be there as well, and
8313 clone_tree_partial wouldn't have been called. */
8314 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8315
8316 entry = XCNEW (struct decl_table_entry);
8317 entry->orig = die;
8318 entry->copy = clone;
8319 *slot = entry;
8320
8321 if (die->die_tag != DW_TAG_subprogram)
8322 FOR_EACH_CHILD (die, c,
8323 add_child_die (clone, clone_tree_partial (c, decl_table)));
8324
8325 return clone;
8326 }
8327
8328 /* Walk the DIE and its children, looking for references to incomplete
8329 or trivial types that are unmarked (i.e., that are not in the current
8330 type_unit). */
8331
8332 static void
8333 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8334 {
8335 dw_die_ref c;
8336 dw_attr_node *a;
8337 unsigned ix;
8338
8339 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8340 {
8341 if (AT_class (a) == dw_val_class_die_ref)
8342 {
8343 dw_die_ref targ = AT_ref (a);
8344 decl_table_entry **slot;
8345 struct decl_table_entry *entry;
8346
8347 if (targ->die_mark != 0 || targ->comdat_type_p)
8348 continue;
8349
8350 slot = decl_table->find_slot_with_hash (targ,
8351 htab_hash_pointer (targ),
8352 INSERT);
8353
8354 if (*slot != HTAB_EMPTY_ENTRY)
8355 {
8356 /* TARG has already been copied, so we just need to
8357 modify the reference to point to the copy. */
8358 entry = *slot;
8359 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8360 }
8361 else
8362 {
8363 dw_die_ref parent = unit;
8364 dw_die_ref copy = clone_die (targ);
8365
8366 /* Record in DECL_TABLE that TARG has been copied.
8367 Need to do this now, before the recursive call,
8368 because DECL_TABLE may be expanded and SLOT
8369 would no longer be a valid pointer. */
8370 entry = XCNEW (struct decl_table_entry);
8371 entry->orig = targ;
8372 entry->copy = copy;
8373 *slot = entry;
8374
8375 /* If TARG is not a declaration DIE, we need to copy its
8376 children. */
8377 if (!is_declaration_die (targ))
8378 {
8379 FOR_EACH_CHILD (
8380 targ, c,
8381 add_child_die (copy,
8382 clone_tree_partial (c, decl_table)));
8383 }
8384
8385 /* Make sure the cloned tree is marked as part of the
8386 type unit. */
8387 mark_dies (copy);
8388
8389 /* If TARG has surrounding context, copy its ancestor tree
8390 into the new type unit. */
8391 if (targ->die_parent != NULL
8392 && !is_unit_die (targ->die_parent))
8393 parent = copy_ancestor_tree (unit, targ->die_parent,
8394 decl_table);
8395
8396 add_child_die (parent, copy);
8397 a->dw_attr_val.v.val_die_ref.die = copy;
8398
8399 /* Make sure the newly-copied DIE is walked. If it was
8400 installed in a previously-added context, it won't
8401 get visited otherwise. */
8402 if (parent != unit)
8403 {
8404 /* Find the highest point of the newly-added tree,
8405 mark each node along the way, and walk from there. */
8406 parent->die_mark = 1;
8407 while (parent->die_parent
8408 && parent->die_parent->die_mark == 0)
8409 {
8410 parent = parent->die_parent;
8411 parent->die_mark = 1;
8412 }
8413 copy_decls_walk (unit, parent, decl_table);
8414 }
8415 }
8416 }
8417 }
8418
8419 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8420 }
8421
8422 /* Copy declarations for "unworthy" types into the new comdat section.
8423 Incomplete types, modified types, and certain other types aren't broken
8424 out into comdat sections of their own, so they don't have a signature,
8425 and we need to copy the declaration into the same section so that we
8426 don't have an external reference. */
8427
8428 static void
8429 copy_decls_for_unworthy_types (dw_die_ref unit)
8430 {
8431 mark_dies (unit);
8432 decl_hash_type decl_table (10);
8433 copy_decls_walk (unit, unit, &decl_table);
8434 unmark_dies (unit);
8435 }
8436
8437 /* Traverse the DIE and add a sibling attribute if it may have the
8438 effect of speeding up access to siblings. To save some space,
8439 avoid generating sibling attributes for DIEs without children. */
8440
8441 static void
8442 add_sibling_attributes (dw_die_ref die)
8443 {
8444 dw_die_ref c;
8445
8446 if (! die->die_child)
8447 return;
8448
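  /* Skip the root (no parent) and the last child, whose die_sib wraps
     around to the first child, so a sibling attribute would be
     meaningless for it.  */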
8449 if (die->die_parent && die != die->die_parent->die_child)
8450 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8451
8452 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8453 }
8454
8455 /* Output all location lists for the DIE and its children. */
8456
8457 static void
8458 output_location_lists (dw_die_ref die)
8459 {
8460 dw_die_ref c;
8461 dw_attr_node *a;
8462 unsigned ix;
8463
8464 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8465 if (AT_class (a) == dw_val_class_loc_list)
8466 output_loc_list (AT_loc_list (a));
8467
8468 FOR_EACH_CHILD (die, c, output_location_lists (c));
8469 }
8470
8471 /* During assign_location_list_indexes and output_loclists_offsets this is
8472 the current index; afterwards it holds the number of assigned indexes
8473 (i.e. how large the .debug_loclists* offset table should be). */
8474 static unsigned int loc_list_idx;
8475
8476 /* Output all location list offsets for the DIE and its children. */
8477
8478 static void
8479 output_loclists_offsets (dw_die_ref die)
8480 {
8481 dw_die_ref c;
8482 dw_attr_node *a;
8483 unsigned ix;
8484
8485 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8486 if (AT_class (a) == dw_val_class_loc_list)
8487 {
8488 dw_loc_list_ref l = AT_loc_list (a);
8489 if (l->offset_emitted)
8490 continue;
8491 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8492 loc_section_label, NULL);
8493 gcc_assert (l->hash == loc_list_idx);
8494 loc_list_idx++;
8495 l->offset_emitted = true;
8496 }
8497
8498 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8499 }
8500
8501 /* Recursively set indexes of location lists. */
8502
8503 static void
8504 assign_location_list_indexes (dw_die_ref die)
8505 {
8506 dw_die_ref c;
8507 dw_attr_node *a;
8508 unsigned ix;
8509
8510 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8511 if (AT_class (a) == dw_val_class_loc_list)
8512 {
8513 dw_loc_list_ref list = AT_loc_list (a);
8514 if (!list->num_assigned)
8515 {
8516 list->num_assigned = true;
8517 list->hash = loc_list_idx++;
8518 }
8519 }
8520
8521 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8522 }
8523
8524 /* We want to limit the number of external references, because they are
8525 larger than local references: a relocation takes multiple words, and
8526 even a sig8 reference is always eight bytes, whereas a local reference
8527 can be as small as one byte (though GCC usually emits the 4-byte DW_FORM_ref4).
8528 So if we encounter multiple external references to the same type DIE, we
8529 make a local typedef stub for it and redirect all references there.
8530
8531 This is the element of the hash table for keeping track of these
8532 references. */
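/* As a rough illustration: five DW_FORM_ref_sig8 references to one comdat
   type cost 40 bytes, whereas one local stub carrying the signature plus
   five 4-byte local references costs around 30 bytes, and the gap grows
   with every additional reference.  */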
8533
8534 struct external_ref
8535 {
8536 dw_die_ref type;
8537 dw_die_ref stub;
8538 unsigned n_refs;
8539 };
8540
8541 /* Hashtable helpers. */
8542
8543 struct external_ref_hasher : free_ptr_hash <external_ref>
8544 {
8545 static inline hashval_t hash (const external_ref *);
8546 static inline bool equal (const external_ref *, const external_ref *);
8547 };
8548
8549 inline hashval_t
8550 external_ref_hasher::hash (const external_ref *r)
8551 {
8552 dw_die_ref die = r->type;
8553 hashval_t h = 0;
8554
8555 /* We can't use the address of the DIE for hashing, because
8556 that will make the order of the stub DIEs non-deterministic. */
8557 if (! die->comdat_type_p)
8558 /* We have a symbol; use it to compute a hash. */
8559 h = htab_hash_string (die->die_id.die_symbol);
8560 else
8561 {
8562 /* We have a type signature; use a subset of the bits as the hash.
8563 The 8-byte signature is at least as large as hashval_t. */
8564 comdat_type_node *type_node = die->die_id.die_type_node;
8565 memcpy (&h, type_node->signature, sizeof (h));
8566 }
8567 return h;
8568 }
8569
8570 inline bool
8571 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8572 {
8573 return r1->type == r2->type;
8574 }
8575
8576 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8577
8578 /* Return a pointer to the external_ref for references to DIE. */
8579
8580 static struct external_ref *
8581 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8582 {
8583 struct external_ref ref, *ref_p;
8584 external_ref **slot;
8585
8586 ref.type = die;
8587 slot = map->find_slot (&ref, INSERT);
8588 if (*slot != HTAB_EMPTY_ENTRY)
8589 return *slot;
8590
8591 ref_p = XCNEW (struct external_ref);
8592 ref_p->type = die;
8593 *slot = ref_p;
8594 return ref_p;
8595 }
8596
8597 /* Subroutine of optimize_external_refs, below.
8598
8599 If we see a type skeleton, record it as our stub. If we see external
8600 references, remember how many we've seen. */
8601
8602 static void
8603 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8604 {
8605 dw_die_ref c;
8606 dw_attr_node *a;
8607 unsigned ix;
8608 struct external_ref *ref_p;
8609
8610 if (is_type_die (die)
8611 && (c = get_AT_ref (die, DW_AT_signature)))
8612 {
8613 /* This is a local skeleton; use it for local references. */
8614 ref_p = lookup_external_ref (map, c);
8615 ref_p->stub = die;
8616 }
8617
8618 /* Scan the DIE references, and remember any that refer to DIEs from
8619 other CUs (i.e. those which are not marked). */
8620 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8621 if (AT_class (a) == dw_val_class_die_ref
8622 && (c = AT_ref (a))->die_mark == 0
8623 && is_type_die (c))
8624 {
8625 ref_p = lookup_external_ref (map, c);
8626 ref_p->n_refs++;
8627 }
8628
8629 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8630 }
8631
8632 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8633 points to an external_ref, DATA is the CU we're processing. If we don't
8634 already have a local stub, and we have multiple refs, build a stub. */
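/* The stub built below has one of two shapes (sketch, not literal output):
     <the type's own tag>, e.g. DW_TAG_structure_type
       DW_AT_signature -> the comdat type's 8-byte signature
   or, for a non-comdat external type,
     DW_TAG_typedef (unnamed)
       DW_AT_type -> the external type DIE  */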
8635
8636 int
8637 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8638 {
8639 struct external_ref *ref_p = *slot;
8640
8641 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8642 {
8643 /* We have multiple references to this type, so build a small stub.
8644 Both of these forms are a bit dodgy from the perspective of the
8645 DWARF standard, since technically they should have names. */
8646 dw_die_ref cu = data;
8647 dw_die_ref type = ref_p->type;
8648 dw_die_ref stub = NULL;
8649
8650 if (type->comdat_type_p)
8651 {
8652 /* If we refer to this type via sig8, use AT_signature. */
8653 stub = new_die (type->die_tag, cu, NULL_TREE);
8654 add_AT_die_ref (stub, DW_AT_signature, type);
8655 }
8656 else
8657 {
8658 /* Otherwise, use a typedef with no name. */
8659 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8660 add_AT_die_ref (stub, DW_AT_type, type);
8661 }
8662
8663 stub->die_mark++;
8664 ref_p->stub = stub;
8665 }
8666 return 1;
8667 }
8668
8669 /* DIE is a unit; look through all the DIE references to see if there are
8670 any external references to types, and if so, create local stubs for
8671 them which will be applied in build_abbrev_table. This is useful because
8672 references to local DIEs are smaller. */
8673
8674 static external_ref_hash_type *
8675 optimize_external_refs (dw_die_ref die)
8676 {
8677 external_ref_hash_type *map = new external_ref_hash_type (10);
8678 optimize_external_refs_1 (die, map);
8679 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8680 return map;
8681 }
8682
8683 /* The following 4 variables are temporaries that are computed only during the
8684 build_abbrev_table call and used and released during the following
8685 optimize_abbrev_table call. */
8686
8687 /* First abbrev_id that can be optimized based on usage. */
8688 static unsigned int abbrev_opt_start;
8689
8690 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8691 abbrev_id smaller than this, because they must be already sized
8692 during build_abbrev_table). */
8693 static unsigned int abbrev_opt_base_type_end;
8694
8695 /* Vector of usage counts during build_abbrev_table. Indexed by
8696 abbrev_id - abbrev_opt_start. */
8697 static vec<unsigned int> abbrev_usage_count;
8698
8699 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8700 static vec<dw_die_ref> sorted_abbrev_dies;
8701
8702 /* The format of each DIE (and its attribute value pairs) is encoded in an
8703 abbreviation table. This routine builds the abbreviation table and assigns
8704 a unique abbreviation id for each abbreviation entry. The children of each
8705 die are visited recursively. */
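/* Two DIEs share an abbreviation entry only if they have the same tag, the
   same has-children flag, and identical attribute/form lists; e.g. every
   local variable DIE carrying just DW_AT_name, DW_AT_decl_file,
   DW_AT_decl_line, DW_AT_type and DW_AT_location in the same forms
   collapses onto a single abbreviation.  */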
8706
8707 static void
8708 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8709 {
8710 unsigned int abbrev_id = 0;
8711 dw_die_ref c;
8712 dw_attr_node *a;
8713 unsigned ix;
8714 dw_die_ref abbrev;
8715
8716 /* Scan the DIE references, and replace any that refer to
8717 DIEs from other CUs (i.e. those which are not marked) with
8718 the local stubs we built in optimize_external_refs. */
8719 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8720 if (AT_class (a) == dw_val_class_die_ref
8721 && (c = AT_ref (a))->die_mark == 0)
8722 {
8723 struct external_ref *ref_p;
8724 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8725
8726 ref_p = lookup_external_ref (extern_map, c);
8727 if (ref_p->stub && ref_p->stub != die)
8728 change_AT_die_ref (a, ref_p->stub);
8729 else
8730 /* We aren't changing this reference, so mark it external. */
8731 set_AT_ref_external (a, 1);
8732 }
8733
8734 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8735 {
8736 dw_attr_node *die_a, *abbrev_a;
8737 unsigned ix;
8738 bool ok = true;
8739
8740 if (abbrev_id == 0)
8741 continue;
8742 if (abbrev->die_tag != die->die_tag)
8743 continue;
8744 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8745 continue;
8746
8747 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8748 continue;
8749
8750 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8751 {
8752 abbrev_a = &(*abbrev->die_attr)[ix];
8753 if ((abbrev_a->dw_attr != die_a->dw_attr)
8754 || (value_format (abbrev_a) != value_format (die_a)))
8755 {
8756 ok = false;
8757 break;
8758 }
8759 }
8760 if (ok)
8761 break;
8762 }
8763
8764 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8765 {
8766 vec_safe_push (abbrev_die_table, die);
8767 if (abbrev_opt_start)
8768 abbrev_usage_count.safe_push (0);
8769 }
8770 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8771 {
8772 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8773 sorted_abbrev_dies.safe_push (die);
8774 }
8775
8776 die->die_abbrev = abbrev_id;
8777 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8778 }
8779
8780 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8781 by die_abbrev's usage count, from the most commonly used
8782 abbreviation to the least. */
8783
8784 static int
8785 die_abbrev_cmp (const void *p1, const void *p2)
8786 {
8787 dw_die_ref die1 = *(const dw_die_ref *) p1;
8788 dw_die_ref die2 = *(const dw_die_ref *) p2;
8789
8790 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8791 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8792
8793 if (die1->die_abbrev >= abbrev_opt_base_type_end
8794 && die2->die_abbrev >= abbrev_opt_base_type_end)
8795 {
8796 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8797 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8798 return -1;
8799 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8800 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8801 return 1;
8802 }
8803
8804 /* Stabilize the sort. */
8805 if (die1->die_abbrev < die2->die_abbrev)
8806 return -1;
8807 if (die1->die_abbrev > die2->die_abbrev)
8808 return 1;
8809
8810 return 0;
8811 }
8812
8813 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
8814 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
8815 into dw_val_class_const_implicit or
8816 dw_val_class_unsigned_const_implicit. */
8817
8818 static void
8819 optimize_implicit_const (unsigned int first_id, unsigned int end,
8820 vec<bool> &implicit_consts)
8821 {
8822 /* It never makes sense if there is just one DIE using the abbreviation. */
8823 if (end < first_id + 2)
8824 return;
8825
8826 dw_attr_node *a;
8827 unsigned ix, i;
8828 dw_die_ref die = sorted_abbrev_dies[first_id];
8829 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8830 if (implicit_consts[ix])
8831 {
8832 enum dw_val_class new_class = dw_val_class_none;
8833 switch (AT_class (a))
8834 {
8835 case dw_val_class_unsigned_const:
8836 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8837 continue;
8838
8839 /* The .debug_abbrev section will grow by
8840 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
8841 in all the DIEs using that abbreviation. */
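	      /* For instance, a constant needing 4 bytes per DIE and used
		 by three DIEs trades 12 bytes of .debug_info for at most 5
		 bytes of sleb128 in .debug_abbrev.  */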
8842 if (constant_size (AT_unsigned (a)) * (end - first_id)
8843 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8844 continue;
8845
8846 new_class = dw_val_class_unsigned_const_implicit;
8847 break;
8848
8849 case dw_val_class_const:
8850 new_class = dw_val_class_const_implicit;
8851 break;
8852
8853 case dw_val_class_file:
8854 new_class = dw_val_class_file_implicit;
8855 break;
8856
8857 default:
8858 continue;
8859 }
8860 for (i = first_id; i < end; i++)
8861 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8862 = new_class;
8863 }
8864 }
8865
8866 /* Attempt to optimize the abbreviation table for abbreviations with ids of
8867 abbrev_opt_start and above. */
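/* Reordering pays off when some abbreviation codes would need more than one
   uleb128 byte (roughly, when there are more than 127 abbreviations); for
   DWARF 5 it is worthwhile anyway because it enables the
   DW_FORM_implicit_const conversion done by optimize_implicit_const.  */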
8868
8869 static void
8870 optimize_abbrev_table (void)
8871 {
8872 if (abbrev_opt_start
8873 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8874 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8875 {
8876 auto_vec<bool, 32> implicit_consts;
8877 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8878
8879 unsigned int abbrev_id = abbrev_opt_start - 1;
8880 unsigned int first_id = ~0U;
8881 unsigned int last_abbrev_id = 0;
8882 unsigned int i;
8883 dw_die_ref die;
8884 if (abbrev_opt_base_type_end > abbrev_opt_start)
8885 abbrev_id = abbrev_opt_base_type_end - 1;
8886 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8887 most commonly used abbreviations come first. */
8888 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8889 {
8890 dw_attr_node *a;
8891 unsigned ix;
8892
8893 /* If calc_base_type_die_sizes has been called, the CU and
8894 base types after it can't be optimized, because we've already
8895 calculated their DIE offsets. We've sorted them first. */
8896 if (die->die_abbrev < abbrev_opt_base_type_end)
8897 continue;
8898 if (die->die_abbrev != last_abbrev_id)
8899 {
8900 last_abbrev_id = die->die_abbrev;
8901 if (dwarf_version >= 5 && first_id != ~0U)
8902 optimize_implicit_const (first_id, i, implicit_consts);
8903 abbrev_id++;
8904 (*abbrev_die_table)[abbrev_id] = die;
8905 if (dwarf_version >= 5)
8906 {
8907 first_id = i;
8908 implicit_consts.truncate (0);
8909
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 switch (AT_class (a))
8912 {
8913 case dw_val_class_const:
8914 case dw_val_class_unsigned_const:
8915 case dw_val_class_file:
8916 implicit_consts.safe_push (true);
8917 break;
8918 default:
8919 implicit_consts.safe_push (false);
8920 break;
8921 }
8922 }
8923 }
8924 else if (dwarf_version >= 5)
8925 {
8926 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8927 if (!implicit_consts[ix])
8928 continue;
8929 else
8930 {
8931 dw_attr_node *other_a
8932 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8933 if (!dw_val_equal_p (&a->dw_attr_val,
8934 &other_a->dw_attr_val))
8935 implicit_consts[ix] = false;
8936 }
8937 }
8938 die->die_abbrev = abbrev_id;
8939 }
8940 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8941 if (dwarf_version >= 5 && first_id != ~0U)
8942 optimize_implicit_const (first_id, i, implicit_consts);
8943 }
8944
8945 abbrev_opt_start = 0;
8946 abbrev_opt_base_type_end = 0;
8947 abbrev_usage_count.release ();
8948 sorted_abbrev_dies.release ();
8949 }
8950 \f
8951 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8952
8953 static int
8954 constant_size (unsigned HOST_WIDE_INT value)
8955 {
8956 int log;
8957
8958 if (value == 0)
8959 log = 0;
8960 else
8961 log = floor_log2 (value);
8962
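/* Round up to the next power-of-two byte count (1, 2, 4 or 8).
E.g. 0x1234 needs 13 bits, so log becomes 1 below and is then
rounded up to 2 bytes.  */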
8963 log = log / 8;
8964 log = 1 << (floor_log2 (log) + 1);
8965
8966 return log;
8967 }
8968
8969 /* Return the size of a DIE as it is represented in the
8970 .debug_info section. */
8971
8972 static unsigned long
8973 size_of_die (dw_die_ref die)
8974 {
8975 unsigned long size = 0;
8976 dw_attr_node *a;
8977 unsigned ix;
8978 enum dwarf_form form;
8979
8980 size += size_of_uleb128 (die->die_abbrev);
8981 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8982 {
8983 switch (AT_class (a))
8984 {
8985 case dw_val_class_addr:
8986 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8987 {
8988 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8989 size += size_of_uleb128 (AT_index (a));
8990 }
8991 else
8992 size += DWARF2_ADDR_SIZE;
8993 break;
8994 case dw_val_class_offset:
8995 size += DWARF_OFFSET_SIZE;
8996 break;
8997 case dw_val_class_loc:
8998 {
8999 unsigned long lsize = size_of_locs (AT_loc (a));
9000
9001 /* Block length. */
9002 if (dwarf_version >= 4)
9003 size += size_of_uleb128 (lsize);
9004 else
9005 size += constant_size (lsize);
9006 size += lsize;
9007 }
9008 break;
9009 case dw_val_class_loc_list:
9010 if (dwarf_split_debug_info && dwarf_version >= 5)
9011 {
9012 gcc_assert (AT_loc_list (a)->num_assigned);
9013 size += size_of_uleb128 (AT_loc_list (a)->hash);
9014 }
9015 else
9016 size += DWARF_OFFSET_SIZE;
9017 break;
9018 case dw_val_class_range_list:
9019 if (value_format (a) == DW_FORM_rnglistx)
9020 {
9021 gcc_assert (rnglist_idx);
9022 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9023 size += size_of_uleb128 (r->idx);
9024 }
9025 else
9026 size += DWARF_OFFSET_SIZE;
9027 break;
9028 case dw_val_class_const:
9029 size += size_of_sleb128 (AT_int (a));
9030 break;
9031 case dw_val_class_unsigned_const:
9032 {
9033 int csize = constant_size (AT_unsigned (a));
9034 if (dwarf_version == 3
9035 && a->dw_attr == DW_AT_data_member_location
9036 && csize >= 4)
9037 size += size_of_uleb128 (AT_unsigned (a));
9038 else
9039 size += csize;
9040 }
9041 break;
9042 case dw_val_class_const_implicit:
9043 case dw_val_class_unsigned_const_implicit:
9044 case dw_val_class_file_implicit:
9045 /* These occupy no size in the DIE, just an extra sleb128 in
9046 .debug_abbrev. */
9047 break;
9048 case dw_val_class_const_double:
9049 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9050 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9051 size++; /* block */
9052 break;
9053 case dw_val_class_wide_int:
9054 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9055 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9056 if (get_full_len (*a->dw_attr_val.v.val_wide)
9057 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9058 size++; /* block */
9059 break;
9060 case dw_val_class_vec:
9061 size += constant_size (a->dw_attr_val.v.val_vec.length
9062 * a->dw_attr_val.v.val_vec.elt_size)
9063 + a->dw_attr_val.v.val_vec.length
9064 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9065 break;
9066 case dw_val_class_flag:
9067 if (dwarf_version >= 4)
9068 /* Currently all add_AT_flag calls pass in 1 as last argument,
9069 so DW_FORM_flag_present can be used. If that ever changes,
9070 we'll need to use DW_FORM_flag and have some optimization
9071 in build_abbrev_table that will change those to
9072 DW_FORM_flag_present if it is set to 1 in all DIEs using
9073 the same abbrev entry. */
9074 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9075 else
9076 size += 1;
9077 break;
9078 case dw_val_class_die_ref:
9079 if (AT_ref_external (a))
9080 {
9081 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9082 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9083 is sized by target address length, whereas in DWARF3
9084 it's always sized as an offset. */
9085 if (use_debug_types)
9086 size += DWARF_TYPE_SIGNATURE_SIZE;
9087 else if (dwarf_version == 2)
9088 size += DWARF2_ADDR_SIZE;
9089 else
9090 size += DWARF_OFFSET_SIZE;
9091 }
9092 else
9093 size += DWARF_OFFSET_SIZE;
9094 break;
9095 case dw_val_class_fde_ref:
9096 size += DWARF_OFFSET_SIZE;
9097 break;
9098 case dw_val_class_lbl_id:
9099 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9100 {
9101 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9102 size += size_of_uleb128 (AT_index (a));
9103 }
9104 else
9105 size += DWARF2_ADDR_SIZE;
9106 break;
9107 case dw_val_class_lineptr:
9108 case dw_val_class_macptr:
9109 case dw_val_class_loclistsptr:
9110 size += DWARF_OFFSET_SIZE;
9111 break;
9112 case dw_val_class_str:
9113 form = AT_string_form (a);
9114 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9115 size += DWARF_OFFSET_SIZE;
9116 else if (form == DW_FORM_GNU_str_index)
9117 size += size_of_uleb128 (AT_index (a));
9118 else
9119 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9120 break;
9121 case dw_val_class_file:
9122 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9123 break;
9124 case dw_val_class_data8:
9125 size += 8;
9126 break;
9127 case dw_val_class_vms_delta:
9128 size += DWARF_OFFSET_SIZE;
9129 break;
9130 case dw_val_class_high_pc:
9131 size += DWARF2_ADDR_SIZE;
9132 break;
9133 case dw_val_class_discr_value:
9134 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9135 break;
9136 case dw_val_class_discr_list:
9137 {
9138 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9139
9140 /* This is a block, so we have the block length and then its
9141 data. */
9142 size += constant_size (block_size) + block_size;
9143 }
9144 break;
9145 default:
9146 gcc_unreachable ();
9147 }
9148 }
9149
9150 return size;
9151 }
9152
9153 /* Size the debugging information associated with a given DIE. Visits the
9154 DIE's children recursively, updating the global variable next_die_offset
9155 each time through, and uses the current value of next_die_offset to set
9156 the die_offset field in each DIE. */
9157
9158 static void
9159 calc_die_sizes (dw_die_ref die)
9160 {
9161 dw_die_ref c;
9162
9163 gcc_assert (die->die_offset == 0
9164 || (unsigned long int) die->die_offset == next_die_offset);
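/* A nonzero offset here typically means the DIE was already sized by
calc_base_type_die_sizes and must land at the same offset again.  */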
9165 die->die_offset = next_die_offset;
9166 next_die_offset += size_of_die (die);
9167
9168 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9169
9170 if (die->die_child != NULL)
9171 /* Count the null byte used to terminate sibling lists. */
9172 next_die_offset += 1;
9173 }
9174
9175 /* Size just the base type children at the start of the CU.
9176 This is needed because build_abbrev_table needs to size location
9177 expressions, and sizing of type-based stack ops needs to know the
9178 die_offset values of the base types. */
9179
9180 static void
9181 calc_base_type_die_sizes (void)
9182 {
9183 unsigned long die_offset = (dwarf_split_debug_info
9184 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9185 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9186 unsigned int i;
9187 dw_die_ref base_type;
9188 #if ENABLE_ASSERT_CHECKING
9189 dw_die_ref prev = comp_unit_die ()->die_child;
9190 #endif
9191
9192 die_offset += size_of_die (comp_unit_die ());
9193 for (i = 0; base_types.iterate (i, &base_type); i++)
9194 {
9195 #if ENABLE_ASSERT_CHECKING
9196 gcc_assert (base_type->die_offset == 0
9197 && prev->die_sib == base_type
9198 && base_type->die_child == NULL
9199 && base_type->die_abbrev);
9200 prev = base_type;
9201 #endif
9202 if (abbrev_opt_start
9203 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9204 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9205 base_type->die_offset = die_offset;
9206 die_offset += size_of_die (base_type);
9207 }
9208 }
9209
9210 /* Set the marks for a die and its children. We do this so
9211 that we know whether or not a reference needs to use FORM_ref_addr; only
9212 DIEs in the same CU will be marked. We used to clear out the offset
9213 and use that as the flag, but ran into ordering problems. */
9214
9215 static void
9216 mark_dies (dw_die_ref die)
9217 {
9218 dw_die_ref c;
9219
9220 gcc_assert (!die->die_mark);
9221
9222 die->die_mark = 1;
9223 FOR_EACH_CHILD (die, c, mark_dies (c));
9224 }
9225
9226 /* Clear the marks for a die and its children. */
9227
9228 static void
9229 unmark_dies (dw_die_ref die)
9230 {
9231 dw_die_ref c;
9232
9233 if (! use_debug_types)
9234 gcc_assert (die->die_mark);
9235
9236 die->die_mark = 0;
9237 FOR_EACH_CHILD (die, c, unmark_dies (c));
9238 }
9239
9240 /* Clear the marks for a die, its children and referred dies. */
9241
9242 static void
9243 unmark_all_dies (dw_die_ref die)
9244 {
9245 dw_die_ref c;
9246 dw_attr_node *a;
9247 unsigned ix;
9248
9249 if (!die->die_mark)
9250 return;
9251 die->die_mark = 0;
9252
9253 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9254
9255 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9256 if (AT_class (a) == dw_val_class_die_ref)
9257 unmark_all_dies (AT_ref (a));
9258 }
9259
9260 /* Calculate whether the entry should appear in the final output file. It may
9261 be from a pruned type. */
9262
9263 static bool
9264 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9265 {
9266 /* By limiting gnu pubnames to definitions only, gold can generate a
9267 gdb index without entries for declarations, which don't include
9268 enough information to be useful. */
9269 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9270 return false;
9271
9272 if (table == pubname_table)
9273 {
9274 /* Enumerator names are part of the pubname table, but the
9275 parent DW_TAG_enumeration_type die may have been pruned.
9276 Don't output them if that is the case. */
9277 if (p->die->die_tag == DW_TAG_enumerator &&
9278 (p->die->die_parent == NULL
9279 || !p->die->die_parent->die_perennial_p))
9280 return false;
9281
9282 /* Everything else in the pubname table is included. */
9283 return true;
9284 }
9285
9286 /* The pubtypes table shouldn't include types that have been
9287 pruned. */
9288 return (p->die->die_offset != 0
9289 || !flag_eliminate_unused_debug_types);
9290 }
9291
9292 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9293 generated for the compilation unit. */
9294
9295 static unsigned long
9296 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9297 {
9298 unsigned long size;
9299 unsigned i;
9300 pubname_entry *p;
9301 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9302
9303 size = DWARF_PUBNAMES_HEADER_SIZE;
9304 FOR_EACH_VEC_ELT (*names, i, p)
9305 if (include_pubname_in_output (names, p))
9306 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9307
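/* Count the terminating zero DIE offset at the end of the table.  */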
9308 size += DWARF_OFFSET_SIZE;
9309 return size;
9310 }
9311
9312 /* Return the size of the information in the .debug_aranges section. */
9313
9314 static unsigned long
9315 size_of_aranges (void)
9316 {
9317 unsigned long size;
9318
9319 size = DWARF_ARANGES_HEADER_SIZE;
9320
9321 /* Count the address/length pair for this compilation unit. */
9322 if (text_section_used)
9323 size += 2 * DWARF2_ADDR_SIZE;
9324 if (cold_text_section_used)
9325 size += 2 * DWARF2_ADDR_SIZE;
9326 if (have_multiple_function_sections)
9327 {
9328 unsigned fde_idx;
9329 dw_fde_ref fde;
9330
9331 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9332 {
9333 if (DECL_IGNORED_P (fde->decl))
9334 continue;
9335 if (!fde->in_std_section)
9336 size += 2 * DWARF2_ADDR_SIZE;
9337 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9338 size += 2 * DWARF2_ADDR_SIZE;
9339 }
9340 }
9341
9342 /* Count the two zero words used to terminate the address range table. */
9343 size += 2 * DWARF2_ADDR_SIZE;
9344 return size;
9345 }
9346 \f
9347 /* Select the encoding of an attribute value. */
9348
9349 static enum dwarf_form
9350 value_format (dw_attr_node *a)
9351 {
9352 switch (AT_class (a))
9353 {
9354 case dw_val_class_addr:
9355 /* Only very few attributes allow DW_FORM_addr. */
9356 switch (a->dw_attr)
9357 {
9358 case DW_AT_low_pc:
9359 case DW_AT_high_pc:
9360 case DW_AT_entry_pc:
9361 case DW_AT_trampoline:
9362 return (AT_index (a) == NOT_INDEXED
9363 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9364 default:
9365 break;
9366 }
9367 switch (DWARF2_ADDR_SIZE)
9368 {
9369 case 1:
9370 return DW_FORM_data1;
9371 case 2:
9372 return DW_FORM_data2;
9373 case 4:
9374 return DW_FORM_data4;
9375 case 8:
9376 return DW_FORM_data8;
9377 default:
9378 gcc_unreachable ();
9379 }
9380 case dw_val_class_loc_list:
9381 if (dwarf_split_debug_info
9382 && dwarf_version >= 5
9383 && AT_loc_list (a)->num_assigned)
9384 return DW_FORM_loclistx;
9385 /* FALLTHRU */
9386 case dw_val_class_range_list:
9387 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo
9388 but DW_FORM_sec_offset in .debug_info, which is shorter if we care
9389 about the sizes of .debug* sections in shared libraries and
9390 executables and don't take into account relocations that affect just
9391 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9392 table in the .debug_rnglists section. */
9393 if (dwarf_split_debug_info
9394 && dwarf_version >= 5
9395 && AT_class (a) == dw_val_class_range_list
9396 && rnglist_idx
9397 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9398 return DW_FORM_rnglistx;
9399 if (dwarf_version >= 4)
9400 return DW_FORM_sec_offset;
9401 /* FALLTHRU */
9402 case dw_val_class_vms_delta:
9403 case dw_val_class_offset:
9404 switch (DWARF_OFFSET_SIZE)
9405 {
9406 case 4:
9407 return DW_FORM_data4;
9408 case 8:
9409 return DW_FORM_data8;
9410 default:
9411 gcc_unreachable ();
9412 }
9413 case dw_val_class_loc:
9414 if (dwarf_version >= 4)
9415 return DW_FORM_exprloc;
9416 switch (constant_size (size_of_locs (AT_loc (a))))
9417 {
9418 case 1:
9419 return DW_FORM_block1;
9420 case 2:
9421 return DW_FORM_block2;
9422 case 4:
9423 return DW_FORM_block4;
9424 default:
9425 gcc_unreachable ();
9426 }
9427 case dw_val_class_const:
9428 return DW_FORM_sdata;
9429 case dw_val_class_unsigned_const:
9430 switch (constant_size (AT_unsigned (a)))
9431 {
9432 case 1:
9433 return DW_FORM_data1;
9434 case 2:
9435 return DW_FORM_data2;
9436 case 4:
9437 /* In DWARF3 DW_AT_data_member_location with
9438 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9439 constant, so we need to use DW_FORM_udata if we need
9440 a large constant. */
9441 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9442 return DW_FORM_udata;
9443 return DW_FORM_data4;
9444 case 8:
9445 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9446 return DW_FORM_udata;
9447 return DW_FORM_data8;
9448 default:
9449 gcc_unreachable ();
9450 }
9451 case dw_val_class_const_implicit:
9452 case dw_val_class_unsigned_const_implicit:
9453 case dw_val_class_file_implicit:
9454 return DW_FORM_implicit_const;
9455 case dw_val_class_const_double:
9456 switch (HOST_BITS_PER_WIDE_INT)
9457 {
9458 case 8:
9459 return DW_FORM_data2;
9460 case 16:
9461 return DW_FORM_data4;
9462 case 32:
9463 return DW_FORM_data8;
9464 case 64:
9465 if (dwarf_version >= 5)
9466 return DW_FORM_data16;
9467 /* FALLTHRU */
9468 default:
9469 return DW_FORM_block1;
9470 }
9471 case dw_val_class_wide_int:
9472 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9473 {
9474 case 8:
9475 return DW_FORM_data1;
9476 case 16:
9477 return DW_FORM_data2;
9478 case 32:
9479 return DW_FORM_data4;
9480 case 64:
9481 return DW_FORM_data8;
9482 case 128:
9483 if (dwarf_version >= 5)
9484 return DW_FORM_data16;
9485 /* FALLTHRU */
9486 default:
9487 return DW_FORM_block1;
9488 }
9489 case dw_val_class_vec:
9490 switch (constant_size (a->dw_attr_val.v.val_vec.length
9491 * a->dw_attr_val.v.val_vec.elt_size))
9492 {
9493 case 1:
9494 return DW_FORM_block1;
9495 case 2:
9496 return DW_FORM_block2;
9497 case 4:
9498 return DW_FORM_block4;
9499 default:
9500 gcc_unreachable ();
9501 }
9502 case dw_val_class_flag:
9503 if (dwarf_version >= 4)
9504 {
9505 /* Currently all add_AT_flag calls pass in 1 as last argument,
9506 so DW_FORM_flag_present can be used. If that ever changes,
9507 we'll need to use DW_FORM_flag and have some optimization
9508 in build_abbrev_table that will change those to
9509 DW_FORM_flag_present if it is set to 1 in all DIEs using
9510 the same abbrev entry. */
9511 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9512 return DW_FORM_flag_present;
9513 }
9514 return DW_FORM_flag;
9515 case dw_val_class_die_ref:
9516 if (AT_ref_external (a))
9517 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9518 else
9519 return DW_FORM_ref;
9520 case dw_val_class_fde_ref:
9521 return DW_FORM_data;
9522 case dw_val_class_lbl_id:
9523 return (AT_index (a) == NOT_INDEXED
9524 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9525 case dw_val_class_lineptr:
9526 case dw_val_class_macptr:
9527 case dw_val_class_loclistsptr:
9528 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9529 case dw_val_class_str:
9530 return AT_string_form (a);
9531 case dw_val_class_file:
9532 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9533 {
9534 case 1:
9535 return DW_FORM_data1;
9536 case 2:
9537 return DW_FORM_data2;
9538 case 4:
9539 return DW_FORM_data4;
9540 default:
9541 gcc_unreachable ();
9542 }
9543
9544 case dw_val_class_data8:
9545 return DW_FORM_data8;
9546
9547 case dw_val_class_high_pc:
9548 switch (DWARF2_ADDR_SIZE)
9549 {
9550 case 1:
9551 return DW_FORM_data1;
9552 case 2:
9553 return DW_FORM_data2;
9554 case 4:
9555 return DW_FORM_data4;
9556 case 8:
9557 return DW_FORM_data8;
9558 default:
9559 gcc_unreachable ();
9560 }
9561
9562 case dw_val_class_discr_value:
9563 return (a->dw_attr_val.v.val_discr_value.pos
9564 ? DW_FORM_udata
9565 : DW_FORM_sdata);
9566 case dw_val_class_discr_list:
9567 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9568 {
9569 case 1:
9570 return DW_FORM_block1;
9571 case 2:
9572 return DW_FORM_block2;
9573 case 4:
9574 return DW_FORM_block4;
9575 default:
9576 gcc_unreachable ();
9577 }
9578
9579 default:
9580 gcc_unreachable ();
9581 }
9582 }
9583
9584 /* Output the encoding of an attribute value. */
9585
9586 static void
9587 output_value_format (dw_attr_node *a)
9588 {
9589 enum dwarf_form form = value_format (a);
9590
9591 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9592 }
9593
9594 /* Given a die and id, produce the appropriate abbreviations. */
9595
9596 static void
9597 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9598 {
9599 unsigned ix;
9600 dw_attr_node *a_attr;
9601
9602 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9603 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9604 dwarf_tag_name (abbrev->die_tag));
9605
9606 if (abbrev->die_child != NULL)
9607 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9608 else
9609 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9610
9611 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9612 {
9613 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9614 dwarf_attr_name (a_attr->dw_attr));
9615 output_value_format (a_attr);
9616 if (value_format (a_attr) == DW_FORM_implicit_const)
9617 {
9618 if (AT_class (a_attr) == dw_val_class_file_implicit)
9619 {
9620 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9621 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9622 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9623 }
9624 else
9625 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9626 }
9627 }
9628
9629 dw2_asm_output_data (1, 0, NULL);
9630 dw2_asm_output_data (1, 0, NULL);
9631 }
9632
9633
9634 /* Output the .debug_abbrev section which defines the DIE abbreviation
9635 table. */
9636
9637 static void
9638 output_abbrev_section (void)
9639 {
9640 unsigned int abbrev_id;
9641 dw_die_ref abbrev;
9642
9643 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9644 if (abbrev_id != 0)
9645 output_die_abbrevs (abbrev_id, abbrev);
9646
9647 /* Terminate the table. */
9648 dw2_asm_output_data (1, 0, NULL);
9649 }
9650
9651 /* Return a new location list, given the begin and end range, and the
9652 expression. */
9653
9654 static inline dw_loc_list_ref
9655 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9656 const char *section)
9657 {
9658 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9659
9660 retlist->begin = begin;
9661 retlist->begin_entry = NULL;
9662 retlist->end = end;
9663 retlist->expr = expr;
9664 retlist->section = section;
9665
9666 return retlist;
9667 }
9668
9669 /* Generate a new internal symbol for this location list node, if it
9670 hasn't got one yet. */
9671
9672 static inline void
9673 gen_llsym (dw_loc_list_ref list)
9674 {
9675 gcc_assert (!list->ll_symbol);
9676 list->ll_symbol = gen_internal_sym ("LLST");
9677 }
9678
9679 /* Output the location list given to us. */
9680
9681 static void
9682 output_loc_list (dw_loc_list_ref list_head)
9683 {
9684 if (list_head->emitted)
9685 return;
9686 list_head->emitted = true;
9687
9688 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9689
9690 dw_loc_list_ref curr = list_head;
9691 const char *last_section = NULL;
9692 const char *base_label = NULL;
9693
9694 /* Walk the location list, and output each range + expression. */
9695 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9696 {
9697 unsigned long size;
9698 /* Don't output an entry that starts and ends at the same address. */
9699 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9700 continue;
9701 size = size_of_locs (curr->expr);
9702 /* If the expression is too large, drop it on the floor. We could
9703 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9704 in the expression, but >= 64KB expressions for a single value
9705 in a single range are unlikely to be very useful. */
9706 if (dwarf_version < 5 && size > 0xffff)
9707 continue;
9708 if (dwarf_version >= 5)
9709 {
9710 if (dwarf_split_debug_info)
9711 {
9712 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
9713 uleb128 index into .debug_addr and uleb128 length. */
9714 dw2_asm_output_data (1, DW_LLE_startx_length,
9715 "DW_LLE_startx_length (%s)",
9716 list_head->ll_symbol);
9717 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9718 "Location list range start index "
9719 "(%s)", curr->begin);
9720 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
9721 For that case we probably need to emit DW_LLE_startx_endx,
9722 but we'd need 2 .debug_addr entries rather than just one. */
9723 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9724 "Location list length (%s)",
9725 list_head->ll_symbol);
9726 }
9727 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9728 {
9729 /* If all code is in .text section, the base address is
9730 already provided by the CU attributes. Use
9731 DW_LLE_offset_pair where both addresses are uleb128 encoded
9732 offsets against that base. */
9733 dw2_asm_output_data (1, DW_LLE_offset_pair,
9734 "DW_LLE_offset_pair (%s)",
9735 list_head->ll_symbol);
9736 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9737 "Location list begin address (%s)",
9738 list_head->ll_symbol);
9739 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9740 "Location list end address (%s)",
9741 list_head->ll_symbol);
9742 }
9743 else if (HAVE_AS_LEB128)
9744 {
9745 /* Otherwise, find out how many consecutive entries could share
9746 the same base entry. If just one, emit DW_LLE_start_length,
9747 otherwise emit DW_LLE_base_address for the base address
9748 followed by a series of DW_LLE_offset_pair. */
9749 if (last_section == NULL || curr->section != last_section)
9750 {
9751 dw_loc_list_ref curr2;
9752 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9753 curr2 = curr2->dw_loc_next)
9754 {
9755 if (strcmp (curr2->begin, curr2->end) == 0
9756 && !curr2->force)
9757 continue;
9758 break;
9759 }
9760 if (curr2 == NULL || curr->section != curr2->section)
9761 last_section = NULL;
9762 else
9763 {
9764 last_section = curr->section;
9765 base_label = curr->begin;
9766 dw2_asm_output_data (1, DW_LLE_base_address,
9767 "DW_LLE_base_address (%s)",
9768 list_head->ll_symbol);
9769 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9770 "Base address (%s)",
9771 list_head->ll_symbol);
9772 }
9773 }
9774 /* Only one entry with the same base address. Use
9775 DW_LLE_start_length with absolute address and uleb128
9776 length. */
9777 if (last_section == NULL)
9778 {
9779 dw2_asm_output_data (1, DW_LLE_start_length,
9780 "DW_LLE_start_length (%s)",
9781 list_head->ll_symbol);
9782 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9783 "Location list begin address (%s)",
9784 list_head->ll_symbol);
9785 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9786 "Location list length "
9787 "(%s)", list_head->ll_symbol);
9788 }
9789 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9790 DW_LLE_base_address. */
9791 else
9792 {
9793 dw2_asm_output_data (1, DW_LLE_offset_pair,
9794 "DW_LLE_offset_pair (%s)",
9795 list_head->ll_symbol);
9796 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9797 "Location list begin address "
9798 "(%s)", list_head->ll_symbol);
9799 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9800 "Location list end address "
9801 "(%s)", list_head->ll_symbol);
9802 }
9803 }
9804 /* The assembler does not support the .uleb128 directive. Emit
9805 DW_LLE_start_end with a pair of absolute addresses. */
9806 else
9807 {
9808 dw2_asm_output_data (1, DW_LLE_start_end,
9809 "DW_LLE_start_end (%s)",
9810 list_head->ll_symbol);
9811 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9812 "Location list begin address (%s)",
9813 list_head->ll_symbol);
9814 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9815 "Location list end address (%s)",
9816 list_head->ll_symbol);
9817 }
9818 }
9819 else if (dwarf_split_debug_info)
9820 {
9821 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
9822 and 4 byte length. */
9823 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9824 "Location list start/length entry (%s)",
9825 list_head->ll_symbol);
9826 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9827 "Location list range start index (%s)",
9828 curr->begin);
9829 /* The length field is 4 bytes. If we ever need to support
9830 an 8-byte length, we can add a new DW_LLE code or fall back
9831 to DW_LLE_GNU_start_end_entry. */
9832 dw2_asm_output_delta (4, curr->end, curr->begin,
9833 "Location list range length (%s)",
9834 list_head->ll_symbol);
9835 }
9836 else if (!have_multiple_function_sections)
9837 {
9838 /* Pair of relative addresses against start of text section. */
9839 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9840 "Location list begin address (%s)",
9841 list_head->ll_symbol);
9842 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9843 "Location list end address (%s)",
9844 list_head->ll_symbol);
9845 }
9846 else
9847 {
9848 /* Pair of absolute addresses. */
9849 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9850 "Location list begin address (%s)",
9851 list_head->ll_symbol);
9852 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9853 "Location list end address (%s)",
9854 list_head->ll_symbol);
9855 }
9856
9857 /* Output the block length for this list of location operations. */
9858 if (dwarf_version >= 5)
9859 dw2_asm_output_data_uleb128 (size, "Location expression size");
9860 else
9861 {
9862 gcc_assert (size <= 0xffff);
9863 dw2_asm_output_data (2, size, "Location expression size");
9864 }
9865
9866 output_loc_sequence (curr->expr, -1);
9867 }
9868
9869 /* And finally list termination. */
9870 if (dwarf_version >= 5)
9871 dw2_asm_output_data (1, DW_LLE_end_of_list,
9872 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9873 else if (dwarf_split_debug_info)
9874 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9875 "Location list terminator (%s)",
9876 list_head->ll_symbol);
9877 else
9878 {
9879 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9880 "Location list terminator begin (%s)",
9881 list_head->ll_symbol);
9882 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9883 "Location list terminator end (%s)",
9884 list_head->ll_symbol);
9885 }
9886 }
9887
9888 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9889 section. Emit a relocated reference if val_entry is NULL, otherwise,
9890 emit an indirect reference. */
9891
9892 static void
9893 output_range_list_offset (dw_attr_node *a)
9894 {
9895 const char *name = dwarf_attr_name (a->dw_attr);
9896
9897 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9898 {
9899 if (dwarf_version >= 5)
9900 {
9901 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9902 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9903 debug_ranges_section, "%s", name);
9904 }
9905 else
9906 {
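/* Pre-DWARF-5 .debug_ranges entries are address pairs, so scale the
entry index by 2 * DWARF2_ADDR_SIZE and temporarily append that byte
offset to the section label.  */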
9907 char *p = strchr (ranges_section_label, '\0');
9908 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9909 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9910 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9911 debug_ranges_section, "%s", name);
9912 *p = '\0';
9913 }
9914 }
9915 else if (dwarf_version >= 5)
9916 {
9917 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9918 gcc_assert (rnglist_idx);
9919 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9920 }
9921 else
9922 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9923 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9924 "%s (offset from %s)", name, ranges_section_label);
9925 }
9926
9927 /* Output the offset into the debug_loc section. */
9928
9929 static void
9930 output_loc_list_offset (dw_attr_node *a)
9931 {
9932 char *sym = AT_loc_list (a)->ll_symbol;
9933
9934 gcc_assert (sym);
9935 if (!dwarf_split_debug_info)
9936 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9937 "%s", dwarf_attr_name (a->dw_attr));
9938 else if (dwarf_version >= 5)
9939 {
9940 gcc_assert (AT_loc_list (a)->num_assigned);
9941 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9942 dwarf_attr_name (a->dw_attr),
9943 sym);
9944 }
9945 else
9946 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9947 "%s", dwarf_attr_name (a->dw_attr));
9948 }
9949
9950 /* Output an attribute's index or value appropriately. */
9951
9952 static void
9953 output_attr_index_or_value (dw_attr_node *a)
9954 {
9955 const char *name = dwarf_attr_name (a->dw_attr);
9956
9957 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9958 {
9959 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9960 return;
9961 }
9962 switch (AT_class (a))
9963 {
9964 case dw_val_class_addr:
9965 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9966 break;
9967 case dw_val_class_high_pc:
9968 case dw_val_class_lbl_id:
9969 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9970 break;
9971 default:
9972 gcc_unreachable ();
9973 }
9974 }
9975
9976 /* Output a type signature. */
9977
9978 static inline void
9979 output_signature (const char *sig, const char *name)
9980 {
9981 int i;
9982
9983 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9984 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9985 }
9986
9987 /* Output a discriminant value. */
9988
9989 static inline void
9990 output_discr_value (dw_discr_value *discr_value, const char *name)
9991 {
9992 if (discr_value->pos)
9993 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9994 else
9995 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9996 }
9997
9998 /* Output the DIE and its attributes. Called recursively to generate
9999 the definitions of each child DIE. */
10000
10001 static void
10002 output_die (dw_die_ref die)
10003 {
10004 dw_attr_node *a;
10005 dw_die_ref c;
10006 unsigned long size;
10007 unsigned ix;
10008
10009 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10010 (unsigned long)die->die_offset,
10011 dwarf_tag_name (die->die_tag));
10012
10013 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10014 {
10015 const char *name = dwarf_attr_name (a->dw_attr);
10016
10017 switch (AT_class (a))
10018 {
10019 case dw_val_class_addr:
10020 output_attr_index_or_value (a);
10021 break;
10022
10023 case dw_val_class_offset:
10024 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10025 "%s", name);
10026 break;
10027
10028 case dw_val_class_range_list:
10029 output_range_list_offset (a);
10030 break;
10031
10032 case dw_val_class_loc:
10033 size = size_of_locs (AT_loc (a));
10034
10035 /* Output the block length for this list of location operations. */
10036 if (dwarf_version >= 4)
10037 dw2_asm_output_data_uleb128 (size, "%s", name);
10038 else
10039 dw2_asm_output_data (constant_size (size), size, "%s", name);
10040
10041 output_loc_sequence (AT_loc (a), -1);
10042 break;
10043
10044 case dw_val_class_const:
10045 /* ??? It would be slightly more efficient to use a scheme like the one
10046 used for unsigned constants below, but gdb 4.x does not sign
10047 extend. Gdb 5.x does sign extend. */
10048 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10049 break;
10050
10051 case dw_val_class_unsigned_const:
10052 {
10053 int csize = constant_size (AT_unsigned (a));
10054 if (dwarf_version == 3
10055 && a->dw_attr == DW_AT_data_member_location
10056 && csize >= 4)
10057 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10058 else
10059 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10060 }
10061 break;
10062
10063 case dw_val_class_const_implicit:
10064 if (flag_debug_asm)
10065 fprintf (asm_out_file, "\t\t\t%s %s ("
10066 HOST_WIDE_INT_PRINT_DEC ")\n",
10067 ASM_COMMENT_START, name, AT_int (a));
10068 break;
10069
10070 case dw_val_class_unsigned_const_implicit:
10071 if (flag_debug_asm)
10072 fprintf (asm_out_file, "\t\t\t%s %s ("
10073 HOST_WIDE_INT_PRINT_HEX ")\n",
10074 ASM_COMMENT_START, name, AT_unsigned (a));
10075 break;
10076
10077 case dw_val_class_const_double:
10078 {
10079 unsigned HOST_WIDE_INT first, second;
10080
10081 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10082 dw2_asm_output_data (1,
10083 HOST_BITS_PER_DOUBLE_INT
10084 / HOST_BITS_PER_CHAR,
10085 NULL);
10086
10087 if (WORDS_BIG_ENDIAN)
10088 {
10089 first = a->dw_attr_val.v.val_double.high;
10090 second = a->dw_attr_val.v.val_double.low;
10091 }
10092 else
10093 {
10094 first = a->dw_attr_val.v.val_double.low;
10095 second = a->dw_attr_val.v.val_double.high;
10096 }
10097
10098 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10099 first, "%s", name);
10100 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10101 second, NULL);
10102 }
10103 break;
10104
10105 case dw_val_class_wide_int:
10106 {
10107 int i;
10108 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10109 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10110 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10111 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10112 * l, NULL);
10113
10114 if (WORDS_BIG_ENDIAN)
10115 for (i = len - 1; i >= 0; --i)
10116 {
10117 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10118 "%s", name);
10119 name = "";
10120 }
10121 else
10122 for (i = 0; i < len; ++i)
10123 {
10124 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10125 "%s", name);
10126 name = "";
10127 }
10128 }
10129 break;
10130
10131 case dw_val_class_vec:
10132 {
10133 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10134 unsigned int len = a->dw_attr_val.v.val_vec.length;
10135 unsigned int i;
10136 unsigned char *p;
10137
10138 dw2_asm_output_data (constant_size (len * elt_size),
10139 len * elt_size, "%s", name);
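/* Elements wider than a HOST_WIDE_INT are split and emitted as two
half-sized words each.  */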
10140 if (elt_size > sizeof (HOST_WIDE_INT))
10141 {
10142 elt_size /= 2;
10143 len *= 2;
10144 }
10145 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10146 i < len;
10147 i++, p += elt_size)
10148 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10149 "fp or vector constant word %u", i);
10150 break;
10151 }
10152
10153 case dw_val_class_flag:
10154 if (dwarf_version >= 4)
10155 {
10156 /* Currently all add_AT_flag calls pass in 1 as last argument,
10157 so DW_FORM_flag_present can be used. If that ever changes,
10158 we'll need to use DW_FORM_flag and have some optimization
10159 in build_abbrev_table that will change those to
10160 DW_FORM_flag_present if it is set to 1 in all DIEs using
10161 the same abbrev entry. */
10162 gcc_assert (AT_flag (a) == 1);
10163 if (flag_debug_asm)
10164 fprintf (asm_out_file, "\t\t\t%s %s\n",
10165 ASM_COMMENT_START, name);
10166 break;
10167 }
10168 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10169 break;
10170
10171 case dw_val_class_loc_list:
10172 output_loc_list_offset (a);
10173 break;
10174
10175 case dw_val_class_die_ref:
10176 if (AT_ref_external (a))
10177 {
10178 if (AT_ref (a)->comdat_type_p)
10179 {
10180 comdat_type_node *type_node
10181 = AT_ref (a)->die_id.die_type_node;
10182
10183 gcc_assert (type_node);
10184 output_signature (type_node->signature, name);
10185 }
10186 else
10187 {
10188 const char *sym = AT_ref (a)->die_id.die_symbol;
10189 int size;
10190
10191 gcc_assert (sym);
10192 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10193 length, whereas in DWARF3 it's always sized as an
10194 offset. */
10195 if (dwarf_version == 2)
10196 size = DWARF2_ADDR_SIZE;
10197 else
10198 size = DWARF_OFFSET_SIZE;
10199 /* ??? We cannot unconditionally output die_offset if
10200 non-zero - others might create references to those
10201 DIEs via symbols.
10202 And we do not clear its DIE offset after outputting it
10203 (and the label refers to the actual DIEs, not the
10204 DWARF CU unit header, which is the case in which using
10205 label + offset would be the correct thing to do).
10206 ??? This is the reason for the with_offset flag. */
10207 if (AT_ref (a)->with_offset)
10208 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10209 debug_info_section, "%s", name);
10210 else
10211 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10212 name);
10213 }
10214 }
10215 else
10216 {
10217 gcc_assert (AT_ref (a)->die_offset);
10218 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10219 "%s", name);
10220 }
10221 break;
10222
10223 case dw_val_class_fde_ref:
10224 {
10225 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10226
10227 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10228 a->dw_attr_val.v.val_fde_index * 2);
10229 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10230 "%s", name);
10231 }
10232 break;
10233
10234 case dw_val_class_vms_delta:
10235 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10236 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10237 AT_vms_delta2 (a), AT_vms_delta1 (a),
10238 "%s", name);
10239 #else
10240 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10241 AT_vms_delta2 (a), AT_vms_delta1 (a),
10242 "%s", name);
10243 #endif
10244 break;
10245
10246 case dw_val_class_lbl_id:
10247 output_attr_index_or_value (a);
10248 break;
10249
10250 case dw_val_class_lineptr:
10251 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10252 debug_line_section, "%s", name);
10253 break;
10254
10255 case dw_val_class_macptr:
10256 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10257 debug_macinfo_section, "%s", name);
10258 break;
10259
10260 case dw_val_class_loclistsptr:
10261 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10262 debug_loc_section, "%s", name);
10263 break;
10264
10265 case dw_val_class_str:
10266 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10267 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10268 a->dw_attr_val.v.val_str->label,
10269 debug_str_section,
10270 "%s: \"%s\"", name, AT_string (a));
10271 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10272 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10273 a->dw_attr_val.v.val_str->label,
10274 debug_line_str_section,
10275 "%s: \"%s\"", name, AT_string (a));
10276 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10277 dw2_asm_output_data_uleb128 (AT_index (a),
10278 "%s: \"%s\"", name, AT_string (a));
10279 else
10280 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10281 break;
10282
10283 case dw_val_class_file:
10284 {
10285 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10286
10287 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10288 a->dw_attr_val.v.val_file->filename);
10289 break;
10290 }
10291
10292 case dw_val_class_file_implicit:
10293 if (flag_debug_asm)
10294 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10295 ASM_COMMENT_START, name,
10296 maybe_emit_file (a->dw_attr_val.v.val_file),
10297 a->dw_attr_val.v.val_file->filename);
10298 break;
10299
10300 case dw_val_class_data8:
10301 {
10302 int i;
10303
10304 for (i = 0; i < 8; i++)
10305 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10306 i == 0 ? "%s" : NULL, name);
10307 break;
10308 }
10309
10310 case dw_val_class_high_pc:
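/* DW_AT_high_pc is emitted as a delta from the DIE's DW_AT_low_pc.  */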
10311 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10312 get_AT_low_pc (die), "DW_AT_high_pc");
10313 break;
10314
10315 case dw_val_class_discr_value:
10316 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10317 break;
10318
10319 case dw_val_class_discr_list:
10320 {
10321 dw_discr_list_ref list = AT_discr_list (a);
10322 const int size = size_of_discr_list (list);
10323
10324 /* This is a block, so output its length first. */
10325 dw2_asm_output_data (constant_size (size), size,
10326 "%s: block size", name);
10327
10328 for (; list != NULL; list = list->dw_discr_next)
10329 {
10330 /* One byte for the discriminant value descriptor, and then as
10331 many LEB128 numbers as required. */
10332 if (list->dw_discr_range)
10333 dw2_asm_output_data (1, DW_DSC_range,
10334 "%s: DW_DSC_range", name);
10335 else
10336 dw2_asm_output_data (1, DW_DSC_label,
10337 "%s: DW_DSC_label", name);
10338
10339 output_discr_value (&list->dw_discr_lower_bound, name);
10340 if (list->dw_discr_range)
10341 output_discr_value (&list->dw_discr_upper_bound, name);
10342 }
10343 break;
10344 }
10345
10346 default:
10347 gcc_unreachable ();
10348 }
10349 }
10350
10351 FOR_EACH_CHILD (die, c, output_die (c));
10352
10353 /* Add null byte to terminate sibling list. */
10354 if (die->die_child != NULL)
10355 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10356 (unsigned long) die->die_offset);
10357 }
10358
10359 /* Output the compilation unit that appears at the beginning of the
10360 .debug_info section, and precedes the DIE descriptions. */
10361
10362 static void
10363 output_compilation_unit_header (enum dwarf_unit_type ut)
10364 {
10365 if (!XCOFF_DEBUGGING_INFO)
10366 {
10367 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10368 dw2_asm_output_data (4, 0xffffffff,
10369 "Initial length escape value indicating 64-bit DWARF extension");
10370 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10371 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10372 "Length of Compilation Unit Info");
10373 }
10374
10375 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10376 if (dwarf_version >= 5)
10377 {
10378 const char *name;
10379 switch (ut)
10380 {
10381 case DW_UT_compile: name = "DW_UT_compile"; break;
10382 case DW_UT_type: name = "DW_UT_type"; break;
10383 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10384 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10385 default: gcc_unreachable ();
10386 }
10387 dw2_asm_output_data (1, ut, "%s", name);
10388 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10389 }
10390 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10391 debug_abbrev_section,
10392 "Offset Into Abbrev. Section");
10393 if (dwarf_version < 5)
10394 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10395 }
10396
10397 /* Output the compilation unit DIE and its children. */
10398
10399 static void
10400 output_comp_unit (dw_die_ref die, int output_if_empty,
10401 const unsigned char *dwo_id)
10402 {
10403 const char *secname, *oldsym;
10404 char *tmp;
10405
10406 /* Unless we are outputting the main CU, we may throw away empty ones. */
10407 if (!output_if_empty && die->die_child == NULL)
10408 return;
10409
10410 /* Even if there are no children of this DIE, we must output the information
10411 about the compilation unit. Otherwise, on an empty translation unit, we
10412 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10413 will then complain when examining the file. First mark all the DIEs in
10414 this CU so we know which get local refs. */
10415 mark_dies (die);
10416
10417 external_ref_hash_type *extern_map = optimize_external_refs (die);
10418
10419 /* For now, optimize only the main CU; in order to optimize the rest
10420 we'd need to see all of them earlier. Leave the rest for post-linking
10421 tools like DWZ. */
10422 if (die == comp_unit_die ())
10423 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10424
10425 build_abbrev_table (die, extern_map);
10426
10427 optimize_abbrev_table ();
10428
10429 delete extern_map;
10430
10431 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10432 next_die_offset = (dwo_id
10433 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10434 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10435 calc_die_sizes (die);
10436
10437 oldsym = die->die_id.die_symbol;
10438 if (oldsym && die->comdat_type_p)
10439 {
10440 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10441
10442 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10443 secname = tmp;
10444 die->die_id.die_symbol = NULL;
10445 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10446 }
10447 else
10448 {
10449 switch_to_section (debug_info_section);
10450 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10451 info_section_emitted = true;
10452 }
10453
10454 /* For LTO cross unit DIE refs we want a symbol on the start of the
10455 debuginfo section, not on the CU DIE. */
10456 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10457 {
10458 /* ??? No way to get visibility assembled without a decl. */
10459 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10460 get_identifier (oldsym), char_type_node);
10461 TREE_PUBLIC (decl) = true;
10462 TREE_STATIC (decl) = true;
10463 DECL_ARTIFICIAL (decl) = true;
10464 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10465 DECL_VISIBILITY_SPECIFIED (decl) = true;
10466 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10467 #ifdef ASM_WEAKEN_LABEL
10468 /* We prefer a .weak because that handles duplicates from duplicate
10469 archive members in a graceful way. */
10470 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10471 #else
10472 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10473 #endif
10474 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10475 }
10476
10477 /* Output debugging information. */
10478 output_compilation_unit_header (dwo_id
10479 ? DW_UT_split_compile : DW_UT_compile);
10480 if (dwarf_version >= 5)
10481 {
10482 if (dwo_id != NULL)
10483 for (int i = 0; i < 8; i++)
10484 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10485 }
10486 output_die (die);
10487
10488 /* Leave the marks on the main CU, so we can check them in
10489 output_pubnames. */
10490 if (oldsym)
10491 {
10492 unmark_dies (die);
10493 die->die_id.die_symbol = oldsym;
10494 }
10495 }
10496
10497 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10498 and .debug_pubtypes. This is configured per-target, but can be
10499 overridden by the -gpubnames or -gno-pubnames options. */
10500
10501 static inline bool
10502 want_pubnames (void)
10503 {
10504 if (debug_info_level <= DINFO_LEVEL_TERSE)
10505 return false;
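/* Any value other than -1 means -gpubnames or -gno-pubnames was given
explicitly.  */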
10506 if (debug_generate_pub_sections != -1)
10507 return debug_generate_pub_sections;
10508 return targetm.want_debug_pub_sections;
10509 }
10510
10511 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10512
10513 static void
10514 add_AT_pubnames (dw_die_ref die)
10515 {
10516 if (want_pubnames ())
10517 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10518 }
10519
10520 /* Add a string attribute value to a skeleton DIE. */
10521
10522 static inline void
10523 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10524 const char *str)
10525 {
10526 dw_attr_node attr;
10527 struct indirect_string_node *node;
10528
10529 if (! skeleton_debug_str_hash)
10530 skeleton_debug_str_hash
10531 = hash_table<indirect_string_hasher>::create_ggc (10);
10532
10533 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10534 find_string_form (node);
10535 if (node->form == DW_FORM_GNU_str_index)
10536 node->form = DW_FORM_strp;
10537
10538 attr.dw_attr = attr_kind;
10539 attr.dw_attr_val.val_class = dw_val_class_str;
10540 attr.dw_attr_val.val_entry = NULL;
10541 attr.dw_attr_val.v.val_str = node;
10542 add_dwarf_attr (die, &attr);
10543 }
10544
10545 /* Helper function to generate top-level dies for skeleton debug_info and
10546 debug_types. */
10547
10548 static void
10549 add_top_level_skeleton_die_attrs (dw_die_ref die)
10550 {
10551 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10552 const char *comp_dir = comp_dir_string ();
10553
10554 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10555 if (comp_dir != NULL)
10556 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10557 add_AT_pubnames (die);
10558 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10559 }
10560
10561 /* Output skeleton debug sections that point to the dwo file. */
10562
10563 static void
10564 output_skeleton_debug_sections (dw_die_ref comp_unit,
10565 const unsigned char *dwo_id)
10566 {
10567 /* These attributes will be found in the full debug_info section. */
10568 remove_AT (comp_unit, DW_AT_producer);
10569 remove_AT (comp_unit, DW_AT_language);
10570
10571 switch_to_section (debug_skeleton_info_section);
10572 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10573
10574 /* Produce the skeleton compilation-unit header. This one differs enough from
10575 a normal CU header that it's better not to call
10576 output_compilation_unit_header. */
10577 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10578 dw2_asm_output_data (4, 0xffffffff,
10579 "Initial length escape value indicating 64-bit "
10580 "DWARF extension");
10581
10582 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10583 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10584 - DWARF_INITIAL_LENGTH_SIZE
10585 + size_of_die (comp_unit),
10586 "Length of Compilation Unit Info");
10587 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10588 if (dwarf_version >= 5)
10589 {
10590 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10591 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10592 }
10593 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10594 debug_skeleton_abbrev_section,
10595 "Offset Into Abbrev. Section");
10596 if (dwarf_version < 5)
10597 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10598 else
10599 for (int i = 0; i < 8; i++)
10600 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10601
10602 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10603 output_die (comp_unit);
10604
10605 /* Build the skeleton debug_abbrev section. */
10606 switch_to_section (debug_skeleton_abbrev_section);
10607 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10608
10609 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10610
10611 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10612 }
10613
10614 /* Output a comdat type unit DIE and its children. */
10615
10616 static void
10617 output_comdat_type_unit (comdat_type_node *node)
10618 {
10619 const char *secname;
10620 char *tmp;
10621 int i;
10622 #if defined (OBJECT_FORMAT_ELF)
10623 tree comdat_key;
10624 #endif
10625
10626 /* First mark all the DIEs in this CU so we know which get local refs. */
10627 mark_dies (node->root_die);
10628
10629 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10630
10631 build_abbrev_table (node->root_die, extern_map);
10632
10633 delete extern_map;
10634 extern_map = NULL;
10635
10636 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10637 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10638 calc_die_sizes (node->root_die);
10639
10640 #if defined (OBJECT_FORMAT_ELF)
10641 if (dwarf_version >= 5)
10642 {
10643 if (!dwarf_split_debug_info)
10644 secname = ".debug_info";
10645 else
10646 secname = ".debug_info.dwo";
10647 }
10648 else if (!dwarf_split_debug_info)
10649 secname = ".debug_types";
10650 else
10651 secname = ".debug_types.dwo";
10652
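/* Build the comdat group key from the type signature, e.g. "wi."
(or "wt." before DWARF 5) followed by the signature bytes in hex, so
the linker can discard duplicate type units.  */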
10653 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10654 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10655 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10656 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10657 comdat_key = get_identifier (tmp);
10658 targetm.asm_out.named_section (secname,
10659 SECTION_DEBUG | SECTION_LINKONCE,
10660 comdat_key);
10661 #else
10662 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10663 sprintf (tmp, (dwarf_version >= 5
10664 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10665 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10666 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10667 secname = tmp;
10668 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10669 #endif
10670
10671 /* Output debugging information. */
10672 output_compilation_unit_header (dwarf_split_debug_info
10673 ? DW_UT_split_type : DW_UT_type);
10674 output_signature (node->signature, "Type Signature");
10675 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10676 "Offset to Type DIE");
10677 output_die (node->root_die);
10678
10679 unmark_dies (node->root_die);
10680 }
10681
10682 /* Return the DWARF2/3 pubname associated with a decl. */
10683
10684 static const char *
10685 dwarf2_name (tree decl, int scope)
10686 {
10687 if (DECL_NAMELESS (decl))
10688 return NULL;
10689 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10690 }
10691
10692 /* Add a new entry to .debug_pubnames if appropriate. */
10693
10694 static void
10695 add_pubname_string (const char *str, dw_die_ref die)
10696 {
10697 pubname_entry e;
10698
10699 e.die = die;
10700 e.name = xstrdup (str);
10701 vec_safe_push (pubname_table, e);
10702 }
10703
10704 static void
10705 add_pubname (tree decl, dw_die_ref die)
10706 {
10707 if (!want_pubnames ())
10708 return;
10709
10710 /* Don't add items to the table when we expect that the consumer will have
10711 just read the enclosing die. For example, if the consumer is looking at a
10712 class_member, it will either be inside the class already, or will have just
10713 looked up the class to find the member. Either way, searching the class is
10714 faster than searching the index. */
10715 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10716 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10717 {
10718 const char *name = dwarf2_name (decl, 1);
10719
10720 if (name)
10721 add_pubname_string (name, die);
10722 }
10723 }
10724
10725 /* Add an enumerator to the pubnames section. */
10726
10727 static void
10728 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10729 {
10730 pubname_entry e;
10731
10732 gcc_assert (scope_name);
10733 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10734 e.die = die;
10735 vec_safe_push (pubname_table, e);
10736 }
10737
10738 /* Add a new entry to .debug_pubtypes if appropriate. */
10739
10740 static void
10741 add_pubtype (tree decl, dw_die_ref die)
10742 {
10743 pubname_entry e;
10744
10745 if (!want_pubnames ())
10746 return;
10747
10748 if ((TREE_PUBLIC (decl)
10749 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10750 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10751 {
10752 tree scope = NULL;
10753 const char *scope_name = "";
10754 const char *sep = is_cxx () ? "::" : ".";
10755 const char *name;
10756
10757 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10758 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10759 {
10760 scope_name = lang_hooks.dwarf_name (scope, 1);
10761 if (scope_name != NULL && scope_name[0] != '\0')
10762 scope_name = concat (scope_name, sep, NULL);
10763 else
10764 scope_name = "";
10765 }
10766
10767 if (TYPE_P (decl))
10768 name = type_tag (decl);
10769 else
10770 name = lang_hooks.dwarf_name (decl, 1);
10771
10772 /* If we don't have a name for the type, there's no point in adding
10773 it to the table. */
10774 if (name != NULL && name[0] != '\0')
10775 {
10776 e.die = die;
10777 e.name = concat (scope_name, name, NULL);
10778 vec_safe_push (pubtype_table, e);
10779 }
10780
10781 /* Although it might be more consistent to add the pubinfo for the
10782 enumerators as their dies are created, they should only be added if the
10783 enum type meets the criteria above. So rather than re-check the parent
10784 enum type whenever an enumerator die is created, just output them all
10785 here. This isn't protected by the name conditional because anonymous
10786 enums don't have names. */
10787 if (die->die_tag == DW_TAG_enumeration_type)
10788 {
10789 dw_die_ref c;
10790
10791 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10792 }
10793 }
10794 }
10795
10796 /* Output a single entry in the pubnames table. */
10797
10798 static void
10799 output_pubname (dw_offset die_offset, pubname_entry *entry)
10800 {
10801 dw_die_ref die = entry->die;
10802 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10803
10804 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10805
10806 if (debug_generate_pub_sections == 2)
10807 {
10808 /* This logic follows gdb's method for determining the value of the flag
10809 byte. */
10810 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10811 switch (die->die_tag)
10812 {
10813 case DW_TAG_typedef:
10814 case DW_TAG_base_type:
10815 case DW_TAG_subrange_type:
10816 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10817 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10818 break;
10819 case DW_TAG_enumerator:
10820 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10821 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10822 if (!is_cxx ())
10823 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10824 break;
10825 case DW_TAG_subprogram:
10826 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10827 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10828 if (!is_ada ())
10829 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10830 break;
10831 case DW_TAG_constant:
10832 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10833 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10834 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10835 break;
10836 case DW_TAG_variable:
10837 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10838 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10839 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10840 break;
10841 case DW_TAG_namespace:
10842 case DW_TAG_imported_declaration:
10843 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10844 break;
10845 case DW_TAG_class_type:
10846 case DW_TAG_interface_type:
10847 case DW_TAG_structure_type:
10848 case DW_TAG_union_type:
10849 case DW_TAG_enumeration_type:
10850 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10851 if (!is_cxx ())
10852 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10853 break;
10854 default:
10855 /* An unusual tag. Leave the flag-byte empty. */
10856 break;
10857 }
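/* The kind and static bits set above live above the GDB_INDEX_CU_BITSIZE
   CU-index bits, so shifting FLAGS down by that amount yields the single
   flag byte that is emitted. */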
10858 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10859 "GDB-index flags");
10860 }
10861
10862 dw2_asm_output_nstring (entry->name, -1, "external name");
10863 }
10864
10865
10866 /* Output the public names table used to speed up access to externally
10867 visible names; or the public types table used to find type definitions. */
10868
10869 static void
10870 output_pubnames (vec<pubname_entry, va_gc> *names)
10871 {
10872 unsigned i;
10873 unsigned long pubnames_length = size_of_pubnames (names);
10874 pubname_entry *pub;
10875
10876 if (!XCOFF_DEBUGGING_INFO)
10877 {
10878 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10879 dw2_asm_output_data (4, 0xffffffff,
10880 "Initial length escape value indicating 64-bit DWARF extension");
10881 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10882 "Pub Info Length");
10883 }
10884
10885 /* Version number for pubnames/pubtypes is independent of dwarf version. */
10886 dw2_asm_output_data (2, 2, "DWARF Version");
10887
10888 if (dwarf_split_debug_info)
10889 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10890 debug_skeleton_info_section,
10891 "Offset of Compilation Unit Info");
10892 else
10893 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10894 debug_info_section,
10895 "Offset of Compilation Unit Info");
10896 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10897 "Compilation Unit Length");
10898
10899 FOR_EACH_VEC_ELT (*names, i, pub)
10900 {
10901 if (include_pubname_in_output (names, pub))
10902 {
10903 dw_offset die_offset = pub->die->die_offset;
10904
10905 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10906 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10907 gcc_assert (pub->die->die_mark);
10908
10909 /* If we're putting types in their own .debug_types sections,
10910 the .debug_pubtypes table will still point to the compile
10911 unit (not the type unit), so we want to use the offset of
10912 the skeleton DIE (if there is one). */
10913 if (pub->die->comdat_type_p && names == pubtype_table)
10914 {
10915 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10916
10917 if (type_node != NULL)
10918 die_offset = (type_node->skeleton_die != NULL
10919 ? type_node->skeleton_die->die_offset
10920 : comp_unit_die ()->die_offset);
10921 }
10922
10923 output_pubname (die_offset, pub);
10924 }
10925 }
10926
10927 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10928 }
10929
10930 /* Output public names and types tables if necessary. */
10931
10932 static void
10933 output_pubtables (void)
10934 {
10935 if (!want_pubnames () || !info_section_emitted)
10936 return;
10937
10938 switch_to_section (debug_pubnames_section);
10939 output_pubnames (pubname_table);
10940 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10941 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10942 simply won't look for the section. */
10943 switch_to_section (debug_pubtypes_section);
10944 output_pubnames (pubtype_table);
10945 }
10946
10947
10948 /* Output the information that goes into the .debug_aranges table.
10949 Namely, define the beginning and ending address range of the
10950 text section generated for this compilation unit. */
10951
10952 static void
10953 output_aranges (void)
10954 {
10955 unsigned i;
10956 unsigned long aranges_length = size_of_aranges ();
10957
10958 if (!XCOFF_DEBUGGING_INFO)
10959 {
10960 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10961 dw2_asm_output_data (4, 0xffffffff,
10962 "Initial length escape value indicating 64-bit DWARF extension");
10963 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10964 "Length of Address Ranges Info");
10965 }
10966
10967 /* Version number for aranges is still 2, even up to DWARF5. */
10968 dw2_asm_output_data (2, 2, "DWARF Version");
10969 if (dwarf_split_debug_info)
10970 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10971 debug_skeleton_info_section,
10972 "Offset of Compilation Unit Info");
10973 else
10974 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10975 debug_info_section,
10976 "Offset of Compilation Unit Info");
10977 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10978 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10979
10980 /* We need to align to twice the pointer size here. */
10981 if (DWARF_ARANGES_PAD_SIZE)
10982 {
10983 /* Pad using 2-byte words so that the padding is correct for any
10984 pointer size. */
10985 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10986 2 * DWARF2_ADDR_SIZE);
10987 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10988 dw2_asm_output_data (2, 0, NULL);
10989 }
10990
10991 /* It is necessary not to output these entries if the sections were
10992 not used; in that case the length will be 0 and the address may
10993 end up as 0 if the section is discarded by ld --gc-sections,
10994 leaving an invalid (0, 0) entry that can be confused with the
10995 terminator. */
10996 if (text_section_used)
10997 {
10998 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10999 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11000 text_section_label, "Length");
11001 }
11002 if (cold_text_section_used)
11003 {
11004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11005 "Address");
11006 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11007 cold_text_section_label, "Length");
11008 }
11009
11010 if (have_multiple_function_sections)
11011 {
11012 unsigned fde_idx;
11013 dw_fde_ref fde;
11014
11015 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11016 {
11017 if (DECL_IGNORED_P (fde->decl))
11018 continue;
11019 if (!fde->in_std_section)
11020 {
11021 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11022 "Address");
11023 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11024 fde->dw_fde_begin, "Length");
11025 }
11026 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11027 {
11028 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11029 "Address");
11030 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11031 fde->dw_fde_second_begin, "Length");
11032 }
11033 }
11034 }
11035
11036 /* Output the terminator words. */
11037 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11038 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11039 }
11040
11041 /* Add a new entry to .debug_ranges. Return its index into
11042 the ranges_table vector. */
11043
11044 static unsigned int
11045 add_ranges_num (int num, bool maybe_new_sec)
11046 {
11047 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11048 vec_safe_push (ranges_table, r);
11049 return vec_safe_length (ranges_table) - 1;
11050 }
11051
11052 /* Add a new entry to .debug_ranges corresponding to a block, or a
11053 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11054 this entry might be in a different section from the previous range. */
11055
11056 static unsigned int
11057 add_ranges (const_tree block, bool maybe_new_sec)
11058 {
11059 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11060 }
11061
11062 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11063 chain, or a middle entry of a chain that will be directly referred to. */
11064
11065 static void
11066 note_rnglist_head (unsigned int offset)
11067 {
11068 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11069 return;
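/* Generate a label that output_rnglists will emit at the head of this
   range list, so the list can be referred to by its offset. */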
11070 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11071 }
11072
11073 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11074 When using dwarf_split_debug_info, address attributes in DIEs destined
11075 for the final executable should be direct references; setting the
11076 parameter FORCE_DIRECT ensures this behavior. */
11077
11078 static void
11079 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11080 bool *added, bool force_direct)
11081 {
11082 unsigned int in_use = vec_safe_length (ranges_by_label);
11083 unsigned int offset;
11084 dw_ranges_by_label rbl = { begin, end };
11085 vec_safe_push (ranges_by_label, rbl);
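/* Store the ranges_by_label index as a negative block number so that
   output_ranges and output_rnglists can tell label pairs apart from
   BLOCK numbers. */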
11086 offset = add_ranges_num (-(int)in_use - 1, true);
11087 if (!*added)
11088 {
11089 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11090 *added = true;
11091 note_rnglist_head (offset);
11092 }
11093 }
11094
11095 /* Emit .debug_ranges section. */
11096
11097 static void
11098 output_ranges (void)
11099 {
11100 unsigned i;
11101 static const char *const start_fmt = "Offset %#x";
11102 const char *fmt = start_fmt;
11103 dw_ranges *r;
11104
11105 switch_to_section (debug_ranges_section);
11106 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11107 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11108 {
11109 int block_num = r->num;
11110
11111 if (block_num > 0)
11112 {
11113 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11114 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11115
11116 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11117 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11118
11119 /* If all code is in the text section, then the compilation
11120 unit base address defaults to DW_AT_low_pc, which is the
11121 base of the text section. */
11122 if (!have_multiple_function_sections)
11123 {
11124 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11125 text_section_label,
11126 fmt, i * 2 * DWARF2_ADDR_SIZE);
11127 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11128 text_section_label, NULL);
11129 }
11130
11131 /* Otherwise, the compilation unit base address is zero,
11132 which allows us to use absolute addresses, and not worry
11133 about whether the target supports cross-section
11134 arithmetic. */
11135 else
11136 {
11137 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11138 fmt, i * 2 * DWARF2_ADDR_SIZE);
11139 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11140 }
11141
11142 fmt = NULL;
11143 }
11144
11145 /* Negative block_num stands for an index into ranges_by_label. */
11146 else if (block_num < 0)
11147 {
11148 int lab_idx = - block_num - 1;
11149
11150 if (!have_multiple_function_sections)
11151 {
11152 gcc_unreachable ();
11153 #if 0
11154 /* If we ever use add_ranges_by_labels () for a single
11155 function section, all we have to do is to take out
11156 the #if 0 above. */
11157 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11158 (*ranges_by_label)[lab_idx].begin,
11159 text_section_label,
11160 fmt, i * 2 * DWARF2_ADDR_SIZE);
11161 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11162 (*ranges_by_label)[lab_idx].end,
11163 text_section_label, NULL);
11164 #endif
11165 }
11166 else
11167 {
11168 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11169 (*ranges_by_label)[lab_idx].begin,
11170 fmt, i * 2 * DWARF2_ADDR_SIZE);
11171 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11172 (*ranges_by_label)[lab_idx].end,
11173 NULL);
11174 }
11175 }
11176 else
11177 {
11178 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11179 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11180 fmt = start_fmt;
11181 }
11182 }
11183 }
11184
11185 /* Non-zero if .debug_line_str should be used for .debug_line section
11186 strings or strings that are likely shareable with those. */
11187 #define DWARF5_USE_DEBUG_LINE_STR \
11188 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11189 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11190 /* FIXME: there is no .debug_line_str.dwo section, \
11191 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11192 && !dwarf_split_debug_info)
11193
11194 /* Assign .debug_rnglists indexes. */
11195
11196 static void
11197 index_rnglists (void)
11198 {
11199 unsigned i;
11200 dw_ranges *r;
11201
11202 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11203 if (r->label)
11204 r->idx = rnglist_idx++;
11205 }
11206
11207 /* Emit .debug_rnglists section. */
11208
11209 static void
11210 output_rnglists (unsigned generation)
11211 {
11212 unsigned i;
11213 dw_ranges *r;
11214 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11215 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11216 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11217
11218 switch_to_section (debug_ranges_section);
11219 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11220 /* There are up to 4 unique ranges labels per generation.
11221 See also init_sections_and_labels. */
11222 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11223 2 + generation * 4);
11224 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11225 3 + generation * 4);
11226 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11227 dw2_asm_output_data (4, 0xffffffff,
11228 "Initial length escape value indicating "
11229 "64-bit DWARF extension");
11230 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11231 "Length of Range Lists");
11232 ASM_OUTPUT_LABEL (asm_out_file, l1);
11233 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11234 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11235 dw2_asm_output_data (1, 0, "Segment Size");
11236 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11237 about relocation sizes and primarily care about the size of .debug*
11238 sections in linked shared libraries and executables, then
11239 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11240 into it are usually larger than just DW_FORM_sec_offset offsets
11241 into the .debug_rnglists section. */
11242 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11243 "Offset Entry Count");
11244 if (dwarf_split_debug_info)
11245 {
11246 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11247 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11248 if (r->label)
11249 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11250 ranges_base_label, NULL);
11251 }
11252
11253 const char *lab = "";
11254 unsigned int len = vec_safe_length (ranges_table);
11255 const char *base = NULL;
11256 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11257 {
11258 int block_num = r->num;
11259
11260 if (r->label)
11261 {
11262 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11263 lab = r->label;
11264 }
11265 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11266 base = NULL;
11267 if (block_num > 0)
11268 {
11269 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11270 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11271
11272 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11273 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11274
11275 if (HAVE_AS_LEB128)
11276 {
11277 /* If all code is in the text section, then the compilation
11278 unit base address defaults to DW_AT_low_pc, which is the
11279 base of the text section. */
11280 if (!have_multiple_function_sections)
11281 {
11282 dw2_asm_output_data (1, DW_RLE_offset_pair,
11283 "DW_RLE_offset_pair (%s)", lab);
11284 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11285 "Range begin address (%s)", lab);
11286 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11287 "Range end address (%s)", lab);
11288 continue;
11289 }
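/* Look ahead: if the next entry continues in the same section, emit a
   DW_RLE_base_address entry here so that the following ranges can use
   the more compact DW_RLE_offset_pair form. */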
11290 if (base == NULL)
11291 {
11292 dw_ranges *r2 = NULL;
11293 if (i < len - 1)
11294 r2 = &(*ranges_table)[i + 1];
11295 if (r2
11296 && r2->num != 0
11297 && r2->label == NULL
11298 && !r2->maybe_new_sec)
11299 {
11300 dw2_asm_output_data (1, DW_RLE_base_address,
11301 "DW_RLE_base_address (%s)", lab);
11302 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11303 "Base address (%s)", lab);
11304 strcpy (basebuf, blabel);
11305 base = basebuf;
11306 }
11307 }
11308 if (base)
11309 {
11310 dw2_asm_output_data (1, DW_RLE_offset_pair,
11311 "DW_RLE_offset_pair (%s)", lab);
11312 dw2_asm_output_delta_uleb128 (blabel, base,
11313 "Range begin address (%s)", lab);
11314 dw2_asm_output_delta_uleb128 (elabel, base,
11315 "Range end address (%s)", lab);
11316 continue;
11317 }
11318 dw2_asm_output_data (1, DW_RLE_start_length,
11319 "DW_RLE_start_length (%s)", lab);
11320 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11321 "Range begin address (%s)", lab);
11322 dw2_asm_output_delta_uleb128 (elabel, blabel,
11323 "Range length (%s)", lab);
11324 }
11325 else
11326 {
11327 dw2_asm_output_data (1, DW_RLE_start_end,
11328 "DW_RLE_start_end (%s)", lab);
11329 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11330 "Range begin address (%s)", lab);
11331 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11332 "Range end address (%s)", lab);
11333 }
11334 }
11335
11336 /* Negative block_num stands for an index into ranges_by_label. */
11337 else if (block_num < 0)
11338 {
11339 int lab_idx = - block_num - 1;
11340 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11341 const char *elabel = (*ranges_by_label)[lab_idx].end;
11342
11343 if (!have_multiple_function_sections)
11344 gcc_unreachable ();
11345 if (HAVE_AS_LEB128)
11346 {
11347 dw2_asm_output_data (1, DW_RLE_start_length,
11348 "DW_RLE_start_length (%s)", lab);
11349 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11350 "Range begin address (%s)", lab);
11351 dw2_asm_output_delta_uleb128 (elabel, blabel,
11352 "Range length (%s)", lab);
11353 }
11354 else
11355 {
11356 dw2_asm_output_data (1, DW_RLE_start_end,
11357 "DW_RLE_start_end (%s)", lab);
11358 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11359 "Range begin address (%s)", lab);
11360 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11361 "Range end address (%s)", lab);
11362 }
11363 }
11364 else
11365 dw2_asm_output_data (1, DW_RLE_end_of_list,
11366 "DW_RLE_end_of_list (%s)", lab);
11367 }
11368 ASM_OUTPUT_LABEL (asm_out_file, l2);
11369 }
11370
11371 /* Data structure containing information about input files. */
11372 struct file_info
11373 {
11374 const char *path; /* Complete file name. */
11375 const char *fname; /* File name part. */
11376 int length; /* Length of entire string. */
11377 struct dwarf_file_data * file_idx; /* Index in input file table. */
11378 int dir_idx; /* Index in directory table. */
11379 };
11380
11381 /* Data structure containing information about directories with source
11382 files. */
11383 struct dir_info
11384 {
11385 const char *path; /* Path including directory name. */
11386 int length; /* Path length. */
11387 int prefix; /* Index of directory entry which is a prefix. */
11388 int count; /* Number of files in this directory. */
11389 int dir_idx; /* Index of directory used as base. */
11390 };
11391
11392 /* Callback function for file_info comparison. We sort by looking at
11393 the directories in the path. */
11394
11395 static int
11396 file_info_cmp (const void *p1, const void *p2)
11397 {
11398 const struct file_info *const s1 = (const struct file_info *) p1;
11399 const struct file_info *const s2 = (const struct file_info *) p2;
11400 const unsigned char *cp1;
11401 const unsigned char *cp2;
11402
11403 /* Take care of file names without directories. We need to return
11404 consistent values to qsort, since some implementations will get confused
11405 if we return the same value when the same operands are passed in opposite
11406 orders. So if neither has a directory, return 0, and otherwise return
11407 1 or -1 depending on which one has the directory. */
11408 if ((s1->path == s1->fname || s2->path == s2->fname))
11409 return (s2->path == s2->fname) - (s1->path == s1->fname);
11410
11411 cp1 = (const unsigned char *) s1->path;
11412 cp2 = (const unsigned char *) s2->path;
11413
11414 while (1)
11415 {
11416 ++cp1;
11417 ++cp2;
11418 /* Reached the end of either path's directory prefix? If so, handle as above. */
11419 if ((cp1 == (const unsigned char *) s1->fname)
11420 || (cp2 == (const unsigned char *) s2->fname))
11421 return ((cp2 == (const unsigned char *) s2->fname)
11422 - (cp1 == (const unsigned char *) s1->fname));
11423
11424 /* If the current characters differ, order by their difference. */
11425 else if (*cp1 != *cp2)
11426 return *cp1 - *cp2;
11427 }
11428 }
11429
11430 struct file_name_acquire_data
11431 {
11432 struct file_info *files;
11433 int used_files;
11434 int max_files;
11435 };
11436
11437 /* Traversal function for the hash table. */
11438
11439 int
11440 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11441 {
11442 struct dwarf_file_data *d = *slot;
11443 struct file_info *fi;
11444 const char *f;
11445
11446 gcc_assert (fnad->max_files >= d->emitted_number);
11447
11448 if (! d->emitted_number)
11449 return 1;
11450
11451 gcc_assert (fnad->max_files != fnad->used_files);
11452
11453 fi = fnad->files + fnad->used_files++;
11454
11455 /* Skip all leading "./". */
11456 f = d->filename;
11457 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11458 f += 2;
11459
11460 /* Create a new array entry. */
11461 fi->path = f;
11462 fi->length = strlen (f);
11463 fi->file_idx = d;
11464
11465 /* Search for the file name part. */
11466 f = strrchr (f, DIR_SEPARATOR);
11467 #if defined (DIR_SEPARATOR_2)
11468 {
11469 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11470
11471 if (g != NULL)
11472 {
11473 if (f == NULL || f < g)
11474 f = g;
11475 }
11476 }
11477 #endif
11478
11479 fi->fname = f == NULL ? fi->path : f + 1;
11480 return 1;
11481 }
11482
11483 /* Helper function for output_file_names. Emit a FORM-encoded
11484 string STR, with assembly comment start ENTRY_KIND and
11485 index IDX. */
11486
11487 static void
11488 output_line_string (enum dwarf_form form, const char *str,
11489 const char *entry_kind, unsigned int idx)
11490 {
11491 switch (form)
11492 {
11493 case DW_FORM_string:
11494 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11495 break;
11496 case DW_FORM_line_strp:
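/* Lazily create the .debug_line_str string table the first time an
   indirect line string is emitted. */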
11497 if (!debug_line_str_hash)
11498 debug_line_str_hash
11499 = hash_table<indirect_string_hasher>::create_ggc (10);
11500
11501 struct indirect_string_node *node;
11502 node = find_AT_string_in_table (str, debug_line_str_hash);
11503 set_indirect_string (node);
11504 node->form = form;
11505 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11506 debug_line_str_section, "%s: %#x: \"%s\"",
11507 entry_kind, 0, node->str);
11508 break;
11509 default:
11510 gcc_unreachable ();
11511 }
11512 }
11513
11514 /* Output the directory table and the file name table. We try to minimize
11515 the total amount of memory needed. A heuristic is used to avoid large
11516 slowdowns with many input files. */
11517
11518 static void
11519 output_file_names (void)
11520 {
11521 struct file_name_acquire_data fnad;
11522 int numfiles;
11523 struct file_info *files;
11524 struct dir_info *dirs;
11525 int *saved;
11526 int *savehere;
11527 int *backmap;
11528 int ndirs;
11529 int idx_offset;
11530 int i;
11531
11532 if (!last_emitted_file)
11533 {
11534 if (dwarf_version >= 5)
11535 {
11536 dw2_asm_output_data (1, 0, "Directory entry format count");
11537 dw2_asm_output_data_uleb128 (0, "Directories count");
11538 dw2_asm_output_data (1, 0, "File name entry format count");
11539 dw2_asm_output_data_uleb128 (0, "File names count");
11540 }
11541 else
11542 {
11543 dw2_asm_output_data (1, 0, "End directory table");
11544 dw2_asm_output_data (1, 0, "End file name table");
11545 }
11546 return;
11547 }
11548
11549 numfiles = last_emitted_file->emitted_number;
11550
11551 /* Allocate the various arrays we need. */
11552 files = XALLOCAVEC (struct file_info, numfiles);
11553 dirs = XALLOCAVEC (struct dir_info, numfiles);
11554
11555 fnad.files = files;
11556 fnad.used_files = 0;
11557 fnad.max_files = numfiles;
11558 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11559 gcc_assert (fnad.used_files == fnad.max_files);
11560
11561 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11562
11563 /* Find all the different directories used. */
11564 dirs[0].path = files[0].path;
11565 dirs[0].length = files[0].fname - files[0].path;
11566 dirs[0].prefix = -1;
11567 dirs[0].count = 1;
11568 dirs[0].dir_idx = 0;
11569 files[0].dir_idx = 0;
11570 ndirs = 1;
11571
11572 for (i = 1; i < numfiles; i++)
11573 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11574 && memcmp (dirs[ndirs - 1].path, files[i].path,
11575 dirs[ndirs - 1].length) == 0)
11576 {
11577 /* Same directory as last entry. */
11578 files[i].dir_idx = ndirs - 1;
11579 ++dirs[ndirs - 1].count;
11580 }
11581 else
11582 {
11583 int j;
11584
11585 /* This is a new directory. */
11586 dirs[ndirs].path = files[i].path;
11587 dirs[ndirs].length = files[i].fname - files[i].path;
11588 dirs[ndirs].count = 1;
11589 dirs[ndirs].dir_idx = ndirs;
11590 files[i].dir_idx = ndirs;
11591
11592 /* Search for a prefix. */
11593 dirs[ndirs].prefix = -1;
11594 for (j = 0; j < ndirs; j++)
11595 if (dirs[j].length < dirs[ndirs].length
11596 && dirs[j].length > 1
11597 && (dirs[ndirs].prefix == -1
11598 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11599 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11600 dirs[ndirs].prefix = j;
11601
11602 ++ndirs;
11603 }
11604
11605 /* Now to the actual work. We have to find a subset of the directories which
11606 allows expressing the file names using references to the directory table
11607 with the fewest characters. We do not do an exhaustive search, which
11608 would require checking every combination of every single possible
11609 prefix. Instead we use a heuristic which provides nearly optimal
11610 results in most cases and is never far off. */
11611 saved = XALLOCAVEC (int, ndirs);
11612 savehere = XALLOCAVEC (int, ndirs);
11613
11614 memset (saved, '\0', ndirs * sizeof (saved[0]));
11615 for (i = 0; i < ndirs; i++)
11616 {
11617 int j;
11618 int total;
11619
11620 /* We can always save some space for the current directory. But this
11621 does not mean it will be enough to justify adding the directory. */
11622 savehere[i] = dirs[i].length;
11623 total = (savehere[i] - saved[i]) * dirs[i].count;
11624
11625 for (j = i + 1; j < ndirs; j++)
11626 {
11627 savehere[j] = 0;
11628 if (saved[j] < dirs[i].length)
11629 {
11630 /* Determine whether the dirs[i] path is a prefix of the
11631 dirs[j] path. */
11632 int k;
11633
11634 k = dirs[j].prefix;
11635 while (k != -1 && k != (int) i)
11636 k = dirs[k].prefix;
11637
11638 if (k == (int) i)
11639 {
11640 /* Yes it is. We can possibly save some memory by
11641 writing the filenames in dirs[j] relative to
11642 dirs[i]. */
11643 savehere[j] = dirs[i].length;
11644 total += (savehere[j] - saved[j]) * dirs[j].count;
11645 }
11646 }
11647 }
11648
11649 /* Check whether we can save enough to justify adding the dirs[i]
11650 directory. */
11651 if (total > dirs[i].length + 1)
11652 {
11653 /* It's worthwhile adding. */
11654 for (j = i; j < ndirs; j++)
11655 if (savehere[j] > 0)
11656 {
11657 /* Remember how much we saved for this directory so far. */
11658 saved[j] = savehere[j];
11659
11660 /* Remember the prefix directory. */
11661 dirs[j].dir_idx = i;
11662 }
11663 }
11664 }
11665
11666 /* Emit the directory name table. */
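/* Directory index 0 is reserved for the compilation directory, so when
   the first collected directory is non-empty the emitted directory
   indexes are shifted up by one. */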
11667 idx_offset = dirs[0].length > 0 ? 1 : 0;
11668 enum dwarf_form str_form = DW_FORM_string;
11669 enum dwarf_form idx_form = DW_FORM_udata;
11670 if (dwarf_version >= 5)
11671 {
11672 const char *comp_dir = comp_dir_string ();
11673 if (comp_dir == NULL)
11674 comp_dir = "";
11675 dw2_asm_output_data (1, 1, "Directory entry format count");
11676 if (DWARF5_USE_DEBUG_LINE_STR)
11677 str_form = DW_FORM_line_strp;
11678 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11679 dw2_asm_output_data_uleb128 (str_form, "%s",
11680 get_DW_FORM_name (str_form));
11681 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11682 if (str_form == DW_FORM_string)
11683 {
11684 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11685 for (i = 1 - idx_offset; i < ndirs; i++)
11686 dw2_asm_output_nstring (dirs[i].path,
11687 dirs[i].length
11688 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11689 "Directory Entry: %#x", i + idx_offset);
11690 }
11691 else
11692 {
11693 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11694 for (i = 1 - idx_offset; i < ndirs; i++)
11695 {
11696 const char *str
11697 = ggc_alloc_string (dirs[i].path,
11698 dirs[i].length
11699 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11700 output_line_string (str_form, str, "Directory Entry",
11701 (unsigned) i + idx_offset);
11702 }
11703 }
11704 }
11705 else
11706 {
11707 for (i = 1 - idx_offset; i < ndirs; i++)
11708 dw2_asm_output_nstring (dirs[i].path,
11709 dirs[i].length
11710 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11711 "Directory Entry: %#x", i + idx_offset);
11712
11713 dw2_asm_output_data (1, 0, "End directory table");
11714 }
11715
11716 /* We have to emit them in the order of emitted_number since that's
11717 used in the debug info generation. To do this efficiently we
11718 generate a back-mapping of the indices first. */
11719 backmap = XALLOCAVEC (int, numfiles);
11720 for (i = 0; i < numfiles; i++)
11721 backmap[files[i].file_idx->emitted_number - 1] = i;
11722
11723 if (dwarf_version >= 5)
11724 {
11725 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11726 if (filename0 == NULL)
11727 filename0 = "";
11728 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11729 DW_FORM_data2. Choose one based on the number of directories
11730 and how much space they would occupy in each encoding.
11731 If we have at most 256 directories, all indexes fit into
11732 a single byte, so DW_FORM_data1 is the most compact (with at most
11733 128 directories, DW_FORM_udata would be just as compact, but no
11734 shorter and slower to decode). */
11735 if (ndirs + idx_offset <= 256)
11736 idx_form = DW_FORM_data1;
11737 /* If there are more than 65536 directories, we have to use
11738 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11739 Otherwise, compute how much space the indexes would occupy if all of
11740 them used DW_FORM_udata - SUM - and compare that to how large the
11741 DW_FORM_data2 encoding would be, and pick the more efficient one. */
11742 else if (ndirs + idx_offset <= 65536)
11743 {
11744 unsigned HOST_WIDE_INT sum = 1;
11745 for (i = 0; i < numfiles; i++)
11746 {
11747 int file_idx = backmap[i];
11748 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11749 sum += size_of_uleb128 (dir_idx);
11750 }
11751 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11752 idx_form = DW_FORM_data2;
11753 }
11754 #ifdef VMS_DEBUGGING_INFO
11755 dw2_asm_output_data (1, 4, "File name entry format count");
11756 #else
11757 dw2_asm_output_data (1, 2, "File name entry format count");
11758 #endif
11759 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11760 dw2_asm_output_data_uleb128 (str_form, "%s",
11761 get_DW_FORM_name (str_form));
11762 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11763 "DW_LNCT_directory_index");
11764 dw2_asm_output_data_uleb128 (idx_form, "%s",
11765 get_DW_FORM_name (idx_form));
11766 #ifdef VMS_DEBUGGING_INFO
11767 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11768 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11769 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11770 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11771 #endif
11772 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11773
11774 output_line_string (str_form, filename0, "File Entry", 0);
11775
11776 /* Include directory index. */
11777 if (idx_form != DW_FORM_udata)
11778 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11779 0, NULL);
11780 else
11781 dw2_asm_output_data_uleb128 (0, NULL);
11782
11783 #ifdef VMS_DEBUGGING_INFO
11784 dw2_asm_output_data_uleb128 (0, NULL);
11785 dw2_asm_output_data_uleb128 (0, NULL);
11786 #endif
11787 }
11788
11789 /* Now write all the file names. */
11790 for (i = 0; i < numfiles; i++)
11791 {
11792 int file_idx = backmap[i];
11793 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11794
11795 #ifdef VMS_DEBUGGING_INFO
11796 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11797
11798 /* Setting these fields can lead to debugger miscomparisons,
11799 but VMS Debug requires them to be set correctly. */
11800
11801 int ver;
11802 long long cdt;
11803 long siz;
11804 int maxfilelen = (strlen (files[file_idx].path)
11805 + dirs[dir_idx].length
11806 + MAX_VMS_VERSION_LEN + 1);
11807 char *filebuf = XALLOCAVEC (char, maxfilelen);
11808
11809 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11810 snprintf (filebuf, maxfilelen, "%s;%d",
11811 files[file_idx].path + dirs[dir_idx].length, ver);
11812
11813 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11814
11815 /* Include directory index. */
11816 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11817 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11818 dir_idx + idx_offset, NULL);
11819 else
11820 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11821
11822 /* Modification time. */
11823 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11824 &cdt, 0, 0, 0) == 0)
11825 ? cdt : 0, NULL);
11826
11827 /* File length in bytes. */
11828 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11829 0, &siz, 0, 0) == 0)
11830 ? siz : 0, NULL);
11831 #else
11832 output_line_string (str_form,
11833 files[file_idx].path + dirs[dir_idx].length,
11834 "File Entry", (unsigned) i + 1);
11835
11836 /* Include directory index. */
11837 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11838 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11839 dir_idx + idx_offset, NULL);
11840 else
11841 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11842
11843 if (dwarf_version >= 5)
11844 continue;
11845
11846 /* Modification time. */
11847 dw2_asm_output_data_uleb128 (0, NULL);
11848
11849 /* File length in bytes. */
11850 dw2_asm_output_data_uleb128 (0, NULL);
11851 #endif /* VMS_DEBUGGING_INFO */
11852 }
11853
11854 if (dwarf_version < 5)
11855 dw2_asm_output_data (1, 0, "End file name table");
11856 }
11857
11858
11859 /* Output one line number table into the .debug_line section. */
11860
11861 static void
11862 output_one_line_info_table (dw_line_info_table *table)
11863 {
11864 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11865 unsigned int current_line = 1;
11866 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11867 dw_line_info_entry *ent;
11868 size_t i;
11869
11870 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11871 {
11872 switch (ent->opcode)
11873 {
11874 case LI_set_address:
11875 /* ??? Unfortunately, we have little choice here currently, and
11876 must always use the most general form. GCC does not know the
11877 address delta itself, so we can't use DW_LNS_advance_pc. Many
11878 ports do have length attributes which will give an upper bound
11879 on the address range. We could perhaps use length attributes
11880 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11881 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11882
11883 /* This can handle any delta. This takes
11884 4+DWARF2_ADDR_SIZE bytes. */
11885 dw2_asm_output_data (1, 0, "set address %s", line_label);
11886 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11887 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11888 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11889 break;
11890
11891 case LI_set_line:
11892 if (ent->val == current_line)
11893 {
11894 /* We still need to start a new row, so output a copy insn. */
11895 dw2_asm_output_data (1, DW_LNS_copy,
11896 "copy line %u", current_line);
11897 }
11898 else
11899 {
11900 int line_offset = ent->val - current_line;
11901 int line_delta = line_offset - DWARF_LINE_BASE;
11902
11903 current_line = ent->val;
11904 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11905 {
11906 /* This can handle deltas from -10 to 234, using the current
11907 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11908 This takes 1 byte. */
11909 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11910 "line %u", current_line);
11911 }
11912 else
11913 {
11914 /* This can handle any delta. This takes at least 4 bytes,
11915 depending on the value being encoded. */
11916 dw2_asm_output_data (1, DW_LNS_advance_line,
11917 "advance to line %u", current_line);
11918 dw2_asm_output_data_sleb128 (line_offset, NULL);
11919 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11920 }
11921 }
11922 break;
11923
11924 case LI_set_file:
11925 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11926 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11927 break;
11928
11929 case LI_set_column:
11930 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11931 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11932 break;
11933
11934 case LI_negate_stmt:
11935 current_is_stmt = !current_is_stmt;
11936 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11937 "is_stmt %d", current_is_stmt);
11938 break;
11939
11940 case LI_set_prologue_end:
11941 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11942 "set prologue end");
11943 break;
11944
11945 case LI_set_epilogue_begin:
11946 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11947 "set epilogue begin");
11948 break;
11949
11950 case LI_set_discriminator:
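/* DW_LNE_set_discriminator is an extended opcode: a zero byte, the
   instruction length as a ULEB128, then the opcode and its ULEB128
   operand. */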
11951 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11952 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11953 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11954 dw2_asm_output_data_uleb128 (ent->val, NULL);
11955 break;
11956 }
11957 }
11958
11959 /* Emit debug info for the address of the end of the table. */
11960 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11961 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11962 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11963 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11964
11965 dw2_asm_output_data (1, 0, "end sequence");
11966 dw2_asm_output_data_uleb128 (1, NULL);
11967 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
11968 }
11969
11970 /* Output the source line number correspondence information. This
11971 information goes into the .debug_line section. */
11972
11973 static void
11974 output_line_info (bool prologue_only)
11975 {
11976 static unsigned int generation;
11977 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11978 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11979 bool saw_one = false;
11980 int opc;
11981
11982 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11983 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11984 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11985 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11986
11987 if (!XCOFF_DEBUGGING_INFO)
11988 {
11989 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11990 dw2_asm_output_data (4, 0xffffffff,
11991 "Initial length escape value indicating 64-bit DWARF extension");
11992 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11993 "Length of Source Line Info");
11994 }
11995
11996 ASM_OUTPUT_LABEL (asm_out_file, l1);
11997
11998 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11999 if (dwarf_version >= 5)
12000 {
12001 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12002 dw2_asm_output_data (1, 0, "Segment Size");
12003 }
12004 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12005 ASM_OUTPUT_LABEL (asm_out_file, p1);
12006
12007 /* Define the architecture-dependent minimum instruction length (in bytes).
12008 In this implementation of DWARF, this field is used for information
12009 purposes only. Since GCC generates assembly language, we have no
12010 a priori knowledge of how many instruction bytes are generated for each
12011 source line, and therefore can use only the DW_LNE_set_address and
12012 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12013 this as '1', which is "correct enough" for all architectures,
12014 and don't let the target override. */
12015 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12016
12017 if (dwarf_version >= 4)
12018 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12019 "Maximum Operations Per Instruction");
12020 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12021 "Default is_stmt_start flag");
12022 dw2_asm_output_data (1, DWARF_LINE_BASE,
12023 "Line Base Value (Special Opcodes)");
12024 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12025 "Line Range Value (Special Opcodes)");
12026 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12027 "Special Opcode Base");
12028
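/* Emit the standard_opcode_lengths table: the number of ULEB128
   operands taken by each standard opcode, so that consumers can skip
   opcodes they do not implement. */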
12029 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12030 {
12031 int n_op_args;
12032 switch (opc)
12033 {
12034 case DW_LNS_advance_pc:
12035 case DW_LNS_advance_line:
12036 case DW_LNS_set_file:
12037 case DW_LNS_set_column:
12038 case DW_LNS_fixed_advance_pc:
12039 case DW_LNS_set_isa:
12040 n_op_args = 1;
12041 break;
12042 default:
12043 n_op_args = 0;
12044 break;
12045 }
12046
12047 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12048 opc, n_op_args);
12049 }
12050
12051 /* Write out the information about the files we use. */
12052 output_file_names ();
12053 ASM_OUTPUT_LABEL (asm_out_file, p2);
12054 if (prologue_only)
12055 {
12056 /* Output the marker for the end of the line number info. */
12057 ASM_OUTPUT_LABEL (asm_out_file, l2);
12058 return;
12059 }
12060
12061 if (separate_line_info)
12062 {
12063 dw_line_info_table *table;
12064 size_t i;
12065
12066 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12067 if (table->in_use)
12068 {
12069 output_one_line_info_table (table);
12070 saw_one = true;
12071 }
12072 }
12073 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12074 {
12075 output_one_line_info_table (cold_text_section_line_info);
12076 saw_one = true;
12077 }
12078
12079 /* ??? Some Darwin linkers crash on a .debug_line section with no
12080 sequences. Further, merely a DW_LNE_end_sequence entry is not
12081 sufficient -- the address column must also be initialized.
12082 Make sure to output at least one set_address/end_sequence pair,
12083 choosing .text since that section is always present. */
12084 if (text_section_line_info->in_use || !saw_one)
12085 output_one_line_info_table (text_section_line_info);
12086
12087 /* Output the marker for the end of the line number info. */
12088 ASM_OUTPUT_LABEL (asm_out_file, l2);
12089 }
12090 \f
12091 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12092
12093 static inline bool
12094 need_endianity_attribute_p (bool reverse)
12095 {
12096 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12097 }
12098
12099 /* Given a pointer to a tree node for some base type, return a pointer to
12100 a DIE that describes the given type. REVERSE is true if the type is
12101 to be interpreted in the reverse storage order wrt the target order.
12102
12103 This routine must only be called for GCC type nodes that correspond to
12104 Dwarf base (fundamental) types. */
12105
12106 static dw_die_ref
12107 base_type_die (tree type, bool reverse)
12108 {
12109 dw_die_ref base_type_result;
12110 enum dwarf_type encoding;
12111 bool fpt_used = false;
12112 struct fixed_point_type_info fpt_info;
12113 tree type_bias = NULL_TREE;
12114
12115 /* If this is a subtype that should not be emitted as a subrange type,
12116 use the base type. See subrange_type_for_debug_p. */
12117 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12118 type = TREE_TYPE (type);
12119
12120 switch (TREE_CODE (type))
12121 {
12122 case INTEGER_TYPE:
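/* DWARF 4 added DW_ATE_UTF; use it for the built-in char16_t and
   char32_t types. */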
12123 if ((dwarf_version >= 4 || !dwarf_strict)
12124 && TYPE_NAME (type)
12125 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12126 && DECL_IS_BUILTIN (TYPE_NAME (type))
12127 && DECL_NAME (TYPE_NAME (type)))
12128 {
12129 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12130 if (strcmp (name, "char16_t") == 0
12131 || strcmp (name, "char32_t") == 0)
12132 {
12133 encoding = DW_ATE_UTF;
12134 break;
12135 }
12136 }
12137 if ((dwarf_version >= 3 || !dwarf_strict)
12138 && lang_hooks.types.get_fixed_point_type_info)
12139 {
12140 memset (&fpt_info, 0, sizeof (fpt_info));
12141 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12142 {
12143 fpt_used = true;
12144 encoding = ((TYPE_UNSIGNED (type))
12145 ? DW_ATE_unsigned_fixed
12146 : DW_ATE_signed_fixed);
12147 break;
12148 }
12149 }
12150 if (TYPE_STRING_FLAG (type))
12151 {
12152 if (TYPE_UNSIGNED (type))
12153 encoding = DW_ATE_unsigned_char;
12154 else
12155 encoding = DW_ATE_signed_char;
12156 }
12157 else if (TYPE_UNSIGNED (type))
12158 encoding = DW_ATE_unsigned;
12159 else
12160 encoding = DW_ATE_signed;
12161
12162 if (!dwarf_strict
12163 && lang_hooks.types.get_type_bias)
12164 type_bias = lang_hooks.types.get_type_bias (type);
12165 break;
12166
12167 case REAL_TYPE:
12168 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12169 {
12170 if (dwarf_version >= 3 || !dwarf_strict)
12171 encoding = DW_ATE_decimal_float;
12172 else
12173 encoding = DW_ATE_lo_user;
12174 }
12175 else
12176 encoding = DW_ATE_float;
12177 break;
12178
12179 case FIXED_POINT_TYPE:
12180 if (!(dwarf_version >= 3 || !dwarf_strict))
12181 encoding = DW_ATE_lo_user;
12182 else if (TYPE_UNSIGNED (type))
12183 encoding = DW_ATE_unsigned_fixed;
12184 else
12185 encoding = DW_ATE_signed_fixed;
12186 break;
12187
12188 /* Dwarf2 doesn't know anything about complex ints, so use
12189 a user defined type for them. */
12190 case COMPLEX_TYPE:
12191 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12192 encoding = DW_ATE_complex_float;
12193 else
12194 encoding = DW_ATE_lo_user;
12195 break;
12196
12197 case BOOLEAN_TYPE:
12198 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12199 encoding = DW_ATE_boolean;
12200 break;
12201
12202 default:
12203 /* No other TREE_CODEs are Dwarf fundamental types. */
12204 gcc_unreachable ();
12205 }
12206
12207 base_type_result = new_die_raw (DW_TAG_base_type);
12208
12209 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12210 int_size_in_bytes (type));
12211 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12212
12213 if (need_endianity_attribute_p (reverse))
12214 add_AT_unsigned (base_type_result, DW_AT_endianity,
12215 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12216
12217 add_alignment_attribute (base_type_result, type);
12218
12219 if (fpt_used)
12220 {
12221 switch (fpt_info.scale_factor_kind)
12222 {
12223 case fixed_point_scale_factor_binary:
12224 add_AT_int (base_type_result, DW_AT_binary_scale,
12225 fpt_info.scale_factor.binary);
12226 break;
12227
12228 case fixed_point_scale_factor_decimal:
12229 add_AT_int (base_type_result, DW_AT_decimal_scale,
12230 fpt_info.scale_factor.decimal);
12231 break;
12232
12233 case fixed_point_scale_factor_arbitrary:
12234 /* Arbitrary scale factors cannot yet be described in
12235 standard DWARF. */
12236 if (!dwarf_strict)
12237 {
12238 /* Describe the scale factor as a rational constant. */
12239 const dw_die_ref scale_factor
12240 = new_die (DW_TAG_constant, comp_unit_die (), type);
12241
12242 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12243 fpt_info.scale_factor.arbitrary.numerator);
12244 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12245 fpt_info.scale_factor.arbitrary.denominator);
12246
12247 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12248 }
12249 break;
12250
12251 default:
12252 gcc_unreachable ();
12253 }
12254 }
12255
12256 if (type_bias)
12257 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12258 dw_scalar_form_constant
12259 | dw_scalar_form_exprloc
12260 | dw_scalar_form_reference,
12261 NULL);
12262
12263 return base_type_result;
12264 }
12265
12266 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12267 named 'auto' in its type: return true for it, false otherwise. */
12268
12269 static inline bool
12270 is_cxx_auto (tree type)
12271 {
12272 if (is_cxx ())
12273 {
12274 tree name = TYPE_IDENTIFIER (type);
12275 if (name == get_identifier ("auto")
12276 || name == get_identifier ("decltype(auto)"))
12277 return true;
12278 }
12279 return false;
12280 }
12281
12282 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12283 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12284
12285 static inline int
12286 is_base_type (tree type)
12287 {
12288 switch (TREE_CODE (type))
12289 {
12290 case INTEGER_TYPE:
12291 case REAL_TYPE:
12292 case FIXED_POINT_TYPE:
12293 case COMPLEX_TYPE:
12294 case BOOLEAN_TYPE:
12295 case POINTER_BOUNDS_TYPE:
12296 return 1;
12297
12298 case VOID_TYPE:
12299 case ARRAY_TYPE:
12300 case RECORD_TYPE:
12301 case UNION_TYPE:
12302 case QUAL_UNION_TYPE:
12303 case ENUMERAL_TYPE:
12304 case FUNCTION_TYPE:
12305 case METHOD_TYPE:
12306 case POINTER_TYPE:
12307 case REFERENCE_TYPE:
12308 case NULLPTR_TYPE:
12309 case OFFSET_TYPE:
12310 case LANG_TYPE:
12311 case VECTOR_TYPE:
12312 return 0;
12313
12314 default:
12315 if (is_cxx_auto (type))
12316 return 0;
12317 gcc_unreachable ();
12318 }
12319
12320 return 0;
12321 }
12322
12323 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12324 node, return the size in bits for the type if it is a constant, or else
12325 return the alignment for the type if the type's size is not constant, or
12326 else return BITS_PER_WORD if the type actually turns out to be an
12327 ERROR_MARK node. */
12328
12329 static inline unsigned HOST_WIDE_INT
12330 simple_type_size_in_bits (const_tree type)
12331 {
12332 if (TREE_CODE (type) == ERROR_MARK)
12333 return BITS_PER_WORD;
12334 else if (TYPE_SIZE (type) == NULL_TREE)
12335 return 0;
12336 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12337 return tree_to_uhwi (TYPE_SIZE (type));
12338 else
12339 return TYPE_ALIGN (type);
12340 }
12341
12342 /* Similarly, but return an offset_int instead of UHWI. */
12343
12344 static inline offset_int
12345 offset_int_type_size_in_bits (const_tree type)
12346 {
12347 if (TREE_CODE (type) == ERROR_MARK)
12348 return BITS_PER_WORD;
12349 else if (TYPE_SIZE (type) == NULL_TREE)
12350 return 0;
12351 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12352 return wi::to_offset (TYPE_SIZE (type));
12353 else
12354 return TYPE_ALIGN (type);
12355 }
12356
12357 /* Given a pointer to a tree node for a subrange type, return a pointer
12358 to a DIE that describes the given type. */
12359
12360 static dw_die_ref
12361 subrange_type_die (tree type, tree low, tree high, tree bias,
12362 dw_die_ref context_die)
12363 {
12364 dw_die_ref subrange_die;
12365 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12366
12367 if (context_die == NULL)
12368 context_die = comp_unit_die ();
12369
12370 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12371
12372 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12373 {
12374 /* The size of the subrange type and its base type do not match,
12375 so we need to generate a size attribute for the subrange type. */
12376 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12377 }
12378
12379 add_alignment_attribute (subrange_die, type);
12380
12381 if (low)
12382 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12383 if (high)
12384 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12385 if (bias && !dwarf_strict)
12386 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12387 dw_scalar_form_constant
12388 | dw_scalar_form_exprloc
12389 | dw_scalar_form_reference,
12390 NULL);
12391
12392 return subrange_die;
12393 }
12394
12395 /* Returns the (const and/or volatile) cv_qualifiers associated with
12396 the decl node. This will normally be augmented with the
12397 cv_qualifiers of the underlying type in add_type_attribute. */
12398
12399 static int
12400 decl_quals (const_tree decl)
12401 {
12402 return ((TREE_READONLY (decl)
12403 /* The C++ front-end correctly marks reference-typed
12404 variables as readonly, but from a language (and debug
12405 info) standpoint they are not const-qualified. */
12406 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12407 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12408 | (TREE_THIS_VOLATILE (decl)
12409 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12410 }
12411
12412 /* Determine the TYPE whose qualifiers match the largest strict subset
12413 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12414 qualifiers outside QUAL_MASK. */
12415
12416 static int
12417 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12418 {
12419 tree t;
12420 int best_rank = 0, best_qual = 0, max_rank;
12421
12422 type_quals &= qual_mask;
12423 max_rank = popcount_hwi (type_quals) - 1;
12424
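/* Walk the variant list of TYPE looking for the variant whose
   qualifiers form the largest proper subset of TYPE_QUALS. */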
12425 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12426 t = TYPE_NEXT_VARIANT (t))
12427 {
12428 int q = TYPE_QUALS (t) & qual_mask;
12429
12430 if ((q & type_quals) == q && q != type_quals
12431 && check_base_type (t, type))
12432 {
12433 int rank = popcount_hwi (q);
12434
12435 if (rank > best_rank)
12436 {
12437 best_rank = rank;
12438 best_qual = q;
12439 }
12440 }
12441 }
12442
12443 return best_qual;
12444 }
12445
12446 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12447 static const dwarf_qual_info_t dwarf_qual_info[] =
12448 {
12449 { TYPE_QUAL_CONST, DW_TAG_const_type },
12450 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12451 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12452 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12453 };
12454 static const unsigned int dwarf_qual_info_size
12455 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12456
12457 /* If DIE is a qualified DIE of some base DIE with the same parent,
12458 return the base DIE, otherwise return NULL. Set MASK to the
12459 qualifiers added compared to the returned DIE. */
12460
12461 static dw_die_ref
12462 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12463 {
12464 unsigned int i;
12465 for (i = 0; i < dwarf_qual_info_size; i++)
12466 if (die->die_tag == dwarf_qual_info[i].t)
12467 break;
12468 if (i == dwarf_qual_info_size)
12469 return NULL;
12470 if (vec_safe_length (die->die_attr) != 1)
12471 return NULL;
12472 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12473 if (type == NULL || type->die_parent != die->die_parent)
12474 return NULL;
12475 *mask |= dwarf_qual_info[i].q;
12476 if (depth)
12477 {
12478 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12479 if (ret)
12480 return ret;
12481 }
12482 return type;
12483 }
12484
12485 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12486 entry that chains the modifiers specified by CV_QUALS in front of the
12487 given type. REVERSE is true if the type is to be interpreted in the
12488 reverse storage order wrt the target order. */
12489
12490 static dw_die_ref
12491 modified_type_die (tree type, int cv_quals, bool reverse,
12492 dw_die_ref context_die)
12493 {
12494 enum tree_code code = TREE_CODE (type);
12495 dw_die_ref mod_type_die;
12496 dw_die_ref sub_die = NULL;
12497 tree item_type = NULL;
12498 tree qualified_type;
12499 tree name, low, high;
12500 dw_die_ref mod_scope;
12501 /* Only these cv-qualifiers are currently handled. */
12502 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12503 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
12504 ENCODE_QUAL_ADDR_SPACE(~0U));
12505 const bool reverse_base_type
12506 = need_endianity_attribute_p (reverse) && is_base_type (type);
12507
12508 if (code == ERROR_MARK)
12509 return NULL;
12510
12511 if (lang_hooks.types.get_debug_type)
12512 {
12513 tree debug_type = lang_hooks.types.get_debug_type (type);
12514
12515 if (debug_type != NULL_TREE && debug_type != type)
12516 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12517 }
12518
12519 cv_quals &= cv_qual_mask;
12520
12521 /* Don't emit DW_TAG_restrict_type for DWARFv2: since it is a type
12522 tag modifier (and not an attribute), old consumers won't be able
12523 to handle it. */
12524 if (dwarf_version < 3)
12525 cv_quals &= ~TYPE_QUAL_RESTRICT;
12526
12527 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
12528 if (dwarf_version < 5)
12529 cv_quals &= ~TYPE_QUAL_ATOMIC;
12530
12531 /* See if we already have the appropriately qualified variant of
12532 this type. */
12533 qualified_type = get_qualified_type (type, cv_quals);
12534
12535 if (qualified_type == sizetype)
12536 {
12537 /* Try not to expose the internal sizetype type's name. */
12538 if (TYPE_NAME (qualified_type)
12539 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12540 {
12541 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12542
12543 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12544 && (TYPE_PRECISION (t)
12545 == TYPE_PRECISION (qualified_type))
12546 && (TYPE_UNSIGNED (t)
12547 == TYPE_UNSIGNED (qualified_type)));
12548 qualified_type = t;
12549 }
12550 else if (qualified_type == sizetype
12551 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
12552 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
12553 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
12554 qualified_type = size_type_node;
12555 }
12556
12557 /* If we do, then we can just use its DIE, if it exists. */
12558 if (qualified_type)
12559 {
12560 mod_type_die = lookup_type_die (qualified_type);
12561
12562 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
12563 dealt with specially: the DIE with the attribute, if it exists, is
12564 placed immediately after the regular DIE for the same base type. */
12565 if (mod_type_die
12566 && (!reverse_base_type
12567 || ((mod_type_die = mod_type_die->die_sib) != NULL
12568 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
12569 return mod_type_die;
12570 }
12571
12572 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12573
12574 /* Handle C typedef types. */
12575 if (name
12576 && TREE_CODE (name) == TYPE_DECL
12577 && DECL_ORIGINAL_TYPE (name)
12578 && !DECL_ARTIFICIAL (name))
12579 {
12580 tree dtype = TREE_TYPE (name);
12581
12582 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
12583 if (qualified_type == dtype && !reverse_base_type)
12584 {
12585 tree origin = decl_ultimate_origin (name);
12586
12587 /* Typedef variants that have an abstract origin don't get their own
12588 type DIE (see gen_typedef_die), so fall back on the ultimate
12589 abstract origin instead. */
12590 if (origin != NULL && origin != name)
12591 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
12592 context_die);
12593
12594 /* For a named type, use the typedef. */
12595 gen_type_die (qualified_type, context_die);
12596 return lookup_type_die (qualified_type);
12597 }
12598 else
12599 {
12600 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12601 dquals &= cv_qual_mask;
12602 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12603 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12604 /* cv-unqualified version of named type. Just use
12605 the unnamed type to which it refers. */
12606 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12607 reverse, context_die);
12608 /* Else cv-qualified version of named type; fall through. */
12609 }
12610 }
12611
12612 mod_scope = scope_die_for (type, context_die);
12613
12614 if (cv_quals)
12615 {
12616 int sub_quals = 0, first_quals = 0;
12617 unsigned i;
12618 dw_die_ref first = NULL, last = NULL;
12619
12620 /* Determine a lesser qualified type that most closely matches
12621 this one. Then generate DW_TAG_* entries for the remaining
12622 qualifiers. */
12623 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12624 cv_qual_mask);
12625 if (sub_quals && use_debug_types)
12626 {
12627 bool needed = false;
12628 /* If emitting type units, make sure the order of qualifiers
12629 is canonical. Thus, start from unqualified type if
12630 an earlier qualifier is missing in sub_quals, but some later
12631 one is present there. */
12632 for (i = 0; i < dwarf_qual_info_size; i++)
12633 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12634 needed = true;
12635 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12636 {
12637 sub_quals = 0;
12638 break;
12639 }
12640 }
12641 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12642 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12643 {
12644 /* As not all intermediate qualified DIEs have corresponding
12645 tree types, ensure that qualified DIEs in the same scope
12646 as their DW_AT_type are emitted after their DW_AT_type,
12647 only with other qualified DIEs for the same type possibly
12648 in between them. Determine the range of such qualified
12649 DIEs now (first being the base type, last being the corresponding
12650 last qualified DIE for it). */
12651 unsigned int count = 0;
12652 first = qualified_die_p (mod_type_die, &first_quals,
12653 dwarf_qual_info_size);
12654 if (first == NULL)
12655 first = mod_type_die;
12656 gcc_assert ((first_quals & ~sub_quals) == 0);
12657 for (count = 0, last = first;
12658 count < (1U << dwarf_qual_info_size);
12659 count++, last = last->die_sib)
12660 {
12661 int quals = 0;
12662 if (last == mod_scope->die_child)
12663 break;
12664 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12665 != first)
12666 break;
12667 }
12668 }
12669
12670 for (i = 0; i < dwarf_qual_info_size; i++)
12671 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12672 {
12673 dw_die_ref d;
12674 if (first && first != last)
12675 {
12676 for (d = first->die_sib; ; d = d->die_sib)
12677 {
12678 int quals = 0;
12679 qualified_die_p (d, &quals, dwarf_qual_info_size);
12680 if (quals == (first_quals | dwarf_qual_info[i].q))
12681 break;
12682 if (d == last)
12683 {
12684 d = NULL;
12685 break;
12686 }
12687 }
12688 if (d)
12689 {
12690 mod_type_die = d;
12691 continue;
12692 }
12693 }
12694 if (first)
12695 {
12696 d = new_die_raw (dwarf_qual_info[i].t);
12697 add_child_die_after (mod_scope, d, last);
12698 last = d;
12699 }
12700 else
12701 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12702 if (mod_type_die)
12703 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12704 mod_type_die = d;
12705 first_quals |= dwarf_qual_info[i].q;
12706 }
12707 }
12708 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12709 {
12710 dwarf_tag tag = DW_TAG_pointer_type;
12711 if (code == REFERENCE_TYPE)
12712 {
12713 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12714 tag = DW_TAG_rvalue_reference_type;
12715 else
12716 tag = DW_TAG_reference_type;
12717 }
12718 mod_type_die = new_die (tag, mod_scope, type);
12719
12720 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12721 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12722 add_alignment_attribute (mod_type_die, type);
12723 item_type = TREE_TYPE (type);
12724
12725 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12726 if (!ADDR_SPACE_GENERIC_P (as))
12727 {
12728 int action = targetm.addr_space.debug (as);
12729 if (action >= 0)
12730 {
12731 /* Positive values indicate an address_class. */
12732 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12733 }
12734 else
12735 {
12736 /* Negative values indicate an (inverted) segment base reg. */
12737 dw_loc_descr_ref d
12738 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12739 add_AT_loc (mod_type_die, DW_AT_segment, d);
12740 }
12741 }
12742 }
12743 else if (code == INTEGER_TYPE
12744 && TREE_TYPE (type) != NULL_TREE
12745 && subrange_type_for_debug_p (type, &low, &high))
12746 {
12747 tree bias = NULL_TREE;
12748 if (lang_hooks.types.get_type_bias)
12749 bias = lang_hooks.types.get_type_bias (type);
12750 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12751 item_type = TREE_TYPE (type);
12752 }
12753 else if (is_base_type (type))
12754 {
12755 mod_type_die = base_type_die (type, reverse);
12756
12757 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
12758 if (reverse_base_type)
12759 {
12760 dw_die_ref after_die
12761 = modified_type_die (type, cv_quals, false, context_die);
12762 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
12763 }
12764 else
12765 add_child_die (comp_unit_die (), mod_type_die);
12766
12767 add_pubtype (type, mod_type_die);
12768 }
12769 else
12770 {
12771 gen_type_die (type, context_die);
12772
12773 /* We have to get the type_main_variant here (and pass that to the
12774 `lookup_type_die' routine) because the ..._TYPE node we have
12775 might simply be a *copy* of some original type node (where the
12776 copy was created to help us keep track of typedef names) and
12777 that copy might have a different TYPE_UID from the original
12778 ..._TYPE node. */
12779 if (TREE_CODE (type) == FUNCTION_TYPE
12780 || TREE_CODE (type) == METHOD_TYPE)
12781 {
12782 /* For function/method types, can't just use type_main_variant here,
12783 because that can have different ref-qualifiers for C++,
12784 but try to canonicalize. */
12785 tree main = TYPE_MAIN_VARIANT (type);
12786 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12787 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
12788 && check_base_type (t, main)
12789 && check_lang_type (t, type))
12790 return lookup_type_die (t);
12791 return lookup_type_die (type);
12792 }
12793 else if (TREE_CODE (type) != VECTOR_TYPE
12794 && TREE_CODE (type) != ARRAY_TYPE)
12795 return lookup_type_die (type_main_variant (type));
12796 else
12797 /* Vectors have the debugging information in the type,
12798 not the main variant. */
12799 return lookup_type_die (type);
12800 }
12801
12802 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12803 don't output a DW_TAG_typedef, since there isn't one in the
12804 user's program; just attach a DW_AT_name to the type.
12805 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12806 if the base type already has the same name. */
12807 if (name
12808 && ((TREE_CODE (name) != TYPE_DECL
12809 && (qualified_type == TYPE_MAIN_VARIANT (type)
12810 || (cv_quals == TYPE_UNQUALIFIED)))
12811 || (TREE_CODE (name) == TYPE_DECL
12812 && TREE_TYPE (name) == qualified_type
12813 && DECL_NAME (name))))
12814 {
12815 if (TREE_CODE (name) == TYPE_DECL)
12816 /* Could just call add_name_and_src_coords_attributes here,
12817 but since this is a builtin type it doesn't have any
12818 useful source coordinates anyway. */
12819 name = DECL_NAME (name);
12820 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12821 }
12822 /* This probably indicates a bug. */
12823 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12824 {
12825 name = TYPE_IDENTIFIER (type);
12826 add_name_attribute (mod_type_die,
12827 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12828 }
12829
12830 if (qualified_type && !reverse_base_type)
12831 equate_type_number_to_die (qualified_type, mod_type_die);
12832
12833 if (item_type)
12834 /* We must do this after the equate_type_number_to_die call, in case
12835 this is a recursive type. This ensures that the modified_type_die
12836 recursion will terminate even if the type is recursive. Recursive
12837 types are possible in Ada. */
12838 sub_die = modified_type_die (item_type,
12839 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12840 reverse,
12841 context_die);
12842
12843 if (sub_die != NULL)
12844 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12845
12846 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12847 if (TYPE_ARTIFICIAL (type))
12848 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12849
12850 return mod_type_die;
12851 }
12852
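/* For illustration: for an object declared "const volatile int" the
   qualifier loop above typically yields a chain of the form
     DW_TAG_volatile_type -> DW_TAG_const_type -> DW_TAG_base_type (int)
   linked through DW_AT_type, while the restrict qualifier of
   "int *restrict" is dropped under -gdwarf-2 because DW_TAG_restrict_type
   only exists from DWARF 3 onwards.  */
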
12853 /* Generate DIEs for the generic parameters of T.
12854 T must be either a generic type or a generic function.
12855 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12856
12857 static void
12858 gen_generic_params_dies (tree t)
12859 {
12860 tree parms, args;
12861 int parms_num, i;
12862 dw_die_ref die = NULL;
12863 int non_default;
12864
12865 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12866 return;
12867
12868 if (TYPE_P (t))
12869 die = lookup_type_die (t);
12870 else if (DECL_P (t))
12871 die = lookup_decl_die (t);
12872
12873 gcc_assert (die);
12874
12875 parms = lang_hooks.get_innermost_generic_parms (t);
12876 if (!parms)
12877 /* T has no generic parameters. It means T is neither a generic type
12878 nor a generic function. End of story. */
12879 return;
12880
12881 parms_num = TREE_VEC_LENGTH (parms);
12882 args = lang_hooks.get_innermost_generic_args (t);
12883 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12884 non_default = int_cst_value (TREE_CHAIN (args));
12885 else
12886 non_default = TREE_VEC_LENGTH (args);
12887 for (i = 0; i < parms_num; i++)
12888 {
12889 tree parm, arg, arg_pack_elems;
12890 dw_die_ref parm_die;
12891
12892 parm = TREE_VEC_ELT (parms, i);
12893 arg = TREE_VEC_ELT (args, i);
12894 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12895 gcc_assert (parm && TREE_VALUE (parm) && arg);
12896
12897 if (parm && TREE_VALUE (parm) && arg)
12898 {
12899 /* If PARM represents a template parameter pack,
12900 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12901 by DW_TAG_template_*_parameter DIEs for the argument
12902 pack elements of ARG. Note that ARG would then be
12903 an argument pack. */
12904 if (arg_pack_elems)
12905 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12906 arg_pack_elems,
12907 die);
12908 else
12909 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12910 true /* emit name */, die);
12911 if (i >= non_default)
12912 add_AT_flag (parm_die, DW_AT_default_value, 1);
12913 }
12914 }
12915 }
12916
12917 /* Create and return a DIE for PARM which should be
12918 the representation of a generic type parameter.
12919 For instance, in the C++ front end, PARM would be a template parameter.
12920 ARG is the argument to PARM.
12921 If EMIT_NAME_P is true, the DIE will have a DW_AT_name attribute set to
12922 the name of PARM.
12923 PARENT_DIE is the parent DIE to which the newly created DIE should be
12924 added as a child node. */
12925
12926 static dw_die_ref
12927 generic_parameter_die (tree parm, tree arg,
12928 bool emit_name_p,
12929 dw_die_ref parent_die)
12930 {
12931 dw_die_ref tmpl_die = NULL;
12932 const char *name = NULL;
12933
12934 if (!parm || !DECL_NAME (parm) || !arg)
12935 return NULL;
12936
12937 /* We support non-type generic parameters and arguments,
12938 type generic parameters and arguments, as well as
12939 generic generic parameters (a.k.a. template template parameters in C++)
12940 and arguments. */
12941 if (TREE_CODE (parm) == PARM_DECL)
12942 /* PARM is a nontype generic parameter */
12943 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12944 else if (TREE_CODE (parm) == TYPE_DECL)
12945 /* PARM is a type generic parameter. */
12946 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12947 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12948 /* PARM is a generic generic parameter.
12949 Its DIE is a GNU extension. It shall have a
12950 DW_AT_name attribute to represent the name of the template template
12951 parameter, and a DW_AT_GNU_template_name attribute to represent the
12952 name of the template template argument. */
12953 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12954 parent_die, parm);
12955 else
12956 gcc_unreachable ();
12957
12958 if (tmpl_die)
12959 {
12960 tree tmpl_type;
12961
12962 /* If PARM is a generic parameter pack, it means we are
12963 emitting debug info for a template argument pack element.
12964 In other terms, ARG is a template argument pack element.
12965 In that case, we don't emit any DW_AT_name attribute for
12966 the die. */
12967 if (emit_name_p)
12968 {
12969 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12970 gcc_assert (name);
12971 add_AT_string (tmpl_die, DW_AT_name, name);
12972 }
12973
12974 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12975 {
12976 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12977 TMPL_DIE should have a child DW_AT_type attribute that is set
12978 to the type of the argument to PARM, which is ARG.
12979 If PARM is a type generic parameter, TMPL_DIE should have a
12980 child DW_AT_type that is set to ARG. */
12981 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12982 add_type_attribute (tmpl_die, tmpl_type,
12983 (TREE_THIS_VOLATILE (tmpl_type)
12984 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12985 false, parent_die);
12986 }
12987 else
12988 {
12989 /* So TMPL_DIE is a DIE representing a
12990 generic generic parameter, a.k.a. a template template
12991 parameter in C++, and ARG is a template. */
12992
12993 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12994 to the name of the argument. */
12995 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12996 if (name)
12997 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12998 }
12999
13000 if (TREE_CODE (parm) == PARM_DECL)
13001 /* So PARM is a non-type generic parameter.
13002 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13003 attribute of TMPL_DIE whose value represents the value
13004 of ARG.
13005 We must be careful here:
13006 the value of ARG might reference some function decls.
13007 We might currently be emitting debug info for a generic
13008 type; since types are emitted before function decls, we don't
13009 know whether the function decls referenced by ARG will actually
13010 be emitted after cgraph computations.
13011 So we must defer the generation of the DW_AT_const_value to
13012 after cgraph is ready. */
13013 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13014 }
13015
13016 return tmpl_die;
13017 }
13018
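/* For illustration (hypothetical C++ source): instantiating
   "template <typename T, int N> struct Buf" as Buf<char, 16> would give
   a DW_TAG_template_type_param DIE named "T" whose DW_AT_type refers to
   char, and a DW_TAG_template_value_param DIE named "N" whose
   DW_AT_const_value of 16 is only attached later, once cgraph has
   decided which decls are really emitted.  */
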
13019 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13020 PARM_PACK, which must be a template parameter pack. The returned DIE
13021 will be a child DIE of PARENT_DIE. */
13022
13023 static dw_die_ref
13024 template_parameter_pack_die (tree parm_pack,
13025 tree parm_pack_args,
13026 dw_die_ref parent_die)
13027 {
13028 dw_die_ref die;
13029 int j;
13030
13031 gcc_assert (parent_die && parm_pack);
13032
13033 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13034 add_name_and_src_coords_attributes (die, parm_pack);
13035 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13036 generic_parameter_die (parm_pack,
13037 TREE_VEC_ELT (parm_pack_args, j),
13038 false /* Don't emit DW_AT_name */,
13039 die);
13040 return die;
13041 }
13042
13043 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13044 an enumerated type. */
13045
13046 static inline int
13047 type_is_enum (const_tree type)
13048 {
13049 return TREE_CODE (type) == ENUMERAL_TYPE;
13050 }
13051
13052 /* Return the DBX register number described by a given RTL node. */
13053
13054 static unsigned int
13055 dbx_reg_number (const_rtx rtl)
13056 {
13057 unsigned regno = REGNO (rtl);
13058
13059 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13060
13061 #ifdef LEAF_REG_REMAP
13062 if (crtl->uses_only_leaf_regs)
13063 {
13064 int leaf_reg = LEAF_REG_REMAP (regno);
13065 if (leaf_reg != -1)
13066 regno = (unsigned) leaf_reg;
13067 }
13068 #endif
13069
13070 regno = DBX_REGISTER_NUMBER (regno);
13071 gcc_assert (regno != INVALID_REGNUM);
13072 return regno;
13073 }
13074
13075 /* Optionally add a DW_OP_piece term to a location description expression.
13076 DW_OP_piece is only added if the location description expression does
13077 not already end with DW_OP_piece. */
13078
13079 static void
13080 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13081 {
13082 dw_loc_descr_ref loc;
13083
13084 if (*list_head != NULL)
13085 {
13086 /* Find the end of the chain. */
13087 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13088 ;
13089
13090 if (loc->dw_loc_opc != DW_OP_piece)
13091 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13092 }
13093 }
13094
13095 /* Return a location descriptor that designates a machine register or
13096 zero if there is none. */
13097
13098 static dw_loc_descr_ref
13099 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13100 {
13101 rtx regs;
13102
13103 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13104 return 0;
13105
13106 /* We only use "frame base" when we're sure we're talking about the
13107 post-prologue local stack frame. We do this by *not* running
13108 register elimination until this point, and recognizing the special
13109 argument pointer and soft frame pointer rtx's.
13110 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13111 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13112 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13113 {
13114 dw_loc_descr_ref result = NULL;
13115
13116 if (dwarf_version >= 4 || !dwarf_strict)
13117 {
13118 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13119 initialized);
13120 if (result)
13121 add_loc_descr (&result,
13122 new_loc_descr (DW_OP_stack_value, 0, 0));
13123 }
13124 return result;
13125 }
13126
13127 regs = targetm.dwarf_register_span (rtl);
13128
13129 if (REG_NREGS (rtl) > 1 || regs)
13130 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13131 else
13132 {
13133 unsigned int dbx_regnum = dbx_reg_number (rtl);
13134 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13135 return 0;
13136 return one_reg_loc_descriptor (dbx_regnum, initialized);
13137 }
13138 }
13139
13140 /* Return a location descriptor that designates a machine register for
13141 a given hard register number. */
13142
13143 static dw_loc_descr_ref
13144 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13145 {
13146 dw_loc_descr_ref reg_loc_descr;
13147
13148 if (regno <= 31)
13149 reg_loc_descr
13150 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13151 else
13152 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13153
13154 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13155 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13156
13157 return reg_loc_descr;
13158 }
13159
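/* For illustration: DWARF register number 3 is encoded as the single
   byte DW_OP_reg3, whereas register number 40 needs DW_OP_regx followed
   by the ULEB128 operand 40; an uninitialized variable additionally gets
   the DW_OP_GNU_uninit extension appended.  */
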
13160 /* Given an RTL of a register, return a location descriptor that
13161 designates a value that spans more than one register. */
13162
13163 static dw_loc_descr_ref
13164 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13165 enum var_init_status initialized)
13166 {
13167 int size, i;
13168 dw_loc_descr_ref loc_result = NULL;
13169
13170 /* Simple, contiguous registers. */
13171 if (regs == NULL_RTX)
13172 {
13173 unsigned reg = REGNO (rtl);
13174 int nregs;
13175
13176 #ifdef LEAF_REG_REMAP
13177 if (crtl->uses_only_leaf_regs)
13178 {
13179 int leaf_reg = LEAF_REG_REMAP (reg);
13180 if (leaf_reg != -1)
13181 reg = (unsigned) leaf_reg;
13182 }
13183 #endif
13184
13185 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13186 nregs = REG_NREGS (rtl);
13187
13188 /* At present we only track constant-sized pieces. */
13189 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13190 return NULL;
13191 size /= nregs;
13192
13193 loc_result = NULL;
13194 while (nregs--)
13195 {
13196 dw_loc_descr_ref t;
13197
13198 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13199 VAR_INIT_STATUS_INITIALIZED);
13200 add_loc_descr (&loc_result, t);
13201 add_loc_descr_op_piece (&loc_result, size);
13202 ++reg;
13203 }
13204 return loc_result;
13205 }
13206
13207 /* Now onto stupid register sets in non-contiguous locations. */
13208
13209 gcc_assert (GET_CODE (regs) == PARALLEL);
13210
13211 /* At present we only track constant-sized pieces. */
13212 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13213 return NULL;
13214 loc_result = NULL;
13215
13216 for (i = 0; i < XVECLEN (regs, 0); ++i)
13217 {
13218 dw_loc_descr_ref t;
13219
13220 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13221 VAR_INIT_STATUS_INITIALIZED);
13222 add_loc_descr (&loc_result, t);
13223 add_loc_descr_op_piece (&loc_result, size);
13224 }
13225
13226 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13227 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13228 return loc_result;
13229 }
13230
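/* For illustration (hypothetical register numbering): a 16-byte value
   held in two consecutive 8-byte registers with DWARF numbers 0 and 1
   is described as
     DW_OP_reg0; DW_OP_piece 8; DW_OP_reg1; DW_OP_piece 8
   and a PARALLEL span returned by targetm.dwarf_register_span is walked
   the same way, one DW_OP_piece per element.  */
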
13231 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13232
13233 /* Return a location descriptor that designates a constant i,
13234 as a compound operation from constant (i >> shift), constant shift
13235 and DW_OP_shl. */
13236
13237 static dw_loc_descr_ref
13238 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13239 {
13240 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13241 add_loc_descr (&ret, int_loc_descriptor (shift));
13242 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13243 return ret;
13244 }
13245
13246 /* Return a location descriptor that designates constant POLY_I. */
13247
13248 static dw_loc_descr_ref
13249 int_loc_descriptor (poly_int64 poly_i)
13250 {
13251 enum dwarf_location_atom op;
13252
13253 HOST_WIDE_INT i;
13254 if (!poly_i.is_constant (&i))
13255 {
13256 /* Create location descriptions for the non-constant part and
13257 add any constant offset at the end. */
13258 dw_loc_descr_ref ret = NULL;
13259 HOST_WIDE_INT constant = poly_i.coeffs[0];
13260 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13261 {
13262 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13263 if (coeff != 0)
13264 {
13265 dw_loc_descr_ref start = ret;
13266 unsigned int factor;
13267 int bias;
13268 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13269 (j, &factor, &bias);
13270
13271 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13272 add COEFF * (REGNO / FACTOR) now and subtract
13273 COEFF * BIAS from the final constant part. */
13274 constant -= coeff * bias;
13275 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13276 if (coeff % factor == 0)
13277 coeff /= factor;
13278 else
13279 {
13280 int amount = exact_log2 (factor);
13281 gcc_assert (amount >= 0);
13282 add_loc_descr (&ret, int_loc_descriptor (amount));
13283 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13284 }
13285 if (coeff != 1)
13286 {
13287 add_loc_descr (&ret, int_loc_descriptor (coeff));
13288 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13289 }
13290 if (start)
13291 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13292 }
13293 }
13294 loc_descr_plus_const (&ret, constant);
13295 return ret;
13296 }
13297
13298 /* Pick the smallest representation of a constant, rather than just
13299 defaulting to the LEB encoding. */
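/* For illustration: 5 fits in the one-byte DW_OP_lit5; 300 needs
   DW_OP_const2u (3 bytes); and 0x12000000 is cheaper as
   DW_OP_lit18 DW_OP_lit24 DW_OP_shl (3 bytes) than as DW_OP_const4u
   (5 bytes), which is what the shift-based cases below detect.  */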
13300 if (i >= 0)
13301 {
13302 int clz = clz_hwi (i);
13303 int ctz = ctz_hwi (i);
13304 if (i <= 31)
13305 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13306 else if (i <= 0xff)
13307 op = DW_OP_const1u;
13308 else if (i <= 0xffff)
13309 op = DW_OP_const2u;
13310 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13311 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13312 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13313 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13314 while DW_OP_const4u is 5 bytes. */
13315 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13316 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13317 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13318 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13319 while DW_OP_const4u is 5 bytes. */
13320 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13321
13322 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13323 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13324 <= 4)
13325 {
13326 /* As i >= 2**31, the double cast above will yield a negative number.
13327 Since wrapping is defined in DWARF expressions we can output big
13328 positive integers as small negative ones, regardless of the size
13329 of host wide ints.
13330
13331 Here, since the evaluator will handle 32-bit values and since i >=
13332 2**31, we know it's going to be interpreted as a negative literal:
13333 store it this way if we can do better than 5 bytes this way. */
13334 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13335 }
13336 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13337 op = DW_OP_const4u;
13338
13339 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13340 least 6 bytes: see if we can do better before falling back to it. */
13341 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13342 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13343 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13344 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13345 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13346 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13347 >= HOST_BITS_PER_WIDE_INT)
13348 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13349 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13350 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13351 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13352 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13353 && size_of_uleb128 (i) > 6)
13354 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13355 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13356 else
13357 op = DW_OP_constu;
13358 }
13359 else
13360 {
13361 if (i >= -0x80)
13362 op = DW_OP_const1s;
13363 else if (i >= -0x8000)
13364 op = DW_OP_const2s;
13365 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13366 {
13367 if (size_of_int_loc_descriptor (i) < 5)
13368 {
13369 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13370 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13371 return ret;
13372 }
13373 op = DW_OP_const4s;
13374 }
13375 else
13376 {
13377 if (size_of_int_loc_descriptor (i)
13378 < (unsigned long) 1 + size_of_sleb128 (i))
13379 {
13380 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13381 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13382 return ret;
13383 }
13384 op = DW_OP_consts;
13385 }
13386 }
13387
13388 return new_loc_descr (op, i, 0);
13389 }
13390
13391 /* Likewise, for unsigned constants. */
13392
13393 static dw_loc_descr_ref
13394 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13395 {
13396 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13397 const unsigned HOST_WIDE_INT max_uint
13398 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13399
13400 /* If possible, use the clever signed constants handling. */
13401 if (i <= max_int)
13402 return int_loc_descriptor ((HOST_WIDE_INT) i);
13403
13404 /* Here, we are left with positive numbers that cannot be represented as
13405 HOST_WIDE_INT, i.e.:
13406 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13407
13408 Using a DW_OP_const4u/const8u operation to encode them consumes a lot
13409 of bytes, whereas it may be better to output a negative integer: thanks
13410 to integer wrapping, we know that:
13411 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
13412 = x - 2 * (max (HOST_WIDE_INT) + 1)
13413 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13414 small negative integers. Let's try that in cases where it will clearly
13415 improve the encoding: there is no gain turning DW_OP_const4u into
13416 DW_OP_const4s. */
13417 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13418 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13419 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13420 {
13421 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13422
13423 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13424 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13425 const HOST_WIDE_INT second_shift
13426 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13427
13428 /* So we finally have:
13429 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13430 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13431 return int_loc_descriptor (second_shift);
13432 }
13433
13434 /* Last chance: fallback to a simple constant operation. */
13435 return new_loc_descr
13436 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13437 ? DW_OP_const4u
13438 : DW_OP_const8u,
13439 i, 0);
13440 }
13441
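/* For illustration, assuming a 64-bit HOST_WIDE_INT and
   DWARF2_ADDR_SIZE == 8: the unsigned value 0xffffffffffffff00 wraps to
   the signed value -256, so the branch above emits DW_OP_const2s -256
   (3 bytes) rather than DW_OP_const8u (9 bytes).  */
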
13442 /* Generate and return a location description that computes the unsigned
13443 comparison of the two stack top entries (a OP b where b is the top-most
13444 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13445 LE_EXPR, GT_EXPR or GE_EXPR. */
13446
13447 static dw_loc_descr_ref
13448 uint_comparison_loc_list (enum tree_code kind)
13449 {
13450 enum dwarf_location_atom op, flip_op;
13451 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13452
13453 switch (kind)
13454 {
13455 case LT_EXPR:
13456 op = DW_OP_lt;
13457 break;
13458 case LE_EXPR:
13459 op = DW_OP_le;
13460 break;
13461 case GT_EXPR:
13462 op = DW_OP_gt;
13463 break;
13464 case GE_EXPR:
13465 op = DW_OP_ge;
13466 break;
13467 default:
13468 gcc_unreachable ();
13469 }
13470
13471 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13472 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13473
13474 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13475 possible to perform unsigned comparisons: we just have to distinguish
13476 two cases:
13477
13478 1. when a and b have the same sign (as signed integers); then we should
13479 return: a OP(signed) b;
13480
13481 2. when a is a negative signed integer while b is a positive one, then a
13482 is a greater unsigned integer than b; likewise when a and b's roles
13483 are flipped.
13484
13485 So first, compare the sign of the two operands. */
13486 ret = new_loc_descr (DW_OP_over, 0, 0);
13487 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13488 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13489 /* If they have different signs (i.e. they have different sign bits), then
13490 the stack top value has now the sign bit set and thus it's smaller than
13491 zero. */
13492 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13493 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13494 add_loc_descr (&ret, bra_node);
13495
13496 /* We are in case 1. At this point, we know both operands have the same
13497 sign, so it's safe to use the built-in signed comparison. */
13498 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13499 add_loc_descr (&ret, jmp_node);
13500
13501 /* We are in case 2. Here, we know both operands do not have the same sign,
13502 so we have to flip the signed comparison. */
13503 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13504 tmp = new_loc_descr (flip_op, 0, 0);
13505 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13506 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13507 add_loc_descr (&ret, tmp);
13508
13509 /* This dummy operation is necessary to make the two branches join. */
13510 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13511 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13512 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13513 add_loc_descr (&ret, tmp);
13514
13515 return ret;
13516 }
13517
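/* For illustration: for KIND == LT_EXPR the list built above reads
     DW_OP_over; DW_OP_over; DW_OP_xor; DW_OP_lit0; DW_OP_lt;
     DW_OP_bra L1; DW_OP_lt; DW_OP_skip L2; L1: DW_OP_gt; L2: DW_OP_nop
   (L1/L2 are just labels for this sketch), i.e. a signed "<" when the
   operands' sign bits agree and the flipped ">" when they differ.  */
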
13518 /* Likewise, but takes the location description lists (might be destructive on
13519 them). Return NULL if either is NULL or if concatenation fails. */
13520
13521 static dw_loc_list_ref
13522 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13523 enum tree_code kind)
13524 {
13525 if (left == NULL || right == NULL)
13526 return NULL;
13527
13528 add_loc_list (&left, right);
13529 if (left == NULL)
13530 return NULL;
13531
13532 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13533 return left;
13534 }
13535
13536 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13537 without actually allocating it. */
13538
13539 static unsigned long
13540 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13541 {
13542 return size_of_int_loc_descriptor (i >> shift)
13543 + size_of_int_loc_descriptor (shift)
13544 + 1;
13545 }
13546
13547 /* Return size_of_locs (int_loc_descriptor (i)) without
13548 actually allocating it. */
13549
13550 static unsigned long
13551 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13552 {
13553 unsigned long s;
13554
13555 if (i >= 0)
13556 {
13557 int clz, ctz;
13558 if (i <= 31)
13559 return 1;
13560 else if (i <= 0xff)
13561 return 2;
13562 else if (i <= 0xffff)
13563 return 3;
13564 clz = clz_hwi (i);
13565 ctz = ctz_hwi (i);
13566 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13567 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13568 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13569 - clz - 5);
13570 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13571 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13572 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13573 - clz - 8);
13574 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13575 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13576 <= 4)
13577 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13578 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13579 return 5;
13580 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13581 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13582 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13583 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13584 - clz - 8);
13585 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13586 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13587 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13588 - clz - 16);
13589 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13590 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13591 && s > 6)
13592 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13593 - clz - 32);
13594 else
13595 return 1 + s;
13596 }
13597 else
13598 {
13599 if (i >= -0x80)
13600 return 2;
13601 else if (i >= -0x8000)
13602 return 3;
13603 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13604 {
13605 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13606 {
13607 s = size_of_int_loc_descriptor (-i) + 1;
13608 if (s < 5)
13609 return s;
13610 }
13611 return 5;
13612 }
13613 else
13614 {
13615 unsigned long r = 1 + size_of_sleb128 (i);
13616 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13617 {
13618 s = size_of_int_loc_descriptor (-i) + 1;
13619 if (s < r)
13620 return s;
13621 }
13622 return r;
13623 }
13624 }
13625 }
13626
13627 /* Return a loc description representing the "address" of an integer value.
13628 This can appear only as a top-level expression. */
13629
13630 static dw_loc_descr_ref
13631 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13632 {
13633 int litsize;
13634 dw_loc_descr_ref loc_result = NULL;
13635
13636 if (!(dwarf_version >= 4 || !dwarf_strict))
13637 return NULL;
13638
13639 litsize = size_of_int_loc_descriptor (i);
13640 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13641 is more compact. For DW_OP_stack_value we need:
13642 litsize + 1 (DW_OP_stack_value)
13643 and for DW_OP_implicit_value:
13644 1 (DW_OP_implicit_value) + 1 (length) + size. */
13645 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13646 {
13647 loc_result = int_loc_descriptor (i);
13648 add_loc_descr (&loc_result,
13649 new_loc_descr (DW_OP_stack_value, 0, 0));
13650 return loc_result;
13651 }
13652
13653 loc_result = new_loc_descr (DW_OP_implicit_value,
13654 size, 0);
13655 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13656 loc_result->dw_loc_oprnd2.v.val_int = i;
13657 return loc_result;
13658 }
13659
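/* For illustration: with SIZE == 4 and I == 42 the literal costs 2 bytes
   (DW_OP_const1u 42), so "DW_OP_const1u 42; DW_OP_stack_value" (3 bytes)
   wins over "DW_OP_implicit_value 4 <4 data bytes>" (6 bytes), whereas a
   value wider than DWARF2_ADDR_SIZE falls through to the
   DW_OP_implicit_value form.  */
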
13660 /* Return a location descriptor that designates a base+offset location. */
13661
13662 static dw_loc_descr_ref
13663 based_loc_descr (rtx reg, poly_int64 offset,
13664 enum var_init_status initialized)
13665 {
13666 unsigned int regno;
13667 dw_loc_descr_ref result;
13668 dw_fde_ref fde = cfun->fde;
13669
13670 /* We only use "frame base" when we're sure we're talking about the
13671 post-prologue local stack frame. We do this by *not* running
13672 register elimination until this point, and recognizing the special
13673 argument pointer and soft frame pointer rtx's. */
13674 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13675 {
13676 rtx elim = (ira_use_lra_p
13677 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13678 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13679
13680 if (elim != reg)
13681 {
13682 elim = strip_offset_and_add (elim, &offset);
13683 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13684 && (elim == hard_frame_pointer_rtx
13685 || elim == stack_pointer_rtx))
13686 || elim == (frame_pointer_needed
13687 ? hard_frame_pointer_rtx
13688 : stack_pointer_rtx));
13689
13690 /* If drap register is used to align stack, use frame
13691 pointer + offset to access stack variables. If stack
13692 is aligned without drap, use stack pointer + offset to
13693 access stack variables. */
13694 if (crtl->stack_realign_tried
13695 && reg == frame_pointer_rtx)
13696 {
13697 int base_reg
13698 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13699 ? HARD_FRAME_POINTER_REGNUM
13700 : REGNO (elim));
13701 return new_reg_loc_descr (base_reg, offset);
13702 }
13703
13704 gcc_assert (frame_pointer_fb_offset_valid);
13705 offset += frame_pointer_fb_offset;
13706 HOST_WIDE_INT const_offset;
13707 if (offset.is_constant (&const_offset))
13708 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13709 else
13710 {
13711 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
13712 loc_descr_plus_const (&ret, offset);
13713 return ret;
13714 }
13715 }
13716 }
13717
13718 regno = REGNO (reg);
13719 #ifdef LEAF_REG_REMAP
13720 if (crtl->uses_only_leaf_regs)
13721 {
13722 int leaf_reg = LEAF_REG_REMAP (regno);
13723 if (leaf_reg != -1)
13724 regno = (unsigned) leaf_reg;
13725 }
13726 #endif
13727 regno = DWARF_FRAME_REGNUM (regno);
13728
13729 HOST_WIDE_INT const_offset;
13730 if (!optimize && fde
13731 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
13732 && offset.is_constant (&const_offset))
13733 {
13734 /* Use cfa+offset to represent the location of arguments passed
13735 on the stack when drap is used to align stack.
13736 Only do this when not optimizing; for optimized code, var-tracking
13737 is supposed to track where the arguments live, and the register
13738 used as vdrap or drap in some spot might be used for something
13739 else in another part of the routine. */
13740 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13741 }
13742
13743 result = new_reg_loc_descr (regno, offset);
13744
13745 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13746 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13747
13748 return result;
13749 }
13750
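/* For illustration (hypothetical register numbers): once the frame is
   laid out, a local at frame_pointer_rtx + 8 is normally emitted as
   DW_OP_fbreg <8 + frame_pointer_fb_offset>, while an ordinary hard
   register base with DWARF number 6 and offset 8 becomes DW_OP_breg6 8
   via new_reg_loc_descr.  */
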
13751 /* Return true if this RTL expression describes a base+offset calculation. */
13752
13753 static inline int
13754 is_based_loc (const_rtx rtl)
13755 {
13756 return (GET_CODE (rtl) == PLUS
13757 && ((REG_P (XEXP (rtl, 0))
13758 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13759 && CONST_INT_P (XEXP (rtl, 1)))));
13760 }
13761
13762 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13763 failed. */
13764
13765 static dw_loc_descr_ref
13766 tls_mem_loc_descriptor (rtx mem)
13767 {
13768 tree base;
13769 dw_loc_descr_ref loc_result;
13770
13771 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13772 return NULL;
13773
13774 base = get_base_address (MEM_EXPR (mem));
13775 if (base == NULL
13776 || !VAR_P (base)
13777 || !DECL_THREAD_LOCAL_P (base))
13778 return NULL;
13779
13780 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13781 if (loc_result == NULL)
13782 return NULL;
13783
13784 if (maybe_ne (MEM_OFFSET (mem), 0))
13785 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13786
13787 return loc_result;
13788 }
13789
13790 /* Output debug info about the reason why we failed to expand an expression
13791 as a DWARF expression. */
13792
13793 static void
13794 expansion_failed (tree expr, rtx rtl, char const *reason)
13795 {
13796 if (dump_file && (dump_flags & TDF_DETAILS))
13797 {
13798 fprintf (dump_file, "Failed to expand as dwarf: ");
13799 if (expr)
13800 print_generic_expr (dump_file, expr, dump_flags);
13801 if (rtl)
13802 {
13803 fprintf (dump_file, "\n");
13804 print_rtl (dump_file, rtl);
13805 }
13806 fprintf (dump_file, "\nReason: %s\n", reason);
13807 }
13808 }
13809
13810 /* Helper function for const_ok_for_output. */
13811
13812 static bool
13813 const_ok_for_output_1 (rtx rtl)
13814 {
13815 if (targetm.const_not_ok_for_debug_p (rtl))
13816 {
13817 if (GET_CODE (rtl) != UNSPEC)
13818 {
13819 expansion_failed (NULL_TREE, rtl,
13820 "Expression rejected for debug by the backend.\n");
13821 return false;
13822 }
13823
13824 /* If delegitimize_address couldn't do anything with the UNSPEC, and
13825 the target hook doesn't explicitly allow it in debug info, assume
13826 we can't express it in the debug info. */
13827 /* Don't complain about TLS UNSPECs, those are just too hard to
13828 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13829 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13830 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13831 if (flag_checking
13832 && (XVECLEN (rtl, 0) == 0
13833 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13834 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13835 inform (current_function_decl
13836 ? DECL_SOURCE_LOCATION (current_function_decl)
13837 : UNKNOWN_LOCATION,
13838 #if NUM_UNSPEC_VALUES > 0
13839 "non-delegitimized UNSPEC %s (%d) found in variable location",
13840 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13841 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13842 XINT (rtl, 1));
13843 #else
13844 "non-delegitimized UNSPEC %d found in variable location",
13845 XINT (rtl, 1));
13846 #endif
13847 expansion_failed (NULL_TREE, rtl,
13848 "UNSPEC hasn't been delegitimized.\n");
13849 return false;
13850 }
13851
13852 if (CONST_POLY_INT_P (rtl))
13853 return false;
13854
13855 if (targetm.const_not_ok_for_debug_p (rtl))
13856 {
13857 expansion_failed (NULL_TREE, rtl,
13858 "Expression rejected for debug by the backend.\n");
13859 return false;
13860 }
13861
13862 /* FIXME: Refer to PR60655. It is possible for simplification
13863 of rtl expressions in var tracking to produce such expressions.
13864 We should really identify / validate expressions
13865 enclosed in CONST that can be handled by assemblers on various
13866 targets and only handle legitimate cases here. */
13867 switch (GET_CODE (rtl))
13868 {
13869 case SYMBOL_REF:
13870 break;
13871 case NOT:
13872 case NEG:
13873 return false;
13874 default:
13875 return true;
13876 }
13877
13878 if (CONSTANT_POOL_ADDRESS_P (rtl))
13879 {
13880 bool marked;
13881 get_pool_constant_mark (rtl, &marked);
13882 /* If all references to this pool constant were optimized away,
13883 it was not output and thus we can't represent it. */
13884 if (!marked)
13885 {
13886 expansion_failed (NULL_TREE, rtl,
13887 "Constant was removed from constant pool.\n");
13888 return false;
13889 }
13890 }
13891
13892 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13893 return false;
13894
13895 /* Avoid references to external symbols in debug info; on several targets
13896 the linker might even refuse to link when linking a shared library,
13897 and in many other cases the relocations for .debug_info/.debug_loc are
13898 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
13899 to be defined within the same shared library or executable, are fine. */
13900 if (SYMBOL_REF_EXTERNAL_P (rtl))
13901 {
13902 tree decl = SYMBOL_REF_DECL (rtl);
13903
13904 if (decl == NULL || !targetm.binds_local_p (decl))
13905 {
13906 expansion_failed (NULL_TREE, rtl,
13907 "Symbol not defined in current TU.\n");
13908 return false;
13909 }
13910 }
13911
13912 return true;
13913 }
13914
13915 /* Return true if constant RTL can be emitted in DW_OP_addr or
13916 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13917 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13918
13919 static bool
13920 const_ok_for_output (rtx rtl)
13921 {
13922 if (GET_CODE (rtl) == SYMBOL_REF)
13923 return const_ok_for_output_1 (rtl);
13924
13925 if (GET_CODE (rtl) == CONST)
13926 {
13927 subrtx_var_iterator::array_type array;
13928 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13929 if (!const_ok_for_output_1 (*iter))
13930 return false;
13931 return true;
13932 }
13933
13934 return true;
13935 }
13936
13937 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13938 if possible, NULL otherwise. */
13939
13940 static dw_die_ref
13941 base_type_for_mode (machine_mode mode, bool unsignedp)
13942 {
13943 dw_die_ref type_die;
13944 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13945
13946 if (type == NULL)
13947 return NULL;
13948 switch (TREE_CODE (type))
13949 {
13950 case INTEGER_TYPE:
13951 case REAL_TYPE:
13952 break;
13953 default:
13954 return NULL;
13955 }
13956 type_die = lookup_type_die (type);
13957 if (!type_die)
13958 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13959 comp_unit_die ());
13960 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13961 return NULL;
13962 return type_die;
13963 }
13964
13965 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
13966 type matching MODE, or, if MODE is narrower than or as wide as
13967 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
13968 possible. */
13969
13970 static dw_loc_descr_ref
13971 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
13972 {
13973 machine_mode outer_mode = mode;
13974 dw_die_ref type_die;
13975 dw_loc_descr_ref cvt;
13976
13977 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13978 {
13979 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13980 return op;
13981 }
13982 type_die = base_type_for_mode (outer_mode, 1);
13983 if (type_die == NULL)
13984 return NULL;
13985 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13986 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13987 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13988 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13989 add_loc_descr (&op, cvt);
13990 return op;
13991 }
13992
13993 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13994
13995 static dw_loc_descr_ref
13996 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13997 dw_loc_descr_ref op1)
13998 {
13999 dw_loc_descr_ref ret = op0;
14000 add_loc_descr (&ret, op1);
14001 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14002 if (STORE_FLAG_VALUE != 1)
14003 {
14004 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14005 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14006 }
14007 return ret;
14008 }
14009
14010 /* Subroutine of scompare_loc_descriptor for the case in which we're
14011 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14012 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14013
14014 static dw_loc_descr_ref
14015 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14016 scalar_int_mode op_mode,
14017 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14018 {
14019 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14020 dw_loc_descr_ref cvt;
14021
14022 if (type_die == NULL)
14023 return NULL;
14024 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14025 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14026 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14027 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14028 add_loc_descr (&op0, cvt);
14029 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14030 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14031 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14032 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14033 add_loc_descr (&op1, cvt);
14034 return compare_loc_descriptor (op, op0, op1);
14035 }
14036
14037 /* Subroutine of scompare_loc_descriptor for the case in which we're
14038 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14039 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14040
14041 static dw_loc_descr_ref
14042 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14043 scalar_int_mode op_mode,
14044 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14045 {
14046 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14047 /* For eq/ne, if the operands are known to be zero-extended,
14048 there is no need to do the fancy shifting up. */
14049 if (op == DW_OP_eq || op == DW_OP_ne)
14050 {
14051 dw_loc_descr_ref last0, last1;
14052 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14053 ;
14054 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14055 ;
14056 /* deref_size zero extends, and for constants we can check
14057 whether they are zero extended or not. */
14058 if (((last0->dw_loc_opc == DW_OP_deref_size
14059 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14060 || (CONST_INT_P (XEXP (rtl, 0))
14061 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14062 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14063 && ((last1->dw_loc_opc == DW_OP_deref_size
14064 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14065 || (CONST_INT_P (XEXP (rtl, 1))
14066 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14067 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14068 return compare_loc_descriptor (op, op0, op1);
14069
14070 /* EQ/NE comparison against constant in narrower type than
14071 DWARF2_ADDR_SIZE can be performed either as
14072 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14073 DW_OP_{eq,ne}
14074 or
14075 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14076 DW_OP_{eq,ne}. Pick whatever is shorter. */
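/* For illustration, comparing a QImode value against 0x12 on a 64-bit
   address target: the mask form costs 4 bytes of setup
   (DW_OP_const1u 0xff; DW_OP_and; DW_OP_lit18) and is chosen over
   shifting both operands left by 56 bits, which would need 7 bytes.  */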
14077 if (CONST_INT_P (XEXP (rtl, 1))
14078 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14079 && (size_of_int_loc_descriptor (shift) + 1
14080 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14081 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14082 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14083 & GET_MODE_MASK (op_mode))))
14084 {
14085 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14086 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14087 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14088 & GET_MODE_MASK (op_mode));
14089 return compare_loc_descriptor (op, op0, op1);
14090 }
14091 }
14092 add_loc_descr (&op0, int_loc_descriptor (shift));
14093 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14094 if (CONST_INT_P (XEXP (rtl, 1)))
14095 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14096 else
14097 {
14098 add_loc_descr (&op1, int_loc_descriptor (shift));
14099 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14100 }
14101 return compare_loc_descriptor (op, op0, op1);
14102 }
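/* Worked example (a sketch, assuming DWARF2_ADDR_SIZE == 4 and a 16-bit
   OP_MODE, so SHIFT == 16): the generic path compares A and B as

       <A> DW_OP_lit16 DW_OP_shl  <B> DW_OP_lit16 DW_OP_shl  DW_OP_lt

   Moving both values into the top 16 bits of the 32-bit stack slot makes
   the full-width signed comparison agree with the 16-bit signed
   comparison, whatever the upper bits of the stack copies contained
   beforehand.  For EQ/NE against a small constant the masking form can
   win instead, e.g.  X == 5  as

       <X> DW_OP_const2u 0xffff DW_OP_and  DW_OP_lit5  DW_OP_eq

   whichever of the two encodings is shorter, as the comment above
   explains.  */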
14103
14104 /* Return location descriptor for signed comparison OP RTL. */
14105
14106 static dw_loc_descr_ref
14107 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14108 machine_mode mem_mode)
14109 {
14110 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14111 dw_loc_descr_ref op0, op1;
14112
14113 if (op_mode == VOIDmode)
14114 op_mode = GET_MODE (XEXP (rtl, 1));
14115 if (op_mode == VOIDmode)
14116 return NULL;
14117
14118 scalar_int_mode int_op_mode;
14119 if (dwarf_strict
14120 && dwarf_version < 5
14121 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14122 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14123 return NULL;
14124
14125 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14126 VAR_INIT_STATUS_INITIALIZED);
14127 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14128 VAR_INIT_STATUS_INITIALIZED);
14129
14130 if (op0 == NULL || op1 == NULL)
14131 return NULL;
14132
14133 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14134 {
14135 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14136 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14137
14138 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14139 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14140 }
14141 return compare_loc_descriptor (op, op0, op1);
14142 }
14143
14144 /* Return location descriptor for unsigned comparison OP RTL. */
14145
14146 static dw_loc_descr_ref
14147 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14148 machine_mode mem_mode)
14149 {
14150 dw_loc_descr_ref op0, op1;
14151
14152 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14153 if (test_op_mode == VOIDmode)
14154 test_op_mode = GET_MODE (XEXP (rtl, 1));
14155
14156 scalar_int_mode op_mode;
14157 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14158 return NULL;
14159
14160 if (dwarf_strict
14161 && dwarf_version < 5
14162 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14163 return NULL;
14164
14165 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14166 VAR_INIT_STATUS_INITIALIZED);
14167 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14168 VAR_INIT_STATUS_INITIALIZED);
14169
14170 if (op0 == NULL || op1 == NULL)
14171 return NULL;
14172
14173 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14174 {
14175 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14176 dw_loc_descr_ref last0, last1;
14177 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14178 ;
14179 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14180 ;
14181 if (CONST_INT_P (XEXP (rtl, 0)))
14182 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14183 /* deref_size zero extends, so no need to mask it again. */
14184 else if (last0->dw_loc_opc != DW_OP_deref_size
14185 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14186 {
14187 add_loc_descr (&op0, int_loc_descriptor (mask));
14188 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14189 }
14190 if (CONST_INT_P (XEXP (rtl, 1)))
14191 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14192 /* deref_size zero extends, so no need to mask it again. */
14193 else if (last1->dw_loc_opc != DW_OP_deref_size
14194 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14195 {
14196 add_loc_descr (&op1, int_loc_descriptor (mask));
14197 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14198 }
14199 }
14200 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14201 {
14202 HOST_WIDE_INT bias = 1;
14203 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14204 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14205 if (CONST_INT_P (XEXP (rtl, 1)))
14206 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14207 + INTVAL (XEXP (rtl, 1)));
14208 else
14209 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14210 bias, 0));
14211 }
14212 return compare_loc_descriptor (op, op0, op1);
14213 }
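/* The DWARF relational operators compare untyped stack values as signed
   integers, so the code above rewrites an unsigned comparison in terms of
   signed ones.  A sketch of the two cases:

     - narrower than DWARF2_ADDR_SIZE: both operands are masked with the
       mode mask (unless they are already known to be zero extended), after
       which the signed full-width comparison of the zero-extended values
       gives the unsigned result;

     - exactly DWARF2_ADDR_SIZE: both operands get DW_OP_plus_uconst with a
       bias of 2^(N-1), i.e. the sign bit is flipped, and  A <u B  becomes
       (A + bias) <s (B + bias).  E.g. with 32-bit slots, 0x80000000 <u 1
       biases to 0 <s 0x80000001 (negative as signed), correctly false,
       while 1 <u 0x80000000 biases to 0x80000001 <s 0, correctly true.  */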
14214
14215 /* Return location descriptor for {U,S}{MIN,MAX}. */
14216
14217 static dw_loc_descr_ref
14218 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14219 machine_mode mem_mode)
14220 {
14221 enum dwarf_location_atom op;
14222 dw_loc_descr_ref op0, op1, ret;
14223 dw_loc_descr_ref bra_node, drop_node;
14224
14225 scalar_int_mode int_mode;
14226 if (dwarf_strict
14227 && dwarf_version < 5
14228 && (!is_a <scalar_int_mode> (mode, &int_mode)
14229 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14230 return NULL;
14231
14232 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14233 VAR_INIT_STATUS_INITIALIZED);
14234 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14235 VAR_INIT_STATUS_INITIALIZED);
14236
14237 if (op0 == NULL || op1 == NULL)
14238 return NULL;
14239
14240 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14241 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14242 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14243 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14244 {
14245 /* Checked by the caller. */
14246 int_mode = as_a <scalar_int_mode> (mode);
14247 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14248 {
14249 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14250 add_loc_descr (&op0, int_loc_descriptor (mask));
14251 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14252 add_loc_descr (&op1, int_loc_descriptor (mask));
14253 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14254 }
14255 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14256 {
14257 HOST_WIDE_INT bias = 1;
14258 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14259 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14260 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14261 }
14262 }
14263 else if (is_a <scalar_int_mode> (mode, &int_mode)
14264 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14265 {
14266 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14267 add_loc_descr (&op0, int_loc_descriptor (shift));
14268 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14269 add_loc_descr (&op1, int_loc_descriptor (shift));
14270 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14271 }
14272 else if (is_a <scalar_int_mode> (mode, &int_mode)
14273 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14274 {
14275 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14276 dw_loc_descr_ref cvt;
14277 if (type_die == NULL)
14278 return NULL;
14279 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14280 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14281 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14282 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14283 add_loc_descr (&op0, cvt);
14284 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14285 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14286 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14287 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14288 add_loc_descr (&op1, cvt);
14289 }
14290
14291 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14292 op = DW_OP_lt;
14293 else
14294 op = DW_OP_gt;
14295 ret = op0;
14296 add_loc_descr (&ret, op1);
14297 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14298 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14299 add_loc_descr (&ret, bra_node);
14300 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14301 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14302 add_loc_descr (&ret, drop_node);
14303 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14304 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14305 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14306 && is_a <scalar_int_mode> (mode, &int_mode)
14307 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14308 ret = convert_descriptor_to_mode (int_mode, ret);
14309 return ret;
14310 }
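/* For illustration, after the signedness adjustments above SMIN (A, B)
   ends up as the branchy stack program

       <A> DW_OP_dup  <B> DW_OP_swap DW_OP_over    (stack: A B A B)
       DW_OP_lt DW_OP_bra <L>                      (branch if A < B)
       DW_OP_swap                                  (else put B on top)
     L: DW_OP_drop                                 (drop the loser)

   e.g. A = 5, B = 3: the stack goes 5 3 5 3 -> 5 3 0 -> (fall through)
   3 5 -> 3, which is the minimum.  MAX uses DW_OP_gt instead.
   (Illustrative trace only.)  */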
14311
14312 /* Helper function for mem_loc_descriptor. Perform the binary operation
14313 OP after converting both arguments to TYPE_DIE, then convert the result
14314 back to the untyped or unsigned representation of MODE. */
14315
14316 static dw_loc_descr_ref
14317 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14318 scalar_int_mode mode, machine_mode mem_mode)
14319 {
14320 dw_loc_descr_ref cvt, op0, op1;
14321
14322 if (type_die == NULL)
14323 return NULL;
14324 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14325 VAR_INIT_STATUS_INITIALIZED);
14326 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14327 VAR_INIT_STATUS_INITIALIZED);
14328 if (op0 == NULL || op1 == NULL)
14329 return NULL;
14330 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14331 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14332 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14333 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14334 add_loc_descr (&op0, cvt);
14335 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14336 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14337 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14338 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14339 add_loc_descr (&op1, cvt);
14340 add_loc_descr (&op0, op1);
14341 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14342 return convert_descriptor_to_mode (mode, op0);
14343 }
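/* A sketch of what typed_binop builds, e.g. for a signed 128-bit division
   on a 64-bit target:

       <op0> DW_OP_convert <T>  <op1> DW_OP_convert <T>  DW_OP_div

   followed by the conversion that convert_descriptor_to_mode appends to
   bring the result back to the representation the caller expects (untyped
   for address-sized results, an unsigned base type otherwise), where <T>
   is TYPE_DIE.  (Illustrative only.)  */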
14344
14345 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14346 const0 is DW_OP_lit0 or corresponding typed constant,
14347 const1 is DW_OP_lit1 or corresponding typed constant
14348 and constMSB is constant with just the MSB bit set
14349 for the mode):
14350 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14351 L1: const0 DW_OP_swap
14352 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14353 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14354 L3: DW_OP_drop
14355 L4: DW_OP_nop
14356
14357 CTZ is similar:
14358 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14359 L1: const0 DW_OP_swap
14360 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14361 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14362 L3: DW_OP_drop
14363 L4: DW_OP_nop
14364
14365 FFS is similar:
14366 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14367 L1: const1 DW_OP_swap
14368 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14369 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14370 L3: DW_OP_drop
14371 L4: DW_OP_nop */
14372
14373 static dw_loc_descr_ref
14374 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14375 machine_mode mem_mode)
14376 {
14377 dw_loc_descr_ref op0, ret, tmp;
14378 HOST_WIDE_INT valv;
14379 dw_loc_descr_ref l1jump, l1label;
14380 dw_loc_descr_ref l2jump, l2label;
14381 dw_loc_descr_ref l3jump, l3label;
14382 dw_loc_descr_ref l4jump, l4label;
14383 rtx msb;
14384
14385 if (GET_MODE (XEXP (rtl, 0)) != mode)
14386 return NULL;
14387
14388 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14389 VAR_INIT_STATUS_INITIALIZED);
14390 if (op0 == NULL)
14391 return NULL;
14392 ret = op0;
14393 if (GET_CODE (rtl) == CLZ)
14394 {
14395 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14396 valv = GET_MODE_BITSIZE (mode);
14397 }
14398 else if (GET_CODE (rtl) == FFS)
14399 valv = 0;
14400 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14401 valv = GET_MODE_BITSIZE (mode);
14402 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14403 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14404 add_loc_descr (&ret, l1jump);
14405 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14406 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14407 VAR_INIT_STATUS_INITIALIZED);
14408 if (tmp == NULL)
14409 return NULL;
14410 add_loc_descr (&ret, tmp);
14411 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14412 add_loc_descr (&ret, l4jump);
14413 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14414 ? const1_rtx : const0_rtx,
14415 mode, mem_mode,
14416 VAR_INIT_STATUS_INITIALIZED);
14417 if (l1label == NULL)
14418 return NULL;
14419 add_loc_descr (&ret, l1label);
14420 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14421 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14422 add_loc_descr (&ret, l2label);
14423 if (GET_CODE (rtl) != CLZ)
14424 msb = const1_rtx;
14425 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14426 msb = GEN_INT (HOST_WIDE_INT_1U
14427 << (GET_MODE_BITSIZE (mode) - 1));
14428 else
14429 msb = immed_wide_int_const
14430 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14431 GET_MODE_PRECISION (mode)), mode);
14432 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14433 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14434 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14435 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14436 else
14437 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14438 VAR_INIT_STATUS_INITIALIZED);
14439 if (tmp == NULL)
14440 return NULL;
14441 add_loc_descr (&ret, tmp);
14442 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14443 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14444 add_loc_descr (&ret, l3jump);
14445 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14446 VAR_INIT_STATUS_INITIALIZED);
14447 if (tmp == NULL)
14448 return NULL;
14449 add_loc_descr (&ret, tmp);
14450 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14451 ? DW_OP_shl : DW_OP_shr, 0, 0));
14452 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14453 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14454 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14455 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14456 add_loc_descr (&ret, l2jump);
14457 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14458 add_loc_descr (&ret, l3label);
14459 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14460 add_loc_descr (&ret, l4label);
14461 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14462 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14463 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14464 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14465 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14466 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14467 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14468 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14469 return ret;
14470 }
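/* Illustrative C equivalent of the expression documented above (a sketch;
   "value" is the operand and MSB is the constant with only the most
   significant bit of the mode set):

       if (value == 0)
	 result = constV;                  (0 for FFS)
       else
	 {
	   result = 0;                     (1 for FFS)
	   while ((value & MSB) == 0)      (value & 1 for CTZ and FFS)
	     {
	       value <<= 1;                (value >>= 1 for CTZ and FFS)
	       result++;
	     }
	 }

   E.g. CLZ of 0x10 in a 32-bit mode iterates 27 times and yields 27,
   and FFS of 4 yields 3.  */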
14471
14472 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14473 const1 is DW_OP_lit1 or corresponding typed constant):
14474 const0 DW_OP_swap
14475 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14476 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14477 L2: DW_OP_drop
14478
14479 PARITY is similar:
14480 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14481 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14482 L2: DW_OP_drop */
14483
14484 static dw_loc_descr_ref
14485 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14486 machine_mode mem_mode)
14487 {
14488 dw_loc_descr_ref op0, ret, tmp;
14489 dw_loc_descr_ref l1jump, l1label;
14490 dw_loc_descr_ref l2jump, l2label;
14491
14492 if (GET_MODE (XEXP (rtl, 0)) != mode)
14493 return NULL;
14494
14495 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14496 VAR_INIT_STATUS_INITIALIZED);
14497 if (op0 == NULL)
14498 return NULL;
14499 ret = op0;
14500 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14501 VAR_INIT_STATUS_INITIALIZED);
14502 if (tmp == NULL)
14503 return NULL;
14504 add_loc_descr (&ret, tmp);
14505 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14506 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14507 add_loc_descr (&ret, l1label);
14508 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14509 add_loc_descr (&ret, l2jump);
14510 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14511 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14512 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14513 VAR_INIT_STATUS_INITIALIZED);
14514 if (tmp == NULL)
14515 return NULL;
14516 add_loc_descr (&ret, tmp);
14517 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14518 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14519 ? DW_OP_plus : DW_OP_xor, 0, 0));
14520 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14521 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14522 VAR_INIT_STATUS_INITIALIZED);
14523 add_loc_descr (&ret, tmp);
14524 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14525 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14526 add_loc_descr (&ret, l1jump);
14527 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14528 add_loc_descr (&ret, l2label);
14529 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14530 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14531 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14532 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14533 return ret;
14534 }
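/* The loop documented above corresponds to the familiar C idiom (an
   illustrative sketch of the intended computation, not compiler code):

       result = 0;
       while (value != 0)
	 {
	   result += value & 1;            (result ^= value & 1 for PARITY)
	   value >>= 1;
	 }

   e.g. POPCOUNT of 0b1011 gives 3 and PARITY of 0b1011 gives 1.  */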
14535
14536 /* BSWAP (constS is initial shift count, either 56 or 24):
14537 constS const0
14538 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14539 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14540 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14541 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14542 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
14543
14544 static dw_loc_descr_ref
14545 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
14546 machine_mode mem_mode)
14547 {
14548 dw_loc_descr_ref op0, ret, tmp;
14549 dw_loc_descr_ref l1jump, l1label;
14550 dw_loc_descr_ref l2jump, l2label;
14551
14552 if (BITS_PER_UNIT != 8
14553 || (GET_MODE_BITSIZE (mode) != 32
14554 && GET_MODE_BITSIZE (mode) != 64))
14555 return NULL;
14556
14557 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14558 VAR_INIT_STATUS_INITIALIZED);
14559 if (op0 == NULL)
14560 return NULL;
14561
14562 ret = op0;
14563 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14564 mode, mem_mode,
14565 VAR_INIT_STATUS_INITIALIZED);
14566 if (tmp == NULL)
14567 return NULL;
14568 add_loc_descr (&ret, tmp);
14569 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14570 VAR_INIT_STATUS_INITIALIZED);
14571 if (tmp == NULL)
14572 return NULL;
14573 add_loc_descr (&ret, tmp);
14574 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14575 add_loc_descr (&ret, l1label);
14576 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14577 mode, mem_mode,
14578 VAR_INIT_STATUS_INITIALIZED);
14579 add_loc_descr (&ret, tmp);
14580 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14581 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14582 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14583 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14584 VAR_INIT_STATUS_INITIALIZED);
14585 if (tmp == NULL)
14586 return NULL;
14587 add_loc_descr (&ret, tmp);
14588 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14589 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14590 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14591 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14592 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14593 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14594 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14595 VAR_INIT_STATUS_INITIALIZED);
14596 add_loc_descr (&ret, tmp);
14597 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14598 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14599 add_loc_descr (&ret, l2jump);
14600 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14601 VAR_INIT_STATUS_INITIALIZED);
14602 add_loc_descr (&ret, tmp);
14603 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14604 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14605 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14606 add_loc_descr (&ret, l1jump);
14607 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14608 add_loc_descr (&ret, l2label);
14609 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14610 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14611 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14612 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14613 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14614 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14615 return ret;
14616 }
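/* Illustrative C equivalent of the byte-swap loop above (a sketch; "v" is
   the operand and constS is GET_MODE_BITSIZE (mode) - 8):

       r = 0;
       for (s = constS; ; s -= 8)
	 {
	   r |= ((v >> (constS - s)) & 0xff) << s;
	   if (s == 0)
	     break;
	 }

   so for a 32-bit value the low byte lands in bits 31:24, the next byte
   in bits 23:16, and so on, which is exactly the byte-swapped value.  */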
14617
14618 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14619 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14620 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14621 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14622
14623 ROTATERT is similar:
14624 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14625 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14626 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
14627
14628 static dw_loc_descr_ref
14629 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
14630 machine_mode mem_mode)
14631 {
14632 rtx rtlop1 = XEXP (rtl, 1);
14633 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14634 int i;
14635
14636 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
14637 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14638 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14639 VAR_INIT_STATUS_INITIALIZED);
14640 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14641 VAR_INIT_STATUS_INITIALIZED);
14642 if (op0 == NULL || op1 == NULL)
14643 return NULL;
14644 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14645 for (i = 0; i < 2; i++)
14646 {
14647 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14648 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14649 mode, mem_mode,
14650 VAR_INIT_STATUS_INITIALIZED);
14651 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14652 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14653 ? DW_OP_const4u
14654 : HOST_BITS_PER_WIDE_INT == 64
14655 ? DW_OP_const8u : DW_OP_constu,
14656 GET_MODE_MASK (mode), 0);
14657 else
14658 mask[i] = NULL;
14659 if (mask[i] == NULL)
14660 return NULL;
14661 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14662 }
14663 ret = op0;
14664 add_loc_descr (&ret, op1);
14665 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14666 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14667 if (GET_CODE (rtl) == ROTATERT)
14668 {
14669 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14670 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14671 GET_MODE_BITSIZE (mode), 0));
14672 }
14673 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14674 if (mask[0] != NULL)
14675 add_loc_descr (&ret, mask[0]);
14676 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14677 if (mask[1] != NULL)
14678 {
14679 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14680 add_loc_descr (&ret, mask[1]);
14681 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14682 }
14683 if (GET_CODE (rtl) == ROTATE)
14684 {
14685 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14686 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14687 GET_MODE_BITSIZE (mode), 0));
14688 }
14689 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14690 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14691 return ret;
14692 }
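/* For illustration, ROTATE (rotate left) of X by N in an address-sized
   mode reduces to the usual

       (X << N) | ((unsigned) X >> (BITSIZE - N))

   where the DW_OP_neg / DW_OP_plus_uconst <BITSIZE> pair computes
   BITSIZE - N on the stack.  For modes narrower than DWARF2_ADDR_SIZE the
   bracketed constMASK / DW_OP_and steps clip each intermediate result to
   the mode so stray high bits cannot leak into the DW_OP_or.  E.g.
   rotating the 32-bit value 0x12345678 left by 8 gives 0x34567812.
   ROTATERT applies the complemented count to the left shift instead of
   the right shift.  (A sketch of the documented sequence.)  */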
14693
14694 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14695 for DEBUG_PARAMETER_REF RTL. */
14696
14697 static dw_loc_descr_ref
14698 parameter_ref_descriptor (rtx rtl)
14699 {
14700 dw_loc_descr_ref ret;
14701 dw_die_ref ref;
14702
14703 if (dwarf_strict)
14704 return NULL;
14705 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14706 /* With LTO during LTRANS we get the late DIE that refers to the early
14707 DIE, thus we add another indirection here. This seems to confuse
14708 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
14709 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14710 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14711 if (ref)
14712 {
14713 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14714 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14715 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14716 }
14717 else
14718 {
14719 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14720 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14721 }
14722 return ret;
14723 }
14724
14725 /* The following routine converts the RTL for a variable or parameter
14726 (resident in memory) into an equivalent Dwarf representation of a
14727 mechanism for getting the address of that same variable onto the top of a
14728 hypothetical "address evaluation" stack.
14729
14730 When creating memory location descriptors, we are effectively transforming
14731 the RTL for a memory-resident object into its Dwarf postfix expression
14732 equivalent. This routine recursively descends an RTL tree, turning
14733 it into Dwarf postfix code as it goes.
14734
14735 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14736
14737 MEM_MODE is the mode of the memory reference, needed to handle some
14738 autoincrement addressing modes.
14739
14740 Return 0 if we can't represent the location. */
14741
14742 dw_loc_descr_ref
14743 mem_loc_descriptor (rtx rtl, machine_mode mode,
14744 machine_mode mem_mode,
14745 enum var_init_status initialized)
14746 {
14747 dw_loc_descr_ref mem_loc_result = NULL;
14748 enum dwarf_location_atom op;
14749 dw_loc_descr_ref op0, op1;
14750 rtx inner = NULL_RTX;
14751 poly_int64 offset;
14752
14753 if (mode == VOIDmode)
14754 mode = GET_MODE (rtl);
14755
14756 /* Note that for a dynamically sized array, the location we will generate a
14757 description of here will be the lowest numbered location which is
14758 actually within the array. That's *not* necessarily the same as the
14759 zeroth element of the array. */
14760
14761 rtl = targetm.delegitimize_address (rtl);
14762
14763 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14764 return NULL;
14765
14766 scalar_int_mode int_mode, inner_mode, op1_mode;
14767 switch (GET_CODE (rtl))
14768 {
14769 case POST_INC:
14770 case POST_DEC:
14771 case POST_MODIFY:
14772 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14773
14774 case SUBREG:
14775 /* The case of a subreg may arise when we have a local (register)
14776 variable or a formal (register) parameter which doesn't quite fill
14777 up an entire register. For now, just assume that it is
14778 legitimate to make the Dwarf info refer to the whole register which
14779 contains the given subreg. */
14780 if (!subreg_lowpart_p (rtl))
14781 break;
14782 inner = SUBREG_REG (rtl);
14783 /* FALLTHRU */
14784 case TRUNCATE:
14785 if (inner == NULL_RTX)
14786 inner = XEXP (rtl, 0);
14787 if (is_a <scalar_int_mode> (mode, &int_mode)
14788 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14789 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14790 #ifdef POINTERS_EXTEND_UNSIGNED
14791 || (int_mode == Pmode && mem_mode != VOIDmode)
14792 #endif
14793 )
14794 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
14795 {
14796 mem_loc_result = mem_loc_descriptor (inner,
14797 inner_mode,
14798 mem_mode, initialized);
14799 break;
14800 }
14801 if (dwarf_strict && dwarf_version < 5)
14802 break;
14803 if (is_a <scalar_int_mode> (mode, &int_mode)
14804 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14805 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
14806 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
14807 {
14808 dw_die_ref type_die;
14809 dw_loc_descr_ref cvt;
14810
14811 mem_loc_result = mem_loc_descriptor (inner,
14812 GET_MODE (inner),
14813 mem_mode, initialized);
14814 if (mem_loc_result == NULL)
14815 break;
14816 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14817 if (type_die == NULL)
14818 {
14819 mem_loc_result = NULL;
14820 break;
14821 }
14822 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
14823 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14824 else
14825 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14826 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14827 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14828 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14829 add_loc_descr (&mem_loc_result, cvt);
14830 if (is_a <scalar_int_mode> (mode, &int_mode)
14831 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14832 {
14833 /* Convert it to untyped afterwards. */
14834 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14835 add_loc_descr (&mem_loc_result, cvt);
14836 }
14837 }
14838 break;
14839
14840 case REG:
14841 if (!is_a <scalar_int_mode> (mode, &int_mode)
14842 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14843 && rtl != arg_pointer_rtx
14844 && rtl != frame_pointer_rtx
14845 #ifdef POINTERS_EXTEND_UNSIGNED
14846 && (int_mode != Pmode || mem_mode == VOIDmode)
14847 #endif
14848 ))
14849 {
14850 dw_die_ref type_die;
14851 unsigned int dbx_regnum;
14852
14853 if (dwarf_strict && dwarf_version < 5)
14854 break;
14855 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14856 break;
14857 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14858 if (type_die == NULL)
14859 break;
14860
14861 dbx_regnum = dbx_reg_number (rtl);
14862 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14863 break;
14864 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14865 dbx_regnum, 0);
14866 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14867 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14868 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14869 break;
14870 }
14871 /* Whenever a register number forms a part of the description of the
14872 method for calculating the (dynamic) address of a memory resident
14873 object, DWARF rules require the register number be referred to as
14874 a "base register". This distinction is not based in any way upon
14875 what category of register the hardware believes the given register
14876 belongs to. This is strictly DWARF terminology we're dealing with
14877 here. Note that in cases where the location of a memory-resident
14878 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14879 OP_CONST (0)) the actual DWARF location descriptor that we generate
14880 may just be OP_BASEREG (basereg). This may look deceptively like
14881 the object in question was allocated to a register (rather than in
14882 memory) so DWARF consumers need to be aware of the subtle
14883 distinction between OP_REG and OP_BASEREG. */
14884 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14885 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14886 else if (stack_realign_drap
14887 && crtl->drap_reg
14888 && crtl->args.internal_arg_pointer == rtl
14889 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14890 {
14891 /* If RTL is internal_arg_pointer, which has been optimized
14892 out, use DRAP instead. */
14893 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14894 VAR_INIT_STATUS_INITIALIZED);
14895 }
14896 break;
14897
14898 case SIGN_EXTEND:
14899 case ZERO_EXTEND:
14900 if (!is_a <scalar_int_mode> (mode, &int_mode)
14901 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
14902 break;
14903 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
14904 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14905 if (op0 == 0)
14906 break;
14907 else if (GET_CODE (rtl) == ZERO_EXTEND
14908 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14909 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
14910 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14911 to expand zero extend as two shifts instead of
14912 masking. */
14913 && GET_MODE_SIZE (inner_mode) <= 4)
14914 {
14915 mem_loc_result = op0;
14916 add_loc_descr (&mem_loc_result,
14917 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
14918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14919 }
14920 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14921 {
14922 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
14923 shift *= BITS_PER_UNIT;
14924 if (GET_CODE (rtl) == SIGN_EXTEND)
14925 op = DW_OP_shra;
14926 else
14927 op = DW_OP_shr;
14928 mem_loc_result = op0;
14929 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14930 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14931 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14932 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14933 }
14934 else if (!dwarf_strict || dwarf_version >= 5)
14935 {
14936 dw_die_ref type_die1, type_die2;
14937 dw_loc_descr_ref cvt;
14938
14939 type_die1 = base_type_for_mode (inner_mode,
14940 GET_CODE (rtl) == ZERO_EXTEND);
14941 if (type_die1 == NULL)
14942 break;
14943 type_die2 = base_type_for_mode (int_mode, 1);
14944 if (type_die2 == NULL)
14945 break;
14946 mem_loc_result = op0;
14947 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14948 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14949 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14950 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14951 add_loc_descr (&mem_loc_result, cvt);
14952 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14953 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14954 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14955 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14956 add_loc_descr (&mem_loc_result, cvt);
14957 }
14958 break;
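      /* For illustration (a sketch, assuming DWARF2_ADDR_SIZE == 4): the
	 two-shift form above widens a 16-bit value V as (V << 16) >> 16,
	 using DW_OP_shr for ZERO_EXTEND and DW_OP_shra for SIGN_EXTEND, so
	 the 16-bit value 0x8001 sign-extends to 0xffff8001.  The masking
	 form instead ANDs with the mode mask when the heuristic above
	 deems that shorter, and the paired DW_OP_convert operations handle
	 results wider than the address size.  */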
14959
14960 case MEM:
14961 {
14962 rtx new_rtl = avoid_constant_pool_reference (rtl);
14963 if (new_rtl != rtl)
14964 {
14965 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14966 initialized);
14967 if (mem_loc_result != NULL)
14968 return mem_loc_result;
14969 }
14970 }
14971 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14972 get_address_mode (rtl), mode,
14973 VAR_INIT_STATUS_INITIALIZED);
14974 if (mem_loc_result == NULL)
14975 mem_loc_result = tls_mem_loc_descriptor (rtl);
14976 if (mem_loc_result != NULL)
14977 {
14978 if (!is_a <scalar_int_mode> (mode, &int_mode)
14979 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14980 {
14981 dw_die_ref type_die;
14982 dw_loc_descr_ref deref;
14983 HOST_WIDE_INT size;
14984
14985 if (dwarf_strict && dwarf_version < 5)
14986 return NULL;
14987 if (!GET_MODE_SIZE (mode).is_constant (&size))
14988 return NULL;
14989 type_die
14990 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14991 if (type_die == NULL)
14992 return NULL;
14993 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
14994 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14995 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14996 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14997 add_loc_descr (&mem_loc_result, deref);
14998 }
14999 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15000 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15001 else
15002 add_loc_descr (&mem_loc_result,
15003 new_loc_descr (DW_OP_deref_size,
15004 GET_MODE_SIZE (int_mode), 0));
15005 }
15006 break;
15007
15008 case LO_SUM:
15009 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15010
15011 case LABEL_REF:
15012 /* Some ports can transform a symbol ref into a label ref, because
15013 the symbol ref is too far away and has to be dumped into a constant
15014 pool. */
15015 case CONST:
15016 case SYMBOL_REF:
15017 if (!is_a <scalar_int_mode> (mode, &int_mode)
15018 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15019 #ifdef POINTERS_EXTEND_UNSIGNED
15020 && (int_mode != Pmode || mem_mode == VOIDmode)
15021 #endif
15022 ))
15023 break;
15024 if (GET_CODE (rtl) == SYMBOL_REF
15025 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15026 {
15027 dw_loc_descr_ref temp;
15028
15029 /* If this is not defined, we have no way to emit the data. */
15030 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15031 break;
15032
15033 temp = new_addr_loc_descr (rtl, dtprel_true);
15034
15035 /* We check for DWARF 5 here because gdb did not implement
15036 DW_OP_form_tls_address until after 7.12. */
15037 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15038 ? DW_OP_form_tls_address
15039 : DW_OP_GNU_push_tls_address),
15040 0, 0);
15041 add_loc_descr (&mem_loc_result, temp);
15042
15043 break;
15044 }
15045
15046 if (!const_ok_for_output (rtl))
15047 {
15048 if (GET_CODE (rtl) == CONST)
15049 switch (GET_CODE (XEXP (rtl, 0)))
15050 {
15051 case NOT:
15052 op = DW_OP_not;
15053 goto try_const_unop;
15054 case NEG:
15055 op = DW_OP_neg;
15056 goto try_const_unop;
15057 try_const_unop:
15058 rtx arg;
15059 arg = XEXP (XEXP (rtl, 0), 0);
15060 if (!CONSTANT_P (arg))
15061 arg = gen_rtx_CONST (int_mode, arg);
15062 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15063 initialized);
15064 if (op0)
15065 {
15066 mem_loc_result = op0;
15067 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15068 }
15069 break;
15070 default:
15071 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15072 mem_mode, initialized);
15073 break;
15074 }
15075 break;
15076 }
15077
15078 symref:
15079 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15080 vec_safe_push (used_rtx_array, rtl);
15081 break;
15082
15083 case CONCAT:
15084 case CONCATN:
15085 case VAR_LOCATION:
15086 case DEBUG_IMPLICIT_PTR:
15087 expansion_failed (NULL_TREE, rtl,
15088 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15089 return 0;
15090
15091 case ENTRY_VALUE:
15092 if (dwarf_strict && dwarf_version < 5)
15093 return NULL;
15094 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15095 {
15096 if (!is_a <scalar_int_mode> (mode, &int_mode)
15097 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15098 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15099 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15100 else
15101 {
15102 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15103 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15104 return NULL;
15105 op0 = one_reg_loc_descriptor (dbx_regnum,
15106 VAR_INIT_STATUS_INITIALIZED);
15107 }
15108 }
15109 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15110 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15111 {
15112 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15113 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15114 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15115 return NULL;
15116 }
15117 else
15118 gcc_unreachable ();
15119 if (op0 == NULL)
15120 return NULL;
15121 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15122 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15123 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15124 break;
15125
15126 case DEBUG_PARAMETER_REF:
15127 mem_loc_result = parameter_ref_descriptor (rtl);
15128 break;
15129
15130 case PRE_MODIFY:
15131 /* Extract the PLUS expression nested inside and fall into
15132 PLUS code below. */
15133 rtl = XEXP (rtl, 1);
15134 goto plus;
15135
15136 case PRE_INC:
15137 case PRE_DEC:
15138 /* Turn these into a PLUS expression and fall into the PLUS code
15139 below. */
15140 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15141 gen_int_mode (GET_CODE (rtl) == PRE_INC
15142 ? GET_MODE_UNIT_SIZE (mem_mode)
15143 : -GET_MODE_UNIT_SIZE (mem_mode),
15144 mode));
15145
15146 /* fall through */
15147
15148 case PLUS:
15149 plus:
15150 if (is_based_loc (rtl)
15151 && is_a <scalar_int_mode> (mode, &int_mode)
15152 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15153 || XEXP (rtl, 0) == arg_pointer_rtx
15154 || XEXP (rtl, 0) == frame_pointer_rtx))
15155 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15156 INTVAL (XEXP (rtl, 1)),
15157 VAR_INIT_STATUS_INITIALIZED);
15158 else
15159 {
15160 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15161 VAR_INIT_STATUS_INITIALIZED);
15162 if (mem_loc_result == 0)
15163 break;
15164
15165 if (CONST_INT_P (XEXP (rtl, 1))
15166 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15167 <= DWARF2_ADDR_SIZE))
15168 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15169 else
15170 {
15171 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15172 VAR_INIT_STATUS_INITIALIZED);
15173 if (op1 == 0)
15174 return NULL;
15175 add_loc_descr (&mem_loc_result, op1);
15176 add_loc_descr (&mem_loc_result,
15177 new_loc_descr (DW_OP_plus, 0, 0));
15178 }
15179 }
15180 break;
15181
15182 /* If a pseudo-reg is optimized away, it is possible for it to
15183 be replaced with a MEM containing a multiply or shift. */
15184 case MINUS:
15185 op = DW_OP_minus;
15186 goto do_binop;
15187
15188 case MULT:
15189 op = DW_OP_mul;
15190 goto do_binop;
15191
15192 case DIV:
15193 if ((!dwarf_strict || dwarf_version >= 5)
15194 && is_a <scalar_int_mode> (mode, &int_mode)
15195 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15196 {
15197 mem_loc_result = typed_binop (DW_OP_div, rtl,
15198 base_type_for_mode (mode, 0),
15199 int_mode, mem_mode);
15200 break;
15201 }
15202 op = DW_OP_div;
15203 goto do_binop;
15204
15205 case UMOD:
15206 op = DW_OP_mod;
15207 goto do_binop;
15208
15209 case ASHIFT:
15210 op = DW_OP_shl;
15211 goto do_shift;
15212
15213 case ASHIFTRT:
15214 op = DW_OP_shra;
15215 goto do_shift;
15216
15217 case LSHIFTRT:
15218 op = DW_OP_shr;
15219 goto do_shift;
15220
15221 do_shift:
15222 if (!is_a <scalar_int_mode> (mode, &int_mode))
15223 break;
15224 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 {
15227 rtx rtlop1 = XEXP (rtl, 1);
15228 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15229 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15230 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15231 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15232 VAR_INIT_STATUS_INITIALIZED);
15233 }
15234
15235 if (op0 == 0 || op1 == 0)
15236 break;
15237
15238 mem_loc_result = op0;
15239 add_loc_descr (&mem_loc_result, op1);
15240 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15241 break;
15242
15243 case AND:
15244 op = DW_OP_and;
15245 goto do_binop;
15246
15247 case IOR:
15248 op = DW_OP_or;
15249 goto do_binop;
15250
15251 case XOR:
15252 op = DW_OP_xor;
15253 goto do_binop;
15254
15255 do_binop:
15256 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15257 VAR_INIT_STATUS_INITIALIZED);
15258 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15259 VAR_INIT_STATUS_INITIALIZED);
15260
15261 if (op0 == 0 || op1 == 0)
15262 break;
15263
15264 mem_loc_result = op0;
15265 add_loc_descr (&mem_loc_result, op1);
15266 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15267 break;
15268
15269 case MOD:
15270 if ((!dwarf_strict || dwarf_version >= 5)
15271 && is_a <scalar_int_mode> (mode, &int_mode)
15272 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15273 {
15274 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15275 base_type_for_mode (mode, 0),
15276 int_mode, mem_mode);
15277 break;
15278 }
15279
15280 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15281 VAR_INIT_STATUS_INITIALIZED);
15282 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15283 VAR_INIT_STATUS_INITIALIZED);
15284
15285 if (op0 == 0 || op1 == 0)
15286 break;
15287
15288 mem_loc_result = op0;
15289 add_loc_descr (&mem_loc_result, op1);
15290 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15291 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15292 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15293 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15294 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15295 break;
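      /* Illustration of the MOD expansion above: with A and B on the
	 stack, the sequence DW_OP_over DW_OP_over DW_OP_div DW_OP_mul
	 DW_OP_minus computes A - (A / B) * B, i.e. the remainder, using
	 only address-sized operations.  E.g. A = 7, B = 3: the stack
	 evolves 7 3 -> 7 3 7 3 -> 7 3 2 -> 7 6 -> 1.  (A sketch of the
	 emitted ops; DW_OP_div divides the second stack entry by the top
	 entry.)  */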
15296
15297 case UDIV:
15298 if ((!dwarf_strict || dwarf_version >= 5)
15299 && is_a <scalar_int_mode> (mode, &int_mode))
15300 {
15301 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15302 {
15303 op = DW_OP_div;
15304 goto do_binop;
15305 }
15306 mem_loc_result = typed_binop (DW_OP_div, rtl,
15307 base_type_for_mode (int_mode, 1),
15308 int_mode, mem_mode);
15309 }
15310 break;
15311
15312 case NOT:
15313 op = DW_OP_not;
15314 goto do_unop;
15315
15316 case ABS:
15317 op = DW_OP_abs;
15318 goto do_unop;
15319
15320 case NEG:
15321 op = DW_OP_neg;
15322 goto do_unop;
15323
15324 do_unop:
15325 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15326 VAR_INIT_STATUS_INITIALIZED);
15327
15328 if (op0 == 0)
15329 break;
15330
15331 mem_loc_result = op0;
15332 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15333 break;
15334
15335 case CONST_INT:
15336 if (!is_a <scalar_int_mode> (mode, &int_mode)
15337 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15338 #ifdef POINTERS_EXTEND_UNSIGNED
15339 || (int_mode == Pmode
15340 && mem_mode != VOIDmode
15341 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15342 #endif
15343 )
15344 {
15345 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15346 break;
15347 }
15348 if ((!dwarf_strict || dwarf_version >= 5)
15349 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15350 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15351 {
15352 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15353 scalar_int_mode amode;
15354 if (type_die == NULL)
15355 return NULL;
15356 if (INTVAL (rtl) >= 0
15357 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15358 .exists (&amode))
15359 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15360 /* const DW_OP_convert <XXX> vs.
15361 DW_OP_const_type <XXX, 1, const>. */
15362 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15363 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15364 {
15365 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15366 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15367 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15368 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15369 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15370 add_loc_descr (&mem_loc_result, op0);
15371 return mem_loc_result;
15372 }
15373 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15374 INTVAL (rtl));
15375 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15376 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15377 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15378 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15379 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15380 else
15381 {
15382 mem_loc_result->dw_loc_oprnd2.val_class
15383 = dw_val_class_const_double;
15384 mem_loc_result->dw_loc_oprnd2.v.val_double
15385 = double_int::from_shwi (INTVAL (rtl));
15386 }
15387 }
15388 break;
15389
15390 case CONST_DOUBLE:
15391 if (!dwarf_strict || dwarf_version >= 5)
15392 {
15393 dw_die_ref type_die;
15394
15395 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15396 CONST_DOUBLE rtx could represent either a large integer
15397 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15398 the value is always a floating point constant.
15399
15400 When it is an integer, a CONST_DOUBLE is used whenever
15401 the constant requires 2 HWIs to be adequately represented.
15402 We output CONST_DOUBLEs as blocks. */
15403 if (mode == VOIDmode
15404 || (GET_MODE (rtl) == VOIDmode
15405 && maybe_ne (GET_MODE_BITSIZE (mode),
15406 HOST_BITS_PER_DOUBLE_INT)))
15407 break;
15408 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15409 if (type_die == NULL)
15410 return NULL;
15411 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15412 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15413 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15414 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15415 #if TARGET_SUPPORTS_WIDE_INT == 0
15416 if (!SCALAR_FLOAT_MODE_P (mode))
15417 {
15418 mem_loc_result->dw_loc_oprnd2.val_class
15419 = dw_val_class_const_double;
15420 mem_loc_result->dw_loc_oprnd2.v.val_double
15421 = rtx_to_double_int (rtl);
15422 }
15423 else
15424 #endif
15425 {
15426 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15427 unsigned int length = GET_MODE_SIZE (float_mode);
15428 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15429
15430 insert_float (rtl, array);
15431 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15432 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15433 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15434 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15435 }
15436 }
15437 break;
15438
15439 case CONST_WIDE_INT:
15440 if (!dwarf_strict || dwarf_version >= 5)
15441 {
15442 dw_die_ref type_die;
15443
15444 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15445 if (type_die == NULL)
15446 return NULL;
15447 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15448 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15449 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15450 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15451 mem_loc_result->dw_loc_oprnd2.val_class
15452 = dw_val_class_wide_int;
15453 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15454 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15455 }
15456 break;
15457
15458 case CONST_POLY_INT:
15459 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
15460 break;
15461
15462 case EQ:
15463 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15464 break;
15465
15466 case GE:
15467 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15468 break;
15469
15470 case GT:
15471 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15472 break;
15473
15474 case LE:
15475 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15476 break;
15477
15478 case LT:
15479 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15480 break;
15481
15482 case NE:
15483 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15484 break;
15485
15486 case GEU:
15487 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15488 break;
15489
15490 case GTU:
15491 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15492 break;
15493
15494 case LEU:
15495 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15496 break;
15497
15498 case LTU:
15499 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15500 break;
15501
15502 case UMIN:
15503 case UMAX:
15504 if (!SCALAR_INT_MODE_P (mode))
15505 break;
15506 /* FALLTHRU */
15507 case SMIN:
15508 case SMAX:
15509 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15510 break;
15511
15512 case ZERO_EXTRACT:
15513 case SIGN_EXTRACT:
15514 if (CONST_INT_P (XEXP (rtl, 1))
15515 && CONST_INT_P (XEXP (rtl, 2))
15516 && is_a <scalar_int_mode> (mode, &int_mode)
15517 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
15518 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15519 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
15520 && ((unsigned) INTVAL (XEXP (rtl, 1))
15521 + (unsigned) INTVAL (XEXP (rtl, 2))
15522 <= GET_MODE_BITSIZE (int_mode)))
15523 {
15524 int shift, size;
15525 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15526 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15527 if (op0 == 0)
15528 break;
15529 if (GET_CODE (rtl) == SIGN_EXTRACT)
15530 op = DW_OP_shra;
15531 else
15532 op = DW_OP_shr;
15533 mem_loc_result = op0;
15534 size = INTVAL (XEXP (rtl, 1));
15535 shift = INTVAL (XEXP (rtl, 2));
15536 if (BITS_BIG_ENDIAN)
15537 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
15538 if (shift + size != (int) DWARF2_ADDR_SIZE)
15539 {
15540 add_loc_descr (&mem_loc_result,
15541 int_loc_descriptor (DWARF2_ADDR_SIZE
15542 - shift - size));
15543 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15544 }
15545 if (size != (int) DWARF2_ADDR_SIZE)
15546 {
15547 add_loc_descr (&mem_loc_result,
15548 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
15549 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15550 }
15551 }
15552 break;
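      /* For illustration: the bit-field extraction above first left-shifts
	 the operand so the field's most significant bit reaches the top of
	 the address-sized stack slot, then shifts back down by (slot width
	 - field size), with DW_OP_shr supplying zero fill for ZERO_EXTRACT
	 and DW_OP_shra supplying sign fill for SIGN_EXTRACT.  (A sketch of
	 the intent; the exact shift constants are the int_loc_descriptor
	 operands computed above.)  */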
15553
15554 case IF_THEN_ELSE:
15555 {
15556 dw_loc_descr_ref op2, bra_node, drop_node;
15557 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15558 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15559 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15560 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15561 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15562 VAR_INIT_STATUS_INITIALIZED);
15563 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15564 VAR_INIT_STATUS_INITIALIZED);
15565 if (op0 == NULL || op1 == NULL || op2 == NULL)
15566 break;
15567
15568 mem_loc_result = op1;
15569 add_loc_descr (&mem_loc_result, op2);
15570 add_loc_descr (&mem_loc_result, op0);
15571 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15572 add_loc_descr (&mem_loc_result, bra_node);
15573 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15574 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15575 add_loc_descr (&mem_loc_result, drop_node);
15576 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15577 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15578 }
15579 break;
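      /* A sketch of the IF_THEN_ELSE expansion above: the "then" value,
	 the "else" value and the condition are pushed in that order;
	 DW_OP_bra jumps straight to the final DW_OP_drop when the
	 condition is non-zero, discarding the "else" value, while the
	 fall-through DW_OP_swap followed by DW_OP_drop discards the "then"
	 value instead.  Either way exactly one of the two values is left
	 on the stack.  */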
15580
15581 case FLOAT_EXTEND:
15582 case FLOAT_TRUNCATE:
15583 case FLOAT:
15584 case UNSIGNED_FLOAT:
15585 case FIX:
15586 case UNSIGNED_FIX:
15587 if (!dwarf_strict || dwarf_version >= 5)
15588 {
15589 dw_die_ref type_die;
15590 dw_loc_descr_ref cvt;
15591
15592 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15593 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15594 if (op0 == NULL)
15595 break;
15596 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
15597 && (GET_CODE (rtl) == FLOAT
15598 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
15599 {
15600 type_die = base_type_for_mode (int_mode,
15601 GET_CODE (rtl) == UNSIGNED_FLOAT);
15602 if (type_die == NULL)
15603 break;
15604 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15605 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15606 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15607 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15608 add_loc_descr (&op0, cvt);
15609 }
15610 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15611 if (type_die == NULL)
15612 break;
15613 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15614 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15615 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15616 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15617 add_loc_descr (&op0, cvt);
15618 if (is_a <scalar_int_mode> (mode, &int_mode)
15619 && (GET_CODE (rtl) == FIX
15620 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
15621 {
15622 op0 = convert_descriptor_to_mode (int_mode, op0);
15623 if (op0 == NULL)
15624 break;
15625 }
15626 mem_loc_result = op0;
15627 }
15628 break;
15629
15630 case CLZ:
15631 case CTZ:
15632 case FFS:
15633 if (is_a <scalar_int_mode> (mode, &int_mode))
15634 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
15635 break;
15636
15637 case POPCOUNT:
15638 case PARITY:
15639 if (is_a <scalar_int_mode> (mode, &int_mode))
15640 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
15641 break;
15642
15643 case BSWAP:
15644 if (is_a <scalar_int_mode> (mode, &int_mode))
15645 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
15646 break;
15647
15648 case ROTATE:
15649 case ROTATERT:
15650 if (is_a <scalar_int_mode> (mode, &int_mode))
15651 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
15652 break;
15653
15654 case COMPARE:
15655 /* In theory, we could implement the above. */
15656 /* DWARF cannot represent the unsigned compare operations
15657 natively. */
15658 case SS_MULT:
15659 case US_MULT:
15660 case SS_DIV:
15661 case US_DIV:
15662 case SS_PLUS:
15663 case US_PLUS:
15664 case SS_MINUS:
15665 case US_MINUS:
15666 case SS_NEG:
15667 case US_NEG:
15668 case SS_ABS:
15669 case SS_ASHIFT:
15670 case US_ASHIFT:
15671 case SS_TRUNCATE:
15672 case US_TRUNCATE:
15673 case UNORDERED:
15674 case ORDERED:
15675 case UNEQ:
15676 case UNGE:
15677 case UNGT:
15678 case UNLE:
15679 case UNLT:
15680 case LTGT:
15681 case FRACT_CONVERT:
15682 case UNSIGNED_FRACT_CONVERT:
15683 case SAT_FRACT:
15684 case UNSIGNED_SAT_FRACT:
15685 case SQRT:
15686 case ASM_OPERANDS:
15687 case VEC_MERGE:
15688 case VEC_SELECT:
15689 case VEC_CONCAT:
15690 case VEC_DUPLICATE:
15691 case VEC_SERIES:
15692 case UNSPEC:
15693 case HIGH:
15694 case FMA:
15695 case STRICT_LOW_PART:
15696 case CONST_VECTOR:
15697 case CONST_FIXED:
15698 case CLRSB:
15699 case CLOBBER:
15700 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15701 can't express it in the debug info. This can happen e.g. with some
15702 TLS UNSPECs. */
15703 break;
15704
15705 case CONST_STRING:
15706 resolve_one_addr (&rtl);
15707 goto symref;
15708
15709 /* RTL sequences inside PARALLEL record a series of DWARF operations for
15710 the expression. An UNSPEC rtx represents a raw DWARF operation;
15711 new_loc_descr is called for it to build the operation directly.
15712 Otherwise mem_loc_descriptor is called recursively. */
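/* For illustration only (a hypothetical sketch, not a real RTL dump): a
   PARALLEL element that is an UNSPEC whose vector holds (const_int 8) and
   (const_int 0) and whose index encodes DW_OP_plus_uconst is turned into
   the single operation "DW_OP_plus_uconst 8", while a non-UNSPEC element
   is translated by the recursive mem_loc_descriptor call below.  */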
15713 case PARALLEL:
15714 {
15715 int index = 0;
15716 dw_loc_descr_ref exp_result = NULL;
15717
15718 for (; index < XVECLEN (rtl, 0); index++)
15719 {
15720 rtx elem = XVECEXP (rtl, 0, index);
15721 if (GET_CODE (elem) == UNSPEC)
15722 {
15723 /* Each DWARF operation UNSPEC contains two operands; if
15724 one operand is not used for the operation, const0_rtx is
15725 passed. */
15726 gcc_assert (XVECLEN (elem, 0) == 2);
15727
15728 HOST_WIDE_INT dw_op = XINT (elem, 1);
15729 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15730 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15731 exp_result
15732 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15733 oprnd2);
15734 }
15735 else
15736 exp_result
15737 = mem_loc_descriptor (elem, mode, mem_mode,
15738 VAR_INIT_STATUS_INITIALIZED);
15739
15740 if (!mem_loc_result)
15741 mem_loc_result = exp_result;
15742 else
15743 add_loc_descr (&mem_loc_result, exp_result);
15744 }
15745
15746 break;
15747 }
15748
15749 default:
15750 if (flag_checking)
15751 {
15752 print_rtl (stderr, rtl);
15753 gcc_unreachable ();
15754 }
15755 break;
15756 }
15757
15758 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15759 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15760
15761 return mem_loc_result;
15762 }
15763
15764 /* Return a descriptor that describes the concatenation of two locations.
15765 This is typically a complex variable. */
15766
15767 static dw_loc_descr_ref
15768 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15769 {
15770 /* At present we only track constant-sized pieces. */
15771 unsigned int size0, size1;
15772 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
15773 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
15774 return 0;
15775
15776 dw_loc_descr_ref cc_loc_result = NULL;
15777 dw_loc_descr_ref x0_ref
15778 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15779 dw_loc_descr_ref x1_ref
15780 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15781
15782 if (x0_ref == 0 || x1_ref == 0)
15783 return 0;
15784
15785 cc_loc_result = x0_ref;
15786 add_loc_descr_op_piece (&cc_loc_result, size0);
15787
15788 add_loc_descr (&cc_loc_result, x1_ref);
15789 add_loc_descr_op_piece (&cc_loc_result, size1);
15790
15791 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15792 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15793
15794 return cc_loc_result;
15795 }
15796
15797 /* Return a descriptor that describes the concatenation of N
15798 locations. */
15799
15800 static dw_loc_descr_ref
15801 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15802 {
15803 unsigned int i;
15804 dw_loc_descr_ref cc_loc_result = NULL;
15805 unsigned int n = XVECLEN (concatn, 0);
15806 unsigned int size;
15807
15808 for (i = 0; i < n; ++i)
15809 {
15810 dw_loc_descr_ref ref;
15811 rtx x = XVECEXP (concatn, 0, i);
15812
15813 /* At present we only track constant-sized pieces. */
15814 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
15815 return NULL;
15816
15817 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15818 if (ref == NULL)
15819 return NULL;
15820
15821 add_loc_descr (&cc_loc_result, ref);
15822 add_loc_descr_op_piece (&cc_loc_result, size);
15823 }
15824
15825 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15826 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15827
15828 return cc_loc_result;
15829 }
15830
15831 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15832 for DEBUG_IMPLICIT_PTR RTL. */
15833
15834 static dw_loc_descr_ref
15835 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15836 {
15837 dw_loc_descr_ref ret;
15838 dw_die_ref ref;
15839
15840 if (dwarf_strict && dwarf_version < 5)
15841 return NULL;
15842 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15843 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15844 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15845 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15846 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15847 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15848 if (ref)
15849 {
15850 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15851 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15852 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15853 }
15854 else
15855 {
15856 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15857 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15858 }
15859 return ret;
15860 }
15861
15862 /* Output a proper Dwarf location descriptor for a variable or parameter
15863 which is either allocated in a register or in a memory location. For a
15864 register, we just generate an OP_REG and the register number. For a
15865 memory location we provide a Dwarf postfix expression describing how to
15866 generate the (dynamic) address of the object onto the address stack.
15867
15868 MODE is mode of the decl if this loc_descriptor is going to be used in
15869 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
15870 allowed, VOIDmode otherwise.
15871
15872 If we don't know how to describe it, return 0. */
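/* Two hedged examples of the shapes described above (register numbers and
   offsets are hypothetical): a variable living entirely in register 3 is
   described by the single operation DW_OP_reg3, while a variable stored at
   a constant offset from the frame base gets a postfix expression such as
   "DW_OP_fbreg <offset>" whose evaluation leaves the object's address on
   the DWARF stack.  */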
15873
15874 static dw_loc_descr_ref
15875 loc_descriptor (rtx rtl, machine_mode mode,
15876 enum var_init_status initialized)
15877 {
15878 dw_loc_descr_ref loc_result = NULL;
15879 scalar_int_mode int_mode;
15880
15881 switch (GET_CODE (rtl))
15882 {
15883 case SUBREG:
15884 /* The case of a subreg may arise when we have a local (register)
15885 variable or a formal (register) parameter which doesn't quite fill
15886 up an entire register. For now, just assume that it is
15887 legitimate to make the Dwarf info refer to the whole register which
15888 contains the given subreg. */
15889 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15890 loc_result = loc_descriptor (SUBREG_REG (rtl),
15891 GET_MODE (SUBREG_REG (rtl)), initialized);
15892 else
15893 goto do_default;
15894 break;
15895
15896 case REG:
15897 loc_result = reg_loc_descriptor (rtl, initialized);
15898 break;
15899
15900 case MEM:
15901 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15902 GET_MODE (rtl), initialized);
15903 if (loc_result == NULL)
15904 loc_result = tls_mem_loc_descriptor (rtl);
15905 if (loc_result == NULL)
15906 {
15907 rtx new_rtl = avoid_constant_pool_reference (rtl);
15908 if (new_rtl != rtl)
15909 loc_result = loc_descriptor (new_rtl, mode, initialized);
15910 }
15911 break;
15912
15913 case CONCAT:
15914 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15915 initialized);
15916 break;
15917
15918 case CONCATN:
15919 loc_result = concatn_loc_descriptor (rtl, initialized);
15920 break;
15921
15922 case VAR_LOCATION:
15923 /* Single part. */
15924 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15925 {
15926 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15927 if (GET_CODE (loc) == EXPR_LIST)
15928 loc = XEXP (loc, 0);
15929 loc_result = loc_descriptor (loc, mode, initialized);
15930 break;
15931 }
15932
15933 rtl = XEXP (rtl, 1);
15934 /* FALLTHRU */
15935
15936 case PARALLEL:
15937 {
15938 rtvec par_elems = XVEC (rtl, 0);
15939 int num_elem = GET_NUM_ELEM (par_elems);
15940 machine_mode mode;
15941 int i, size;
15942
15943 /* Create the first one, so we have something to add to. */
15944 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15945 VOIDmode, initialized);
15946 if (loc_result == NULL)
15947 return NULL;
15948 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15949 /* At present we only track constant-sized pieces. */
15950 if (!GET_MODE_SIZE (mode).is_constant (&size))
15951 return NULL;
15952 add_loc_descr_op_piece (&loc_result, size);
15953 for (i = 1; i < num_elem; i++)
15954 {
15955 dw_loc_descr_ref temp;
15956
15957 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15958 VOIDmode, initialized);
15959 if (temp == NULL)
15960 return NULL;
15961 add_loc_descr (&loc_result, temp);
15962 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15963 /* At present we only track constant-sized pieces. */
15964 if (!GET_MODE_SIZE (mode).is_constant (&size))
15965 return NULL;
15966 add_loc_descr_op_piece (&loc_result, size);
15967 }
15968 }
15969 break;
15970
15971 case CONST_INT:
15972 if (mode != VOIDmode && mode != BLKmode)
15973 {
15974 int_mode = as_a <scalar_int_mode> (mode);
15975 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
15976 INTVAL (rtl));
15977 }
15978 break;
15979
15980 case CONST_DOUBLE:
15981 if (mode == VOIDmode)
15982 mode = GET_MODE (rtl);
15983
15984 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15985 {
15986 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15987
15988 /* Note that a CONST_DOUBLE rtx could represent either an integer
15989 or a floating-point constant. A CONST_DOUBLE is used whenever
15990 the constant requires more than one word in order to be
15991 adequately represented. We output CONST_DOUBLEs as blocks. */
15992 scalar_mode smode = as_a <scalar_mode> (mode);
15993 loc_result = new_loc_descr (DW_OP_implicit_value,
15994 GET_MODE_SIZE (smode), 0);
15995 #if TARGET_SUPPORTS_WIDE_INT == 0
15996 if (!SCALAR_FLOAT_MODE_P (smode))
15997 {
15998 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15999 loc_result->dw_loc_oprnd2.v.val_double
16000 = rtx_to_double_int (rtl);
16001 }
16002 else
16003 #endif
16004 {
16005 unsigned int length = GET_MODE_SIZE (smode);
16006 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16007
16008 insert_float (rtl, array);
16009 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16010 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16011 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16012 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16013 }
16014 }
16015 break;
16016
16017 case CONST_WIDE_INT:
16018 if (mode == VOIDmode)
16019 mode = GET_MODE (rtl);
16020
16021 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16022 {
16023 int_mode = as_a <scalar_int_mode> (mode);
16024 loc_result = new_loc_descr (DW_OP_implicit_value,
16025 GET_MODE_SIZE (int_mode), 0);
16026 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16027 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16028 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16029 }
16030 break;
16031
16032 case CONST_VECTOR:
16033 if (mode == VOIDmode)
16034 mode = GET_MODE (rtl);
16035
16036 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16037 {
16038 unsigned int length;
16039 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16040 return NULL;
16041
16042 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16043 unsigned char *array
16044 = ggc_vec_alloc<unsigned char> (length * elt_size);
16045 unsigned int i;
16046 unsigned char *p;
16047 machine_mode imode = GET_MODE_INNER (mode);
16048
16049 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16050 switch (GET_MODE_CLASS (mode))
16051 {
16052 case MODE_VECTOR_INT:
16053 for (i = 0, p = array; i < length; i++, p += elt_size)
16054 {
16055 rtx elt = CONST_VECTOR_ELT (rtl, i);
16056 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16057 }
16058 break;
16059
16060 case MODE_VECTOR_FLOAT:
16061 for (i = 0, p = array; i < length; i++, p += elt_size)
16062 {
16063 rtx elt = CONST_VECTOR_ELT (rtl, i);
16064 insert_float (elt, p);
16065 }
16066 break;
16067
16068 default:
16069 gcc_unreachable ();
16070 }
16071
16072 loc_result = new_loc_descr (DW_OP_implicit_value,
16073 length * elt_size, 0);
16074 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16075 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16076 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16077 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16078 }
16079 break;
16080
16081 case CONST:
16082 if (mode == VOIDmode
16083 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16084 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16085 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16086 {
16087 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16088 break;
16089 }
16090 /* FALLTHROUGH */
16091 case SYMBOL_REF:
16092 if (!const_ok_for_output (rtl))
16093 break;
16094 /* FALLTHROUGH */
16095 case LABEL_REF:
16096 if (is_a <scalar_int_mode> (mode, &int_mode)
16097 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16098 && (dwarf_version >= 4 || !dwarf_strict))
16099 {
16100 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16101 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16102 vec_safe_push (used_rtx_array, rtl);
16103 }
16104 break;
16105
16106 case DEBUG_IMPLICIT_PTR:
16107 loc_result = implicit_ptr_descriptor (rtl, 0);
16108 break;
16109
16110 case PLUS:
16111 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16112 && CONST_INT_P (XEXP (rtl, 1)))
16113 {
16114 loc_result
16115 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16116 break;
16117 }
16118 /* FALLTHRU */
16119 do_default:
16120 default:
16121 if ((is_a <scalar_int_mode> (mode, &int_mode)
16122 && GET_MODE (rtl) == int_mode
16123 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16124 && dwarf_version >= 4)
16125 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16126 {
16127 /* Value expression. */
16128 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16129 if (loc_result)
16130 add_loc_descr (&loc_result,
16131 new_loc_descr (DW_OP_stack_value, 0, 0));
16132 }
16133 break;
16134 }
16135
16136 return loc_result;
16137 }
16138
16139 /* We need to figure out what section we should use as the base for the
16140 address ranges where a given location is valid.
16141 1. If this particular DECL has a section associated with it, use that.
16142 2. If this function has a section associated with it, use that.
16143 3. Otherwise, use the text section.
16144 XXX: If you split a variable across multiple sections, we won't notice. */
16145
16146 static const char *
16147 secname_for_decl (const_tree decl)
16148 {
16149 const char *secname;
16150
16151 if (VAR_OR_FUNCTION_DECL_P (decl)
16152 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16153 && DECL_SECTION_NAME (decl))
16154 secname = DECL_SECTION_NAME (decl);
16155 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16156 secname = DECL_SECTION_NAME (current_function_decl);
16157 else if (cfun && in_cold_section_p)
16158 secname = crtl->subsections.cold_section_label;
16159 else
16160 secname = text_section_label;
16161
16162 return secname;
16163 }
16164
16165 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16166
16167 static bool
16168 decl_by_reference_p (tree decl)
16169 {
16170 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16171 || VAR_P (decl))
16172 && DECL_BY_REFERENCE (decl));
16173 }
16174
16175 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16176 for VARLOC. */
16177
16178 static dw_loc_descr_ref
16179 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16180 enum var_init_status initialized)
16181 {
16182 int have_address = 0;
16183 dw_loc_descr_ref descr;
16184 machine_mode mode;
16185
16186 if (want_address != 2)
16187 {
16188 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16189 /* Single part. */
16190 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16191 {
16192 varloc = PAT_VAR_LOCATION_LOC (varloc);
16193 if (GET_CODE (varloc) == EXPR_LIST)
16194 varloc = XEXP (varloc, 0);
16195 mode = GET_MODE (varloc);
16196 if (MEM_P (varloc))
16197 {
16198 rtx addr = XEXP (varloc, 0);
16199 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16200 mode, initialized);
16201 if (descr)
16202 have_address = 1;
16203 else
16204 {
16205 rtx x = avoid_constant_pool_reference (varloc);
16206 if (x != varloc)
16207 descr = mem_loc_descriptor (x, mode, VOIDmode,
16208 initialized);
16209 }
16210 }
16211 else
16212 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16213 }
16214 else
16215 return 0;
16216 }
16217 else
16218 {
16219 if (GET_CODE (varloc) == VAR_LOCATION)
16220 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16221 else
16222 mode = DECL_MODE (loc);
16223 descr = loc_descriptor (varloc, mode, initialized);
16224 have_address = 1;
16225 }
16226
16227 if (!descr)
16228 return 0;
16229
16230 if (want_address == 2 && !have_address
16231 && (dwarf_version >= 4 || !dwarf_strict))
16232 {
16233 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16234 {
16235 expansion_failed (loc, NULL_RTX,
16236 "DWARF address size mismatch");
16237 return 0;
16238 }
16239 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16240 have_address = 1;
16241 }
16242 /* Show if we can't fill the request for an address. */
16243 if (want_address && !have_address)
16244 {
16245 expansion_failed (loc, NULL_RTX,
16246 "Want address and only have value");
16247 return 0;
16248 }
16249
16250 /* If we've got an address and don't want one, dereference. */
16251 if (!want_address && have_address)
16252 {
16253 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16254 enum dwarf_location_atom op;
16255
16256 if (size > DWARF2_ADDR_SIZE || size == -1)
16257 {
16258 expansion_failed (loc, NULL_RTX,
16259 "DWARF address size mismatch");
16260 return 0;
16261 }
16262 else if (size == DWARF2_ADDR_SIZE)
16263 op = DW_OP_deref;
16264 else
16265 op = DW_OP_deref_size;
16266
16267 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16268 }
16269
16270 return descr;
16271 }
16272
16273 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16274 if it is not possible. */
16275
16276 static dw_loc_descr_ref
16277 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16278 {
16279 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16280 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16281 else if (dwarf_version >= 3 || !dwarf_strict)
16282 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16283 else
16284 return NULL;
16285 }
16286
16287 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16288 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
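/* A hedged illustration (registers and sizes are hypothetical): if SRA
   split an 8-byte structure into two 4-byte scalars living in registers,
   the resulting descriptor could look like
     DW_OP_reg3 DW_OP_piece 4  DW_OP_reg4 DW_OP_piece 4
   and any bits whose location is unknown are covered by an empty
   expression followed by a DW_OP_piece / DW_OP_bit_piece of the
   corresponding size.  */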
16289
16290 static dw_loc_descr_ref
16291 dw_sra_loc_expr (tree decl, rtx loc)
16292 {
16293 rtx p;
16294 unsigned HOST_WIDE_INT padsize = 0;
16295 dw_loc_descr_ref descr, *descr_tail;
16296 unsigned HOST_WIDE_INT decl_size;
16297 rtx varloc;
16298 enum var_init_status initialized;
16299
16300 if (DECL_SIZE (decl) == NULL
16301 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16302 return NULL;
16303
16304 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16305 descr = NULL;
16306 descr_tail = &descr;
16307
16308 for (p = loc; p; p = XEXP (p, 1))
16309 {
16310 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16311 rtx loc_note = *decl_piece_varloc_ptr (p);
16312 dw_loc_descr_ref cur_descr;
16313 dw_loc_descr_ref *tail, last = NULL;
16314 unsigned HOST_WIDE_INT opsize = 0;
16315
16316 if (loc_note == NULL_RTX
16317 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16318 {
16319 padsize += bitsize;
16320 continue;
16321 }
16322 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16323 varloc = NOTE_VAR_LOCATION (loc_note);
16324 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16325 if (cur_descr == NULL)
16326 {
16327 padsize += bitsize;
16328 continue;
16329 }
16330
16331 /* Check that cur_descr either doesn't use
16332 DW_OP_*piece operations, or their sum is equal
16333 to bitsize. Otherwise we can't embed it. */
16334 for (tail = &cur_descr; *tail != NULL;
16335 tail = &(*tail)->dw_loc_next)
16336 if ((*tail)->dw_loc_opc == DW_OP_piece)
16337 {
16338 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16339 * BITS_PER_UNIT;
16340 last = *tail;
16341 }
16342 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16343 {
16344 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16345 last = *tail;
16346 }
16347
16348 if (last != NULL && opsize != bitsize)
16349 {
16350 padsize += bitsize;
16351 /* Discard the current piece of the descriptor and release any
16352 addr_table entries it uses. */
16353 remove_loc_list_addr_table_entries (cur_descr);
16354 continue;
16355 }
16356
16357 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16358 expression, which means that those bits are optimized out. */
16359 if (padsize)
16360 {
16361 if (padsize > decl_size)
16362 {
16363 remove_loc_list_addr_table_entries (cur_descr);
16364 goto discard_descr;
16365 }
16366 decl_size -= padsize;
16367 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16368 if (*descr_tail == NULL)
16369 {
16370 remove_loc_list_addr_table_entries (cur_descr);
16371 goto discard_descr;
16372 }
16373 descr_tail = &(*descr_tail)->dw_loc_next;
16374 padsize = 0;
16375 }
16376 *descr_tail = cur_descr;
16377 descr_tail = tail;
16378 if (bitsize > decl_size)
16379 goto discard_descr;
16380 decl_size -= bitsize;
16381 if (last == NULL)
16382 {
16383 HOST_WIDE_INT offset = 0;
16384 if (GET_CODE (varloc) == VAR_LOCATION
16385 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16386 {
16387 varloc = PAT_VAR_LOCATION_LOC (varloc);
16388 if (GET_CODE (varloc) == EXPR_LIST)
16389 varloc = XEXP (varloc, 0);
16390 }
16391 do
16392 {
16393 if (GET_CODE (varloc) == CONST
16394 || GET_CODE (varloc) == SIGN_EXTEND
16395 || GET_CODE (varloc) == ZERO_EXTEND)
16396 varloc = XEXP (varloc, 0);
16397 else if (GET_CODE (varloc) == SUBREG)
16398 varloc = SUBREG_REG (varloc);
16399 else
16400 break;
16401 }
16402 while (1);
16403 /* The DW_OP_bit_piece offset should be zero for register
16404 or implicit location descriptions and for empty location
16405 descriptions, but for memory addresses it needs big-endian
16406 adjustment. */
16407 if (MEM_P (varloc))
16408 {
16409 unsigned HOST_WIDE_INT memsize;
16410 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16411 goto discard_descr;
16412 memsize *= BITS_PER_UNIT;
16413 if (memsize != bitsize)
16414 {
16415 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16416 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16417 goto discard_descr;
16418 if (memsize < bitsize)
16419 goto discard_descr;
16420 if (BITS_BIG_ENDIAN)
16421 offset = memsize - bitsize;
16422 }
16423 }
16424
16425 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16426 if (*descr_tail == NULL)
16427 goto discard_descr;
16428 descr_tail = &(*descr_tail)->dw_loc_next;
16429 }
16430 }
16431
16432 /* If there were any non-empty expressions, add padding till the end of
16433 the decl. */
16434 if (descr != NULL && decl_size != 0)
16435 {
16436 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16437 if (*descr_tail == NULL)
16438 goto discard_descr;
16439 }
16440 return descr;
16441
16442 discard_descr:
16443 /* Discard the descriptor and release any addr_table entries it uses. */
16444 remove_loc_list_addr_table_entries (descr);
16445 return NULL;
16446 }
16447
16448 /* Return the dwarf representation of the location list LOC_LIST of
16449 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16450 function. */
16451
16452 static dw_loc_list_ref
16453 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16454 {
16455 const char *endname, *secname;
16456 rtx varloc;
16457 enum var_init_status initialized;
16458 struct var_loc_node *node;
16459 dw_loc_descr_ref descr;
16460 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16461 dw_loc_list_ref list = NULL;
16462 dw_loc_list_ref *listp = &list;
16463
16464 /* Now that we know what section we are using for a base,
16465 actually construct the list of locations.
16466 The first location information is what is passed to the
16467 function that creates the location list, and the remaining
16468 locations just get added on to that list.
16469 Note that we only know the start address for a location
16470 (i.e. location changes), so to build the range, we use
16471 the range [current location start, next location start].
16472 This means we have to special case the last node, and generate
16473 a range of [last location start, end of function label]. */
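/* For example (label names are hypothetical): if the variable's location
   changes at .LVL1, .LVL2 and .LVL3, the list built below contains the
   ranges [.LVL1, .LVL2), [.LVL2, .LVL3) and [.LVL3, <function end label>),
   each paired with the location that is valid over that range.  */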
16474
16475 if (cfun && crtl->has_bb_partition)
16476 {
16477 bool save_in_cold_section_p = in_cold_section_p;
16478 in_cold_section_p = first_function_block_is_cold;
16479 if (loc_list->last_before_switch == NULL)
16480 in_cold_section_p = !in_cold_section_p;
16481 secname = secname_for_decl (decl);
16482 in_cold_section_p = save_in_cold_section_p;
16483 }
16484 else
16485 secname = secname_for_decl (decl);
16486
16487 for (node = loc_list->first; node; node = node->next)
16488 {
16489 bool range_across_switch = false;
16490 if (GET_CODE (node->loc) == EXPR_LIST
16491 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16492 {
16493 if (GET_CODE (node->loc) == EXPR_LIST)
16494 {
16495 descr = NULL;
16496 /* This requires DW_OP_{,bit_}piece, which is not usable
16497 inside DWARF expressions. */
16498 if (want_address == 2)
16499 descr = dw_sra_loc_expr (decl, node->loc);
16500 }
16501 else
16502 {
16503 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16504 varloc = NOTE_VAR_LOCATION (node->loc);
16505 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16506 }
16507 if (descr)
16508 {
16509 /* If section switch happens in between node->label
16510 and node->next->label (or end of function) and
16511 we can't emit it as a single entry list,
16512 emit two ranges, first one ending at the end
16513 of first partition and second one starting at the
16514 beginning of second partition. */
16515 if (node == loc_list->last_before_switch
16516 && (node != loc_list->first || loc_list->first->next)
16517 && current_function_decl)
16518 {
16519 endname = cfun->fde->dw_fde_end;
16520 range_across_switch = true;
16521 }
16522 /* The variable has a location between NODE->LABEL and
16523 NODE->NEXT->LABEL. */
16524 else if (node->next)
16525 endname = node->next->label;
16526 /* If the variable has a location at the last label
16527 it keeps its location until the end of function. */
16528 else if (!current_function_decl)
16529 endname = text_end_label;
16530 else
16531 {
16532 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16533 current_function_funcdef_no);
16534 endname = ggc_strdup (label_id);
16535 }
16536
16537 *listp = new_loc_list (descr, node->label, endname, secname);
16538 if (TREE_CODE (decl) == PARM_DECL
16539 && node == loc_list->first
16540 && NOTE_P (node->loc)
16541 && strcmp (node->label, endname) == 0)
16542 (*listp)->force = true;
16543 listp = &(*listp)->dw_loc_next;
16544 }
16545 }
16546
16547 if (cfun
16548 && crtl->has_bb_partition
16549 && node == loc_list->last_before_switch)
16550 {
16551 bool save_in_cold_section_p = in_cold_section_p;
16552 in_cold_section_p = !first_function_block_is_cold;
16553 secname = secname_for_decl (decl);
16554 in_cold_section_p = save_in_cold_section_p;
16555 }
16556
16557 if (range_across_switch)
16558 {
16559 if (GET_CODE (node->loc) == EXPR_LIST)
16560 descr = dw_sra_loc_expr (decl, node->loc);
16561 else
16562 {
16563 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16564 varloc = NOTE_VAR_LOCATION (node->loc);
16565 descr = dw_loc_list_1 (decl, varloc, want_address,
16566 initialized);
16567 }
16568 gcc_assert (descr);
16569 /* The variable has a location between NODE->LABEL and
16570 NODE->NEXT->LABEL. */
16571 if (node->next)
16572 endname = node->next->label;
16573 else
16574 endname = cfun->fde->dw_fde_second_end;
16575 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin,
16576 endname, secname);
16577 listp = &(*listp)->dw_loc_next;
16578 }
16579 }
16580
16581 /* Try to avoid the overhead of a location list by emitting a location
16582 expression instead, but only if we didn't have more than one
16583 location entry in the first place. If some entries were not
16584 representable, we don't want to pretend that a single entry that
16585 was representable applies to the entire scope in which the variable
16586 is available. */
16587 if (list && loc_list->first->next)
16588 gen_llsym (list);
16589
16590 return list;
16591 }
16592
16593 /* Return true if the loc_list has only a single element and thus can be
16594 represented as a location description. */
16595
16596 static bool
16597 single_element_loc_list_p (dw_loc_list_ref list)
16598 {
16599 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16600 return !list->ll_symbol;
16601 }
16602
16603 /* Duplicate a single element of location list. */
16604
16605 static inline dw_loc_descr_ref
16606 copy_loc_descr (dw_loc_descr_ref ref)
16607 {
16608 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
16609 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16610 return copy;
16611 }
16612
16613 /* To each location in list LIST append loc descr REF. */
16614
16615 static void
16616 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16617 {
16618 dw_loc_descr_ref copy;
16619 add_loc_descr (&list->expr, ref);
16620 list = list->dw_loc_next;
16621 while (list)
16622 {
16623 copy = copy_loc_descr (ref);
16624 add_loc_descr (&list->expr, copy);
16625 while (copy->dw_loc_next)
16626 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16627 list = list->dw_loc_next;
16628 }
16629 }
16630
16631 /* To each location in list LIST prepend loc descr REF. */
16632
16633 static void
16634 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16635 {
16636 dw_loc_descr_ref copy;
16637 dw_loc_descr_ref ref_end = list->expr;
16638 add_loc_descr (&ref, list->expr);
16639 list->expr = ref;
16640 list = list->dw_loc_next;
16641 while (list)
16642 {
16643 dw_loc_descr_ref end = list->expr;
16644 list->expr = copy = copy_loc_descr (ref);
16645 while (copy->dw_loc_next != ref_end)
16646 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16647 copy->dw_loc_next = end;
16648 list = list->dw_loc_next;
16649 }
16650 }
16651
16652 /* Given two lists RET and LIST
16653 produce location list that is result of adding expression in LIST
16654 to expression in RET on each position in program.
16655 Might be destructive on both RET and LIST.
16656
16657 TODO: We handle only simple cases of RET or LIST having at most one
16658 element. General case would involve sorting the lists in program order
16659 and merging them that will need some additional work.
16660 Adding that will improve quality of debug info especially for SRA-ed
16661 structures. */
16662
16663 static void
16664 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16665 {
16666 if (!list)
16667 return;
16668 if (!*ret)
16669 {
16670 *ret = list;
16671 return;
16672 }
16673 if (!list->dw_loc_next)
16674 {
16675 add_loc_descr_to_each (*ret, list->expr);
16676 return;
16677 }
16678 if (!(*ret)->dw_loc_next)
16679 {
16680 prepend_loc_descr_to_each (list, (*ret)->expr);
16681 *ret = list;
16682 return;
16683 }
16684 expansion_failed (NULL_TREE, NULL_RTX,
16685 "Don't know how to merge two non-trivial"
16686 " location lists.\n");
16687 *ret = NULL;
16688 return;
16689 }
16690
16691 /* LOC is a constant expression. Try our luck: look it up in the constant
16692 pool and return a loc_descr for its address. */
16693
16694 static dw_loc_descr_ref
16695 cst_pool_loc_descr (tree loc)
16696 {
16697 /* Get an RTL for this, if something has been emitted. */
16698 rtx rtl = lookup_constant_def (loc);
16699
16700 if (!rtl || !MEM_P (rtl))
16701 {
16702 gcc_assert (!rtl);
16703 return 0;
16704 }
16705 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16706
16707 /* TODO: We might get more coverage if we were actually delaying expansion
16708 of all expressions till the end of compilation, when constant pools are
16709 fully populated. */
16710 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16711 {
16712 expansion_failed (loc, NULL_RTX,
16713 "CST value in contant pool but not marked.");
16714 return 0;
16715 }
16716 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16717 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16718 }
16719
16720 /* Return a dw_loc_list representing the address of addr_expr LOC
16721 by looking for an inner INDIRECT_REF expression and turning
16722 it into simple arithmetic.
16723
16724 See loc_list_from_tree for the meaning of CONTEXT. */
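/* A minimal sketch of the intent (names are hypothetical): for
   "&ptr->field" the object itself need not be addressable; the requested
   address is just the value of "ptr" plus the constant byte offset of
   "field", so the generated list can end in something like
     <location of ptr> DW_OP_plus_uconst <offset> DW_OP_stack_value
   when a value usable as an address is wanted.  */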
16725
16726 static dw_loc_list_ref
16727 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16728 loc_descr_context *context)
16729 {
16730 tree obj, offset;
16731 poly_int64 bitsize, bitpos, bytepos;
16732 machine_mode mode;
16733 int unsignedp, reversep, volatilep = 0;
16734 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16735
16736 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16737 &bitsize, &bitpos, &offset, &mode,
16738 &unsignedp, &reversep, &volatilep);
16739 STRIP_NOPS (obj);
16740 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
16741 {
16742 expansion_failed (loc, NULL_RTX, "bitfield access");
16743 return 0;
16744 }
16745 if (!INDIRECT_REF_P (obj))
16746 {
16747 expansion_failed (obj,
16748 NULL_RTX, "no indirect ref in inner reference");
16749 return 0;
16750 }
16751 if (!offset && known_eq (bitpos, 0))
16752 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16753 context);
16754 else if (toplev
16755 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16756 && (dwarf_version >= 4 || !dwarf_strict))
16757 {
16758 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16759 if (!list_ret)
16760 return 0;
16761 if (offset)
16762 {
16763 /* Variable offset. */
16764 list_ret1 = loc_list_from_tree (offset, 0, context);
16765 if (list_ret1 == 0)
16766 return 0;
16767 add_loc_list (&list_ret, list_ret1);
16768 if (!list_ret)
16769 return 0;
16770 add_loc_descr_to_each (list_ret,
16771 new_loc_descr (DW_OP_plus, 0, 0));
16772 }
16773 HOST_WIDE_INT value;
16774 if (bytepos.is_constant (&value) && value > 0)
16775 add_loc_descr_to_each (list_ret,
16776 new_loc_descr (DW_OP_plus_uconst, value, 0));
16777 else if (maybe_ne (bytepos, 0))
16778 loc_list_plus_const (list_ret, bytepos);
16779 add_loc_descr_to_each (list_ret,
16780 new_loc_descr (DW_OP_stack_value, 0, 0));
16781 }
16782 return list_ret;
16783 }
16784
16785 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
16786 operations from LOC are nops, move to the last one. Insert in NOPS all
16787 operations that are skipped. */
16788
16789 static void
16790 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16791 hash_set<dw_loc_descr_ref> &nops)
16792 {
16793 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16794 {
16795 nops.add (loc);
16796 loc = loc->dw_loc_next;
16797 }
16798 }
16799
16800 /* Helper for loc_descr_without_nops: free the location description operation
16801 P. */
16802
16803 bool
16804 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16805 {
16806 ggc_free (loc);
16807 return true;
16808 }
16809
16810 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16811 finishes LOC. */
16812
16813 static void
16814 loc_descr_without_nops (dw_loc_descr_ref &loc)
16815 {
16816 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16817 return;
16818
16819 /* Set of all DW_OP_nop operations we remove. */
16820 hash_set<dw_loc_descr_ref> nops;
16821
16822 /* First, strip all prefix NOP operations in order to keep the head of the
16823 operations list. */
16824 loc_descr_to_next_no_nop (loc, nops);
16825
16826 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16827 {
16828 /* For control flow operations: strip "prefix" nops in destination
16829 labels. */
16830 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16831 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16832 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16833 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16834
16835 /* Do the same for the operations that follow, then move to the next
16836 iteration. */
16837 if (cur->dw_loc_next != NULL)
16838 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16839 cur = cur->dw_loc_next;
16840 }
16841
16842 nops.traverse<void *, free_loc_descr> (NULL);
16843 }
16844
16845
16846 struct dwarf_procedure_info;
16847
16848 /* Helper structure for location descriptions generation. */
16849 struct loc_descr_context
16850 {
16851 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16852 NULL_TREE if DW_OP_push_object_address is invalid for this location
16853 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16854 tree context_type;
16855 /* The ..._DECL node that should be translated as a
16856 DW_OP_push_object_address operation. */
16857 tree base_decl;
16858 /* Information about the DWARF procedure we are currently generating. NULL if
16859 we are not generating a DWARF procedure. */
16860 struct dwarf_procedure_info *dpi;
16861 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
16862 by consumer. Used for DW_TAG_generic_subrange attributes. */
16863 bool placeholder_arg;
16864 /* True if PLACEHOLDER_EXPR has been seen. */
16865 bool placeholder_seen;
16866 };
16867
16868 /* DWARF procedures generation
16869
16870 DWARF expressions (aka. location descriptions) are used to encode variable
16871 things such as sizes or offsets. Such computations can have redundant parts
16872 that can be factorized in order to reduce the size of the output debug
16873 information. This is the whole point of DWARF procedures.
16874
16875 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16876 already factorized into functions ("size functions") in order to handle very
16877 big and complex types. Such functions are quite simple: they have integral
16878 arguments, they return an integral result and their body contains only a
16879 return statement with arithmetic expressions. This is the only kind of
16880 function we are interested in translating into DWARF procedures, here.
16881
16882 DWARF expressions and DWARF procedures are executed using a stack, so we have
16883 to define some calling convention for them to interact. Let's say that:
16884
16885 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16886 all arguments in reverse order (right-to-left) so that when the DWARF
16887 procedure execution starts, the first argument is the top of the stack.
16888
16889 - Then, when returning, the DWARF procedure must have consumed all arguments
16890 on the stack, must have pushed the result and touched nothing else.
16891
16892 - Each argument and the result have integral types that can be held in a
16893 single stack slot.
16894
16895 - We call "frame offset" the number of stack slots that are "under DWARF
16896 procedure control": it includes the arguments slots, the temporaries and
16897 the result slot. Thus, it is equal to the number of arguments when the
16898 procedure execution starts and must be equal to one (the result) when it
16899 returns. */
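/* A hedged example of this convention (opcodes sketched, not copied from
   real output): for a size function f(a) returning a + 4, a caller pushes
   the value of "a" and emits DW_OP_call4 referencing the
   DW_TAG_dwarf_procedure DIE.  The procedure body fetches the argument
   with a relocated DW_OP_dup/DW_OP_over/DW_OP_pick, adds 4, and ends with
   one DW_OP_swap/DW_OP_drop pair per argument so that only the result
   remains on the stack; its recorded net stack usage is 1 - args_count.  */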
16900
16901 /* Helper structure used when generating operations for a DWARF procedure. */
16902 struct dwarf_procedure_info
16903 {
16904 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16905 currently translated. */
16906 tree fndecl;
16907 /* The number of arguments FNDECL takes. */
16908 unsigned args_count;
16909 };
16910
16911 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16912 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16913 equate it to this DIE. */
16914
16915 static dw_die_ref
16916 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16917 dw_die_ref parent_die)
16918 {
16919 dw_die_ref dwarf_proc_die;
16920
16921 if ((dwarf_version < 3 && dwarf_strict)
16922 || location == NULL)
16923 return NULL;
16924
16925 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16926 if (fndecl)
16927 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16928 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16929 return dwarf_proc_die;
16930 }
16931
16932 /* Return whether TYPE is a supported type as a DWARF procedure argument
16933 type or return type (we handle only scalar types and pointer types that
16934 aren't wider than the DWARF expression evaluation stack). */
16935
16936 static bool
16937 is_handled_procedure_type (tree type)
16938 {
16939 return ((INTEGRAL_TYPE_P (type)
16940 || TREE_CODE (type) == OFFSET_TYPE
16941 || TREE_CODE (type) == POINTER_TYPE)
16942 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16943 }
16944
16945 /* Helper for resolve_args_picking: do the same but stop when coming across
16946 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16947 offset *before* evaluating the corresponding operation. */
16948
16949 static bool
16950 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16951 struct dwarf_procedure_info *dpi,
16952 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16953 {
16954 /* The "frame_offset" identifier is already used to name a macro... */
16955 unsigned frame_offset_ = initial_frame_offset;
16956 dw_loc_descr_ref l;
16957
16958 for (l = loc; l != NULL;)
16959 {
16960 bool existed;
16961 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16962
16963 /* If we already met this node, there is nothing to compute anymore. */
16964 if (existed)
16965 {
16966 /* Make sure that the stack size is consistent wherever the execution
16967 flow comes from. */
16968 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16969 break;
16970 }
16971 l_frame_offset = frame_offset_;
16972
16973 /* If needed, relocate the picking offset with respect to the frame
16974 offset. */
16975 if (l->frame_offset_rel)
16976 {
16977 unsigned HOST_WIDE_INT off;
16978 switch (l->dw_loc_opc)
16979 {
16980 case DW_OP_pick:
16981 off = l->dw_loc_oprnd1.v.val_unsigned;
16982 break;
16983 case DW_OP_dup:
16984 off = 0;
16985 break;
16986 case DW_OP_over:
16987 off = 1;
16988 break;
16989 default:
16990 gcc_unreachable ();
16991 }
16992 /* frame_offset_ is the size of the current stack frame, including
16993 incoming arguments. Besides, the arguments are pushed
16994 right-to-left. Thus, in order to access the Nth argument from
16995 this operation node, the picking has to skip temporaries *plus*
16996 one stack slot per argument (0 for the first one, 1 for the second
16997 one, etc.).
16998
16999 The targeted argument number (N) is already set as the operand,
17000 and the number of temporaries can be computed with:
17001 frame_offset_ - dpi->args_count */
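/* Worked example (counts are hypothetical): with dpi->args_count == 2 and
   one temporary already pushed (frame_offset_ == 3), a reference to the
   first argument (operand 0) gives off = 0 + 3 - 2 = 1, which is rewritten
   to DW_OP_over below; with no temporaries it would be off = 0, i.e.
   DW_OP_dup.  */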
17002 off += frame_offset_ - dpi->args_count;
17003
17004 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17005 if (off > 255)
17006 return false;
17007
17008 if (off == 0)
17009 {
17010 l->dw_loc_opc = DW_OP_dup;
17011 l->dw_loc_oprnd1.v.val_unsigned = 0;
17012 }
17013 else if (off == 1)
17014 {
17015 l->dw_loc_opc = DW_OP_over;
17016 l->dw_loc_oprnd1.v.val_unsigned = 0;
17017 }
17018 else
17019 {
17020 l->dw_loc_opc = DW_OP_pick;
17021 l->dw_loc_oprnd1.v.val_unsigned = off;
17022 }
17023 }
17024
17025 /* Update frame_offset according to the effect the current operation has
17026 on the stack. */
17027 switch (l->dw_loc_opc)
17028 {
17029 case DW_OP_deref:
17030 case DW_OP_swap:
17031 case DW_OP_rot:
17032 case DW_OP_abs:
17033 case DW_OP_neg:
17034 case DW_OP_not:
17035 case DW_OP_plus_uconst:
17036 case DW_OP_skip:
17037 case DW_OP_reg0:
17038 case DW_OP_reg1:
17039 case DW_OP_reg2:
17040 case DW_OP_reg3:
17041 case DW_OP_reg4:
17042 case DW_OP_reg5:
17043 case DW_OP_reg6:
17044 case DW_OP_reg7:
17045 case DW_OP_reg8:
17046 case DW_OP_reg9:
17047 case DW_OP_reg10:
17048 case DW_OP_reg11:
17049 case DW_OP_reg12:
17050 case DW_OP_reg13:
17051 case DW_OP_reg14:
17052 case DW_OP_reg15:
17053 case DW_OP_reg16:
17054 case DW_OP_reg17:
17055 case DW_OP_reg18:
17056 case DW_OP_reg19:
17057 case DW_OP_reg20:
17058 case DW_OP_reg21:
17059 case DW_OP_reg22:
17060 case DW_OP_reg23:
17061 case DW_OP_reg24:
17062 case DW_OP_reg25:
17063 case DW_OP_reg26:
17064 case DW_OP_reg27:
17065 case DW_OP_reg28:
17066 case DW_OP_reg29:
17067 case DW_OP_reg30:
17068 case DW_OP_reg31:
17069 case DW_OP_bregx:
17070 case DW_OP_piece:
17071 case DW_OP_deref_size:
17072 case DW_OP_nop:
17073 case DW_OP_bit_piece:
17074 case DW_OP_implicit_value:
17075 case DW_OP_stack_value:
17076 break;
17077
17078 case DW_OP_addr:
17079 case DW_OP_const1u:
17080 case DW_OP_const1s:
17081 case DW_OP_const2u:
17082 case DW_OP_const2s:
17083 case DW_OP_const4u:
17084 case DW_OP_const4s:
17085 case DW_OP_const8u:
17086 case DW_OP_const8s:
17087 case DW_OP_constu:
17088 case DW_OP_consts:
17089 case DW_OP_dup:
17090 case DW_OP_over:
17091 case DW_OP_pick:
17092 case DW_OP_lit0:
17093 case DW_OP_lit1:
17094 case DW_OP_lit2:
17095 case DW_OP_lit3:
17096 case DW_OP_lit4:
17097 case DW_OP_lit5:
17098 case DW_OP_lit6:
17099 case DW_OP_lit7:
17100 case DW_OP_lit8:
17101 case DW_OP_lit9:
17102 case DW_OP_lit10:
17103 case DW_OP_lit11:
17104 case DW_OP_lit12:
17105 case DW_OP_lit13:
17106 case DW_OP_lit14:
17107 case DW_OP_lit15:
17108 case DW_OP_lit16:
17109 case DW_OP_lit17:
17110 case DW_OP_lit18:
17111 case DW_OP_lit19:
17112 case DW_OP_lit20:
17113 case DW_OP_lit21:
17114 case DW_OP_lit22:
17115 case DW_OP_lit23:
17116 case DW_OP_lit24:
17117 case DW_OP_lit25:
17118 case DW_OP_lit26:
17119 case DW_OP_lit27:
17120 case DW_OP_lit28:
17121 case DW_OP_lit29:
17122 case DW_OP_lit30:
17123 case DW_OP_lit31:
17124 case DW_OP_breg0:
17125 case DW_OP_breg1:
17126 case DW_OP_breg2:
17127 case DW_OP_breg3:
17128 case DW_OP_breg4:
17129 case DW_OP_breg5:
17130 case DW_OP_breg6:
17131 case DW_OP_breg7:
17132 case DW_OP_breg8:
17133 case DW_OP_breg9:
17134 case DW_OP_breg10:
17135 case DW_OP_breg11:
17136 case DW_OP_breg12:
17137 case DW_OP_breg13:
17138 case DW_OP_breg14:
17139 case DW_OP_breg15:
17140 case DW_OP_breg16:
17141 case DW_OP_breg17:
17142 case DW_OP_breg18:
17143 case DW_OP_breg19:
17144 case DW_OP_breg20:
17145 case DW_OP_breg21:
17146 case DW_OP_breg22:
17147 case DW_OP_breg23:
17148 case DW_OP_breg24:
17149 case DW_OP_breg25:
17150 case DW_OP_breg26:
17151 case DW_OP_breg27:
17152 case DW_OP_breg28:
17153 case DW_OP_breg29:
17154 case DW_OP_breg30:
17155 case DW_OP_breg31:
17156 case DW_OP_fbreg:
17157 case DW_OP_push_object_address:
17158 case DW_OP_call_frame_cfa:
17159 case DW_OP_GNU_variable_value:
17160 ++frame_offset_;
17161 break;
17162
17163 case DW_OP_drop:
17164 case DW_OP_xderef:
17165 case DW_OP_and:
17166 case DW_OP_div:
17167 case DW_OP_minus:
17168 case DW_OP_mod:
17169 case DW_OP_mul:
17170 case DW_OP_or:
17171 case DW_OP_plus:
17172 case DW_OP_shl:
17173 case DW_OP_shr:
17174 case DW_OP_shra:
17175 case DW_OP_xor:
17176 case DW_OP_bra:
17177 case DW_OP_eq:
17178 case DW_OP_ge:
17179 case DW_OP_gt:
17180 case DW_OP_le:
17181 case DW_OP_lt:
17182 case DW_OP_ne:
17183 case DW_OP_regx:
17184 case DW_OP_xderef_size:
17185 --frame_offset_;
17186 break;
17187
17188 case DW_OP_call2:
17189 case DW_OP_call4:
17190 case DW_OP_call_ref:
17191 {
17192 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17193 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17194
17195 if (stack_usage == NULL)
17196 return false;
17197 frame_offset_ += *stack_usage;
17198 break;
17199 }
17200
17201 case DW_OP_implicit_pointer:
17202 case DW_OP_entry_value:
17203 case DW_OP_const_type:
17204 case DW_OP_regval_type:
17205 case DW_OP_deref_type:
17206 case DW_OP_convert:
17207 case DW_OP_reinterpret:
17208 case DW_OP_form_tls_address:
17209 case DW_OP_GNU_push_tls_address:
17210 case DW_OP_GNU_uninit:
17211 case DW_OP_GNU_encoded_addr:
17212 case DW_OP_GNU_implicit_pointer:
17213 case DW_OP_GNU_entry_value:
17214 case DW_OP_GNU_const_type:
17215 case DW_OP_GNU_regval_type:
17216 case DW_OP_GNU_deref_type:
17217 case DW_OP_GNU_convert:
17218 case DW_OP_GNU_reinterpret:
17219 case DW_OP_GNU_parameter_ref:
17220 /* loc_list_from_tree will probably not output these operations for
17221 size functions, so assume they will not appear here. */
17222 /* Fall through... */
17223
17224 default:
17225 gcc_unreachable ();
17226 }
17227
17228 /* Now, follow the control flow (except subroutine calls). */
17229 switch (l->dw_loc_opc)
17230 {
17231 case DW_OP_bra:
17232 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17233 frame_offsets))
17234 return false;
17235 /* Fall through. */
17236
17237 case DW_OP_skip:
17238 l = l->dw_loc_oprnd1.v.val_loc;
17239 break;
17240
17241 case DW_OP_stack_value:
17242 return true;
17243
17244 default:
17245 l = l->dw_loc_next;
17246 break;
17247 }
17248 }
17249
17250 return true;
17251 }
17252
17253 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17254 operations) in order to resolve the operand of DW_OP_pick operations that
17255 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17256 offset *before* LOC is executed. Return whether all relocations were
17257 successful. */
17258
17259 static bool
17260 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17261 struct dwarf_procedure_info *dpi)
17262 {
17263 /* Associate to all visited operations the frame offset *before* evaluating
17264 this operation. */
17265 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17266
17267 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17268 frame_offsets);
17269 }
17270
17271 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17272 Return NULL if it is not possible. */
17273
17274 static dw_die_ref
17275 function_to_dwarf_procedure (tree fndecl)
17276 {
17277 struct loc_descr_context ctx;
17278 struct dwarf_procedure_info dpi;
17279 dw_die_ref dwarf_proc_die;
17280 tree tree_body = DECL_SAVED_TREE (fndecl);
17281 dw_loc_descr_ref loc_body, epilogue;
17282
17283 tree cursor;
17284 unsigned i;
17285
17286 /* Do not generate multiple DWARF procedures for the same function
17287 declaration. */
17288 dwarf_proc_die = lookup_decl_die (fndecl);
17289 if (dwarf_proc_die != NULL)
17290 return dwarf_proc_die;
17291
17292 /* DWARF procedures are available starting with the DWARFv3 standard. */
17293 if (dwarf_version < 3 && dwarf_strict)
17294 return NULL;
17295
17296 /* We handle only functions for which we still have a body, that return a
17297 supported type and that take arguments with supported types. Note that
17298 there is no point translating functions that return nothing. */
17299 if (tree_body == NULL_TREE
17300 || DECL_RESULT (fndecl) == NULL_TREE
17301 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17302 return NULL;
17303
17304 for (cursor = DECL_ARGUMENTS (fndecl);
17305 cursor != NULL_TREE;
17306 cursor = TREE_CHAIN (cursor))
17307 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17308 return NULL;
17309
17310 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17311 if (TREE_CODE (tree_body) != RETURN_EXPR)
17312 return NULL;
17313 tree_body = TREE_OPERAND (tree_body, 0);
17314 if (TREE_CODE (tree_body) != MODIFY_EXPR
17315 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17316 return NULL;
17317 tree_body = TREE_OPERAND (tree_body, 1);
17318
17319 /* Try to translate the body expression itself. Note that this will probably
17320 cause an infinite recursion if its call graph has a cycle. This is very
17321 unlikely for size functions, however, so don't bother with such things at
17322 the moment. */
17323 ctx.context_type = NULL_TREE;
17324 ctx.base_decl = NULL_TREE;
17325 ctx.dpi = &dpi;
17326 ctx.placeholder_arg = false;
17327 ctx.placeholder_seen = false;
17328 dpi.fndecl = fndecl;
17329 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17330 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17331 if (!loc_body)
17332 return NULL;
17333
17334 /* After evaluating all operands in "loc_body", we should still have on the
17335 stack all arguments plus the desired function result (top of the stack).
17336 Generate code in order to keep only the result in our stack frame. */
17337 epilogue = NULL;
17338 for (i = 0; i < dpi.args_count; ++i)
17339 {
17340 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17341 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17342 op_couple->dw_loc_next->dw_loc_next = epilogue;
17343 epilogue = op_couple;
17344 }
17345 add_loc_descr (&loc_body, epilogue);
17346 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17347 return NULL;
17348
17349 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17350 because they are considered useful. Now that there is an epilogue, they
17351 are not useful anymore, so give it another try. */
17352 loc_descr_without_nops (loc_body);
17353
17354 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17355 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17356 though, given that size functions do not come from source, so they should
17357 not have a dedicated DW_TAG_subprogram DIE. */
17358 dwarf_proc_die
17359 = new_dwarf_proc_die (loc_body, fndecl,
17360 get_context_die (DECL_CONTEXT (fndecl)));
17361
17362 /* The called DWARF procedure consumes one stack slot per argument and
17363 returns one stack slot. */
17364 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17365
17366 return dwarf_proc_die;
17367 }
17368
17369
17370 /* Generate Dwarf location list representing LOC.
17371 If WANT_ADDRESS is false, an expression computing LOC will be computed.
17372 If WANT_ADDRESS is 1, an expression computing the address of LOC will be
17373 returned. If WANT_ADDRESS is 2, an expression computing an address
17374 usable in a location description will be returned (i.e. DW_OP_reg can
17375 be used to refer to register values).
17376
17377 CONTEXT provides information to customize the location descriptions
17378 generation. Its context_type field specifies what type is implicitly
17379 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17380 will not be generated.
17381
17382 Its DPI field determines whether we are generating a DWARF expression for a
17383 DWARF procedure, so PARM_DECL references are processed specifically.
17384
17385 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17386 and dpi fields were null. */
17387
17388 static dw_loc_list_ref
17389 loc_list_from_tree_1 (tree loc, int want_address,
17390 struct loc_descr_context *context)
17391 {
17392 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17393 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17394 int have_address = 0;
17395 enum dwarf_location_atom op;
17396
17397 /* ??? Most of the time we do not take proper care for sign/zero
17398 extending the values. Hopefully this won't be a real
17399 problem... */
17400
17401 if (context != NULL
17402 && context->base_decl == loc
17403 && want_address == 0)
17404 {
17405 if (dwarf_version >= 3 || !dwarf_strict)
17406 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17407 NULL, NULL, NULL);
17408 else
17409 return NULL;
17410 }
17411
17412 switch (TREE_CODE (loc))
17413 {
17414 case ERROR_MARK:
17415 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17416 return 0;
17417
17418 case PLACEHOLDER_EXPR:
17419 /* This case involves extracting fields from an object to determine the
17420 position of other fields. It is supposed to appear only as the first
17421 operand of COMPONENT_REF nodes and to reference precisely the type
17422 that the context allows. */
17423 if (context != NULL
17424 && TREE_TYPE (loc) == context->context_type
17425 && want_address >= 1)
17426 {
17427 if (dwarf_version >= 3 || !dwarf_strict)
17428 {
17429 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17430 have_address = 1;
17431 break;
17432 }
17433 else
17434 return NULL;
17435 }
17436 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17437 the single argument passed by consumer. */
17438 else if (context != NULL
17439 && context->placeholder_arg
17440 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17441 && want_address == 0)
17442 {
17443 ret = new_loc_descr (DW_OP_pick, 0, 0);
17444 ret->frame_offset_rel = 1;
17445 context->placeholder_seen = true;
17446 break;
17447 }
17448 else
17449 expansion_failed (loc, NULL_RTX,
17450 "PLACEHOLDER_EXPR for an unexpected type");
17451 break;
17452
17453 case CALL_EXPR:
17454 {
17455 const int nargs = call_expr_nargs (loc);
17456 tree callee = get_callee_fndecl (loc);
17457 int i;
17458 dw_die_ref dwarf_proc;
17459
17460 if (callee == NULL_TREE)
17461 goto call_expansion_failed;
17462
17463 /* We handle only functions that return an integer. */
17464 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17465 goto call_expansion_failed;
17466
17467 dwarf_proc = function_to_dwarf_procedure (callee);
17468 if (dwarf_proc == NULL)
17469 goto call_expansion_failed;
17470
17471 /* Evaluate arguments right-to-left so that the first argument will
17472 be the top-most one on the stack. */
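/* For a call such as f (a, b), b is therefore pushed first and a last, so a
   sits on top of the stack when DW_OP_call4 transfers control to the
   procedure. */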
17473 for (i = nargs - 1; i >= 0; --i)
17474 {
17475 dw_loc_descr_ref loc_descr
17476 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17477 context);
17478
17479 if (loc_descr == NULL)
17480 goto call_expansion_failed;
17481
17482 add_loc_descr (&ret, loc_descr);
17483 }
17484
17485 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17486 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17487 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17488 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17489 add_loc_descr (&ret, ret1);
17490 break;
17491
17492 call_expansion_failed:
17493 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17494 /* The call could not be represented as a DWARF procedure call. */
17495 return 0;
17496 }
17497
17498 case PREINCREMENT_EXPR:
17499 case PREDECREMENT_EXPR:
17500 case POSTINCREMENT_EXPR:
17501 case POSTDECREMENT_EXPR:
17502 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17503 /* There are no opcodes for these operations. */
17504 return 0;
17505
17506 case ADDR_EXPR:
17507 /* If we already want an address, see if there is INDIRECT_REF inside
17508 e.g. for &this->field. */
17509 if (want_address)
17510 {
17511 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17512 (loc, want_address == 2, context);
17513 if (list_ret)
17514 have_address = 1;
17515 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17516 && (ret = cst_pool_loc_descr (loc)))
17517 have_address = 1;
17518 }
17519 /* Otherwise, process the argument and look for the address. */
17520 if (!list_ret && !ret)
17521 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17522 else
17523 {
17524 if (want_address)
17525 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17526 return NULL;
17527 }
17528 break;
17529
17530 case VAR_DECL:
17531 if (DECL_THREAD_LOCAL_P (loc))
17532 {
17533 rtx rtl;
17534 enum dwarf_location_atom tls_op;
17535 enum dtprel_bool dtprel = dtprel_false;
17536
17537 if (targetm.have_tls)
17538 {
17539 /* If this is not defined, we have no way to emit the
17540 data. */
17541 if (!targetm.asm_out.output_dwarf_dtprel)
17542 return 0;
17543
17544 /* The way DW_OP_GNU_push_tls_address is specified, we
17545 can only look up addresses of objects in the current
17546 module. We used DW_OP_addr as first op, but that's
17547 wrong, because DW_OP_addr is relocated by the debug
17548 info consumer, while DW_OP_GNU_push_tls_address
17549 operand shouldn't be. */
17550 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17551 return 0;
17552 dtprel = dtprel_true;
17553 /* We check for DWARF 5 here because gdb did not implement
17554 DW_OP_form_tls_address until after 7.12. */
17555 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17556 : DW_OP_GNU_push_tls_address);
17557 }
17558 else
17559 {
17560 if (!targetm.emutls.debug_form_tls_address
17561 || !(dwarf_version >= 3 || !dwarf_strict))
17562 return 0;
17563 /* We stuffed the control variable into the DECL_VALUE_EXPR
17564 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17565 no longer appear in gimple code. We used the control
17566 variable specifically so that we could pick it up here. */
17567 loc = DECL_VALUE_EXPR (loc);
17568 tls_op = DW_OP_form_tls_address;
17569 }
17570
17571 rtl = rtl_for_decl_location (loc);
17572 if (rtl == NULL_RTX)
17573 return 0;
17574
17575 if (!MEM_P (rtl))
17576 return 0;
17577 rtl = XEXP (rtl, 0);
17578 if (! CONSTANT_P (rtl))
17579 return 0;
17580
17581 ret = new_addr_loc_descr (rtl, dtprel);
17582 ret1 = new_loc_descr (tls_op, 0, 0);
17583 add_loc_descr (&ret, ret1);
17584
17585 have_address = 1;
17586 break;
17587 }
17588 /* FALLTHRU */
17589
17590 case PARM_DECL:
17591 if (context != NULL && context->dpi != NULL
17592 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17593 {
17594 /* We are generating code for a DWARF procedure and we want to access
17595 one of its arguments: find the appropriate argument offset and let
17596 the resolve_args_picking pass compute the offset that complies
17597 with the stack frame size. */
17598 unsigned i = 0;
17599 tree cursor;
17600
17601 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17602 cursor != NULL_TREE && cursor != loc;
17603 cursor = TREE_CHAIN (cursor), ++i)
17604 ;
17605 /* If we are translating a DWARF procedure, all referenced parameters
17606 must belong to the current function. */
17607 gcc_assert (cursor != NULL_TREE);
17608
17609 ret = new_loc_descr (DW_OP_pick, i, 0);
17610 ret->frame_offset_rel = 1;
17611 break;
17612 }
17613 /* FALLTHRU */
17614
17615 case RESULT_DECL:
17616 if (DECL_HAS_VALUE_EXPR_P (loc))
17617 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17618 want_address, context);
17619 /* FALLTHRU */
17620
17621 case FUNCTION_DECL:
17622 {
17623 rtx rtl;
17624 var_loc_list *loc_list = lookup_decl_loc (loc);
17625
17626 if (loc_list && loc_list->first)
17627 {
17628 list_ret = dw_loc_list (loc_list, loc, want_address);
17629 have_address = want_address != 0;
17630 break;
17631 }
17632 rtl = rtl_for_decl_location (loc);
17633 if (rtl == NULL_RTX)
17634 {
17635 if (TREE_CODE (loc) != FUNCTION_DECL
17636 && early_dwarf
17637 && current_function_decl
17638 && want_address != 1
17639 && ! DECL_IGNORED_P (loc)
17640 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
17641 || POINTER_TYPE_P (TREE_TYPE (loc)))
17642 && DECL_CONTEXT (loc) == current_function_decl
17643 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
17644 <= DWARF2_ADDR_SIZE))
17645 {
17646 dw_die_ref ref = lookup_decl_die (loc);
17647 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
17648 if (ref)
17649 {
17650 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17651 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
17652 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
17653 }
17654 else
17655 {
17656 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
17657 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
17658 }
17659 break;
17660 }
17661 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17662 return 0;
17663 }
17664 else if (CONST_INT_P (rtl))
17665 {
17666 HOST_WIDE_INT val = INTVAL (rtl);
17667 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17668 val &= GET_MODE_MASK (DECL_MODE (loc));
17669 ret = int_loc_descriptor (val);
17670 }
17671 else if (GET_CODE (rtl) == CONST_STRING)
17672 {
17673 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17674 return 0;
17675 }
17676 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17677 ret = new_addr_loc_descr (rtl, dtprel_false);
17678 else
17679 {
17680 machine_mode mode, mem_mode;
17681
17682 /* Certain constructs can only be represented at top-level. */
17683 if (want_address == 2)
17684 {
17685 ret = loc_descriptor (rtl, VOIDmode,
17686 VAR_INIT_STATUS_INITIALIZED);
17687 have_address = 1;
17688 }
17689 else
17690 {
17691 mode = GET_MODE (rtl);
17692 mem_mode = VOIDmode;
17693 if (MEM_P (rtl))
17694 {
17695 mem_mode = mode;
17696 mode = get_address_mode (rtl);
17697 rtl = XEXP (rtl, 0);
17698 have_address = 1;
17699 }
17700 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17701 VAR_INIT_STATUS_INITIALIZED);
17702 }
17703 if (!ret)
17704 expansion_failed (loc, rtl,
17705 "failed to produce loc descriptor for rtl");
17706 }
17707 }
17708 break;
17709
17710 case MEM_REF:
17711 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17712 {
17713 have_address = 1;
17714 goto do_plus;
17715 }
17716 /* Fallthru. */
17717 case INDIRECT_REF:
17718 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17719 have_address = 1;
17720 break;
17721
17722 case TARGET_MEM_REF:
17723 case SSA_NAME:
17724 case DEBUG_EXPR_DECL:
17725 return NULL;
17726
17727 case COMPOUND_EXPR:
17728 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17729 context);
17730
17731 CASE_CONVERT:
17732 case VIEW_CONVERT_EXPR:
17733 case SAVE_EXPR:
17734 case MODIFY_EXPR:
17735 case NON_LVALUE_EXPR:
17736 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17737 context);
17738
17739 case COMPONENT_REF:
17740 case BIT_FIELD_REF:
17741 case ARRAY_REF:
17742 case ARRAY_RANGE_REF:
17743 case REALPART_EXPR:
17744 case IMAGPART_EXPR:
17745 {
17746 tree obj, offset;
17747 poly_int64 bitsize, bitpos, bytepos;
17748 machine_mode mode;
17749 int unsignedp, reversep, volatilep = 0;
17750
17751 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17752 &unsignedp, &reversep, &volatilep);
17753
17754 gcc_assert (obj != loc);
17755
17756 list_ret = loc_list_from_tree_1 (obj,
17757 want_address == 2
17758 && known_eq (bitpos, 0)
17759 && !offset ? 2 : 1,
17760 context);
17761 /* TODO: We can extract the value of the small expression via shifting
17762 even for a nonzero bitpos. */
17763 if (list_ret == 0)
17764 return 0;
17765 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
17766 || !multiple_p (bitsize, BITS_PER_UNIT))
17767 {
17768 expansion_failed (loc, NULL_RTX,
17769 "bitfield access");
17770 return 0;
17771 }
17772
17773 if (offset != NULL_TREE)
17774 {
17775 /* Variable offset. */
17776 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17777 if (list_ret1 == 0)
17778 return 0;
17779 add_loc_list (&list_ret, list_ret1);
17780 if (!list_ret)
17781 return 0;
17782 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17783 }
17784
17785 HOST_WIDE_INT value;
17786 if (bytepos.is_constant (&value) && value > 0)
17787 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
17788 value, 0));
17789 else if (maybe_ne (bytepos, 0))
17790 loc_list_plus_const (list_ret, bytepos);
17791
17792 have_address = 1;
17793 break;
17794 }
17795
17796 case INTEGER_CST:
17797 if ((want_address || !tree_fits_shwi_p (loc))
17798 && (ret = cst_pool_loc_descr (loc)))
17799 have_address = 1;
17800 else if (want_address == 2
17801 && tree_fits_shwi_p (loc)
17802 && (ret = address_of_int_loc_descriptor
17803 (int_size_in_bytes (TREE_TYPE (loc)),
17804 tree_to_shwi (loc))))
17805 have_address = 1;
17806 else if (tree_fits_shwi_p (loc))
17807 ret = int_loc_descriptor (tree_to_shwi (loc));
17808 else if (tree_fits_uhwi_p (loc))
17809 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17810 else
17811 {
17812 expansion_failed (loc, NULL_RTX,
17813 "Integer operand is not host integer");
17814 return 0;
17815 }
17816 break;
17817
17818 case CONSTRUCTOR:
17819 case REAL_CST:
17820 case STRING_CST:
17821 case COMPLEX_CST:
17822 if ((ret = cst_pool_loc_descr (loc)))
17823 have_address = 1;
17824 else if (TREE_CODE (loc) == CONSTRUCTOR)
17825 {
17826 tree type = TREE_TYPE (loc);
17827 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17828 unsigned HOST_WIDE_INT offset = 0;
17829 unsigned HOST_WIDE_INT cnt;
17830 constructor_elt *ce;
17831
17832 if (TREE_CODE (type) == RECORD_TYPE)
17833 {
17834 /* This is very limited, but it's enough to output
17835 pointers to member functions, as long as the
17836 referenced function is defined in the current
17837 translation unit. */
17838 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17839 {
17840 tree val = ce->value;
17841
17842 tree field = ce->index;
17843
17844 if (val)
17845 STRIP_NOPS (val);
17846
17847 if (!field || DECL_BIT_FIELD (field))
17848 {
17849 expansion_failed (loc, NULL_RTX,
17850 "bitfield in record type constructor");
17851 size = offset = (unsigned HOST_WIDE_INT)-1;
17852 ret = NULL;
17853 break;
17854 }
17855
17856 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17857 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17858 gcc_assert (pos + fieldsize <= size);
17859 if (pos < offset)
17860 {
17861 expansion_failed (loc, NULL_RTX,
17862 "out-of-order fields in record constructor");
17863 size = offset = (unsigned HOST_WIDE_INT)-1;
17864 ret = NULL;
17865 break;
17866 }
17867 if (pos > offset)
17868 {
17869 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17870 add_loc_descr (&ret, ret1);
17871 offset = pos;
17872 }
17873 if (val && fieldsize != 0)
17874 {
17875 ret1 = loc_descriptor_from_tree (val, want_address, context);
17876 if (!ret1)
17877 {
17878 expansion_failed (loc, NULL_RTX,
17879 "unsupported expression in field");
17880 size = offset = (unsigned HOST_WIDE_INT)-1;
17881 ret = NULL;
17882 break;
17883 }
17884 add_loc_descr (&ret, ret1);
17885 }
17886 if (fieldsize)
17887 {
17888 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17889 add_loc_descr (&ret, ret1);
17890 offset = pos + fieldsize;
17891 }
17892 }
17893
17894 if (offset != size)
17895 {
17896 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17897 add_loc_descr (&ret, ret1);
17898 offset = size;
17899 }
17900
17901 have_address = !!want_address;
17902 }
17903 else
17904 expansion_failed (loc, NULL_RTX,
17905 "constructor of non-record type");
17906 }
17907 else
17908 /* We can construct small constants here using int_loc_descriptor. */
17909 expansion_failed (loc, NULL_RTX,
17910 "constructor or constant not in constant pool");
17911 break;
17912
17913 case TRUTH_AND_EXPR:
17914 case TRUTH_ANDIF_EXPR:
17915 case BIT_AND_EXPR:
17916 op = DW_OP_and;
17917 goto do_binop;
17918
17919 case TRUTH_XOR_EXPR:
17920 case BIT_XOR_EXPR:
17921 op = DW_OP_xor;
17922 goto do_binop;
17923
17924 case TRUTH_OR_EXPR:
17925 case TRUTH_ORIF_EXPR:
17926 case BIT_IOR_EXPR:
17927 op = DW_OP_or;
17928 goto do_binop;
17929
17930 case FLOOR_DIV_EXPR:
17931 case CEIL_DIV_EXPR:
17932 case ROUND_DIV_EXPR:
17933 case TRUNC_DIV_EXPR:
17934 case EXACT_DIV_EXPR:
17935 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17936 return 0;
17937 op = DW_OP_div;
17938 goto do_binop;
17939
17940 case MINUS_EXPR:
17941 op = DW_OP_minus;
17942 goto do_binop;
17943
17944 case FLOOR_MOD_EXPR:
17945 case CEIL_MOD_EXPR:
17946 case ROUND_MOD_EXPR:
17947 case TRUNC_MOD_EXPR:
17948 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17949 {
17950 op = DW_OP_mod;
17951 goto do_binop;
17952 }
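/* DW_OP_mod is used only for unsigned types above. For signed types,
   expand first % second as first - (first / second) * second, duplicating
   the two operands with DW_OP_over. */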
17953 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17954 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17955 if (list_ret == 0 || list_ret1 == 0)
17956 return 0;
17957
17958 add_loc_list (&list_ret, list_ret1);
17959 if (list_ret == 0)
17960 return 0;
17961 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17962 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17963 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17964 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17965 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17966 break;
17967
17968 case MULT_EXPR:
17969 op = DW_OP_mul;
17970 goto do_binop;
17971
17972 case LSHIFT_EXPR:
17973 op = DW_OP_shl;
17974 goto do_binop;
17975
17976 case RSHIFT_EXPR:
17977 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17978 goto do_binop;
17979
17980 case POINTER_PLUS_EXPR:
17981 case PLUS_EXPR:
17982 do_plus:
17983 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17984 {
17985 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17986 smarter to encode their opposite. The DW_OP_plus_uconst operation
17987 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17988 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17989 bytes, Y being the size of the operation that pushes the opposite
17990 of the addend. So let's choose the smallest representation. */
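/* For instance, with an addend of -1, the ULEB128 operand of
   DW_OP_plus_uconst spans 10 bytes (11 bytes in total), whereas
   DW_OP_lit1; DW_OP_minus takes only 2 bytes. */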
17991 const tree tree_addend = TREE_OPERAND (loc, 1);
17992 offset_int wi_addend;
17993 HOST_WIDE_INT shwi_addend;
17994 dw_loc_descr_ref loc_naddend;
17995
17996 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17997 if (list_ret == 0)
17998 return 0;
17999
18000 /* Try to get the literal to push. It is the opposite of the addend;
18001 since we rely on wrapping during DWARF evaluation, first decode
18002 the literal as a "DWARF-sized" signed number. */
18003 wi_addend = wi::to_offset (tree_addend);
18004 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18005 shwi_addend = wi_addend.to_shwi ();
18006 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18007 ? int_loc_descriptor (-shwi_addend)
18008 : NULL;
18009
18010 if (loc_naddend != NULL
18011 && ((unsigned) size_of_uleb128 (shwi_addend)
18012 > size_of_loc_descr (loc_naddend)))
18013 {
18014 add_loc_descr_to_each (list_ret, loc_naddend);
18015 add_loc_descr_to_each (list_ret,
18016 new_loc_descr (DW_OP_minus, 0, 0));
18017 }
18018 else
18019 {
18020 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18021 {
18022 loc_naddend = loc_cur;
18023 loc_cur = loc_cur->dw_loc_next;
18024 ggc_free (loc_naddend);
18025 }
18026 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18027 }
18028 break;
18029 }
18030
18031 op = DW_OP_plus;
18032 goto do_binop;
18033
18034 case LE_EXPR:
18035 op = DW_OP_le;
18036 goto do_comp_binop;
18037
18038 case GE_EXPR:
18039 op = DW_OP_ge;
18040 goto do_comp_binop;
18041
18042 case LT_EXPR:
18043 op = DW_OP_lt;
18044 goto do_comp_binop;
18045
18046 case GT_EXPR:
18047 op = DW_OP_gt;
18048 goto do_comp_binop;
18049
18050 do_comp_binop:
18051 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18052 {
18053 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18054 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18055 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18056 TREE_CODE (loc));
18057 break;
18058 }
18059 else
18060 goto do_binop;
18061
18062 case EQ_EXPR:
18063 op = DW_OP_eq;
18064 goto do_binop;
18065
18066 case NE_EXPR:
18067 op = DW_OP_ne;
18068 goto do_binop;
18069
18070 do_binop:
18071 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18072 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18073 if (list_ret == 0 || list_ret1 == 0)
18074 return 0;
18075
18076 add_loc_list (&list_ret, list_ret1);
18077 if (list_ret == 0)
18078 return 0;
18079 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18080 break;
18081
18082 case TRUTH_NOT_EXPR:
18083 case BIT_NOT_EXPR:
18084 op = DW_OP_not;
18085 goto do_unop;
18086
18087 case ABS_EXPR:
18088 op = DW_OP_abs;
18089 goto do_unop;
18090
18091 case NEGATE_EXPR:
18092 op = DW_OP_neg;
18093 goto do_unop;
18094
18095 do_unop:
18096 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18097 if (list_ret == 0)
18098 return 0;
18099
18100 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18101 break;
18102
18103 case MIN_EXPR:
18104 case MAX_EXPR:
18105 {
18106 const enum tree_code code =
18107 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18108
18109 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18110 build2 (code, integer_type_node,
18111 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18112 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18113 }
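/* MIN_EXPR (a, b) thus becomes (a > b) ? b : a and MAX_EXPR (a, b) becomes
   (a < b) ? b : a; both are then lowered by the COND_EXPR code below. */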
18114
18115 /* fall through */
18116
18117 case COND_EXPR:
18118 {
18119 dw_loc_descr_ref lhs
18120 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18121 dw_loc_list_ref rhs
18122 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18123 dw_loc_descr_ref bra_node, jump_node, tmp;
18124
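/* The expression is laid out as
     <condition>; DW_OP_bra L1; <else-value>; DW_OP_skip L2;
     L1: <then-value>; L2: DW_OP_nop
   so the then-value is only evaluated when the condition is nonzero. */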
18125 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18126 if (list_ret == 0 || lhs == 0 || rhs == 0)
18127 return 0;
18128
18129 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18130 add_loc_descr_to_each (list_ret, bra_node);
18131
18132 add_loc_list (&list_ret, rhs);
18133 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18134 add_loc_descr_to_each (list_ret, jump_node);
18135
18136 add_loc_descr_to_each (list_ret, lhs);
18137 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18138 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18139
18140 /* ??? Need a node to point the skip at. Use a nop. */
18141 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18142 add_loc_descr_to_each (list_ret, tmp);
18143 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18144 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18145 }
18146 break;
18147
18148 case FIX_TRUNC_EXPR:
18149 return 0;
18150
18151 default:
18152 /* Leave front-end specific codes as simply unknown. This comes
18153 up, for instance, with the C STMT_EXPR. */
18154 if ((unsigned int) TREE_CODE (loc)
18155 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18156 {
18157 expansion_failed (loc, NULL_RTX,
18158 "language specific tree node");
18159 return 0;
18160 }
18161
18162 /* Otherwise this is a generic code; we should have listed all of
18163 these explicitly, but we forgot one. */
18164 if (flag_checking)
18165 gcc_unreachable ();
18166
18167 /* In a release build, we want to degrade gracefully: better to
18168 generate incomplete debugging information than to crash. */
18169 return NULL;
18170 }
18171
18172 if (!ret && !list_ret)
18173 return 0;
18174
18175 if (want_address == 2 && !have_address
18176 && (dwarf_version >= 4 || !dwarf_strict))
18177 {
18178 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18179 {
18180 expansion_failed (loc, NULL_RTX,
18181 "DWARF address size mismatch");
18182 return 0;
18183 }
18184 if (ret)
18185 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18186 else
18187 add_loc_descr_to_each (list_ret,
18188 new_loc_descr (DW_OP_stack_value, 0, 0));
18189 have_address = 1;
18190 }
18191 /* Show if we can't fill the request for an address. */
18192 if (want_address && !have_address)
18193 {
18194 expansion_failed (loc, NULL_RTX,
18195 "Want address and only have value");
18196 return 0;
18197 }
18198
18199 gcc_assert (!ret || !list_ret);
18200
18201 /* If we've got an address and don't want one, dereference. */
18202 if (!want_address && have_address)
18203 {
18204 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18205
18206 if (size > DWARF2_ADDR_SIZE || size == -1)
18207 {
18208 expansion_failed (loc, NULL_RTX,
18209 "DWARF address size mismatch");
18210 return 0;
18211 }
18212 else if (size == DWARF2_ADDR_SIZE)
18213 op = DW_OP_deref;
18214 else
18215 op = DW_OP_deref_size;
18216
18217 if (ret)
18218 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18219 else
18220 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18221 }
18222 if (ret)
18223 list_ret = new_loc_list (ret, NULL, NULL, NULL);
18224
18225 return list_ret;
18226 }
18227
18228 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18229 expressions. */
18230
18231 static dw_loc_list_ref
18232 loc_list_from_tree (tree loc, int want_address,
18233 struct loc_descr_context *context)
18234 {
18235 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18236
18237 for (dw_loc_list_ref loc_cur = result;
18238 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18239 loc_descr_without_nops (loc_cur->expr);
18240 return result;
18241 }
18242
18243 /* Same as above but return only a single location expression. */
18244 static dw_loc_descr_ref
18245 loc_descriptor_from_tree (tree loc, int want_address,
18246 struct loc_descr_context *context)
18247 {
18248 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18249 if (!ret)
18250 return NULL;
18251 if (ret->dw_loc_next)
18252 {
18253 expansion_failed (loc, NULL_RTX,
18254 "Location list where only loc descriptor needed");
18255 return NULL;
18256 }
18257 return ret->expr;
18258 }
18259
18260 /* Given a value, round it up to the lowest multiple of `boundary'
18261 which is not less than the value itself. */
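/* For instance, ceiling (17, 8) is 24 while ceiling (16, 8) is 16. */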
18262
18263 static inline HOST_WIDE_INT
18264 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18265 {
18266 return (((value + boundary - 1) / boundary) * boundary);
18267 }
18268
18269 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18270 pointer to the declared type for the relevant field variable, or return
18271 `integer_type_node' if the given node turns out to be an
18272 ERROR_MARK node. */
18273
18274 static inline tree
18275 field_type (const_tree decl)
18276 {
18277 tree type;
18278
18279 if (TREE_CODE (decl) == ERROR_MARK)
18280 return integer_type_node;
18281
18282 type = DECL_BIT_FIELD_TYPE (decl);
18283 if (type == NULL_TREE)
18284 type = TREE_TYPE (decl);
18285
18286 return type;
18287 }
18288
18289 /* Given a pointer to a tree node, return the alignment in bits for
18290 it, or else return BITS_PER_WORD if the node actually turns out to
18291 be an ERROR_MARK node. */
18292
18293 static inline unsigned
18294 simple_type_align_in_bits (const_tree type)
18295 {
18296 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18297 }
18298
18299 static inline unsigned
18300 simple_decl_align_in_bits (const_tree decl)
18301 {
18302 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18303 }
18304
18305 /* Return the result of rounding T up to ALIGN. */
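/* For instance, round_up_to_align (33, 32) is 64 while
   round_up_to_align (32, 32) is 32. */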
18306
18307 static inline offset_int
18308 round_up_to_align (const offset_int &t, unsigned int align)
18309 {
18310 return wi::udiv_trunc (t + align - 1, align) * align;
18311 }
18312
18313 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18314 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18315 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18316 if we fail to return the size in one of these two forms. */
18317
18318 static dw_loc_descr_ref
18319 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18320 {
18321 tree tree_size;
18322 struct loc_descr_context ctx;
18323
18324 /* Return a constant integer in priority, if possible. */
18325 *cst_size = int_size_in_bytes (type);
18326 if (*cst_size != -1)
18327 return NULL;
18328
18329 ctx.context_type = const_cast<tree> (type);
18330 ctx.base_decl = NULL_TREE;
18331 ctx.dpi = NULL;
18332 ctx.placeholder_arg = false;
18333 ctx.placeholder_seen = false;
18334
18335 type = TYPE_MAIN_VARIANT (type);
18336 tree_size = TYPE_SIZE_UNIT (type);
18337 return ((tree_size != NULL_TREE)
18338 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18339 : NULL);
18340 }
18341
18342 /* Helper structure for RECORD_TYPE processing. */
18343 struct vlr_context
18344 {
18345 /* Root RECORD_TYPE. It is needed to generate data member location
18346 descriptions in variable-length records (VLR), but also to cope with
18347 variants, which are composed of nested structures multiplexed with
18348 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18349 function processing a FIELD_DECL, it is required to be non-null. */
18350 tree struct_type;
18351 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18352 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18353 this variant part as part of the root record (in storage units). For
18354 regular records, it must be NULL_TREE. */
18355 tree variant_part_offset;
18356 };
18357
18358 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18359 addressed byte of the "containing object" for the given FIELD_DECL. If
18360 possible, return a native constant through CST_OFFSET (in which case NULL is
18361 returned); otherwise return a DWARF expression that computes the offset.
18362
18363 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18364 that offset is, either because the argument turns out to be a pointer to an
18365 ERROR_MARK node, or because the offset expression is too complex for us.
18366
18367 CTX is required: see the comment for VLR_CONTEXT. */
18368
18369 static dw_loc_descr_ref
18370 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18371 HOST_WIDE_INT *cst_offset)
18372 {
18373 tree tree_result;
18374 dw_loc_list_ref loc_result;
18375
18376 *cst_offset = 0;
18377
18378 if (TREE_CODE (decl) == ERROR_MARK)
18379 return NULL;
18380 else
18381 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18382
18383 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18384 case. */
18385 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18386 return NULL;
18387
18388 #ifdef PCC_BITFIELD_TYPE_MATTERS
18389 /* We used to handle only constant offsets in all cases. Now, we properly
18390 handle dynamic byte offsets only when the PCC bitfield type layout does
18391 not matter. */
18392 if (PCC_BITFIELD_TYPE_MATTERS
18393 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18394 {
18395 offset_int object_offset_in_bits;
18396 offset_int object_offset_in_bytes;
18397 offset_int bitpos_int;
18398 tree type;
18399 tree field_size_tree;
18400 offset_int deepest_bitpos;
18401 offset_int field_size_in_bits;
18402 unsigned int type_align_in_bits;
18403 unsigned int decl_align_in_bits;
18404 offset_int type_size_in_bits;
18405
18406 bitpos_int = wi::to_offset (bit_position (decl));
18407 type = field_type (decl);
18408 type_size_in_bits = offset_int_type_size_in_bits (type);
18409 type_align_in_bits = simple_type_align_in_bits (type);
18410
18411 field_size_tree = DECL_SIZE (decl);
18412
18413 /* The size could be unspecified if there was an error, or for
18414 a flexible array member. */
18415 if (!field_size_tree)
18416 field_size_tree = bitsize_zero_node;
18417
18418 /* If the size of the field is not constant, use the type size. */
18419 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18420 field_size_in_bits = wi::to_offset (field_size_tree);
18421 else
18422 field_size_in_bits = type_size_in_bits;
18423
18424 decl_align_in_bits = simple_decl_align_in_bits (decl);
18425
18426 /* The GCC front-end doesn't make any attempt to keep track of the
18427 starting bit offset (relative to the start of the containing
18428 structure type) of the hypothetical "containing object" for a
18429 bit-field. Thus, when computing the byte offset value for the
18430 start of the "containing object" of a bit-field, we must deduce
18431 this information on our own. This can be rather tricky to do in
18432 some cases. For example, handling the following structure type
18433 definition when compiling for an i386/i486 target (which only
18434 aligns long long's to 32-bit boundaries) can be very tricky:
18435
18436 struct S { int field1; long long field2:31; };
18437
18438 Fortunately, there is a simple rule-of-thumb which can be used
18439 in such cases. When compiling for an i386/i486, GCC will
18440 allocate 8 bytes for the structure shown above. It decides to
18441 do this based upon one simple rule for bit-field allocation.
18442 GCC allocates each "containing object" for each bit-field at
18443 the first (i.e. lowest addressed) legitimate alignment boundary
18444 (based upon the required minimum alignment for the declared
18445 type of the field) which it can possibly use, subject to the
18446 condition that there is still enough available space remaining
18447 in the containing object (when allocated at the selected point)
18448 to fully accommodate all of the bits of the bit-field itself.
18449
18450 This simple rule makes it obvious why GCC allocates 8 bytes for
18451 each object of the structure type shown above. When looking
18452 for a place to allocate the "containing object" for `field2',
18453 the compiler simply tries to allocate a 64-bit "containing
18454 object" at each successive 32-bit boundary (starting at zero)
18455 until it finds a place to allocate that 64-bit field such that
18456 at least 31 contiguous (and previously unallocated) bits remain
18457 within that selected 64 bit field. (As it turns out, for the
18458 example above, the compiler finds it is OK to allocate the
18459 "containing object" 64-bit field at bit-offset zero within the
18460 structure type.)
18461
18462 Here we attempt to work backwards from the limited set of facts
18463 we're given, and we try to deduce from those facts, where GCC
18464 must have believed that the containing object started (within
18465 the structure type). The value we deduce is then used (by the
18466 callers of this routine) to generate DW_AT_location and
18467 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18468 the case of DW_AT_location, regular fields as well). */
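/* As a worked instance of the rule above: for `field2' in the example
   structure (on i386), the bit position is 32 and the field size is 31
   bits, so deepest_bitpos is 63; subtracting the 64-bit type size and
   rounding up to the type alignment deduces a containing object that
   starts at bit offset 0, i.e. at the very beginning of the structure. */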
18469
18470 /* Figure out the bit-distance from the start of the structure to
18471 the "deepest" bit of the bit-field. */
18472 deepest_bitpos = bitpos_int + field_size_in_bits;
18473
18474 /* This is the tricky part. Use some fancy footwork to deduce
18475 where the lowest addressed bit of the containing object must
18476 be. */
18477 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18478
18479 /* Round up to type_align by default. This works best for
18480 bitfields. */
18481 object_offset_in_bits
18482 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18483
18484 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18485 {
18486 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18487
18488 /* Round up to decl_align instead. */
18489 object_offset_in_bits
18490 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18491 }
18492
18493 object_offset_in_bytes
18494 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18495 if (ctx->variant_part_offset == NULL_TREE)
18496 {
18497 *cst_offset = object_offset_in_bytes.to_shwi ();
18498 return NULL;
18499 }
18500 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18501 }
18502 else
18503 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18504 tree_result = byte_position (decl);
18505
18506 if (ctx->variant_part_offset != NULL_TREE)
18507 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18508 ctx->variant_part_offset, tree_result);
18509
18510 /* If the byte offset is a constant, it's simpler to handle a native
18511 constant rather than a DWARF expression. */
18512 if (TREE_CODE (tree_result) == INTEGER_CST)
18513 {
18514 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18515 return NULL;
18516 }
18517 struct loc_descr_context loc_ctx = {
18518 ctx->struct_type, /* context_type */
18519 NULL_TREE, /* base_decl */
18520 NULL, /* dpi */
18521 false, /* placeholder_arg */
18522 false /* placeholder_seen */
18523 };
18524 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18525
18526 /* We want a DWARF expression: abort if we only have a location list with
18527 multiple elements. */
18528 if (!loc_result || !single_element_loc_list_p (loc_result))
18529 return NULL;
18530 else
18531 return loc_result->expr;
18532 }
18533 \f
18534 /* The following routines define various Dwarf attributes and any data
18535 associated with them. */
18536
18537 /* Add a location description attribute value to a DIE.
18538
18539 This emits location attributes suitable for whole variables and
18540 whole parameters. Note that the location attributes for struct fields are
18541 generated by the routine `data_member_location_attribute' below. */
18542
18543 static inline void
18544 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18545 dw_loc_list_ref descr)
18546 {
18547 if (descr == 0)
18548 return;
18549 if (single_element_loc_list_p (descr))
18550 add_AT_loc (die, attr_kind, descr->expr);
18551 else
18552 add_AT_loc_list (die, attr_kind, descr);
18553 }
18554
18555 /* Add DW_AT_accessibility attribute to DIE if needed. */
18556
18557 static void
18558 add_accessibility_attribute (dw_die_ref die, tree decl)
18559 {
18560 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18561 children, otherwise the default is DW_ACCESS_public. In DWARF2
18562 the default has always been DW_ACCESS_public. */
18563 if (TREE_PROTECTED (decl))
18564 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18565 else if (TREE_PRIVATE (decl))
18566 {
18567 if (dwarf_version == 2
18568 || die->die_parent == NULL
18569 || die->die_parent->die_tag != DW_TAG_class_type)
18570 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18571 }
18572 else if (dwarf_version > 2
18573 && die->die_parent
18574 && die->die_parent->die_tag == DW_TAG_class_type)
18575 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18576 }
18577
18578 /* Attach the specialized form of location attribute used for data members of
18579 struct and union types. In the special case of a FIELD_DECL node which
18580 represents a bit-field, the "offset" part of this special location
18581 descriptor must indicate the distance in bytes from the lowest-addressed
18582 byte of the containing struct or union type to the lowest-addressed byte of
18583 the "containing object" for the bit-field. (See the `field_byte_offset'
18584 function above).
18585
18586 For any given bit-field, the "containing object" is a hypothetical object
18587 (of some integral or enum type) within which the given bit-field lives. The
18588 type of this hypothetical "containing object" is always the same as the
18589 declared type of the individual bit-field itself (for GCC anyway... the
18590 DWARF spec doesn't actually mandate this). Note that it is the size (in
18591 bytes) of the hypothetical "containing object" which will be given in the
18592 DW_AT_byte_size attribute for this bit-field. (See the
18593 `byte_size_attribute' function below.) It is also used when calculating the
18594 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18595 function below.)
18596
18597 CTX is required: see the comment for VLR_CONTEXT. */
18598
18599 static void
18600 add_data_member_location_attribute (dw_die_ref die,
18601 tree decl,
18602 struct vlr_context *ctx)
18603 {
18604 HOST_WIDE_INT offset;
18605 dw_loc_descr_ref loc_descr = 0;
18606
18607 if (TREE_CODE (decl) == TREE_BINFO)
18608 {
18609 /* We're working on the TAG_inheritance for a base class. */
18610 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18611 {
18612 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18613 aren't at a fixed offset from all (sub)objects of the same
18614 type. We need to extract the appropriate offset from our
18615 vtable. The following dwarf expression means
18616
18617 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18618
18619 This is specific to the V3 ABI, of course. */
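/* Concretely, the sequence built below is
     DW_OP_dup; DW_OP_deref; <push constant>; DW_OP_minus; DW_OP_deref;
     DW_OP_plus
   which turns ObAddr on the stack into BaseAddr. */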
18620
18621 dw_loc_descr_ref tmp;
18622
18623 /* Make a copy of the object address. */
18624 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18625 add_loc_descr (&loc_descr, tmp);
18626
18627 /* Extract the vtable address. */
18628 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18629 add_loc_descr (&loc_descr, tmp);
18630
18631 /* Calculate the address of the offset. */
18632 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18633 gcc_assert (offset < 0);
18634
18635 tmp = int_loc_descriptor (-offset);
18636 add_loc_descr (&loc_descr, tmp);
18637 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18638 add_loc_descr (&loc_descr, tmp);
18639
18640 /* Extract the offset. */
18641 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18642 add_loc_descr (&loc_descr, tmp);
18643
18644 /* Add it to the object address. */
18645 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18646 add_loc_descr (&loc_descr, tmp);
18647 }
18648 else
18649 offset = tree_to_shwi (BINFO_OFFSET (decl));
18650 }
18651 else
18652 {
18653 loc_descr = field_byte_offset (decl, ctx, &offset);
18654
18655 /* If loc_descr is available then we know the field offset is dynamic.
18656 However, GDB does not handle dynamic field offsets very well at the
18657 moment. */
18658 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18659 {
18660 loc_descr = NULL;
18661 offset = 0;
18662 }
18663
18664 /* Data member location evaluation starts with the base address on the
18665 stack. Compute the field offset and add it to this base address. */
18666 else if (loc_descr != NULL)
18667 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18668 }
18669
18670 if (! loc_descr)
18671 {
18672 /* While DW_AT_data_bit_offset was already added in DWARF4,
18673 GDB, for example, only added support for it in November 2016. For DWARF5
18674 we need newer debug info consumers anyway. We might change this
18675 to dwarf_version >= 4 once most consumers have caught up. */
18676 if (dwarf_version >= 5
18677 && TREE_CODE (decl) == FIELD_DECL
18678 && DECL_BIT_FIELD_TYPE (decl))
18679 {
18680 tree off = bit_position (decl);
18681 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18682 {
18683 remove_AT (die, DW_AT_byte_size);
18684 remove_AT (die, DW_AT_bit_offset);
18685 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18686 return;
18687 }
18688 }
18689 if (dwarf_version > 2)
18690 {
18691 /* Don't need to output a location expression, just the constant. */
18692 if (offset < 0)
18693 add_AT_int (die, DW_AT_data_member_location, offset);
18694 else
18695 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18696 return;
18697 }
18698 else
18699 {
18700 enum dwarf_location_atom op;
18701
18702 /* The DWARF2 standard says that we should assume that the structure
18703 address is already on the stack, so we can specify a structure
18704 field address by using DW_OP_plus_uconst. */
18705 op = DW_OP_plus_uconst;
18706 loc_descr = new_loc_descr (op, offset, 0);
18707 }
18708 }
18709
18710 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18711 }
18712
18713 /* Writes integer values to dw_vec_const array. */
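/* The value is stored least significant byte first; e.g. insert_int (0x1234,
   2, dest) stores 0x34 and then 0x12. */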
18714
18715 static void
18716 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18717 {
18718 while (size != 0)
18719 {
18720 *dest++ = val & 0xff;
18721 val >>= 8;
18722 --size;
18723 }
18724 }
18725
18726 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18727
18728 static HOST_WIDE_INT
18729 extract_int (const unsigned char *src, unsigned int size)
18730 {
18731 HOST_WIDE_INT val = 0;
18732
18733 src += size;
18734 while (size != 0)
18735 {
18736 val <<= 8;
18737 val |= *--src & 0xff;
18738 --size;
18739 }
18740 return val;
18741 }
18742
18743 /* Writes wide_int values to dw_vec_const array. */
18744
18745 static void
18746 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18747 {
18748 int i;
18749
18750 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18751 {
18752 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18753 return;
18754 }
18755
18756 /* We'd have to extend this code to support odd sizes. */
18757 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18758
18759 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18760
18761 if (WORDS_BIG_ENDIAN)
18762 for (i = n - 1; i >= 0; i--)
18763 {
18764 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18765 dest += sizeof (HOST_WIDE_INT);
18766 }
18767 else
18768 for (i = 0; i < n; i++)
18769 {
18770 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18771 dest += sizeof (HOST_WIDE_INT);
18772 }
18773 }
18774
18775 /* Writes floating point values to dw_vec_const array. */
18776
18777 static void
18778 insert_float (const_rtx rtl, unsigned char *array)
18779 {
18780 long val[4];
18781 int i;
18782 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18783
18784 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
18785
18786 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18787 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
18788 {
18789 insert_int (val[i], 4, array);
18790 array += 4;
18791 }
18792 }
18793
18794 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18795 does not have a "location" either in memory or in a register. These
18796 things can arise in GNU C when a constant is passed as an actual parameter
18797 to an inlined function. They can also arise in C++ where declared
18798 constants do not necessarily get memory "homes". */
18799
18800 static bool
18801 add_const_value_attribute (dw_die_ref die, rtx rtl)
18802 {
18803 switch (GET_CODE (rtl))
18804 {
18805 case CONST_INT:
18806 {
18807 HOST_WIDE_INT val = INTVAL (rtl);
18808
18809 if (val < 0)
18810 add_AT_int (die, DW_AT_const_value, val);
18811 else
18812 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18813 }
18814 return true;
18815
18816 case CONST_WIDE_INT:
18817 {
18818 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18819 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18820 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18821 wide_int w = wi::zext (w1, prec);
18822 add_AT_wide (die, DW_AT_const_value, w);
18823 }
18824 return true;
18825
18826 case CONST_DOUBLE:
18827 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18828 floating-point constant. A CONST_DOUBLE is used whenever the
18829 constant requires more than one word in order to be adequately
18830 represented. */
18831 if (TARGET_SUPPORTS_WIDE_INT == 0
18832 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
18833 add_AT_double (die, DW_AT_const_value,
18834 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18835 else
18836 {
18837 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18838 unsigned int length = GET_MODE_SIZE (mode);
18839 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18840
18841 insert_float (rtl, array);
18842 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18843 }
18844 return true;
18845
18846 case CONST_VECTOR:
18847 {
18848 unsigned int length;
18849 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
18850 return false;
18851
18852 machine_mode mode = GET_MODE (rtl);
18853 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18854 unsigned char *array
18855 = ggc_vec_alloc<unsigned char> (length * elt_size);
18856 unsigned int i;
18857 unsigned char *p;
18858 machine_mode imode = GET_MODE_INNER (mode);
18859
18860 switch (GET_MODE_CLASS (mode))
18861 {
18862 case MODE_VECTOR_INT:
18863 for (i = 0, p = array; i < length; i++, p += elt_size)
18864 {
18865 rtx elt = CONST_VECTOR_ELT (rtl, i);
18866 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18867 }
18868 break;
18869
18870 case MODE_VECTOR_FLOAT:
18871 for (i = 0, p = array; i < length; i++, p += elt_size)
18872 {
18873 rtx elt = CONST_VECTOR_ELT (rtl, i);
18874 insert_float (elt, p);
18875 }
18876 break;
18877
18878 default:
18879 gcc_unreachable ();
18880 }
18881
18882 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18883 }
18884 return true;
18885
18886 case CONST_STRING:
18887 if (dwarf_version >= 4 || !dwarf_strict)
18888 {
18889 dw_loc_descr_ref loc_result;
18890 resolve_one_addr (&rtl);
18891 rtl_addr:
18892 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18893 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18894 add_AT_loc (die, DW_AT_location, loc_result);
18895 vec_safe_push (used_rtx_array, rtl);
18896 return true;
18897 }
18898 return false;
18899
18900 case CONST:
18901 if (CONSTANT_P (XEXP (rtl, 0)))
18902 return add_const_value_attribute (die, XEXP (rtl, 0));
18903 /* FALLTHROUGH */
18904 case SYMBOL_REF:
18905 if (!const_ok_for_output (rtl))
18906 return false;
18907 /* FALLTHROUGH */
18908 case LABEL_REF:
18909 if (dwarf_version >= 4 || !dwarf_strict)
18910 goto rtl_addr;
18911 return false;
18912
18913 case PLUS:
18914 /* In cases where an inlined instance of an inline function is passed
18915 the address of an `auto' variable (which is local to the caller) we
18916 can get a situation where the DECL_RTL of the artificial local
18917 variable (for the inlining) which acts as a stand-in for the
18918 corresponding formal parameter (of the inline function) will look
18919 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18920 exactly a compile-time constant expression, but it isn't the address
18921 of the (artificial) local variable either. Rather, it represents the
18922 *value* which the artificial local variable always has during its
18923 lifetime. We currently have no way to represent such quasi-constant
18924 values in Dwarf, so for now we just punt and generate nothing. */
18925 return false;
18926
18927 case HIGH:
18928 case CONST_FIXED:
18929 return false;
18930
18931 case MEM:
18932 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18933 && MEM_READONLY_P (rtl)
18934 && GET_MODE (rtl) == BLKmode)
18935 {
18936 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18937 return true;
18938 }
18939 return false;
18940
18941 default:
18942 /* No other kinds of rtx should be possible here. */
18943 gcc_unreachable ();
18944 }
18945 return false;
18946 }
18947
18948 /* Determine whether the evaluation of EXPR references any variables
18949 or functions which aren't otherwise used (and therefore may not be
18950 output). */
18951 static tree
18952 reference_to_unused (tree * tp, int * walk_subtrees,
18953 void * data ATTRIBUTE_UNUSED)
18954 {
18955 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18956 *walk_subtrees = 0;
18957
18958 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18959 && ! TREE_ASM_WRITTEN (*tp))
18960 return *tp;
18961 /* ??? The C++ FE emits debug information for using decls, so
18962 putting gcc_unreachable here falls over. See PR31899. For now
18963 be conservative. */
18964 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18965 return *tp;
18966 else if (VAR_P (*tp))
18967 {
18968 varpool_node *node = varpool_node::get (*tp);
18969 if (!node || !node->definition)
18970 return *tp;
18971 }
18972 else if (TREE_CODE (*tp) == FUNCTION_DECL
18973 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18974 {
18975 /* The call graph machinery must have finished analyzing,
18976 optimizing and gimplifying the CU by now.
18977 So if *TP has no call graph node associated
18978 to it, it means *TP will not be emitted. */
18979 if (!cgraph_node::get (*tp))
18980 return *tp;
18981 }
18982 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18983 return *tp;
18984
18985 return NULL_TREE;
18986 }
18987
18988 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18989 for use in a later add_const_value_attribute call. */
18990
18991 static rtx
18992 rtl_for_decl_init (tree init, tree type)
18993 {
18994 rtx rtl = NULL_RTX;
18995
18996 STRIP_NOPS (init);
18997
18998 /* If a variable is initialized with a string constant without embedded
18999 zeros, build CONST_STRING. */
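/* This covers, e.g., `const char msg[] = "hello";', whose contents can then
   be emitted as a DW_AT_const_value string by add_const_value_attribute. */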
19000 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19001 {
19002 tree enttype = TREE_TYPE (type);
19003 tree domain = TYPE_DOMAIN (type);
19004 scalar_int_mode mode;
19005
19006 if (is_int_mode (TYPE_MODE (enttype), &mode)
19007 && GET_MODE_SIZE (mode) == 1
19008 && domain
19009 && integer_zerop (TYPE_MIN_VALUE (domain))
19010 && compare_tree_int (TYPE_MAX_VALUE (domain),
19011 TREE_STRING_LENGTH (init) - 1) == 0
19012 && ((size_t) TREE_STRING_LENGTH (init)
19013 == strlen (TREE_STRING_POINTER (init)) + 1))
19014 {
19015 rtl = gen_rtx_CONST_STRING (VOIDmode,
19016 ggc_strdup (TREE_STRING_POINTER (init)));
19017 rtl = gen_rtx_MEM (BLKmode, rtl);
19018 MEM_READONLY_P (rtl) = 1;
19019 }
19020 }
19021 /* Other aggregates, and complex values, could be represented using
19022 CONCAT: FIXME! */
19023 else if (AGGREGATE_TYPE_P (type)
19024 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19025 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19026 || TREE_CODE (type) == COMPLEX_TYPE)
19027 ;
19028 /* Vectors only work if their mode is supported by the target.
19029 FIXME: generic vectors ought to work too. */
19030 else if (TREE_CODE (type) == VECTOR_TYPE
19031 && !VECTOR_MODE_P (TYPE_MODE (type)))
19032 ;
19033 /* If the initializer is something that we know will expand into an
19034 immediate RTL constant, expand it now. We must be careful not to
19035 reference variables which won't be output. */
19036 else if (initializer_constant_valid_p (init, type)
19037 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19038 {
19039 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19040 possible. */
19041 if (TREE_CODE (type) == VECTOR_TYPE)
19042 switch (TREE_CODE (init))
19043 {
19044 case VECTOR_CST:
19045 break;
19046 case CONSTRUCTOR:
19047 if (TREE_CONSTANT (init))
19048 {
19049 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19050 bool constant_p = true;
19051 tree value;
19052 unsigned HOST_WIDE_INT ix;
19053
19054 /* Even when ctor is constant, it might contain non-*_CST
19055 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19056 belong into VECTOR_CST nodes. */
19057 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19058 if (!CONSTANT_CLASS_P (value))
19059 {
19060 constant_p = false;
19061 break;
19062 }
19063
19064 if (constant_p)
19065 {
19066 init = build_vector_from_ctor (type, elts);
19067 break;
19068 }
19069 }
19070 /* FALLTHRU */
19071
19072 default:
19073 return NULL;
19074 }
19075
19076 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19077
19078 /* If expand_expr returns a MEM, it wasn't immediate. */
19079 gcc_assert (!rtl || !MEM_P (rtl));
19080 }
19081
19082 return rtl;
19083 }
19084
19085 /* Generate RTL for the variable DECL to represent its location. */
19086
19087 static rtx
19088 rtl_for_decl_location (tree decl)
19089 {
19090 rtx rtl;
19091
19092 /* Here we have to decide where we are going to say the parameter "lives"
19093 (as far as the debugger is concerned). We only have a couple of
19094 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19095
19096 DECL_RTL normally indicates where the parameter lives during most of the
19097 activation of the function. If optimization is enabled however, this
19098 could be either NULL or else a pseudo-reg. Both of those cases indicate
19099 that the parameter doesn't really live anywhere (as far as the code
19100 generation parts of GCC are concerned) during most of the function's
19101 activation. That will happen (for example) if the parameter is never
19102 referenced within the function.
19103
19104 We could just generate a location descriptor here for all non-NULL
19105 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19106 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19107 where DECL_RTL is NULL or is a pseudo-reg.
19108
19109 Note however that we can only get away with using DECL_INCOMING_RTL as
19110 a backup substitute for DECL_RTL in certain limited cases. In cases
19111 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19112 we can be sure that the parameter was passed using the same type as it is
19113 declared to have within the function, and that its DECL_INCOMING_RTL
19114 points us to a place where a value of that type is passed.
19115
19116 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19117 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19118 because in these cases DECL_INCOMING_RTL points us to a value of some
19119 type which is *different* from the type of the parameter itself. Thus,
19120 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19121 such cases, the debugger would end up (for example) trying to fetch a
19122 `float' from a place which actually contains the first part of a
19123 `double'. That would lead to really incorrect and confusing
19124 output at debug-time.
19125
19126 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19127 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19128 are a couple of exceptions however. On little-endian machines we can
19129 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19130 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19131 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19132 when (on a little-endian machine) a non-prototyped function has a
19133 parameter declared to be of type `short' or `char'. In such cases,
19134 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19135 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19136 passed `int' value. If the debugger then uses that address to fetch
19137 a `short' or a `char' (on a little-endian machine) the result will be
19138 the correct data, so we allow for such exceptional cases below.
19139
19140 Note that our goal here is to describe the place where the given formal
19141 parameter lives during most of the function's activation (i.e. between the
19142 end of the prologue and the start of the epilogue). We'll do that as best
19143 as we can. Note however that if the given formal parameter is modified
19144 sometime during the execution of the function, then a stack backtrace (at
19145 debug-time) will show the function as having been called with the *new*
19146 value rather than the value which was originally passed in. This happens
19147 rarely enough that it is not a major problem, but it *is* a problem, and
19148 I'd like to fix it.
19149
19150 A future version of dwarf2out.c may generate two additional attributes for
19151 any given DW_TAG_formal_parameter DIE which will describe the "passed
19152 type" and the "passed location" for the given formal parameter in addition
19153 to the attributes we now generate to indicate the "declared type" and the
19154 "active location" for each parameter. This additional set of attributes
19155 could be used by debuggers for stack backtraces. Separately, note that
19156 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19157 This happens (for example) for inlined-instances of inline function formal
19158 parameters which are never referenced. This really shouldn't be
19159 happening. All PARM_DECL nodes should get valid non-NULL
19160 DECL_INCOMING_RTL values. FIXME. */
19161
19162 /* Use DECL_RTL as the "location" unless we find something better. */
19163 rtl = DECL_RTL_IF_SET (decl);
19164
19165 /* When generating abstract instances, ignore everything except
19166 constants, symbols living in memory, and symbols living in
19167 fixed registers. */
19168 if (! reload_completed)
19169 {
19170 if (rtl
19171 && (CONSTANT_P (rtl)
19172 || (MEM_P (rtl)
19173 && CONSTANT_P (XEXP (rtl, 0)))
19174 || (REG_P (rtl)
19175 && VAR_P (decl)
19176 && TREE_STATIC (decl))))
19177 {
19178 rtl = targetm.delegitimize_address (rtl);
19179 return rtl;
19180 }
19181 rtl = NULL_RTX;
19182 }
19183 else if (TREE_CODE (decl) == PARM_DECL)
19184 {
19185 if (rtl == NULL_RTX
19186 || is_pseudo_reg (rtl)
19187 || (MEM_P (rtl)
19188 && is_pseudo_reg (XEXP (rtl, 0))
19189 && DECL_INCOMING_RTL (decl)
19190 && MEM_P (DECL_INCOMING_RTL (decl))
19191 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19192 {
19193 tree declared_type = TREE_TYPE (decl);
19194 tree passed_type = DECL_ARG_TYPE (decl);
19195 machine_mode dmode = TYPE_MODE (declared_type);
19196 machine_mode pmode = TYPE_MODE (passed_type);
19197
19198 /* This decl represents a formal parameter which was optimized out.
19199 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19200 all cases where (rtl == NULL_RTX) just below. */
19201 if (dmode == pmode)
19202 rtl = DECL_INCOMING_RTL (decl);
19203 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19204 && SCALAR_INT_MODE_P (dmode)
19205 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19206 && DECL_INCOMING_RTL (decl))
19207 {
19208 rtx inc = DECL_INCOMING_RTL (decl);
19209 if (REG_P (inc))
19210 rtl = inc;
19211 else if (MEM_P (inc))
19212 {
19213 if (BYTES_BIG_ENDIAN)
19214 rtl = adjust_address_nv (inc, dmode,
19215 GET_MODE_SIZE (pmode)
19216 - GET_MODE_SIZE (dmode));
19217 else
19218 rtl = inc;
19219 }
19220 }
19221 }
19222
19223 /* If the parm was passed in registers, but lives on the stack, then
19224 make a big endian correction if the mode of the type of the
19225 parameter is not the same as the mode of the rtl. */
19226 /* ??? This is the same series of checks that are made in dbxout.c before
19227 we reach the big endian correction code there. It isn't clear if all
19228 of these checks are necessary here, but keeping them all is the safe
19229 thing to do. */
19230 else if (MEM_P (rtl)
19231 && XEXP (rtl, 0) != const0_rtx
19232 && ! CONSTANT_P (XEXP (rtl, 0))
19233 /* Not passed in memory. */
19234 && !MEM_P (DECL_INCOMING_RTL (decl))
19235 /* Not passed by invisible reference. */
19236 && (!REG_P (XEXP (rtl, 0))
19237 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19238 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19239 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19240 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19241 #endif
19242 )
19243 /* Big endian correction check. */
19244 && BYTES_BIG_ENDIAN
19245 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19246 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19247 UNITS_PER_WORD))
19248 {
19249 machine_mode addr_mode = get_address_mode (rtl);
19250 poly_int64 offset = (UNITS_PER_WORD
19251 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19252
19253 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19254 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19255 }
19256 }
19257 else if (VAR_P (decl)
19258 && rtl
19259 && MEM_P (rtl)
19260 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19261 {
19262 machine_mode addr_mode = get_address_mode (rtl);
19263 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19264 GET_MODE (rtl));
19265
19266 /* If a variable is declared "register" yet is smaller than
19267 a register, then if we store the variable to memory, it
19268 looks like we're storing a register-sized value, when in
19269 fact we are not. We need to adjust the offset of the
19270 storage location to reflect the actual value's bytes,
19271 else gdb will not be able to display it. */
19272 if (maybe_ne (offset, 0))
19273 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19274 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19275 }
19276
19277 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19278 and will have been substituted directly into all expressions that use it.
19279 C does not have such a concept, but C++ and other languages do. */
19280 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19281 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19282
19283 if (rtl)
19284 rtl = targetm.delegitimize_address (rtl);
19285
19286 /* If we don't look past the constant pool, we risk emitting a
19287 reference to a constant pool entry that isn't referenced from
19288 code, and thus is not emitted. */
19289 if (rtl)
19290 rtl = avoid_constant_pool_reference (rtl);
19291
19292 /* Try harder to get a rtl. If this symbol ends up not being emitted
19293 in the current CU, resolve_addr will remove the expression referencing
19294 it. */
19295 if (rtl == NULL_RTX
19296 && VAR_P (decl)
19297 && !DECL_EXTERNAL (decl)
19298 && TREE_STATIC (decl)
19299 && DECL_NAME (decl)
19300 && !DECL_HARD_REGISTER (decl)
19301 && DECL_MODE (decl) != VOIDmode)
19302 {
19303 rtl = make_decl_rtl_for_debug (decl);
19304 if (!MEM_P (rtl)
19305 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19306 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19307 rtl = NULL_RTX;
19308 }
19309
19310 return rtl;
19311 }
19312
19313 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19314 returned. If so, the decl for the COMMON block is returned, and the
19315 value is the offset into the common block for the symbol. */
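
/* As a purely illustrative sketch, for Fortran source along the lines of

       integer i, j
       common /blk/ i, j

   the decl for J carries a DECL_VALUE_EXPR that is a COMPONENT_REF into
   the decl for the BLK common block; this function would then return that
   block decl and set *VALUE to J's byte offset within it (4 here, assuming
   4-byte default integers).  */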
19316
19317 static tree
19318 fortran_common (tree decl, HOST_WIDE_INT *value)
19319 {
19320 tree val_expr, cvar;
19321 machine_mode mode;
19322 poly_int64 bitsize, bitpos;
19323 tree offset;
19324 HOST_WIDE_INT cbitpos;
19325 int unsignedp, reversep, volatilep = 0;
19326
19327 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19328 it does not have a value expression (giving the offset into the
19329 common area), or if we aren't compiling Fortran, then it isn't
19330 common, and shouldn't be handled as such. */
19331 if (!VAR_P (decl)
19332 || !TREE_STATIC (decl)
19333 || !DECL_HAS_VALUE_EXPR_P (decl)
19334 || !is_fortran ())
19335 return NULL_TREE;
19336
19337 val_expr = DECL_VALUE_EXPR (decl);
19338 if (TREE_CODE (val_expr) != COMPONENT_REF)
19339 return NULL_TREE;
19340
19341 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19342 &unsignedp, &reversep, &volatilep);
19343
19344 if (cvar == NULL_TREE
19345 || !VAR_P (cvar)
19346 || DECL_ARTIFICIAL (cvar)
19347 || !TREE_PUBLIC (cvar)
19348 /* We don't expect to have to cope with variable offsets,
19349 since at present all static data must have a constant size. */
19350 || !bitpos.is_constant (&cbitpos))
19351 return NULL_TREE;
19352
19353 *value = 0;
19354 if (offset != NULL)
19355 {
19356 if (!tree_fits_shwi_p (offset))
19357 return NULL_TREE;
19358 *value = tree_to_shwi (offset);
19359 }
19360 if (cbitpos != 0)
19361 *value += cbitpos / BITS_PER_UNIT;
19362
19363 return cvar;
19364 }
19365
19366 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19367 data attribute for a variable or a parameter. We generate the
19368 DW_AT_const_value attribute only in those cases where the given variable
19369 or parameter does not have a true "location" either in memory or in a
19370 register. This can happen (for example) when a constant is passed as an
19371 actual argument in a call to an inline function. (It's possible that
19372 these things can crop up in other ways also.) Note that one type of
19373 constant value which can be passed into an inlined function is a constant
19374 pointer. This can happen for example if an actual argument in an inlined
19375 function call evaluates to a compile-time constant address.
19376
19377 CACHE_P is true if it is worth caching the location list for DECL,
19378 so that future calls can reuse it rather than regenerate it from scratch.
19379 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19380 since we will need to refer to them each time the function is inlined. */
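
/* A minimal illustration (not taken from any particular testcase): given

       static inline int twice (int n) { return n + n; }
       int six (void) { return twice (3); }

   the inlined copy of N may have no run-time location at all once the
   call has been folded, in which case its DIE can carry
   DW_AT_const_value 3 instead of a DW_AT_location.  */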
19381
19382 static bool
19383 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19384 {
19385 rtx rtl;
19386 dw_loc_list_ref list;
19387 var_loc_list *loc_list;
19388 cached_dw_loc_list *cache;
19389
19390 if (early_dwarf)
19391 return false;
19392
19393 if (TREE_CODE (decl) == ERROR_MARK)
19394 return false;
19395
19396 if (get_AT (die, DW_AT_location)
19397 || get_AT (die, DW_AT_const_value))
19398 return true;
19399
19400 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19401 || TREE_CODE (decl) == RESULT_DECL);
19402
19403 /* Try to get some constant RTL for this decl, and use that as the value of
19404 the location. */
19405
19406 rtl = rtl_for_decl_location (decl);
19407 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19408 && add_const_value_attribute (die, rtl))
19409 return true;
19410
19411 /* See if we have a single-element location list that is equivalent to
19412 a constant value. In that case it is better to use add_const_value_attribute
19413 rather than expanding the equivalent constant value. */
19414 loc_list = lookup_decl_loc (decl);
19415 if (loc_list
19416 && loc_list->first
19417 && loc_list->first->next == NULL
19418 && NOTE_P (loc_list->first->loc)
19419 && NOTE_VAR_LOCATION (loc_list->first->loc)
19420 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19421 {
19422 struct var_loc_node *node;
19423
19424 node = loc_list->first;
19425 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19426 if (GET_CODE (rtl) == EXPR_LIST)
19427 rtl = XEXP (rtl, 0);
19428 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19429 && add_const_value_attribute (die, rtl))
19430 return true;
19431 }
19432 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19433 list several times. See if we've already cached the contents. */
19434 list = NULL;
19435 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19436 cache_p = false;
19437 if (cache_p)
19438 {
19439 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19440 if (cache)
19441 list = cache->loc_list;
19442 }
19443 if (list == NULL)
19444 {
19445 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19446 NULL);
19447 /* It is usually worth caching this result if the decl is from
19448 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19449 if (cache_p && list && list->dw_loc_next)
19450 {
19451 cached_dw_loc_list **slot
19452 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19453 DECL_UID (decl),
19454 INSERT);
19455 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19456 cache->decl_id = DECL_UID (decl);
19457 cache->loc_list = list;
19458 *slot = cache;
19459 }
19460 }
19461 if (list)
19462 {
19463 add_AT_location_description (die, DW_AT_location, list);
19464 return true;
19465 }
19466 /* None of that worked, so it must not really have a location;
19467 try adding a constant value attribute from the DECL_INITIAL. */
19468 return tree_add_const_value_attribute_for_decl (die, decl);
19469 }
19470
19471 /* Helper function for tree_add_const_value_attribute. Natively encode
19472 initializer INIT into an array. Return true if successful. */
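
/* For instance (an illustrative sketch only), for

       static const struct { char c; short s; } v = { 'a', 7 };

   the CONSTRUCTOR for V is encoded into a 4-byte array laid out the way
   the target would lay out V itself, e.g. { 0x61, 0x00, 0x07, 0x00 }
   assuming a little-endian 2-byte `short' and one byte of padding after
   C, so that the bytes can be emitted verbatim as a DW_AT_const_value
   block.  */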
19473
19474 static bool
19475 native_encode_initializer (tree init, unsigned char *array, int size)
19476 {
19477 tree type;
19478
19479 if (init == NULL_TREE)
19480 return false;
19481
19482 STRIP_NOPS (init);
19483 switch (TREE_CODE (init))
19484 {
19485 case STRING_CST:
19486 type = TREE_TYPE (init);
19487 if (TREE_CODE (type) == ARRAY_TYPE)
19488 {
19489 tree enttype = TREE_TYPE (type);
19490 scalar_int_mode mode;
19491
19492 if (!is_int_mode (TYPE_MODE (enttype), &mode)
19493 || GET_MODE_SIZE (mode) != 1)
19494 return false;
19495 if (int_size_in_bytes (type) != size)
19496 return false;
19497 if (size > TREE_STRING_LENGTH (init))
19498 {
19499 memcpy (array, TREE_STRING_POINTER (init),
19500 TREE_STRING_LENGTH (init));
19501 memset (array + TREE_STRING_LENGTH (init),
19502 '\0', size - TREE_STRING_LENGTH (init));
19503 }
19504 else
19505 memcpy (array, TREE_STRING_POINTER (init), size);
19506 return true;
19507 }
19508 return false;
19509 case CONSTRUCTOR:
19510 type = TREE_TYPE (init);
19511 if (int_size_in_bytes (type) != size)
19512 return false;
19513 if (TREE_CODE (type) == ARRAY_TYPE)
19514 {
19515 HOST_WIDE_INT min_index;
19516 unsigned HOST_WIDE_INT cnt;
19517 int curpos = 0, fieldsize;
19518 constructor_elt *ce;
19519
19520 if (TYPE_DOMAIN (type) == NULL_TREE
19521 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19522 return false;
19523
19524 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19525 if (fieldsize <= 0)
19526 return false;
19527
19528 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19529 memset (array, '\0', size);
19530 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19531 {
19532 tree val = ce->value;
19533 tree index = ce->index;
19534 int pos = curpos;
19535 if (index && TREE_CODE (index) == RANGE_EXPR)
19536 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19537 * fieldsize;
19538 else if (index)
19539 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19540
19541 if (val)
19542 {
19543 STRIP_NOPS (val);
19544 if (!native_encode_initializer (val, array + pos, fieldsize))
19545 return false;
19546 }
19547 curpos = pos + fieldsize;
19548 if (index && TREE_CODE (index) == RANGE_EXPR)
19549 {
19550 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19551 - tree_to_shwi (TREE_OPERAND (index, 0));
19552 while (count-- > 0)
19553 {
19554 if (val)
19555 memcpy (array + curpos, array + pos, fieldsize);
19556 curpos += fieldsize;
19557 }
19558 }
19559 gcc_assert (curpos <= size);
19560 }
19561 return true;
19562 }
19563 else if (TREE_CODE (type) == RECORD_TYPE
19564 || TREE_CODE (type) == UNION_TYPE)
19565 {
19566 tree field = NULL_TREE;
19567 unsigned HOST_WIDE_INT cnt;
19568 constructor_elt *ce;
19569
19570 if (int_size_in_bytes (type) != size)
19571 return false;
19572
19573 if (TREE_CODE (type) == RECORD_TYPE)
19574 field = TYPE_FIELDS (type);
19575
19576 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19577 {
19578 tree val = ce->value;
19579 int pos, fieldsize;
19580
19581 if (ce->index != 0)
19582 field = ce->index;
19583
19584 if (val)
19585 STRIP_NOPS (val);
19586
19587 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19588 return false;
19589
19590 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19591 && TYPE_DOMAIN (TREE_TYPE (field))
19592 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19593 return false;
19594 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19595 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19596 return false;
19597 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19598 pos = int_byte_position (field);
19599 gcc_assert (pos + fieldsize <= size);
19600 if (val && fieldsize != 0
19601 && !native_encode_initializer (val, array + pos, fieldsize))
19602 return false;
19603 }
19604 return true;
19605 }
19606 return false;
19607 case VIEW_CONVERT_EXPR:
19608 case NON_LVALUE_EXPR:
19609 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19610 default:
19611 return native_encode_expr (init, array, size) == size;
19612 }
19613 }
19614
19615 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19616 attribute is the const value T. */
19617
19618 static bool
19619 tree_add_const_value_attribute (dw_die_ref die, tree t)
19620 {
19621 tree init;
19622 tree type = TREE_TYPE (t);
19623 rtx rtl;
19624
19625 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19626 return false;
19627
19628 init = t;
19629 gcc_assert (!DECL_P (init));
19630
19631 if (TREE_CODE (init) == INTEGER_CST)
19632 {
19633 if (tree_fits_uhwi_p (init))
19634 {
19635 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
19636 return true;
19637 }
19638 if (tree_fits_shwi_p (init))
19639 {
19640 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
19641 return true;
19642 }
19643 }
19644 if (! early_dwarf)
19645 {
19646 rtl = rtl_for_decl_init (init, type);
19647 if (rtl)
19648 return add_const_value_attribute (die, rtl);
19649 }
19650 /* If the host and target are sane, try harder. */
19651 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19652 && initializer_constant_valid_p (init, type))
19653 {
19654 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19655 if (size > 0 && (int) size == size)
19656 {
19657 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19658
19659 if (native_encode_initializer (init, array, size))
19660 {
19661 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19662 return true;
19663 }
19664 ggc_free (array);
19665 }
19666 }
19667 return false;
19668 }
19669
19670 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19671 attribute is the const value of DECL, where DECL is an integral constant
19672 variable with static storage duration
19673 (so it can't be a PARM_DECL or a RESULT_DECL). */
19674
19675 static bool
19676 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19677 {
19678
19679 if (!decl
19680 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19681 || (VAR_P (decl) && !TREE_STATIC (decl)))
19682 return false;
19683
19684 if (TREE_READONLY (decl)
19685 && ! TREE_THIS_VOLATILE (decl)
19686 && DECL_INITIAL (decl))
19687 /* OK */;
19688 else
19689 return false;
19690
19691 /* Don't add DW_AT_const_value if abstract origin already has one. */
19692 if (get_AT (var_die, DW_AT_const_value))
19693 return false;
19694
19695 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19696 }
19697
19698 /* Convert the CFI instructions for the current function into a
19699 location list. This is used for DW_AT_frame_base when we are targeting
19700 a dwarf2 consumer that does not support the dwarf3
19701 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19702 expressions. */
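
/* As an illustrative sketch of the result (x86-64 register numbering
   assumed, labels invented): if the CFA is rsp+8 on entry and rbp+16 once
   the frame pointer has been set up, then with OFFSET equal to -16 the
   returned list contains entries along the lines of

       <.Ltext0, .LCFI1>   DW_OP_breg7 (rsp): -8
       <.LCFI1,  .Lend>    DW_OP_breg6 (rbp): 0

   one entry for each range over which the CFA expression stays the same
   (the prologue itself may contribute further short ranges).  */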
19703
19704 static dw_loc_list_ref
19705 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19706 {
19707 int ix;
19708 dw_fde_ref fde;
19709 dw_loc_list_ref list, *list_tail;
19710 dw_cfi_ref cfi;
19711 dw_cfa_location last_cfa, next_cfa;
19712 const char *start_label, *last_label, *section;
19713 dw_cfa_location remember;
19714
19715 fde = cfun->fde;
19716 gcc_assert (fde != NULL);
19717
19718 section = secname_for_decl (current_function_decl);
19719 list_tail = &list;
19720 list = NULL;
19721
19722 memset (&next_cfa, 0, sizeof (next_cfa));
19723 next_cfa.reg = INVALID_REGNUM;
19724 remember = next_cfa;
19725
19726 start_label = fde->dw_fde_begin;
19727
19728 /* ??? Bald assumption that the CIE opcode list does not contain
19729 advance opcodes. */
19730 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19731 lookup_cfa_1 (cfi, &next_cfa, &remember);
19732
19733 last_cfa = next_cfa;
19734 last_label = start_label;
19735
19736 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19737 {
19738 /* If the first partition contained no CFI adjustments, the
19739 CIE opcodes apply to the whole first partition. */
19740 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19741 fde->dw_fde_begin, fde->dw_fde_end, section);
19742 list_tail = &(*list_tail)->dw_loc_next;
19743 start_label = last_label = fde->dw_fde_second_begin;
19744 }
19745
19746 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19747 {
19748 switch (cfi->dw_cfi_opc)
19749 {
19750 case DW_CFA_set_loc:
19751 case DW_CFA_advance_loc1:
19752 case DW_CFA_advance_loc2:
19753 case DW_CFA_advance_loc4:
19754 if (!cfa_equal_p (&last_cfa, &next_cfa))
19755 {
19756 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19757 start_label, last_label, section);
19758
19759 list_tail = &(*list_tail)->dw_loc_next;
19760 last_cfa = next_cfa;
19761 start_label = last_label;
19762 }
19763 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19764 break;
19765
19766 case DW_CFA_advance_loc:
19767 /* The encoding is complex enough that we should never emit this. */
19768 gcc_unreachable ();
19769
19770 default:
19771 lookup_cfa_1 (cfi, &next_cfa, &remember);
19772 break;
19773 }
19774 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19775 {
19776 if (!cfa_equal_p (&last_cfa, &next_cfa))
19777 {
19778 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19779 start_label, last_label, section);
19780
19781 list_tail = &(*list_tail)->dw_loc_next;
19782 last_cfa = next_cfa;
19783 start_label = last_label;
19784 }
19785 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19786 start_label, fde->dw_fde_end, section);
19787 list_tail = &(*list_tail)->dw_loc_next;
19788 start_label = last_label = fde->dw_fde_second_begin;
19789 }
19790 }
19791
19792 if (!cfa_equal_p (&last_cfa, &next_cfa))
19793 {
19794 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19795 start_label, last_label, section);
19796 list_tail = &(*list_tail)->dw_loc_next;
19797 start_label = last_label;
19798 }
19799
19800 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19801 start_label,
19802 fde->dw_fde_second_begin
19803 ? fde->dw_fde_second_end : fde->dw_fde_end,
19804 section);
19805
19806 if (list && list->dw_loc_next)
19807 gen_llsym (list);
19808
19809 return list;
19810 }
19811
19812 /* Compute a displacement from the "steady-state frame pointer" to the
19813 frame base (often the same as the CFA), and store it in
19814 frame_pointer_fb_offset. OFFSET is added to the displacement
19815 before the latter is negated. */
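
/* A purely numeric illustration, not tied to any target: if OFFSET is 0,
   the CFA offset of the chosen pointer is 16, and that pointer eliminates
   to the hard frame pointer plus 8, the accumulated offset is
   0 + 16 + 8 = 24 and frame_pointer_fb_offset is set to -24.  */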
19816
19817 static void
19818 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
19819 {
19820 rtx reg, elim;
19821
19822 #ifdef FRAME_POINTER_CFA_OFFSET
19823 reg = frame_pointer_rtx;
19824 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19825 #else
19826 reg = arg_pointer_rtx;
19827 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19828 #endif
19829
19830 elim = (ira_use_lra_p
19831 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19832 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19833 elim = strip_offset_and_add (elim, &offset);
19834
19835 frame_pointer_fb_offset = -offset;
19836
19837 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19838 in which to eliminate. This is because its stack pointer isn't
19839 directly accessible as a register within the ISA. To work around
19840 this, assume that while we cannot provide a proper value for
19841 frame_pointer_fb_offset, we won't need one either. */
19842 frame_pointer_fb_offset_valid
19843 = ((SUPPORTS_STACK_ALIGNMENT
19844 && (elim == hard_frame_pointer_rtx
19845 || elim == stack_pointer_rtx))
19846 || elim == (frame_pointer_needed
19847 ? hard_frame_pointer_rtx
19848 : stack_pointer_rtx));
19849 }
19850
19851 /* Generate a DW_AT_name attribute given some string value to be included as
19852 the value of the attribute. */
19853
19854 static void
19855 add_name_attribute (dw_die_ref die, const char *name_string)
19856 {
19857 if (name_string != NULL && *name_string != 0)
19858 {
19859 if (demangle_name_func)
19860 name_string = (*demangle_name_func) (name_string);
19861
19862 add_AT_string (die, DW_AT_name, name_string);
19863 }
19864 }
19865
19866 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
19867 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19868 of TYPE accordingly.
19869
19870 ??? This is a temporary measure until after we're able to generate
19871 regular DWARF for the complex Ada type system. */
19872
19873 static void
19874 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19875 dw_die_ref context_die)
19876 {
19877 tree dtype;
19878 dw_die_ref dtype_die;
19879
19880 if (!lang_hooks.types.descriptive_type)
19881 return;
19882
19883 dtype = lang_hooks.types.descriptive_type (type);
19884 if (!dtype)
19885 return;
19886
19887 dtype_die = lookup_type_die (dtype);
19888 if (!dtype_die)
19889 {
19890 gen_type_die (dtype, context_die);
19891 dtype_die = lookup_type_die (dtype);
19892 gcc_assert (dtype_die);
19893 }
19894
19895 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19896 }
19897
19898 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19899
19900 static const char *
19901 comp_dir_string (void)
19902 {
19903 const char *wd;
19904 char *wd1;
19905 static const char *cached_wd = NULL;
19906
19907 if (cached_wd != NULL)
19908 return cached_wd;
19909
19910 wd = get_src_pwd ();
19911 if (wd == NULL)
19912 return NULL;
19913
19914 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19915 {
19916 int wdlen;
19917
19918 wdlen = strlen (wd);
19919 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19920 strcpy (wd1, wd);
19921 wd1 [wdlen] = DIR_SEPARATOR;
19922 wd1 [wdlen + 1] = 0;
19923 wd = wd1;
19924 }
19925
19926 cached_wd = remap_debug_filename (wd);
19927 return cached_wd;
19928 }
19929
19930 /* Generate a DW_AT_comp_dir attribute for DIE. */
19931
19932 static void
19933 add_comp_dir_attribute (dw_die_ref die)
19934 {
19935 const char * wd = comp_dir_string ();
19936 if (wd != NULL)
19937 add_AT_string (die, DW_AT_comp_dir, wd);
19938 }
19939
19940 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19941 pointer computation, ...), output a representation for that bound according
19942 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19943 loc_list_from_tree for the meaning of CONTEXT. */
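
/* A rough illustration (GNU C, purely for exposition): for a variable
   length array such as

       void g (int n) { int a[n]; a[0] = 0; }

   the upper bound of A's type is only known at run time, so it cannot be
   emitted as a constant; depending on FORMS it is emitted either as a
   DWARF expression (dw_scalar_form_exprloc) or as a reference to the DIE
   of a decl holding the bound (dw_scalar_form_reference).  A fixed bound
   such as the 9 of `int b[10]' can simply use dw_scalar_form_constant.  */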
19944
19945 static void
19946 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19947 int forms, struct loc_descr_context *context)
19948 {
19949 dw_die_ref context_die, decl_die;
19950 dw_loc_list_ref list;
19951 bool strip_conversions = true;
19952 bool placeholder_seen = false;
19953
19954 while (strip_conversions)
19955 switch (TREE_CODE (value))
19956 {
19957 case ERROR_MARK:
19958 case SAVE_EXPR:
19959 return;
19960
19961 CASE_CONVERT:
19962 case VIEW_CONVERT_EXPR:
19963 value = TREE_OPERAND (value, 0);
19964 break;
19965
19966 default:
19967 strip_conversions = false;
19968 break;
19969 }
19970
19971 /* If possible and permitted, output the attribute as a constant. */
19972 if ((forms & dw_scalar_form_constant) != 0
19973 && TREE_CODE (value) == INTEGER_CST)
19974 {
19975 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19976
19977 /* If HOST_WIDE_INT is big enough then represent the bound as
19978 a constant value. We need to choose a form based on
19979 whether the type is signed or unsigned. We cannot just
19980 call add_AT_unsigned if the value itself is positive
19981 (add_AT_unsigned might add the unsigned value encoded as
19982 DW_FORM_data[1248]). Some DWARF consumers will look up the
19983 bounds type and then sign extend any unsigned values found
19984 for signed types. This is needed only for
19985 DW_AT_{lower,upper}_bound, since for most other attributes,
19986 consumers will treat DW_FORM_data[1248] as unsigned values,
19987 regardless of the underlying type. */
19988 if (prec <= HOST_BITS_PER_WIDE_INT
19989 || tree_fits_uhwi_p (value))
19990 {
19991 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19992 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19993 else
19994 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19995 }
19996 else
19997 /* Otherwise represent the bound as an unsigned value with
19998 the precision of its type. The precision and signedness
19999 of the type will be necessary to re-interpret it
20000 unambiguously. */
20001 add_AT_wide (die, attr, wi::to_wide (value));
20002 return;
20003 }
20004
20005 /* Otherwise, if it is both possible and permitted, output a reference to
20006 another DIE. */
20007 if ((forms & dw_scalar_form_reference) != 0)
20008 {
20009 tree decl = NULL_TREE;
20010
20011 /* Some type attributes reference an outer type. For instance, the upper
20012 bound of an array may reference an embedding record (this happens in
20013 Ada). */
20014 if (TREE_CODE (value) == COMPONENT_REF
20015 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20016 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20017 decl = TREE_OPERAND (value, 1);
20018
20019 else if (VAR_P (value)
20020 || TREE_CODE (value) == PARM_DECL
20021 || TREE_CODE (value) == RESULT_DECL)
20022 decl = value;
20023
20024 if (decl != NULL_TREE)
20025 {
20026 dw_die_ref decl_die = lookup_decl_die (decl);
20027
20028 /* ??? Can this happen, or should the variable have been bound
20029 first? Probably it can, since I imagine that we try to create
20030 the types of parameters in the order in which they exist in
20031 the list, and won't have created a forward reference to a
20032 later parameter. */
20033 if (decl_die != NULL)
20034 {
20035 add_AT_die_ref (die, attr, decl_die);
20036 return;
20037 }
20038 }
20039 }
20040
20041 /* Last chance: try to create a stack operation procedure to evaluate the
20042 value. Do nothing if even that is not possible or permitted. */
20043 if ((forms & dw_scalar_form_exprloc) == 0)
20044 return;
20045
20046 list = loc_list_from_tree (value, 2, context);
20047 if (context && context->placeholder_arg)
20048 {
20049 placeholder_seen = context->placeholder_seen;
20050 context->placeholder_seen = false;
20051 }
20052 if (list == NULL || single_element_loc_list_p (list))
20053 {
20054 /* If this attribute is not a reference or a constant, it is
20055 a DWARF expression rather than a location description. For that,
20056 loc_list_from_tree (value, 0, context) is needed. */
20057 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20058 if (list2 && single_element_loc_list_p (list2))
20059 {
20060 if (placeholder_seen)
20061 {
20062 struct dwarf_procedure_info dpi;
20063 dpi.fndecl = NULL_TREE;
20064 dpi.args_count = 1;
20065 if (!resolve_args_picking (list2->expr, 1, &dpi))
20066 return;
20067 }
20068 add_AT_loc (die, attr, list2->expr);
20069 return;
20070 }
20071 }
20072
20073 /* If that failed to give a single element location list, fall back to
20074 outputting this as a reference, if that form is permitted. */
20075 if (list == NULL
20076 || (forms & dw_scalar_form_reference) == 0
20077 || placeholder_seen)
20078 return;
20079
20080 if (current_function_decl == 0)
20081 context_die = comp_unit_die ();
20082 else
20083 context_die = lookup_decl_die (current_function_decl);
20084
20085 decl_die = new_die (DW_TAG_variable, context_die, value);
20086 add_AT_flag (decl_die, DW_AT_artificial, 1);
20087 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20088 context_die);
20089 add_AT_location_description (decl_die, DW_AT_location, list);
20090 add_AT_die_ref (die, attr, decl_die);
20091 }
20092
20093 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20094 default. */
20095
20096 static int
20097 lower_bound_default (void)
20098 {
20099 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20100 {
20101 case DW_LANG_C:
20102 case DW_LANG_C89:
20103 case DW_LANG_C99:
20104 case DW_LANG_C11:
20105 case DW_LANG_C_plus_plus:
20106 case DW_LANG_C_plus_plus_11:
20107 case DW_LANG_C_plus_plus_14:
20108 case DW_LANG_ObjC:
20109 case DW_LANG_ObjC_plus_plus:
20110 return 0;
20111 case DW_LANG_Fortran77:
20112 case DW_LANG_Fortran90:
20113 case DW_LANG_Fortran95:
20114 case DW_LANG_Fortran03:
20115 case DW_LANG_Fortran08:
20116 return 1;
20117 case DW_LANG_UPC:
20118 case DW_LANG_D:
20119 case DW_LANG_Python:
20120 return dwarf_version >= 4 ? 0 : -1;
20121 case DW_LANG_Ada95:
20122 case DW_LANG_Ada83:
20123 case DW_LANG_Cobol74:
20124 case DW_LANG_Cobol85:
20125 case DW_LANG_Modula2:
20126 case DW_LANG_PLI:
20127 return dwarf_version >= 4 ? 1 : -1;
20128 default:
20129 return -1;
20130 }
20131 }
20132
20133 /* Given a tree node describing an array bound (either lower or upper) output
20134 a representation for that bound. */
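
/* For example (illustrative only), for the C declaration

       int a[10];

   the lower bound is 0, which matches the C default returned by
   lower_bound_default, so no DW_AT_lower_bound is emitted, and the upper
   bound is emitted as DW_AT_upper_bound 9.  */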
20135
20136 static void
20137 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20138 tree bound, struct loc_descr_context *context)
20139 {
20140 int dflt;
20141
20142 while (1)
20143 switch (TREE_CODE (bound))
20144 {
20145 /* Strip all conversions. */
20146 CASE_CONVERT:
20147 case VIEW_CONVERT_EXPR:
20148 bound = TREE_OPERAND (bound, 0);
20149 break;
20150
20151 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20152 are even omitted when they are the default. */
20153 case INTEGER_CST:
20154 /* If the value for this bound is the default one, we can even omit the
20155 attribute. */
20156 if (bound_attr == DW_AT_lower_bound
20157 && tree_fits_shwi_p (bound)
20158 && (dflt = lower_bound_default ()) != -1
20159 && tree_to_shwi (bound) == dflt)
20160 return;
20161
20162 /* FALLTHRU */
20163
20164 default:
20165 /* Because of the complex interactions there can be with other GNAT
20166 encodings, GDB isn't yet ready to handle a proper DWARF description
20167 for self-referential subrange bounds: let GNAT encodings do the
20168 magic in such a case. */
20169 if (is_ada ()
20170 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20171 && contains_placeholder_p (bound))
20172 return;
20173
20174 add_scalar_info (subrange_die, bound_attr, bound,
20175 dw_scalar_form_constant
20176 | dw_scalar_form_exprloc
20177 | dw_scalar_form_reference,
20178 context);
20179 return;
20180 }
20181 }
20182
20183 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20184 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20185 Note that the block of subscript information for an array type also
20186 includes information about the element type of the given array type.
20187
20188 This function reuses previously set type and bound information if
20189 available. */
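
/* As an illustrative sketch, for the C declaration

       int m[2][3];

   with COLLAPSE_P true this adds two DW_TAG_subrange_type children to
   TYPE_DIE, carrying DW_AT_upper_bound 1 and 2 respectively; the lower
   bounds are the C default of 0 and are therefore omitted.  */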
20190
20191 static void
20192 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20193 {
20194 unsigned dimension_number;
20195 tree lower, upper;
20196 dw_die_ref child = type_die->die_child;
20197
20198 for (dimension_number = 0;
20199 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20200 type = TREE_TYPE (type), dimension_number++)
20201 {
20202 tree domain = TYPE_DOMAIN (type);
20203
20204 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20205 break;
20206
20207 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20208 and (in GNU C only) variable bounds. Handle all three forms
20209 here. */
20210
20211 /* Find and reuse a previously generated DW_TAG_subrange_type if
20212 available.
20213
20214 For multi-dimensional arrays, as we iterate through the
20215 various dimensions in the enclosing for loop above, we also
20216 iterate through the DIE children and pick at each
20217 DW_TAG_subrange_type previously generated (if available).
20218 Each child DW_TAG_subrange_type DIE describes the range of
20219 the current dimension. At this point we should have as many
20220 DW_TAG_subrange_type's as we have dimensions in the
20221 array. */
20222 dw_die_ref subrange_die = NULL;
20223 if (child)
20224 while (1)
20225 {
20226 child = child->die_sib;
20227 if (child->die_tag == DW_TAG_subrange_type)
20228 subrange_die = child;
20229 if (child == type_die->die_child)
20230 {
20231 /* If we wrapped around, stop looking next time. */
20232 child = NULL;
20233 break;
20234 }
20235 if (child->die_tag == DW_TAG_subrange_type)
20236 break;
20237 }
20238 if (!subrange_die)
20239 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20240
20241 if (domain)
20242 {
20243 /* We have an array type with specified bounds. */
20244 lower = TYPE_MIN_VALUE (domain);
20245 upper = TYPE_MAX_VALUE (domain);
20246
20247 /* Define the index type. */
20248 if (TREE_TYPE (domain)
20249 && !get_AT (subrange_die, DW_AT_type))
20250 {
20251 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20252 TREE_TYPE field. We can't emit debug info for this
20253 because it is an unnamed integral type. */
20254 if (TREE_CODE (domain) == INTEGER_TYPE
20255 && TYPE_NAME (domain) == NULL_TREE
20256 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20257 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20258 ;
20259 else
20260 add_type_attribute (subrange_die, TREE_TYPE (domain),
20261 TYPE_UNQUALIFIED, false, type_die);
20262 }
20263
20264 /* ??? If upper is NULL, the array has unspecified length,
20265 but it does have a lower bound. This happens with Fortran
20266 dimension arr(N:*)
20267 Since the debugger is definitely going to need to know N
20268 to produce useful results, go ahead and output the lower
20269 bound solo, and hope the debugger can cope. */
20270
20271 if (!get_AT (subrange_die, DW_AT_lower_bound))
20272 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20273 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20274 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20275 }
20276
20277 /* Otherwise we have an array type with an unspecified length. The
20278 DWARF-2 spec does not say how to handle this; let's just leave out the
20279 bounds. */
20280 }
20281 }
20282
20283 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20284
20285 static void
20286 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20287 {
20288 dw_die_ref decl_die;
20289 HOST_WIDE_INT size;
20290 dw_loc_descr_ref size_expr = NULL;
20291
20292 switch (TREE_CODE (tree_node))
20293 {
20294 case ERROR_MARK:
20295 size = 0;
20296 break;
20297 case ENUMERAL_TYPE:
20298 case RECORD_TYPE:
20299 case UNION_TYPE:
20300 case QUAL_UNION_TYPE:
20301 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20302 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20303 {
20304 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20305 return;
20306 }
20307 size_expr = type_byte_size (tree_node, &size);
20308 break;
20309 case FIELD_DECL:
20310 /* For a data member of a struct or union, the DW_AT_byte_size is
20311 generally given as the number of bytes normally allocated for an
20312 object of the *declared* type of the member itself. This is true
20313 even for bit-fields. */
20314 size = int_size_in_bytes (field_type (tree_node));
20315 break;
20316 default:
20317 gcc_unreachable ();
20318 }
20319
20320 /* Support for dynamically-sized objects was introduced by DWARFv3.
20321 At the moment, GDB does not handle variable byte sizes very well,
20322 though. */
20323 if ((dwarf_version >= 3 || !dwarf_strict)
20324 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20325 && size_expr != NULL)
20326 add_AT_loc (die, DW_AT_byte_size, size_expr);
20327
20328 /* Note that `size' might be -1 when we get to this point. If it is, that
20329 indicates that the byte size of the entity in question is variable and
20330 that we could not generate a DWARF expression that computes it. */
20331 if (size >= 0)
20332 add_AT_unsigned (die, DW_AT_byte_size, size);
20333 }
20334
20335 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20336 alignment. */
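
/* For instance (an illustrative sketch), a declaration such as

       _Alignas (16) static char buf[32];

   carries a user-specified alignment, so its DIE gets DW_AT_alignment 16,
   whereas declarations that merely use their type's default alignment get
   no such attribute.  */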
20337
20338 static void
20339 add_alignment_attribute (dw_die_ref die, tree tree_node)
20340 {
20341 if (dwarf_version < 5 && dwarf_strict)
20342 return;
20343
20344 unsigned align;
20345
20346 if (DECL_P (tree_node))
20347 {
20348 if (!DECL_USER_ALIGN (tree_node))
20349 return;
20350
20351 align = DECL_ALIGN_UNIT (tree_node);
20352 }
20353 else if (TYPE_P (tree_node))
20354 {
20355 if (!TYPE_USER_ALIGN (tree_node))
20356 return;
20357
20358 align = TYPE_ALIGN_UNIT (tree_node);
20359 }
20360 else
20361 gcc_unreachable ();
20362
20363 add_AT_unsigned (die, DW_AT_alignment, align);
20364 }
20365
20366 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20367 which specifies the distance in bits from the highest order bit of the
20368 "containing object" for the bit-field to the highest order bit of the
20369 bit-field itself.
20370
20371 For any given bit-field, the "containing object" is a hypothetical object
20372 (of some integral or enum type) within which the given bit-field lives. The
20373 type of this hypothetical "containing object" is always the same as the
20374 declared type of the individual bit-field itself. The determination of the
20375 exact location of the "containing object" for a bit-field is rather
20376 complicated. It's handled by the `field_byte_offset' function (above).
20377
20378 CTX is required: see the comment for VLR_CONTEXT.
20379
20380 Note that it is the size (in bytes) of the hypothetical "containing object"
20381 which will be given in the DW_AT_byte_size attribute for this bit-field.
20382 (See `byte_size_attribute' above). */
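
/* A worked example (illustrative; little-endian, 32-bit `unsigned int'
   assumed): for

       struct s { unsigned a : 3; unsigned b : 5; };

   B's containing object is the `unsigned int' allocated at byte offset 0,
   so highest_order_object_bit_offset becomes 0*8 + 32 = 32,
   highest_order_field_bit_offset becomes 3 + 5 = 8, and the emitted
   DW_AT_bit_offset is 32 - 8 = 24.  */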
20383
20384 static inline void
20385 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20386 {
20387 HOST_WIDE_INT object_offset_in_bytes;
20388 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20389 HOST_WIDE_INT bitpos_int;
20390 HOST_WIDE_INT highest_order_object_bit_offset;
20391 HOST_WIDE_INT highest_order_field_bit_offset;
20392 HOST_WIDE_INT bit_offset;
20393
20394 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20395
20396 /* Must be a field and a bit field. */
20397 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20398
20399 /* We can't yet handle bit-fields whose offsets are variable, so if we
20400 encounter such things, just return without generating any attribute
20401 whatsoever. Likewise if the size is variable or too large. */
20402 if (! tree_fits_shwi_p (bit_position (decl))
20403 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20404 return;
20405
20406 bitpos_int = int_bit_position (decl);
20407
20408 /* Note that the bit offset is always the distance (in bits) from the
20409 highest-order bit of the "containing object" to the highest-order bit of
20410 the bit-field itself. Since the "high-order end" of any object or field
20411 is different on big-endian and little-endian machines, the computation
20412 below must take account of these differences. */
20413 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20414 highest_order_field_bit_offset = bitpos_int;
20415
20416 if (! BYTES_BIG_ENDIAN)
20417 {
20418 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20419 highest_order_object_bit_offset +=
20420 simple_type_size_in_bits (original_type);
20421 }
20422
20423 bit_offset
20424 = (! BYTES_BIG_ENDIAN
20425 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20426 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20427
20428 if (bit_offset < 0)
20429 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20430 else
20431 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20432 }
20433
20434 /* For a FIELD_DECL node which represents a bit field, output an attribute
20435 which specifies the length in bits of the given field. */
20436
20437 static inline void
20438 add_bit_size_attribute (dw_die_ref die, tree decl)
20439 {
20440 /* Must be a field and a bit field. */
20441 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20442 && DECL_BIT_FIELD_TYPE (decl));
20443
20444 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20445 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20446 }
20447
20448 /* If the compiled language is ANSI C, then add a 'prototyped'
20449 attribute if argument types are given for the parameters of a function. */
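
/* For example (C source, purely illustrative), `int f (void);' is a
   prototype and receives DW_AT_prototyped, whereas the old-style
   declaration `int f ();' is not a prototype and receives no such
   attribute.  */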
20450
20451 static inline void
20452 add_prototyped_attribute (dw_die_ref die, tree func_type)
20453 {
20454 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20455 {
20456 case DW_LANG_C:
20457 case DW_LANG_C89:
20458 case DW_LANG_C99:
20459 case DW_LANG_C11:
20460 case DW_LANG_ObjC:
20461 if (prototype_p (func_type))
20462 add_AT_flag (die, DW_AT_prototyped, 1);
20463 break;
20464 default:
20465 break;
20466 }
20467 }
20468
20469 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20470 by looking in the type declaration, the object declaration equate table or
20471 the block mapping. */
20472
20473 static inline dw_die_ref
20474 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20475 {
20476 dw_die_ref origin_die = NULL;
20477
20478 if (DECL_P (origin))
20479 {
20480 dw_die_ref c;
20481 origin_die = lookup_decl_die (origin);
20482 /* "Unwrap" the decls DIE which we put in the imported unit context.
20483 We are looking for the abstract copy here. */
20484 if (in_lto_p
20485 && origin_die
20486 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20487 /* ??? Identify this better. */
20488 && c->with_offset)
20489 origin_die = c;
20490 }
20491 else if (TYPE_P (origin))
20492 origin_die = lookup_type_die (origin);
20493 else if (TREE_CODE (origin) == BLOCK)
20494 origin_die = BLOCK_DIE (origin);
20495
20496 /* XXX: Functions that are never lowered don't always have correct block
20497 trees (in the case of java they simply have no block tree; the same can
20498 be true in some other languages). For these functions, there is nothing we can really do to
20499 output correct debug info for inlined functions in all cases. Rather
20500 than die, we'll just produce deficient debug info now, in that we will
20501 have variables without a proper abstract origin. In the future, when all
20502 functions are lowered, we should re-add a gcc_assert (origin_die)
20503 here. */
20504
20505 if (origin_die)
20506 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20507 return origin_die;
20508 }
20509
20510 /* We do not currently support the pure_virtual attribute. */
20511
20512 static inline void
20513 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20514 {
20515 if (DECL_VINDEX (func_decl))
20516 {
20517 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20518
20519 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20520 add_AT_loc (die, DW_AT_vtable_elem_location,
20521 new_loc_descr (DW_OP_constu,
20522 tree_to_shwi (DECL_VINDEX (func_decl)),
20523 0));
20524
20525 /* GNU extension: Record what type this method came from originally. */
20526 if (debug_info_level > DINFO_LEVEL_TERSE
20527 && DECL_CONTEXT (func_decl))
20528 add_AT_die_ref (die, DW_AT_containing_type,
20529 lookup_type_die (DECL_CONTEXT (func_decl)));
20530 }
20531 }
20532 \f
20533 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20534 given decl. This was a vendor extension until DWARF 4
20535 standardized it. */
20536
20537 static void
20538 add_linkage_attr (dw_die_ref die, tree decl)
20539 {
20540 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20541
20542 /* Mimic what assemble_name_raw does with a leading '*'. */
20543 if (name[0] == '*')
20544 name = &name[1];
20545
20546 if (dwarf_version >= 4)
20547 add_AT_string (die, DW_AT_linkage_name, name);
20548 else
20549 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20550 }
20551
20552 /* Add source coordinate attributes for the given decl. */
20553
20554 static void
20555 add_src_coords_attributes (dw_die_ref die, tree decl)
20556 {
20557 expanded_location s;
20558
20559 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20560 return;
20561 s = expand_location (DECL_SOURCE_LOCATION (decl));
20562 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20563 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20564 if (debug_column_info && s.column)
20565 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20566 }
20567
20568 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20569
20570 static void
20571 add_linkage_name_raw (dw_die_ref die, tree decl)
20572 {
20573 /* Defer until we have an assembler name set. */
20574 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20575 {
20576 limbo_die_node *asm_name;
20577
20578 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20579 asm_name->die = die;
20580 asm_name->created_for = decl;
20581 asm_name->next = deferred_asm_name;
20582 deferred_asm_name = asm_name;
20583 }
20584 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20585 add_linkage_attr (die, decl);
20586 }
20587
20588 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20589
20590 static void
20591 add_linkage_name (dw_die_ref die, tree decl)
20592 {
20593 if (debug_info_level > DINFO_LEVEL_NONE
20594 && VAR_OR_FUNCTION_DECL_P (decl)
20595 && TREE_PUBLIC (decl)
20596 && !(VAR_P (decl) && DECL_REGISTER (decl))
20597 && die->die_tag != DW_TAG_member)
20598 add_linkage_name_raw (die, decl);
20599 }
20600
20601 /* Add a DW_AT_name attribute and source coordinate attribute for the
20602 given decl, but only if it actually has a name. */
20603
20604 static void
20605 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20606 bool no_linkage_name)
20607 {
20608 tree decl_name;
20609
20610 decl_name = DECL_NAME (decl);
20611 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20612 {
20613 const char *name = dwarf2_name (decl, 0);
20614 if (name)
20615 add_name_attribute (die, name);
20616 if (! DECL_ARTIFICIAL (decl))
20617 add_src_coords_attributes (die, decl);
20618
20619 if (!no_linkage_name)
20620 add_linkage_name (die, decl);
20621 }
20622
20623 #ifdef VMS_DEBUGGING_INFO
20624 /* Get the function's name, as described by its RTL. This may be different
20625 from the DECL_NAME name used in the source file. */
20626 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20627 {
20628 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20629 XEXP (DECL_RTL (decl), 0), false);
20630 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20631 }
20632 #endif /* VMS_DEBUGGING_INFO */
20633 }
20634
20635 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20636
20637 static void
20638 add_discr_value (dw_die_ref die, dw_discr_value *value)
20639 {
20640 dw_attr_node attr;
20641
20642 attr.dw_attr = DW_AT_discr_value;
20643 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20644 attr.dw_attr_val.val_entry = NULL;
20645 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20646 if (value->pos)
20647 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20648 else
20649 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20650 add_dwarf_attr (die, &attr);
20651 }
20652
20653 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20654
20655 static void
20656 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20657 {
20658 dw_attr_node attr;
20659
20660 attr.dw_attr = DW_AT_discr_list;
20661 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20662 attr.dw_attr_val.val_entry = NULL;
20663 attr.dw_attr_val.v.val_discr_list = discr_list;
20664 add_dwarf_attr (die, &attr);
20665 }
20666
20667 static inline dw_discr_list_ref
20668 AT_discr_list (dw_attr_node *attr)
20669 {
20670 return attr->dw_attr_val.v.val_discr_list;
20671 }
20672
20673 #ifdef VMS_DEBUGGING_INFO
20674 /* Output the debug main pointer die for VMS */
20675
20676 void
20677 dwarf2out_vms_debug_main_pointer (void)
20678 {
20679 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20680 dw_die_ref die;
20681
20682 /* Allocate the VMS debug main subprogram die. */
20683 die = new_die_raw (DW_TAG_subprogram);
20684 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20685 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20686 current_function_funcdef_no);
20687 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20688
20689 /* Make it the first child of comp_unit_die (). */
20690 die->die_parent = comp_unit_die ();
20691 if (comp_unit_die ()->die_child)
20692 {
20693 die->die_sib = comp_unit_die ()->die_child->die_sib;
20694 comp_unit_die ()->die_child->die_sib = die;
20695 }
20696 else
20697 {
20698 die->die_sib = die;
20699 comp_unit_die ()->die_child = die;
20700 }
20701 }
20702 #endif /* VMS_DEBUGGING_INFO */
20703
20704 /* Push a new declaration scope. */
20705
20706 static void
20707 push_decl_scope (tree scope)
20708 {
20709 vec_safe_push (decl_scope_table, scope);
20710 }
20711
20712 /* Pop a declaration scope. */
20713
20714 static inline void
20715 pop_decl_scope (void)
20716 {
20717 decl_scope_table->pop ();
20718 }
20719
20720 /* walk_tree helper function for uses_local_type, below. */
20721
20722 static tree
20723 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20724 {
20725 if (!TYPE_P (*tp))
20726 *walk_subtrees = 0;
20727 else
20728 {
20729 tree name = TYPE_NAME (*tp);
20730 if (name && DECL_P (name) && decl_function_context (name))
20731 return *tp;
20732 }
20733 return NULL_TREE;
20734 }
20735
20736 /* If TYPE involves a function-local type (including a local typedef to a
20737 non-local type), returns that type; otherwise returns NULL_TREE. */
20738
20739 static tree
20740 uses_local_type (tree type)
20741 {
20742 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20743 return used;
20744 }
20745
20746 /* Return the DIE for the scope that immediately contains this type.
20747 Non-named types that do not involve a function-local type get global
20748 scope. Named types nested in namespaces or other types get their
20749 containing scope. All other types (i.e. function-local named types) get
20750 the current active scope. */
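
/* As an illustrative sketch, for C++ source such as

       struct Outer { struct Inner { int i; } x; };
       void f () { struct Local { int j; } v; }

   Inner's DIE is placed under Outer's DIE (its containing scope), while
   Local's DIE goes under the DIE for f, or a lexical block inside it,
   i.e. the current active scope.  */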
20751
20752 static dw_die_ref
20753 scope_die_for (tree t, dw_die_ref context_die)
20754 {
20755 dw_die_ref scope_die = NULL;
20756 tree containing_scope;
20757
20758 /* Non-types always go in the current scope. */
20759 gcc_assert (TYPE_P (t));
20760
20761 /* Use the scope of the typedef, rather than the scope of the type
20762 it refers to. */
20763 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20764 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20765 else
20766 containing_scope = TYPE_CONTEXT (t);
20767
20768 /* Use the containing namespace if there is one. */
20769 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20770 {
20771 if (context_die == lookup_decl_die (containing_scope))
20772 /* OK */;
20773 else if (debug_info_level > DINFO_LEVEL_TERSE)
20774 context_die = get_context_die (containing_scope);
20775 else
20776 containing_scope = NULL_TREE;
20777 }
20778
20779 /* Ignore function type "scopes" from the C frontend. They mean that
20780 a tagged type is local to a parmlist of a function declarator, but
20781 that isn't useful to DWARF. */
20782 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20783 containing_scope = NULL_TREE;
20784
20785 if (SCOPE_FILE_SCOPE_P (containing_scope))
20786 {
20787 /* If T uses a local type keep it local as well, to avoid references
20788 to function-local DIEs from outside the function. */
20789 if (current_function_decl && uses_local_type (t))
20790 scope_die = context_die;
20791 else
20792 scope_die = comp_unit_die ();
20793 }
20794 else if (TYPE_P (containing_scope))
20795 {
20796 /* For types, we can just look up the appropriate DIE. */
20797 if (debug_info_level > DINFO_LEVEL_TERSE)
20798 scope_die = get_context_die (containing_scope);
20799 else
20800 {
20801 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20802 if (scope_die == NULL)
20803 scope_die = comp_unit_die ();
20804 }
20805 }
20806 else
20807 scope_die = context_die;
20808
20809 return scope_die;
20810 }
20811
20812 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20813
20814 static inline int
20815 local_scope_p (dw_die_ref context_die)
20816 {
20817 for (; context_die; context_die = context_die->die_parent)
20818 if (context_die->die_tag == DW_TAG_inlined_subroutine
20819 || context_die->die_tag == DW_TAG_subprogram)
20820 return 1;
20821
20822 return 0;
20823 }
20824
20825 /* Returns nonzero if CONTEXT_DIE is a class. */
20826
20827 static inline int
20828 class_scope_p (dw_die_ref context_die)
20829 {
20830 return (context_die
20831 && (context_die->die_tag == DW_TAG_structure_type
20832 || context_die->die_tag == DW_TAG_class_type
20833 || context_die->die_tag == DW_TAG_interface_type
20834 || context_die->die_tag == DW_TAG_union_type));
20835 }
20836
20837 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20838 whether or not to treat a DIE in this context as a declaration. */
20839
20840 static inline int
20841 class_or_namespace_scope_p (dw_die_ref context_die)
20842 {
20843 return (class_scope_p (context_die)
20844 || (context_die && context_die->die_tag == DW_TAG_namespace));
20845 }
20846
20847 /* Many forms of DIEs require a "type description" attribute. This
20848 routine locates the proper "type descriptor" die for the type given
20849 by 'type' plus any additional qualifiers given by 'cv_quals', and
20850 adds a DW_AT_type attribute below the given die. */
20851
20852 static void
20853 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20854 bool reverse, dw_die_ref context_die)
20855 {
20856 enum tree_code code = TREE_CODE (type);
20857 dw_die_ref type_die = NULL;
20858
20859 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20860 or fixed-point type, use the inner type. This is because we have no
20861 support for unnamed types in base_type_die. This can happen if this is
20862 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
20863 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20864 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20865 type = TREE_TYPE (type), code = TREE_CODE (type);
20866
20867 if (code == ERROR_MARK
20868 /* Handle a special case. For functions whose return type is void, we
20869 generate *no* type attribute. (Note that no object may have type
20870 `void', so this only applies to function return types). */
20871 || code == VOID_TYPE)
20872 return;
20873
20874 type_die = modified_type_die (type,
20875 cv_quals | TYPE_QUALS (type),
20876 reverse,
20877 context_die);
20878
20879 if (type_die != NULL)
20880 add_AT_die_ref (object_die, DW_AT_type, type_die);
20881 }
20882
20883 /* Given an object die, add the calling convention attribute for the
20884 function call type. */
20885 static void
20886 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20887 {
20888 enum dwarf_calling_convention value = DW_CC_normal;
20889
20890 value = ((enum dwarf_calling_convention)
20891 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20892
20893 if (is_fortran ()
20894 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
20895 {
20896 /* DWARF 2 doesn't provide a way to identify a program's source-level
20897 entry point. DW_AT_calling_convention attributes are only meant
20898 to describe functions' calling conventions. However, lacking a
20899 better way to signal the Fortran main program, we used this for
20900 a long time, following existing custom. Now, DWARF 4 has
20901 DW_AT_main_subprogram, which we add below, but some tools still
20902 rely on the old way, which we thus keep. */
20903 value = DW_CC_program;
20904
20905 if (dwarf_version >= 4 || !dwarf_strict)
20906 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20907 }
20908
20909 /* Only add the attribute if the backend requests it and the value
20910 is not DW_CC_normal. */
20911 if (value && (value != DW_CC_normal))
20912 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20913 }
20914
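/* Illustrative sketch (editorial addition, not from the GCC sources):
   for a Fortran main program such as

     program hello
     end program hello

   gfortran's assembler name for the entry point is MAIN__, so the
   subprogram DIE would be expected to carry
   DW_AT_calling_convention = DW_CC_program and, with DWARF 4 or above
   (or when not in strict-DWARF mode), DW_AT_main_subprogram as well.  */
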
20915 /* Given a tree pointer to a struct, class, union, or enum type node, return
20916 a pointer to the (string) tag name for the given type, or zero if the type
20917 was declared without a tag. */
20918
20919 static const char *
20920 type_tag (const_tree type)
20921 {
20922 const char *name = 0;
20923
20924 if (TYPE_NAME (type) != 0)
20925 {
20926 tree t = 0;
20927
20928 /* Find the IDENTIFIER_NODE for the type name. */
20929 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20930 && !TYPE_NAMELESS (type))
20931 t = TYPE_NAME (type);
20932
20933 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20934 a TYPE_DECL node, regardless of whether or not a `typedef' was
20935 involved. */
20936 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20937 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20938 {
20939 /* We want to be extra verbose. Don't call dwarf_name if
20940 DECL_NAME isn't set. The default hook for decl_printable_name
20941 doesn't like that, and in this context it's correct to return
20942 0, instead of "<anonymous>" or the like. */
20943 if (DECL_NAME (TYPE_NAME (type))
20944 && !DECL_NAMELESS (TYPE_NAME (type)))
20945 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20946 }
20947
20948 /* Now get the name as a string, or invent one. */
20949 if (!name && t != 0)
20950 name = IDENTIFIER_POINTER (t);
20951 }
20952
20953 return (name == 0 || *name == '\0') ? 0 : name;
20954 }
20955
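/* Illustrative sketch (editorial addition, not from the GCC sources):

     struct point { int x, y; };    -> type_tag returns "point"
     struct { int x, y; } anon;     -> declared without a tag, so type_tag
                                       returns 0 and the type DIE gets no
                                       name from this routine.  */
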
20956 /* Return the type associated with a data member, make a special check
20957 for bit field types. */
20958
20959 static inline tree
20960 member_declared_type (const_tree member)
20961 {
20962 return (DECL_BIT_FIELD_TYPE (member)
20963 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20964 }
20965
20966 /* Get the decl's label, as described by its RTL. This may be different
20967 from the DECL_NAME name used in the source file. */
20968
20969 #if 0
20970 static const char *
20971 decl_start_label (tree decl)
20972 {
20973 rtx x;
20974 const char *fnname;
20975
20976 x = DECL_RTL (decl);
20977 gcc_assert (MEM_P (x));
20978
20979 x = XEXP (x, 0);
20980 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20981
20982 fnname = XSTR (x, 0);
20983 return fnname;
20984 }
20985 #endif
20986 \f
20987 /* For variable-length arrays that have been previously generated, but
20988 may be incomplete due to missing subscript info, fill the subscript
20989 info. Return TRUE if this is one of those cases. */
20990 static bool
20991 fill_variable_array_bounds (tree type)
20992 {
20993 if (TREE_ASM_WRITTEN (type)
20994 && TREE_CODE (type) == ARRAY_TYPE
20995 && variably_modified_type_p (type, NULL))
20996 {
20997 dw_die_ref array_die = lookup_type_die (type);
20998 if (!array_die)
20999 return false;
21000 add_subscript_info (array_die, type, !is_ada ());
21001 return true;
21002 }
21003 return false;
21004 }
21005
21006 /* These routines generate the internal representation of the DIE's for
21007 the compilation unit. Debugging information is collected by walking
21008 the declaration trees passed in from dwarf2out_decl(). */
21009
21010 static void
21011 gen_array_type_die (tree type, dw_die_ref context_die)
21012 {
21013 dw_die_ref array_die;
21014
21015 /* GNU compilers represent multidimensional array types as sequences of one
21016 dimensional array types whose element types are themselves array types.
21017 We sometimes squish that down to a single array_type DIE with multiple
21018 subscripts in the Dwarf debugging info. The draft Dwarf specification
21019 says that we are allowed to do this kind of compression in C, because
21020 there is no difference between an array of arrays and a multidimensional
21021 array. We don't do this for Ada to remain as close as possible to the
21022 actual representation, which is especially important given the language's
21023 flexibility with respect to arrays of variable size. */
21024
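/* Illustrative sketch (editorial addition, not from the GCC sources):
   for C code such as 'int m[2][3];' the collapsed output would look
   roughly like

     DW_TAG_array_type  (element type: int)
       DW_TAG_subrange_type  DW_AT_upper_bound 1
       DW_TAG_subrange_type  DW_AT_upper_bound 2

   whereas for Ada nested array types are not collapsed and each level
   keeps its own array type DIE.  */
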
21025 bool collapse_nested_arrays = !is_ada ();
21026
21027 if (fill_variable_array_bounds (type))
21028 return;
21029
21030 dw_die_ref scope_die = scope_die_for (type, context_die);
21031 tree element_type;
21032
21033 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21034 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21035 if (TYPE_STRING_FLAG (type)
21036 && TREE_CODE (type) == ARRAY_TYPE
21037 && is_fortran ()
21038 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21039 {
21040 HOST_WIDE_INT size;
21041
21042 array_die = new_die (DW_TAG_string_type, scope_die, type);
21043 add_name_attribute (array_die, type_tag (type));
21044 equate_type_number_to_die (type, array_die);
21045 size = int_size_in_bytes (type);
21046 if (size >= 0)
21047 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21048 /* ??? We can't annotate types late, but for LTO we may not
21049 generate a location early either (gfortran.dg/save_6.f90). */
21050 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21051 && TYPE_DOMAIN (type) != NULL_TREE
21052 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21053 {
21054 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21055 tree rszdecl = szdecl;
21056
21057 size = int_size_in_bytes (TREE_TYPE (szdecl));
21058 if (!DECL_P (szdecl))
21059 {
21060 if (TREE_CODE (szdecl) == INDIRECT_REF
21061 && DECL_P (TREE_OPERAND (szdecl, 0)))
21062 {
21063 rszdecl = TREE_OPERAND (szdecl, 0);
21064 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21065 != DWARF2_ADDR_SIZE)
21066 size = 0;
21067 }
21068 else
21069 size = 0;
21070 }
21071 if (size > 0)
21072 {
21073 dw_loc_list_ref loc
21074 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21075 NULL);
21076 if (loc)
21077 {
21078 add_AT_location_description (array_die, DW_AT_string_length,
21079 loc);
21080 if (size != DWARF2_ADDR_SIZE)
21081 add_AT_unsigned (array_die, dwarf_version >= 5
21082 ? DW_AT_string_length_byte_size
21083 : DW_AT_byte_size, size);
21084 }
21085 }
21086 }
21087 return;
21088 }
21089
21090 array_die = new_die (DW_TAG_array_type, scope_die, type);
21091 add_name_attribute (array_die, type_tag (type));
21092 equate_type_number_to_die (type, array_die);
21093
21094 if (TREE_CODE (type) == VECTOR_TYPE)
21095 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21096
21097 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21098 if (is_fortran ()
21099 && TREE_CODE (type) == ARRAY_TYPE
21100 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21101 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21102 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21103
21104 #if 0
21105 /* We default the array ordering. Debuggers will probably do the right
21106 things even if DW_AT_ordering is not present. It's not even an issue
21107 until we start to get into multidimensional arrays anyway. If a debugger
21108 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21109 then we'll have to put the DW_AT_ordering attribute back in. (But if
21110 and when we find out that we need to put these in, we will only do so
21111 for multidimensional arrays.) */
21112 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21113 #endif
21114
21115 if (TREE_CODE (type) == VECTOR_TYPE)
21116 {
21117 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21118 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21119 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21120 add_bound_info (subrange_die, DW_AT_upper_bound,
21121 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21122 }
21123 else
21124 add_subscript_info (array_die, type, collapse_nested_arrays);
21125
21126 /* Add representation of the type of the elements of this array type and
21127 emit the corresponding DIE if we haven't done it already. */
21128 element_type = TREE_TYPE (type);
21129 if (collapse_nested_arrays)
21130 while (TREE_CODE (element_type) == ARRAY_TYPE)
21131 {
21132 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21133 break;
21134 element_type = TREE_TYPE (element_type);
21135 }
21136
21137 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21138 TREE_CODE (type) == ARRAY_TYPE
21139 && TYPE_REVERSE_STORAGE_ORDER (type),
21140 context_die);
21141
21142 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21143 if (TYPE_ARTIFICIAL (type))
21144 add_AT_flag (array_die, DW_AT_artificial, 1);
21145
21146 if (get_AT (array_die, DW_AT_name))
21147 add_pubtype (type, array_die);
21148
21149 add_alignment_attribute (array_die, type);
21150 }
21151
21152 /* This routine generates the DIE for an array with a hidden descriptor;
21153 the details are filled into *info by a langhook. */
21154
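/* Illustrative sketch (editorial addition, not from the GCC sources):
   for a Fortran allocatable array such as 'real, allocatable :: a(:,:)'
   the langhook describes the array descriptor, and the resulting
   DW_TAG_array_type would typically carry DW_AT_data_location and
   DW_AT_allocated expressions that read the descriptor, plus one
   DW_TAG_subrange_type child per dimension whose bounds are likewise
   computed from the descriptor fields.  */
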
21155 static void
21156 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21157 dw_die_ref context_die)
21158 {
21159 const dw_die_ref scope_die = scope_die_for (type, context_die);
21160 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21161 struct loc_descr_context context = { type, info->base_decl, NULL,
21162 false, false };
21163 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21164 int dim;
21165
21166 add_name_attribute (array_die, type_tag (type));
21167 equate_type_number_to_die (type, array_die);
21168
21169 if (info->ndimensions > 1)
21170 switch (info->ordering)
21171 {
21172 case array_descr_ordering_row_major:
21173 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21174 break;
21175 case array_descr_ordering_column_major:
21176 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21177 break;
21178 default:
21179 break;
21180 }
21181
21182 if (dwarf_version >= 3 || !dwarf_strict)
21183 {
21184 if (info->data_location)
21185 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21186 dw_scalar_form_exprloc, &context);
21187 if (info->associated)
21188 add_scalar_info (array_die, DW_AT_associated, info->associated,
21189 dw_scalar_form_constant
21190 | dw_scalar_form_exprloc
21191 | dw_scalar_form_reference, &context);
21192 if (info->allocated)
21193 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21194 dw_scalar_form_constant
21195 | dw_scalar_form_exprloc
21196 | dw_scalar_form_reference, &context);
21197 if (info->stride)
21198 {
21199 const enum dwarf_attribute attr
21200 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21201 const int forms
21202 = (info->stride_in_bits)
21203 ? dw_scalar_form_constant
21204 : (dw_scalar_form_constant
21205 | dw_scalar_form_exprloc
21206 | dw_scalar_form_reference);
21207
21208 add_scalar_info (array_die, attr, info->stride, forms, &context);
21209 }
21210 }
21211 if (dwarf_version >= 5)
21212 {
21213 if (info->rank)
21214 {
21215 add_scalar_info (array_die, DW_AT_rank, info->rank,
21216 dw_scalar_form_constant
21217 | dw_scalar_form_exprloc, &context);
21218 subrange_tag = DW_TAG_generic_subrange;
21219 context.placeholder_arg = true;
21220 }
21221 }
21222
21223 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21224
21225 for (dim = 0; dim < info->ndimensions; dim++)
21226 {
21227 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21228
21229 if (info->dimen[dim].bounds_type)
21230 add_type_attribute (subrange_die,
21231 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21232 false, context_die);
21233 if (info->dimen[dim].lower_bound)
21234 add_bound_info (subrange_die, DW_AT_lower_bound,
21235 info->dimen[dim].lower_bound, &context);
21236 if (info->dimen[dim].upper_bound)
21237 add_bound_info (subrange_die, DW_AT_upper_bound,
21238 info->dimen[dim].upper_bound, &context);
21239 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21240 add_scalar_info (subrange_die, DW_AT_byte_stride,
21241 info->dimen[dim].stride,
21242 dw_scalar_form_constant
21243 | dw_scalar_form_exprloc
21244 | dw_scalar_form_reference,
21245 &context);
21246 }
21247
21248 gen_type_die (info->element_type, context_die);
21249 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21250 TREE_CODE (type) == ARRAY_TYPE
21251 && TYPE_REVERSE_STORAGE_ORDER (type),
21252 context_die);
21253
21254 if (get_AT (array_die, DW_AT_name))
21255 add_pubtype (type, array_die);
21256
21257 add_alignment_attribute (array_die, type);
21258 }
21259
21260 #if 0
21261 static void
21262 gen_entry_point_die (tree decl, dw_die_ref context_die)
21263 {
21264 tree origin = decl_ultimate_origin (decl);
21265 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21266
21267 if (origin != NULL)
21268 add_abstract_origin_attribute (decl_die, origin);
21269 else
21270 {
21271 add_name_and_src_coords_attributes (decl_die, decl);
21272 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21273 TYPE_UNQUALIFIED, false, context_die);
21274 }
21275
21276 if (DECL_ABSTRACT_P (decl))
21277 equate_decl_number_to_die (decl, decl_die);
21278 else
21279 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21280 }
21281 #endif
21282
21283 /* Walk through the list of incomplete types again, trying once more to
21284 emit full debugging info for them. */
21285
21286 static void
21287 retry_incomplete_types (void)
21288 {
21289 set_early_dwarf s;
21290 int i;
21291
21292 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21293 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21294 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21295 vec_safe_truncate (incomplete_types, 0);
21296 }
21297
21298 /* Determine what tag to use for a record type. */
21299
21300 static enum dwarf_tag
21301 record_type_tag (tree type)
21302 {
21303 if (! lang_hooks.types.classify_record)
21304 return DW_TAG_structure_type;
21305
21306 switch (lang_hooks.types.classify_record (type))
21307 {
21308 case RECORD_IS_STRUCT:
21309 return DW_TAG_structure_type;
21310
21311 case RECORD_IS_CLASS:
21312 return DW_TAG_class_type;
21313
21314 case RECORD_IS_INTERFACE:
21315 if (dwarf_version >= 3 || !dwarf_strict)
21316 return DW_TAG_interface_type;
21317 return DW_TAG_structure_type;
21318
21319 default:
21320 gcc_unreachable ();
21321 }
21322 }
21323
21324 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21325 also include all of the information about the enumeration values. Each
21326 enumerated type name/value is listed as a child of the enumerated type
21327 DIE. */
21328
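/* Illustrative sketch (editorial addition, not from the GCC sources):
   for 'enum color { RED, GREEN = 5 };' the expected shape is roughly

     DW_TAG_enumeration_type "color"  (DW_AT_byte_size, DW_AT_encoding)
       DW_TAG_enumerator "RED"    DW_AT_const_value 0
       DW_TAG_enumerator "GREEN"  DW_AT_const_value 5  */
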
21329 static dw_die_ref
21330 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21331 {
21332 dw_die_ref type_die = lookup_type_die (type);
21333
21334 if (type_die == NULL)
21335 {
21336 type_die = new_die (DW_TAG_enumeration_type,
21337 scope_die_for (type, context_die), type);
21338 equate_type_number_to_die (type, type_die);
21339 add_name_attribute (type_die, type_tag (type));
21340 if (dwarf_version >= 4 || !dwarf_strict)
21341 {
21342 if (ENUM_IS_SCOPED (type))
21343 add_AT_flag (type_die, DW_AT_enum_class, 1);
21344 if (ENUM_IS_OPAQUE (type))
21345 add_AT_flag (type_die, DW_AT_declaration, 1);
21346 }
21347 if (!dwarf_strict)
21348 add_AT_unsigned (type_die, DW_AT_encoding,
21349 TYPE_UNSIGNED (type)
21350 ? DW_ATE_unsigned
21351 : DW_ATE_signed);
21352 }
21353 else if (! TYPE_SIZE (type))
21354 return type_die;
21355 else
21356 remove_AT (type_die, DW_AT_declaration);
21357
21358 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21359 given enum type is incomplete, do not generate the DW_AT_byte_size
21360 attribute or the DW_AT_element_list attribute. */
21361 if (TYPE_SIZE (type))
21362 {
21363 tree link;
21364
21365 TREE_ASM_WRITTEN (type) = 1;
21366 add_byte_size_attribute (type_die, type);
21367 add_alignment_attribute (type_die, type);
21368 if (dwarf_version >= 3 || !dwarf_strict)
21369 {
21370 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21371 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21372 context_die);
21373 }
21374 if (TYPE_STUB_DECL (type) != NULL_TREE)
21375 {
21376 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21377 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21378 }
21379
21380 /* If the first reference to this type was as the return type of an
21381 inline function, then it may not have a parent. Fix this now. */
21382 if (type_die->die_parent == NULL)
21383 add_child_die (scope_die_for (type, context_die), type_die);
21384
21385 for (link = TYPE_VALUES (type);
21386 link != NULL; link = TREE_CHAIN (link))
21387 {
21388 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21389 tree value = TREE_VALUE (link);
21390
21391 add_name_attribute (enum_die,
21392 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21393
21394 if (TREE_CODE (value) == CONST_DECL)
21395 value = DECL_INITIAL (value);
21396
21397 if (simple_type_size_in_bits (TREE_TYPE (value))
21398 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21399 {
21400 /* For constant forms created by add_AT_unsigned, DWARF
21401 consumers (GDB, elfutils, etc.) always zero-extend
21402 the value. Only when the actual value is negative
21403 do we need to use add_AT_int to generate a constant
21404 form that can represent negative values. */
21405 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21406 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21407 add_AT_unsigned (enum_die, DW_AT_const_value,
21408 (unsigned HOST_WIDE_INT) val);
21409 else
21410 add_AT_int (enum_die, DW_AT_const_value, val);
21411 }
21412 else
21413 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21414 that here. TODO: This should be re-worked to use correct
21415 signed/unsigned double tags for all cases. */
21416 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21417 }
21418
21419 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21420 if (TYPE_ARTIFICIAL (type))
21421 add_AT_flag (type_die, DW_AT_artificial, 1);
21422 }
21423 else
21424 add_AT_flag (type_die, DW_AT_declaration, 1);
21425
21426 add_pubtype (type, type_die);
21427
21428 return type_die;
21429 }
21430
21431 /* Generate a DIE to represent either a real live formal parameter decl or to
21432 represent just the type of some formal parameter position in some function
21433 type.
21434
21435 Note that this routine is a bit unusual because its argument may be a
21436 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21437 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21438 node. If it's the former then this function is being called to output a
21439 DIE to represent a formal parameter object (or some inlining thereof). If
21440 it's the latter, then this function is only being called to output a
21441 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21442 argument type of some subprogram type.
21443 If EMIT_NAME_P is true, name and source coordinate attributes
21444 are emitted. */
21445
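/* Illustrative sketch (editorial addition, not from the GCC sources):
   called with a PARM_DECL (or an inlined copy of one), this emits a
   DW_TAG_formal_parameter carrying the name, source coordinates and a
   location or constant value; called with a bare ..._TYPE node, e.g. the
   'int' parameter position in the type 'void (*)(int)', it emits only a
   nameless DW_TAG_formal_parameter carrying DW_AT_type, acting as a
   placeholder within a subroutine type.  */
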
21446 static dw_die_ref
21447 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21448 dw_die_ref context_die)
21449 {
21450 tree node_or_origin = node ? node : origin;
21451 tree ultimate_origin;
21452 dw_die_ref parm_die = NULL;
21453
21454 if (DECL_P (node_or_origin))
21455 {
21456 parm_die = lookup_decl_die (node);
21457
21458 /* If the contexts differ, we may not be talking about the same
21459 thing.
21460 ??? When in LTO the DIE parent is the "abstract" copy and the
21461 context_die is the specification "copy". But this whole block
21462 should eventually no longer be needed. */
21463 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21464 {
21465 if (!DECL_ABSTRACT_P (node))
21466 {
21467 /* This can happen when creating an inlined instance, in
21468 which case we need to create a new DIE that will get
21469 annotated with DW_AT_abstract_origin. */
21470 parm_die = NULL;
21471 }
21472 else
21473 gcc_unreachable ();
21474 }
21475
21476 if (parm_die && parm_die->die_parent == NULL)
21477 {
21478 /* Check that parm_die already has the right attributes that
21479 we would have added below. If any attributes are
21480 missing, fall through to add them. */
21481 if (! DECL_ABSTRACT_P (node_or_origin)
21482 && !get_AT (parm_die, DW_AT_location)
21483 && !get_AT (parm_die, DW_AT_const_value))
21484 /* We are missing location info, and are about to add it. */
21485 ;
21486 else
21487 {
21488 add_child_die (context_die, parm_die);
21489 return parm_die;
21490 }
21491 }
21492 }
21493
21494 /* If we have a previously generated DIE, use it, unless this is a
21495 concrete instance (origin != NULL), in which case we need a new
21496 DIE with a corresponding DW_AT_abstract_origin. */
21497 bool reusing_die;
21498 if (parm_die && origin == NULL)
21499 reusing_die = true;
21500 else
21501 {
21502 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21503 reusing_die = false;
21504 }
21505
21506 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21507 {
21508 case tcc_declaration:
21509 ultimate_origin = decl_ultimate_origin (node_or_origin);
21510 if (node || ultimate_origin)
21511 origin = ultimate_origin;
21512
21513 if (reusing_die)
21514 goto add_location;
21515
21516 if (origin != NULL)
21517 add_abstract_origin_attribute (parm_die, origin);
21518 else if (emit_name_p)
21519 add_name_and_src_coords_attributes (parm_die, node);
21520 if (origin == NULL
21521 || (! DECL_ABSTRACT_P (node_or_origin)
21522 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21523 decl_function_context
21524 (node_or_origin))))
21525 {
21526 tree type = TREE_TYPE (node_or_origin);
21527 if (decl_by_reference_p (node_or_origin))
21528 add_type_attribute (parm_die, TREE_TYPE (type),
21529 TYPE_UNQUALIFIED,
21530 false, context_die);
21531 else
21532 add_type_attribute (parm_die, type,
21533 decl_quals (node_or_origin),
21534 false, context_die);
21535 }
21536 if (origin == NULL && DECL_ARTIFICIAL (node))
21537 add_AT_flag (parm_die, DW_AT_artificial, 1);
21538 add_location:
21539 if (node && node != origin)
21540 equate_decl_number_to_die (node, parm_die);
21541 if (! DECL_ABSTRACT_P (node_or_origin))
21542 add_location_or_const_value_attribute (parm_die, node_or_origin,
21543 node == NULL);
21544
21545 break;
21546
21547 case tcc_type:
21548 /* We were called with some kind of a ..._TYPE node. */
21549 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21550 context_die);
21551 break;
21552
21553 default:
21554 gcc_unreachable ();
21555 }
21556
21557 return parm_die;
21558 }
21559
21560 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21561 children DW_TAG_formal_parameter DIEs representing the arguments of the
21562 parameter pack.
21563
21564 PARM_PACK must be a function parameter pack.
21565 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21566 must point to the subsequent arguments of the function PACK_ARG belongs to.
21567 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21568 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
21569 following the last one for which a DIE was generated. */
21570
21571 static dw_die_ref
21572 gen_formal_parameter_pack_die (tree parm_pack,
21573 tree pack_arg,
21574 dw_die_ref subr_die,
21575 tree *next_arg)
21576 {
21577 tree arg;
21578 dw_die_ref parm_pack_die;
21579
21580 gcc_assert (parm_pack
21581 && lang_hooks.function_parameter_pack_p (parm_pack)
21582 && subr_die);
21583
21584 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21585 add_src_coords_attributes (parm_pack_die, parm_pack);
21586
21587 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21588 {
21589 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21590 parm_pack))
21591 break;
21592 gen_formal_parameter_die (arg, NULL,
21593 false /* Don't emit name attribute. */,
21594 parm_pack_die);
21595 }
21596 if (next_arg)
21597 *next_arg = arg;
21598 return parm_pack_die;
21599 }
21600
21601 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21602 at the end of an (ANSI prototyped) formal parameters list. */
21603
21604 static void
21605 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21606 {
21607 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21608 }
21609
21610 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21611 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21612 parameters as specified in some function type specification (except for
21613 those which appear as part of a function *definition*). */
21614
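/* Illustrative sketch (editorial addition, not from the GCC sources):
   for a declaration like 'int f (int, char *, ...);' this would emit two
   nameless DW_TAG_formal_parameter DIEs (typed int and char *) followed
   by a DW_TAG_unspecified_parameters DIE for the trailing ellipsis,
   while 'int g (void)' gets neither.  */
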
21615 static void
21616 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21617 {
21618 tree link;
21619 tree formal_type = NULL;
21620 tree first_parm_type;
21621 tree arg;
21622
21623 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21624 {
21625 arg = DECL_ARGUMENTS (function_or_method_type);
21626 function_or_method_type = TREE_TYPE (function_or_method_type);
21627 }
21628 else
21629 arg = NULL_TREE;
21630
21631 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21632
21633 /* Make our first pass over the list of formal parameter types and output a
21634 DW_TAG_formal_parameter DIE for each one. */
21635 for (link = first_parm_type; link; )
21636 {
21637 dw_die_ref parm_die;
21638
21639 formal_type = TREE_VALUE (link);
21640 if (formal_type == void_type_node)
21641 break;
21642
21643 /* Output a (nameless) DIE to represent the formal parameter itself. */
21644 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21645 {
21646 parm_die = gen_formal_parameter_die (formal_type, NULL,
21647 true /* Emit name attribute. */,
21648 context_die);
21649 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21650 && link == first_parm_type)
21651 {
21652 add_AT_flag (parm_die, DW_AT_artificial, 1);
21653 if (dwarf_version >= 3 || !dwarf_strict)
21654 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21655 }
21656 else if (arg && DECL_ARTIFICIAL (arg))
21657 add_AT_flag (parm_die, DW_AT_artificial, 1);
21658 }
21659
21660 link = TREE_CHAIN (link);
21661 if (arg)
21662 arg = DECL_CHAIN (arg);
21663 }
21664
21665 /* If this function type has an ellipsis, add a
21666 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21667 if (formal_type != void_type_node)
21668 gen_unspecified_parameters_die (function_or_method_type, context_die);
21669
21670 /* Make our second (and final) pass over the list of formal parameter types
21671 and output DIEs to represent those types (as necessary). */
21672 for (link = TYPE_ARG_TYPES (function_or_method_type);
21673 link && TREE_VALUE (link);
21674 link = TREE_CHAIN (link))
21675 gen_type_die (TREE_VALUE (link), context_die);
21676 }
21677
21678 /* We want to generate the DIE for TYPE so that we can generate the
21679 die for MEMBER, which has been defined; we will need to refer back
21680 to the member declaration nested within TYPE. If we're trying to
21681 generate minimal debug info for TYPE, processing TYPE won't do the
21682 trick; we need to attach the member declaration by hand. */
21683
21684 static void
21685 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21686 {
21687 gen_type_die (type, context_die);
21688
21689 /* If we're trying to avoid duplicate debug info, we may not have
21690 emitted the member decl for this function. Emit it now. */
21691 if (TYPE_STUB_DECL (type)
21692 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21693 && ! lookup_decl_die (member))
21694 {
21695 dw_die_ref type_die;
21696 gcc_assert (!decl_ultimate_origin (member));
21697
21698 push_decl_scope (type);
21699 type_die = lookup_type_die_strip_naming_typedef (type);
21700 if (TREE_CODE (member) == FUNCTION_DECL)
21701 gen_subprogram_die (member, type_die);
21702 else if (TREE_CODE (member) == FIELD_DECL)
21703 {
21704 /* Ignore the nameless fields that are used to skip bits but handle
21705 C++ anonymous unions and structs. */
21706 if (DECL_NAME (member) != NULL_TREE
21707 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21708 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21709 {
21710 struct vlr_context vlr_ctx = {
21711 DECL_CONTEXT (member), /* struct_type */
21712 NULL_TREE /* variant_part_offset */
21713 };
21714 gen_type_die (member_declared_type (member), type_die);
21715 gen_field_die (member, &vlr_ctx, type_die);
21716 }
21717 }
21718 else
21719 gen_variable_die (member, NULL_TREE, type_die);
21720
21721 pop_decl_scope ();
21722 }
21723 }
21724 \f
21725 /* Forward declare these functions, because they are mutually recursive
21726 with their set_block_* pairing functions. */
21727 static void set_decl_origin_self (tree);
21728
21729 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21730 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21731 that it points to the node itself, thus indicating that the node is its
21732 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21733 the given node is NULL, recursively descend the decl/block tree which
21734 it is the root of, and for each other ..._DECL or BLOCK node contained
21735 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21736 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21737 values to point to themselves. */
21738
21739 static void
21740 set_block_origin_self (tree stmt)
21741 {
21742 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21743 {
21744 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21745
21746 {
21747 tree local_decl;
21748
21749 for (local_decl = BLOCK_VARS (stmt);
21750 local_decl != NULL_TREE;
21751 local_decl = DECL_CHAIN (local_decl))
21752 /* Do not recurse on nested functions since the inlining status
21753 of parent and child can be different as per the DWARF spec. */
21754 if (TREE_CODE (local_decl) != FUNCTION_DECL
21755 && !DECL_EXTERNAL (local_decl))
21756 set_decl_origin_self (local_decl);
21757 }
21758
21759 {
21760 tree subblock;
21761
21762 for (subblock = BLOCK_SUBBLOCKS (stmt);
21763 subblock != NULL_TREE;
21764 subblock = BLOCK_CHAIN (subblock))
21765 set_block_origin_self (subblock); /* Recurse. */
21766 }
21767 }
21768 }
21769
21770 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21771 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21772 node so that it points to the node itself, thus indicating that the
21773 node represents its own (abstract) origin. Additionally, if the
21774 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21775 the decl/block tree of which the given node is the root, and for
21776 each other ..._DECL or BLOCK node contained therein whose
21777 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21778 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21779 point to themselves. */
21780
21781 static void
21782 set_decl_origin_self (tree decl)
21783 {
21784 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21785 {
21786 DECL_ABSTRACT_ORIGIN (decl) = decl;
21787 if (TREE_CODE (decl) == FUNCTION_DECL)
21788 {
21789 tree arg;
21790
21791 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21792 DECL_ABSTRACT_ORIGIN (arg) = arg;
21793 if (DECL_INITIAL (decl) != NULL_TREE
21794 && DECL_INITIAL (decl) != error_mark_node)
21795 set_block_origin_self (DECL_INITIAL (decl));
21796 }
21797 }
21798 }
21799 \f
21800 /* Mark the early DIE for DECL as the abstract instance. */
21801
21802 static void
21803 dwarf2out_abstract_function (tree decl)
21804 {
21805 dw_die_ref old_die;
21806
21807 /* Make sure we have the actual abstract inline, not a clone. */
21808 decl = DECL_ORIGIN (decl);
21809
21810 if (DECL_IGNORED_P (decl))
21811 return;
21812
21813 old_die = lookup_decl_die (decl);
21814 /* With early debug we always have an old DIE unless we are in LTO
21815 and the user did not compile with debug info but only linked with it. */
21816 if (in_lto_p && ! old_die)
21817 return;
21818 gcc_assert (old_die != NULL);
21819 if (get_AT (old_die, DW_AT_inline)
21820 || get_AT (old_die, DW_AT_abstract_origin))
21821 /* We've already generated the abstract instance. */
21822 return;
21823
21824 /* Go ahead and put DW_AT_inline on the DIE. */
21825 if (DECL_DECLARED_INLINE_P (decl))
21826 {
21827 if (cgraph_function_possibly_inlined_p (decl))
21828 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
21829 else
21830 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
21831 }
21832 else
21833 {
21834 if (cgraph_function_possibly_inlined_p (decl))
21835 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
21836 else
21837 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
21838 }
21839
21840 if (DECL_DECLARED_INLINE_P (decl)
21841 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
21842 add_AT_flag (old_die, DW_AT_artificial, 1);
21843
21844 set_decl_origin_self (decl);
21845 }
21846
21847 /* Helper function of premark_used_types() which gets called through
21848 htab_traverse.
21849
21850 Marks the DIE of the given type as perennial, so it never gets
21851 marked as unused by prune_unused_types. */
21852
21853 bool
21854 premark_used_types_helper (tree const &type, void *)
21855 {
21856 dw_die_ref die;
21857
21858 die = lookup_type_die (type);
21859 if (die != NULL)
21860 die->die_perennial_p = 1;
21861 return true;
21862 }
21863
21864 /* Helper function of premark_types_used_by_global_vars which gets called
21865 through htab_traverse.
21866
21867 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21868 marked as unused by prune_unused_types. The DIE of the type is marked
21869 only if the global variable using the type will actually be emitted. */
21870
21871 int
21872 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21873 void *)
21874 {
21875 struct types_used_by_vars_entry *entry;
21876 dw_die_ref die;
21877
21878 entry = (struct types_used_by_vars_entry *) *slot;
21879 gcc_assert (entry->type != NULL
21880 && entry->var_decl != NULL);
21881 die = lookup_type_die (entry->type);
21882 if (die)
21883 {
21884 /* Ask cgraph if the global variable really is to be emitted.
21885 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21886 varpool_node *node = varpool_node::get (entry->var_decl);
21887 if (node && node->definition)
21888 {
21889 die->die_perennial_p = 1;
21890 /* Keep the parent DIEs as well. */
21891 while ((die = die->die_parent) && die->die_perennial_p == 0)
21892 die->die_perennial_p = 1;
21893 }
21894 }
21895 return 1;
21896 }
21897
21898 /* Mark all members of used_types_hash as perennial. */
21899
21900 static void
21901 premark_used_types (struct function *fun)
21902 {
21903 if (fun && fun->used_types_hash)
21904 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21905 }
21906
21907 /* Mark all members of types_used_by_vars_hash as perennial. */
21908
21909 static void
21910 premark_types_used_by_global_vars (void)
21911 {
21912 if (types_used_by_vars_hash)
21913 types_used_by_vars_hash
21914 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21915 }
21916
21917 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21918 for CA_LOC call arg loc node. */
21919
21920 static dw_die_ref
21921 gen_call_site_die (tree decl, dw_die_ref subr_die,
21922 struct call_arg_loc_node *ca_loc)
21923 {
21924 dw_die_ref stmt_die = NULL, die;
21925 tree block = ca_loc->block;
21926
21927 while (block
21928 && block != DECL_INITIAL (decl)
21929 && TREE_CODE (block) == BLOCK)
21930 {
21931 stmt_die = BLOCK_DIE (block);
21932 if (stmt_die)
21933 break;
21934 block = BLOCK_SUPERCONTEXT (block);
21935 }
21936 if (stmt_die == NULL)
21937 stmt_die = subr_die;
21938 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21939 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21940 if (ca_loc->tail_call_p)
21941 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21942 if (ca_loc->symbol_ref)
21943 {
21944 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21945 if (tdie)
21946 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21947 else
21948 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21949 false);
21950 }
21951 return die;
21952 }
21953
21954 /* Generate a DIE to represent a declared function (either file-scope or
21955 block-local). */
21956
21957 static void
21958 gen_subprogram_die (tree decl, dw_die_ref context_die)
21959 {
21960 tree origin = decl_ultimate_origin (decl);
21961 dw_die_ref subr_die;
21962 dw_die_ref old_die = lookup_decl_die (decl);
21963
21964 /* This function gets called multiple times for different stages of
21965 the debug process. For example, for func() in this code:
21966
21967 namespace S
21968 {
21969 void func() { ... }
21970 }
21971
21972 ...we get called 4 times. Twice in early debug and twice in
21973 late debug:
21974
21975 Early debug
21976 -----------
21977
21978 1. Once while generating func() within the namespace. This is
21979 the declaration. The declaration bit below is set, as the
21980 context is the namespace.
21981
21982 A new DIE will be generated with DW_AT_declaration set.
21983
21984 2. Once for func() itself. This is the specification. The
21985 declaration bit below is clear as the context is the CU.
21986
21987 We will use the cached DIE from (1) to create a new DIE with
21988 DW_AT_specification pointing to the declaration in (1).
21989
21990 Late debug via rest_of_handle_final()
21991 -------------------------------------
21992
21993 3. Once while generating func() within the namespace. This is also the
21994 declaration, as in (1), but this time we will early exit below
21995 as we have a cached DIE and a declaration needs no additional
21996 annotations (no locations), as the source declaration line
21997 info is enough.
21998
21999 4. Once for func() itself. As in (2), this is the specification,
22000 but this time we will re-use the cached DIE, and just annotate
22001 it with the location information that should now be available.
22002
22003 For something without namespaces, but with abstract instances, we
22004 are also called a multiple times:
22005
22006 class Base
22007 {
22008 public:
22009 Base (); // constructor declaration (1)
22010 };
22011
22012 Base::Base () { } // constructor specification (2)
22013
22014 Early debug
22015 -----------
22016
22017 1. Once for the Base() constructor by virtue of it being a
22018 member of the Base class. This is done via
22019 rest_of_type_compilation.
22020
22021 This is a declaration, so a new DIE will be created with
22022 DW_AT_declaration.
22023
22024 2. Once for the Base() constructor definition, but this time
22025 while generating the abstract instance of the base
22026 constructor (__base_ctor) which is being generated via early
22027 debug of reachable functions.
22028
22029 Even though we have a cached version of the declaration (1),
22030 we will create a DW_AT_specification of the declaration DIE
22031 in (1).
22032
22033 3. Once for the __base_ctor itself, but this time, we generate
22034 a DW_AT_abstract_origin version of the DW_AT_specification in
22035 (2).
22036
22037 Late debug via rest_of_handle_final
22038 -----------------------------------
22039
22040 4. One final time for the __base_ctor (which will have a cached
22041 DIE with DW_AT_abstract_origin created in (3)). This time,
22042 we will just annotate the location information now
22043 available.
22044 */
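/* Illustrative sketch (editorial addition, not from the GCC sources) of
   the DIEs produced for the namespace example above:

     DW_TAG_namespace "S"
       DW_TAG_subprogram "func"   (DW_AT_declaration, from (1)/(3))
     DW_TAG_subprogram            (DW_AT_specification -> "func", from (2);
                                   annotated in (4) with DW_AT_low_pc,
                                   DW_AT_high_pc and DW_AT_frame_base)  */
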
22045 int declaration = (current_function_decl != decl
22046 || class_or_namespace_scope_p (context_die));
22047
22048 /* A declaration that has been previously dumped needs no
22049 additional information. */
22050 if (old_die && declaration)
22051 return;
22052
22053 /* Now that the C++ front end lazily declares artificial member fns, we
22054 might need to retrofit the declaration into its class. */
22055 if (!declaration && !origin && !old_die
22056 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22057 && !class_or_namespace_scope_p (context_die)
22058 && debug_info_level > DINFO_LEVEL_TERSE)
22059 old_die = force_decl_die (decl);
22060
22061 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22062 if (origin != NULL)
22063 {
22064 gcc_assert (!declaration || local_scope_p (context_die));
22065
22066 /* Fixup die_parent for the abstract instance of a nested
22067 inline function. */
22068 if (old_die && old_die->die_parent == NULL)
22069 add_child_die (context_die, old_die);
22070
22071 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22072 {
22073 /* If we have a DW_AT_abstract_origin we have a working
22074 cached version. */
22075 subr_die = old_die;
22076 }
22077 else
22078 {
22079 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22080 add_abstract_origin_attribute (subr_die, origin);
22081 /* This is where the actual code for a cloned function is.
22082 Let's emit linkage name attribute for it. This helps
22083 debuggers to e.g, set breakpoints into
22084 constructors/destructors when the user asks "break
22085 K::K". */
22086 add_linkage_name (subr_die, decl);
22087 }
22088 }
22089 /* A cached copy, possibly from early dwarf generation. Reuse as
22090 much as possible. */
22091 else if (old_die)
22092 {
22093 if (!get_AT_flag (old_die, DW_AT_declaration)
22094 /* We can have a normal definition following an inline one in the
22095 case of redefinition of GNU C extern inlines.
22096 It seems reasonable to use AT_specification in this case. */
22097 && !get_AT (old_die, DW_AT_inline))
22098 {
22099 /* Detect and ignore this case, where we are trying to output
22100 something we have already output. */
22101 if (get_AT (old_die, DW_AT_low_pc)
22102 || get_AT (old_die, DW_AT_ranges))
22103 return;
22104
22105 /* If we have no location information, this must be a
22106 partially generated DIE from early dwarf generation.
22107 Fall through and generate it. */
22108 }
22109
22110 /* If the definition comes from the same place as the declaration,
22111 maybe use the old DIE. We always want the DIE for this function
22112 that has the *_pc attributes to be under comp_unit_die so the
22113 debugger can find it. We also need to do this for abstract
22114 instances of inlines, since the spec requires the out-of-line copy
22115 to have the same parent. For local class methods, this doesn't
22116 apply; we just use the old DIE. */
22117 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22118 struct dwarf_file_data * file_index = lookup_filename (s.file);
22119 if ((is_cu_die (old_die->die_parent)
22120 /* This condition fixes the inconsistency/ICE with the
22121 following Fortran test (or some derivative thereof) while
22122 building libgfortran:
22123
22124 module some_m
22125 contains
22126 logical function funky (FLAG)
22127 funky = .true.
22128 end function
22129 end module
22130 */
22131 || (old_die->die_parent
22132 && old_die->die_parent->die_tag == DW_TAG_module)
22133 || context_die == NULL)
22134 && (DECL_ARTIFICIAL (decl)
22135 /* The location attributes may be in the abstract origin
22136 which in the case of LTO might be not available to
22137 look at. */
22138 || get_AT (old_die, DW_AT_abstract_origin)
22139 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22140 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22141 == (unsigned) s.line)
22142 && (!debug_column_info
22143 || s.column == 0
22144 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22145 == (unsigned) s.column)))))
22146 {
22147 subr_die = old_die;
22148
22149 /* Clear out the declaration attribute, but leave the
22150 parameters so they can be augmented with location
22151 information later. Unless this was a declaration, in
22152 which case, wipe out the nameless parameters and recreate
22153 them further down. */
22154 if (remove_AT (subr_die, DW_AT_declaration))
22155 {
22156
22157 remove_AT (subr_die, DW_AT_object_pointer);
22158 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22159 }
22160 }
22161 /* Make a specification pointing to the previously built
22162 declaration. */
22163 else
22164 {
22165 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22166 add_AT_specification (subr_die, old_die);
22167 add_pubname (decl, subr_die);
22168 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22169 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22170 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22171 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22172 if (debug_column_info
22173 && s.column
22174 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22175 != (unsigned) s.column))
22176 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22177
22178 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22179 emit the real type on the definition die. */
22180 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22181 {
22182 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22183 if (die == auto_die || die == decltype_auto_die)
22184 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22185 TYPE_UNQUALIFIED, false, context_die);
22186 }
22187
22188 /* When we process the method declaration, we haven't seen
22189 the out-of-class defaulted definition yet, so we have to
22190 recheck now. */
22191 if ((dwarf_version >= 5 || ! dwarf_strict)
22192 && !get_AT (subr_die, DW_AT_defaulted))
22193 {
22194 int defaulted
22195 = lang_hooks.decls.decl_dwarf_attribute (decl,
22196 DW_AT_defaulted);
22197 if (defaulted != -1)
22198 {
22199 /* Other values must have been handled before. */
22200 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22201 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22202 }
22203 }
22204 }
22205 }
22206 /* Create a fresh DIE for anything else. */
22207 else
22208 {
22209 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22210
22211 if (TREE_PUBLIC (decl))
22212 add_AT_flag (subr_die, DW_AT_external, 1);
22213
22214 add_name_and_src_coords_attributes (subr_die, decl);
22215 add_pubname (decl, subr_die);
22216 if (debug_info_level > DINFO_LEVEL_TERSE)
22217 {
22218 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22219 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22220 TYPE_UNQUALIFIED, false, context_die);
22221 }
22222
22223 add_pure_or_virtual_attribute (subr_die, decl);
22224 if (DECL_ARTIFICIAL (decl))
22225 add_AT_flag (subr_die, DW_AT_artificial, 1);
22226
22227 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22228 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22229
22230 add_alignment_attribute (subr_die, decl);
22231
22232 add_accessibility_attribute (subr_die, decl);
22233 }
22234
22235 /* Unless we have an existing non-declaration DIE, equate the new
22236 DIE. */
22237 if (!old_die || is_declaration_die (old_die))
22238 equate_decl_number_to_die (decl, subr_die);
22239
22240 if (declaration)
22241 {
22242 if (!old_die || !get_AT (old_die, DW_AT_inline))
22243 {
22244 add_AT_flag (subr_die, DW_AT_declaration, 1);
22245
22246 /* If this is an explicit function declaration then generate
22247 a DW_AT_explicit attribute. */
22248 if ((dwarf_version >= 3 || !dwarf_strict)
22249 && lang_hooks.decls.decl_dwarf_attribute (decl,
22250 DW_AT_explicit) == 1)
22251 add_AT_flag (subr_die, DW_AT_explicit, 1);
22252
22253 /* If this is a C++11 deleted special function member then generate
22254 a DW_AT_deleted attribute. */
22255 if ((dwarf_version >= 5 || !dwarf_strict)
22256 && lang_hooks.decls.decl_dwarf_attribute (decl,
22257 DW_AT_deleted) == 1)
22258 add_AT_flag (subr_die, DW_AT_deleted, 1);
22259
22260 /* If this is a C++11 defaulted special function member then
22261 generate a DW_AT_defaulted attribute. */
22262 if (dwarf_version >= 5 || !dwarf_strict)
22263 {
22264 int defaulted
22265 = lang_hooks.decls.decl_dwarf_attribute (decl,
22266 DW_AT_defaulted);
22267 if (defaulted != -1)
22268 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22269 }
22270
22271 /* If this is a C++11 non-static member function with & ref-qualifier
22272 then generate a DW_AT_reference attribute. */
22273 if ((dwarf_version >= 5 || !dwarf_strict)
22274 && lang_hooks.decls.decl_dwarf_attribute (decl,
22275 DW_AT_reference) == 1)
22276 add_AT_flag (subr_die, DW_AT_reference, 1);
22277
22278 /* If this is a C++11 non-static member function with &&
22279 ref-qualifier then generate a DW_AT_reference attribute. */
22280 if ((dwarf_version >= 5 || !dwarf_strict)
22281 && lang_hooks.decls.decl_dwarf_attribute (decl,
22282 DW_AT_rvalue_reference)
22283 == 1)
22284 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22285 }
22286 }
22287 /* For non DECL_EXTERNALs, if range information is available, fill
22288 the DIE with it. */
22289 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22290 {
22291 HOST_WIDE_INT cfa_fb_offset;
22292
22293 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22294
22295 if (!crtl->has_bb_partition)
22296 {
22297 dw_fde_ref fde = fun->fde;
22298 if (fde->dw_fde_begin)
22299 {
22300 /* We have already generated the labels. */
22301 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22302 fde->dw_fde_end, false);
22303 }
22304 else
22305 {
22306 /* Create start/end labels and add the range. */
22307 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22308 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22309 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22310 current_function_funcdef_no);
22311 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22312 current_function_funcdef_no);
22313 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22314 false);
22315 }
22316
22317 #if VMS_DEBUGGING_INFO
22318 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22319 Section 2.3 Prologue and Epilogue Attributes:
22320 When a breakpoint is set on entry to a function, it is generally
22321 desirable for execution to be suspended, not on the very first
22322 instruction of the function, but rather at a point after the
22323 function's frame has been set up, after any language defined local
22324 declaration processing has been completed, and before execution of
22325 the first statement of the function begins. Debuggers generally
22326 cannot properly determine where this point is. Similarly for a
22327 breakpoint set on exit from a function. The prologue and epilogue
22328 attributes allow a compiler to communicate the location(s) to use. */
22329
22330 {
22331 if (fde->dw_fde_vms_end_prologue)
22332 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22333 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22334
22335 if (fde->dw_fde_vms_begin_epilogue)
22336 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22337 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22338 }
22339 #endif
22340
22341 }
22342 else
22343 {
22344 /* Generate pubnames entries for the split function code ranges. */
22345 dw_fde_ref fde = fun->fde;
22346
22347 if (fde->dw_fde_second_begin)
22348 {
22349 if (dwarf_version >= 3 || !dwarf_strict)
22350 {
22351 /* We should use ranges for non-contiguous code section
22352 addresses. Use the actual code range for the initial
22353 section, since the HOT/COLD labels might precede an
22354 alignment offset. */
22355 bool range_list_added = false;
22356 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22357 fde->dw_fde_end, &range_list_added,
22358 false);
22359 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22360 fde->dw_fde_second_end,
22361 &range_list_added, false);
22362 if (range_list_added)
22363 add_ranges (NULL);
22364 }
22365 else
22366 {
22367 /* There is no real support in DWARF 2 for this, so we make
22368 a workaround. First, emit the pub name for the segment
22369 containing the function label. Then make and emit a
22370 simplified subprogram DIE for the second segment with the
22371 name prefixed by __second_sect_of_. We use the same
22372 linkage name for the second DIE so that gdb will find both
22373 sections when given "b foo". */
22374 const char *name = NULL;
22375 tree decl_name = DECL_NAME (decl);
22376 dw_die_ref seg_die;
22377
22378 /* Do the 'primary' section. */
22379 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22380 fde->dw_fde_end, false);
22381
22382 /* Build a minimal DIE for the secondary section. */
22383 seg_die = new_die (DW_TAG_subprogram,
22384 subr_die->die_parent, decl);
22385
22386 if (TREE_PUBLIC (decl))
22387 add_AT_flag (seg_die, DW_AT_external, 1);
22388
22389 if (decl_name != NULL
22390 && IDENTIFIER_POINTER (decl_name) != NULL)
22391 {
22392 name = dwarf2_name (decl, 1);
22393 if (! DECL_ARTIFICIAL (decl))
22394 add_src_coords_attributes (seg_die, decl);
22395
22396 add_linkage_name (seg_die, decl);
22397 }
22398 gcc_assert (name != NULL);
22399 add_pure_or_virtual_attribute (seg_die, decl);
22400 if (DECL_ARTIFICIAL (decl))
22401 add_AT_flag (seg_die, DW_AT_artificial, 1);
22402
22403 name = concat ("__second_sect_of_", name, NULL);
22404 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22405 fde->dw_fde_second_end, false);
22406 add_name_attribute (seg_die, name);
22407 if (want_pubnames ())
22408 add_pubname_string (name, seg_die);
22409 }
22410 }
22411 else
22412 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22413 false);
22414 }
22415
22416 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22417
22418 /* We define the "frame base" as the function's CFA. This is more
22419 convenient for several reasons: (1) It's stable across the prologue
22420 and epilogue, which makes it better than just a frame pointer,
22421 (2) With dwarf3, there exists a one-byte encoding that allows us
22422 to reference the .debug_frame data by proxy, but failing that,
22423 (3) We can at least reuse the code inspection and interpretation
22424 code that determines the CFA position at various points in the
22425 function. */
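 /* Illustrative note (an addition, not from the original comment): with
 DWARF 3 or later and DWARF-based unwind info, the frame base is emitted
 as the one-byte expression
 DW_AT_frame_base : DW_OP_call_frame_cfa
 while the fallback below emits either a location list or a single
 register+offset expression (e.g. DW_OP_breg<n> <offset>, where the
 register number is target-specific). */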
22426 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22427 {
22428 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22429 add_AT_loc (subr_die, DW_AT_frame_base, op);
22430 }
22431 else
22432 {
22433 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22434 if (list->dw_loc_next)
22435 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22436 else
22437 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22438 }
22439
22440 /* Compute a displacement from the "steady-state frame pointer" to
22441 the CFA. The former is what all stack slots and argument slots
22442 will reference in the rtl; the latter is what we've told the
22443 debugger about. We'll need to adjust all frame_base references
22444 by this displacement. */
22445 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22446
22447 if (fun->static_chain_decl)
22448 {
22449 /* DWARF requires here a location expression that computes the
22450 address of the enclosing subprogram's frame base. The machinery
22451 in tree-nested.c is supposed to store this specific address in the
22452 last field of the FRAME record. */
22453 const tree frame_type
22454 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22455 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22456
22457 tree fb_expr
22458 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22459 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22460 fb_expr, fb_decl, NULL_TREE);
22461
22462 add_AT_location_description (subr_die, DW_AT_static_link,
22463 loc_list_from_tree (fb_expr, 0, NULL));
22464 }
22465
22466 resolve_variable_values ();
22467 }
22468
22469 /* Generate child DIEs for template parameters. */
22470 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22471 gen_generic_params_dies (decl);
22472
22473 /* Now output descriptions of the arguments for this function. This gets
22474 (unnecessarily?) complex because the DECL_ARGUMENTS list
22475 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22476 `...' at the end of the formal parameter list. In order to find out if
22477 there was a trailing ellipsis or not, we must instead look at the type
22478 associated with the FUNCTION_DECL. This will be a node of type
22479 FUNCTION_TYPE. If the chain of type nodes hanging off of this
22480 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22481 an ellipsis at the end. */
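 /* For example (illustrative only):
 int f (int a, ...);   -- TYPE_ARG_TYPES chain does not end in void
 int g (int a);        -- TYPE_ARG_TYPES chain ends in void_type_node
 so a parameter-type list that does not end in void_type_node indicates
 a trailing ellipsis. */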
22482
22483 /* In the case where we are describing a mere function declaration, all we
22484 need to do here (and all we *can* do here) is to describe the *types* of
22485 its formal parameters. */
22486 if (debug_info_level <= DINFO_LEVEL_TERSE)
22487 ;
22488 else if (declaration)
22489 gen_formal_types_die (decl, subr_die);
22490 else
22491 {
22492 /* Generate DIEs to represent all known formal parameters. */
22493 tree parm = DECL_ARGUMENTS (decl);
22494 tree generic_decl = early_dwarf
22495 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22496 tree generic_decl_parm = generic_decl
22497 ? DECL_ARGUMENTS (generic_decl)
22498 : NULL;
22499
22500 /* Now we want to walk the list of parameters of the function and
22501 emit their relevant DIEs.
22502
22503 We consider the case of DECL being an instance of a generic function
22504 as well as it being a normal function.
22505
22506 If DECL is an instance of a generic function we walk the
22507 parameters of the generic function declaration _and_ the parameters of
22508 DECL itself. This is useful because we want to emit specific DIEs for
22509 function parameter packs and those are declared as part of the
22510 generic function declaration. In that particular case,
22511 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22512 That DIE has children DIEs representing the set of arguments
22513 of the pack. Note that the set of pack arguments can be empty.
22514 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22515 children DIE.
22516
22517 Otherwise, we just consider the parameters of DECL. */
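 /* Illustrative C++ example (an addition, not part of the original
 comment):
 template <typename... Args> void f (int n, Args... rest);
 an instantiation's DIE gets a DW_TAG_formal_parameter for `n'
 followed by a DW_TAG_GNU_formal_parameter_pack whose children
 (possibly none) describe the arguments bound to `rest'. */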
22518 while (generic_decl_parm || parm)
22519 {
22520 if (generic_decl_parm
22521 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22522 gen_formal_parameter_pack_die (generic_decl_parm,
22523 parm, subr_die,
22524 &parm);
22525 else if (parm && !POINTER_BOUNDS_P (parm))
22526 {
22527 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22528
22529 if (early_dwarf
22530 && parm == DECL_ARGUMENTS (decl)
22531 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22532 && parm_die
22533 && (dwarf_version >= 3 || !dwarf_strict))
22534 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22535
22536 parm = DECL_CHAIN (parm);
22537 }
22538 else if (parm)
22539 parm = DECL_CHAIN (parm);
22540
22541 if (generic_decl_parm)
22542 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22543 }
22544
22545 /* Decide whether we need an unspecified_parameters DIE at the end.
22546 There are two cases to do this for: 1) the ANSI `...' declaration -
22547 detectable when the end of the arg list is not a void_type_node -
22548 and 2) an unprototyped function declaration (not a definition),
22549 which just means that we have no info about the
22550 parameters at all. */
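 /* For example (illustrative only): both
 int f (int, ...);   -- prototyped, varargs
 int g ();           -- unprototyped declaration, no definition
 get a DW_TAG_unspecified_parameters child here, whereas
 `int h (void);' does not. */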
22551 if (early_dwarf)
22552 {
22553 if (prototype_p (TREE_TYPE (decl)))
22554 {
22555 /* This is the prototyped case: check for a trailing `...'. */
22556 if (stdarg_p (TREE_TYPE (decl)))
22557 gen_unspecified_parameters_die (decl, subr_die);
22558 }
22559 else if (DECL_INITIAL (decl) == NULL_TREE)
22560 gen_unspecified_parameters_die (decl, subr_die);
22561 }
22562 }
22563
22564 if (subr_die != old_die)
22565 /* Add the calling convention attribute if requested. */
22566 add_calling_convention_attribute (subr_die, decl);
22567
22568 /* Output Dwarf info for all of the stuff within the body of the function
22569 (if it has one - it may be just a declaration).
22570
22571 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22572 a function. This BLOCK actually represents the outermost binding contour
22573 for the function, i.e. the contour in which the function's formal
22574 parameters and labels get declared. Curiously, it appears that the front
22575 end doesn't actually put the PARM_DECL nodes for the current function onto
22576 the BLOCK_VARS list for this outer scope; instead they are strung off the
22577 DECL_ARGUMENTS list for the function.
22578
22579 The BLOCK_VARS list for the `outer_scope' does, however, provide us with a
22580 list of the LABEL_DECL nodes for the function, and we output DWARF info
22581 for those in decls_for_scope. Just within the `outer_scope' there will be
22582 a BLOCK node representing the function's outermost pair of curly braces,
22583 and any blocks used for the base and member initializers of a C++
22584 constructor function. */
22585 tree outer_scope = DECL_INITIAL (decl);
22586 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22587 {
22588 int call_site_note_count = 0;
22589 int tail_call_site_note_count = 0;
22590
22591 /* Emit a DW_TAG_variable DIE for a named return value. */
22592 if (DECL_NAME (DECL_RESULT (decl)))
22593 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22594
22595 /* The first time through decls_for_scope we will generate the
22596 DIEs for the locals. The second time, we fill in the
22597 location info. */
22598 decls_for_scope (outer_scope, subr_die);
22599
22600 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22601 {
22602 struct call_arg_loc_node *ca_loc;
22603 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22604 {
22605 dw_die_ref die = NULL;
22606 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22607 rtx arg, next_arg;
22608
22609 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22610 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22611 : NULL_RTX);
22612 arg; arg = next_arg)
22613 {
22614 dw_loc_descr_ref reg, val;
22615 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22616 dw_die_ref cdie, tdie = NULL;
22617
22618 next_arg = XEXP (arg, 1);
22619 if (REG_P (XEXP (XEXP (arg, 0), 0))
22620 && next_arg
22621 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22622 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22623 && REGNO (XEXP (XEXP (arg, 0), 0))
22624 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22625 next_arg = XEXP (next_arg, 1);
22626 if (mode == VOIDmode)
22627 {
22628 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22629 if (mode == VOIDmode)
22630 mode = GET_MODE (XEXP (arg, 0));
22631 }
22632 if (mode == VOIDmode || mode == BLKmode)
22633 continue;
22634 /* Get dynamic information about call target only if we
22635 have no static information: we cannot generate both
22636 DW_AT_call_origin and DW_AT_call_target
22637 attributes. */
22638 if (ca_loc->symbol_ref == NULL_RTX)
22639 {
22640 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22641 {
22642 tloc = XEXP (XEXP (arg, 0), 1);
22643 continue;
22644 }
22645 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22646 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22647 {
22648 tlocc = XEXP (XEXP (arg, 0), 1);
22649 continue;
22650 }
22651 }
22652 reg = NULL;
22653 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22654 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22655 VAR_INIT_STATUS_INITIALIZED);
22656 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22657 {
22658 rtx mem = XEXP (XEXP (arg, 0), 0);
22659 reg = mem_loc_descriptor (XEXP (mem, 0),
22660 get_address_mode (mem),
22661 GET_MODE (mem),
22662 VAR_INIT_STATUS_INITIALIZED);
22663 }
22664 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22665 == DEBUG_PARAMETER_REF)
22666 {
22667 tree tdecl
22668 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22669 tdie = lookup_decl_die (tdecl);
22670 if (tdie == NULL)
22671 continue;
22672 }
22673 else
22674 continue;
22675 if (reg == NULL
22676 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22677 != DEBUG_PARAMETER_REF)
22678 continue;
22679 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22680 VOIDmode,
22681 VAR_INIT_STATUS_INITIALIZED);
22682 if (val == NULL)
22683 continue;
22684 if (die == NULL)
22685 die = gen_call_site_die (decl, subr_die, ca_loc);
22686 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22687 NULL_TREE);
22688 if (reg != NULL)
22689 add_AT_loc (cdie, DW_AT_location, reg);
22690 else if (tdie != NULL)
22691 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22692 tdie);
22693 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22694 if (next_arg != XEXP (arg, 1))
22695 {
22696 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22697 if (mode == VOIDmode)
22698 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22699 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22700 0), 1),
22701 mode, VOIDmode,
22702 VAR_INIT_STATUS_INITIALIZED);
22703 if (val != NULL)
22704 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22705 val);
22706 }
22707 }
22708 if (die == NULL
22709 && (ca_loc->symbol_ref || tloc))
22710 die = gen_call_site_die (decl, subr_die, ca_loc);
22711 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22712 {
22713 dw_loc_descr_ref tval = NULL;
22714
22715 if (tloc != NULL_RTX)
22716 tval = mem_loc_descriptor (tloc,
22717 GET_MODE (tloc) == VOIDmode
22718 ? Pmode : GET_MODE (tloc),
22719 VOIDmode,
22720 VAR_INIT_STATUS_INITIALIZED);
22721 if (tval)
22722 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22723 else if (tlocc != NULL_RTX)
22724 {
22725 tval = mem_loc_descriptor (tlocc,
22726 GET_MODE (tlocc) == VOIDmode
22727 ? Pmode : GET_MODE (tlocc),
22728 VOIDmode,
22729 VAR_INIT_STATUS_INITIALIZED);
22730 if (tval)
22731 add_AT_loc (die,
22732 dwarf_AT (DW_AT_call_target_clobbered),
22733 tval);
22734 }
22735 }
22736 if (die != NULL)
22737 {
22738 call_site_note_count++;
22739 if (ca_loc->tail_call_p)
22740 tail_call_site_note_count++;
22741 }
22742 }
22743 }
22744 call_arg_locations = NULL;
22745 call_arg_loc_last = NULL;
22746 if (tail_call_site_count >= 0
22747 && tail_call_site_count == tail_call_site_note_count
22748 && (!dwarf_strict || dwarf_version >= 5))
22749 {
22750 if (call_site_count >= 0
22751 && call_site_count == call_site_note_count)
22752 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22753 else
22754 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22755 }
22756 call_site_count = -1;
22757 tail_call_site_count = -1;
22758 }
22759
22760 /* Mark used types after we have created DIEs for the functions scopes. */
22761 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22762 }
22763
22764 /* Returns a hash value for X (which really is a die_struct). */
22765
22766 hashval_t
22767 block_die_hasher::hash (die_struct *d)
22768 {
22769 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22770 }
22771
22772 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
22773 as the decl_id and die_parent of die_struct Y. */
22774
22775 bool
22776 block_die_hasher::equal (die_struct *x, die_struct *y)
22777 {
22778 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22779 }
22780
22781 /* Return TRUE if DECL, which may have been previously generated as
22782 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22783 true if decl (or its origin) is either an extern declaration or a
22784 class/namespace scoped declaration.
22785
22786 The declare_in_namespace support causes us to get two DIEs for one
22787 variable, both of which are declarations. We want to avoid
22788 considering one to be a specification, so we must test for
22789 DECLARATION and DW_AT_declaration. */
22790 static inline bool
22791 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22792 {
22793 return (old_die && TREE_STATIC (decl) && !declaration
22794 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22795 }
22796
22797 /* Return true if DECL is a local static. */
22798
22799 static inline bool
22800 local_function_static (tree decl)
22801 {
22802 gcc_assert (VAR_P (decl));
22803 return TREE_STATIC (decl)
22804 && DECL_CONTEXT (decl)
22805 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22806 }
22807
22808 /* Generate a DIE to represent a declared data object.
22809 Either DECL or ORIGIN must be non-null. */
22810
22811 static void
22812 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22813 {
22814 HOST_WIDE_INT off = 0;
22815 tree com_decl;
22816 tree decl_or_origin = decl ? decl : origin;
22817 tree ultimate_origin;
22818 dw_die_ref var_die;
22819 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22820 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22821 || class_or_namespace_scope_p (context_die));
22822 bool specialization_p = false;
22823 bool no_linkage_name = false;
22824
22825 /* While C++ inline static data members have definitions inside the
22826 class, force the first DIE to be a declaration, then let gen_member_die
22827 reparent it to the class context and call gen_variable_die again
22828 to create the outside-of-class DIE for the definition. */
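 /* Hedged example of the case handled here (not from the original source):
 struct S { static inline int x = 17; };
 first gets a declaration DIE inside the DIE for S; the definition DIE is
 created later, outside the class, with a DW_AT_specification reference
 back to the member declaration. */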
22829 if (!declaration
22830 && old_die == NULL
22831 && decl
22832 && DECL_CONTEXT (decl)
22833 && TYPE_P (DECL_CONTEXT (decl))
22834 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22835 {
22836 declaration = true;
22837 if (dwarf_version < 5)
22838 no_linkage_name = true;
22839 }
22840
22841 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22842 if (decl || ultimate_origin)
22843 origin = ultimate_origin;
22844 com_decl = fortran_common (decl_or_origin, &off);
22845
22846 /* A symbol in a COMMON block gets emitted as a child of the common block
22847 DIE, in the form of a data member. */
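 /* Illustrative Fortran example (an assumption, not from the source):
 COMMON /blk/ a, b
 yields a DW_TAG_common_block DIE named "blk" whose DW_TAG_variable
 children for `a' and `b' are located at offsets from the block's
 address, as computed below. */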
22848 if (com_decl)
22849 {
22850 dw_die_ref com_die;
22851 dw_loc_list_ref loc = NULL;
22852 die_node com_die_arg;
22853
22854 var_die = lookup_decl_die (decl_or_origin);
22855 if (var_die)
22856 {
22857 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22858 {
22859 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22860 if (loc)
22861 {
22862 if (off)
22863 {
22864 /* Optimize the common case. */
22865 if (single_element_loc_list_p (loc)
22866 && loc->expr->dw_loc_opc == DW_OP_addr
22867 && loc->expr->dw_loc_next == NULL
22868 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22869 == SYMBOL_REF)
22870 {
22871 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22872 loc->expr->dw_loc_oprnd1.v.val_addr
22873 = plus_constant (GET_MODE (x), x , off);
22874 }
22875 else
22876 loc_list_plus_const (loc, off);
22877 }
22878 add_AT_location_description (var_die, DW_AT_location, loc);
22879 remove_AT (var_die, DW_AT_declaration);
22880 }
22881 }
22882 return;
22883 }
22884
22885 if (common_block_die_table == NULL)
22886 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22887
22888 com_die_arg.decl_id = DECL_UID (com_decl);
22889 com_die_arg.die_parent = context_die;
22890 com_die = common_block_die_table->find (&com_die_arg);
22891 if (! early_dwarf)
22892 loc = loc_list_from_tree (com_decl, 2, NULL);
22893 if (com_die == NULL)
22894 {
22895 const char *cnam
22896 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22897 die_node **slot;
22898
22899 com_die = new_die (DW_TAG_common_block, context_die, decl);
22900 add_name_and_src_coords_attributes (com_die, com_decl);
22901 if (loc)
22902 {
22903 add_AT_location_description (com_die, DW_AT_location, loc);
22904 /* Avoid sharing the same loc descriptor between
22905 DW_TAG_common_block and DW_TAG_variable. */
22906 loc = loc_list_from_tree (com_decl, 2, NULL);
22907 }
22908 else if (DECL_EXTERNAL (decl_or_origin))
22909 add_AT_flag (com_die, DW_AT_declaration, 1);
22910 if (want_pubnames ())
22911 add_pubname_string (cnam, com_die); /* ??? needed? */
22912 com_die->decl_id = DECL_UID (com_decl);
22913 slot = common_block_die_table->find_slot (com_die, INSERT);
22914 *slot = com_die;
22915 }
22916 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22917 {
22918 add_AT_location_description (com_die, DW_AT_location, loc);
22919 loc = loc_list_from_tree (com_decl, 2, NULL);
22920 remove_AT (com_die, DW_AT_declaration);
22921 }
22922 var_die = new_die (DW_TAG_variable, com_die, decl);
22923 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22924 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22925 decl_quals (decl_or_origin), false,
22926 context_die);
22927 add_alignment_attribute (var_die, decl);
22928 add_AT_flag (var_die, DW_AT_external, 1);
22929 if (loc)
22930 {
22931 if (off)
22932 {
22933 /* Optimize the common case. */
22934 if (single_element_loc_list_p (loc)
22935 && loc->expr->dw_loc_opc == DW_OP_addr
22936 && loc->expr->dw_loc_next == NULL
22937 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22938 {
22939 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22940 loc->expr->dw_loc_oprnd1.v.val_addr
22941 = plus_constant (GET_MODE (x), x, off);
22942 }
22943 else
22944 loc_list_plus_const (loc, off);
22945 }
22946 add_AT_location_description (var_die, DW_AT_location, loc);
22947 }
22948 else if (DECL_EXTERNAL (decl_or_origin))
22949 add_AT_flag (var_die, DW_AT_declaration, 1);
22950 if (decl)
22951 equate_decl_number_to_die (decl, var_die);
22952 return;
22953 }
22954
22955 if (old_die)
22956 {
22957 if (declaration)
22958 {
22959 /* A declaration that has been previously dumped needs no
22960 further annotations, since it doesn't need location info on
22961 the second pass. */
22962 return;
22963 }
22964 else if (decl_will_get_specification_p (old_die, decl, declaration)
22965 && !get_AT (old_die, DW_AT_specification))
22966 {
22967 /* Fall-thru so we can make a new variable die along with a
22968 DW_AT_specification. */
22969 }
22970 else if (origin && old_die->die_parent != context_die)
22971 {
22972 /* If we will be creating an inlined instance, we need a
22973 new DIE that will get annotated with
22974 DW_AT_abstract_origin. */
22975 gcc_assert (!DECL_ABSTRACT_P (decl));
22976 }
22977 else
22978 {
22979 /* If a DIE was dumped early, it still needs location info.
22980 Skip to where we fill the location bits. */
22981 var_die = old_die;
22982
22983 /* ??? In LTRANS we cannot annotate early created variably
22984 modified type DIEs without copying them and adjusting all
22985 references to them. Thus we dumped them again; also add a
22986 type reference to them here. */
22987 tree type = TREE_TYPE (decl_or_origin);
22988 if (in_lto_p
22989 && variably_modified_type_p
22990 (type, decl_function_context (decl_or_origin)))
22991 {
22992 if (decl_by_reference_p (decl_or_origin))
22993 add_type_attribute (var_die, TREE_TYPE (type),
22994 TYPE_UNQUALIFIED, false, context_die);
22995 else
22996 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
22997 false, context_die);
22998 }
22999
23000 goto gen_variable_die_location;
23001 }
23002 }
23003
23004 /* For static data members, the declaration in the class is supposed
23005 to have the DW_TAG_member tag in DWARF{3,4}, and we emit it in DWARF2
23006 as well for compatibility; the specification should still be a
23007 DW_TAG_variable referencing the DW_TAG_member DIE. */
23008 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23009 var_die = new_die (DW_TAG_member, context_die, decl);
23010 else
23011 var_die = new_die (DW_TAG_variable, context_die, decl);
23012
23013 if (origin != NULL)
23014 add_abstract_origin_attribute (var_die, origin);
23015
23016 /* Loop unrolling can create multiple blocks that refer to the same
23017 static variable, so we must test for the DW_AT_declaration flag.
23018
23019 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23020 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23021 sharing them.
23022
23023 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23024 else if (decl_will_get_specification_p (old_die, decl, declaration))
23025 {
23026 /* This is a definition of a C++ class level static. */
23027 add_AT_specification (var_die, old_die);
23028 specialization_p = true;
23029 if (DECL_NAME (decl))
23030 {
23031 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23032 struct dwarf_file_data * file_index = lookup_filename (s.file);
23033
23034 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23035 add_AT_file (var_die, DW_AT_decl_file, file_index);
23036
23037 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23038 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23039
23040 if (debug_column_info
23041 && s.column
23042 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23043 != (unsigned) s.column))
23044 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23045
23046 if (old_die->die_tag == DW_TAG_member)
23047 add_linkage_name (var_die, decl);
23048 }
23049 }
23050 else
23051 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23052
23053 if ((origin == NULL && !specialization_p)
23054 || (origin != NULL
23055 && !DECL_ABSTRACT_P (decl_or_origin)
23056 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23057 decl_function_context
23058 (decl_or_origin))))
23059 {
23060 tree type = TREE_TYPE (decl_or_origin);
23061
23062 if (decl_by_reference_p (decl_or_origin))
23063 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23064 context_die);
23065 else
23066 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23067 context_die);
23068 }
23069
23070 if (origin == NULL && !specialization_p)
23071 {
23072 if (TREE_PUBLIC (decl))
23073 add_AT_flag (var_die, DW_AT_external, 1);
23074
23075 if (DECL_ARTIFICIAL (decl))
23076 add_AT_flag (var_die, DW_AT_artificial, 1);
23077
23078 add_alignment_attribute (var_die, decl);
23079
23080 add_accessibility_attribute (var_die, decl);
23081 }
23082
23083 if (declaration)
23084 add_AT_flag (var_die, DW_AT_declaration, 1);
23085
23086 if (decl && (DECL_ABSTRACT_P (decl)
23087 || !old_die || is_declaration_die (old_die)))
23088 equate_decl_number_to_die (decl, var_die);
23089
23090 gen_variable_die_location:
23091 if (! declaration
23092 && (! DECL_ABSTRACT_P (decl_or_origin)
23093 /* Local static vars are shared between all clones/inlines,
23094 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23095 already set. */
23096 || (VAR_P (decl_or_origin)
23097 && TREE_STATIC (decl_or_origin)
23098 && DECL_RTL_SET_P (decl_or_origin))))
23099 {
23100 if (early_dwarf)
23101 add_pubname (decl_or_origin, var_die);
23102 else
23103 add_location_or_const_value_attribute (var_die, decl_or_origin,
23104 decl == NULL);
23105 }
23106 else
23107 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23108
23109 if ((dwarf_version >= 4 || !dwarf_strict)
23110 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23111 DW_AT_const_expr) == 1
23112 && !get_AT (var_die, DW_AT_const_expr)
23113 && !specialization_p)
23114 add_AT_flag (var_die, DW_AT_const_expr, 1);
23115
23116 if (!dwarf_strict)
23117 {
23118 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23119 DW_AT_inline);
23120 if (inl != -1
23121 && !get_AT (var_die, DW_AT_inline)
23122 && !specialization_p)
23123 add_AT_unsigned (var_die, DW_AT_inline, inl);
23124 }
23125 }
23126
23127 /* Generate a DIE to represent a named constant. */
23128
23129 static void
23130 gen_const_die (tree decl, dw_die_ref context_die)
23131 {
23132 dw_die_ref const_die;
23133 tree type = TREE_TYPE (decl);
23134
23135 const_die = lookup_decl_die (decl);
23136 if (const_die)
23137 return;
23138
23139 const_die = new_die (DW_TAG_constant, context_die, decl);
23140 equate_decl_number_to_die (decl, const_die);
23141 add_name_and_src_coords_attributes (const_die, decl);
23142 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23143 if (TREE_PUBLIC (decl))
23144 add_AT_flag (const_die, DW_AT_external, 1);
23145 if (DECL_ARTIFICIAL (decl))
23146 add_AT_flag (const_die, DW_AT_artificial, 1);
23147 tree_add_const_value_attribute_for_decl (const_die, decl);
23148 }
23149
23150 /* Generate a DIE to represent a label identifier. */
23151
23152 static void
23153 gen_label_die (tree decl, dw_die_ref context_die)
23154 {
23155 tree origin = decl_ultimate_origin (decl);
23156 dw_die_ref lbl_die = lookup_decl_die (decl);
23157 rtx insn;
23158 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23159
23160 if (!lbl_die)
23161 {
23162 lbl_die = new_die (DW_TAG_label, context_die, decl);
23163 equate_decl_number_to_die (decl, lbl_die);
23164
23165 if (origin != NULL)
23166 add_abstract_origin_attribute (lbl_die, origin);
23167 else
23168 add_name_and_src_coords_attributes (lbl_die, decl);
23169 }
23170
23171 if (DECL_ABSTRACT_P (decl))
23172 equate_decl_number_to_die (decl, lbl_die);
23173 else if (! early_dwarf)
23174 {
23175 insn = DECL_RTL_IF_SET (decl);
23176
23177 /* Deleted labels are programmer-specified labels that have been
23178 eliminated by various optimizations. We still emit them
23179 here so that it is possible to put breakpoints on them. */
23180 if (insn
23181 && (LABEL_P (insn)
23182 || ((NOTE_P (insn)
23183 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23184 {
23185 /* When optimization is enabled (via -O) some parts of the compiler
23186 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns that
23187 represent source-level labels explicitly declared by
23188 the user. This really shouldn't be happening, so catch
23189 it if it ever does happen. */
23190 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23191
23192 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23193 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23194 }
23195 else if (insn
23196 && NOTE_P (insn)
23197 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23198 && CODE_LABEL_NUMBER (insn) != -1)
23199 {
23200 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23201 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23202 }
23203 }
23204 }
23205
23206 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23207 attributes to the DIE for a block STMT, to describe where the inlined
23208 function was called from. This is similar to add_src_coords_attributes. */
23209
23210 static inline void
23211 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23212 {
23213 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23214
23215 if (dwarf_version >= 3 || !dwarf_strict)
23216 {
23217 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23218 add_AT_unsigned (die, DW_AT_call_line, s.line);
23219 if (debug_column_info && s.column)
23220 add_AT_unsigned (die, DW_AT_call_column, s.column);
23221 }
23222 }
23223
23224
23225 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23226 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23227
23228 static inline void
23229 add_high_low_attributes (tree stmt, dw_die_ref die)
23230 {
23231 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23232
23233 if (BLOCK_FRAGMENT_CHAIN (stmt)
23234 && (dwarf_version >= 3 || !dwarf_strict))
23235 {
23236 tree chain, superblock = NULL_TREE;
23237 dw_die_ref pdie;
23238 dw_attr_node *attr = NULL;
23239
23240 if (inlined_function_outer_scope_p (stmt))
23241 {
23242 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23243 BLOCK_NUMBER (stmt));
23244 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23245 }
23246
23247 /* Optimize duplicate .debug_ranges lists or even tails of
23248 lists. If this BLOCK has the same ranges as its supercontext,
23249 look up the DW_AT_ranges attribute in the supercontext (and
23250 recursively so), verify that the ranges_table contains the
23251 right values, and use it instead of adding a new .debug_ranges entry. */
23252 for (chain = stmt, pdie = die;
23253 BLOCK_SAME_RANGE (chain);
23254 chain = BLOCK_SUPERCONTEXT (chain))
23255 {
23256 dw_attr_node *new_attr;
23257
23258 pdie = pdie->die_parent;
23259 if (pdie == NULL)
23260 break;
23261 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23262 break;
23263 new_attr = get_AT (pdie, DW_AT_ranges);
23264 if (new_attr == NULL
23265 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23266 break;
23267 attr = new_attr;
23268 superblock = BLOCK_SUPERCONTEXT (chain);
23269 }
23270 if (attr != NULL
23271 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23272 == BLOCK_NUMBER (superblock))
23273 && BLOCK_FRAGMENT_CHAIN (superblock))
23274 {
23275 unsigned long off = attr->dw_attr_val.v.val_offset;
23276 unsigned long supercnt = 0, thiscnt = 0;
23277 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23278 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23279 {
23280 ++supercnt;
23281 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23282 == BLOCK_NUMBER (chain));
23283 }
23284 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23285 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23286 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23287 ++thiscnt;
23288 gcc_assert (supercnt >= thiscnt);
23289 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23290 false);
23291 note_rnglist_head (off + supercnt - thiscnt);
23292 return;
23293 }
23294
23295 unsigned int offset = add_ranges (stmt, true);
23296 add_AT_range_list (die, DW_AT_ranges, offset, false);
23297 note_rnglist_head (offset);
23298
23299 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23300 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23301 do
23302 {
23303 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23304 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23305 chain = BLOCK_FRAGMENT_CHAIN (chain);
23306 }
23307 while (chain);
23308 add_ranges (NULL);
23309 }
23310 else
23311 {
23312 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23313 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23314 BLOCK_NUMBER (stmt));
23315 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23316 BLOCK_NUMBER (stmt));
23317 add_AT_low_high_pc (die, label, label_high, false);
23318 }
23319 }
23320
23321 /* Generate a DIE for a lexical block. */
23322
23323 static void
23324 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23325 {
23326 dw_die_ref old_die = BLOCK_DIE (stmt);
23327 dw_die_ref stmt_die = NULL;
23328 if (!old_die)
23329 {
23330 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23331 BLOCK_DIE (stmt) = stmt_die;
23332 }
23333
23334 if (BLOCK_ABSTRACT (stmt))
23335 {
23336 if (old_die)
23337 {
23338 /* This must have been generated early and it won't even
23339 need location information since it belongs to a DW_AT_inline
23340 function. */
23341 if (flag_checking)
23342 for (dw_die_ref c = context_die; c; c = c->die_parent)
23343 if (c->die_tag == DW_TAG_inlined_subroutine
23344 || c->die_tag == DW_TAG_subprogram)
23345 {
23346 gcc_assert (get_AT (c, DW_AT_inline));
23347 break;
23348 }
23349 return;
23350 }
23351 }
23352 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23353 {
23354 /* If this is an inlined instance, create a new lexical die for
23355 anything below to attach DW_AT_abstract_origin to. */
23356 if (old_die)
23357 {
23358 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23359 BLOCK_DIE (stmt) = stmt_die;
23360 old_die = NULL;
23361 }
23362
23363 tree origin = block_ultimate_origin (stmt);
23364 if (origin != NULL_TREE && origin != stmt)
23365 add_abstract_origin_attribute (stmt_die, origin);
23366 }
23367
23368 if (old_die)
23369 stmt_die = old_die;
23370
23371 /* A non-abstract block whose blocks have already been reordered
23372 should have the instruction range for this block. If so, set the
23373 high/low attributes. */
23374 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23375 {
23376 gcc_assert (stmt_die);
23377 add_high_low_attributes (stmt, stmt_die);
23378 }
23379
23380 decls_for_scope (stmt, stmt_die);
23381 }
23382
23383 /* Generate a DIE for an inlined subprogram. */
23384
23385 static void
23386 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23387 {
23388 tree decl;
23389
23390 /* The instance of the function that is effectively being inlined must not
23391 be abstract. */
23392 gcc_assert (! BLOCK_ABSTRACT (stmt));
23393
23394 decl = block_ultimate_origin (stmt);
23395
23396 /* Make sure any inlined functions are known to be inlineable. */
23397 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23398 || cgraph_function_possibly_inlined_p (decl));
23399
23400 if (! BLOCK_ABSTRACT (stmt))
23401 {
23402 dw_die_ref subr_die
23403 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23404
23405 if (call_arg_locations)
23406 BLOCK_DIE (stmt) = subr_die;
23407 add_abstract_origin_attribute (subr_die, decl);
23408 if (TREE_ASM_WRITTEN (stmt))
23409 add_high_low_attributes (stmt, subr_die);
23410 add_call_src_coords_attributes (stmt, subr_die);
23411
23412 decls_for_scope (stmt, subr_die);
23413 }
23414 }
23415
23416 /* Generate a DIE for a field in a record or structure. CTX is required: see
23417 the comment for VLR_CONTEXT. */
23418
23419 static void
23420 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23421 {
23422 dw_die_ref decl_die;
23423
23424 if (TREE_TYPE (decl) == error_mark_node)
23425 return;
23426
23427 decl_die = new_die (DW_TAG_member, context_die, decl);
23428 add_name_and_src_coords_attributes (decl_die, decl);
23429 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23430 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23431 context_die);
23432
23433 if (DECL_BIT_FIELD_TYPE (decl))
23434 {
23435 add_byte_size_attribute (decl_die, decl);
23436 add_bit_size_attribute (decl_die, decl);
23437 add_bit_offset_attribute (decl_die, decl, ctx);
23438 }
23439
23440 add_alignment_attribute (decl_die, decl);
23441
23442 /* If we have a variant part offset, then we are supposed to process a member
23443 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23444 trees. */
23445 gcc_assert (ctx->variant_part_offset == NULL_TREE
23446 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23447 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23448 add_data_member_location_attribute (decl_die, decl, ctx);
23449
23450 if (DECL_ARTIFICIAL (decl))
23451 add_AT_flag (decl_die, DW_AT_artificial, 1);
23452
23453 add_accessibility_attribute (decl_die, decl);
23454
23455 /* Equate decl number to die, so that we can look up this decl later on. */
23456 equate_decl_number_to_die (decl, decl_die);
23457 }
23458
23459 /* Generate a DIE for a pointer to a member type. TYPE can be an
23460 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23461 pointer to member function. */
23462
23463 static void
23464 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23465 {
23466 if (lookup_type_die (type))
23467 return;
23468
23469 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23470 scope_die_for (type, context_die), type);
23471
23472 equate_type_number_to_die (type, ptr_die);
23473 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23474 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23475 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23476 context_die);
23477 add_alignment_attribute (ptr_die, type);
23478
23479 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23480 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23481 {
23482 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23483 add_AT_loc (ptr_die, DW_AT_use_location, op);
23484 }
23485 }
23486
23487 static char *producer_string;
23488
23489 /* Return a heap-allocated producer string, including command-line options
23490 if -grecord-gcc-switches was given. */
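 /* Example of a resulting producer string (hedged; the exact contents
 depend on the front end, the GCC version and the options given):
 "GNU C11 8.1.0 -mtune=generic -march=x86-64 -g -O2"
 Options such as -o, -I/-D/-U, -W* and dump-related switches are
 filtered out below. */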
23491
23492 static char *
23493 gen_producer_string (void)
23494 {
23495 size_t j;
23496 auto_vec<const char *> switches;
23497 const char *language_string = lang_hooks.name;
23498 char *producer, *tail;
23499 const char *p;
23500 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23501 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23502
23503 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23504 switch (save_decoded_options[j].opt_index)
23505 {
23506 case OPT_o:
23507 case OPT_d:
23508 case OPT_dumpbase:
23509 case OPT_dumpdir:
23510 case OPT_auxbase:
23511 case OPT_auxbase_strip:
23512 case OPT_quiet:
23513 case OPT_version:
23514 case OPT_v:
23515 case OPT_w:
23516 case OPT_L:
23517 case OPT_D:
23518 case OPT_I:
23519 case OPT_U:
23520 case OPT_SPECIAL_unknown:
23521 case OPT_SPECIAL_ignore:
23522 case OPT_SPECIAL_program_name:
23523 case OPT_SPECIAL_input_file:
23524 case OPT_grecord_gcc_switches:
23525 case OPT__output_pch_:
23526 case OPT_fdiagnostics_show_location_:
23527 case OPT_fdiagnostics_show_option:
23528 case OPT_fdiagnostics_show_caret:
23529 case OPT_fdiagnostics_color_:
23530 case OPT_fverbose_asm:
23531 case OPT____:
23532 case OPT__sysroot_:
23533 case OPT_nostdinc:
23534 case OPT_nostdinc__:
23535 case OPT_fpreprocessed:
23536 case OPT_fltrans_output_list_:
23537 case OPT_fresolution_:
23538 case OPT_fdebug_prefix_map_:
23539 case OPT_fmacro_prefix_map_:
23540 case OPT_ffile_prefix_map_:
23541 case OPT_fcompare_debug:
23542 /* Ignore these. */
23543 continue;
23544 default:
23545 if (cl_options[save_decoded_options[j].opt_index].flags
23546 & CL_NO_DWARF_RECORD)
23547 continue;
23548 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23549 == '-');
23550 switch (save_decoded_options[j].canonical_option[0][1])
23551 {
23552 case 'M':
23553 case 'i':
23554 case 'W':
23555 continue;
23556 case 'f':
23557 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23558 "dump", 4) == 0)
23559 continue;
23560 break;
23561 default:
23562 break;
23563 }
23564 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23565 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23566 break;
23567 }
23568
23569 producer = XNEWVEC (char, plen + 1 + len + 1);
23570 tail = producer;
23571 sprintf (tail, "%s %s", language_string, version_string);
23572 tail += plen;
23573
23574 FOR_EACH_VEC_ELT (switches, j, p)
23575 {
23576 len = strlen (p);
23577 *tail = ' ';
23578 memcpy (tail + 1, p, len);
23579 tail += len + 1;
23580 }
23581
23582 *tail = '\0';
23583 return producer;
23584 }
23585
23586 /* Given a C and/or C++ language/version string, return the "highest" one.
23587 C++ is assumed to be "higher" than C in this case. Used for merging
23588 LTO translation unit languages. */
23589 static const char *
23590 highest_c_language (const char *lang1, const char *lang2)
23591 {
23592 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
23593 return "GNU C++17";
23594 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23595 return "GNU C++14";
23596 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23597 return "GNU C++11";
23598 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23599 return "GNU C++98";
23600
23601 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
23602 return "GNU C17";
23603 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23604 return "GNU C11";
23605 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23606 return "GNU C99";
23607 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23608 return "GNU C89";
23609
23610 gcc_unreachable ();
23611 }
23612
23613
23614 /* Generate the DIE for the compilation unit. */
23615
23616 static dw_die_ref
23617 gen_compile_unit_die (const char *filename)
23618 {
23619 dw_die_ref die;
23620 const char *language_string = lang_hooks.name;
23621 int language;
23622
23623 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23624
23625 if (filename)
23626 {
23627 add_name_attribute (die, filename);
23628 /* Don't add cwd for <built-in>. */
23629 if (filename[0] != '<')
23630 add_comp_dir_attribute (die);
23631 }
23632
23633 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23634
23635 /* If our producer is LTO, try to figure out a common language to use
23636 from the global list of translation units. */
23637 if (strcmp (language_string, "GNU GIMPLE") == 0)
23638 {
23639 unsigned i;
23640 tree t;
23641 const char *common_lang = NULL;
23642
23643 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23644 {
23645 if (!TRANSLATION_UNIT_LANGUAGE (t))
23646 continue;
23647 if (!common_lang)
23648 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23649 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23650 ;
23651 else if (strncmp (common_lang, "GNU C", 5) == 0
23652 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23653 /* Mixing C and C++ is ok, use C++ in that case. */
23654 common_lang = highest_c_language (common_lang,
23655 TRANSLATION_UNIT_LANGUAGE (t));
23656 else
23657 {
23658 /* Fall back to C. */
23659 common_lang = NULL;
23660 break;
23661 }
23662 }
23663
23664 if (common_lang)
23665 language_string = common_lang;
23666 }
23667
23668 language = DW_LANG_C;
23669 if (strncmp (language_string, "GNU C", 5) == 0
23670 && ISDIGIT (language_string[5]))
23671 {
23672 language = DW_LANG_C89;
23673 if (dwarf_version >= 3 || !dwarf_strict)
23674 {
23675 if (strcmp (language_string, "GNU C89") != 0)
23676 language = DW_LANG_C99;
23677
23678 if (dwarf_version >= 5 /* || !dwarf_strict */)
23679 if (strcmp (language_string, "GNU C11") == 0
23680 || strcmp (language_string, "GNU C17") == 0)
23681 language = DW_LANG_C11;
23682 }
23683 }
23684 else if (strncmp (language_string, "GNU C++", 7) == 0)
23685 {
23686 language = DW_LANG_C_plus_plus;
23687 if (dwarf_version >= 5 /* || !dwarf_strict */)
23688 {
23689 if (strcmp (language_string, "GNU C++11") == 0)
23690 language = DW_LANG_C_plus_plus_11;
23691 else if (strcmp (language_string, "GNU C++14") == 0)
23692 language = DW_LANG_C_plus_plus_14;
23693 else if (strcmp (language_string, "GNU C++17") == 0)
23694 /* For now. */
23695 language = DW_LANG_C_plus_plus_14;
23696 }
23697 }
23698 else if (strcmp (language_string, "GNU F77") == 0)
23699 language = DW_LANG_Fortran77;
23700 else if (dwarf_version >= 3 || !dwarf_strict)
23701 {
23702 if (strcmp (language_string, "GNU Ada") == 0)
23703 language = DW_LANG_Ada95;
23704 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23705 {
23706 language = DW_LANG_Fortran95;
23707 if (dwarf_version >= 5 /* || !dwarf_strict */)
23708 {
23709 if (strcmp (language_string, "GNU Fortran2003") == 0)
23710 language = DW_LANG_Fortran03;
23711 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23712 language = DW_LANG_Fortran08;
23713 }
23714 }
23715 else if (strcmp (language_string, "GNU Objective-C") == 0)
23716 language = DW_LANG_ObjC;
23717 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23718 language = DW_LANG_ObjC_plus_plus;
23719 else if (dwarf_version >= 5 || !dwarf_strict)
23720 {
23721 if (strcmp (language_string, "GNU Go") == 0)
23722 language = DW_LANG_Go;
23723 }
23724 }
23725 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23726 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23727 language = DW_LANG_Fortran90;
23728
23729 add_AT_unsigned (die, DW_AT_language, language);
23730
23731 switch (language)
23732 {
23733 case DW_LANG_Fortran77:
23734 case DW_LANG_Fortran90:
23735 case DW_LANG_Fortran95:
23736 case DW_LANG_Fortran03:
23737 case DW_LANG_Fortran08:
23738 /* Fortran has case-insensitive identifiers and the front-end
23739 lowercases everything. */
23740 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23741 break;
23742 default:
23743 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23744 break;
23745 }
23746 return die;
23747 }
23748
23749 /* Generate the DIE for a base class. */
23750
23751 static void
23752 gen_inheritance_die (tree binfo, tree access, tree type,
23753 dw_die_ref context_die)
23754 {
23755 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23756 struct vlr_context ctx = { type, NULL };
23757
23758 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23759 context_die);
23760 add_data_member_location_attribute (die, binfo, &ctx);
23761
23762 if (BINFO_VIRTUAL_P (binfo))
23763 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23764
23765 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23766 children, otherwise the default is DW_ACCESS_public. In DWARF2
23767 the default has always been DW_ACCESS_private. */
23768 if (access == access_public_node)
23769 {
23770 if (dwarf_version == 2
23771 || context_die->die_tag == DW_TAG_class_type)
23772 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23773 }
23774 else if (access == access_protected_node)
23775 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23776 else if (dwarf_version > 2
23777 && context_die->die_tag != DW_TAG_class_type)
23778 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23779 }
23780
23781 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23782 structure. */
23783 static bool
23784 is_variant_part (tree decl)
23785 {
23786 return (TREE_CODE (decl) == FIELD_DECL
23787 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23788 }
23789
23790 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23791 return the FIELD_DECL. Return NULL_TREE otherwise. */
23792
23793 static tree
23794 analyze_discr_in_predicate (tree operand, tree struct_type)
23795 {
23796 bool continue_stripping = true;
23797 while (continue_stripping)
23798 switch (TREE_CODE (operand))
23799 {
23800 CASE_CONVERT:
23801 operand = TREE_OPERAND (operand, 0);
23802 break;
23803 default:
23804 continue_stripping = false;
23805 break;
23806 }
23807
23808 /* Match field access to members of struct_type only. */
23809 if (TREE_CODE (operand) == COMPONENT_REF
23810 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23811 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23812 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23813 return TREE_OPERAND (operand, 1);
23814 else
23815 return NULL_TREE;
23816 }
23817
23818 /* Check that SRC is a constant integer that can be represented as a native
23819 integer constant (either signed or unsigned). If so, store it into DEST and
23820 return true. Return false otherwise. */
23821
23822 static bool
23823 get_discr_value (tree src, dw_discr_value *dest)
23824 {
23825 tree discr_type = TREE_TYPE (src);
23826
23827 if (lang_hooks.types.get_debug_type)
23828 {
23829 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
23830 if (debug_type != NULL)
23831 discr_type = debug_type;
23832 }
23833
23834 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
23835 return false;
23836
23837 /* Signedness can vary between the original type and the debug type. This
23838 can happen for character types in Ada for instance: the character type
23839 used for code generation can be signed, to be compatible with the C one,
23840 but from a debugger point of view, it must be unsigned. */
23841 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23842 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
23843
23844 if (is_orig_unsigned != is_debug_unsigned)
23845 src = fold_convert (discr_type, src);
23846
23847 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23848 return false;
23849
23850 dest->pos = is_debug_unsigned;
23851 if (is_debug_unsigned)
23852 dest->v.uval = tree_to_uhwi (src);
23853 else
23854 dest->v.sval = tree_to_shwi (src);
23855
23856 return true;
23857 }
23858
23859 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23860 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23861 store NULL_TREE in DISCR_DECL. Otherwise:
23862
23863 - store the discriminant field in STRUCT_TYPE that controls the variant
23864 part to *DISCR_DECL
23865
23866 - put in *DISCR_LISTS_P an array where for each variant, the item
23867 represents the corresponding matching list of discriminant values.
23868
23869 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23870 the above array.
23871
23872 Note that when the array is allocated (i.e. when the analysis is
23873 successful), it is up to the caller to free the array. */
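 /* Illustrative shape of a DECL_QUALIFIER predicate (an assumption matching
 the cases handled below):
 <placeholder>.kind == 1
 || (<placeholder>.kind >= 3 && <placeholder>.kind <= 5)
 i.e. nested TRUTH_ORIF_EXPRs over EQ_EXPR single values and
 TRUTH_ANDIF_EXPR ranges, all on one discriminant field. */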
23874
23875 static void
23876 analyze_variants_discr (tree variant_part_decl,
23877 tree struct_type,
23878 tree *discr_decl,
23879 dw_discr_list_ref **discr_lists_p,
23880 unsigned *discr_lists_length)
23881 {
23882 tree variant_part_type = TREE_TYPE (variant_part_decl);
23883 tree variant;
23884 dw_discr_list_ref *discr_lists;
23885 unsigned i;
23886
23887 /* Compute how many variants there are in this variant part. */
23888 *discr_lists_length = 0;
23889 for (variant = TYPE_FIELDS (variant_part_type);
23890 variant != NULL_TREE;
23891 variant = DECL_CHAIN (variant))
23892 ++*discr_lists_length;
23893
23894 *discr_decl = NULL_TREE;
23895 *discr_lists_p
23896 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23897 sizeof (**discr_lists_p));
23898 discr_lists = *discr_lists_p;
23899
23900 /* And then analyze all variants to extract discriminant information for all
23901 of them. This analysis is conservative: as soon as we detect something we
23902 do not support, abort everything and pretend we found nothing. */
23903 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23904 variant != NULL_TREE;
23905 variant = DECL_CHAIN (variant), ++i)
23906 {
23907 tree match_expr = DECL_QUALIFIER (variant);
23908
23909 /* Now, try to analyze the predicate and deduce a discriminant for
23910 it. */
23911 if (match_expr == boolean_true_node)
23912 /* Typically happens for the default variant: it matches all cases that
23913 previous variants rejected. Don't output any matching value for
23914 this one. */
23915 continue;
23916
23917 /* The following loop tries to iterate over each discriminant
23918 possibility: single values or ranges. */
23919 while (match_expr != NULL_TREE)
23920 {
23921 tree next_round_match_expr;
23922 tree candidate_discr = NULL_TREE;
23923 dw_discr_list_ref new_node = NULL;
23924
23925 /* Possibilities are matched one after the other by nested
23926 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23927 continue with the rest at next iteration. */
23928 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23929 {
23930 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23931 match_expr = TREE_OPERAND (match_expr, 1);
23932 }
23933 else
23934 next_round_match_expr = NULL_TREE;
23935
23936 if (match_expr == boolean_false_node)
23937 /* This sub-expression matches nothing: just wait for the next
23938 one. */
23939 ;
23940
23941 else if (TREE_CODE (match_expr) == EQ_EXPR)
23942 {
23943 /* We are matching: <discr_field> == <integer_cst>
23944 This sub-expression matches a single value. */
23945 tree integer_cst = TREE_OPERAND (match_expr, 1);
23946
23947 candidate_discr
23948 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23949 struct_type);
23950
23951 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23952 if (!get_discr_value (integer_cst,
23953 &new_node->dw_discr_lower_bound))
23954 goto abort;
23955 new_node->dw_discr_range = false;
23956 }
23957
23958 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23959 {
23960 /* We are matching:
23961 <discr_field> > <integer_cst>
23962 && <discr_field> < <integer_cst>.
23963 This sub-expression matches the range of values between the
23964 two matched integer constants. Note that comparisons can be
23965 inclusive or exclusive. */
23966 tree candidate_discr_1, candidate_discr_2;
23967 tree lower_cst, upper_cst;
23968 bool lower_cst_included, upper_cst_included;
23969 tree lower_op = TREE_OPERAND (match_expr, 0);
23970 tree upper_op = TREE_OPERAND (match_expr, 1);
23971
23972 /* When the comparison is exclusive, the integer constant is not
23973 the discriminant range bound we are looking for: we will have
23974 to increment or decrement it. */
23975 if (TREE_CODE (lower_op) == GE_EXPR)
23976 lower_cst_included = true;
23977 else if (TREE_CODE (lower_op) == GT_EXPR)
23978 lower_cst_included = false;
23979 else
23980 goto abort;
23981
23982 if (TREE_CODE (upper_op) == LE_EXPR)
23983 upper_cst_included = true;
23984 else if (TREE_CODE (upper_op) == LT_EXPR)
23985 upper_cst_included = false;
23986 else
23987 goto abort;
23988
23989 /* Extract the discriminant from the first operand and check that it
23990 is consistent with the same analysis in the second
23991 operand. */
23992 candidate_discr_1
23993 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23994 struct_type);
23995 candidate_discr_2
23996 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23997 struct_type);
23998 if (candidate_discr_1 == candidate_discr_2)
23999 candidate_discr = candidate_discr_1;
24000 else
24001 goto abort;
24002
24003 /* Extract bounds from both. */
24004 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24005 lower_cst = TREE_OPERAND (lower_op, 1);
24006 upper_cst = TREE_OPERAND (upper_op, 1);
24007
24008 if (!lower_cst_included)
24009 lower_cst
24010 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24011 build_int_cst (TREE_TYPE (lower_cst), 1));
24012 if (!upper_cst_included)
24013 upper_cst
24014 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24015 build_int_cst (TREE_TYPE (upper_cst), 1));
24016
24017 if (!get_discr_value (lower_cst,
24018 &new_node->dw_discr_lower_bound)
24019 || !get_discr_value (upper_cst,
24020 &new_node->dw_discr_upper_bound))
24021 goto abort;
24022
24023 new_node->dw_discr_range = true;
24024 }
24025
24026 else
24027 /* Unsupported sub-expression: we cannot determine the set of
24028 matching discriminant values. Abort everything. */
24029 goto abort;
24030
24031 /* If the discriminant info is not consistent with what we saw so
24032 far, consider the analysis failed and abort everything. */
24033 if (candidate_discr == NULL_TREE
24034 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24035 goto abort;
24036 else
24037 *discr_decl = candidate_discr;
24038
24039 if (new_node != NULL)
24040 {
24041 new_node->dw_discr_next = discr_lists[i];
24042 discr_lists[i] = new_node;
24043 }
24044 match_expr = next_round_match_expr;
24045 }
24046 }
24047
24048 /* If we reach this point, we could match everything we were interested
24049 in. */
24050 return;
24051
24052 abort:
24053 /* Clean all data structure and return no result. */
24054 free (*discr_lists_p);
24055 *discr_lists_p = NULL;
24056 *discr_decl = NULL_TREE;
24057 }
24058
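/* Illustration only (not part of this file's logic): a sketch of what the
   analysis above is expected to produce, assuming a discriminant field D
   and a variant whose DECL_QUALIFIER predicate is

       D == 3 || (D >= 10 && D < 20)

   The expected summary is a two-node discriminant list: the single value 3,
   and the range [10, 19] (the upper bound is decremented because the "<"
   comparison is exclusive).  Any unsupported sub-expression makes the whole
   analysis give up and return no result.  Such predicates are typically
   produced by the Ada front end for QUAL_UNION_TYPE variants.  */
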
24059 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24060 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24061 under CONTEXT_DIE.
24062
24063 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24064 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24065 this type, which are record types, represent the available variants and each
24066 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24067 values are inferred from these attributes.
24068
24069 In trees, the offsets for the fields inside these sub-records are relative
24070 to the variant part itself, whereas the corresponding DIEs should have
24071 offset attributes that are relative to the embedding record base address.
24072 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24073 must be an expression that computes the offset of the variant part to
24074 describe in DWARF. */
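/* Illustration only: the DIE shape this function aims for is roughly

       DW_TAG_structure_type            (the embedding record)
         DW_TAG_member "d"              (the discriminant field)
         DW_TAG_variant_part
           DW_AT_discr -> DIE for "d"
           DW_TAG_variant
             DW_AT_discr_value or DW_AT_discr_list
             DW_TAG_member ...          (fields of this variant)
           DW_TAG_variant               (no discr attribute: default variant)
             DW_TAG_member ...

   as DWARF defines for discriminated unions.  This is only a sketch; the
   comments in the function body below describe how member offsets and
   DWARF procedures are handled.  */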
24075
24076 static void
24077 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24078 dw_die_ref context_die)
24079 {
24080 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24081 tree variant_part_offset = vlr_ctx->variant_part_offset;
24082 struct loc_descr_context ctx = {
24083 vlr_ctx->struct_type, /* context_type */
24084 NULL_TREE, /* base_decl */
24085 NULL, /* dpi */
24086 false, /* placeholder_arg */
24087 false /* placeholder_seen */
24088 };
24089
24090 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24091 NULL_TREE if there is no such field. */
24092 tree discr_decl = NULL_TREE;
24093 dw_discr_list_ref *discr_lists;
24094 unsigned discr_lists_length = 0;
24095 unsigned i;
24096
24097 dw_die_ref dwarf_proc_die = NULL;
24098 dw_die_ref variant_part_die
24099 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24100
24101 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24102
24103 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24104 &discr_decl, &discr_lists, &discr_lists_length);
24105
24106 if (discr_decl != NULL_TREE)
24107 {
24108 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24109
24110 if (discr_die)
24111 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24112 else
24113 /* We have no DIE for the discriminant, so just discard all
24114 discriminant information in the output. */
24115 discr_decl = NULL_TREE;
24116 }
24117
24118 /* If the offset for this variant part is more complex than a constant,
24119 create a DWARF procedure for it so that we will not have to generate DWARF
24120 expressions for it for each member. */
24121 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24122 && (dwarf_version >= 3 || !dwarf_strict))
24123 {
24124 const tree dwarf_proc_fndecl
24125 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24126 build_function_type (TREE_TYPE (variant_part_offset),
24127 NULL_TREE));
24128 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24129 const dw_loc_descr_ref dwarf_proc_body
24130 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24131
24132 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24133 dwarf_proc_fndecl, context_die);
24134 if (dwarf_proc_die != NULL)
24135 variant_part_offset = dwarf_proc_call;
24136 }
24137
24138 /* Output DIEs for all variants. */
24139 i = 0;
24140 for (tree variant = TYPE_FIELDS (variant_part_type);
24141 variant != NULL_TREE;
24142 variant = DECL_CHAIN (variant), ++i)
24143 {
24144 tree variant_type = TREE_TYPE (variant);
24145 dw_die_ref variant_die;
24146
24147 /* All variants (i.e. members of a variant part) are supposed to be
24148 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24149 under these records. */
24150 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24151
24152 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24153 equate_decl_number_to_die (variant, variant_die);
24154
24155 /* Output discriminant values this variant matches, if any. */
24156 if (discr_decl == NULL || discr_lists[i] == NULL)
24157 /* In the case where we have no discriminant information at all, this is
24158 probably the default variant: as the standard says, don't
24159 output any discriminant value/list attribute. */
24160 ;
24161 else if (discr_lists[i]->dw_discr_next == NULL
24162 && !discr_lists[i]->dw_discr_range)
24163 /* If there is only one accepted value, don't bother outputting a
24164 list. */
24165 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24166 else
24167 add_discr_list (variant_die, discr_lists[i]);
24168
24169 for (tree member = TYPE_FIELDS (variant_type);
24170 member != NULL_TREE;
24171 member = DECL_CHAIN (member))
24172 {
24173 struct vlr_context vlr_sub_ctx = {
24174 vlr_ctx->struct_type, /* struct_type */
24175 NULL /* variant_part_offset */
24176 };
24177 if (is_variant_part (member))
24178 {
24179 /* All offsets for fields inside variant parts are relative to
24180 the top-level embedding RECORD_TYPE's base address. On the
24181 other hand, offsets in GCC's types are relative to the
24182 nested-most variant part. So we have to sum offsets each time
24183 we recurse. */
24184
24185 vlr_sub_ctx.variant_part_offset
24186 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24187 variant_part_offset, byte_position (member));
24188 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24189 }
24190 else
24191 {
24192 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24193 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24194 }
24195 }
24196 }
24197
24198 free (discr_lists);
24199 }
24200
24201 /* Generate a DIE for a class member. */
24202
24203 static void
24204 gen_member_die (tree type, dw_die_ref context_die)
24205 {
24206 tree member;
24207 tree binfo = TYPE_BINFO (type);
24208
24209 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24210
24211 /* If this is not an incomplete type, output descriptions of each of its
24212 members. Note that as we output the DIEs necessary to represent the
24213 members of this record or union type, we will also be trying to output
24214 DIEs to represent the *types* of those members. However the `type'
24215 function (above) will specifically avoid generating type DIEs for member
24216 types *within* the list of member DIEs for this (containing) type except
24217 for those types (of members) which are explicitly marked as also being
24218 members of this (containing) type themselves. The g++ front end can
24219 force any given type to be treated as a member of some other (containing)
24220 type by setting the TYPE_CONTEXT of the given (member) type to point to
24221 the TREE node representing the appropriate (containing) type. */
24222
24223 /* First output info about the base classes. */
24224 if (binfo)
24225 {
24226 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24227 int i;
24228 tree base;
24229
24230 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24231 gen_inheritance_die (base,
24232 (accesses ? (*accesses)[i] : access_public_node),
24233 type,
24234 context_die);
24235 }
24236
24237 /* Now output info about the data members and type members. */
24238 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24239 {
24240 struct vlr_context vlr_ctx = { type, NULL_TREE };
24241 bool static_inline_p
24242 = (TREE_STATIC (member)
24243 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24244 != -1));
24245
24246 /* Ignore clones. */
24247 if (DECL_ABSTRACT_ORIGIN (member))
24248 continue;
24249
24250 /* If we thought we were generating minimal debug info for TYPE
24251 and then changed our minds, some of the member declarations
24252 may have already been defined. Don't define them again, but
24253 do put them in the right order. */
24254
24255 if (dw_die_ref child = lookup_decl_die (member))
24256 {
24257 /* Handle inline static data members, which only have in-class
24258 declarations. */
24259 dw_die_ref ref = NULL;
24260 if (child->die_tag == DW_TAG_variable
24261 && child->die_parent == comp_unit_die ())
24262 {
24263 ref = get_AT_ref (child, DW_AT_specification);
24264 /* For C++17 inline static data members followed by redundant
24265 out of class redeclaration, we might get here with
24266 child being the DIE created for the out of class
24267 redeclaration and with its DW_AT_specification being
24268 the DIE created for in-class definition. We want to
24269 reparent the latter, and don't want to create another
24270 DIE with DW_AT_specification in that case, because
24271 we already have one. */
24272 if (ref
24273 && static_inline_p
24274 && ref->die_tag == DW_TAG_variable
24275 && ref->die_parent == comp_unit_die ()
24276 && get_AT (ref, DW_AT_specification) == NULL)
24277 {
24278 child = ref;
24279 ref = NULL;
24280 static_inline_p = false;
24281 }
24282 }
24283
24284 if (child->die_tag == DW_TAG_variable
24285 && child->die_parent == comp_unit_die ()
24286 && ref == NULL)
24287 {
24288 reparent_child (child, context_die);
24289 if (dwarf_version < 5)
24290 child->die_tag = DW_TAG_member;
24291 }
24292 else
24293 splice_child_die (context_die, child);
24294 }
24295
24296 /* Do not generate standard DWARF for variant parts if we are generating
24297 the corresponding GNAT encodings: DIEs generated for both would
24298 conflict in our mappings. */
24299 else if (is_variant_part (member)
24300 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24301 {
24302 vlr_ctx.variant_part_offset = byte_position (member);
24303 gen_variant_part (member, &vlr_ctx, context_die);
24304 }
24305 else
24306 {
24307 vlr_ctx.variant_part_offset = NULL_TREE;
24308 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24309 }
24310
24311 /* For C++ inline static data members emit immediately a DW_TAG_variable
24312 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24313 DW_AT_specification. */
24314 if (static_inline_p)
24315 {
24316 int old_extern = DECL_EXTERNAL (member);
24317 DECL_EXTERNAL (member) = 0;
24318 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24319 DECL_EXTERNAL (member) = old_extern;
24320 }
24321 }
24322 }
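
/* Illustration only: for a C++17 inline static data member such as

       struct S { static inline int count = 0; };

   the in-class declaration gets a child DIE of the class DIE, and a
   separate DW_TAG_variable DIE is emitted at compilation-unit scope whose
   DW_AT_specification points back at that declaration (see the
   static_inline_p handling above).  A sketch only; the exact DIEs depend
   on the DWARF version and front end.  */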
24323
24324 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24325 is set, we pretend that the type was never defined, so we only get the
24326 member DIEs needed by later specification DIEs. */
24327
24328 static void
24329 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24330 enum debug_info_usage usage)
24331 {
24332 if (TREE_ASM_WRITTEN (type))
24333 {
24334 /* Fill in the bound of variable-length fields in late dwarf if
24335 still incomplete. */
24336 if (!early_dwarf && variably_modified_type_p (type, NULL))
24337 for (tree member = TYPE_FIELDS (type);
24338 member;
24339 member = DECL_CHAIN (member))
24340 fill_variable_array_bounds (TREE_TYPE (member));
24341 return;
24342 }
24343
24344 dw_die_ref type_die = lookup_type_die (type);
24345 dw_die_ref scope_die = 0;
24346 int nested = 0;
24347 int complete = (TYPE_SIZE (type)
24348 && (! TYPE_STUB_DECL (type)
24349 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24350 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24351 complete = complete && should_emit_struct_debug (type, usage);
24352
24353 if (type_die && ! complete)
24354 return;
24355
24356 if (TYPE_CONTEXT (type) != NULL_TREE
24357 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24358 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24359 nested = 1;
24360
24361 scope_die = scope_die_for (type, context_die);
24362
24363 /* Generate child dies for template parameters. */
24364 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24365 schedule_generic_params_dies_gen (type);
24366
24367 if (! type_die || (nested && is_cu_die (scope_die)))
24368 /* First occurrence of type or toplevel definition of nested class. */
24369 {
24370 dw_die_ref old_die = type_die;
24371
24372 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24373 ? record_type_tag (type) : DW_TAG_union_type,
24374 scope_die, type);
24375 equate_type_number_to_die (type, type_die);
24376 if (old_die)
24377 add_AT_specification (type_die, old_die);
24378 else
24379 add_name_attribute (type_die, type_tag (type));
24380 }
24381 else
24382 remove_AT (type_die, DW_AT_declaration);
24383
24384 /* If this type has been completed, then give it a byte_size attribute and
24385 then give a list of members. */
24386 if (complete && !ns_decl)
24387 {
24388 /* Prevent infinite recursion in cases where the type of some member of
24389 this type is expressed in terms of this type itself. */
24390 TREE_ASM_WRITTEN (type) = 1;
24391 add_byte_size_attribute (type_die, type);
24392 add_alignment_attribute (type_die, type);
24393 if (TYPE_STUB_DECL (type) != NULL_TREE)
24394 {
24395 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24396 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24397 }
24398
24399 /* If the first reference to this type was as the return type of an
24400 inline function, then it may not have a parent. Fix this now. */
24401 if (type_die->die_parent == NULL)
24402 add_child_die (scope_die, type_die);
24403
24404 push_decl_scope (type);
24405 gen_member_die (type, type_die);
24406 pop_decl_scope ();
24407
24408 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24409 if (TYPE_ARTIFICIAL (type))
24410 add_AT_flag (type_die, DW_AT_artificial, 1);
24411
24412 /* GNU extension: Record what type our vtable lives in. */
24413 if (TYPE_VFIELD (type))
24414 {
24415 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24416
24417 gen_type_die (vtype, context_die);
24418 add_AT_die_ref (type_die, DW_AT_containing_type,
24419 lookup_type_die (vtype));
24420 }
24421 }
24422 else
24423 {
24424 add_AT_flag (type_die, DW_AT_declaration, 1);
24425
24426 /* We don't need to do this for function-local types. */
24427 if (TYPE_STUB_DECL (type)
24428 && ! decl_function_context (TYPE_STUB_DECL (type)))
24429 vec_safe_push (incomplete_types, type);
24430 }
24431
24432 if (get_AT (type_die, DW_AT_name))
24433 add_pubtype (type, type_die);
24434 }
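
/* Illustration only: for C source such as

       struct point { int x; int y; };
       struct opaque;

   this function is expected to emit, roughly,

       DW_TAG_structure_type "point"
         DW_AT_byte_size 8                  (assuming 4-byte int)
         DW_TAG_member "x"  DW_AT_data_member_location 0
         DW_TAG_member "y"  DW_AT_data_member_location 4
       DW_TAG_structure_type "opaque"
         DW_AT_declaration 1                (no size, no members)

   A sketch only; the exact attributes depend on the target, the language
   and options such as -gstrict-dwarf.  */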
24435
24436 /* Generate a DIE for a subroutine _type_. */
24437
24438 static void
24439 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24440 {
24441 tree return_type = TREE_TYPE (type);
24442 dw_die_ref subr_die
24443 = new_die (DW_TAG_subroutine_type,
24444 scope_die_for (type, context_die), type);
24445
24446 equate_type_number_to_die (type, subr_die);
24447 add_prototyped_attribute (subr_die, type);
24448 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24449 context_die);
24450 add_alignment_attribute (subr_die, type);
24451 gen_formal_types_die (type, subr_die);
24452
24453 if (get_AT (subr_die, DW_AT_name))
24454 add_pubtype (type, subr_die);
24455 if ((dwarf_version >= 5 || !dwarf_strict)
24456 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24457 add_AT_flag (subr_die, DW_AT_reference, 1);
24458 if ((dwarf_version >= 5 || !dwarf_strict)
24459 && lang_hooks.types.type_dwarf_attribute (type,
24460 DW_AT_rvalue_reference) != -1)
24461 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24462 }
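
/* Illustration only: for a C pointer-to-function type such as

       int (*fp) (float);

   the pointed-to function type is expected to come out roughly as

       DW_TAG_subroutine_type
         DW_AT_prototyped 1
         DW_AT_type -> DIE for "int"
         DW_TAG_formal_parameter  DW_AT_type -> DIE for "float"

   (the DW_TAG_pointer_type wrapping it is produced elsewhere).  A sketch
   only; exact output depends on the language and DWARF version.  */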
24463
24464 /* Generate a DIE for a type definition. */
24465
24466 static void
24467 gen_typedef_die (tree decl, dw_die_ref context_die)
24468 {
24469 dw_die_ref type_die;
24470 tree type;
24471
24472 if (TREE_ASM_WRITTEN (decl))
24473 {
24474 if (DECL_ORIGINAL_TYPE (decl))
24475 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24476 return;
24477 }
24478
24479 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
24480 checks in process_scope_var and modified_type_die), this should be called
24481 only for original types. */
24482 gcc_assert (decl_ultimate_origin (decl) == NULL
24483 || decl_ultimate_origin (decl) == decl);
24484
24485 TREE_ASM_WRITTEN (decl) = 1;
24486 type_die = new_die (DW_TAG_typedef, context_die, decl);
24487
24488 add_name_and_src_coords_attributes (type_die, decl);
24489 if (DECL_ORIGINAL_TYPE (decl))
24490 {
24491 type = DECL_ORIGINAL_TYPE (decl);
24492 if (type == error_mark_node)
24493 return;
24494
24495 gcc_assert (type != TREE_TYPE (decl));
24496 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24497 }
24498 else
24499 {
24500 type = TREE_TYPE (decl);
24501 if (type == error_mark_node)
24502 return;
24503
24504 if (is_naming_typedef_decl (TYPE_NAME (type)))
24505 {
24506 /* Here, we are in the case of decl being a typedef naming
24507 an anonymous type, e.g:
24508 typedef struct {...} foo;
24509 In that case TREE_TYPE (decl) is not a typedef variant
24510 type and TYPE_NAME of the anonymous type is set to the
24511 TYPE_DECL of the typedef. This construct is emitted by
24512 the C++ FE.
24513
24514 TYPE is the anonymous struct named by the typedef
24515 DECL. As we need the DW_AT_type attribute of the
24516 DW_TAG_typedef to point to the DIE of TYPE, let's
24517 generate that DIE right away. add_type_attribute
24518 called below will then pick (via lookup_type_die) that
24519 anonymous struct DIE. */
24520 if (!TREE_ASM_WRITTEN (type))
24521 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24522
24523 /* This is a GNU Extension. We are adding a
24524 DW_AT_linkage_name attribute to the DIE of the
24525 anonymous struct TYPE. The value of that attribute
24526 is the name of the typedef decl naming the anonymous
24527 struct. This greatly eases the work of consumers of
24528 this debug info. */
24529 add_linkage_name_raw (lookup_type_die (type), decl);
24530 }
24531 }
24532
24533 add_type_attribute (type_die, type, decl_quals (decl), false,
24534 context_die);
24535
24536 if (is_naming_typedef_decl (decl))
24537 /* We want that all subsequent calls to lookup_type_die with
24538 TYPE in argument yield the DW_TAG_typedef we have just
24539 created. */
24540 equate_type_number_to_die (type, type_die);
24541
24542 add_alignment_attribute (type_die, TREE_TYPE (decl));
24543
24544 add_accessibility_attribute (type_die, decl);
24545
24546 if (DECL_ABSTRACT_P (decl))
24547 equate_decl_number_to_die (decl, type_die);
24548
24549 if (get_AT (type_die, DW_AT_name))
24550 add_pubtype (decl, type_die);
24551 }
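
/* Illustration only: for the C++ naming-typedef case discussed above,

       typedef struct { int i; } foo;

   the expected shape is roughly

       DW_TAG_structure_type            (anonymous struct)
         DW_AT_linkage_name "foo"       (GNU extension, see above)
         DW_TAG_member "i"
       DW_TAG_typedef "foo"
         DW_AT_type -> the structure DIE above

   and, because this is a naming typedef, later lookup_type_die calls for
   the struct type yield the DW_TAG_typedef DIE.  A sketch only.  */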
24552
24553 /* Generate a DIE for a struct, class, enum or union type. */
24554
24555 static void
24556 gen_tagged_type_die (tree type,
24557 dw_die_ref context_die,
24558 enum debug_info_usage usage)
24559 {
24560 int need_pop;
24561
24562 if (type == NULL_TREE
24563 || !is_tagged_type (type))
24564 return;
24565
24566 if (TREE_ASM_WRITTEN (type))
24567 need_pop = 0;
24568 /* If this is a nested type whose containing class hasn't been written
24569 out yet, writing it out will cover this one, too. This does not apply
24570 to instantiations of member class templates; they need to be added to
24571 the containing class as they are generated. FIXME: This hurts the
24572 idea of combining type decls from multiple TUs, since we can't predict
24573 what set of template instantiations we'll get. */
24574 else if (TYPE_CONTEXT (type)
24575 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24576 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24577 {
24578 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24579
24580 if (TREE_ASM_WRITTEN (type))
24581 return;
24582
24583 /* If that failed, attach ourselves to the stub. */
24584 push_decl_scope (TYPE_CONTEXT (type));
24585 context_die = lookup_type_die (TYPE_CONTEXT (type));
24586 need_pop = 1;
24587 }
24588 else if (TYPE_CONTEXT (type) != NULL_TREE
24589 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24590 {
24591 /* If this type is local to a function that hasn't been written
24592 out yet, use a NULL context for now; it will be fixed up in
24593 decls_for_scope. */
24594 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24595 /* A declaration DIE doesn't count; nested types need to go in the
24596 specification. */
24597 if (context_die && is_declaration_die (context_die))
24598 context_die = NULL;
24599 need_pop = 0;
24600 }
24601 else
24602 {
24603 context_die = declare_in_namespace (type, context_die);
24604 need_pop = 0;
24605 }
24606
24607 if (TREE_CODE (type) == ENUMERAL_TYPE)
24608 {
24609 /* This might have been written out by the call to
24610 declare_in_namespace. */
24611 if (!TREE_ASM_WRITTEN (type))
24612 gen_enumeration_type_die (type, context_die);
24613 }
24614 else
24615 gen_struct_or_union_type_die (type, context_die, usage);
24616
24617 if (need_pop)
24618 pop_decl_scope ();
24619
24620 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24621 it up if it is ever completed. gen_*_type_die will set it for us
24622 when appropriate. */
24623 }
24624
24625 /* Generate a type description DIE. */
24626
24627 static void
24628 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24629 enum debug_info_usage usage)
24630 {
24631 struct array_descr_info info;
24632
24633 if (type == NULL_TREE || type == error_mark_node)
24634 return;
24635
24636 if (flag_checking && type)
24637 verify_type (type);
24638
24639 if (TYPE_NAME (type) != NULL_TREE
24640 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24641 && is_redundant_typedef (TYPE_NAME (type))
24642 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24643 /* The DECL of this type is a typedef we don't want to emit debug
24644 info for but we want debug info for its underlying typedef.
24645 This can happen for e.g, the injected-class-name of a C++
24646 type. */
24647 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
24648
24649 /* If TYPE is a typedef type variant, let's generate debug info
24650 for the parent typedef of which TYPE is a variant. */
24651 if (typedef_variant_p (type))
24652 {
24653 if (TREE_ASM_WRITTEN (type))
24654 return;
24655
24656 tree name = TYPE_NAME (type);
24657 tree origin = decl_ultimate_origin (name);
24658 if (origin != NULL && origin != name)
24659 {
24660 gen_decl_die (origin, NULL, NULL, context_die);
24661 return;
24662 }
24663
24664 /* Prevent broken recursion; we can't hand off to the same type. */
24665 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
24666
24667 /* Give typedefs the right scope. */
24668 context_die = scope_die_for (type, context_die);
24669
24670 TREE_ASM_WRITTEN (type) = 1;
24671
24672 gen_decl_die (name, NULL, NULL, context_die);
24673 return;
24674 }
24675
24676 /* If type is an anonymous tagged type named by a typedef, let's
24677 generate debug info for the typedef. */
24678 if (is_naming_typedef_decl (TYPE_NAME (type)))
24679 {
24680 /* Use the DIE of the containing namespace as the parent DIE of
24681 the type description DIE we want to generate. */
24682 if (DECL_CONTEXT (TYPE_NAME (type))
24683 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24684 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24685
24686 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24687 return;
24688 }
24689
24690 if (lang_hooks.types.get_debug_type)
24691 {
24692 tree debug_type = lang_hooks.types.get_debug_type (type);
24693
24694 if (debug_type != NULL_TREE && debug_type != type)
24695 {
24696 gen_type_die_with_usage (debug_type, context_die, usage);
24697 return;
24698 }
24699 }
24700
24701 /* We are going to output a DIE to represent the unqualified version
24702 of this type (i.e. without any const or volatile qualifiers) so
24703 get the main variant (i.e. the unqualified version) of this type
24704 now. (Vectors and arrays are special because the debugging info is in the
24705 cloned type itself. Similarly function/method types can contain extra
24706 ref-qualification). */
24707 if (TREE_CODE (type) == FUNCTION_TYPE
24708 || TREE_CODE (type) == METHOD_TYPE)
24709 {
24710 /* For function/method types, can't use type_main_variant here,
24711 because that can have different ref-qualifiers for C++,
24712 but try to canonicalize. */
24713 tree main = TYPE_MAIN_VARIANT (type);
24714 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24715 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
24716 && check_base_type (t, main)
24717 && check_lang_type (t, type))
24718 {
24719 type = t;
24720 break;
24721 }
24722 }
24723 else if (TREE_CODE (type) != VECTOR_TYPE
24724 && TREE_CODE (type) != ARRAY_TYPE)
24725 type = type_main_variant (type);
24726
24727 /* If this is an array type with hidden descriptor, handle it first. */
24728 if (!TREE_ASM_WRITTEN (type)
24729 && lang_hooks.types.get_array_descr_info)
24730 {
24731 memset (&info, 0, sizeof (info));
24732 if (lang_hooks.types.get_array_descr_info (type, &info))
24733 {
24734 /* Fortran sometimes emits array types with no dimension. */
24735 gcc_assert (info.ndimensions >= 0
24736 && (info.ndimensions
24737 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24738 gen_descr_array_type_die (type, &info, context_die);
24739 TREE_ASM_WRITTEN (type) = 1;
24740 return;
24741 }
24742 }
24743
24744 if (TREE_ASM_WRITTEN (type))
24745 {
24746 /* Variable-length types may be incomplete even if
24747 TREE_ASM_WRITTEN. For such types, fall through to
24748 gen_array_type_die() and possibly fill in
24749 DW_AT_{upper,lower}_bound attributes. */
24750 if ((TREE_CODE (type) != ARRAY_TYPE
24751 && TREE_CODE (type) != RECORD_TYPE
24752 && TREE_CODE (type) != UNION_TYPE
24753 && TREE_CODE (type) != QUAL_UNION_TYPE)
24754 || !variably_modified_type_p (type, NULL))
24755 return;
24756 }
24757
24758 switch (TREE_CODE (type))
24759 {
24760 case ERROR_MARK:
24761 break;
24762
24763 case POINTER_TYPE:
24764 case REFERENCE_TYPE:
24765 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24766 ensures that the gen_type_die recursion will terminate even if the
24767 type is recursive. Recursive types are possible in Ada. */
24768 /* ??? We could perhaps do this for all types before the switch
24769 statement. */
24770 TREE_ASM_WRITTEN (type) = 1;
24771
24772 /* For these types, all that is required is that we output a DIE (or a
24773 set of DIEs) to represent the "basis" type. */
24774 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24775 DINFO_USAGE_IND_USE);
24776 break;
24777
24778 case OFFSET_TYPE:
24779 /* This code is used for C++ pointer-to-data-member types.
24780 Output a description of the relevant class type. */
24781 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24782 DINFO_USAGE_IND_USE);
24783
24784 /* Output a description of the type of the object pointed to. */
24785 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24786 DINFO_USAGE_IND_USE);
24787
24788 /* Now output a DIE to represent this pointer-to-data-member type
24789 itself. */
24790 gen_ptr_to_mbr_type_die (type, context_die);
24791 break;
24792
24793 case FUNCTION_TYPE:
24794 /* Force out return type (in case it wasn't forced out already). */
24795 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24796 DINFO_USAGE_DIR_USE);
24797 gen_subroutine_type_die (type, context_die);
24798 break;
24799
24800 case METHOD_TYPE:
24801 /* Force out return type (in case it wasn't forced out already). */
24802 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24803 DINFO_USAGE_DIR_USE);
24804 gen_subroutine_type_die (type, context_die);
24805 break;
24806
24807 case ARRAY_TYPE:
24808 case VECTOR_TYPE:
24809 gen_array_type_die (type, context_die);
24810 break;
24811
24812 case ENUMERAL_TYPE:
24813 case RECORD_TYPE:
24814 case UNION_TYPE:
24815 case QUAL_UNION_TYPE:
24816 gen_tagged_type_die (type, context_die, usage);
24817 return;
24818
24819 case VOID_TYPE:
24820 case INTEGER_TYPE:
24821 case REAL_TYPE:
24822 case FIXED_POINT_TYPE:
24823 case COMPLEX_TYPE:
24824 case BOOLEAN_TYPE:
24825 case POINTER_BOUNDS_TYPE:
24826 /* No DIEs needed for fundamental types. */
24827 break;
24828
24829 case NULLPTR_TYPE:
24830 case LANG_TYPE:
24831 /* Just use DW_TAG_unspecified_type. */
24832 {
24833 dw_die_ref type_die = lookup_type_die (type);
24834 if (type_die == NULL)
24835 {
24836 tree name = TYPE_IDENTIFIER (type);
24837 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24838 type);
24839 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24840 equate_type_number_to_die (type, type_die);
24841 }
24842 }
24843 break;
24844
24845 default:
24846 if (is_cxx_auto (type))
24847 {
24848 tree name = TYPE_IDENTIFIER (type);
24849 dw_die_ref *die = (name == get_identifier ("auto")
24850 ? &auto_die : &decltype_auto_die);
24851 if (!*die)
24852 {
24853 *die = new_die (DW_TAG_unspecified_type,
24854 comp_unit_die (), NULL_TREE);
24855 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24856 }
24857 equate_type_number_to_die (type, *die);
24858 break;
24859 }
24860 gcc_unreachable ();
24861 }
24862
24863 TREE_ASM_WRITTEN (type) = 1;
24864 }
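
/* Illustration only: gen_type_die_with_usage emits the DIE for the main
   (unqualified) variant of a type; qualifiers are layered on separately by
   modified_type_die, so a type such as "const struct s *" is expected to
   come out roughly as

       DW_TAG_pointer_type -> DW_TAG_const_type -> DW_TAG_structure_type "s"

   A sketch only, assuming a straightforward C translation unit.  */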
24865
24866 static void
24867 gen_type_die (tree type, dw_die_ref context_die)
24868 {
24869 if (type != error_mark_node)
24870 {
24871 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24872 if (flag_checking)
24873 {
24874 dw_die_ref die = lookup_type_die (type);
24875 if (die)
24876 check_die (die);
24877 }
24878 }
24879 }
24880
24881 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24882 things which are local to the given block. */
24883
24884 static void
24885 gen_block_die (tree stmt, dw_die_ref context_die)
24886 {
24887 int must_output_die = 0;
24888 bool inlined_func;
24889
24890 /* Ignore blocks that are NULL. */
24891 if (stmt == NULL_TREE)
24892 return;
24893
24894 inlined_func = inlined_function_outer_scope_p (stmt);
24895
24896 /* If the block is one fragment of a non-contiguous block, do not
24897 process the variables, since they will have been done by the
24898 origin block. Do process subblocks. */
24899 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24900 {
24901 tree sub;
24902
24903 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24904 gen_block_die (sub, context_die);
24905
24906 return;
24907 }
24908
24909 /* Determine if we need to output any Dwarf DIEs at all to represent this
24910 block. */
24911 if (inlined_func)
24912 /* The outer scopes for inlinings *must* always be represented. We
24913 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24914 must_output_die = 1;
24915 else
24916 {
24917 /* Determine if this block directly contains any "significant"
24918 local declarations which we will need to output DIEs for. */
24919 if (debug_info_level > DINFO_LEVEL_TERSE)
24920 /* We are not in terse mode so *any* local declaration counts
24921 as being a "significant" one. */
24922 must_output_die = ((BLOCK_VARS (stmt) != NULL
24923 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24924 && (TREE_USED (stmt)
24925 || TREE_ASM_WRITTEN (stmt)
24926 || BLOCK_ABSTRACT (stmt)));
24927 else if ((TREE_USED (stmt)
24928 || TREE_ASM_WRITTEN (stmt)
24929 || BLOCK_ABSTRACT (stmt))
24930 && !dwarf2out_ignore_block (stmt))
24931 must_output_die = 1;
24932 }
24933
24934 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24935 DIE for any block which contains no significant local declarations at
24936 all. Rather, in such cases we just call `decls_for_scope' so that any
24937 needed Dwarf info for any sub-blocks will get properly generated. Note
24938 that in terse mode, our definition of what constitutes a "significant"
24939 local declaration gets restricted to include only inlined function
24940 instances and local (nested) function definitions. */
24941 if (must_output_die)
24942 {
24943 if (inlined_func)
24944 {
24945 /* If STMT block is abstract, that means we have been called
24946 indirectly from dwarf2out_abstract_function.
24947 That function rightfully marks the descendent blocks (of
24948 the abstract function it is dealing with) as being abstract,
24949 precisely to prevent us from emitting any
24950 DW_TAG_inlined_subroutine DIE as a descendent
24951 of an abstract function instance. So in that case, we should
24952 not call gen_inlined_subroutine_die.
24953
24954 Later though, when cgraph asks dwarf2out to emit info
24955 for the concrete instance of the function decl into which
24956 the concrete instance of STMT got inlined, the latter will lead
24957 to the generation of a DW_TAG_inlined_subroutine DIE. */
24958 if (! BLOCK_ABSTRACT (stmt))
24959 gen_inlined_subroutine_die (stmt, context_die);
24960 }
24961 else
24962 gen_lexical_block_die (stmt, context_die);
24963 }
24964 else
24965 decls_for_scope (stmt, context_die);
24966 }
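
/* Illustration only: given

       static inline int sq (int x) { return x * x; }
       int f (int a) { { int t = sq (a); return t + 1; } }

   the inner braces holding the "significant" local T are expected to yield
   a DW_TAG_lexical_block DIE, while an inlined body of SQ is represented by
   a DW_TAG_inlined_subroutine whose DW_AT_abstract_origin refers to the
   abstract DW_TAG_subprogram for SQ.  Whether the block or the inlining is
   actually emitted depends on the optimization and -g levels; this is only
   a sketch.  */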
24967
24968 /* Process variable DECL (or variable with origin ORIGIN) within
24969 block STMT and add it to CONTEXT_DIE. */
24970 static void
24971 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24972 {
24973 dw_die_ref die;
24974 tree decl_or_origin = decl ? decl : origin;
24975
24976 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24977 die = lookup_decl_die (decl_or_origin);
24978 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24979 {
24980 if (TYPE_DECL_IS_STUB (decl_or_origin))
24981 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24982 else
24983 die = lookup_decl_die (decl_or_origin);
24984 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24985 if (! die && ! early_dwarf)
24986 return;
24987 }
24988 else
24989 die = NULL;
24990
24991 /* Avoid creating DIEs for local typedefs and concrete static variables that
24992 will only be pruned later. */
24993 if ((origin || decl_ultimate_origin (decl))
24994 && (TREE_CODE (decl_or_origin) == TYPE_DECL
24995 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
24996 {
24997 origin = decl_ultimate_origin (decl_or_origin);
24998 if (decl && VAR_P (decl) && die != NULL)
24999 {
25000 die = lookup_decl_die (origin);
25001 if (die != NULL)
25002 equate_decl_number_to_die (decl, die);
25003 }
25004 return;
25005 }
25006
25007 if (die != NULL && die->die_parent == NULL)
25008 add_child_die (context_die, die);
25009 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25010 {
25011 if (early_dwarf)
25012 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25013 stmt, context_die);
25014 }
25015 else
25016 {
25017 if (decl && DECL_P (decl))
25018 {
25019 die = lookup_decl_die (decl);
25020
25021 /* Early created DIEs do not have a parent as the decls refer
25022 to the function as DECL_CONTEXT rather than the BLOCK. */
25023 if (die && die->die_parent == NULL)
25024 {
25025 gcc_assert (in_lto_p);
25026 add_child_die (context_die, die);
25027 }
25028 }
25029
25030 gen_decl_die (decl, origin, NULL, context_die);
25031 }
25032 }
25033
25034 /* Generate all of the decls declared within a given scope and (recursively)
25035 all of its sub-blocks. */
25036
25037 static void
25038 decls_for_scope (tree stmt, dw_die_ref context_die)
25039 {
25040 tree decl;
25041 unsigned int i;
25042 tree subblocks;
25043
25044 /* Ignore NULL blocks. */
25045 if (stmt == NULL_TREE)
25046 return;
25047
25048 /* Output the DIEs to represent all of the data objects and typedefs
25049 declared directly within this block but not within any nested
25050 sub-blocks. Also, nested function and tag DIEs have been
25051 generated with a parent of NULL; fix that up now. We don't
25052 have to do this if we're at -g1. */
25053 if (debug_info_level > DINFO_LEVEL_TERSE)
25054 {
25055 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25056 process_scope_var (stmt, decl, NULL_TREE, context_die);
25057 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25058 origin - avoid doing this twice as we have no good way to see
25059 if we've done it once already. */
25060 if (! early_dwarf)
25061 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25062 {
25063 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25064 if (decl == current_function_decl)
25065 /* Ignore declarations of the current function: although they
25066 are declarations, gen_subprogram_die would treat them
25067 as definitions again because they are equal to
25068 current_function_decl, and would endlessly recurse. */;
25069 else if (TREE_CODE (decl) == FUNCTION_DECL)
25070 process_scope_var (stmt, decl, NULL_TREE, context_die);
25071 else
25072 process_scope_var (stmt, NULL_TREE, decl, context_die);
25073 }
25074 }
25075
25076 /* Even if we're at -g1, we need to process the subblocks in order to get
25077 inlined call information. */
25078
25079 /* Output the DIEs to represent all sub-blocks (and the items declared
25080 therein) of this block. */
25081 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25082 subblocks != NULL;
25083 subblocks = BLOCK_CHAIN (subblocks))
25084 gen_block_die (subblocks, context_die);
25085 }
25086
25087 /* Is this a typedef we can avoid emitting? */
25088
25089 bool
25090 is_redundant_typedef (const_tree decl)
25091 {
25092 if (TYPE_DECL_IS_STUB (decl))
25093 return true;
25094
25095 if (DECL_ARTIFICIAL (decl)
25096 && DECL_CONTEXT (decl)
25097 && is_tagged_type (DECL_CONTEXT (decl))
25098 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25099 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25100 /* Also ignore the artificial member typedef for the class name. */
25101 return true;
25102
25103 return false;
25104 }
25105
25106 /* Return TRUE if TYPE is a typedef that names a type for linkage
25107 purposes. This kind of typedefs is produced by the C++ FE for
25108 constructs like:
25109
25110 typedef struct {...} foo;
25111
25112 In that case, there is no typedef variant type produced for foo.
25113 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25114 struct type. */
25115
25116 static bool
25117 is_naming_typedef_decl (const_tree decl)
25118 {
25119 if (decl == NULL_TREE
25120 || TREE_CODE (decl) != TYPE_DECL
25121 || DECL_NAMELESS (decl)
25122 || !is_tagged_type (TREE_TYPE (decl))
25123 || DECL_IS_BUILTIN (decl)
25124 || is_redundant_typedef (decl)
25125 /* It looks like Ada produces TYPE_DECLs that are very similar
25126 to C++ naming typedefs but that have different
25127 semantics. Let's be specific to C++ for now. */
25128 || !is_cxx (decl))
25129 return FALSE;
25130
25131 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25132 && TYPE_NAME (TREE_TYPE (decl)) == decl
25133 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25134 != TYPE_NAME (TREE_TYPE (decl))));
25135 }
25136
25137 /* Looks up the DIE for a context. */
25138
25139 static inline dw_die_ref
25140 lookup_context_die (tree context)
25141 {
25142 if (context)
25143 {
25144 /* Find die that represents this context. */
25145 if (TYPE_P (context))
25146 {
25147 context = TYPE_MAIN_VARIANT (context);
25148 dw_die_ref ctx = lookup_type_die (context);
25149 if (!ctx)
25150 return NULL;
25151 return strip_naming_typedef (context, ctx);
25152 }
25153 else
25154 return lookup_decl_die (context);
25155 }
25156 return comp_unit_die ();
25157 }
25158
25159 /* Returns the DIE for a context. */
25160
25161 static inline dw_die_ref
25162 get_context_die (tree context)
25163 {
25164 if (context)
25165 {
25166 /* Find die that represents this context. */
25167 if (TYPE_P (context))
25168 {
25169 context = TYPE_MAIN_VARIANT (context);
25170 return strip_naming_typedef (context, force_type_die (context));
25171 }
25172 else
25173 return force_decl_die (context);
25174 }
25175 return comp_unit_die ();
25176 }
25177
25178 /* Returns the DIE for decl. A DIE will always be returned. */
25179
25180 static dw_die_ref
25181 force_decl_die (tree decl)
25182 {
25183 dw_die_ref decl_die;
25184 unsigned saved_external_flag;
25185 tree save_fn = NULL_TREE;
25186 decl_die = lookup_decl_die (decl);
25187 if (!decl_die)
25188 {
25189 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25190
25191 decl_die = lookup_decl_die (decl);
25192 if (decl_die)
25193 return decl_die;
25194
25195 switch (TREE_CODE (decl))
25196 {
25197 case FUNCTION_DECL:
25198 /* Clear current_function_decl, so that gen_subprogram_die thinks
25199 that this is a declaration. At this point, we just want to force
25200 declaration die. */
25201 save_fn = current_function_decl;
25202 current_function_decl = NULL_TREE;
25203 gen_subprogram_die (decl, context_die);
25204 current_function_decl = save_fn;
25205 break;
25206
25207 case VAR_DECL:
25208 /* Set external flag to force declaration die. Restore it after
25209 gen_decl_die() call. */
25210 saved_external_flag = DECL_EXTERNAL (decl);
25211 DECL_EXTERNAL (decl) = 1;
25212 gen_decl_die (decl, NULL, NULL, context_die);
25213 DECL_EXTERNAL (decl) = saved_external_flag;
25214 break;
25215
25216 case NAMESPACE_DECL:
25217 if (dwarf_version >= 3 || !dwarf_strict)
25218 dwarf2out_decl (decl);
25219 else
25220 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25221 decl_die = comp_unit_die ();
25222 break;
25223
25224 case TRANSLATION_UNIT_DECL:
25225 decl_die = comp_unit_die ();
25226 break;
25227
25228 default:
25229 gcc_unreachable ();
25230 }
25231
25232 /* We should be able to find the DIE now. */
25233 if (!decl_die)
25234 decl_die = lookup_decl_die (decl);
25235 gcc_assert (decl_die);
25236 }
25237
25238 return decl_die;
25239 }
25240
25241 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25242 always returned. */
25243
25244 static dw_die_ref
25245 force_type_die (tree type)
25246 {
25247 dw_die_ref type_die;
25248
25249 type_die = lookup_type_die (type);
25250 if (!type_die)
25251 {
25252 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25253
25254 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25255 false, context_die);
25256 gcc_assert (type_die);
25257 }
25258 return type_die;
25259 }
25260
25261 /* Force out any required namespaces to be able to output DECL,
25262 and return the new context_die for it, if it's changed. */
25263
25264 static dw_die_ref
25265 setup_namespace_context (tree thing, dw_die_ref context_die)
25266 {
25267 tree context = (DECL_P (thing)
25268 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25269 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25270 /* Force out the namespace. */
25271 context_die = force_decl_die (context);
25272
25273 return context_die;
25274 }
25275
25276 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25277 type) within its namespace, if appropriate.
25278
25279 For compatibility with older debuggers, namespace DIEs only contain
25280 declarations; all definitions are emitted at CU scope, with
25281 DW_AT_specification pointing to the declaration (like with class
25282 members). */
25283
25284 static dw_die_ref
25285 declare_in_namespace (tree thing, dw_die_ref context_die)
25286 {
25287 dw_die_ref ns_context;
25288
25289 if (debug_info_level <= DINFO_LEVEL_TERSE)
25290 return context_die;
25291
25292 /* External declarations in the local scope only need to be emitted
25293 once, not once in the namespace and once in the scope.
25294
25295 This avoids declaring the `extern' below in the
25296 namespace DIE as well as in the innermost scope:
25297
25298 namespace S
25299 {
25300 int i=5;
25301 int foo()
25302 {
25303 int i=8;
25304 extern int i;
25305 return i;
25306 }
25307 }
25308 */
25309 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25310 return context_die;
25311
25312 /* If this decl is from an inlined function, then don't try to emit it in its
25313 namespace, as we will get confused. It would have already been emitted
25314 when the abstract instance of the inline function was emitted anyway. */
25315 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25316 return context_die;
25317
25318 ns_context = setup_namespace_context (thing, context_die);
25319
25320 if (ns_context != context_die)
25321 {
25322 if (is_fortran ())
25323 return ns_context;
25324 if (DECL_P (thing))
25325 gen_decl_die (thing, NULL, NULL, ns_context);
25326 else
25327 gen_type_die (thing, ns_context);
25328 }
25329 return context_die;
25330 }
25331
25332 /* Generate a DIE for a namespace or namespace alias. */
25333
25334 static void
25335 gen_namespace_die (tree decl, dw_die_ref context_die)
25336 {
25337 dw_die_ref namespace_die;
25338
25339 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25340 they are an alias of. */
25341 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25342 {
25343 /* Output a real namespace or module. */
25344 context_die = setup_namespace_context (decl, comp_unit_die ());
25345 namespace_die = new_die (is_fortran ()
25346 ? DW_TAG_module : DW_TAG_namespace,
25347 context_die, decl);
25348 /* For Fortran modules defined in different CU don't add src coords. */
25349 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25350 {
25351 const char *name = dwarf2_name (decl, 0);
25352 if (name)
25353 add_name_attribute (namespace_die, name);
25354 }
25355 else
25356 add_name_and_src_coords_attributes (namespace_die, decl);
25357 if (DECL_EXTERNAL (decl))
25358 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25359 equate_decl_number_to_die (decl, namespace_die);
25360 }
25361 else
25362 {
25363 /* Output a namespace alias. */
25364
25365 /* Force out the namespace we are an alias of, if necessary. */
25366 dw_die_ref origin_die
25367 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25368
25369 if (DECL_FILE_SCOPE_P (decl)
25370 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25371 context_die = setup_namespace_context (decl, comp_unit_die ());
25372 /* Now create the namespace alias DIE. */
25373 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25374 add_name_and_src_coords_attributes (namespace_die, decl);
25375 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25376 equate_decl_number_to_die (decl, namespace_die);
25377 }
25378 if ((dwarf_version >= 5 || !dwarf_strict)
25379 && lang_hooks.decls.decl_dwarf_attribute (decl,
25380 DW_AT_export_symbols) == 1)
25381 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25382
25383 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25384 if (want_pubnames ())
25385 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25386 }
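
/* Illustration only: for C++ source like

       namespace real { int v; }
       namespace alias = real;

   "real" is expected to get a DW_TAG_namespace DIE (a child of the CU DIE),
   and "alias" a DW_TAG_imported_declaration whose DW_AT_import refers to
   that namespace DIE, as described above.  For Fortran, DW_TAG_module is
   used instead of DW_TAG_namespace.  A sketch only.  */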
25387
25388 /* Generate Dwarf debug information for a decl described by DECL.
25389 The return value is currently only meaningful for PARM_DECLs,
25390 for all other decls it returns NULL.
25391
25392 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25393 It can be NULL otherwise. */
25394
25395 static dw_die_ref
25396 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25397 dw_die_ref context_die)
25398 {
25399 tree decl_or_origin = decl ? decl : origin;
25400 tree class_origin = NULL, ultimate_origin;
25401
25402 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25403 return NULL;
25404
25405 /* Ignore pointer bounds decls. */
25406 if (DECL_P (decl_or_origin)
25407 && TREE_TYPE (decl_or_origin)
25408 && POINTER_BOUNDS_P (decl_or_origin))
25409 return NULL;
25410
25411 switch (TREE_CODE (decl_or_origin))
25412 {
25413 case ERROR_MARK:
25414 break;
25415
25416 case CONST_DECL:
25417 if (!is_fortran () && !is_ada ())
25418 {
25419 /* The individual enumerators of an enum type get output when we output
25420 the Dwarf representation of the relevant enum type itself. */
25421 break;
25422 }
25423
25424 /* Emit its type. */
25425 gen_type_die (TREE_TYPE (decl), context_die);
25426
25427 /* And its containing namespace. */
25428 context_die = declare_in_namespace (decl, context_die);
25429
25430 gen_const_die (decl, context_die);
25431 break;
25432
25433 case FUNCTION_DECL:
25434 #if 0
25435 /* FIXME */
25436 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25437 on local redeclarations of global functions. That seems broken. */
25438 if (current_function_decl != decl)
25439 /* This is only a declaration. */;
25440 #endif
25441
25442 /* We should have abstract copies already and should not generate
25443 stray type DIEs in late LTO dumping. */
25444 if (! early_dwarf)
25445 ;
25446
25447 /* If we're emitting a clone, emit info for the abstract instance. */
25448 else if (origin || DECL_ORIGIN (decl) != decl)
25449 dwarf2out_abstract_function (origin
25450 ? DECL_ORIGIN (origin)
25451 : DECL_ABSTRACT_ORIGIN (decl));
25452
25453 /* If we're emitting a possibly inlined function emit it as
25454 abstract instance. */
25455 else if (cgraph_function_possibly_inlined_p (decl)
25456 && ! DECL_ABSTRACT_P (decl)
25457 && ! class_or_namespace_scope_p (context_die)
25458 /* dwarf2out_abstract_function won't emit a die if this is just
25459 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25460 that case, because that works only if we have a die. */
25461 && DECL_INITIAL (decl) != NULL_TREE)
25462 dwarf2out_abstract_function (decl);
25463
25464 /* Otherwise we're emitting the primary DIE for this decl. */
25465 else if (debug_info_level > DINFO_LEVEL_TERSE)
25466 {
25467 /* Before we describe the FUNCTION_DECL itself, make sure that we
25468 have its containing type. */
25469 if (!origin)
25470 origin = decl_class_context (decl);
25471 if (origin != NULL_TREE)
25472 gen_type_die (origin, context_die);
25473
25474 /* And its return type. */
25475 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25476
25477 /* And its virtual context. */
25478 if (DECL_VINDEX (decl) != NULL_TREE)
25479 gen_type_die (DECL_CONTEXT (decl), context_die);
25480
25481 /* Make sure we have a member DIE for decl. */
25482 if (origin != NULL_TREE)
25483 gen_type_die_for_member (origin, decl, context_die);
25484
25485 /* And its containing namespace. */
25486 context_die = declare_in_namespace (decl, context_die);
25487 }
25488
25489 /* Now output a DIE to represent the function itself. */
25490 if (decl)
25491 gen_subprogram_die (decl, context_die);
25492 break;
25493
25494 case TYPE_DECL:
25495 /* If we are in terse mode, don't generate any DIEs to represent any
25496 actual typedefs. */
25497 if (debug_info_level <= DINFO_LEVEL_TERSE)
25498 break;
25499
25500 /* In the special case of a TYPE_DECL node representing the declaration
25501 of some type tag, if the given TYPE_DECL is marked as having been
25502 instantiated from some other (original) TYPE_DECL node (e.g. one which
25503 was generated within the original definition of an inline function) we
25504 used to generate a special (abbreviated) DW_TAG_structure_type,
25505 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25506 should be actually referencing those DIEs, as variable DIEs with that
25507 type would be emitted already in the abstract origin, so it was always
25508 removed during unused type pruning. Don't add anything in this
25509 case. */
25510 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25511 break;
25512
25513 if (is_redundant_typedef (decl))
25514 gen_type_die (TREE_TYPE (decl), context_die);
25515 else
25516 /* Output a DIE to represent the typedef itself. */
25517 gen_typedef_die (decl, context_die);
25518 break;
25519
25520 case LABEL_DECL:
25521 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25522 gen_label_die (decl, context_die);
25523 break;
25524
25525 case VAR_DECL:
25526 case RESULT_DECL:
25527 /* If we are in terse mode, don't generate any DIEs to represent any
25528 variable declarations or definitions. */
25529 if (debug_info_level <= DINFO_LEVEL_TERSE)
25530 break;
25531
25532 /* Avoid generating stray type DIEs during late dwarf dumping.
25533 All types have been dumped early. */
25534 if (early_dwarf
25535 /* ??? But in LTRANS we cannot annotate early created variably
25536 modified type DIEs without copying them and adjusting all
25537 references to them. Dump them again as happens for inlining
25538 which copies both the decl and the types. */
25539 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25540 in VLA bound information for example. */
25541 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25542 current_function_decl)))
25543 {
25544 /* Output any DIEs that are needed to specify the type of this data
25545 object. */
25546 if (decl_by_reference_p (decl_or_origin))
25547 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25548 else
25549 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25550 }
25551
25552 if (early_dwarf)
25553 {
25554 /* And its containing type. */
25555 class_origin = decl_class_context (decl_or_origin);
25556 if (class_origin != NULL_TREE)
25557 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25558
25559 /* And its containing namespace. */
25560 context_die = declare_in_namespace (decl_or_origin, context_die);
25561 }
25562
25563 /* Now output the DIE to represent the data object itself. This gets
25564 complicated because of the possibility that the VAR_DECL really
25565 represents an inlined instance of a formal parameter for an inline
25566 function. */
25567 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25568 if (ultimate_origin != NULL_TREE
25569 && TREE_CODE (ultimate_origin) == PARM_DECL)
25570 gen_formal_parameter_die (decl, origin,
25571 true /* Emit name attribute. */,
25572 context_die);
25573 else
25574 gen_variable_die (decl, origin, context_die);
25575 break;
25576
25577 case FIELD_DECL:
25578 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
25579 /* Ignore the nameless fields that are used to skip bits but handle C++
25580 anonymous unions and structs. */
25581 if (DECL_NAME (decl) != NULL_TREE
25582 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25583 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25584 {
25585 gen_type_die (member_declared_type (decl), context_die);
25586 gen_field_die (decl, ctx, context_die);
25587 }
25588 break;
25589
25590 case PARM_DECL:
25591 /* Avoid generating stray type DIEs during late dwarf dumping.
25592 All types have been dumped early. */
25593 if (early_dwarf
25594 /* ??? But in LTRANS we cannot annotate early created variably
25595 modified type DIEs without copying them and adjusting all
25596 references to them. Dump them again as happens for inlining
25597 which copies both the decl and the types. */
25598 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25599 in VLA bound information for example. */
25600 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25601 current_function_decl)))
25602 {
25603 if (DECL_BY_REFERENCE (decl_or_origin))
25604 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25605 else
25606 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25607 }
25608 return gen_formal_parameter_die (decl, origin,
25609 true /* Emit name attribute. */,
25610 context_die);
25611
25612 case NAMESPACE_DECL:
25613 if (dwarf_version >= 3 || !dwarf_strict)
25614 gen_namespace_die (decl, context_die);
25615 break;
25616
25617 case IMPORTED_DECL:
25618 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25619 DECL_CONTEXT (decl), context_die);
25620 break;
25621
25622 case NAMELIST_DECL:
25623 gen_namelist_decl (DECL_NAME (decl), context_die,
25624 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25625 break;
25626
25627 default:
25628 /* Probably some frontend-internal decl. Assume we don't care. */
25629 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25630 break;
25631 }
25632
25633 return NULL;
25634 }
25635 \f
25636 /* Output initial debug information for global DECL. Called at the
25637 end of the parsing process.
25638
25639 This is the initial debug generation process. As such, the DIEs
25640 generated may be incomplete. A later debug generation pass
25641 (dwarf2out_late_global_decl) will augment the information generated
25642 in this pass (e.g., with complete location info). */
25643
25644 static void
25645 dwarf2out_early_global_decl (tree decl)
25646 {
25647 set_early_dwarf s;
25648
25649 /* gen_decl_die() will set DECL_ABSTRACT because
25650 cgraph_function_possibly_inlined_p() returns true. This in
25651 turn will cause DW_AT_inline attributes to be set.
25652
25653 This happens because at early dwarf generation, there is no
25654 cgraph information, causing cgraph_function_possibly_inlined_p()
25655 to return true. Trick cgraph_function_possibly_inlined_p()
25656 while we generate dwarf early. */
25657 bool save = symtab->global_info_ready;
25658 symtab->global_info_ready = true;
25659
25660 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25661 other DECLs and they can point to template types or other things
25662 that dwarf2out can't handle when done via dwarf2out_decl. */
25663 if (TREE_CODE (decl) != TYPE_DECL
25664 && TREE_CODE (decl) != PARM_DECL)
25665 {
25666 if (TREE_CODE (decl) == FUNCTION_DECL)
25667 {
25668 tree save_fndecl = current_function_decl;
25669
25670 /* For nested functions, make sure we have DIEs for the parents first
25671 so that all nested DIEs are generated at the proper scope in the
25672 first shot. */
25673 tree context = decl_function_context (decl);
25674 if (context != NULL)
25675 {
25676 dw_die_ref context_die = lookup_decl_die (context);
25677 current_function_decl = context;
25678
25679 /* Avoid emitting DIEs multiple times, but still process CONTEXT
25680 enough so that it lands in its own context. This avoids type
25681 pruning issues later on. */
25682 if (context_die == NULL || is_declaration_die (context_die))
25683 dwarf2out_decl (context);
25684 }
25685
25686 /* Emit the abstract origin of a function first. This happens
25687 with C++ constructor clones, for example, and keeps
25688 dwarf2out_abstract_function happy, since it requires the early
25689 DIE of the abstract instance to be present. */
25690 tree origin = DECL_ABSTRACT_ORIGIN (decl);
25691 dw_die_ref origin_die;
25692 if (origin != NULL
25693 /* Do not emit the DIE multiple times but make sure to
25694 process it fully here in case we just saw a declaration. */
25695 && ((origin_die = lookup_decl_die (origin)) == NULL
25696 || is_declaration_die (origin_die)))
25697 {
25698 current_function_decl = origin;
25699 dwarf2out_decl (origin);
25700 }
25701
25702 /* Emit the DIE for decl but avoid doing that multiple times. */
25703 dw_die_ref old_die;
25704 if ((old_die = lookup_decl_die (decl)) == NULL
25705 || is_declaration_die (old_die))
25706 {
25707 current_function_decl = decl;
25708 dwarf2out_decl (decl);
25709 }
25710
25711 current_function_decl = save_fndecl;
25712 }
25713 else
25714 dwarf2out_decl (decl);
25715 }
25716 symtab->global_info_ready = save;
25717 }
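/* Editorial sketch, not part of the original source: for a GNU C nested
   function such as

       void outer (void)
       {
         void inner (void) { }
         inner ();
       }

   decl_function_context (inner) is OUTER, so the code above first calls
   dwarf2out_decl (outer) unless OUTER already has a non-declaration DIE;
   INNER's DIE is then created inside OUTER's DIE instead of directly under
   the compilation unit.  The function names are purely illustrative.  */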
25718
25719 /* Output debug information for global decl DECL. Called from
25720 toplev.c after compilation proper has finished. */
25721
25722 static void
25723 dwarf2out_late_global_decl (tree decl)
25724 {
25725 /* Fill in any location information we were unable to determine
25726 on the first pass. */
25727 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25728 {
25729 dw_die_ref die = lookup_decl_die (decl);
25730
25731 /* We may have to generate early debug late for LTO in case debug
25732 was not enabled at compile-time or the target doesn't support
25733 the LTO early debug scheme. */
25734 if (! die && in_lto_p)
25735 {
25736 dwarf2out_decl (decl);
25737 die = lookup_decl_die (decl);
25738 }
25739
25740 if (die)
25741 {
25742 /* We get called via the symtab code invoking late_global_decl
25743 for symbols that are optimized out. Do not add locations
25744 for those, except if they have a DECL_VALUE_EXPR, in which case
25745 they are relevant for debuggers. */
25746 varpool_node *node = varpool_node::get (decl);
25747 if ((! node || ! node->definition) && ! DECL_HAS_VALUE_EXPR_P (decl))
25748 tree_add_const_value_attribute_for_decl (die, decl);
25749 else
25750 add_location_or_const_value_attribute (die, decl, false);
25751 }
25752 }
25753 }
25754
25755 /* Output debug information for type decl DECL. Called from toplev.c
25756 and from language front ends (to record built-in types). */
25757 static void
25758 dwarf2out_type_decl (tree decl, int local)
25759 {
25760 if (!local)
25761 {
25762 set_early_dwarf s;
25763 dwarf2out_decl (decl);
25764 }
25765 }
25766
25767 /* Output debug information for imported module or decl DECL.
25768 NAME is non-NULL name in the lexical block if the decl has been renamed.
25769 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
25770 that DECL belongs to.
25771 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
25772 static void
25773 dwarf2out_imported_module_or_decl_1 (tree decl,
25774 tree name,
25775 tree lexical_block,
25776 dw_die_ref lexical_block_die)
25777 {
25778 expanded_location xloc;
25779 dw_die_ref imported_die = NULL;
25780 dw_die_ref at_import_die;
25781
25782 if (TREE_CODE (decl) == IMPORTED_DECL)
25783 {
25784 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25785 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25786 gcc_assert (decl);
25787 }
25788 else
25789 xloc = expand_location (input_location);
25790
25791 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25792 {
25793 at_import_die = force_type_die (TREE_TYPE (decl));
25794 /* For namespace N { typedef void T; } using N::T; base_type_die
25795 returns NULL, but DW_TAG_imported_declaration requires
25796 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
25797 if (!at_import_die)
25798 {
25799 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25800 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25801 at_import_die = lookup_type_die (TREE_TYPE (decl));
25802 gcc_assert (at_import_die);
25803 }
25804 }
25805 else
25806 {
25807 at_import_die = lookup_decl_die (decl);
25808 if (!at_import_die)
25809 {
25810 /* If we're trying to avoid duplicate debug info, we may not have
25811 emitted the member decl for this field. Emit it now. */
25812 if (TREE_CODE (decl) == FIELD_DECL)
25813 {
25814 tree type = DECL_CONTEXT (decl);
25815
25816 if (TYPE_CONTEXT (type)
25817 && TYPE_P (TYPE_CONTEXT (type))
25818 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25819 DINFO_USAGE_DIR_USE))
25820 return;
25821 gen_type_die_for_member (type, decl,
25822 get_context_die (TYPE_CONTEXT (type)));
25823 }
25824 if (TREE_CODE (decl) == NAMELIST_DECL)
25825 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25826 get_context_die (DECL_CONTEXT (decl)),
25827 NULL_TREE);
25828 else
25829 at_import_die = force_decl_die (decl);
25830 }
25831 }
25832
25833 if (TREE_CODE (decl) == NAMESPACE_DECL)
25834 {
25835 if (dwarf_version >= 3 || !dwarf_strict)
25836 imported_die = new_die (DW_TAG_imported_module,
25837 lexical_block_die,
25838 lexical_block);
25839 else
25840 return;
25841 }
25842 else
25843 imported_die = new_die (DW_TAG_imported_declaration,
25844 lexical_block_die,
25845 lexical_block);
25846
25847 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25848 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25849 if (debug_column_info && xloc.column)
25850 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25851 if (name)
25852 add_AT_string (imported_die, DW_AT_name,
25853 IDENTIFIER_POINTER (name));
25854 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25855 }
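/* Editorial sketch, not part of the original source: for C++ code such as

       namespace N { int x; }
       using N::x;          // DW_TAG_imported_declaration, DW_AT_import -> DIE of N::x
       using namespace N;   // DW_TAG_imported_module, DW_AT_import -> DIE of namespace N

   the branches above pick DW_TAG_imported_module only for a NAMESPACE_DECL
   (and only for DWARF 3+ or non-strict DWARF), and DW_TAG_imported_declaration
   otherwise, with DW_AT_decl_file/line/column describing the use site.  */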
25856
25857 /* Output debug information for imported module or decl DECL.
25858 NAME is non-NULL name in context if the decl has been renamed.
25859 CHILD is true if decl is one of the renamed decls as part of
25860 importing whole module.
25861 IMPLICIT is set if this hook is called for an implicit import
25862 such as inline namespace. */
25863
25864 static void
25865 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25866 bool child, bool implicit)
25867 {
25868 /* dw_die_ref at_import_die; */
25869 dw_die_ref scope_die;
25870
25871 if (debug_info_level <= DINFO_LEVEL_TERSE)
25872 return;
25873
25874 gcc_assert (decl);
25875
25876 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
25877 should be enough; for DWARF4 and older, even if we emit
25878 DW_AT_export_symbols as an extension, add the implicit
25879 DW_TAG_imported_module anyway for consumers unaware of it. */
25880 if (implicit
25881 && dwarf_version >= 5
25882 && lang_hooks.decls.decl_dwarf_attribute (decl,
25883 DW_AT_export_symbols) == 1)
25884 return;
25885
25886 set_early_dwarf s;
25887
25888 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
25889 two DIEs: one for the imported decl itself (referenced via DW_AT_import)
25890 and one for the scope that receives the import. */
25891
25892 /* Get the scope die for decl context. Use comp_unit_die for global module
25893 or decl. If die is not found for non globals, force new die. */
25894 if (context
25895 && TYPE_P (context)
25896 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25897 return;
25898
25899 scope_die = get_context_die (context);
25900
25901 if (child)
25902 {
25903 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25904 there is nothing we can do, here. */
25905 if (dwarf_version < 3 && dwarf_strict)
25906 return;
25907
25908 gcc_assert (scope_die->die_child);
25909 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25910 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25911 scope_die = scope_die->die_child;
25912 }
25913
25914 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
25915 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25916 }
25917
25918 /* Output debug information for namelists. */
25919
25920 static dw_die_ref
25921 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25922 {
25923 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25924 tree value;
25925 unsigned i;
25926
25927 if (debug_info_level <= DINFO_LEVEL_TERSE)
25928 return NULL;
25929
25930 gcc_assert (scope_die != NULL);
25931 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25932 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25933
25934 /* If there are no item_decls, we have a nondefining namelist, e.g.
25935 with USE association; hence, set DW_AT_declaration. */
25936 if (item_decls == NULL_TREE)
25937 {
25938 add_AT_flag (nml_die, DW_AT_declaration, 1);
25939 return nml_die;
25940 }
25941
25942 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25943 {
25944 nml_item_ref_die = lookup_decl_die (value);
25945 if (!nml_item_ref_die)
25946 nml_item_ref_die = force_decl_die (value);
25947
25948 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25949 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25950 }
25951 return nml_die;
25952 }
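/* Editorial sketch, not part of the original source: a Fortran namelist such as

       integer :: i, j
       namelist /cfg/ i, j

   reaches this function with ITEM_DECLS holding the decls of I and J, so a
   DW_TAG_namelist DIE named "cfg" with two DW_TAG_namelist_item children is
   built; when the namelist is only known through USE association, ITEM_DECLS
   is NULL_TREE and a declaration-only DIE is emitted instead.  The names used
   here are made up.  */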
25953
25954
25955 /* Write the debugging output for DECL. */
25956
25957 static void
25958 dwarf2out_decl (tree decl)
25959 {
25960 dw_die_ref context_die = comp_unit_die ();
25961
25962 switch (TREE_CODE (decl))
25963 {
25964 case ERROR_MARK:
25965 return;
25966
25967 case FUNCTION_DECL:
25968 /* If we're a nested function, initially use a parent of NULL; if we're
25969 a plain function, this will be fixed up in decls_for_scope. If
25970 we're a method, it will be ignored, since we already have a DIE. */
25971 if (decl_function_context (decl)
25972 /* But if we're in terse mode, we don't care about scope. */
25973 && debug_info_level > DINFO_LEVEL_TERSE)
25974 context_die = NULL;
25975 break;
25976
25977 case VAR_DECL:
25978 /* For local statics lookup proper context die. */
25979 if (local_function_static (decl))
25980 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25981
25982 /* If we are in terse mode, don't generate any DIEs to represent any
25983 variable declarations or definitions. */
25984 if (debug_info_level <= DINFO_LEVEL_TERSE)
25985 return;
25986 break;
25987
25988 case CONST_DECL:
25989 if (debug_info_level <= DINFO_LEVEL_TERSE)
25990 return;
25991 if (!is_fortran () && !is_ada ())
25992 return;
25993 if (TREE_STATIC (decl) && decl_function_context (decl))
25994 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25995 break;
25996
25997 case NAMESPACE_DECL:
25998 case IMPORTED_DECL:
25999 if (debug_info_level <= DINFO_LEVEL_TERSE)
26000 return;
26001 if (lookup_decl_die (decl) != NULL)
26002 return;
26003 break;
26004
26005 case TYPE_DECL:
26006 /* Don't emit stubs for types unless they are needed by other DIEs. */
26007 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26008 return;
26009
26010 /* Don't bother trying to generate any DIEs to represent any of the
26011 normal built-in types for the language we are compiling. */
26012 if (DECL_IS_BUILTIN (decl))
26013 return;
26014
26015 /* If we are in terse mode, don't generate any DIEs for types. */
26016 if (debug_info_level <= DINFO_LEVEL_TERSE)
26017 return;
26018
26019 /* If we're a function-scope tag, initially use a parent of NULL;
26020 this will be fixed up in decls_for_scope. */
26021 if (decl_function_context (decl))
26022 context_die = NULL;
26023
26024 break;
26025
26026 case NAMELIST_DECL:
26027 break;
26028
26029 default:
26030 return;
26031 }
26032
26033 gen_decl_die (decl, NULL, NULL, context_die);
26034
26035 if (flag_checking)
26036 {
26037 dw_die_ref die = lookup_decl_die (decl);
26038 if (die)
26039 check_die (die);
26040 }
26041 }
26042
26043 /* Write the debugging output for DECL. */
26044
26045 static void
26046 dwarf2out_function_decl (tree decl)
26047 {
26048 dwarf2out_decl (decl);
26049 call_arg_locations = NULL;
26050 call_arg_loc_last = NULL;
26051 call_site_count = -1;
26052 tail_call_site_count = -1;
26053 decl_loc_table->empty ();
26054 cached_dw_loc_list_table->empty ();
26055 }
26056
26057 /* Output a marker (i.e. a label) for the beginning of the generated code for
26058 a lexical block. */
26059
26060 static void
26061 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26062 unsigned int blocknum)
26063 {
26064 switch_to_section (current_function_section ());
26065 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26066 }
26067
26068 /* Output a marker (i.e. a label) for the end of the generated code for a
26069 lexical block. */
26070
26071 static void
26072 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26073 {
26074 switch_to_section (current_function_section ());
26075 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26076 }
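/* Editorial note, not part of the original source: on a typical ELF target
   these two hooks bracket the code of each lexical block with labels roughly
   like

       .LBB4:        (emitted by dwarf2out_begin_block for blocknum 4)
         ... code generated for the block ...
       .LBE4:        (emitted by dwarf2out_end_block)

   and those labels later serve as the bounds of the corresponding
   DW_TAG_lexical_block DIE.  The exact spelling depends on
   BLOCK_BEGIN_LABEL, BLOCK_END_LABEL and ASM_OUTPUT_DEBUG_LABEL.  */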
26077
26078 /* Returns true if it is appropriate not to emit any debugging
26079 information for BLOCK, because it doesn't contain any instructions.
26080
26081 Don't allow this for blocks with nested functions or local classes
26082 as we would end up with orphans, and in the presence of scheduling
26083 we may end up calling them anyway. */
26084
26085 static bool
26086 dwarf2out_ignore_block (const_tree block)
26087 {
26088 tree decl;
26089 unsigned int i;
26090
26091 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26092 if (TREE_CODE (decl) == FUNCTION_DECL
26093 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26094 return false;
26095 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26096 {
26097 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26098 if (TREE_CODE (decl) == FUNCTION_DECL
26099 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26100 return false;
26101 }
26102
26103 return true;
26104 }
26105
26106 /* Hash table routines for file_hash. */
26107
26108 bool
26109 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26110 {
26111 return filename_cmp (p1->filename, p2) == 0;
26112 }
26113
26114 hashval_t
26115 dwarf_file_hasher::hash (dwarf_file_data *p)
26116 {
26117 return htab_hash_string (p->filename);
26118 }
26119
26120 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26121 dwarf2out.c) and return its "index". The index of each (known) filename is
26122 just a unique number which is associated with only that one filename. We
26123 need such numbers for the sake of generating labels (in the .debug_sfnames
26124 section) and references to those files numbers (in the .debug_srcinfo
26125 and .debug_macinfo sections). If the filename given as an argument is not
26126 found in our current list, add it to the list and assign it the next
26127 available unique index number. */
26128
26129 static struct dwarf_file_data *
26130 lookup_filename (const char *file_name)
26131 {
26132 struct dwarf_file_data * created;
26133
26134 if (!file_name)
26135 return NULL;
26136
26137 dwarf_file_data **slot
26138 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26139 INSERT);
26140 if (*slot)
26141 return *slot;
26142
26143 created = ggc_alloc<dwarf_file_data> ();
26144 created->filename = file_name;
26145 created->emitted_number = 0;
26146 *slot = created;
26147 return created;
26148 }
26149
26150 /* If the assembler will construct the file table, then translate the compiler
26151 internal file table number into the assembler file table number, and emit
26152 a .file directive if we haven't already emitted one yet. The file table
26153 numbers are different because we prune debug info for unused variables and
26154 types, which may include filenames. */
26155
26156 static int
26157 maybe_emit_file (struct dwarf_file_data * fd)
26158 {
26159 if (! fd->emitted_number)
26160 {
26161 if (last_emitted_file)
26162 fd->emitted_number = last_emitted_file->emitted_number + 1;
26163 else
26164 fd->emitted_number = 1;
26165 last_emitted_file = fd;
26166
26167 if (DWARF2_ASM_LINE_DEBUG_INFO)
26168 {
26169 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26170 output_quoted_string (asm_out_file,
26171 remap_debug_filename (fd->filename));
26172 fputc ('\n', asm_out_file);
26173 }
26174 }
26175
26176 return fd->emitted_number;
26177 }
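/* Editorial sketch, not part of the original source: when the assembler
   builds the line table (DWARF2_ASM_LINE_DEBUG_INFO), the branch above emits
   one directive per newly seen file, along the lines of

       .file 2 "include/config.h"

   where 2 is fd->emitted_number and the quoted name has already been passed
   through remap_debug_filename, so any -fdebug-prefix-map style remapping is
   applied.  The path shown is purely illustrative.  */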
26178
26179 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26180 That generation should happen after function debug info has been
26181 generated. The value of the attribute is the constant value of ARG. */
26182
26183 static void
26184 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26185 {
26186 die_arg_entry entry;
26187
26188 if (!die || !arg)
26189 return;
26190
26191 gcc_assert (early_dwarf);
26192
26193 if (!tmpl_value_parm_die_table)
26194 vec_alloc (tmpl_value_parm_die_table, 32);
26195
26196 entry.die = die;
26197 entry.arg = arg;
26198 vec_safe_push (tmpl_value_parm_die_table, entry);
26199 }
26200
26201 /* Return TRUE if T is an instance of generic type, FALSE
26202 otherwise. */
26203
26204 static bool
26205 generic_type_p (tree t)
26206 {
26207 if (t == NULL_TREE || !TYPE_P (t))
26208 return false;
26209 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26210 }
26211
26212 /* Schedule the generation of the generic parameter dies for the
26213 instance of generic type T. The proper generation itself is later
26214 done by gen_scheduled_generic_parms_dies. */
26215
26216 static void
26217 schedule_generic_params_dies_gen (tree t)
26218 {
26219 if (!generic_type_p (t))
26220 return;
26221
26222 gcc_assert (early_dwarf);
26223
26224 if (!generic_type_instances)
26225 vec_alloc (generic_type_instances, 256);
26226
26227 vec_safe_push (generic_type_instances, t);
26228 }
26229
26230 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26231 by append_entry_to_tmpl_value_parm_die_table. This function must
26232 be called after function DIEs have been generated. */
26233
26234 static void
26235 gen_remaining_tmpl_value_param_die_attribute (void)
26236 {
26237 if (tmpl_value_parm_die_table)
26238 {
26239 unsigned i, j;
26240 die_arg_entry *e;
26241
26242 /* We do this in two phases - first get the cases we can
26243 handle during early-finish, preserving those we cannot
26244 (containing symbolic constants where we don't yet know
26245 whether we are going to output the referenced symbols).
26246 For those we try again at late-finish. */
26247 j = 0;
26248 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26249 {
26250 if (!e->die->removed
26251 && !tree_add_const_value_attribute (e->die, e->arg))
26252 {
26253 dw_loc_descr_ref loc = NULL;
26254 if (! early_dwarf
26255 && (dwarf_version >= 5 || !dwarf_strict))
26256 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26257 if (loc)
26258 add_AT_loc (e->die, DW_AT_location, loc);
26259 else
26260 (*tmpl_value_parm_die_table)[j++] = *e;
26261 }
26262 }
26263 tmpl_value_parm_die_table->truncate (j);
26264 }
26265 }
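/* Editorial sketch, not part of the original source: for a C++ instantiation
   such as

       template <int N> struct fixed { };
       fixed<3> f;

   the DIE describing the template value parameter N is scheduled through
   append_entry_to_tmpl_value_parm_die_table, and the loop above later attaches
   DW_AT_const_value 3 to it, or, failing that and when permitted, a
   DW_AT_location computed by loc_descriptor_from_tree.  The type and variable
   names are made up.  */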
26266
26267 /* Generate generic parameters DIEs for instances of generic types
26268 that have been previously scheduled by
26269 schedule_generic_params_dies_gen. This function must be called
26270 after all the types of the CU have been laid out. */
26271
26272 static void
26273 gen_scheduled_generic_parms_dies (void)
26274 {
26275 unsigned i;
26276 tree t;
26277
26278 if (!generic_type_instances)
26279 return;
26280
26281 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26282 if (COMPLETE_TYPE_P (t))
26283 gen_generic_params_dies (t);
26284
26285 generic_type_instances = NULL;
26286 }
26287
26288
26289 /* Replace DW_AT_name for the decl with name. */
26290
26291 static void
26292 dwarf2out_set_name (tree decl, tree name)
26293 {
26294 dw_die_ref die;
26295 dw_attr_node *attr;
26296 const char *dname;
26297
26298 die = TYPE_SYMTAB_DIE (decl);
26299 if (!die)
26300 return;
26301
26302 dname = dwarf2_name (name, 0);
26303 if (!dname)
26304 return;
26305
26306 attr = get_AT (die, DW_AT_name);
26307 if (attr)
26308 {
26309 struct indirect_string_node *node;
26310
26311 node = find_AT_string (dname);
26312 /* Replace the string. */
26313 attr->dw_attr_val.v.val_str = node;
26314 }
26315
26316 else
26317 add_name_attribute (die, dname);
26318 }
26319
26320 /* True if before or during processing of the first function being emitted. */
26321 static bool in_first_function_p = true;
26322 /* True if loc_note during dwarf2out_var_location call might still be
26323 before first real instruction at address equal to .Ltext0. */
26324 static bool maybe_at_text_label_p = true;
26325 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26326 static unsigned int first_loclabel_num_not_at_text_label;
26327
26328 /* Look ahead for a real insn, or for a begin stmt marker. */
26329
26330 static rtx_insn *
26331 dwarf2out_next_real_insn (rtx_insn *loc_note)
26332 {
26333 rtx_insn *next_real = NEXT_INSN (loc_note);
26334
26335 while (next_real)
26336 if (INSN_P (next_real))
26337 break;
26338 else
26339 next_real = NEXT_INSN (next_real);
26340
26341 return next_real;
26342 }
26343
26344 /* Called by the final INSN scan whenever we see a var location. We
26345 use it to drop labels in the right places, and record the location in
26346 our lookup table. */
26347
26348 static void
26349 dwarf2out_var_location (rtx_insn *loc_note)
26350 {
26351 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26352 struct var_loc_node *newloc;
26353 rtx_insn *next_real, *next_note;
26354 rtx_insn *call_insn = NULL;
26355 static const char *last_label;
26356 static const char *last_postcall_label;
26357 static bool last_in_cold_section_p;
26358 static rtx_insn *expected_next_loc_note;
26359 tree decl;
26360 bool var_loc_p;
26361
26362 if (!NOTE_P (loc_note))
26363 {
26364 if (CALL_P (loc_note))
26365 {
26366 call_site_count++;
26367 if (SIBLING_CALL_P (loc_note))
26368 tail_call_site_count++;
26369 if (optimize == 0 && !flag_var_tracking)
26370 {
26371 /* When the var-tracking pass is not running, there is no note
26372 for indirect calls whose target is compile-time known. In this
26373 case, process such calls specifically so that we generate call
26374 sites for them anyway. */
26375 rtx x = PATTERN (loc_note);
26376 if (GET_CODE (x) == PARALLEL)
26377 x = XVECEXP (x, 0, 0);
26378 if (GET_CODE (x) == SET)
26379 x = SET_SRC (x);
26380 if (GET_CODE (x) == CALL)
26381 x = XEXP (x, 0);
26382 if (!MEM_P (x)
26383 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26384 || !SYMBOL_REF_DECL (XEXP (x, 0))
26385 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26386 != FUNCTION_DECL))
26387 {
26388 call_insn = loc_note;
26389 loc_note = NULL;
26390 var_loc_p = false;
26391
26392 next_real = dwarf2out_next_real_insn (call_insn);
26393 next_note = NULL;
26394 cached_next_real_insn = NULL;
26395 goto create_label;
26396 }
26397 }
26398 }
26399 return;
26400 }
26401
26402 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26403 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26404 return;
26405
26406 /* Optimize processing a large consecutive sequence of location
26407 notes so we don't spend too much time in next_real_insn. If the
26408 next insn is another location note, remember the next_real_insn
26409 calculation for next time. */
26410 next_real = cached_next_real_insn;
26411 if (next_real)
26412 {
26413 if (expected_next_loc_note != loc_note)
26414 next_real = NULL;
26415 }
26416
26417 next_note = NEXT_INSN (loc_note);
26418 if (! next_note
26419 || next_note->deleted ()
26420 || ! NOTE_P (next_note)
26421 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26422 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
26423 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26424 next_note = NULL;
26425
26426 if (! next_real)
26427 next_real = dwarf2out_next_real_insn (loc_note);
26428
26429 if (next_note)
26430 {
26431 expected_next_loc_note = next_note;
26432 cached_next_real_insn = next_real;
26433 }
26434 else
26435 cached_next_real_insn = NULL;
26436
26437 /* If there are no instructions which would be affected by this note,
26438 don't do anything. */
26439 if (var_loc_p
26440 && next_real == NULL_RTX
26441 && !NOTE_DURING_CALL_P (loc_note))
26442 return;
26443
26444 create_label:
26445
26446 if (next_real == NULL_RTX)
26447 next_real = get_last_insn ();
26448
26449 /* If there were any real insns between the note we processed last time
26450 and this note (or if this is the first note), clear
26451 last_{,postcall_}label so that they are not reused this time. */
26452 if (last_var_location_insn == NULL_RTX
26453 || last_var_location_insn != next_real
26454 || last_in_cold_section_p != in_cold_section_p)
26455 {
26456 last_label = NULL;
26457 last_postcall_label = NULL;
26458 }
26459
26460 if (var_loc_p)
26461 {
26462 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26463 newloc = add_var_loc_to_decl (decl, loc_note,
26464 NOTE_DURING_CALL_P (loc_note)
26465 ? last_postcall_label : last_label);
26466 if (newloc == NULL)
26467 return;
26468 }
26469 else
26470 {
26471 decl = NULL_TREE;
26472 newloc = NULL;
26473 }
26474
26475 /* If there were no real insns between the note we processed last time
26476 and this note, use the label we emitted last time. Otherwise
26477 create a new label and emit it. */
26478 if (last_label == NULL)
26479 {
26480 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26481 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26482 loclabel_num++;
26483 last_label = ggc_strdup (loclabel);
26484 /* See if loclabel might be equal to .Ltext0. If yes,
26485 bump first_loclabel_num_not_at_text_label. */
26486 if (!have_multiple_function_sections
26487 && in_first_function_p
26488 && maybe_at_text_label_p)
26489 {
26490 static rtx_insn *last_start;
26491 rtx_insn *insn;
26492 for (insn = loc_note; insn; insn = previous_insn (insn))
26493 if (insn == last_start)
26494 break;
26495 else if (!NONDEBUG_INSN_P (insn))
26496 continue;
26497 else
26498 {
26499 rtx body = PATTERN (insn);
26500 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26501 continue;
26502 /* Inline asm could occupy zero bytes. */
26503 else if (GET_CODE (body) == ASM_INPUT
26504 || asm_noperands (body) >= 0)
26505 continue;
26506 #ifdef HAVE_attr_length
26507 else if (get_attr_min_length (insn) == 0)
26508 continue;
26509 #endif
26510 else
26511 {
26512 /* Assume insn has non-zero length. */
26513 maybe_at_text_label_p = false;
26514 break;
26515 }
26516 }
26517 if (maybe_at_text_label_p)
26518 {
26519 last_start = loc_note;
26520 first_loclabel_num_not_at_text_label = loclabel_num;
26521 }
26522 }
26523 }
26524
26525 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26526 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26527
26528 if (!var_loc_p)
26529 {
26530 struct call_arg_loc_node *ca_loc
26531 = ggc_cleared_alloc<call_arg_loc_node> ();
26532 rtx_insn *prev
26533 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26534
26535 ca_loc->call_arg_loc_note = loc_note;
26536 ca_loc->next = NULL;
26537 ca_loc->label = last_label;
26538 gcc_assert (prev
26539 && (CALL_P (prev)
26540 || (NONJUMP_INSN_P (prev)
26541 && GET_CODE (PATTERN (prev)) == SEQUENCE
26542 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26543 if (!CALL_P (prev))
26544 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26545 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26546
26547 /* Look for a SYMBOL_REF in the "prev" instruction. */
26548 rtx x = get_call_rtx_from (PATTERN (prev));
26549 if (x)
26550 {
26551 /* Try to get the call symbol, if any. */
26552 if (MEM_P (XEXP (x, 0)))
26553 x = XEXP (x, 0);
26554 /* First, look for a memory access to a symbol_ref. */
26555 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26556 && SYMBOL_REF_DECL (XEXP (x, 0))
26557 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26558 ca_loc->symbol_ref = XEXP (x, 0);
26559 /* Otherwise, look at a compile-time known user-level function
26560 declaration. */
26561 else if (MEM_P (x)
26562 && MEM_EXPR (x)
26563 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26564 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26565 }
26566
26567 ca_loc->block = insn_scope (prev);
26568 if (call_arg_locations)
26569 call_arg_loc_last->next = ca_loc;
26570 else
26571 call_arg_locations = ca_loc;
26572 call_arg_loc_last = ca_loc;
26573 }
26574 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26575 newloc->label = last_label;
26576 else
26577 {
26578 if (!last_postcall_label)
26579 {
26580 sprintf (loclabel, "%s-1", last_label);
26581 last_postcall_label = ggc_strdup (loclabel);
26582 }
26583 newloc->label = last_postcall_label;
26584 }
26585
26586 if (var_loc_p && flag_debug_asm)
26587 {
26588 const char *name, *sep, *patstr;
26589 if (decl && DECL_NAME (decl))
26590 name = IDENTIFIER_POINTER (DECL_NAME (decl));
26591 else
26592 name = "";
26593 if (NOTE_VAR_LOCATION_LOC (loc_note))
26594 {
26595 sep = " => ";
26596 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
26597 }
26598 else
26599 {
26600 sep = " ";
26601 patstr = "RESET";
26602 }
26603 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
26604 name, sep, patstr);
26605 }
26606
26607 last_var_location_insn = next_real;
26608 last_in_cold_section_p = in_cold_section_p;
26609 }
26610
26611 /* Called from finalize_size_functions for size functions so that their body
26612 can be encoded in the debug info to describe the layout of variable-length
26613 structures. */
26614
26615 static void
26616 dwarf2out_size_function (tree decl)
26617 {
26618 function_to_dwarf_procedure (decl);
26619 }
26620
26621 /* Note in one location list that text section has changed. */
26622
26623 int
26624 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26625 {
26626 var_loc_list *list = *slot;
26627 if (list->first)
26628 list->last_before_switch
26629 = list->last->next ? list->last->next : list->last;
26630 return 1;
26631 }
26632
26633 /* Note in all location lists that text section has changed. */
26634
26635 static void
26636 var_location_switch_text_section (void)
26637 {
26638 if (decl_loc_table == NULL)
26639 return;
26640
26641 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26642 }
26643
26644 /* Create a new line number table. */
26645
26646 static dw_line_info_table *
26647 new_line_info_table (void)
26648 {
26649 dw_line_info_table *table;
26650
26651 table = ggc_cleared_alloc<dw_line_info_table> ();
26652 table->file_num = 1;
26653 table->line_num = 1;
26654 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26655
26656 return table;
26657 }
26658
26659 /* Look up the "current" table into which we emit line info, so
26660 that we don't have to do it for every source line. */
26661
26662 static void
26663 set_cur_line_info_table (section *sec)
26664 {
26665 dw_line_info_table *table;
26666
26667 if (sec == text_section)
26668 table = text_section_line_info;
26669 else if (sec == cold_text_section)
26670 {
26671 table = cold_text_section_line_info;
26672 if (!table)
26673 {
26674 cold_text_section_line_info = table = new_line_info_table ();
26675 table->end_label = cold_end_label;
26676 }
26677 }
26678 else
26679 {
26680 const char *end_label;
26681
26682 if (crtl->has_bb_partition)
26683 {
26684 if (in_cold_section_p)
26685 end_label = crtl->subsections.cold_section_end_label;
26686 else
26687 end_label = crtl->subsections.hot_section_end_label;
26688 }
26689 else
26690 {
26691 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26692 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26693 current_function_funcdef_no);
26694 end_label = ggc_strdup (label);
26695 }
26696
26697 table = new_line_info_table ();
26698 table->end_label = end_label;
26699
26700 vec_safe_push (separate_line_info, table);
26701 }
26702
26703 if (DWARF2_ASM_LINE_DEBUG_INFO)
26704 table->is_stmt = (cur_line_info_table
26705 ? cur_line_info_table->is_stmt
26706 : DWARF_LINE_DEFAULT_IS_STMT_START);
26707 cur_line_info_table = table;
26708 }
26709
26710
26711 /* We need to reset the locations at the beginning of each
26712 function. We can't do this in the end_function hook, because the
26713 declarations that use the locations won't have been output when
26714 that hook is called. Also compute have_multiple_function_sections here. */
26715
26716 static void
26717 dwarf2out_begin_function (tree fun)
26718 {
26719 section *sec = function_section (fun);
26720
26721 if (sec != text_section)
26722 have_multiple_function_sections = true;
26723
26724 if (crtl->has_bb_partition && !cold_text_section)
26725 {
26726 gcc_assert (current_function_decl == fun);
26727 cold_text_section = unlikely_text_section ();
26728 switch_to_section (cold_text_section);
26729 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26730 switch_to_section (sec);
26731 }
26732
26733 dwarf2out_note_section_used ();
26734 call_site_count = 0;
26735 tail_call_site_count = 0;
26736
26737 set_cur_line_info_table (sec);
26738 }
26739
26740 /* Helper function of dwarf2out_end_function, called only after emitting
26741 the very first function into assembly. Check if some .debug_loc range
26742 might end with a .LVL* label that could be equal to .Ltext0.
26743 In that case we must force using absolute addresses in .debug_loc ranges,
26744 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26745 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26746 list terminator.
26747 Set have_multiple_function_sections to true in that case and
26748 terminate htab traversal. */
26749
26750 int
26751 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26752 {
26753 var_loc_list *entry = *slot;
26754 struct var_loc_node *node;
26755
26756 node = entry->first;
26757 if (node && node->next && node->next->label)
26758 {
26759 unsigned int i;
26760 const char *label = node->next->label;
26761 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26762
26763 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26764 {
26765 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26766 if (strcmp (label, loclabel) == 0)
26767 {
26768 have_multiple_function_sections = true;
26769 return 0;
26770 }
26771 }
26772 }
26773 return 1;
26774 }
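/* Editorial sketch, not part of the original source: the situation guarded
   against above is a text-relative .debug_loc entry whose two bounds both
   resolve to .Ltext0, i.e. something that would assemble to

       begin: .LVL0 - .Ltext0   (value 0)
       end:   .LVL1 - .Ltext0   (value 0)

   and a 0/0 pair is exactly the end-of-list marker, so consumers would take
   it as the end of the location list.  Setting have_multiple_function_sections
   forces absolute addresses and avoids the ambiguity.  */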
26775
26776 /* Hook called after emitting a function into assembly.
26777 This does something only for the very first function emitted. */
26778
26779 static void
26780 dwarf2out_end_function (unsigned int)
26781 {
26782 if (in_first_function_p
26783 && !have_multiple_function_sections
26784 && first_loclabel_num_not_at_text_label
26785 && decl_loc_table)
26786 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26787 in_first_function_p = false;
26788 maybe_at_text_label_p = false;
26789 }
26790
26791 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26792 front-ends register a translation unit even before dwarf2out_init is
26793 called. */
26794 static tree main_translation_unit = NULL_TREE;
26795
26796 /* Hook called by front-ends after they built their main translation unit.
26797 Associate comp_unit_die to UNIT. */
26798
26799 static void
26800 dwarf2out_register_main_translation_unit (tree unit)
26801 {
26802 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26803 && main_translation_unit == NULL_TREE);
26804 main_translation_unit = unit;
26805 /* If dwarf2out_init has not been called yet, it will perform the association
26806 itself looking at main_translation_unit. */
26807 if (decl_die_table != NULL)
26808 equate_decl_number_to_die (unit, comp_unit_die ());
26809 }
26810
26811 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26812
26813 static void
26814 push_dw_line_info_entry (dw_line_info_table *table,
26815 enum dw_line_info_opcode opcode, unsigned int val)
26816 {
26817 dw_line_info_entry e;
26818 e.opcode = opcode;
26819 e.val = val;
26820 vec_safe_push (table->entries, e);
26821 }
26822
26823 /* Output a label to mark the beginning of a source code line entry
26824 and record information relating to this source line, in
26825 'line_info_table' for later output of the .debug_line section. */
26826 /* ??? The discriminator parameter ought to be unsigned. */
26827
26828 static void
26829 dwarf2out_source_line (unsigned int line, unsigned int column,
26830 const char *filename,
26831 int discriminator, bool is_stmt)
26832 {
26833 unsigned int file_num;
26834 dw_line_info_table *table;
26835
26836 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26837 return;
26838
26839 /* The discriminator column was added in DWARF 4. Simplify the code
26840 below by clearing it here if we're not supposed to output it. */
26841 if (dwarf_version < 4 && dwarf_strict)
26842 discriminator = 0;
26843
26844 if (!debug_column_info)
26845 column = 0;
26846
26847 table = cur_line_info_table;
26848 file_num = maybe_emit_file (lookup_filename (filename));
26849
26850 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26851 the debugger has used the second (possibly duplicate) line number
26852 at the beginning of the function to mark the end of the prologue.
26853 We could eliminate any other duplicates within the function. For
26854 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26855 that second line number entry. */
26856 /* Recall that this end-of-prologue indication is *not* the same thing
26857 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26858 to which the hook corresponds, follows the last insn that was
26859 emitted by gen_prologue. What we need is to precede the first insn
26860 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26861 insn that corresponds to something the user wrote. These may be
26862 very different locations once scheduling is enabled. */
26863
26864 if (0 && file_num == table->file_num
26865 && line == table->line_num
26866 && column == table->column_num
26867 && discriminator == table->discrim_num
26868 && is_stmt == table->is_stmt)
26869 return;
26870
26871 switch_to_section (current_function_section ());
26872
26873 /* If requested, emit something human-readable. */
26874 if (flag_debug_asm)
26875 {
26876 if (debug_column_info)
26877 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
26878 filename, line, column);
26879 else
26880 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
26881 filename, line);
26882 }
26883
26884 if (DWARF2_ASM_LINE_DEBUG_INFO)
26885 {
26886 /* Emit the .loc directive understood by GNU as. */
26887 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
26888 file_num, line, is_stmt, discriminator */
26889 fputs ("\t.loc ", asm_out_file);
26890 fprint_ul (asm_out_file, file_num);
26891 putc (' ', asm_out_file);
26892 fprint_ul (asm_out_file, line);
26893 putc (' ', asm_out_file);
26894 fprint_ul (asm_out_file, column);
26895
26896 if (is_stmt != table->is_stmt)
26897 {
26898 fputs (" is_stmt ", asm_out_file);
26899 putc (is_stmt ? '1' : '0', asm_out_file);
26900 }
26901 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26902 {
26903 gcc_assert (discriminator > 0);
26904 fputs (" discriminator ", asm_out_file);
26905 fprint_ul (asm_out_file, (unsigned long) discriminator);
26906 }
26907 putc ('\n', asm_out_file);
26908 }
26909 else
26910 {
26911 unsigned int label_num = ++line_info_label_num;
26912
26913 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26914
26915 push_dw_line_info_entry (table, LI_set_address, label_num);
26916 if (file_num != table->file_num)
26917 push_dw_line_info_entry (table, LI_set_file, file_num);
26918 if (discriminator != table->discrim_num)
26919 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26920 if (is_stmt != table->is_stmt)
26921 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26922 push_dw_line_info_entry (table, LI_set_line, line);
26923 if (debug_column_info)
26924 push_dw_line_info_entry (table, LI_set_column, column);
26925 }
26926
26927 table->file_num = file_num;
26928 table->line_num = line;
26929 table->column_num = column;
26930 table->discrim_num = discriminator;
26931 table->is_stmt = is_stmt;
26932 table->in_use = true;
26933 }
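/* Editorial sketch, not part of the original source: with GNU as emitting the
   line table, a call like dwarf2out_source_line (42, 7, "foo.c", 0, true)
   would typically produce

       .loc 1 42 7

   assuming column info is enabled (otherwise the column is printed as 0),
   with " is_stmt N" appended only when the is_stmt state changes and
   " discriminator N" only when nonzero and supported.  Without assembler
   support the same information is queued as LI_* entries in the current
   dw_line_info_table for later .debug_line output.  The file number comes
   from maybe_emit_file; the example values are made up.  */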
26934
26935 /* Record the beginning of a new source file. */
26936
26937 static void
26938 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26939 {
26940 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26941 {
26942 macinfo_entry e;
26943 e.code = DW_MACINFO_start_file;
26944 e.lineno = lineno;
26945 e.info = ggc_strdup (filename);
26946 vec_safe_push (macinfo_table, e);
26947 }
26948 }
26949
26950 /* Record the end of a source file. */
26951
26952 static void
26953 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26954 {
26955 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26956 {
26957 macinfo_entry e;
26958 e.code = DW_MACINFO_end_file;
26959 e.lineno = lineno;
26960 e.info = NULL;
26961 vec_safe_push (macinfo_table, e);
26962 }
26963 }
26964
26965 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26966 the tail part of the directive line, i.e. the part which is past the
26967 initial whitespace, #, whitespace, directive-name, and whitespace. */
26968
26969 static void
26970 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26971 const char *buffer ATTRIBUTE_UNUSED)
26972 {
26973 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26974 {
26975 macinfo_entry e;
26976 /* Insert a dummy first entry to be able to optimize the whole
26977 predefined macro block using DW_MACRO_import. */
26978 if (macinfo_table->is_empty () && lineno <= 1)
26979 {
26980 e.code = 0;
26981 e.lineno = 0;
26982 e.info = NULL;
26983 vec_safe_push (macinfo_table, e);
26984 }
26985 e.code = DW_MACINFO_define;
26986 e.lineno = lineno;
26987 e.info = ggc_strdup (buffer);
26988 vec_safe_push (macinfo_table, e);
26989 }
26990 }
26991
26992 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26993 the tail part of the directive line, i.e. the part which is past the
26994 initial whitespace, #, whitespace, directive-name, whitespace part. */
26995
26996 static void
26997 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26998 const char *buffer ATTRIBUTE_UNUSED)
26999 {
27000 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27001 {
27002 macinfo_entry e;
27003 /* Insert a dummy first entry to be able to optimize the whole
27004 predefined macro block using DW_MACRO_import. */
27005 if (macinfo_table->is_empty () && lineno <= 1)
27006 {
27007 e.code = 0;
27008 e.lineno = 0;
27009 e.info = NULL;
27010 vec_safe_push (macinfo_table, e);
27011 }
27012 e.code = DW_MACINFO_undef;
27013 e.lineno = lineno;
27014 e.info = ggc_strdup (buffer);
27015 vec_safe_push (macinfo_table, e);
27016 }
27017 }
27018
27019 /* Helpers to manipulate the hash table of macinfo entries. */
27020
27021 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27022 {
27023 static inline hashval_t hash (const macinfo_entry *);
27024 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27025 };
27026
27027 inline hashval_t
27028 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27029 {
27030 return htab_hash_string (entry->info);
27031 }
27032
27033 inline bool
27034 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27035 const macinfo_entry *entry2)
27036 {
27037 return !strcmp (entry1->info, entry2->info);
27038 }
27039
27040 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27041
27042 /* Output a single .debug_macinfo entry. */
27043
27044 static void
27045 output_macinfo_op (macinfo_entry *ref)
27046 {
27047 int file_num;
27048 size_t len;
27049 struct indirect_string_node *node;
27050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27051 struct dwarf_file_data *fd;
27052
27053 switch (ref->code)
27054 {
27055 case DW_MACINFO_start_file:
27056 fd = lookup_filename (ref->info);
27057 file_num = maybe_emit_file (fd);
27058 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
27059 dw2_asm_output_data_uleb128 (ref->lineno,
27060 "Included from line number %lu",
27061 (unsigned long) ref->lineno);
27062 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
27063 break;
27064 case DW_MACINFO_end_file:
27065 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
27066 break;
27067 case DW_MACINFO_define:
27068 case DW_MACINFO_undef:
27069 len = strlen (ref->info) + 1;
27070 if (!dwarf_strict
27071 && len > DWARF_OFFSET_SIZE
27072 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27073 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27074 {
27075 ref->code = ref->code == DW_MACINFO_define
27076 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
27077 output_macinfo_op (ref);
27078 return;
27079 }
27080 dw2_asm_output_data (1, ref->code,
27081 ref->code == DW_MACINFO_define
27082 ? "Define macro" : "Undefine macro");
27083 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27084 (unsigned long) ref->lineno);
27085 dw2_asm_output_nstring (ref->info, -1, "The macro");
27086 break;
27087 case DW_MACRO_define_strp:
27088 case DW_MACRO_undef_strp:
27089 node = find_AT_string (ref->info);
27090 gcc_assert (node
27091 && (node->form == DW_FORM_strp
27092 || node->form == DW_FORM_GNU_str_index));
27093 dw2_asm_output_data (1, ref->code,
27094 ref->code == DW_MACRO_define_strp
27095 ? "Define macro strp"
27096 : "Undefine macro strp");
27097 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27098 (unsigned long) ref->lineno);
27099 if (node->form == DW_FORM_strp)
27100 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
27101 debug_str_section, "The macro: \"%s\"",
27102 ref->info);
27103 else
27104 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
27105 ref->info);
27106 break;
27107 case DW_MACRO_import:
27108 dw2_asm_output_data (1, ref->code, "Import");
27109 ASM_GENERATE_INTERNAL_LABEL (label,
27110 DEBUG_MACRO_SECTION_LABEL,
27111 ref->lineno + macinfo_label_base);
27112 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
27113 break;
27114 default:
27115 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
27116 ASM_COMMENT_START, (unsigned long) ref->code);
27117 break;
27118 }
27119 }
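/* Editorial sketch, not part of the original source: for "#define ANSWER 42"
   on line 3 of the main file, the DW_MACINFO_define case above emits,
   in .debug_macinfo terms,

       byte    0x1           (DW_MACINFO_define)
       uleb128 3             (line number)
       string  "ANSWER 42"   (macro name, one space, replacement text)

   while sufficiently long strings on non-strict targets are redirected to the
   DW_MACRO_define_strp form, which references .debug_str instead of embedding
   the string.  The macro shown is made up.  */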
27120
27121 /* Attempt to make a sequence of define/undef macinfo ops shareable with
27122 the .debug_macinfo sections of other compilation units. IDX is the
27123 index of the first define/undef op; return the number of ops that
27124 should be emitted in a comdat .debug_macinfo section and emit
27125 a DW_MACRO_import entry referencing it.
27126 If the define/undef entry should be emitted normally, return 0. */
27127
27128 static unsigned
27129 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
27130 macinfo_hash_type **macinfo_htab)
27131 {
27132 macinfo_entry *first, *second, *cur, *inc;
27133 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
27134 unsigned char checksum[16];
27135 struct md5_ctx ctx;
27136 char *grp_name, *tail;
27137 const char *base;
27138 unsigned int i, count, encoded_filename_len, linebuf_len;
27139 macinfo_entry **slot;
27140
27141 first = &(*macinfo_table)[idx];
27142 second = &(*macinfo_table)[idx + 1];
27143
27144 /* Optimize only if there are at least two consecutive define/undef ops,
27145 and either all of them are before first DW_MACINFO_start_file
27146 with lineno {0,1} (i.e. predefined macro block), or all of them are
27147 in some included header file. */
27148 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
27149 return 0;
27150 if (vec_safe_is_empty (files))
27151 {
27152 if (first->lineno > 1 || second->lineno > 1)
27153 return 0;
27154 }
27155 else if (first->lineno == 0)
27156 return 0;
27157
27158 /* Find the last define/undef entry that can be grouped together
27159 with first and at the same time compute md5 checksum of their
27160 codes, linenumbers and strings. */
27161 md5_init_ctx (&ctx);
27162 for (i = idx; macinfo_table->iterate (i, &cur); i++)
27163 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
27164 break;
27165 else if (vec_safe_is_empty (files) && cur->lineno > 1)
27166 break;
27167 else
27168 {
27169 unsigned char code = cur->code;
27170 md5_process_bytes (&code, 1, &ctx);
27171 checksum_uleb128 (cur->lineno, &ctx);
27172 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
27173 }
27174 md5_finish_ctx (&ctx, checksum);
27175 count = i - idx;
27176
27177 /* From the containing include filename (if any) pick up just
27178 usable characters from its basename. */
27179 if (vec_safe_is_empty (files))
27180 base = "";
27181 else
27182 base = lbasename (files->last ().info);
27183 for (encoded_filename_len = 0, i = 0; base[i]; i++)
27184 if (ISIDNUM (base[i]) || base[i] == '.')
27185 encoded_filename_len++;
27186 /* Count . at the end. */
27187 if (encoded_filename_len)
27188 encoded_filename_len++;
27189
27190 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
27191 linebuf_len = strlen (linebuf);
27192
27193 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
27194 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
27195 + 16 * 2 + 1);
27196 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
27197 tail = grp_name + 4;
27198 if (encoded_filename_len)
27199 {
27200 for (i = 0; base[i]; i++)
27201 if (ISIDNUM (base[i]) || base[i] == '.')
27202 *tail++ = base[i];
27203 *tail++ = '.';
27204 }
27205 memcpy (tail, linebuf, linebuf_len);
27206 tail += linebuf_len;
27207 *tail++ = '.';
27208 for (i = 0; i < 16; i++)
27209 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
27210
27211 /* Construct a macinfo_entry for DW_MACRO_import
27212 in the empty vector entry before the first define/undef. */
27213 inc = &(*macinfo_table)[idx - 1];
27214 inc->code = DW_MACRO_import;
27215 inc->lineno = 0;
27216 inc->info = ggc_strdup (grp_name);
27217 if (!*macinfo_htab)
27218 *macinfo_htab = new macinfo_hash_type (10);
27219 /* Avoid emitting duplicates. */
27220 slot = (*macinfo_htab)->find_slot (inc, INSERT);
27221 if (*slot != NULL)
27222 {
27223 inc->code = 0;
27224 inc->info = NULL;
27225 /* If such an entry has been used before, just emit
27226 a DW_MACRO_import op. */
27227 inc = *slot;
27228 output_macinfo_op (inc);
27229 /* And clear all macinfo_entry in the range to avoid emitting them
27230 in the second pass. */
27231 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
27232 {
27233 cur->code = 0;
27234 cur->info = NULL;
27235 }
27236 }
27237 else
27238 {
27239 *slot = inc;
27240 inc->lineno = (*macinfo_htab)->elements ();
27241 output_macinfo_op (inc);
27242 }
27243 return count;
27244 }
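/* Editorial sketch, not part of the original source: with 4-byte DWARF
   offsets, a shareable run of defines/undefs from an included header
   config.h whose first op is at line 1 would get a comdat group name
   of roughly

       wm4.config.h.1.<32 hex digits of the md5 checksum>

   following the "wmN.[<encoded filename>.]<lineno>.<md5sum>" format described
   above; identical runs in other translation units then deduplicate through
   the linker's comdat handling, and each run is referenced from the primary
   section by a DW_MACRO_import op.  The header name is illustrative.  */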
27245
27246 /* Save any strings needed by the macinfo table in the debug str
27247 table. All strings must be collected into the table by the time
27248 index_string is called. */
27249
27250 static void
27251 save_macinfo_strings (void)
27252 {
27253 unsigned len;
27254 unsigned i;
27255 macinfo_entry *ref;
27256
27257 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
27258 {
27259 switch (ref->code)
27260 {
27261 /* Match the logic in output_macinfo_op to decide on
27262 indirect strings. */
27263 case DW_MACINFO_define:
27264 case DW_MACINFO_undef:
27265 len = strlen (ref->info) + 1;
27266 if (!dwarf_strict
27267 && len > DWARF_OFFSET_SIZE
27268 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27269 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27270 set_indirect_string (find_AT_string (ref->info));
27271 break;
27272 case DW_MACRO_define_strp:
27273 case DW_MACRO_undef_strp:
27274 set_indirect_string (find_AT_string (ref->info));
27275 break;
27276 default:
27277 break;
27278 }
27279 }
27280 }
27281
27282 /* Output macinfo section(s). */
27283
27284 static void
27285 output_macinfo (const char *debug_line_label, bool early_lto_debug)
27286 {
27287 unsigned i;
27288 unsigned long length = vec_safe_length (macinfo_table);
27289 macinfo_entry *ref;
27290 vec<macinfo_entry, va_gc> *files = NULL;
27291 macinfo_hash_type *macinfo_htab = NULL;
27292 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
27293
27294 if (! length)
27295 return;
27296
27297 /* output_macinfo* uses these interchangeably. */
27298 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27299 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27300 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27301 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27302
27303 /* AIX Assembler inserts the length, so adjust the reference to match the
27304 offset expected by debuggers. */
27305 strcpy (dl_section_ref, debug_line_label);
27306 if (XCOFF_DEBUGGING_INFO)
27307 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
27308
27309 /* For .debug_macro emit the section header. */
27310 if (!dwarf_strict || dwarf_version >= 5)
27311 {
27312 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27313 "DWARF macro version number");
27314 if (DWARF_OFFSET_SIZE == 8)
27315 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27316 else
27317 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27318 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
27319 debug_line_section, NULL);
27320 }
27321
27322 /* The first loop emits the primary .debug_macinfo section and clears
27323 each macinfo_entry once its op has been output.
27324 If a longer range of define/undef ops can be optimized using
27325 DW_MACRO_import, the DW_MACRO_import op is emitted and stored in the
27326 vector slot just before the first define/undef of the range, and the
27327 range itself is not emitted here but kept for the second loop below. */
27328 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27329 {
27330 switch (ref->code)
27331 {
27332 case DW_MACINFO_start_file:
27333 vec_safe_push (files, *ref);
27334 break;
27335 case DW_MACINFO_end_file:
27336 if (!vec_safe_is_empty (files))
27337 files->pop ();
27338 break;
27339 case DW_MACINFO_define:
27340 case DW_MACINFO_undef:
27341 if ((!dwarf_strict || dwarf_version >= 5)
27342 && HAVE_COMDAT_GROUP
27343 && vec_safe_length (files) != 1
27344 && i > 0
27345 && i + 1 < length
27346 && (*macinfo_table)[i - 1].code == 0)
27347 {
27348 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27349 if (count)
27350 {
27351 i += count - 1;
27352 continue;
27353 }
27354 }
27355 break;
27356 case 0:
27357 /* A dummy entry may be inserted at the beginning to be able
27358 to optimize the whole block of predefined macros. */
27359 if (i == 0)
27360 continue;
27361 default:
27362 break;
27363 }
27364 output_macinfo_op (ref);
27365 ref->info = NULL;
27366 ref->code = 0;
27367 }
27368
27369 if (!macinfo_htab)
27370 return;
27371
27372 /* Save the number of transparent includes so we can adjust the
27373 label number for the fat LTO object DWARF. */
27374 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
27375
27376 delete macinfo_htab;
27377 macinfo_htab = NULL;
27378
27379 /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
27380 terminate the current chain and switch to a new comdat .debug_macinfo
27381 section and emit the define/undef entries within it. */
27382 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27383 switch (ref->code)
27384 {
27385 case 0:
27386 continue;
27387 case DW_MACRO_import:
27388 {
27389 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27390 tree comdat_key = get_identifier (ref->info);
27391 /* Terminate the previous .debug_macinfo section. */
27392 dw2_asm_output_data (1, 0, "End compilation unit");
27393 targetm.asm_out.named_section (debug_macinfo_section_name,
27394 SECTION_DEBUG
27395 | SECTION_LINKONCE
27396 | (early_lto_debug
27397 ? SECTION_EXCLUDE : 0),
27398 comdat_key);
27399 ASM_GENERATE_INTERNAL_LABEL (label,
27400 DEBUG_MACRO_SECTION_LABEL,
27401 ref->lineno + macinfo_label_base);
27402 ASM_OUTPUT_LABEL (asm_out_file, label);
27403 ref->code = 0;
27404 ref->info = NULL;
27405 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27406 "DWARF macro version number");
27407 if (DWARF_OFFSET_SIZE == 8)
27408 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27409 else
27410 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27411 }
27412 break;
27413 case DW_MACINFO_define:
27414 case DW_MACINFO_undef:
27415 output_macinfo_op (ref);
27416 ref->code = 0;
27417 ref->info = NULL;
27418 break;
27419 default:
27420 gcc_unreachable ();
27421 }
27422
27423 macinfo_label_base += macinfo_label_base_adj;
27424 }
27425
27426 /* Initialize the various sections and labels for dwarf output; if
27427 EARLY_LTO_DEBUG is true, use the early LTO debug variants. Returns
27428 the generation (zero-based number of times the function was called). */
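/* The generation count keeps internal labels from different invocations
   distinct: each ASM_GENERATE_INTERNAL_LABEL call below uses the current
   generation (or, for the ranges labels, a value derived from it) as the
   label number, so labels emitted for early LTO debug do not collide
   with those emitted for the late debug info.  */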
27429
27430 static unsigned
27431 init_sections_and_labels (bool early_lto_debug)
27432 {
27433 /* As we may get called multiple times have a generation count for
27434 labels. */
27435 static unsigned generation = 0;
27436
27437 if (early_lto_debug)
27438 {
27439 if (!dwarf_split_debug_info)
27440 {
27441 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27442 SECTION_DEBUG | SECTION_EXCLUDE,
27443 NULL);
27444 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
27445 SECTION_DEBUG | SECTION_EXCLUDE,
27446 NULL);
27447 debug_macinfo_section_name
27448 = ((dwarf_strict && dwarf_version < 5)
27449 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
27450 debug_macinfo_section = get_section (debug_macinfo_section_name,
27451 SECTION_DEBUG
27452 | SECTION_EXCLUDE, NULL);
27453 /* For macro info we have to refer to a debug_line section, so
27454 similar to split-dwarf emit a skeleton one for early debug. */
27455 debug_skeleton_line_section
27456 = get_section (DEBUG_LTO_LINE_SECTION,
27457 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27458 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27459 DEBUG_SKELETON_LINE_SECTION_LABEL,
27460 generation);
27461 }
27462 else
27463 {
27464 /* ??? Which of the following do we need early? */
27465 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
27466 SECTION_DEBUG | SECTION_EXCLUDE,
27467 NULL);
27468 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
27469 SECTION_DEBUG | SECTION_EXCLUDE,
27470 NULL);
27471 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27472 SECTION_DEBUG
27473 | SECTION_EXCLUDE, NULL);
27474 debug_skeleton_abbrev_section
27475 = get_section (DEBUG_LTO_ABBREV_SECTION,
27476 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27477 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27478 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27479 generation);
27480
27481 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27482 stay in the main .o, but the skeleton_line goes into the split
27483 off dwo. */
27484 debug_skeleton_line_section
27485 = get_section (DEBUG_LTO_LINE_SECTION,
27486 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27487 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27488 DEBUG_SKELETON_LINE_SECTION_LABEL,
27489 generation);
27490 debug_str_offsets_section
27491 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
27492 SECTION_DEBUG | SECTION_EXCLUDE,
27493 NULL);
27494 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27495 DEBUG_SKELETON_INFO_SECTION_LABEL,
27496 generation);
27497 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
27498 DEBUG_STR_DWO_SECTION_FLAGS,
27499 NULL);
27500 debug_macinfo_section_name
27501 = ((dwarf_strict && dwarf_version < 5)
27502 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
27503 debug_macinfo_section = get_section (debug_macinfo_section_name,
27504 SECTION_DEBUG | SECTION_EXCLUDE,
27505 NULL);
27506 }
27507 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
27508 DEBUG_STR_SECTION_FLAGS
27509 | SECTION_EXCLUDE, NULL);
27510 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27511 debug_line_str_section
27512 = get_section (DEBUG_LTO_LINE_STR_SECTION,
27513 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
27514 }
27515 else
27516 {
27517 if (!dwarf_split_debug_info)
27518 {
27519 debug_info_section = get_section (DEBUG_INFO_SECTION,
27520 SECTION_DEBUG, NULL);
27521 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27522 SECTION_DEBUG, NULL);
27523 debug_loc_section = get_section (dwarf_version >= 5
27524 ? DEBUG_LOCLISTS_SECTION
27525 : DEBUG_LOC_SECTION,
27526 SECTION_DEBUG, NULL);
27527 debug_macinfo_section_name
27528 = ((dwarf_strict && dwarf_version < 5)
27529 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
27530 debug_macinfo_section = get_section (debug_macinfo_section_name,
27531 SECTION_DEBUG, NULL);
27532 }
27533 else
27534 {
27535 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27536 SECTION_DEBUG | SECTION_EXCLUDE,
27537 NULL);
27538 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27539 SECTION_DEBUG | SECTION_EXCLUDE,
27540 NULL);
27541 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27542 SECTION_DEBUG, NULL);
27543 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27544 SECTION_DEBUG, NULL);
27545 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27546 SECTION_DEBUG, NULL);
27547 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27548 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27549 generation);
27550
27551 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27552 stay in the main .o, but the skeleton_line goes into the
27553 split off dwo. */
27554 debug_skeleton_line_section
27555 = get_section (DEBUG_DWO_LINE_SECTION,
27556 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27557 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27558 DEBUG_SKELETON_LINE_SECTION_LABEL,
27559 generation);
27560 debug_str_offsets_section
27561 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27562 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27563 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27564 DEBUG_SKELETON_INFO_SECTION_LABEL,
27565 generation);
27566 debug_loc_section = get_section (dwarf_version >= 5
27567 ? DEBUG_DWO_LOCLISTS_SECTION
27568 : DEBUG_DWO_LOC_SECTION,
27569 SECTION_DEBUG | SECTION_EXCLUDE,
27570 NULL);
27571 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27572 DEBUG_STR_DWO_SECTION_FLAGS,
27573 NULL);
27574 debug_macinfo_section_name
27575 = ((dwarf_strict && dwarf_version < 5)
27576 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
27577 debug_macinfo_section = get_section (debug_macinfo_section_name,
27578 SECTION_DEBUG | SECTION_EXCLUDE,
27579 NULL);
27580 }
27581 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27582 SECTION_DEBUG, NULL);
27583 debug_line_section = get_section (DEBUG_LINE_SECTION,
27584 SECTION_DEBUG, NULL);
27585 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27586 SECTION_DEBUG, NULL);
27587 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27588 SECTION_DEBUG, NULL);
27589 debug_str_section = get_section (DEBUG_STR_SECTION,
27590 DEBUG_STR_SECTION_FLAGS, NULL);
27591 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27592 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27593 DEBUG_STR_SECTION_FLAGS, NULL);
27594 debug_ranges_section = get_section (dwarf_version >= 5
27595 ? DEBUG_RNGLISTS_SECTION
27596 : DEBUG_RANGES_SECTION,
27597 SECTION_DEBUG, NULL);
27598 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27599 SECTION_DEBUG, NULL);
27600 }
27601
27602 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27603 DEBUG_ABBREV_SECTION_LABEL, generation);
27604 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27605 DEBUG_INFO_SECTION_LABEL, generation);
27606 info_section_emitted = false;
27607 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27608 DEBUG_LINE_SECTION_LABEL, generation);
27609 /* There are up to 4 unique ranges labels per generation.
27610 See also output_rnglists. */
27611 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27612 DEBUG_RANGES_SECTION_LABEL, generation * 4);
27613 if (dwarf_version >= 5 && dwarf_split_debug_info)
27614 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27615 DEBUG_RANGES_SECTION_LABEL,
27616 1 + generation * 4);
27617 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27618 DEBUG_ADDR_SECTION_LABEL, generation);
27619 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27620 (dwarf_strict && dwarf_version < 5)
27621 ? DEBUG_MACINFO_SECTION_LABEL
27622 : DEBUG_MACRO_SECTION_LABEL, generation);
27623 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
27624 generation);
27625
27626 ++generation;
27627 return generation - 1;
27628 }
27629
27630 /* Set up for Dwarf output at the start of compilation. */
27631
27632 static void
27633 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27634 {
27635 /* Allocate the file_table. */
27636 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27637
27638 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27639 /* Allocate the decl_die_table. */
27640 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27641
27642 /* Allocate the decl_loc_table. */
27643 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27644
27645 /* Allocate the cached_dw_loc_list_table. */
27646 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27647
27648 /* Allocate the initial hunk of the decl_scope_table. */
27649 vec_alloc (decl_scope_table, 256);
27650
27651 /* Allocate the initial hunk of the abbrev_die_table. */
27652 vec_alloc (abbrev_die_table, 256);
27653 /* Zero-th entry is allocated, but unused. */
27654 abbrev_die_table->quick_push (NULL);
27655
27656 /* Allocate the dwarf_proc_stack_usage_map. */
27657 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27658
27659 /* Allocate the pubtypes and pubnames vectors. */
27660 vec_alloc (pubname_table, 32);
27661 vec_alloc (pubtype_table, 32);
27662
27663 vec_alloc (incomplete_types, 64);
27664
27665 vec_alloc (used_rtx_array, 32);
27666
27667 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27668 vec_alloc (macinfo_table, 64);
27669 #endif
27670
27671 /* If front-ends already registered a main translation unit but we were not
27672 ready to perform the association, do this now. */
27673 if (main_translation_unit != NULL_TREE)
27674 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27675 }
27676
27677 /* Called before compile () starts outputting functions, variables
27678 and toplevel asms into assembly. */
27679
27680 static void
27681 dwarf2out_assembly_start (void)
27682 {
27683 if (text_section_line_info)
27684 return;
27685
27686 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27687 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27688 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27689 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27690 COLD_TEXT_SECTION_LABEL, 0);
27691 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27692
27693 switch_to_section (text_section);
27694 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27695 #endif
27696
27697 /* Make sure the line number table for .text always exists. */
27698 text_section_line_info = new_line_info_table ();
27699 text_section_line_info->end_label = text_end_label;
27700
27701 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27702 cur_line_info_table = text_section_line_info;
27703 #endif
27704
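/* When we emit CFI through assembler directives but no .eh_frame is
   wanted, ask the assembler to produce .debug_frame from the .cfi_*
   directives; by default gas emits only .eh_frame from them.  */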
27705 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27706 && dwarf2out_do_cfi_asm ()
27707 && !dwarf2out_do_eh_frame ())
27708 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27709 }
27710
27711 /* A helper function for dwarf2out_finish called through
27712 htab_traverse. Assign a string its index. All strings must be
27713 collected into the table by the time index_string is called,
27714 because the indexing code relies on htab_traverse to traverse nodes
27715 in the same order for each run. */
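/* Together with output_index_string_offset and output_index_string
   below, this implements a three-pass scheme over debug_str_hash:
   first assign each DW_FORM_GNU_str_index string an index, then emit
   the .debug_str_offsets entries in that same traversal order, and
   finally emit the string bytes themselves, asserting that the order
   still matches the indexes handed out here.  */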
27716
27717 int
27718 index_string (indirect_string_node **h, unsigned int *index)
27719 {
27720 indirect_string_node *node = *h;
27721
27722 find_string_form (node);
27723 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27724 {
27725 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27726 node->index = *index;
27727 *index += 1;
27728 }
27729 return 1;
27730 }
27731
27732 /* A helper function for output_indirect_strings called through
27733 htab_traverse. Output the offset to a string and update the
27734 current offset. */
27735
27736 int
27737 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27738 {
27739 indirect_string_node *node = *h;
27740
27741 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27742 {
27743 /* Assert that this node has been assigned an index. */
27744 gcc_assert (node->index != NO_INDEX_ASSIGNED
27745 && node->index != NOT_INDEXED);
27746 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27747 "indexed string 0x%x: %s", node->index, node->str);
27748 *offset += strlen (node->str) + 1;
27749 }
27750 return 1;
27751 }
27752
27753 /* A helper function for dwarf2out_finish called through
27754 htab_traverse. Output the indexed string. */
27755
27756 int
27757 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27758 {
27759 struct indirect_string_node *node = *h;
27760
27761 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27762 {
27763 /* Assert that the strings are output in the same order as their
27764 indexes were assigned. */
27765 gcc_assert (*cur_idx == node->index);
27766 assemble_string (node->str, strlen (node->str) + 1);
27767 *cur_idx += 1;
27768 }
27769 return 1;
27770 }
27771
27772 /* A helper function for dwarf2out_finish called through
27773 htab_traverse. Emit one queued .debug_str string. */
27774
27775 int
27776 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27777 {
27778 struct indirect_string_node *node = *h;
27779
27780 node->form = find_string_form (node);
27781 if (node->form == form && node->refcount > 0)
27782 {
27783 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27784 assemble_string (node->str, strlen (node->str) + 1);
27785 }
27786
27787 return 1;
27788 }
27789
27790 /* Output the indexed string table. */
27791
27792 static void
27793 output_indirect_strings (void)
27794 {
27795 switch_to_section (debug_str_section);
27796 if (!dwarf_split_debug_info)
27797 debug_str_hash->traverse<enum dwarf_form,
27798 output_indirect_string> (DW_FORM_strp);
27799 else
27800 {
27801 unsigned int offset = 0;
27802 unsigned int cur_idx = 0;
27803
27804 if (skeleton_debug_str_hash)
27805 skeleton_debug_str_hash->traverse<enum dwarf_form,
27806 output_indirect_string> (DW_FORM_strp);
27807
27808 switch_to_section (debug_str_offsets_section);
27809 debug_str_hash->traverse_noresize
27810 <unsigned int *, output_index_string_offset> (&offset);
27811 switch_to_section (debug_str_dwo_section);
27812 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27813 (&cur_idx);
27814 }
27815 }
27816
27817 /* Callback for htab_traverse to assign an index to an entry in the
27818 table, and to write that entry to the .debug_addr section. */
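/* Unreferenced entries are skipped (and must never have been handed an
   index); every referenced entry must come out in exactly the order its
   index was assigned, since DW_OP_GNU_addr_index operands encode
   positions in this table.  */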
27819
27820 int
27821 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27822 {
27823 addr_table_entry *entry = *slot;
27824
27825 if (entry->refcount == 0)
27826 {
27827 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27828 || entry->index == NOT_INDEXED);
27829 return 1;
27830 }
27831
27832 gcc_assert (entry->index == *cur_index);
27833 (*cur_index)++;
27834
27835 switch (entry->kind)
27836 {
27837 case ate_kind_rtx:
27838 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27839 "0x%x", entry->index);
27840 break;
27841 case ate_kind_rtx_dtprel:
27842 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27843 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27844 DWARF2_ADDR_SIZE,
27845 entry->addr.rtl);
27846 fputc ('\n', asm_out_file);
27847 break;
27848 case ate_kind_label:
27849 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27850 "0x%x", entry->index);
27851 break;
27852 default:
27853 gcc_unreachable ();
27854 }
27855 return 1;
27856 }
27857
27858 /* Produce the .debug_addr section. */
27859
27860 static void
27861 output_addr_table (void)
27862 {
27863 unsigned int index = 0;
27864 if (addr_index_table == NULL || addr_index_table->size () == 0)
27865 return;
27866
27867 switch_to_section (debug_addr_section);
27868 addr_index_table
27869 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27870 }
27871
27872 #if ENABLE_ASSERT_CHECKING
27873 /* Verify that all marks are clear. */
27874
27875 static void
27876 verify_marks_clear (dw_die_ref die)
27877 {
27878 dw_die_ref c;
27879
27880 gcc_assert (! die->die_mark);
27881 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27882 }
27883 #endif /* ENABLE_ASSERT_CHECKING */
27884
27885 /* Clear the marks for a die and its children.
27886 Do nothing if the mark isn't already set. */
27887
27888 static void
27889 prune_unmark_dies (dw_die_ref die)
27890 {
27891 dw_die_ref c;
27892
27893 if (die->die_mark)
27894 die->die_mark = 0;
27895 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27896 }
27897
27898 /* Given LOC that is referenced by a DIE we're marking as used, find all
27899 DWARF procedures it references and mark them as used. */
27900
27901 static void
27902 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27903 {
27904 for (; loc != NULL; loc = loc->dw_loc_next)
27905 switch (loc->dw_loc_opc)
27906 {
27907 case DW_OP_implicit_pointer:
27908 case DW_OP_convert:
27909 case DW_OP_reinterpret:
27910 case DW_OP_GNU_implicit_pointer:
27911 case DW_OP_GNU_convert:
27912 case DW_OP_GNU_reinterpret:
27913 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27914 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27915 break;
27916 case DW_OP_GNU_variable_value:
27917 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
27918 {
27919 dw_die_ref ref
27920 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
27921 if (ref == NULL)
27922 break;
27923 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
27924 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
27925 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
27926 }
27927 /* FALLTHRU */
27928 case DW_OP_call2:
27929 case DW_OP_call4:
27930 case DW_OP_call_ref:
27931 case DW_OP_const_type:
27932 case DW_OP_GNU_const_type:
27933 case DW_OP_GNU_parameter_ref:
27934 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27935 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27936 break;
27937 case DW_OP_regval_type:
27938 case DW_OP_deref_type:
27939 case DW_OP_GNU_regval_type:
27940 case DW_OP_GNU_deref_type:
27941 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27942 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27943 break;
27944 case DW_OP_entry_value:
27945 case DW_OP_GNU_entry_value:
27946 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27947 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27948 break;
27949 default:
27950 break;
27951 }
27952 }
27953
27954 /* Given DIE that we're marking as used, find any other DIEs
27955 it references through its attributes and mark them as used. */
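/* Note the dw_val_class_str case below: string refcounts are reset to
   zero here and recounted later by prune_unused_types_update_strings,
   so that only strings reachable from surviving DIEs end up in
   debug_str_hash.  */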
27956
27957 static void
27958 prune_unused_types_walk_attribs (dw_die_ref die)
27959 {
27960 dw_attr_node *a;
27961 unsigned ix;
27962
27963 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27964 {
27965 switch (AT_class (a))
27966 {
27967 /* Make sure DWARF procedures referenced by location descriptions will
27968 get emitted. */
27969 case dw_val_class_loc:
27970 prune_unused_types_walk_loc_descr (AT_loc (a));
27971 break;
27972 case dw_val_class_loc_list:
27973 for (dw_loc_list_ref list = AT_loc_list (a);
27974 list != NULL;
27975 list = list->dw_loc_next)
27976 prune_unused_types_walk_loc_descr (list->expr);
27977 break;
27978
27979 case dw_val_class_die_ref:
27980 /* A reference to another DIE.
27981 Make sure that it will get emitted.
27982 If it was broken out into a comdat group, don't follow it. */
27983 if (! AT_ref (a)->comdat_type_p
27984 || a->dw_attr == DW_AT_specification)
27985 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27986 break;
27987
27988 case dw_val_class_str:
27989 /* Set the string's refcount to 0 so that prune_unused_types_mark
27990 accounts properly for it. */
27991 a->dw_attr_val.v.val_str->refcount = 0;
27992 break;
27993
27994 default:
27995 break;
27996 }
27997 }
27998 }
27999
28000 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
28001
28002 static void
28003 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28004 {
28005 dw_die_ref c;
28006
28007 if (die == NULL || die->die_child == NULL)
28008 return;
28009 c = die->die_child;
28010 do
28011 {
28012 if (is_template_parameter (c))
28013 prune_unused_types_mark (c, 1);
28014 c = c->die_sib;
28015 } while (c && c != die->die_child);
28016 }
28017
28018 /* Mark DIE as being used. If DOKIDS is true, then walk down
28019 to DIE's children. */
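/* die_mark works as a small state machine here: 0 means the DIE has not
   been visited, 1 means it has been marked as used but its children have
   not been walked, and 2 means both the DIE and its children have been
   processed.  */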
28020
28021 static void
28022 prune_unused_types_mark (dw_die_ref die, int dokids)
28023 {
28024 dw_die_ref c;
28025
28026 if (die->die_mark == 0)
28027 {
28028 /* We haven't done this node yet. Mark it as used. */
28029 die->die_mark = 1;
28030 /* If this is the DIE of a generic type instantiation,
28031 mark the children DIEs that describe its generic parms and
28032 args. */
28033 prune_unused_types_mark_generic_parms_dies (die);
28034
28035 /* We also have to mark its parents as used.
28036 (But we don't want to mark our parent's kids due to this,
28037 unless it is a class.) */
28038 if (die->die_parent)
28039 prune_unused_types_mark (die->die_parent,
28040 class_scope_p (die->die_parent));
28041
28042 /* Mark any referenced nodes. */
28043 prune_unused_types_walk_attribs (die);
28044
28045 /* If this node is a specification,
28046 also mark the definition, if it exists. */
28047 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
28048 prune_unused_types_mark (die->die_definition, 1);
28049 }
28050
28051 if (dokids && die->die_mark != 2)
28052 {
28053 /* We need to walk the children, but haven't done so yet.
28054 Remember that we've walked the kids. */
28055 die->die_mark = 2;
28056
28057 /* If this is an array type, we need to make sure our
28058 kids get marked, even if they're types. If we're
28059 breaking out types into comdat sections, do this
28060 for all type definitions. */
28061 if (die->die_tag == DW_TAG_array_type
28062 || (use_debug_types
28063 && is_type_die (die) && ! is_declaration_die (die)))
28064 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
28065 else
28066 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28067 }
28068 }
28069
28070 /* For local classes, check whether any static member functions were
28071 emitted and, if so, mark them. */
28072
28073 static void
28074 prune_unused_types_walk_local_classes (dw_die_ref die)
28075 {
28076 dw_die_ref c;
28077
28078 if (die->die_mark == 2)
28079 return;
28080
28081 switch (die->die_tag)
28082 {
28083 case DW_TAG_structure_type:
28084 case DW_TAG_union_type:
28085 case DW_TAG_class_type:
28086 break;
28087
28088 case DW_TAG_subprogram:
28089 if (!get_AT_flag (die, DW_AT_declaration)
28090 || die->die_definition != NULL)
28091 prune_unused_types_mark (die, 1);
28092 return;
28093
28094 default:
28095 return;
28096 }
28097
28098 /* Mark children. */
28099 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
28100 }
28101
28102 /* Walk the tree DIE and mark types that we actually use. */
28103
28104 static void
28105 prune_unused_types_walk (dw_die_ref die)
28106 {
28107 dw_die_ref c;
28108
28109 /* Don't do anything if this node is already marked and
28110 children have been marked as well. */
28111 if (die->die_mark == 2)
28112 return;
28113
28114 switch (die->die_tag)
28115 {
28116 case DW_TAG_structure_type:
28117 case DW_TAG_union_type:
28118 case DW_TAG_class_type:
28119 if (die->die_perennial_p)
28120 break;
28121
28122 for (c = die->die_parent; c; c = c->die_parent)
28123 if (c->die_tag == DW_TAG_subprogram)
28124 break;
28125
28126 /* Finding used static member functions inside of classes
28127 is needed just for local classes, because for other classes
28128 static member function DIEs with DW_AT_specification
28129 are emitted outside of the DW_TAG_*_type. If we ever change
28130 it, we'd need to call this even for non-local classes. */
28131 if (c)
28132 prune_unused_types_walk_local_classes (die);
28133
28134 /* It's a type node --- don't mark it. */
28135 return;
28136
28137 case DW_TAG_const_type:
28138 case DW_TAG_packed_type:
28139 case DW_TAG_pointer_type:
28140 case DW_TAG_reference_type:
28141 case DW_TAG_rvalue_reference_type:
28142 case DW_TAG_volatile_type:
28143 case DW_TAG_typedef:
28144 case DW_TAG_array_type:
28145 case DW_TAG_interface_type:
28146 case DW_TAG_friend:
28147 case DW_TAG_enumeration_type:
28148 case DW_TAG_subroutine_type:
28149 case DW_TAG_string_type:
28150 case DW_TAG_set_type:
28151 case DW_TAG_subrange_type:
28152 case DW_TAG_ptr_to_member_type:
28153 case DW_TAG_file_type:
28154 /* Type nodes are useful only when other DIEs reference them --- don't
28155 mark them. */
28156 /* FALLTHROUGH */
28157
28158 case DW_TAG_dwarf_procedure:
28159 /* Likewise for DWARF procedures. */
28160
28161 if (die->die_perennial_p)
28162 break;
28163
28164 return;
28165
28166 default:
28167 /* Mark everything else. */
28168 break;
28169 }
28170
28171 if (die->die_mark == 0)
28172 {
28173 die->die_mark = 1;
28174
28175 /* Now, mark any dies referenced from here. */
28176 prune_unused_types_walk_attribs (die);
28177 }
28178
28179 die->die_mark = 2;
28180
28181 /* Mark children. */
28182 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28183 }
28184
28185 /* Increment the string counts on strings referred to from DIE's
28186 attributes. */
28187
28188 static void
28189 prune_unused_types_update_strings (dw_die_ref die)
28190 {
28191 dw_attr_node *a;
28192 unsigned ix;
28193
28194 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28195 if (AT_class (a) == dw_val_class_str)
28196 {
28197 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
28198 s->refcount++;
28199 /* Avoid unnecessarily putting strings that are used less than
28200 twice in the hash table. */
28201 if (s->refcount
28202 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
28203 {
28204 indirect_string_node **slot
28205 = debug_str_hash->find_slot_with_hash (s->str,
28206 htab_hash_string (s->str),
28207 INSERT);
28208 gcc_assert (*slot == NULL);
28209 *slot = s;
28210 }
28211 }
28212 }
28213
28214 /* Mark DIE and its children as removed. */
28215
28216 static void
28217 mark_removed (dw_die_ref die)
28218 {
28219 dw_die_ref c;
28220 die->removed = true;
28221 FOR_EACH_CHILD (die, c, mark_removed (c));
28222 }
28223
28224 /* Remove from the tree DIE any dies that aren't marked. */
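/* Children of a DIE form a circular singly-linked list: die_child points
   at the last child and each child's die_sib points at the next one,
   wrapping back to the first.  The surgery below unlinks unmarked
   children while keeping that invariant intact.  */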
28225
28226 static void
28227 prune_unused_types_prune (dw_die_ref die)
28228 {
28229 dw_die_ref c;
28230
28231 gcc_assert (die->die_mark);
28232 prune_unused_types_update_strings (die);
28233
28234 if (! die->die_child)
28235 return;
28236
28237 c = die->die_child;
28238 do {
28239 dw_die_ref prev = c, next;
28240 for (c = c->die_sib; ! c->die_mark; c = next)
28241 if (c == die->die_child)
28242 {
28243 /* No marked children between 'prev' and the end of the list. */
28244 if (prev == c)
28245 /* No marked children at all. */
28246 die->die_child = NULL;
28247 else
28248 {
28249 prev->die_sib = c->die_sib;
28250 die->die_child = prev;
28251 }
28252 c->die_sib = NULL;
28253 mark_removed (c);
28254 return;
28255 }
28256 else
28257 {
28258 next = c->die_sib;
28259 c->die_sib = NULL;
28260 mark_removed (c);
28261 }
28262
28263 if (c != prev->die_sib)
28264 prev->die_sib = c;
28265 prune_unused_types_prune (c);
28266 } while (c != die->die_child);
28267 }
28268
28269 /* Remove dies representing declarations that we never use. */
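/* This is a mark-and-sweep pass over the DIE forest: verify all marks
   are clear (when checking), premark types used by global variables,
   mark everything reachable from the compilation unit, limbo and comdat
   roots plus a few special roots (pubnames, base types, potential
   DW_AT_call_origin targets), prune the unmarked DIEs, and finally
   clear the marks again.  */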
28270
28271 static void
28272 prune_unused_types (void)
28273 {
28274 unsigned int i;
28275 limbo_die_node *node;
28276 comdat_type_node *ctnode;
28277 pubname_entry *pub;
28278 dw_die_ref base_type;
28279
28280 #if ENABLE_ASSERT_CHECKING
28281 /* All the marks should already be clear. */
28282 verify_marks_clear (comp_unit_die ());
28283 for (node = limbo_die_list; node; node = node->next)
28284 verify_marks_clear (node->die);
28285 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28286 verify_marks_clear (ctnode->root_die);
28287 #endif /* ENABLE_ASSERT_CHECKING */
28288
28289 /* Mark types that are used in global variables. */
28290 premark_types_used_by_global_vars ();
28291
28292 /* Set the mark on nodes that are actually used. */
28293 prune_unused_types_walk (comp_unit_die ());
28294 for (node = limbo_die_list; node; node = node->next)
28295 prune_unused_types_walk (node->die);
28296 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28297 {
28298 prune_unused_types_walk (ctnode->root_die);
28299 prune_unused_types_mark (ctnode->type_die, 1);
28300 }
28301
28302 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
28303 are unusual in that they are pubnames that are the children of pubtypes.
28304 They should only be marked via their parent DW_TAG_enumeration_type die,
28305 not as roots in themselves. */
28306 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
28307 if (pub->die->die_tag != DW_TAG_enumerator)
28308 prune_unused_types_mark (pub->die, 1);
28309 for (i = 0; base_types.iterate (i, &base_type); i++)
28310 prune_unused_types_mark (base_type, 1);
28311
28312 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
28313 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
28314 callees). */
28315 cgraph_node *cnode;
28316 FOR_EACH_FUNCTION (cnode)
28317 if (cnode->referred_to_p (false))
28318 {
28319 dw_die_ref die = lookup_decl_die (cnode->decl);
28320 if (die == NULL || die->die_mark)
28321 continue;
28322 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
28323 if (e->caller != cnode
28324 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
28325 {
28326 prune_unused_types_mark (die, 1);
28327 break;
28328 }
28329 }
28330
28331 if (debug_str_hash)
28332 debug_str_hash->empty ();
28333 if (skeleton_debug_str_hash)
28334 skeleton_debug_str_hash->empty ();
28335 prune_unused_types_prune (comp_unit_die ());
28336 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
28337 {
28338 node = *pnode;
28339 if (!node->die->die_mark)
28340 *pnode = node->next;
28341 else
28342 {
28343 prune_unused_types_prune (node->die);
28344 pnode = &node->next;
28345 }
28346 }
28347 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28348 prune_unused_types_prune (ctnode->root_die);
28349
28350 /* Leave the marks clear. */
28351 prune_unmark_dies (comp_unit_die ());
28352 for (node = limbo_die_list; node; node = node->next)
28353 prune_unmark_dies (node->die);
28354 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28355 prune_unmark_dies (ctnode->root_die);
28356 }
28357
28358 /* Helpers to manipulate hash table of comdat type units. */
28359
28360 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
28361 {
28362 static inline hashval_t hash (const comdat_type_node *);
28363 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
28364 };
28365
28366 inline hashval_t
28367 comdat_type_hasher::hash (const comdat_type_node *type_node)
28368 {
28369 hashval_t h;
28370 memcpy (&h, type_node->signature, sizeof (h));
28371 return h;
28372 }
28373
28374 inline bool
28375 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
28376 const comdat_type_node *type_node_2)
28377 {
28378 return (! memcmp (type_node_1->signature, type_node_2->signature,
28379 DWARF_TYPE_SIGNATURE_SIZE));
28380 }
28381
28382 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
28383 to the location it would have been added, had we known its
28384 DECL_ASSEMBLER_NAME when we added the other attributes. This will
28385 probably improve compactness of debug info, removing equivalent
28386 abbrevs, and hide any differences caused by deferring the
28387 computation of the assembler name, triggered by e.g. PCH. */
28388
28389 static inline void
28390 move_linkage_attr (dw_die_ref die)
28391 {
28392 unsigned ix = vec_safe_length (die->die_attr);
28393 dw_attr_node linkage = (*die->die_attr)[ix - 1];
28394
28395 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
28396 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
28397
28398 while (--ix > 0)
28399 {
28400 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
28401
28402 if (prev->dw_attr == DW_AT_decl_line
28403 || prev->dw_attr == DW_AT_decl_column
28404 || prev->dw_attr == DW_AT_name)
28405 break;
28406 }
28407
28408 if (ix != vec_safe_length (die->die_attr) - 1)
28409 {
28410 die->die_attr->pop ();
28411 die->die_attr->quick_insert (ix, linkage);
28412 }
28413 }
28414
28415 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28416 referenced from typed stack ops and count how often they are used. */
28417
28418 static void
28419 mark_base_types (dw_loc_descr_ref loc)
28420 {
28421 dw_die_ref base_type = NULL;
28422
28423 for (; loc; loc = loc->dw_loc_next)
28424 {
28425 switch (loc->dw_loc_opc)
28426 {
28427 case DW_OP_regval_type:
28428 case DW_OP_deref_type:
28429 case DW_OP_GNU_regval_type:
28430 case DW_OP_GNU_deref_type:
28431 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28432 break;
28433 case DW_OP_convert:
28434 case DW_OP_reinterpret:
28435 case DW_OP_GNU_convert:
28436 case DW_OP_GNU_reinterpret:
28437 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28438 continue;
28439 /* FALLTHRU */
28440 case DW_OP_const_type:
28441 case DW_OP_GNU_const_type:
28442 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28443 break;
28444 case DW_OP_entry_value:
28445 case DW_OP_GNU_entry_value:
28446 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28447 continue;
28448 default:
28449 continue;
28450 }
28451 gcc_assert (base_type->die_parent == comp_unit_die ());
28452 if (base_type->die_mark)
28453 base_type->die_mark++;
28454 else
28455 {
28456 base_types.safe_push (base_type);
28457 base_type->die_mark = 1;
28458 }
28459 }
28460 }
28461
28462 /* Comparison function for sorting marked base types. */
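/* Sort primarily by descending use count (die_mark, as counted by
   mark_base_types); ties are broken by byte size, encoding and
   alignment, also in descending order.  */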
28463
28464 static int
28465 base_type_cmp (const void *x, const void *y)
28466 {
28467 dw_die_ref dx = *(const dw_die_ref *) x;
28468 dw_die_ref dy = *(const dw_die_ref *) y;
28469 unsigned int byte_size1, byte_size2;
28470 unsigned int encoding1, encoding2;
28471 unsigned int align1, align2;
28472 if (dx->die_mark > dy->die_mark)
28473 return -1;
28474 if (dx->die_mark < dy->die_mark)
28475 return 1;
28476 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28477 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28478 if (byte_size1 < byte_size2)
28479 return 1;
28480 if (byte_size1 > byte_size2)
28481 return -1;
28482 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28483 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28484 if (encoding1 < encoding2)
28485 return 1;
28486 if (encoding1 > encoding2)
28487 return -1;
28488 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28489 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28490 if (align1 < align2)
28491 return 1;
28492 if (align1 > align2)
28493 return -1;
28494 return 0;
28495 }
28496
28497 /* Move base types marked by mark_base_types as early as possible
28498 in the CU, sorted by decreasing usage count, both to make the
28499 uleb128 references as small as possible and to make sure they
28500 will have die_offset already computed by calc_die_sizes when
28501 the sizes of typed stack loc ops are computed. */
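/* The payoff: typed stack ops such as DW_OP_convert encode their base
   type as a uleb128 offset from the start of the CU, so the most
   frequently referenced base types get the smallest (and therefore
   shortest) offsets.  */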
28502
28503 static void
28504 move_marked_base_types (void)
28505 {
28506 unsigned int i;
28507 dw_die_ref base_type, die, c;
28508
28509 if (base_types.is_empty ())
28510 return;
28511
28512 /* Sort by decreasing usage count, they will be added again in that
28513 order later on. */
28514 base_types.qsort (base_type_cmp);
28515 die = comp_unit_die ();
28516 c = die->die_child;
28517 do
28518 {
28519 dw_die_ref prev = c;
28520 c = c->die_sib;
28521 while (c->die_mark)
28522 {
28523 remove_child_with_prev (c, prev);
28524 /* As base types got marked, there must be at least
28525 one node other than DW_TAG_base_type. */
28526 gcc_assert (die->die_child != NULL);
28527 c = prev->die_sib;
28528 }
28529 }
28530 while (c != die->die_child);
28531 gcc_assert (die->die_child);
28532 c = die->die_child;
28533 for (i = 0; base_types.iterate (i, &base_type); i++)
28534 {
28535 base_type->die_mark = 0;
28536 base_type->die_sib = c->die_sib;
28537 c->die_sib = base_type;
28538 c = base_type;
28539 }
28540 }
28541
28542 /* Helper function for resolve_addr, attempt to resolve
28543 one CONST_STRING, return true if successful. Similarly verify that
28544 SYMBOL_REFs refer to variables emitted in the current CU. */
28545
28546 static bool
28547 resolve_one_addr (rtx *addr)
28548 {
28549 rtx rtl = *addr;
28550
28551 if (GET_CODE (rtl) == CONST_STRING)
28552 {
28553 size_t len = strlen (XSTR (rtl, 0)) + 1;
28554 tree t = build_string (len, XSTR (rtl, 0));
28555 tree tlen = size_int (len - 1);
28556 TREE_TYPE (t)
28557 = build_array_type (char_type_node, build_index_type (tlen));
28558 rtl = lookup_constant_def (t);
28559 if (!rtl || !MEM_P (rtl))
28560 return false;
28561 rtl = XEXP (rtl, 0);
28562 if (GET_CODE (rtl) == SYMBOL_REF
28563 && SYMBOL_REF_DECL (rtl)
28564 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28565 return false;
28566 vec_safe_push (used_rtx_array, rtl);
28567 *addr = rtl;
28568 return true;
28569 }
28570
28571 if (GET_CODE (rtl) == SYMBOL_REF
28572 && SYMBOL_REF_DECL (rtl))
28573 {
28574 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28575 {
28576 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28577 return false;
28578 }
28579 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28580 return false;
28581 }
28582
28583 if (GET_CODE (rtl) == CONST)
28584 {
28585 subrtx_ptr_iterator::array_type array;
28586 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28587 if (!resolve_one_addr (*iter))
28588 return false;
28589 }
28590
28591 return true;
28592 }
28593
28594 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
28595 if possible, and create DW_TAG_dwarf_procedure that can be referenced
28596 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
28597
28598 static rtx
28599 string_cst_pool_decl (tree t)
28600 {
28601 rtx rtl = output_constant_def (t, 1);
28602 unsigned char *array;
28603 dw_loc_descr_ref l;
28604 tree decl;
28605 size_t len;
28606 dw_die_ref ref;
28607
28608 if (!rtl || !MEM_P (rtl))
28609 return NULL_RTX;
28610 rtl = XEXP (rtl, 0);
28611 if (GET_CODE (rtl) != SYMBOL_REF
28612 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28613 return NULL_RTX;
28614
28615 decl = SYMBOL_REF_DECL (rtl);
28616 if (!lookup_decl_die (decl))
28617 {
28618 len = TREE_STRING_LENGTH (t);
28619 vec_safe_push (used_rtx_array, rtl);
28620 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28621 array = ggc_vec_alloc<unsigned char> (len);
28622 memcpy (array, TREE_STRING_POINTER (t), len);
28623 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28624 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28625 l->dw_loc_oprnd2.v.val_vec.length = len;
28626 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28627 l->dw_loc_oprnd2.v.val_vec.array = array;
28628 add_AT_loc (ref, DW_AT_location, l);
28629 equate_decl_number_to_die (decl, ref);
28630 }
28631 return rtl;
28632 }
28633
28634 /* Helper function of resolve_addr_in_expr. LOC is
28635 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28636 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28637 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
28638 with DW_OP_implicit_pointer if possible and return true;
28639 if unsuccessful, return false. */
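/* In other words, an unresolvable "DW_OP_addr <sym> DW_OP_stack_value"
   becomes "DW_OP_implicit_pointer <DIE of sym's decl> <offset>" when
   that decl's DIE already carries a DW_AT_location or DW_AT_const_value
   the consumer can chase instead.  */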
28640
28641 static bool
28642 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28643 {
28644 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28645 HOST_WIDE_INT offset = 0;
28646 dw_die_ref ref = NULL;
28647 tree decl;
28648
28649 if (GET_CODE (rtl) == CONST
28650 && GET_CODE (XEXP (rtl, 0)) == PLUS
28651 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28652 {
28653 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28654 rtl = XEXP (XEXP (rtl, 0), 0);
28655 }
28656 if (GET_CODE (rtl) == CONST_STRING)
28657 {
28658 size_t len = strlen (XSTR (rtl, 0)) + 1;
28659 tree t = build_string (len, XSTR (rtl, 0));
28660 tree tlen = size_int (len - 1);
28661
28662 TREE_TYPE (t)
28663 = build_array_type (char_type_node, build_index_type (tlen));
28664 rtl = string_cst_pool_decl (t);
28665 if (!rtl)
28666 return false;
28667 }
28668 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28669 {
28670 decl = SYMBOL_REF_DECL (rtl);
28671 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28672 {
28673 ref = lookup_decl_die (decl);
28674 if (ref && (get_AT (ref, DW_AT_location)
28675 || get_AT (ref, DW_AT_const_value)))
28676 {
28677 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28678 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28679 loc->dw_loc_oprnd1.val_entry = NULL;
28680 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28681 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28682 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28683 loc->dw_loc_oprnd2.v.val_int = offset;
28684 return true;
28685 }
28686 }
28687 }
28688 return false;
28689 }
28690
28691 /* Helper function for resolve_addr: handle one location
28692 expression and return false if at least one CONST_STRING or SYMBOL_REF in
28693 the location list couldn't be resolved. */
28694
28695 static bool
28696 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
28697 {
28698 dw_loc_descr_ref keep = NULL;
28699 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28700 switch (loc->dw_loc_opc)
28701 {
28702 case DW_OP_addr:
28703 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28704 {
28705 if ((prev == NULL
28706 || prev->dw_loc_opc == DW_OP_piece
28707 || prev->dw_loc_opc == DW_OP_bit_piece)
28708 && loc->dw_loc_next
28709 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28710 && (!dwarf_strict || dwarf_version >= 5)
28711 && optimize_one_addr_into_implicit_ptr (loc))
28712 break;
28713 return false;
28714 }
28715 break;
28716 case DW_OP_GNU_addr_index:
28717 case DW_OP_GNU_const_index:
28718 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28719 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28720 {
28721 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28722 if (!resolve_one_addr (&rtl))
28723 return false;
28724 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28725 loc->dw_loc_oprnd1.val_entry
28726 = add_addr_table_entry (rtl, ate_kind_rtx);
28727 }
28728 break;
28729 case DW_OP_const4u:
28730 case DW_OP_const8u:
28731 if (loc->dtprel
28732 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28733 return false;
28734 break;
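/* For DW_OP_plus_uconst, pushing the constant with int_loc_descriptor
   and adding it with DW_OP_plus is sometimes shorter than the single
   uleb128-encoded op; rewrite it when the size check below says so.  */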
28735 case DW_OP_plus_uconst:
28736 if (size_of_loc_descr (loc)
28737 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28738 + 1
28739 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28740 {
28741 dw_loc_descr_ref repl
28742 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28743 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28744 add_loc_descr (&repl, loc->dw_loc_next);
28745 *loc = *repl;
28746 }
28747 break;
28748 case DW_OP_implicit_value:
28749 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28750 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28751 return false;
28752 break;
28753 case DW_OP_implicit_pointer:
28754 case DW_OP_GNU_implicit_pointer:
28755 case DW_OP_GNU_parameter_ref:
28756 case DW_OP_GNU_variable_value:
28757 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28758 {
28759 dw_die_ref ref
28760 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28761 if (ref == NULL)
28762 return false;
28763 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28764 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28765 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28766 }
28767 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
28768 {
28769 if (prev == NULL
28770 && loc->dw_loc_next == NULL
28771 && AT_class (a) == dw_val_class_loc)
28772 switch (a->dw_attr)
28773 {
28774 /* Following attributes allow both exprloc and reference,
28775 so if the whole expression is DW_OP_GNU_variable_value
28776 alone we could transform it into reference. */
28777 case DW_AT_byte_size:
28778 case DW_AT_bit_size:
28779 case DW_AT_lower_bound:
28780 case DW_AT_upper_bound:
28781 case DW_AT_bit_stride:
28782 case DW_AT_count:
28783 case DW_AT_allocated:
28784 case DW_AT_associated:
28785 case DW_AT_byte_stride:
28786 a->dw_attr_val.val_class = dw_val_class_die_ref;
28787 a->dw_attr_val.val_entry = NULL;
28788 a->dw_attr_val.v.val_die_ref.die
28789 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28790 a->dw_attr_val.v.val_die_ref.external = 0;
28791 return true;
28792 default:
28793 break;
28794 }
28795 if (dwarf_strict)
28796 return false;
28797 }
28798 break;
28799 case DW_OP_const_type:
28800 case DW_OP_regval_type:
28801 case DW_OP_deref_type:
28802 case DW_OP_convert:
28803 case DW_OP_reinterpret:
28804 case DW_OP_GNU_const_type:
28805 case DW_OP_GNU_regval_type:
28806 case DW_OP_GNU_deref_type:
28807 case DW_OP_GNU_convert:
28808 case DW_OP_GNU_reinterpret:
28809 while (loc->dw_loc_next
28810 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28811 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28812 {
28813 dw_die_ref base1, base2;
28814 unsigned enc1, enc2, size1, size2;
28815 if (loc->dw_loc_opc == DW_OP_regval_type
28816 || loc->dw_loc_opc == DW_OP_deref_type
28817 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28818 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28819 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28820 else if (loc->dw_loc_oprnd1.val_class
28821 == dw_val_class_unsigned_const)
28822 break;
28823 else
28824 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28825 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28826 == dw_val_class_unsigned_const)
28827 break;
28828 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28829 gcc_assert (base1->die_tag == DW_TAG_base_type
28830 && base2->die_tag == DW_TAG_base_type);
28831 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28832 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28833 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28834 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28835 if (size1 == size2
28836 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28837 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28838 && loc != keep)
28839 || enc1 == enc2))
28840 {
28841 /* Optimize away next DW_OP_convert after
28842 adjusting LOC's base type die reference. */
28843 if (loc->dw_loc_opc == DW_OP_regval_type
28844 || loc->dw_loc_opc == DW_OP_deref_type
28845 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28846 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28847 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28848 else
28849 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28850 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28851 continue;
28852 }
28853 /* Don't change integer DW_OP_convert after e.g. floating
28854 point typed stack entry. */
28855 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28856 keep = loc->dw_loc_next;
28857 break;
28858 }
28859 break;
28860 default:
28861 break;
28862 }
28863 return true;
28864 }
28865
28866 /* Helper function of resolve_addr. DIE had a DW_AT_location of
28867 DW_OP_addr alone, whose operand referred to DECL, and that
28868 DW_OP_addr couldn't be resolved. resolve_addr has already
28869 removed the DW_AT_location attribute. This function attempts to
28870 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
28871 or a DW_AT_const_value attribute, if possible. */
28872
28873 static void
28874 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28875 {
28876 if (!VAR_P (decl)
28877 || lookup_decl_die (decl) != die
28878 || DECL_EXTERNAL (decl)
28879 || !TREE_STATIC (decl)
28880 || DECL_INITIAL (decl) == NULL_TREE
28881 || DECL_P (DECL_INITIAL (decl))
28882 || get_AT (die, DW_AT_const_value))
28883 return;
28884
28885 tree init = DECL_INITIAL (decl);
28886 HOST_WIDE_INT offset = 0;
28887 /* For variables that have been optimized away and thus
28888 don't have a memory location, see if we can emit
28889 DW_AT_const_value instead. */
28890 if (tree_add_const_value_attribute (die, init))
28891 return;
28892 if (dwarf_strict && dwarf_version < 5)
28893 return;
28894 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28895 and ADDR_EXPR refers to a decl that has DW_AT_location or
28896 DW_AT_const_value (but isn't addressable, otherwise
28897 resolving the original DW_OP_addr wouldn't fail), see if
28898 we can add DW_OP_implicit_pointer. */
28899 STRIP_NOPS (init);
28900 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28901 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28902 {
28903 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28904 init = TREE_OPERAND (init, 0);
28905 STRIP_NOPS (init);
28906 }
28907 if (TREE_CODE (init) != ADDR_EXPR)
28908 return;
28909 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28910 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28911 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28912 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28913 && TREE_OPERAND (init, 0) != decl))
28914 {
28915 dw_die_ref ref;
28916 dw_loc_descr_ref l;
28917
28918 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28919 {
28920 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28921 if (!rtl)
28922 return;
28923 decl = SYMBOL_REF_DECL (rtl);
28924 }
28925 else
28926 decl = TREE_OPERAND (init, 0);
28927 ref = lookup_decl_die (decl);
28928 if (ref == NULL
28929 || (!get_AT (ref, DW_AT_location)
28930 && !get_AT (ref, DW_AT_const_value)))
28931 return;
28932 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28933 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28934 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28935 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28936 add_AT_loc (die, DW_AT_location, l);
28937 }
28938 }
28939
28940 /* Return NULL if L is a valid DWARF expression, or the first op
28941 that is not valid in a DWARF expression. */
28942
28943 static dw_loc_descr_ref
28944 non_dwarf_expression (dw_loc_descr_ref l)
28945 {
28946 while (l)
28947 {
28948 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28949 return l;
28950 switch (l->dw_loc_opc)
28951 {
28952 case DW_OP_regx:
28953 case DW_OP_implicit_value:
28954 case DW_OP_stack_value:
28955 case DW_OP_implicit_pointer:
28956 case DW_OP_GNU_implicit_pointer:
28957 case DW_OP_GNU_parameter_ref:
28958 case DW_OP_piece:
28959 case DW_OP_bit_piece:
28960 return l;
28961 default:
28962 break;
28963 }
28964 l = l->dw_loc_next;
28965 }
28966 return NULL;
28967 }
28968
28969 /* Return an adjusted copy of EXPR:
28970 If it is an empty DWARF expression, return it.
28971 If it is a valid non-empty DWARF expression,
28972 return a copy of EXPR with DW_OP_deref appended to it.
28973 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
28974 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
28975 If it is a DWARF expression followed by DW_OP_stack_value, return a
28976 copy of the DWARF expression without anything appended.
28977 Otherwise, return NULL. */
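/* The idea is to turn a location expression for an object into an
   expression computing the object's value: a plain memory location gets
   a DW_OP_deref appended, a register location DW_OP_reg{N,x} becomes
   DW_OP_breg{N,x} <0> (pushing the register's contents), and for an
   expression ending in DW_OP_stack_value the preceding ops already
   compute the value, so that terminator is simply dropped.  */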
28978
28979 static dw_loc_descr_ref
28980 copy_deref_exprloc (dw_loc_descr_ref expr)
28981 {
28982 dw_loc_descr_ref tail = NULL;
28983
28984 if (expr == NULL)
28985 return NULL;
28986
28987 dw_loc_descr_ref l = non_dwarf_expression (expr);
28988 if (l && l->dw_loc_next)
28989 return NULL;
28990
28991 if (l)
28992 {
28993 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28994 tail = new_loc_descr ((enum dwarf_location_atom)
28995 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28996 0, 0);
28997 else
28998 switch (l->dw_loc_opc)
28999 {
29000 case DW_OP_regx:
29001 tail = new_loc_descr (DW_OP_bregx,
29002 l->dw_loc_oprnd1.v.val_unsigned, 0);
29003 break;
29004 case DW_OP_stack_value:
29005 break;
29006 default:
29007 return NULL;
29008 }
29009 }
29010 else
29011 tail = new_loc_descr (DW_OP_deref, 0, 0);
29012
29013 dw_loc_descr_ref ret = NULL, *p = &ret;
29014 while (expr != l)
29015 {
29016 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29017 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29018 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29019 p = &(*p)->dw_loc_next;
29020 expr = expr->dw_loc_next;
29021 }
29022 *p = tail;
29023 return ret;
29024 }
29025
29026 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
29027 reference to a variable or argument, adjust it if needed and return:
29028 -1 if the DW_AT_string_length attribute (and the
29029 DW_AT_{string_length_,}byte_size attribute, if present) should be removed,
29030 0 to keep the attribute perhaps with minor modifications (no need to rescan),
29031 1 if the attribute has been successfully adjusted. */
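/* Roughly, the preference order when rewriting is: with DWARF 5, point
   the attribute straight at the referenced DIE (reference class);
   otherwise use DW_OP_call4 (optionally followed by DW_OP_deref) when
   the referenced DIE's location is a pure DWARF expression; as a last
   resort copy and adjust that location via copy_deref_exprloc.  */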
29032
29033 static int
29034 optimize_string_length (dw_attr_node *a)
29035 {
29036 dw_loc_descr_ref l = AT_loc (a), lv;
29037 dw_die_ref die;
29038 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29039 {
29040 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29041 die = lookup_decl_die (decl);
29042 if (die)
29043 {
29044 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29045 l->dw_loc_oprnd1.v.val_die_ref.die = die;
29046 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29047 }
29048 else
29049 return -1;
29050 }
29051 else
29052 die = l->dw_loc_oprnd1.v.val_die_ref.die;
29053
29054 /* DWARF5 allows reference class, so we can then reference the DIE.
29055 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
29056 if (l->dw_loc_next != NULL && dwarf_version >= 5)
29057 {
29058 a->dw_attr_val.val_class = dw_val_class_die_ref;
29059 a->dw_attr_val.val_entry = NULL;
29060 a->dw_attr_val.v.val_die_ref.die = die;
29061 a->dw_attr_val.v.val_die_ref.external = 0;
29062 return 0;
29063 }
29064
29065 dw_attr_node *av = get_AT (die, DW_AT_location);
29066 dw_loc_list_ref d;
29067 bool non_dwarf_expr = false;
29068
29069 if (av == NULL)
29070 return dwarf_strict ? -1 : 0;
29071 switch (AT_class (av))
29072 {
29073 case dw_val_class_loc_list:
29074 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29075 if (d->expr && non_dwarf_expression (d->expr))
29076 non_dwarf_expr = true;
29077 break;
29078 case dw_val_class_loc:
29079 lv = AT_loc (av);
29080 if (lv == NULL)
29081 return dwarf_strict ? -1 : 0;
29082 if (non_dwarf_expression (lv))
29083 non_dwarf_expr = true;
29084 break;
29085 default:
29086 return dwarf_strict ? -1 : 0;
29087 }
29088
29089 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
29090 into DW_OP_call4 or DW_OP_GNU_variable_value into
29091 DW_OP_call4 DW_OP_deref, do so. */
29092 if (!non_dwarf_expr
29093 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
29094 {
29095 l->dw_loc_opc = DW_OP_call4;
29096 if (l->dw_loc_next)
29097 l->dw_loc_next = NULL;
29098 else
29099 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
29100 return 0;
29101 }
29102
29103 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
29104 copy over the DW_AT_location attribute from die to a. */
29105 if (l->dw_loc_next != NULL)
29106 {
29107 a->dw_attr_val = av->dw_attr_val;
29108 return 1;
29109 }
29110
29111 dw_loc_list_ref list, *p;
29112 switch (AT_class (av))
29113 {
29114 case dw_val_class_loc_list:
29115 p = &list;
29116 list = NULL;
29117 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29118 {
29119 lv = copy_deref_exprloc (d->expr);
29120 if (lv)
29121 {
29122 *p = new_loc_list (lv, d->begin, d->end, d->section);
29123 p = &(*p)->dw_loc_next;
29124 }
29125 else if (!dwarf_strict && d->expr)
29126 return 0;
29127 }
29128 if (list == NULL)
29129 return dwarf_strict ? -1 : 0;
29130 a->dw_attr_val.val_class = dw_val_class_loc_list;
29131 gen_llsym (list);
29132 *AT_loc_list_ptr (a) = list;
29133 return 1;
29134 case dw_val_class_loc:
29135 lv = copy_deref_exprloc (AT_loc (av));
29136 if (lv == NULL)
29137 return dwarf_strict ? -1 : 0;
29138 a->dw_attr_val.v.val_loc = lv;
29139 return 1;
29140 default:
29141 gcc_unreachable ();
29142 }
29143 }
29144
29145 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
29146 an address in .rodata section if the string literal is emitted there,
29147 or remove the containing location list or replace DW_AT_const_value
29148 with DW_AT_location and empty location expression, if it isn't found
29149 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
29150 to something that has been emitted in the current CU. */
29151
29152 static void
29153 resolve_addr (dw_die_ref die)
29154 {
29155 dw_die_ref c;
29156 dw_attr_node *a;
29157 dw_loc_list_ref *curr, *start, loc;
29158 unsigned ix;
29159 bool remove_AT_byte_size = false;
29160
29161 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29162 switch (AT_class (a))
29163 {
29164 case dw_val_class_loc_list:
29165 start = curr = AT_loc_list_ptr (a);
29166 loc = *curr;
29167 gcc_assert (loc);
29168 /* The same list can be referenced more than once. See if we have
29169 already recorded the result from a previous pass. */
29170 if (loc->replaced)
29171 *curr = loc->dw_loc_next;
29172 else if (!loc->resolved_addr)
29173 {
29174 /* As things stand, we do not expect or allow one die to
29175 reference a suffix of another die's location list chain.
29176 References must be identical or completely separate.
29177 There is therefore no need to cache the result of this
29178 pass on any list other than the first; doing so
29179 would lead to unnecessary writes. */
29180 while (*curr)
29181 {
29182 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
29183 if (!resolve_addr_in_expr (a, (*curr)->expr))
29184 {
29185 dw_loc_list_ref next = (*curr)->dw_loc_next;
29186 dw_loc_descr_ref l = (*curr)->expr;
29187
29188 if (next && (*curr)->ll_symbol)
29189 {
29190 gcc_assert (!next->ll_symbol);
29191 next->ll_symbol = (*curr)->ll_symbol;
29192 }
29193 if (dwarf_split_debug_info)
29194 remove_loc_list_addr_table_entries (l);
29195 *curr = next;
29196 }
29197 else
29198 {
29199 mark_base_types ((*curr)->expr);
29200 curr = &(*curr)->dw_loc_next;
29201 }
29202 }
29203 if (loc == *start)
29204 loc->resolved_addr = 1;
29205 else
29206 {
29207 loc->replaced = 1;
29208 loc->dw_loc_next = *start;
29209 }
29210 }
29211 if (!*start)
29212 {
29213 remove_AT (die, a->dw_attr);
29214 ix--;
29215 }
29216 break;
29217 case dw_val_class_loc:
29218 {
29219 dw_loc_descr_ref l = AT_loc (a);
29220 /* DW_OP_GNU_variable_value DW_OP_stack_value or
29221 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
29222 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
29223 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
29224 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
29225 with DW_FORM_ref referencing the same DIE as
29226 DW_OP_GNU_variable_value used to reference. */
29227 if (a->dw_attr == DW_AT_string_length
29228 && l
29229 && l->dw_loc_opc == DW_OP_GNU_variable_value
29230 && (l->dw_loc_next == NULL
29231 || (l->dw_loc_next->dw_loc_next == NULL
29232 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
29233 {
29234 switch (optimize_string_length (a))
29235 {
29236 case -1:
29237 remove_AT (die, a->dw_attr);
29238 ix--;
29239 /* If we drop DW_AT_string_length, we need to drop also
29240 DW_AT_{string_length_,}byte_size. */
29241 remove_AT_byte_size = true;
29242 continue;
29243 default:
29244 break;
29245 case 1:
29246 /* Even if we keep the optimized DW_AT_string_length,
29247 it might have changed AT_class, so process it again. */
29248 ix--;
29249 continue;
29250 }
29251 }
29252 /* For -gdwarf-2 don't attempt to optimize
29253 DW_AT_data_member_location containing
29254 DW_OP_plus_uconst - older consumers might
29255 rely on it being that op instead of a more complex,
29256 but shorter, location description. */
29257 if ((dwarf_version > 2
29258 || a->dw_attr != DW_AT_data_member_location
29259 || l == NULL
29260 || l->dw_loc_opc != DW_OP_plus_uconst
29261 || l->dw_loc_next != NULL)
29262 && !resolve_addr_in_expr (a, l))
29263 {
29264 if (dwarf_split_debug_info)
29265 remove_loc_list_addr_table_entries (l);
29266 if (l != NULL
29267 && l->dw_loc_next == NULL
29268 && l->dw_loc_opc == DW_OP_addr
29269 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
29270 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
29271 && a->dw_attr == DW_AT_location)
29272 {
29273 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
29274 remove_AT (die, a->dw_attr);
29275 ix--;
29276 optimize_location_into_implicit_ptr (die, decl);
29277 break;
29278 }
29279 if (a->dw_attr == DW_AT_string_length)
29280 /* If we drop DW_AT_string_length, we need to drop also
29281 DW_AT_{string_length_,}byte_size. */
29282 remove_AT_byte_size = true;
29283 remove_AT (die, a->dw_attr);
29284 ix--;
29285 }
29286 else
29287 mark_base_types (l);
29288 }
29289 break;
29290 case dw_val_class_addr:
29291 if (a->dw_attr == DW_AT_const_value
29292 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
29293 {
29294 if (AT_index (a) != NOT_INDEXED)
29295 remove_addr_table_entry (a->dw_attr_val.val_entry);
29296 remove_AT (die, a->dw_attr);
29297 ix--;
29298 }
29299 if ((die->die_tag == DW_TAG_call_site
29300 && a->dw_attr == DW_AT_call_origin)
29301 || (die->die_tag == DW_TAG_GNU_call_site
29302 && a->dw_attr == DW_AT_abstract_origin))
29303 {
29304 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
29305 dw_die_ref tdie = lookup_decl_die (tdecl);
29306 dw_die_ref cdie;
29307 if (tdie == NULL
29308 && DECL_EXTERNAL (tdecl)
29309 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
29310 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
29311 {
29312 dw_die_ref pdie = cdie;
29313 /* Make sure we don't add these DIEs into type units.
29314 We could emit skeleton DIEs for context (namespaces,
29315 outer structs/classes) and a skeleton DIE for the
29316 innermost context with DW_AT_signature pointing to the
29317 type unit. See PR78835. */
29318 while (pdie && pdie->die_tag != DW_TAG_type_unit)
29319 pdie = pdie->die_parent;
29320 if (pdie == NULL)
29321 {
29322 /* Creating a full DIE for tdecl is overly expensive, and
29323 at this point it is even wrong in the LTO phase, as it
29324 can end up generating new type DIEs that we did not output,
29325 so optimize_external_refs would crash. */
29326 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
29327 add_AT_flag (tdie, DW_AT_external, 1);
29328 add_AT_flag (tdie, DW_AT_declaration, 1);
29329 add_linkage_attr (tdie, tdecl);
29330 add_name_and_src_coords_attributes (tdie, tdecl, true);
29331 equate_decl_number_to_die (tdecl, tdie);
29332 }
29333 }
29334 if (tdie)
29335 {
29336 a->dw_attr_val.val_class = dw_val_class_die_ref;
29337 a->dw_attr_val.v.val_die_ref.die = tdie;
29338 a->dw_attr_val.v.val_die_ref.external = 0;
29339 }
29340 else
29341 {
29342 if (AT_index (a) != NOT_INDEXED)
29343 remove_addr_table_entry (a->dw_attr_val.val_entry);
29344 remove_AT (die, a->dw_attr);
29345 ix--;
29346 }
29347 }
29348 break;
29349 default:
29350 break;
29351 }
29352
29353 if (remove_AT_byte_size)
29354 remove_AT (die, dwarf_version >= 5
29355 ? DW_AT_string_length_byte_size
29356 : DW_AT_byte_size);
29357
29358 FOR_EACH_CHILD (die, c, resolve_addr (c));
29359 }
29360 \f
29361 /* Helper routines for optimize_location_lists.
29362 This pass tries to share identical local lists in .debug_loc
29363 section. */
29364
29365 /* Iteratively hash operands of LOC opcode into HSTATE. */
29366
29367 static void
29368 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
29369 {
29370 dw_val_ref val1 = &loc->dw_loc_oprnd1;
29371 dw_val_ref val2 = &loc->dw_loc_oprnd2;
29372
29373 switch (loc->dw_loc_opc)
29374 {
29375 case DW_OP_const4u:
29376 case DW_OP_const8u:
29377 if (loc->dtprel)
29378 goto hash_addr;
29379 /* FALLTHRU */
29380 case DW_OP_const1u:
29381 case DW_OP_const1s:
29382 case DW_OP_const2u:
29383 case DW_OP_const2s:
29384 case DW_OP_const4s:
29385 case DW_OP_const8s:
29386 case DW_OP_constu:
29387 case DW_OP_consts:
29388 case DW_OP_pick:
29389 case DW_OP_plus_uconst:
29390 case DW_OP_breg0:
29391 case DW_OP_breg1:
29392 case DW_OP_breg2:
29393 case DW_OP_breg3:
29394 case DW_OP_breg4:
29395 case DW_OP_breg5:
29396 case DW_OP_breg6:
29397 case DW_OP_breg7:
29398 case DW_OP_breg8:
29399 case DW_OP_breg9:
29400 case DW_OP_breg10:
29401 case DW_OP_breg11:
29402 case DW_OP_breg12:
29403 case DW_OP_breg13:
29404 case DW_OP_breg14:
29405 case DW_OP_breg15:
29406 case DW_OP_breg16:
29407 case DW_OP_breg17:
29408 case DW_OP_breg18:
29409 case DW_OP_breg19:
29410 case DW_OP_breg20:
29411 case DW_OP_breg21:
29412 case DW_OP_breg22:
29413 case DW_OP_breg23:
29414 case DW_OP_breg24:
29415 case DW_OP_breg25:
29416 case DW_OP_breg26:
29417 case DW_OP_breg27:
29418 case DW_OP_breg28:
29419 case DW_OP_breg29:
29420 case DW_OP_breg30:
29421 case DW_OP_breg31:
29422 case DW_OP_regx:
29423 case DW_OP_fbreg:
29424 case DW_OP_piece:
29425 case DW_OP_deref_size:
29426 case DW_OP_xderef_size:
29427 hstate.add_object (val1->v.val_int);
29428 break;
29429 case DW_OP_skip:
29430 case DW_OP_bra:
29431 {
29432 int offset;
29433
29434 gcc_assert (val1->val_class == dw_val_class_loc);
29435 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
29436 hstate.add_object (offset);
29437 }
29438 break;
29439 case DW_OP_implicit_value:
29440 hstate.add_object (val1->v.val_unsigned);
29441 switch (val2->val_class)
29442 {
29443 case dw_val_class_const:
29444 hstate.add_object (val2->v.val_int);
29445 break;
29446 case dw_val_class_vec:
29447 {
29448 unsigned int elt_size = val2->v.val_vec.elt_size;
29449 unsigned int len = val2->v.val_vec.length;
29450
29451 hstate.add_int (elt_size);
29452 hstate.add_int (len);
29453 hstate.add (val2->v.val_vec.array, len * elt_size);
29454 }
29455 break;
29456 case dw_val_class_const_double:
29457 hstate.add_object (val2->v.val_double.low);
29458 hstate.add_object (val2->v.val_double.high);
29459 break;
29460 case dw_val_class_wide_int:
29461 hstate.add (val2->v.val_wide->get_val (),
29462 get_full_len (*val2->v.val_wide)
29463 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29464 break;
29465 case dw_val_class_addr:
29466 inchash::add_rtx (val2->v.val_addr, hstate);
29467 break;
29468 default:
29469 gcc_unreachable ();
29470 }
29471 break;
29472 case DW_OP_bregx:
29473 case DW_OP_bit_piece:
29474 hstate.add_object (val1->v.val_int);
29475 hstate.add_object (val2->v.val_int);
29476 break;
29477 case DW_OP_addr:
29478 hash_addr:
29479 if (loc->dtprel)
29480 {
29481 unsigned char dtprel = 0xd1;
29482 hstate.add_object (dtprel);
29483 }
29484 inchash::add_rtx (val1->v.val_addr, hstate);
29485 break;
29486 case DW_OP_GNU_addr_index:
29487 case DW_OP_GNU_const_index:
29488 {
29489 if (loc->dtprel)
29490 {
29491 unsigned char dtprel = 0xd1;
29492 hstate.add_object (dtprel);
29493 }
29494 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29495 }
29496 break;
29497 case DW_OP_implicit_pointer:
29498 case DW_OP_GNU_implicit_pointer:
29499 hstate.add_int (val2->v.val_int);
29500 break;
29501 case DW_OP_entry_value:
29502 case DW_OP_GNU_entry_value:
29503 hstate.add_object (val1->v.val_loc);
29504 break;
29505 case DW_OP_regval_type:
29506 case DW_OP_deref_type:
29507 case DW_OP_GNU_regval_type:
29508 case DW_OP_GNU_deref_type:
29509 {
29510 unsigned int byte_size
29511 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29512 unsigned int encoding
29513 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29514 hstate.add_object (val1->v.val_int);
29515 hstate.add_object (byte_size);
29516 hstate.add_object (encoding);
29517 }
29518 break;
29519 case DW_OP_convert:
29520 case DW_OP_reinterpret:
29521 case DW_OP_GNU_convert:
29522 case DW_OP_GNU_reinterpret:
29523 if (val1->val_class == dw_val_class_unsigned_const)
29524 {
29525 hstate.add_object (val1->v.val_unsigned);
29526 break;
29527 }
29528 /* FALLTHRU */
29529 case DW_OP_const_type:
29530 case DW_OP_GNU_const_type:
29531 {
29532 unsigned int byte_size
29533 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29534 unsigned int encoding
29535 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29536 hstate.add_object (byte_size);
29537 hstate.add_object (encoding);
29538 if (loc->dw_loc_opc != DW_OP_const_type
29539 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29540 break;
29541 hstate.add_object (val2->val_class);
29542 switch (val2->val_class)
29543 {
29544 case dw_val_class_const:
29545 hstate.add_object (val2->v.val_int);
29546 break;
29547 case dw_val_class_vec:
29548 {
29549 unsigned int elt_size = val2->v.val_vec.elt_size;
29550 unsigned int len = val2->v.val_vec.length;
29551
29552 hstate.add_object (elt_size);
29553 hstate.add_object (len);
29554 hstate.add (val2->v.val_vec.array, len * elt_size);
29555 }
29556 break;
29557 case dw_val_class_const_double:
29558 hstate.add_object (val2->v.val_double.low);
29559 hstate.add_object (val2->v.val_double.high);
29560 break;
29561 case dw_val_class_wide_int:
29562 hstate.add (val2->v.val_wide->get_val (),
29563 get_full_len (*val2->v.val_wide)
29564 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29565 break;
29566 default:
29567 gcc_unreachable ();
29568 }
29569 }
29570 break;
29571
29572 default:
29573 /* Other codes have no operands. */
29574 break;
29575 }
29576 }
29577
29578 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29579
29580 static inline void
29581 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29582 {
29583 dw_loc_descr_ref l;
29584 bool sizes_computed = false;
29585 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
29586 size_of_locs (loc);
29587
29588 for (l = loc; l != NULL; l = l->dw_loc_next)
29589 {
29590 enum dwarf_location_atom opc = l->dw_loc_opc;
29591 hstate.add_object (opc);
29592 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29593 {
29594 size_of_locs (loc);
29595 sizes_computed = true;
29596 }
29597 hash_loc_operands (l, hstate);
29598 }
29599 }
29600
29601 /* Compute hash of the whole location list LIST_HEAD. */
29602
29603 static inline void
29604 hash_loc_list (dw_loc_list_ref list_head)
29605 {
29606 dw_loc_list_ref curr = list_head;
29607 inchash::hash hstate;
29608
29609 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29610 {
29611 hstate.add (curr->begin, strlen (curr->begin) + 1);
29612 hstate.add (curr->end, strlen (curr->end) + 1);
29613 if (curr->section)
29614 hstate.add (curr->section, strlen (curr->section) + 1);
29615 hash_locs (curr->expr, hstate);
29616 }
29617 list_head->hash = hstate.end ();
29618 }
29619
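/* For a two-entry list the bytes fed into HSTATE above are, in order
   (label names are hypothetical):

     "LVL1\0" "LVL2\0" [".text\0"] <opcodes and operands of expr 1>
     "LVL2\0" "LVL3\0" [".text\0"] <opcodes and operands of expr 2>

   so two lists can only hash (and, below, compare) equal when their
   begin/end labels, sections and expressions all match.  */
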
29620 /* Return true if X and Y opcodes have the same operands. */
29621
29622 static inline bool
29623 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29624 {
29625 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29626 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29627 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29628 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29629
29630 switch (x->dw_loc_opc)
29631 {
29632 case DW_OP_const4u:
29633 case DW_OP_const8u:
29634 if (x->dtprel)
29635 goto hash_addr;
29636 /* FALLTHRU */
29637 case DW_OP_const1u:
29638 case DW_OP_const1s:
29639 case DW_OP_const2u:
29640 case DW_OP_const2s:
29641 case DW_OP_const4s:
29642 case DW_OP_const8s:
29643 case DW_OP_constu:
29644 case DW_OP_consts:
29645 case DW_OP_pick:
29646 case DW_OP_plus_uconst:
29647 case DW_OP_breg0:
29648 case DW_OP_breg1:
29649 case DW_OP_breg2:
29650 case DW_OP_breg3:
29651 case DW_OP_breg4:
29652 case DW_OP_breg5:
29653 case DW_OP_breg6:
29654 case DW_OP_breg7:
29655 case DW_OP_breg8:
29656 case DW_OP_breg9:
29657 case DW_OP_breg10:
29658 case DW_OP_breg11:
29659 case DW_OP_breg12:
29660 case DW_OP_breg13:
29661 case DW_OP_breg14:
29662 case DW_OP_breg15:
29663 case DW_OP_breg16:
29664 case DW_OP_breg17:
29665 case DW_OP_breg18:
29666 case DW_OP_breg19:
29667 case DW_OP_breg20:
29668 case DW_OP_breg21:
29669 case DW_OP_breg22:
29670 case DW_OP_breg23:
29671 case DW_OP_breg24:
29672 case DW_OP_breg25:
29673 case DW_OP_breg26:
29674 case DW_OP_breg27:
29675 case DW_OP_breg28:
29676 case DW_OP_breg29:
29677 case DW_OP_breg30:
29678 case DW_OP_breg31:
29679 case DW_OP_regx:
29680 case DW_OP_fbreg:
29681 case DW_OP_piece:
29682 case DW_OP_deref_size:
29683 case DW_OP_xderef_size:
29684 return valx1->v.val_int == valy1->v.val_int;
29685 case DW_OP_skip:
29686 case DW_OP_bra:
29687 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29688 can cause irrelevant differences in dw_loc_addr. */
29689 gcc_assert (valx1->val_class == dw_val_class_loc
29690 && valy1->val_class == dw_val_class_loc
29691 && (dwarf_split_debug_info
29692 || x->dw_loc_addr == y->dw_loc_addr));
29693 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29694 case DW_OP_implicit_value:
29695 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29696 || valx2->val_class != valy2->val_class)
29697 return false;
29698 switch (valx2->val_class)
29699 {
29700 case dw_val_class_const:
29701 return valx2->v.val_int == valy2->v.val_int;
29702 case dw_val_class_vec:
29703 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29704 && valx2->v.val_vec.length == valy2->v.val_vec.length
29705 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29706 valx2->v.val_vec.elt_size
29707 * valx2->v.val_vec.length) == 0;
29708 case dw_val_class_const_double:
29709 return valx2->v.val_double.low == valy2->v.val_double.low
29710 && valx2->v.val_double.high == valy2->v.val_double.high;
29711 case dw_val_class_wide_int:
29712 return *valx2->v.val_wide == *valy2->v.val_wide;
29713 case dw_val_class_addr:
29714 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29715 default:
29716 gcc_unreachable ();
29717 }
29718 case DW_OP_bregx:
29719 case DW_OP_bit_piece:
29720 return valx1->v.val_int == valy1->v.val_int
29721 && valx2->v.val_int == valy2->v.val_int;
29722 case DW_OP_addr:
29723 hash_addr:
29724 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29725 case DW_OP_GNU_addr_index:
29726 case DW_OP_GNU_const_index:
29727 {
29728 rtx ax1 = valx1->val_entry->addr.rtl;
29729 rtx ay1 = valy1->val_entry->addr.rtl;
29730 return rtx_equal_p (ax1, ay1);
29731 }
29732 case DW_OP_implicit_pointer:
29733 case DW_OP_GNU_implicit_pointer:
29734 return valx1->val_class == dw_val_class_die_ref
29735 && valx1->val_class == valy1->val_class
29736 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29737 && valx2->v.val_int == valy2->v.val_int;
29738 case DW_OP_entry_value:
29739 case DW_OP_GNU_entry_value:
29740 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29741 case DW_OP_const_type:
29742 case DW_OP_GNU_const_type:
29743 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29744 || valx2->val_class != valy2->val_class)
29745 return false;
29746 switch (valx2->val_class)
29747 {
29748 case dw_val_class_const:
29749 return valx2->v.val_int == valy2->v.val_int;
29750 case dw_val_class_vec:
29751 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29752 && valx2->v.val_vec.length == valy2->v.val_vec.length
29753 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29754 valx2->v.val_vec.elt_size
29755 * valx2->v.val_vec.length) == 0;
29756 case dw_val_class_const_double:
29757 return valx2->v.val_double.low == valy2->v.val_double.low
29758 && valx2->v.val_double.high == valy2->v.val_double.high;
29759 case dw_val_class_wide_int:
29760 return *valx2->v.val_wide == *valy2->v.val_wide;
29761 default:
29762 gcc_unreachable ();
29763 }
29764 case DW_OP_regval_type:
29765 case DW_OP_deref_type:
29766 case DW_OP_GNU_regval_type:
29767 case DW_OP_GNU_deref_type:
29768 return valx1->v.val_int == valy1->v.val_int
29769 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29770 case DW_OP_convert:
29771 case DW_OP_reinterpret:
29772 case DW_OP_GNU_convert:
29773 case DW_OP_GNU_reinterpret:
29774 if (valx1->val_class != valy1->val_class)
29775 return false;
29776 if (valx1->val_class == dw_val_class_unsigned_const)
29777 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29778 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29779 case DW_OP_GNU_parameter_ref:
29780 return valx1->val_class == dw_val_class_die_ref
29781 && valx1->val_class == valy1->val_class
29782 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29783 default:
29784 /* Other codes have no operands. */
29785 return true;
29786 }
29787 }
29788
29789 /* Return true if DWARF location expressions X and Y are the same. */
29790
29791 static inline bool
29792 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29793 {
29794 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29795 if (x->dw_loc_opc != y->dw_loc_opc
29796 || x->dtprel != y->dtprel
29797 || !compare_loc_operands (x, y))
29798 break;
29799 return x == NULL && y == NULL;
29800 }
29801
29802 /* Hashtable helpers. */
29803
29804 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29805 {
29806 static inline hashval_t hash (const dw_loc_list_struct *);
29807 static inline bool equal (const dw_loc_list_struct *,
29808 const dw_loc_list_struct *);
29809 };
29810
29811 /* Return precomputed hash of location list X. */
29812
29813 inline hashval_t
29814 loc_list_hasher::hash (const dw_loc_list_struct *x)
29815 {
29816 return x->hash;
29817 }
29818
29819 /* Return true if location lists A and B are the same. */
29820
29821 inline bool
29822 loc_list_hasher::equal (const dw_loc_list_struct *a,
29823 const dw_loc_list_struct *b)
29824 {
29825 if (a == b)
29826 return 1;
29827 if (a->hash != b->hash)
29828 return 0;
29829 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29830 if (strcmp (a->begin, b->begin) != 0
29831 || strcmp (a->end, b->end) != 0
29832 || (a->section == NULL) != (b->section == NULL)
29833 || (a->section && strcmp (a->section, b->section) != 0)
29834 || !compare_locs (a->expr, b->expr))
29835 break;
29836 return a == NULL && b == NULL;
29837 }
29838
29839 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29840
29841
29842 /* Recursively optimize location lists referenced from DIE
29843 children and share them whenever possible. */
29844
29845 static void
29846 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29847 {
29848 dw_die_ref c;
29849 dw_attr_node *a;
29850 unsigned ix;
29851 dw_loc_list_struct **slot;
29852
29853 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29854 if (AT_class (a) == dw_val_class_loc_list)
29855 {
29856 dw_loc_list_ref list = AT_loc_list (a);
29857 /* TODO: perform some optimizations here, before hashing
29858 it and storing into the hash table. */
29859 hash_loc_list (list);
29860 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
29861 if (*slot == NULL)
29862 *slot = list;
29863 else
29864 a->dw_attr_val.v.val_loc_list = *slot;
29865 }
29866
29867 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29868 }
29869
29870
29871 /* Recursively assign each location list a unique index into the debug_addr
29872 section. */
29873
29874 static void
29875 index_location_lists (dw_die_ref die)
29876 {
29877 dw_die_ref c;
29878 dw_attr_node *a;
29879 unsigned ix;
29880
29881 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29882 if (AT_class (a) == dw_val_class_loc_list)
29883 {
29884 dw_loc_list_ref list = AT_loc_list (a);
29885 dw_loc_list_ref curr;
29886 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29887 {
29888 /* Don't index an entry that has already been indexed
29889 or won't be output. */
29890 if (curr->begin_entry != NULL
29891 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29892 continue;
29893
29894 curr->begin_entry
29895 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29896 }
29897 }
29898
29899 FOR_EACH_CHILD (die, c, index_location_lists (c));
29900 }
29901
29902 /* Optimize location lists referenced from DIE
29903 children and share them whenever possible. */
29904
29905 static void
29906 optimize_location_lists (dw_die_ref die)
29907 {
29908 loc_list_hash_type htab (500);
29909 optimize_location_lists_1 (die, &htab);
29910 }
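/* Example of the sharing performed by this pass (labels hypothetical):

     DIE 1: DW_AT_location -> list A: [.LVL1, .LVL2) expr
     DIE 2: DW_AT_location -> list B: [.LVL1, .LVL2) expr   (identical)

   After the pass DIE 2's attribute points at list A, so only one copy
   is emitted into .debug_loc and both attributes reference the same
   label/offset.  */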
29911 \f
29912 /* Traverse the limbo die list, and add parent/child links. The only
29913 dies without parents that should be here are concrete instances of
29914 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29915 For concrete instances, we can get the parent die from the abstract
29916 instance. */
29917
29918 static void
29919 flush_limbo_die_list (void)
29920 {
29921 limbo_die_node *node;
29922
29923 /* get_context_die calls force_decl_die, which can put new DIEs on the
29924 limbo list in LTO mode when nested functions are put in a different
29925 partition than that of their parent function. */
29926 while ((node = limbo_die_list))
29927 {
29928 dw_die_ref die = node->die;
29929 limbo_die_list = node->next;
29930
29931 if (die->die_parent == NULL)
29932 {
29933 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29934
29935 if (origin && origin->die_parent)
29936 add_child_die (origin->die_parent, die);
29937 else if (is_cu_die (die))
29938 ;
29939 else if (seen_error ())
29940 /* It's OK to be confused by errors in the input. */
29941 add_child_die (comp_unit_die (), die);
29942 else
29943 {
29944 /* In certain situations, the lexical block containing a
29945 nested function can be optimized away, which results
29946 in the nested function die being orphaned. Likewise
29947 with the return type of that nested function. Force
29948 this to be a child of the containing function.
29949
29950 It may happen that even the containing function got fully
29951 inlined and optimized out. In that case we are lost and
29952 assign the empty child.  This should not be a big issue as
29953 the function is likely unreachable too. */
29954 gcc_assert (node->created_for);
29955
29956 if (DECL_P (node->created_for))
29957 origin = get_context_die (DECL_CONTEXT (node->created_for));
29958 else if (TYPE_P (node->created_for))
29959 origin = scope_die_for (node->created_for, comp_unit_die ());
29960 else
29961 origin = comp_unit_die ();
29962
29963 add_child_die (origin, die);
29964 }
29965 }
29966 }
29967 }
29968
29969 /* Reset DIEs so we can output them again. */
29970
29971 static void
29972 reset_dies (dw_die_ref die)
29973 {
29974 dw_die_ref c;
29975
29976 /* Remove stuff we re-generate. */
29977 die->die_mark = 0;
29978 die->die_offset = 0;
29979 die->die_abbrev = 0;
29980 remove_AT (die, DW_AT_sibling);
29981
29982 FOR_EACH_CHILD (die, c, reset_dies (c));
29983 }
29984
29985 /* Output stuff that dwarf requires at the end of every file,
29986 and generate the DWARF-2 debugging info. */
29987
29988 static void
29989 dwarf2out_finish (const char *)
29990 {
29991 comdat_type_node *ctnode;
29992 dw_die_ref main_comp_unit_die;
29993 unsigned char checksum[16];
29994 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
29995
29996 /* Flush out any latecomers to the limbo party. */
29997 flush_limbo_die_list ();
29998
29999 if (flag_checking)
30000 {
30001 verify_die (comp_unit_die ());
30002 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30003 verify_die (node->die);
30004 }
30005
30006 /* We shouldn't have any symbols with delayed asm names for
30007 DIEs generated after early finish. */
30008 gcc_assert (deferred_asm_name == NULL);
30009
30010 gen_remaining_tmpl_value_param_die_attribute ();
30011
30012 if (flag_generate_lto || flag_generate_offload)
30013 {
30014 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
30015
30016 /* Prune stuff so that dwarf2out_finish runs successfully
30017 for the fat part of the object. */
30018 reset_dies (comp_unit_die ());
30019 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30020 reset_dies (node->die);
30021
30022 hash_table<comdat_type_hasher> comdat_type_table (100);
30023 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30024 {
30025 comdat_type_node **slot
30026 = comdat_type_table.find_slot (ctnode, INSERT);
30027
30028 /* Don't reset types twice. */
30029 if (*slot != HTAB_EMPTY_ENTRY)
30030 continue;
30031
30032 /* Reset the comdat type unit's root DIE so that it can be
30033 output again for the fat part of the object, like the
30034 compile unit DIEs reset above. */
30035 if (debug_info_level >= DINFO_LEVEL_TERSE)
30036 reset_dies (ctnode->root_die);
30037
30038 *slot = ctnode;
30039 }
30040
30041 /* Reset die CU symbol so we don't output it twice. */
30042 comp_unit_die ()->die_id.die_symbol = NULL;
30043
30044 /* Remove DW_AT_macro from the early output. */
30045 if (have_macinfo)
30046 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
30047
30048 /* Remove indirect string decisions. */
30049 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
30050 }
30051
30052 #if ENABLE_ASSERT_CHECKING
30053 {
30054 dw_die_ref die = comp_unit_die (), c;
30055 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
30056 }
30057 #endif
30058 resolve_addr (comp_unit_die ());
30059 move_marked_base_types ();
30060
30061 /* Initialize sections and labels used for actual assembler output. */
30062 unsigned generation = init_sections_and_labels (false);
30063
30064 /* Traverse the DIEs and add sibling attributes to those DIEs that
30065 have children. */
30066 add_sibling_attributes (comp_unit_die ());
30067 limbo_die_node *node;
30068 for (node = cu_die_list; node; node = node->next)
30069 add_sibling_attributes (node->die);
30070 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30071 add_sibling_attributes (ctnode->root_die);
30072
30073 /* When splitting DWARF info, we put some attributes in the
30074 skeleton compile_unit DIE that remains in the .o, while
30075 most attributes go in the DWO compile_unit_die. */
30076 if (dwarf_split_debug_info)
30077 {
30078 limbo_die_node *cu;
30079 main_comp_unit_die = gen_compile_unit_die (NULL);
30080 if (dwarf_version >= 5)
30081 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
30082 cu = limbo_die_list;
30083 gcc_assert (cu->die == main_comp_unit_die);
30084 limbo_die_list = limbo_die_list->next;
30085 cu->next = cu_die_list;
30086 cu_die_list = cu;
30087 }
30088 else
30089 main_comp_unit_die = comp_unit_die ();
30090
30091 /* Output a terminator label for the .text section. */
30092 switch_to_section (text_section);
30093 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
30094 if (cold_text_section)
30095 {
30096 switch_to_section (cold_text_section);
30097 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
30098 }
30099
30100 /* We can only use the low/high_pc attributes if all of the code was
30101 in .text. */
30102 if (!have_multiple_function_sections
30103 || (dwarf_version < 3 && dwarf_strict))
30104 {
30105 /* Don't add if the CU has no associated code. */
30106 if (text_section_used)
30107 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
30108 text_end_label, true);
30109 }
30110 else
30111 {
30112 unsigned fde_idx;
30113 dw_fde_ref fde;
30114 bool range_list_added = false;
30115
30116 if (text_section_used)
30117 add_ranges_by_labels (main_comp_unit_die, text_section_label,
30118 text_end_label, &range_list_added, true);
30119 if (cold_text_section_used)
30120 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
30121 cold_end_label, &range_list_added, true);
30122
30123 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
30124 {
30125 if (DECL_IGNORED_P (fde->decl))
30126 continue;
30127 if (!fde->in_std_section)
30128 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
30129 fde->dw_fde_end, &range_list_added,
30130 true);
30131 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
30132 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
30133 fde->dw_fde_second_end, &range_list_added,
30134 true);
30135 }
30136
30137 if (range_list_added)
30138 {
30139 /* We need to give .debug_loc and .debug_ranges an appropriate
30140 "base address". Use zero so that these addresses become
30141 absolute. Historically, we've emitted the unexpected
30142 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
30143 Emit both to give time for other tools to adapt. */
30144 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
30145 if (! dwarf_strict && dwarf_version < 4)
30146 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
30147
30148 add_ranges (NULL);
30149 }
30150 }
30151
30152 /* AIX Assembler inserts the length, so adjust the reference to match the
30153 offset expected by debuggers. */
30154 strcpy (dl_section_ref, debug_line_section_label);
30155 if (XCOFF_DEBUGGING_INFO)
30156 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
30157
30158 if (debug_info_level >= DINFO_LEVEL_TERSE)
30159 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
30160 dl_section_ref);
30161
30162 if (have_macinfo)
30163 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30164 macinfo_section_label);
30165
30166 if (dwarf_split_debug_info)
30167 {
30168 if (have_location_lists)
30169 {
30170 if (dwarf_version >= 5)
30171 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
30172 loc_section_label);
30173 /* optimize_location_lists calculates the size of the lists,
30174 so index them first, and assign indices to the entries.
30175 Although optimize_location_lists will remove entries from
30176 the table, it only does so for duplicates, and therefore
30177 only reduces ref_counts to 1. */
30178 index_location_lists (comp_unit_die ());
30179 }
30180
30181 if (addr_index_table != NULL)
30182 {
30183 unsigned int index = 0;
30184 addr_index_table
30185 ->traverse_noresize<unsigned int *, index_addr_table_entry>
30186 (&index);
30187 }
30188 }
30189
30190 loc_list_idx = 0;
30191 if (have_location_lists)
30192 {
30193 optimize_location_lists (comp_unit_die ());
30194 /* And finally assign indexes to the entries for -gsplit-dwarf. */
30195 if (dwarf_version >= 5 && dwarf_split_debug_info)
30196 assign_location_list_indexes (comp_unit_die ());
30197 }
30198
30199 save_macinfo_strings ();
30200
30201 if (dwarf_split_debug_info)
30202 {
30203 unsigned int index = 0;
30204
30205 /* Add attributes common to skeleton compile_units and
30206 type_units. Because these attributes include strings, it
30207 must be done before freezing the string table. Top-level
30208 skeleton die attrs are added when the skeleton type unit is
30209 created, so ensure it is created by this point. */
30210 add_top_level_skeleton_die_attrs (main_comp_unit_die);
30211 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30212 }
30213
30214 /* Output all of the compilation units. We put the main one last so that
30215 the offsets are available to output_pubnames. */
30216 for (node = cu_die_list; node; node = node->next)
30217 output_comp_unit (node->die, 0, NULL);
30218
30219 hash_table<comdat_type_hasher> comdat_type_table (100);
30220 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30221 {
30222 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30223
30224 /* Don't output duplicate types. */
30225 if (*slot != HTAB_EMPTY_ENTRY)
30226 continue;
30227
30228 /* Add a pointer to the line table for the main compilation unit
30229 so that the debugger can make sense of DW_AT_decl_file
30230 attributes. */
30231 if (debug_info_level >= DINFO_LEVEL_TERSE)
30232 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30233 (!dwarf_split_debug_info
30234 ? dl_section_ref
30235 : debug_skeleton_line_section_label));
30236
30237 output_comdat_type_unit (ctnode);
30238 *slot = ctnode;
30239 }
30240
30241 if (dwarf_split_debug_info)
30242 {
30243 int mark;
30244 struct md5_ctx ctx;
30245
30246 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
30247 index_rnglists ();
30248
30249 /* Compute a checksum of the comp_unit to use as the dwo_id. */
30250 md5_init_ctx (&ctx);
30251 mark = 0;
30252 die_checksum (comp_unit_die (), &ctx, &mark);
30253 unmark_all_dies (comp_unit_die ());
30254 md5_finish_ctx (&ctx, checksum);
30255
30256 if (dwarf_version < 5)
30257 {
30258 /* Use the first 8 bytes of the checksum as the dwo_id,
30259 and add it to both comp-unit DIEs. */
30260 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
30261 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
30262 }
30263
30264 /* Add the base offset of the ranges table to the skeleton
30265 comp-unit DIE. */
30266 if (!vec_safe_is_empty (ranges_table))
30267 {
30268 if (dwarf_version >= 5)
30269 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
30270 ranges_base_label);
30271 else
30272 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
30273 ranges_section_label);
30274 }
30275
30276 switch_to_section (debug_addr_section);
30277 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
30278 output_addr_table ();
30279 }
30280
30281 /* Output the main compilation unit if non-empty or if .debug_macinfo
30282 or .debug_macro will be emitted. */
30283 output_comp_unit (comp_unit_die (), have_macinfo,
30284 dwarf_split_debug_info ? checksum : NULL);
30285
30286 if (dwarf_split_debug_info && info_section_emitted)
30287 output_skeleton_debug_sections (main_comp_unit_die, checksum);
30288
30289 /* Output the abbreviation table. */
30290 if (vec_safe_length (abbrev_die_table) != 1)
30291 {
30292 switch_to_section (debug_abbrev_section);
30293 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30294 output_abbrev_section ();
30295 }
30296
30297 /* Output location list section if necessary. */
30298 if (have_location_lists)
30299 {
30300 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
30301 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
30302 /* Output the location lists info. */
30303 switch_to_section (debug_loc_section);
30304 if (dwarf_version >= 5)
30305 {
30306 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
30307 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
30308 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
30309 dw2_asm_output_data (4, 0xffffffff,
30310 "Initial length escape value indicating "
30311 "64-bit DWARF extension");
30312 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
30313 "Length of Location Lists");
30314 ASM_OUTPUT_LABEL (asm_out_file, l1);
30315 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
30316 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
30317 dw2_asm_output_data (1, 0, "Segment Size");
30318 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
30319 "Offset Entry Count");
30320 }
30321 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
30322 if (dwarf_version >= 5 && dwarf_split_debug_info)
30323 {
30324 unsigned int save_loc_list_idx = loc_list_idx;
30325 loc_list_idx = 0;
30326 output_loclists_offsets (comp_unit_die ());
30327 gcc_assert (save_loc_list_idx == loc_list_idx);
30328 }
30329 output_location_lists (comp_unit_die ());
30330 if (dwarf_version >= 5)
30331 ASM_OUTPUT_LABEL (asm_out_file, l2);
30332 }
30333
30334 output_pubtables ();
30335
30336 /* Output the address range information if a CU (.debug_info section)
30337 was emitted. We output an empty table even if we had no functions
30338 to put in it.  This is because the consumer has no way to tell the
30339 difference between an empty table that we omitted and a failure to
30340 generate a table that would have contained data. */
30341 if (info_section_emitted)
30342 {
30343 switch_to_section (debug_aranges_section);
30344 output_aranges ();
30345 }
30346
30347 /* Output ranges section if necessary. */
30348 if (!vec_safe_is_empty (ranges_table))
30349 {
30350 if (dwarf_version >= 5)
30351 output_rnglists (generation);
30352 else
30353 output_ranges ();
30354 }
30355
30356 /* Have to end the macro section. */
30357 if (have_macinfo)
30358 {
30359 switch_to_section (debug_macinfo_section);
30360 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30361 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
30362 : debug_skeleton_line_section_label, false);
30363 dw2_asm_output_data (1, 0, "End compilation unit");
30364 }
30365
30366 /* Output the source line correspondence table. We must do this
30367 even if there is no line information. Otherwise, on an empty
30368 translation unit, we will generate a present, but empty,
30369 .debug_info section. IRIX 6.5 `nm' will then complain when
30370 examining the file. This is done late so that any filenames
30371 used by the debug_info section are marked as 'used'. */
30372 switch_to_section (debug_line_section);
30373 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
30374 if (! DWARF2_ASM_LINE_DEBUG_INFO)
30375 output_line_info (false);
30376
30377 if (dwarf_split_debug_info && info_section_emitted)
30378 {
30379 switch_to_section (debug_skeleton_line_section);
30380 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30381 output_line_info (true);
30382 }
30383
30384 /* If we emitted any indirect strings, output the string table too. */
30385 if (debug_str_hash || skeleton_debug_str_hash)
30386 output_indirect_strings ();
30387 if (debug_line_str_hash)
30388 {
30389 switch_to_section (debug_line_str_section);
30390 const enum dwarf_form form = DW_FORM_line_strp;
30391 debug_line_str_hash->traverse<enum dwarf_form,
30392 output_indirect_string> (form);
30393 }
30394 }
30395
30396 /* Returns a hash value for X (which really is a variable_value_struct). */
30397
30398 inline hashval_t
30399 variable_value_hasher::hash (variable_value_struct *x)
30400 {
30401 return (hashval_t) x->decl_id;
30402 }
30403
30404 /* Return nonzero if decl_id of variable_value_struct X is the same as
30405 UID of decl Y. */
30406
30407 inline bool
30408 variable_value_hasher::equal (variable_value_struct *x, tree y)
30409 {
30410 return x->decl_id == DECL_UID (y);
30411 }
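
/* Both lookups into variable_value_hash use the containing FUNCTION_DECL
   as the key, e.g. (a sketch of the pattern used below, with FNDECL being
   that FUNCTION_DECL):

     variable_value_hash->find_slot_with_hash (fndecl, DECL_UID (fndecl),
                                               INSERT);
     variable_value_hash->find_with_hash (fndecl, DECL_UID (fndecl));

   so hash () and equal () above only ever see FUNCTION_DECL UIDs.  */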
30412
30413 /* Helper function for resolve_variable_value; handle
30414 DW_OP_GNU_variable_value in one location expression.
30415 Return true if the exprloc has been changed into a loclist. */
30416
30417 static bool
30418 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30419 {
30420 dw_loc_descr_ref next;
30421 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
30422 {
30423 next = loc->dw_loc_next;
30424 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
30425 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
30426 continue;
30427
30428 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30429 if (DECL_CONTEXT (decl) != current_function_decl)
30430 continue;
30431
30432 dw_die_ref ref = lookup_decl_die (decl);
30433 if (ref)
30434 {
30435 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30436 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30437 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30438 continue;
30439 }
30440 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
30441 if (l == NULL)
30442 continue;
30443 if (l->dw_loc_next)
30444 {
30445 if (AT_class (a) != dw_val_class_loc)
30446 continue;
30447 switch (a->dw_attr)
30448 {
30449 /* The following attributes allow both exprloc and loclist
30450 classes, so we can change them into a loclist. */
30451 case DW_AT_location:
30452 case DW_AT_string_length:
30453 case DW_AT_return_addr:
30454 case DW_AT_data_member_location:
30455 case DW_AT_frame_base:
30456 case DW_AT_segment:
30457 case DW_AT_static_link:
30458 case DW_AT_use_location:
30459 case DW_AT_vtable_elem_location:
30460 if (prev)
30461 {
30462 prev->dw_loc_next = NULL;
30463 prepend_loc_descr_to_each (l, AT_loc (a));
30464 }
30465 if (next)
30466 add_loc_descr_to_each (l, next);
30467 a->dw_attr_val.val_class = dw_val_class_loc_list;
30468 a->dw_attr_val.val_entry = NULL;
30469 a->dw_attr_val.v.val_loc_list = l;
30470 have_location_lists = true;
30471 return true;
30472 /* The following attributes allow both exprloc and reference
30473 classes, so if the whole expression is DW_OP_GNU_variable_value
30474 alone we can transform it into a reference. */
30475 case DW_AT_byte_size:
30476 case DW_AT_bit_size:
30477 case DW_AT_lower_bound:
30478 case DW_AT_upper_bound:
30479 case DW_AT_bit_stride:
30480 case DW_AT_count:
30481 case DW_AT_allocated:
30482 case DW_AT_associated:
30483 case DW_AT_byte_stride:
30484 if (prev == NULL && next == NULL)
30485 break;
30486 /* FALLTHRU */
30487 default:
30488 if (dwarf_strict)
30489 continue;
30490 break;
30491 }
30492 /* Create DW_TAG_variable that we can refer to. */
30493 gen_decl_die (decl, NULL_TREE, NULL,
30494 lookup_decl_die (current_function_decl));
30495 ref = lookup_decl_die (decl);
30496 if (ref)
30497 {
30498 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30499 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30500 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30501 }
30502 continue;
30503 }
30504 if (prev)
30505 {
30506 prev->dw_loc_next = l->expr;
30507 add_loc_descr (&prev->dw_loc_next, next);
30508 free_loc_descr (loc, NULL);
30509 next = prev->dw_loc_next;
30510 }
30511 else
30512 {
30513 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
30514 add_loc_descr (&loc, next);
30515 next = loc;
30516 }
30517 loc = prev;
30518 }
30519 return false;
30520 }
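
/* Example of the exprloc -> loclist rewrite above (decl D and labels
   L1..L3 are hypothetical):

     DW_AT_location: DW_OP_GNU_variable_value <decl D> DW_OP_deref

   where D's location is only available as the list

     [L1, L2) expr1 ; [L2, L3) expr2

   is rewritten into the location list

     [L1, L2) expr1 DW_OP_deref ; [L2, L3) expr2 DW_OP_deref

   i.e. whatever preceded or followed DW_OP_GNU_variable_value in the
   original expression is prepended/appended to every list entry.  If D's
   location is a single expression instead, it is simply spliced in place
   of the DW_OP_GNU_variable_value operation.  */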
30521
30522 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
30523
30524 static void
30525 resolve_variable_value (dw_die_ref die)
30526 {
30527 dw_attr_node *a;
30528 dw_loc_list_ref loc;
30529 unsigned ix;
30530
30531 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30532 switch (AT_class (a))
30533 {
30534 case dw_val_class_loc:
30535 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
30536 break;
30537 /* FALLTHRU */
30538 case dw_val_class_loc_list:
30539 loc = AT_loc_list (a);
30540 gcc_assert (loc);
30541 for (; loc; loc = loc->dw_loc_next)
30542 resolve_variable_value_in_expr (a, loc->expr);
30543 break;
30544 default:
30545 break;
30546 }
30547 }
30548
30549 /* Attempt to optimize DW_OP_GNU_variable_value referring to
30550 temporaries in the current function. */
30551
30552 static void
30553 resolve_variable_values (void)
30554 {
30555 if (!variable_value_hash || !current_function_decl)
30556 return;
30557
30558 struct variable_value_struct *node
30559 = variable_value_hash->find_with_hash (current_function_decl,
30560 DECL_UID (current_function_decl));
30561
30562 if (node == NULL)
30563 return;
30564
30565 unsigned int i;
30566 dw_die_ref die;
30567 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
30568 resolve_variable_value (die);
30569 }
30570
30571 /* Helper function for note_variable_value; handle one location
30572 expression. */
30573
30574 static void
30575 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
30576 {
30577 for (; loc; loc = loc->dw_loc_next)
30578 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
30579 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30580 {
30581 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30582 dw_die_ref ref = lookup_decl_die (decl);
30583 if (! ref && (flag_generate_lto || flag_generate_offload))
30584 {
30585 /* ??? This is somewhat of a hack: we do not create DIEs early
30586 for variables that are not in BLOCK trees, but when generating
30587 early LTO output we need the dw_val_class_decl_ref to be
30588 fully resolved.  For fat LTO objects we'd also like to
30589 undo this after LTO dwarf output. */
30590 gcc_assert (DECL_CONTEXT (decl));
30591 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
30592 gcc_assert (ctx != NULL);
30593 gen_decl_die (decl, NULL_TREE, NULL, ctx);
30594 ref = lookup_decl_die (decl);
30595 gcc_assert (ref != NULL);
30596 }
30597 if (ref)
30598 {
30599 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30600 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30601 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30602 continue;
30603 }
30604 if (VAR_P (decl)
30605 && DECL_CONTEXT (decl)
30606 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
30607 && lookup_decl_die (DECL_CONTEXT (decl)))
30608 {
30609 if (!variable_value_hash)
30610 variable_value_hash
30611 = hash_table<variable_value_hasher>::create_ggc (10);
30612
30613 tree fndecl = DECL_CONTEXT (decl);
30614 struct variable_value_struct *node;
30615 struct variable_value_struct **slot
30616 = variable_value_hash->find_slot_with_hash (fndecl,
30617 DECL_UID (fndecl),
30618 INSERT);
30619 if (*slot == NULL)
30620 {
30621 node = ggc_cleared_alloc<variable_value_struct> ();
30622 node->decl_id = DECL_UID (fndecl);
30623 *slot = node;
30624 }
30625 else
30626 node = *slot;
30627
30628 vec_safe_push (node->dies, die);
30629 }
30630 }
30631 }
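
/* Concrete example (names hypothetical): a DW_OP_GNU_variable_value
   referencing local variable "len" of function "f" keeps its
   dw_val_class_decl_ref operand if "len" has no DIE yet; the referring
   DIE is then queued in variable_value_hash under DECL_UID of "f" so
   that resolve_variable_values can retry the lookup (via
   lookup_decl_die or loc_list_from_tree) while "f" is
   current_function_decl.  */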
30632
30633 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
30634 with a dw_val_class_decl_ref operand. */
30635
30636 static void
30637 note_variable_value (dw_die_ref die)
30638 {
30639 dw_die_ref c;
30640 dw_attr_node *a;
30641 dw_loc_list_ref loc;
30642 unsigned ix;
30643
30644 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30645 switch (AT_class (a))
30646 {
30647 case dw_val_class_loc_list:
30648 loc = AT_loc_list (a);
30649 gcc_assert (loc);
30650 if (!loc->noted_variable_value)
30651 {
30652 loc->noted_variable_value = 1;
30653 for (; loc; loc = loc->dw_loc_next)
30654 note_variable_value_in_expr (die, loc->expr);
30655 }
30656 break;
30657 case dw_val_class_loc:
30658 note_variable_value_in_expr (die, AT_loc (a));
30659 break;
30660 default:
30661 break;
30662 }
30663
30664 /* Mark children. */
30665 FOR_EACH_CHILD (die, c, note_variable_value (c));
30666 }
30667
30668 /* Perform any cleanups needed after the early debug generation pass
30669 has run. */
30670
30671 static void
30672 dwarf2out_early_finish (const char *filename)
30673 {
30674 set_early_dwarf s;
30675
30676 /* PCH might result in the DW_AT_producer string being restored from
30677 the header compilation, so always fill it with an empty string
30678 initially and overwrite it only here. */
30679 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
30680 producer_string = gen_producer_string ();
30681 producer->dw_attr_val.v.val_str->refcount--;
30682 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
30683
30684 /* Add the name for the main input file now. We delayed this from
30685 dwarf2out_init to avoid complications with PCH. */
30686 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
30687 add_comp_dir_attribute (comp_unit_die ());
30688
30689 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
30690 DW_AT_comp_dir into .debug_line_str section. */
30691 if (!DWARF2_ASM_LINE_DEBUG_INFO
30692 && dwarf_version >= 5
30693 && DWARF5_USE_DEBUG_LINE_STR)
30694 {
30695 for (int i = 0; i < 2; i++)
30696 {
30697 dw_attr_node *a = get_AT (comp_unit_die (),
30698 i ? DW_AT_comp_dir : DW_AT_name);
30699 if (a == NULL
30700 || AT_class (a) != dw_val_class_str
30701 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
30702 continue;
30703
30704 if (! debug_line_str_hash)
30705 debug_line_str_hash
30706 = hash_table<indirect_string_hasher>::create_ggc (10);
30707
30708 struct indirect_string_node *node
30709 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
30710 set_indirect_string (node);
30711 node->form = DW_FORM_line_strp;
30712 a->dw_attr_val.v.val_str->refcount--;
30713 a->dw_attr_val.v.val_str = node;
30714 }
30715 }
30716
30717 /* With LTO early dwarf was really finished at compile-time, so make
30718 sure to adjust the phase after annotating the LTRANS CU DIE. */
30719 if (in_lto_p)
30720 {
30721 early_dwarf_finished = true;
30722 return;
30723 }
30724
30725 /* Walk through the list of incomplete types again, trying once more to
30726 emit full debugging info for them. */
30727 retry_incomplete_types ();
30728
30729 /* The point here is to flush out the limbo list so that it is empty
30730 and we don't need to stream it for LTO. */
30731 flush_limbo_die_list ();
30732
30733 gen_scheduled_generic_parms_dies ();
30734 gen_remaining_tmpl_value_param_die_attribute ();
30735
30736 /* Add DW_AT_linkage_name for all deferred DIEs. */
30737 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
30738 {
30739 tree decl = node->created_for;
30740 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
30741 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
30742 ended up in deferred_asm_name before we knew it was
30743 constant and never written to disk. */
30744 && DECL_ASSEMBLER_NAME (decl))
30745 {
30746 add_linkage_attr (node->die, decl);
30747 move_linkage_attr (node->die);
30748 }
30749 }
30750 deferred_asm_name = NULL;
30751
30752 if (flag_eliminate_unused_debug_types)
30753 prune_unused_types ();
30754
30755 /* Generate separate COMDAT sections for type DIEs. */
30756 if (use_debug_types)
30757 {
30758 break_out_comdat_types (comp_unit_die ());
30759
30760 /* Each new type_unit DIE was added to the limbo die list when created.
30761 Since these have all been added to comdat_type_list, clear the
30762 limbo die list. */
30763 limbo_die_list = NULL;
30764
30765 /* For each new comdat type unit, copy declarations for incomplete
30766 types to make the new unit self-contained (i.e., no direct
30767 references to the main compile unit). */
30768 for (comdat_type_node *ctnode = comdat_type_list;
30769 ctnode != NULL; ctnode = ctnode->next)
30770 copy_decls_for_unworthy_types (ctnode->root_die);
30771 copy_decls_for_unworthy_types (comp_unit_die ());
30772
30773 /* In the process of copying declarations from one unit to another,
30774 we may have left some declarations behind that are no longer
30775 referenced. Prune them. */
30776 prune_unused_types ();
30777 }
30778
30779 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
30780 with a dw_val_class_decl_ref operand. */
30781 note_variable_value (comp_unit_die ());
30782 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30783 note_variable_value (node->die);
30784 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
30785 ctnode = ctnode->next)
30786 note_variable_value (ctnode->root_die);
30787 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30788 note_variable_value (node->die);
30789
30790 /* The AT_pubnames attribute needs to go in all skeleton dies, including
30791 both the main_cu and all skeleton TUs. Making this call unconditional
30792 would end up either adding a second copy of the AT_pubnames attribute, or
30793 requiring a special case in add_top_level_skeleton_die_attrs. */
30794 if (!dwarf_split_debug_info)
30795 add_AT_pubnames (comp_unit_die ());
30796
30797 /* The early debug phase is now finished. */
30798 early_dwarf_finished = true;
30799
30800 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
30801 if (!flag_generate_lto && !flag_generate_offload)
30802 return;
30803
30804 /* Now that we are going to emit output for LTO, initialize sections and
30805 labels to the LTO variants.  We don't need a random-seed postfix like
30806 the other LTO sections, because linking the LTO debug sections into one
30807 in a partial link is fine. */
30808 init_sections_and_labels (true);
30809
30810 /* The output below is modeled after dwarf2out_finish with all
30811 location related output removed and some LTO specific changes.
30812 Some refactoring might make both smaller and easier to match up. */
30813
30814 /* Traverse the DIEs and add sibling attributes to those DIEs
30815 that have children. */
30816 add_sibling_attributes (comp_unit_die ());
30817 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30818 add_sibling_attributes (node->die);
30819 for (comdat_type_node *ctnode = comdat_type_list;
30820 ctnode != NULL; ctnode = ctnode->next)
30821 add_sibling_attributes (ctnode->root_die);
30822
30823 if (have_macinfo)
30824 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30825 macinfo_section_label);
30826
30827 save_macinfo_strings ();
30828
30829 if (dwarf_split_debug_info)
30830 {
30831 unsigned int index = 0;
30832 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30833 }
30834
30835 /* Output all of the compilation units. We put the main one last so that
30836 the offsets are available to output_pubnames. */
30837 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30838 output_comp_unit (node->die, 0, NULL);
30839
30840 hash_table<comdat_type_hasher> comdat_type_table (100);
30841 for (comdat_type_node *ctnode = comdat_type_list;
30842 ctnode != NULL; ctnode = ctnode->next)
30843 {
30844 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30845
30846 /* Don't output duplicate types. */
30847 if (*slot != HTAB_EMPTY_ENTRY)
30848 continue;
30849
30850 /* Add a pointer to the line table for the main compilation unit
30851 so that the debugger can make sense of DW_AT_decl_file
30852 attributes. */
30853 if (debug_info_level >= DINFO_LEVEL_TERSE)
30854 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30855 (!dwarf_split_debug_info
30856 ? debug_line_section_label
30857 : debug_skeleton_line_section_label));
30858
30859 output_comdat_type_unit (ctnode);
30860 *slot = ctnode;
30861 }
30862
30863 /* Attach a unique symbol to the main debuginfo section. */
30864 compute_comp_unit_symbol (comp_unit_die ());
30865
30866 /* Output the main compilation unit. We always need it if only for
30867 the CU symbol. */
30868 output_comp_unit (comp_unit_die (), true, NULL);
30869
30870 /* Output the abbreviation table. */
30871 if (vec_safe_length (abbrev_die_table) != 1)
30872 {
30873 switch_to_section (debug_abbrev_section);
30874 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30875 output_abbrev_section ();
30876 }
30877
30878 /* We have to end the macro section. */
30879 if (have_macinfo)
30880 {
30881 /* We have to save the macinfo state if we need to output it again
30882 for the fat part of the object (-ffat-lto-objects). */
30883 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
30884 if (flag_fat_lto_objects)
30885 macinfo_table = macinfo_table->copy ();
30886
30887 switch_to_section (debug_macinfo_section);
30888 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30889 output_macinfo (debug_skeleton_line_section_label, true);
30890 dw2_asm_output_data (1, 0, "End compilation unit");
30891
30892 /* Emit a skeleton debug_line section. */
30893 switch_to_section (debug_skeleton_line_section);
30894 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30895 output_line_info (true);
30896
30897 if (flag_fat_lto_objects)
30898 {
30899 vec_free (macinfo_table);
30900 macinfo_table = saved_macinfo_table;
30901 }
30902 }
30903
30905 /* If we emitted any indirect strings, output the string table too. */
30906 if (debug_str_hash || skeleton_debug_str_hash)
30907 output_indirect_strings ();
30908
30909 /* Switch back to the text section. */
30910 switch_to_section (text_section);
30911 }
30912
30913 /* Reset all state within dwarf2out.c so that we can rerun the compiler
30914 within the same process. For use by toplev::finalize. */
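/* A rough sketch of the intended caller, assuming the toplev interface
   looks roughly like this (see toplev.c):

	void
	toplev::finalize (void)
	{
	  ...
	  dwarf2out_c_finalize ();
	  ...
	}  */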
30915
30916 void
30917 dwarf2out_c_finalize (void)
30918 {
30919 last_var_location_insn = NULL;
30920 cached_next_real_insn = NULL;
30921 used_rtx_array = NULL;
30922 incomplete_types = NULL;
30923 decl_scope_table = NULL;
30924 debug_info_section = NULL;
30925 debug_skeleton_info_section = NULL;
30926 debug_abbrev_section = NULL;
30927 debug_skeleton_abbrev_section = NULL;
30928 debug_aranges_section = NULL;
30929 debug_addr_section = NULL;
30930 debug_macinfo_section = NULL;
30931 debug_line_section = NULL;
30932 debug_skeleton_line_section = NULL;
30933 debug_loc_section = NULL;
30934 debug_pubnames_section = NULL;
30935 debug_pubtypes_section = NULL;
30936 debug_str_section = NULL;
30937 debug_line_str_section = NULL;
30938 debug_str_dwo_section = NULL;
30939 debug_str_offsets_section = NULL;
30940 debug_ranges_section = NULL;
30941 debug_frame_section = NULL;
30942 fde_vec = NULL;
30943 debug_str_hash = NULL;
30944 debug_line_str_hash = NULL;
30945 skeleton_debug_str_hash = NULL;
30946 dw2_string_counter = 0;
30947 have_multiple_function_sections = false;
30948 text_section_used = false;
30949 cold_text_section_used = false;
30950 cold_text_section = NULL;
30951 current_unit_personality = NULL;
30952
30953 early_dwarf = false;
30954 early_dwarf_finished = false;
30955
30956 next_die_offset = 0;
30957 single_comp_unit_die = NULL;
30958 comdat_type_list = NULL;
30959 limbo_die_list = NULL;
30960 file_table = NULL;
30961 decl_die_table = NULL;
30962 common_block_die_table = NULL;
30963 decl_loc_table = NULL;
30964 call_arg_locations = NULL;
30965 call_arg_loc_last = NULL;
30966 call_site_count = -1;
30967 tail_call_site_count = -1;
30968 cached_dw_loc_list_table = NULL;
30969 abbrev_die_table = NULL;
30970 delete dwarf_proc_stack_usage_map;
30971 dwarf_proc_stack_usage_map = NULL;
30972 line_info_label_num = 0;
30973 cur_line_info_table = NULL;
30974 text_section_line_info = NULL;
30975 cold_text_section_line_info = NULL;
30976 separate_line_info = NULL;
30977 info_section_emitted = false;
30978 pubname_table = NULL;
30979 pubtype_table = NULL;
30980 macinfo_table = NULL;
30981 ranges_table = NULL;
30982 ranges_by_label = NULL;
30983 rnglist_idx = 0;
30984 have_location_lists = false;
30985 loclabel_num = 0;
30986 poc_label_num = 0;
30987 last_emitted_file = NULL;
30988 label_num = 0;
30989 tmpl_value_parm_die_table = NULL;
30990 generic_type_instances = NULL;
30991 frame_pointer_fb_offset = 0;
30992 frame_pointer_fb_offset_valid = false;
30993 base_types.release ();
30994 XDELETEVEC (producer_string);
30995 producer_string = NULL;
30996 }
30997
30998 #include "gt-dwarf2out.h"