1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47          information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
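/* Illustrative sketch, assuming a conventional x86-64 prologue (this file
   itself is target-independent): on entry the CFA is SP + 8, i.e. the
   caller's SP just before the call; after "push %rbp" it becomes SP + 16,
   and after "mov %rsp, %rbp" it is RBP + 16.  The CFI instructions emitted
   by this file record exactly such register/offset updates so the unwinder
   can recompute the CFA at any PC.  */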
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150    vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
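/* Worked example: DWARF_ROUND (9, 4) is ((9 + 3) / 4) * 4 = 12, while
   DWARF_ROUND (8, 4) stays 8.  Plain integer division is used, so BOUNDARY
   need not be a power of two.  */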
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248    Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit.  Used only when the assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_END_LABEL
302 #define FUNC_END_LABEL "LFE"
303 #endif
304
305 #ifndef PROLOGUE_END_LABEL
306 #define PROLOGUE_END_LABEL "LPE"
307 #endif
308
309 #ifndef EPILOGUE_BEGIN_LABEL
310 #define EPILOGUE_BEGIN_LABEL "LEB"
311 #endif
312
313 #ifndef FRAME_BEGIN_LABEL
314 #define FRAME_BEGIN_LABEL "Lframe"
315 #endif
316 #define CIE_AFTER_SIZE_LABEL "LSCIE"
317 #define CIE_END_LABEL "LECIE"
318 #define FDE_LABEL "LSFDE"
319 #define FDE_AFTER_SIZE_LABEL "LASFDE"
320 #define FDE_END_LABEL "LEFDE"
321 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
322 #define LINE_NUMBER_END_LABEL "LELT"
323 #define LN_PROLOG_AS_LABEL "LASLTP"
324 #define LN_PROLOG_END_LABEL "LELTP"
325 #define DIE_LABEL_PREFIX "DW"
326 \f
327 /* Match the base name of a file to the base name of a compilation unit. */
328
329 static int
330 matches_main_base (const char *path)
331 {
332 /* Cache the last query. */
333 static const char *last_path = NULL;
334 static int last_match = 0;
335 if (path != last_path)
336 {
337 const char *base;
338 int length = base_of_path (path, &base);
339 last_path = path;
340 last_match = (length == main_input_baselength
341 && memcmp (base, main_input_basename, length) == 0);
342 }
343 return last_match;
344 }
345
346 #ifdef DEBUG_DEBUG_STRUCT
347
348 static int
349 dump_struct_debug (tree type, enum debug_info_usage usage,
350 enum debug_struct_file criterion, int generic,
351 int matches, int result)
352 {
353 /* Find the type name. */
354 tree type_decl = TYPE_STUB_DECL (type);
355 tree t = type_decl;
356 const char *name = 0;
357 if (TREE_CODE (t) == TYPE_DECL)
358 t = DECL_NAME (t);
359 if (t)
360 name = IDENTIFIER_POINTER (t);
361
362 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
363 criterion,
364 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
365 matches ? "bas" : "hdr",
366 generic ? "gen" : "ord",
367 usage == DINFO_USAGE_DFN ? ";" :
368 usage == DINFO_USAGE_DIR_USE ? "." : "*",
369 result,
370 (void*) type_decl, name);
371 return result;
372 }
373 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
374 dump_struct_debug (type, usage, criterion, generic, matches, result)
375
376 #else
377
378 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
379 (result)
380
381 #endif
382
383 /* Get the number of HOST_WIDE_INTs needed to represent the precision
384 of the number. Some constants have a large uniform precision, so
385 we get the precision needed for the actual value of the number. */
386
387 static unsigned int
388 get_full_len (const wide_int &op)
389 {
390 int prec = wi::min_precision (op, UNSIGNED);
391 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
392 / HOST_BITS_PER_WIDE_INT);
393 }
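/* For example, on a host with a 64-bit HOST_WIDE_INT, a value whose minimum
   precision is 70 bits yields (70 + 63) / 64 = 2 elements.  */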
394
395 static bool
396 should_emit_struct_debug (tree type, enum debug_info_usage usage)
397 {
398 enum debug_struct_file criterion;
399 tree type_decl;
400 bool generic = lang_hooks.types.generic_p (type);
401
402 if (generic)
403 criterion = debug_struct_generic[usage];
404 else
405 criterion = debug_struct_ordinary[usage];
406
407 if (criterion == DINFO_STRUCT_FILE_NONE)
408 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
409 if (criterion == DINFO_STRUCT_FILE_ANY)
410 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
411
412 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
413
414 if (type_decl != NULL)
415 {
416 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
418
419 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
420 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
421 }
422
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
424 }
425 \f
426 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
427 switch to the data section instead, and write out a synthetic start label
428 for collect2 the first time around. */
429
430 static void
431 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
432 {
433 if (eh_frame_section == 0)
434 {
435 int flags;
436
437 if (EH_TABLES_CAN_BE_READ_ONLY)
438 {
439 int fde_encoding;
440 int per_encoding;
441 int lsda_encoding;
442
443 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
444 /*global=*/0);
445 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
446 /*global=*/1);
447 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
448 /*global=*/0);
449 flags = ((! flag_pic
450 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
451 && (fde_encoding & 0x70) != DW_EH_PE_aligned
452 && (per_encoding & 0x70) != DW_EH_PE_absptr
453 && (per_encoding & 0x70) != DW_EH_PE_aligned
454 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
455 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
456 ? 0 : SECTION_WRITE);
457 }
458 else
459 flags = SECTION_WRITE;
460
461 #ifdef EH_FRAME_SECTION_NAME
462 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
463 #else
464 eh_frame_section = ((flags == SECTION_WRITE)
465 ? data_section : readonly_data_section);
466 #endif /* EH_FRAME_SECTION_NAME */
467 }
468
469 switch_to_section (eh_frame_section);
470
471 #ifdef EH_FRAME_THROUGH_COLLECT2
472 /* We have no special eh_frame section. Emit special labels to guide
473 collect2. */
474 if (!back)
475 {
476 tree label = get_file_function_name ("F");
477 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
478 targetm.asm_out.globalize_label (asm_out_file,
479 IDENTIFIER_POINTER (label));
480 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
481 }
482 #endif
483 }
484
485 /* Switch [BACK] to the eh or debug frame table section, depending on
486 FOR_EH. */
487
488 static void
489 switch_to_frame_table_section (int for_eh, bool back)
490 {
491 if (for_eh)
492 switch_to_eh_frame_section (back);
493 else
494 {
495 if (!debug_frame_section)
496 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
497 SECTION_DEBUG, NULL);
498 switch_to_section (debug_frame_section);
499 }
500 }
501
502 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
503
504 enum dw_cfi_oprnd_type
505 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
506 {
507 switch (cfi)
508 {
509 case DW_CFA_nop:
510 case DW_CFA_GNU_window_save:
511 case DW_CFA_remember_state:
512 case DW_CFA_restore_state:
513 return dw_cfi_oprnd_unused;
514
515 case DW_CFA_set_loc:
516 case DW_CFA_advance_loc1:
517 case DW_CFA_advance_loc2:
518 case DW_CFA_advance_loc4:
519 case DW_CFA_MIPS_advance_loc8:
520 return dw_cfi_oprnd_addr;
521
522 case DW_CFA_offset:
523 case DW_CFA_offset_extended:
524 case DW_CFA_def_cfa:
525 case DW_CFA_offset_extended_sf:
526 case DW_CFA_def_cfa_sf:
527 case DW_CFA_restore:
528 case DW_CFA_restore_extended:
529 case DW_CFA_undefined:
530 case DW_CFA_same_value:
531 case DW_CFA_def_cfa_register:
532 case DW_CFA_register:
533 case DW_CFA_expression:
534 case DW_CFA_val_expression:
535 return dw_cfi_oprnd_reg_num;
536
537 case DW_CFA_def_cfa_offset:
538 case DW_CFA_GNU_args_size:
539 case DW_CFA_def_cfa_offset_sf:
540 return dw_cfi_oprnd_offset;
541
542 case DW_CFA_def_cfa_expression:
543 return dw_cfi_oprnd_loc;
544
545 default:
546 gcc_unreachable ();
547 }
548 }
549
550 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
551
552 enum dw_cfi_oprnd_type
553 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
554 {
555 switch (cfi)
556 {
557 case DW_CFA_def_cfa:
558 case DW_CFA_def_cfa_sf:
559 case DW_CFA_offset:
560 case DW_CFA_offset_extended_sf:
561 case DW_CFA_offset_extended:
562 return dw_cfi_oprnd_offset;
563
564 case DW_CFA_register:
565 return dw_cfi_oprnd_reg_num;
566
567 case DW_CFA_expression:
568 case DW_CFA_val_expression:
569 return dw_cfi_oprnd_loc;
570
571 case DW_CFA_def_cfa_expression:
572 return dw_cfi_oprnd_cfa_loc;
573
574 default:
575 return dw_cfi_oprnd_unused;
576 }
577 }
578
579 /* Output one FDE. */
580
581 static void
582 output_fde (dw_fde_ref fde, bool for_eh, bool second,
583 char *section_start_label, int fde_encoding, char *augmentation,
584 bool any_lsda_needed, int lsda_encoding)
585 {
586 const char *begin, *end;
587 static unsigned int j;
588 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
589
590 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
591 /* empty */ 0);
592 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
593 for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
595 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
596 if (!XCOFF_DEBUGGING_INFO || for_eh)
597 {
598 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
599 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
600 " indicating 64-bit DWARF extension");
601 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
602 "FDE Length");
603 }
604 ASM_OUTPUT_LABEL (asm_out_file, l1);
605
606 if (for_eh)
607 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
608 else
609 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
610 debug_frame_section, "FDE CIE offset");
611
612 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
613 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
614
615 if (for_eh)
616 {
617 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
618 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
619 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
620 "FDE initial location");
621 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
622 end, begin, "FDE address range");
623 }
624 else
625 {
626 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
628 }
629
630 if (augmentation[0])
631 {
632 if (any_lsda_needed)
633 {
634 int size = size_of_encoded_value (lsda_encoding);
635
636 if (lsda_encoding == DW_EH_PE_aligned)
637 {
638 int offset = ( 4 /* Length */
639 + 4 /* CIE offset */
640 + 2 * size_of_encoded_value (fde_encoding)
641 + 1 /* Augmentation size */ );
642 int pad = -offset & (PTR_SIZE - 1);
643
644 size += pad;
645 gcc_assert (size_of_uleb128 (size) == 1);
646 }
647
648 dw2_asm_output_data_uleb128 (size, "Augmentation size");
649
650 if (fde->uses_eh_lsda)
651 {
652 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
653 fde->funcdef_number);
654 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
655 gen_rtx_SYMBOL_REF (Pmode, l1),
656 false,
657 "Language Specific Data Area");
658 }
659 else
660 {
661 if (lsda_encoding == DW_EH_PE_aligned)
662 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
663 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
664 "Language Specific Data Area (none)");
665 }
666 }
667 else
668 dw2_asm_output_data_uleb128 (0, "Augmentation size");
669 }
670
671 /* Loop through the Call Frame Instructions associated with this FDE. */
672 fde->dw_fde_current_label = begin;
673 {
674 size_t from, until, i;
675
676 from = 0;
677 until = vec_safe_length (fde->dw_fde_cfi);
678
679 if (fde->dw_fde_second_begin == NULL)
680 ;
681 else if (!second)
682 until = fde->dw_fde_switch_cfi_index;
683 else
684 from = fde->dw_fde_switch_cfi_index;
685
686 for (i = from; i < until; i++)
687 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
688 }
689
690 /* If we are to emit a ref/link from function bodies to their frame tables,
691 do it now. This is typically performed to make sure that tables
692    associated with functions are dragged along with them and not discarded by
693    link-time garbage collection.  We need to do this on a per-function basis to
694 cope with -ffunction-sections. */
695
696 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
697 /* Switch to the function section, emit the ref to the tables, and
698 switch *back* into the table section. */
699 switch_to_section (function_section (fde->decl));
700 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
701 switch_to_frame_table_section (for_eh, true);
702 #endif
703
704 /* Pad the FDE out to an address sized boundary. */
705 ASM_OUTPUT_ALIGN (asm_out_file,
706 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
707 ASM_OUTPUT_LABEL (asm_out_file, l2);
708
709 j += 2;
710 }
711
712 /* Return true if frame description entry FDE is needed for EH. */
713
714 static bool
715 fde_needed_for_eh_p (dw_fde_ref fde)
716 {
717 if (flag_asynchronous_unwind_tables)
718 return true;
719
720 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
721 return true;
722
723 if (fde->uses_eh_lsda)
724 return true;
725
726 /* If exceptions are enabled, we have collected nothrow info. */
727 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
728 return false;
729
730 return true;
731 }
732
733 /* Output the call frame information used to record information
734 that relates to calculating the frame pointer, and records the
735 location of saved registers. */
736
737 static void
738 output_call_frame_info (int for_eh)
739 {
740 unsigned int i;
741 dw_fde_ref fde;
742 dw_cfi_ref cfi;
743 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
744 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
745 bool any_lsda_needed = false;
746 char augmentation[6];
747 int augmentation_size;
748 int fde_encoding = DW_EH_PE_absptr;
749 int per_encoding = DW_EH_PE_absptr;
750 int lsda_encoding = DW_EH_PE_absptr;
751 int return_reg;
752 rtx personality = NULL;
753 int dw_cie_version;
754
755 /* Don't emit a CIE if there won't be any FDEs. */
756 if (!fde_vec)
757 return;
758
759 /* Nothing to do if the assembler's doing it all. */
760 if (dwarf2out_do_cfi_asm ())
761 return;
762
763 /* If we don't have any functions we'll want to unwind out of, don't emit
764 any EH unwind information. If we make FDEs linkonce, we may have to
765 emit an empty label for an FDE that wouldn't otherwise be emitted. We
766 want to avoid having an FDE kept around when the function it refers to
767 is discarded. Example where this matters: a primary function template
768 in C++ requires EH information, an explicit specialization doesn't. */
769 if (for_eh)
770 {
771 bool any_eh_needed = false;
772
773 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
774 {
775 if (fde->uses_eh_lsda)
776 any_eh_needed = any_lsda_needed = true;
777 else if (fde_needed_for_eh_p (fde))
778 any_eh_needed = true;
779 else if (TARGET_USES_WEAK_UNWIND_INFO)
780 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
781 }
782
783 if (!any_eh_needed)
784 return;
785 }
786
787 /* We're going to be generating comments, so turn on app. */
788 if (flag_debug_asm)
789 app_enable ();
790
791 /* Switch to the proper frame section, first time. */
792 switch_to_frame_table_section (for_eh, false);
793
794 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
795 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
796
797 /* Output the CIE. */
798 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
799 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
800 if (!XCOFF_DEBUGGING_INFO || for_eh)
801 {
802 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
803 dw2_asm_output_data (4, 0xffffffff,
804 "Initial length escape value indicating 64-bit DWARF extension");
805 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
806 "Length of Common Information Entry");
807 }
808 ASM_OUTPUT_LABEL (asm_out_file, l1);
809
810 /* Now that the CIE pointer is PC-relative for EH,
811 use 0 to identify the CIE. */
812 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
813 (for_eh ? 0 : DWARF_CIE_ID),
814 "CIE Identifier Tag");
815
816 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
817 use CIE version 1, unless that would produce incorrect results
818 due to overflowing the return register column. */
819 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
820 dw_cie_version = 1;
821 if (return_reg >= 256 || dwarf_version > 2)
822 dw_cie_version = 3;
823 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
824
825 augmentation[0] = 0;
826 augmentation_size = 0;
827
828 personality = current_unit_personality;
829 if (for_eh)
830 {
831 char *p;
832
833 /* Augmentation:
834 z Indicates that a uleb128 is present to size the
835 augmentation section.
836 L Indicates the encoding (and thus presence) of
837 an LSDA pointer in the FDE augmentation.
838 R Indicates a non-default pointer encoding for
839 FDE code pointers.
840 P Indicates the presence of an encoding + language
841 personality routine in the CIE augmentation. */
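      /* Illustrative example: when a personality routine, an LSDA and a
	 non-absptr FDE encoding are all present, the code below builds the
	 augmentation string "zPLR", and augmentation_size accumulates one
	 byte for the personality encoding plus the encoded personality
	 pointer, and one byte each for the 'L' and 'R' entries.  */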
842
843 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
844 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
845 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
846
847 p = augmentation + 1;
848 if (personality)
849 {
850 *p++ = 'P';
851 augmentation_size += 1 + size_of_encoded_value (per_encoding);
852 assemble_external_libcall (personality);
853 }
854 if (any_lsda_needed)
855 {
856 *p++ = 'L';
857 augmentation_size += 1;
858 }
859 if (fde_encoding != DW_EH_PE_absptr)
860 {
861 *p++ = 'R';
862 augmentation_size += 1;
863 }
864 if (p > augmentation + 1)
865 {
866 augmentation[0] = 'z';
867 *p = '\0';
868 }
869
870 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
871 if (personality && per_encoding == DW_EH_PE_aligned)
872 {
873 int offset = ( 4 /* Length */
874 + 4 /* CIE Id */
875 + 1 /* CIE version */
876 + strlen (augmentation) + 1 /* Augmentation */
877 + size_of_uleb128 (1) /* Code alignment */
878 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
879 + 1 /* RA column */
880 + 1 /* Augmentation size */
881 + 1 /* Personality encoding */ );
882 int pad = -offset & (PTR_SIZE - 1);
883
884 augmentation_size += pad;
885
886 /* Augmentations should be small, so there's scarce need to
887 iterate for a solution. Die if we exceed one uleb128 byte. */
888 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
889 }
890 }
891
892 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
893 if (dw_cie_version >= 4)
894 {
895 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
896 dw2_asm_output_data (1, 0, "CIE Segment Size");
897 }
898 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
899 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
900 "CIE Data Alignment Factor");
901
902 if (dw_cie_version == 1)
903 dw2_asm_output_data (1, return_reg, "CIE RA Column");
904 else
905 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
906
907 if (augmentation[0])
908 {
909 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
910 if (personality)
911 {
912 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
913 eh_data_format_name (per_encoding));
914 dw2_asm_output_encoded_addr_rtx (per_encoding,
915 personality,
916 true, NULL);
917 }
918
919 if (any_lsda_needed)
920 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
921 eh_data_format_name (lsda_encoding));
922
923 if (fde_encoding != DW_EH_PE_absptr)
924 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
925 eh_data_format_name (fde_encoding));
926 }
927
928 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
929 output_cfi (cfi, NULL, for_eh);
930
931 /* Pad the CIE out to an address sized boundary. */
932 ASM_OUTPUT_ALIGN (asm_out_file,
933 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
934 ASM_OUTPUT_LABEL (asm_out_file, l2);
935
936 /* Loop through all of the FDE's. */
937 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
938 {
939 unsigned int k;
940
941 /* Don't emit EH unwind info for leaf functions that don't need it. */
942 if (for_eh && !fde_needed_for_eh_p (fde))
943 continue;
944
945 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
946 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
947 augmentation, any_lsda_needed, lsda_encoding);
948 }
949
950 if (for_eh && targetm.terminate_dw2_eh_frame_info)
951 dw2_asm_output_data (4, 0, "End of Table");
952
953 /* Turn off app to make assembly quicker. */
954 if (flag_debug_asm)
955 app_disable ();
956 }
957
958 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
959
960 static void
961 dwarf2out_do_cfi_startproc (bool second)
962 {
963 int enc;
964 rtx ref;
965
966 fprintf (asm_out_file, "\t.cfi_startproc\n");
967
968 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
969 eh unwinders. */
970 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
971 return;
972
973 rtx personality = get_personality_function (current_function_decl);
974
975 if (personality)
976 {
977 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
978 ref = personality;
979
980 /* ??? The GAS support isn't entirely consistent. We have to
981 handle indirect support ourselves, but PC-relative is done
982 in the assembler. Further, the assembler can't handle any
983 of the weirder relocation types. */
984 if (enc & DW_EH_PE_indirect)
985 ref = dw2_force_const_mem (ref, true);
986
987 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
988 output_addr_const (asm_out_file, ref);
989 fputc ('\n', asm_out_file);
990 }
991
992 if (crtl->uses_eh_lsda)
993 {
994 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
995
996 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
997 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
998 current_function_funcdef_no);
999 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1000 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1001
1002 if (enc & DW_EH_PE_indirect)
1003 ref = dw2_force_const_mem (ref, true);
1004
1005 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1006 output_addr_const (asm_out_file, ref);
1007 fputc ('\n', asm_out_file);
1008 }
1009 }
1010
1011 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1012 this allocation may be done before pass_final. */
1013
1014 dw_fde_ref
1015 dwarf2out_alloc_current_fde (void)
1016 {
1017 dw_fde_ref fde;
1018
1019 fde = ggc_cleared_alloc<dw_fde_node> ();
1020 fde->decl = current_function_decl;
1021 fde->funcdef_number = current_function_funcdef_no;
1022 fde->fde_index = vec_safe_length (fde_vec);
1023 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1024 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1025 fde->nothrow = crtl->nothrow;
1026 fde->drap_reg = INVALID_REGNUM;
1027 fde->vdrap_reg = INVALID_REGNUM;
1028
1029 /* Record the FDE associated with this function. */
1030 cfun->fde = fde;
1031 vec_safe_push (fde_vec, fde);
1032
1033 return fde;
1034 }
1035
1036 /* Output a marker (i.e. a label) for the beginning of a function, before
1037 the prologue. */
1038
1039 void
1040 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1041 unsigned int column ATTRIBUTE_UNUSED,
1042 const char *file ATTRIBUTE_UNUSED)
1043 {
1044 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1045 char * dup_label;
1046 dw_fde_ref fde;
1047 section *fnsec;
1048 bool do_frame;
1049
1050 current_function_func_begin_label = NULL;
1051
1052 do_frame = dwarf2out_do_frame ();
1053
1054 /* ??? current_function_func_begin_label is also used by except.c for
1055 call-site information. We must emit this label if it might be used. */
1056 if (!do_frame
1057 && (!flag_exceptions
1058 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1059 return;
1060
1061 fnsec = function_section (current_function_decl);
1062 switch_to_section (fnsec);
1063 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1064 current_function_funcdef_no);
1065 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1066 current_function_funcdef_no);
1067 dup_label = xstrdup (label);
1068 current_function_func_begin_label = dup_label;
1069
1070 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1071 if (!do_frame)
1072 return;
1073
1074 /* Unlike the debug version, the EH version of frame unwind info is a per-
1075 function setting so we need to record whether we need it for the unit. */
1076 do_eh_frame |= dwarf2out_do_eh_frame ();
1077
1078 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1079 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1080 would include pass_dwarf2_frame. If we've not created the FDE yet,
1081 do so now. */
1082 fde = cfun->fde;
1083 if (fde == NULL)
1084 fde = dwarf2out_alloc_current_fde ();
1085
1086 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1087 fde->dw_fde_begin = dup_label;
1088 fde->dw_fde_current_label = dup_label;
1089 fde->in_std_section = (fnsec == text_section
1090 || (cold_text_section && fnsec == cold_text_section));
1091
1092 /* We only want to output line number information for the genuine dwarf2
1093 prologue case, not the eh frame case. */
1094 #ifdef DWARF2_DEBUGGING_INFO
1095 if (file)
1096 dwarf2out_source_line (line, column, file, 0, true);
1097 #endif
1098
1099 if (dwarf2out_do_cfi_asm ())
1100 dwarf2out_do_cfi_startproc (false);
1101 else
1102 {
1103 rtx personality = get_personality_function (current_function_decl);
1104 if (!current_unit_personality)
1105 current_unit_personality = personality;
1106
1107 /* We cannot keep a current personality per function as without CFI
1108 asm, at the point where we emit the CFI data, there is no current
1109 function anymore. */
1110 if (personality && current_unit_personality != personality)
1111 sorry ("multiple EH personalities are supported only with assemblers "
1112 "supporting .cfi_personality directive");
1113 }
1114 }
1115
1116 /* Output a marker (i.e. a label) for the end of the generated code
1117 for a function prologue. This gets called *after* the prologue code has
1118 been generated. */
1119
1120 void
1121 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1122 const char *file ATTRIBUTE_UNUSED)
1123 {
1124 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1125
1126   /* Output a label to mark the end of the prologue code generated for this
1127 function. */
1128 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1129 current_function_funcdef_no);
1130 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1131 current_function_funcdef_no);
1132 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1133 }
1134
1135 /* Output a marker (i.e. a label) for the beginning of the generated code
1136    for a function epilogue.  This gets called *before* the epilogue code has
1137 been generated. */
1138
1139 void
1140 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1141 const char *file ATTRIBUTE_UNUSED)
1142 {
1143 dw_fde_ref fde = cfun->fde;
1144 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1145
1146 if (fde->dw_fde_vms_begin_epilogue)
1147 return;
1148
1149   /* Output a label to mark the beginning of the epilogue code generated for this
1150 function. */
1151 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1152 current_function_funcdef_no);
1153 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1154 current_function_funcdef_no);
1155 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1156 }
1157
1158 /* Output a marker (i.e. a label) for the absolute end of the generated code
1159 for a function definition. This gets called *after* the epilogue code has
1160 been generated. */
1161
1162 void
1163 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1164 const char *file ATTRIBUTE_UNUSED)
1165 {
1166 dw_fde_ref fde;
1167 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1168
1169 last_var_location_insn = NULL;
1170 cached_next_real_insn = NULL;
1171
1172 if (dwarf2out_do_cfi_asm ())
1173 fprintf (asm_out_file, "\t.cfi_endproc\n");
1174
1175 /* Output a label to mark the endpoint of the code generated for this
1176 function. */
1177 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1178 current_function_funcdef_no);
1179 ASM_OUTPUT_LABEL (asm_out_file, label);
1180 fde = cfun->fde;
1181 gcc_assert (fde != NULL);
1182 if (fde->dw_fde_second_begin == NULL)
1183 fde->dw_fde_end = xstrdup (label);
1184 }
1185
1186 void
1187 dwarf2out_frame_finish (void)
1188 {
1189 /* Output call frame information. */
1190 if (targetm.debug_unwind_info () == UI_DWARF2)
1191 output_call_frame_info (0);
1192
1193 /* Output another copy for the unwinder. */
1194 if (do_eh_frame)
1195 output_call_frame_info (1);
1196 }
1197
1198 /* Note that the current function section is being used for code. */
1199
1200 static void
1201 dwarf2out_note_section_used (void)
1202 {
1203 section *sec = current_function_section ();
1204 if (sec == text_section)
1205 text_section_used = true;
1206 else if (sec == cold_text_section)
1207 cold_text_section_used = true;
1208 }
1209
1210 static void var_location_switch_text_section (void);
1211 static void set_cur_line_info_table (section *);
1212
1213 void
1214 dwarf2out_switch_text_section (void)
1215 {
1216 section *sect;
1217 dw_fde_ref fde = cfun->fde;
1218
1219 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1220
1221 if (!in_cold_section_p)
1222 {
1223 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1224 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1225 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1226 }
1227 else
1228 {
1229 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1232 }
1233 have_multiple_function_sections = true;
1234
1235 /* There is no need to mark used sections when not debugging. */
1236 if (cold_text_section != NULL)
1237 dwarf2out_note_section_used ();
1238
1239 if (dwarf2out_do_cfi_asm ())
1240 fprintf (asm_out_file, "\t.cfi_endproc\n");
1241
1242 /* Now do the real section switch. */
1243 sect = current_function_section ();
1244 switch_to_section (sect);
1245
1246 fde->second_in_std_section
1247 = (sect == text_section
1248 || (cold_text_section && sect == cold_text_section));
1249
1250 if (dwarf2out_do_cfi_asm ())
1251 dwarf2out_do_cfi_startproc (true);
1252
1253 var_location_switch_text_section ();
1254
1255 if (cold_text_section != NULL)
1256 set_cur_line_info_table (sect);
1257 }
1258 \f
1259 /* And now, the subset of the debugging information support code necessary
1260 for emitting location expressions. */
1261
1262 /* Data about a single source file. */
1263 struct GTY((for_user)) dwarf_file_data {
1264 const char * filename;
1265 int emitted_number;
1266 };
1267
1268 /* Describe an entry into the .debug_addr section. */
1269
1270 enum ate_kind {
1271 ate_kind_rtx,
1272 ate_kind_rtx_dtprel,
1273 ate_kind_label
1274 };
1275
1276 struct GTY((for_user)) addr_table_entry {
1277 enum ate_kind kind;
1278 unsigned int refcount;
1279 unsigned int index;
1280 union addr_table_entry_struct_union
1281 {
1282 rtx GTY ((tag ("0"))) rtl;
1283 char * GTY ((tag ("1"))) label;
1284 }
1285 GTY ((desc ("%1.kind"))) addr;
1286 };
1287
1288 typedef unsigned int var_loc_view;
1289
1290 /* Location lists are ranges + location descriptions for that range,
1291 so you can track variables that are in different places over
1292 their entire life. */
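/* For instance (the labels here are purely illustrative), a variable kept in
   a register and later spilled to the stack might be described by two nodes:
     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL2)  DW_OP_fbreg -8
   Each node carries one such range together with its expression.  */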
1293 typedef struct GTY(()) dw_loc_list_struct {
1294 dw_loc_list_ref dw_loc_next;
1295 const char *begin; /* Label and addr_entry for start of range */
1296 addr_table_entry *begin_entry;
1297 const char *end; /* Label for end of range */
1298 char *ll_symbol; /* Label for beginning of location list.
1299 Only on head of list. */
1300 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1301 const char *section; /* Section this loclist is relative to */
1302 dw_loc_descr_ref expr;
1303 var_loc_view vbegin, vend;
1304 hashval_t hash;
1305 /* True if all addresses in this and subsequent lists are known to be
1306 resolved. */
1307 bool resolved_addr;
1308 /* True if this list has been replaced by dw_loc_next. */
1309 bool replaced;
1310 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1311 section. */
1312 unsigned char emitted : 1;
1313 /* True if hash field is index rather than hash value. */
1314 unsigned char num_assigned : 1;
1315 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1316 unsigned char offset_emitted : 1;
1317 /* True if note_variable_value_in_expr has been called on it. */
1318 unsigned char noted_variable_value : 1;
1319 /* True if the range should be emitted even if begin and end
1320 are the same. */
1321 bool force;
1322 } dw_loc_list_node;
1323
1324 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1325 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1326
1327 /* Convert a DWARF stack opcode into its string name. */
1328
1329 static const char *
1330 dwarf_stack_op_name (unsigned int op)
1331 {
1332 const char *name = get_DW_OP_name (op);
1333
1334 if (name != NULL)
1335 return name;
1336
1337 return "OP_<unknown>";
1338 }
1339
1340 /* Return TRUE iff we're to output location view lists as a separate
1341 attribute next to the location lists, as an extension compatible
1342 with DWARF 2 and above. */
1343
1344 static inline bool
1345 dwarf2out_locviews_in_attribute ()
1346 {
1347 return debug_variable_location_views == 1;
1348 }
1349
1350 /* Return TRUE iff we're to output location view lists as part of the
1351 location lists, as proposed for standardization after DWARF 5. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_loclist ()
1355 {
1356 #ifndef DW_LLE_view_pair
1357 return false;
1358 #else
1359 return debug_variable_location_views == -1;
1360 #endif
1361 }
1362
1363 /* Return a pointer to a newly allocated location description. Location
1364 descriptions are simple expression terms that can be strung
1365 together to form more complicated location (address) descriptions. */
1366
1367 static inline dw_loc_descr_ref
1368 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1369 unsigned HOST_WIDE_INT oprnd2)
1370 {
1371 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1372
1373 descr->dw_loc_opc = op;
1374 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1375 descr->dw_loc_oprnd1.val_entry = NULL;
1376 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1377 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1378 descr->dw_loc_oprnd2.val_entry = NULL;
1379 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1380
1381 return descr;
1382 }
1383
1384 /* Add a location description term to a location description expression. */
1385
1386 static inline void
1387 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1388 {
1389 dw_loc_descr_ref *d;
1390
1391 /* Find the end of the chain. */
1392 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1393 ;
1394
1395 *d = descr;
1396 }
1397
1398 /* Compare two location operands for exact equality. */
1399
1400 static bool
1401 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1402 {
1403 if (a->val_class != b->val_class)
1404 return false;
1405 switch (a->val_class)
1406 {
1407 case dw_val_class_none:
1408 return true;
1409 case dw_val_class_addr:
1410 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1411
1412 case dw_val_class_offset:
1413 case dw_val_class_unsigned_const:
1414 case dw_val_class_const:
1415 case dw_val_class_unsigned_const_implicit:
1416 case dw_val_class_const_implicit:
1417 case dw_val_class_range_list:
1418 /* These are all HOST_WIDE_INT, signed or unsigned. */
1419 return a->v.val_unsigned == b->v.val_unsigned;
1420
1421 case dw_val_class_loc:
1422 return a->v.val_loc == b->v.val_loc;
1423 case dw_val_class_loc_list:
1424 return a->v.val_loc_list == b->v.val_loc_list;
1425 case dw_val_class_view_list:
1426 return a->v.val_view_list == b->v.val_view_list;
1427 case dw_val_class_die_ref:
1428 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1429 case dw_val_class_fde_ref:
1430 return a->v.val_fde_index == b->v.val_fde_index;
1431 case dw_val_class_symview:
1432 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1433 case dw_val_class_lbl_id:
1434 case dw_val_class_lineptr:
1435 case dw_val_class_macptr:
1436 case dw_val_class_loclistsptr:
1437 case dw_val_class_high_pc:
1438 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1439 case dw_val_class_str:
1440 return a->v.val_str == b->v.val_str;
1441 case dw_val_class_flag:
1442 return a->v.val_flag == b->v.val_flag;
1443 case dw_val_class_file:
1444 case dw_val_class_file_implicit:
1445 return a->v.val_file == b->v.val_file;
1446 case dw_val_class_decl_ref:
1447 return a->v.val_decl_ref == b->v.val_decl_ref;
1448
1449 case dw_val_class_const_double:
1450 return (a->v.val_double.high == b->v.val_double.high
1451 && a->v.val_double.low == b->v.val_double.low);
1452
1453 case dw_val_class_wide_int:
1454 return *a->v.val_wide == *b->v.val_wide;
1455
1456 case dw_val_class_vec:
1457 {
1458 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1459 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1460
1461 return (a_len == b_len
1462 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1463 }
1464
1465 case dw_val_class_data8:
1466 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1467
1468 case dw_val_class_vms_delta:
1469 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1470              && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1471
1472 case dw_val_class_discr_value:
1473 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1474 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1475 case dw_val_class_discr_list:
1476 /* It makes no sense comparing two discriminant value lists. */
1477 return false;
1478 }
1479 gcc_unreachable ();
1480 }
1481
1482 /* Compare two location atoms for exact equality. */
1483
1484 static bool
1485 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1486 {
1487 if (a->dw_loc_opc != b->dw_loc_opc)
1488 return false;
1489
1490 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1491 address size, but since we always allocate cleared storage it
1492 should be zero for other types of locations. */
1493 if (a->dtprel != b->dtprel)
1494 return false;
1495
1496 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1497 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1498 }
1499
1500 /* Compare two complete location expressions for exact equality. */
1501
1502 bool
1503 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1504 {
1505 while (1)
1506 {
1507 if (a == b)
1508 return true;
1509 if (a == NULL || b == NULL)
1510 return false;
1511 if (!loc_descr_equal_p_1 (a, b))
1512 return false;
1513
1514 a = a->dw_loc_next;
1515 b = b->dw_loc_next;
1516 }
1517 }
1518
1519
1520 /* Add a constant POLY_OFFSET to a location expression. */
1521
1522 static void
1523 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1524 {
1525 dw_loc_descr_ref loc;
1526 HOST_WIDE_INT *p;
1527
1528 gcc_assert (*list_head != NULL);
1529
1530 if (known_eq (poly_offset, 0))
1531 return;
1532
1533 /* Find the end of the chain. */
1534 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1535 ;
1536
1537 HOST_WIDE_INT offset;
1538 if (!poly_offset.is_constant (&offset))
1539 {
1540 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1541 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1542 return;
1543 }
1544
1545 p = NULL;
1546 if (loc->dw_loc_opc == DW_OP_fbreg
1547 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1548 p = &loc->dw_loc_oprnd1.v.val_int;
1549 else if (loc->dw_loc_opc == DW_OP_bregx)
1550 p = &loc->dw_loc_oprnd2.v.val_int;
1551
1552 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1553      offset.  Don't optimize if a signed integer overflow would happen.  */
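  /* E.g. adding 8 to an expression ending in DW_OP_fbreg -16 simply rewrites
     it to DW_OP_fbreg -8 rather than appending DW_OP_plus_uconst 8.  */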
1554 if (p != NULL
1555 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1556 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1557 *p += offset;
1558
1559 else if (offset > 0)
1560 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1561
1562 else
1563 {
1564 loc->dw_loc_next
1565 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1566 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1567 }
1568 }
1569
1570 /* Return a pointer to a newly allocated location description for
1571 REG and OFFSET. */
1572
1573 static inline dw_loc_descr_ref
1574 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1575 {
1576 HOST_WIDE_INT const_offset;
1577 if (offset.is_constant (&const_offset))
1578 {
1579 if (reg <= 31)
1580 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1581 const_offset, 0);
1582 else
1583 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1584 }
1585 else
1586 {
1587 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1588 loc_descr_plus_const (&ret, offset);
1589 return ret;
1590 }
1591 }
1592
1593 /* Add a constant OFFSET to a location list. */
1594
1595 static void
1596 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1597 {
1598 dw_loc_list_ref d;
1599 for (d = list_head; d != NULL; d = d->dw_loc_next)
1600 loc_descr_plus_const (&d->expr, offset);
1601 }
1602
1603 #define DWARF_REF_SIZE \
1604 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1605
1606 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1607 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1608 DW_FORM_data16 with 128 bits. */
1609 #define DWARF_LARGEST_DATA_FORM_BITS \
1610 (dwarf_version >= 5 ? 128 : 64)
1611
1612 /* Utility inline function for construction of ops that were GNU extensions
1613 before DWARF 5. */
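/* For example, when dwarf_version is 4, dwarf_OP (DW_OP_entry_value) returns
   DW_OP_GNU_entry_value; for DWARF 5 and later the standard opcode is
   returned unchanged.  */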
1614 static inline enum dwarf_location_atom
1615 dwarf_OP (enum dwarf_location_atom op)
1616 {
1617 switch (op)
1618 {
1619 case DW_OP_implicit_pointer:
1620 if (dwarf_version < 5)
1621 return DW_OP_GNU_implicit_pointer;
1622 break;
1623
1624 case DW_OP_entry_value:
1625 if (dwarf_version < 5)
1626 return DW_OP_GNU_entry_value;
1627 break;
1628
1629 case DW_OP_const_type:
1630 if (dwarf_version < 5)
1631 return DW_OP_GNU_const_type;
1632 break;
1633
1634 case DW_OP_regval_type:
1635 if (dwarf_version < 5)
1636 return DW_OP_GNU_regval_type;
1637 break;
1638
1639 case DW_OP_deref_type:
1640 if (dwarf_version < 5)
1641 return DW_OP_GNU_deref_type;
1642 break;
1643
1644 case DW_OP_convert:
1645 if (dwarf_version < 5)
1646 return DW_OP_GNU_convert;
1647 break;
1648
1649 case DW_OP_reinterpret:
1650 if (dwarf_version < 5)
1651 return DW_OP_GNU_reinterpret;
1652 break;
1653
1654 case DW_OP_addrx:
1655 if (dwarf_version < 5)
1656 return DW_OP_GNU_addr_index;
1657 break;
1658
1659 case DW_OP_constx:
1660 if (dwarf_version < 5)
1661 return DW_OP_GNU_const_index;
1662 break;
1663
1664 default:
1665 break;
1666 }
1667 return op;
1668 }
1669
1670 /* Similarly for attributes. */
1671 static inline enum dwarf_attribute
1672 dwarf_AT (enum dwarf_attribute at)
1673 {
1674 switch (at)
1675 {
1676 case DW_AT_call_return_pc:
1677 if (dwarf_version < 5)
1678 return DW_AT_low_pc;
1679 break;
1680
1681 case DW_AT_call_tail_call:
1682 if (dwarf_version < 5)
1683 return DW_AT_GNU_tail_call;
1684 break;
1685
1686 case DW_AT_call_origin:
1687 if (dwarf_version < 5)
1688 return DW_AT_abstract_origin;
1689 break;
1690
1691 case DW_AT_call_target:
1692 if (dwarf_version < 5)
1693 return DW_AT_GNU_call_site_target;
1694 break;
1695
1696 case DW_AT_call_target_clobbered:
1697 if (dwarf_version < 5)
1698 return DW_AT_GNU_call_site_target_clobbered;
1699 break;
1700
1701 case DW_AT_call_parameter:
1702 if (dwarf_version < 5)
1703 return DW_AT_abstract_origin;
1704 break;
1705
1706 case DW_AT_call_value:
1707 if (dwarf_version < 5)
1708 return DW_AT_GNU_call_site_value;
1709 break;
1710
1711 case DW_AT_call_data_value:
1712 if (dwarf_version < 5)
1713 return DW_AT_GNU_call_site_data_value;
1714 break;
1715
1716 case DW_AT_call_all_calls:
1717 if (dwarf_version < 5)
1718 return DW_AT_GNU_all_call_sites;
1719 break;
1720
1721 case DW_AT_call_all_tail_calls:
1722 if (dwarf_version < 5)
1723 return DW_AT_GNU_all_tail_call_sites;
1724 break;
1725
1726 case DW_AT_dwo_name:
1727 if (dwarf_version < 5)
1728 return DW_AT_GNU_dwo_name;
1729 break;
1730
1731 case DW_AT_addr_base:
1732 if (dwarf_version < 5)
1733 return DW_AT_GNU_addr_base;
1734 break;
1735
1736 default:
1737 break;
1738 }
1739 return at;
1740 }
1741
1742 /* And similarly for tags. */
1743 static inline enum dwarf_tag
1744 dwarf_TAG (enum dwarf_tag tag)
1745 {
1746 switch (tag)
1747 {
1748 case DW_TAG_call_site:
1749 if (dwarf_version < 5)
1750 return DW_TAG_GNU_call_site;
1751 break;
1752
1753 case DW_TAG_call_site_parameter:
1754 if (dwarf_version < 5)
1755 return DW_TAG_GNU_call_site_parameter;
1756 break;
1757
1758 default:
1759 break;
1760 }
1761 return tag;
1762 }
1763
1764 /* And similarly for forms. */
1765 static inline enum dwarf_form
1766 dwarf_FORM (enum dwarf_form form)
1767 {
1768 switch (form)
1769 {
1770 case DW_FORM_addrx:
1771 if (dwarf_version < 5)
1772 return DW_FORM_GNU_addr_index;
1773 break;
1774
1775 case DW_FORM_strx:
1776 if (dwarf_version < 5)
1777 return DW_FORM_GNU_str_index;
1778 break;
1779
1780 default:
1781 break;
1782 }
1783 return form;
1784 }
1785
1786 static unsigned long int get_base_type_offset (dw_die_ref);
1787
1788 /* Return the size of a location descriptor. */
1789
1790 static unsigned long
1791 size_of_loc_descr (dw_loc_descr_ref loc)
1792 {
1793 unsigned long size = 1;
1794
1795 switch (loc->dw_loc_opc)
1796 {
1797 case DW_OP_addr:
1798 size += DWARF2_ADDR_SIZE;
1799 break;
1800 case DW_OP_GNU_addr_index:
1801 case DW_OP_addrx:
1802 case DW_OP_GNU_const_index:
1803 case DW_OP_constx:
1804 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1805 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1806 break;
1807 case DW_OP_const1u:
1808 case DW_OP_const1s:
1809 size += 1;
1810 break;
1811 case DW_OP_const2u:
1812 case DW_OP_const2s:
1813 size += 2;
1814 break;
1815 case DW_OP_const4u:
1816 case DW_OP_const4s:
1817 size += 4;
1818 break;
1819 case DW_OP_const8u:
1820 case DW_OP_const8s:
1821 size += 8;
1822 break;
1823 case DW_OP_constu:
1824 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1825 break;
1826 case DW_OP_consts:
1827 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1828 break;
1829 case DW_OP_pick:
1830 size += 1;
1831 break;
1832 case DW_OP_plus_uconst:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_skip:
1836 case DW_OP_bra:
1837 size += 2;
1838 break;
1839 case DW_OP_breg0:
1840 case DW_OP_breg1:
1841 case DW_OP_breg2:
1842 case DW_OP_breg3:
1843 case DW_OP_breg4:
1844 case DW_OP_breg5:
1845 case DW_OP_breg6:
1846 case DW_OP_breg7:
1847 case DW_OP_breg8:
1848 case DW_OP_breg9:
1849 case DW_OP_breg10:
1850 case DW_OP_breg11:
1851 case DW_OP_breg12:
1852 case DW_OP_breg13:
1853 case DW_OP_breg14:
1854 case DW_OP_breg15:
1855 case DW_OP_breg16:
1856 case DW_OP_breg17:
1857 case DW_OP_breg18:
1858 case DW_OP_breg19:
1859 case DW_OP_breg20:
1860 case DW_OP_breg21:
1861 case DW_OP_breg22:
1862 case DW_OP_breg23:
1863 case DW_OP_breg24:
1864 case DW_OP_breg25:
1865 case DW_OP_breg26:
1866 case DW_OP_breg27:
1867 case DW_OP_breg28:
1868 case DW_OP_breg29:
1869 case DW_OP_breg30:
1870 case DW_OP_breg31:
1871 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1872 break;
1873 case DW_OP_regx:
1874 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1875 break;
1876 case DW_OP_fbreg:
1877 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1878 break;
1879 case DW_OP_bregx:
1880 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1881 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1882 break;
1883 case DW_OP_piece:
1884 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1885 break;
1886 case DW_OP_bit_piece:
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1888 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1889 break;
1890 case DW_OP_deref_size:
1891 case DW_OP_xderef_size:
1892 size += 1;
1893 break;
1894 case DW_OP_call2:
1895 size += 2;
1896 break;
1897 case DW_OP_call4:
1898 size += 4;
1899 break;
1900 case DW_OP_call_ref:
1901 case DW_OP_GNU_variable_value:
1902 size += DWARF_REF_SIZE;
1903 break;
1904 case DW_OP_implicit_value:
1905 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1906 + loc->dw_loc_oprnd1.v.val_unsigned;
1907 break;
1908 case DW_OP_implicit_pointer:
1909 case DW_OP_GNU_implicit_pointer:
1910 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1911 break;
1912 case DW_OP_entry_value:
1913 case DW_OP_GNU_entry_value:
1914 {
1915 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1916 size += size_of_uleb128 (op_size) + op_size;
1917 break;
1918 }
1919 case DW_OP_const_type:
1920 case DW_OP_GNU_const_type:
1921 {
1922 unsigned long o
1923 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1924 size += size_of_uleb128 (o) + 1;
1925 switch (loc->dw_loc_oprnd2.val_class)
1926 {
1927 case dw_val_class_vec:
1928 size += loc->dw_loc_oprnd2.v.val_vec.length
1929 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1930 break;
1931 case dw_val_class_const:
1932 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1933 break;
1934 case dw_val_class_const_double:
1935 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1936 break;
1937 case dw_val_class_wide_int:
1938 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1939 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1940 break;
1941 default:
1942 gcc_unreachable ();
1943 }
1944 break;
1945 }
1946 case DW_OP_regval_type:
1947 case DW_OP_GNU_regval_type:
1948 {
1949 unsigned long o
1950 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1951 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1952 + size_of_uleb128 (o);
1953 }
1954 break;
1955 case DW_OP_deref_type:
1956 case DW_OP_GNU_deref_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += 1 + size_of_uleb128 (o);
1961 }
1962 break;
1963 case DW_OP_convert:
1964 case DW_OP_reinterpret:
1965 case DW_OP_GNU_convert:
1966 case DW_OP_GNU_reinterpret:
1967 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1968 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1969 else
1970 {
1971 unsigned long o
1972 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1973 size += size_of_uleb128 (o);
1974 }
1975 break;
1976 case DW_OP_GNU_parameter_ref:
1977 size += 4;
1978 break;
1979 default:
1980 break;
1981 }
1982
1983 return size;
1984 }
1985
1986 /* Return the size of a series of location descriptors. */
1987
1988 unsigned long
1989 size_of_locs (dw_loc_descr_ref loc)
1990 {
1991 dw_loc_descr_ref l;
1992 unsigned long size;
1993
1994 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1995 field, to avoid writing to a PCH file. */
1996 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1997 {
1998 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1999 break;
2000 size += size_of_loc_descr (l);
2001 }
2002 if (! l)
2003 return size;
2004
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 l->dw_loc_addr = size;
2008 size += size_of_loc_descr (l);
2009 }
2010
2011 return size;
2012 }
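
/* Informal worked example: for the branch-free sequence DW_OP_breg6 -8;
   DW_OP_deref the first loop above already yields the answer,
   (1 + size_of_sleb128 (-8)) + 1 == 3 bytes.  Only when a DW_OP_skip or
   DW_OP_bra is present do we run the second loop, which also records
   each operation's offset in dw_loc_addr so that output_loc_operands
   can later compute the 2-byte branch operand as
   target->dw_loc_addr - (branch->dw_loc_addr + 3).  */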
2013
2014 /* Return the size of the value in a DW_AT_discr_value attribute. */
2015
2016 static int
2017 size_of_discr_value (dw_discr_value *discr_value)
2018 {
2019 if (discr_value->pos)
2020 return size_of_uleb128 (discr_value->v.uval);
2021 else
2022 return size_of_sleb128 (discr_value->v.sval);
2023 }
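
/* As a reminder of the LEB128 sizing rules used above: size_of_uleb128
   returns 1 for values up to 127 and 2 for 128 .. 16383, while
   size_of_sleb128 returns 1 only for -64 .. 63, because the sign bit of
   the final septet must match the sign of the value.  So an unsigned
   discriminant of 200 costs two bytes, whereas a signed one of 42 costs
   one.  */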
2024
2025 /* Return the size of the value in a DW_AT_discr_list attribute. */
2026
2027 static int
2028 size_of_discr_list (dw_discr_list_ref discr_list)
2029 {
2030 int size = 0;
2031
2032 for (dw_discr_list_ref list = discr_list;
2033 list != NULL;
2034 list = list->dw_discr_next)
2035 {
2036 /* One byte for the discriminant value descriptor, and then one or two
2037 LEB128 numbers, depending on whether it's a single case label or a
2038 range label. */
2039 size += 1;
2040 size += size_of_discr_value (&list->dw_discr_lower_bound);
2041 if (list->dw_discr_range != 0)
2042 size += size_of_discr_value (&list->dw_discr_upper_bound);
2043 }
2044 return size;
2045 }
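
/* Informal example: a discriminant list holding the single case label 3
   followed by the range 10 .. 20 takes (1 + 1) + (1 + 1 + 1) == 5 bytes
   with the sizing above: one descriptor byte plus one LEB128 number for
   the single label, and one descriptor byte plus two LEB128 numbers for
   the range.  */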
2046
2047 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2048 static void get_ref_die_offset_label (char *, dw_die_ref);
2049 static unsigned long int get_ref_die_offset (dw_die_ref);
2050
2051 /* Output location description stack opcode's operands (if any).
2052 The for_eh_or_skip parameter controls whether register numbers are
2053 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2054 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2055 info). This should be suppressed for the cases that have not been converted
2056 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2057
2058 static void
2059 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2060 {
2061 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2062 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2063
2064 switch (loc->dw_loc_opc)
2065 {
2066 #ifdef DWARF2_DEBUGGING_INFO
2067 case DW_OP_const2u:
2068 case DW_OP_const2s:
2069 dw2_asm_output_data (2, val1->v.val_int, NULL);
2070 break;
2071 case DW_OP_const4u:
2072 if (loc->dtprel)
2073 {
2074 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2075 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2076 val1->v.val_addr);
2077 fputc ('\n', asm_out_file);
2078 break;
2079 }
2080 /* FALLTHRU */
2081 case DW_OP_const4s:
2082 dw2_asm_output_data (4, val1->v.val_int, NULL);
2083 break;
2084 case DW_OP_const8u:
2085 if (loc->dtprel)
2086 {
2087 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2088 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2089 val1->v.val_addr);
2090 fputc ('\n', asm_out_file);
2091 break;
2092 }
2093 /* FALLTHRU */
2094 case DW_OP_const8s:
2095 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2096 dw2_asm_output_data (8, val1->v.val_int, NULL);
2097 break;
2098 case DW_OP_skip:
2099 case DW_OP_bra:
2100 {
2101 int offset;
2102
2103 gcc_assert (val1->val_class == dw_val_class_loc);
2104 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2105
2106 dw2_asm_output_data (2, offset, NULL);
2107 }
2108 break;
2109 case DW_OP_implicit_value:
2110 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2111 switch (val2->val_class)
2112 {
2113 case dw_val_class_const:
2114 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2115 break;
2116 case dw_val_class_vec:
2117 {
2118 unsigned int elt_size = val2->v.val_vec.elt_size;
2119 unsigned int len = val2->v.val_vec.length;
2120 unsigned int i;
2121 unsigned char *p;
2122
2123 if (elt_size > sizeof (HOST_WIDE_INT))
2124 {
2125 elt_size /= 2;
2126 len *= 2;
2127 }
2128 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2129 i < len;
2130 i++, p += elt_size)
2131 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2132 "fp or vector constant word %u", i);
2133 }
2134 break;
2135 case dw_val_class_const_double:
2136 {
2137 unsigned HOST_WIDE_INT first, second;
2138
2139 if (WORDS_BIG_ENDIAN)
2140 {
2141 first = val2->v.val_double.high;
2142 second = val2->v.val_double.low;
2143 }
2144 else
2145 {
2146 first = val2->v.val_double.low;
2147 second = val2->v.val_double.high;
2148 }
2149 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2150 first, NULL);
2151 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2152 second, NULL);
2153 }
2154 break;
2155 case dw_val_class_wide_int:
2156 {
2157 int i;
2158 int len = get_full_len (*val2->v.val_wide);
2159 if (WORDS_BIG_ENDIAN)
2160 for (i = len - 1; i >= 0; --i)
2161 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2162 val2->v.val_wide->elt (i), NULL);
2163 else
2164 for (i = 0; i < len; ++i)
2165 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2166 val2->v.val_wide->elt (i), NULL);
2167 }
2168 break;
2169 case dw_val_class_addr:
2170 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2171 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2172 break;
2173 default:
2174 gcc_unreachable ();
2175 }
2176 break;
2177 #else
2178 case DW_OP_const2u:
2179 case DW_OP_const2s:
2180 case DW_OP_const4u:
2181 case DW_OP_const4s:
2182 case DW_OP_const8u:
2183 case DW_OP_const8s:
2184 case DW_OP_skip:
2185 case DW_OP_bra:
2186 case DW_OP_implicit_value:
2187 /* We currently don't make any attempt to make sure these are
2188 aligned properly like we do for the main unwind info, so
2189 don't support emitting things larger than a byte if we're
2190 only doing unwinding. */
2191 gcc_unreachable ();
2192 #endif
2193 case DW_OP_const1u:
2194 case DW_OP_const1s:
2195 dw2_asm_output_data (1, val1->v.val_int, NULL);
2196 break;
2197 case DW_OP_constu:
2198 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2199 break;
2200 case DW_OP_consts:
2201 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2202 break;
2203 case DW_OP_pick:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_plus_uconst:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_breg0:
2210 case DW_OP_breg1:
2211 case DW_OP_breg2:
2212 case DW_OP_breg3:
2213 case DW_OP_breg4:
2214 case DW_OP_breg5:
2215 case DW_OP_breg6:
2216 case DW_OP_breg7:
2217 case DW_OP_breg8:
2218 case DW_OP_breg9:
2219 case DW_OP_breg10:
2220 case DW_OP_breg11:
2221 case DW_OP_breg12:
2222 case DW_OP_breg13:
2223 case DW_OP_breg14:
2224 case DW_OP_breg15:
2225 case DW_OP_breg16:
2226 case DW_OP_breg17:
2227 case DW_OP_breg18:
2228 case DW_OP_breg19:
2229 case DW_OP_breg20:
2230 case DW_OP_breg21:
2231 case DW_OP_breg22:
2232 case DW_OP_breg23:
2233 case DW_OP_breg24:
2234 case DW_OP_breg25:
2235 case DW_OP_breg26:
2236 case DW_OP_breg27:
2237 case DW_OP_breg28:
2238 case DW_OP_breg29:
2239 case DW_OP_breg30:
2240 case DW_OP_breg31:
2241 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2242 break;
2243 case DW_OP_regx:
2244 {
2245 unsigned r = val1->v.val_unsigned;
2246 if (for_eh_or_skip >= 0)
2247 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2248 gcc_assert (size_of_uleb128 (r)
2249 == size_of_uleb128 (val1->v.val_unsigned));
2250 dw2_asm_output_data_uleb128 (r, NULL);
2251 }
2252 break;
2253 case DW_OP_fbreg:
2254 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2255 break;
2256 case DW_OP_bregx:
2257 {
2258 unsigned r = val1->v.val_unsigned;
2259 if (for_eh_or_skip >= 0)
2260 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2261 gcc_assert (size_of_uleb128 (r)
2262 == size_of_uleb128 (val1->v.val_unsigned));
2263 dw2_asm_output_data_uleb128 (r, NULL);
2264 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2265 }
2266 break;
2267 case DW_OP_piece:
2268 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2269 break;
2270 case DW_OP_bit_piece:
2271 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2272 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2273 break;
2274 case DW_OP_deref_size:
2275 case DW_OP_xderef_size:
2276 dw2_asm_output_data (1, val1->v.val_int, NULL);
2277 break;
2278
2279 case DW_OP_addr:
2280 if (loc->dtprel)
2281 {
2282 if (targetm.asm_out.output_dwarf_dtprel)
2283 {
2284 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2285 DWARF2_ADDR_SIZE,
2286 val1->v.val_addr);
2287 fputc ('\n', asm_out_file);
2288 }
2289 else
2290 gcc_unreachable ();
2291 }
2292 else
2293 {
2294 #ifdef DWARF2_DEBUGGING_INFO
2295 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2296 #else
2297 gcc_unreachable ();
2298 #endif
2299 }
2300 break;
2301
2302 case DW_OP_GNU_addr_index:
2303 case DW_OP_addrx:
2304 case DW_OP_GNU_const_index:
2305 case DW_OP_constx:
2306 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2307 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2308 "(index into .debug_addr)");
2309 break;
2310
2311 case DW_OP_call2:
2312 case DW_OP_call4:
2313 {
2314 unsigned long die_offset
2315 = get_ref_die_offset (val1->v.val_die_ref.die);
2316 /* Make sure the offset has been computed and that we can encode it as
2317 an operand. */
2318 gcc_assert (die_offset > 0
2319 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2320 ? 0xffff
2321 : 0xffffffff));
2322 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2323 die_offset, NULL);
2324 }
2325 break;
2326
2327 case DW_OP_call_ref:
2328 case DW_OP_GNU_variable_value:
2329 {
2330 char label[MAX_ARTIFICIAL_LABEL_BYTES
2331 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2332 gcc_assert (val1->val_class == dw_val_class_die_ref);
2333 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2334 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2335 }
2336 break;
2337
2338 case DW_OP_implicit_pointer:
2339 case DW_OP_GNU_implicit_pointer:
2340 {
2341 char label[MAX_ARTIFICIAL_LABEL_BYTES
2342 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2343 gcc_assert (val1->val_class == dw_val_class_die_ref);
2344 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2345 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2346 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2347 }
2348 break;
2349
2350 case DW_OP_entry_value:
2351 case DW_OP_GNU_entry_value:
2352 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2353 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2354 break;
2355
2356 case DW_OP_const_type:
2357 case DW_OP_GNU_const_type:
2358 {
2359 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2360 gcc_assert (o);
2361 dw2_asm_output_data_uleb128 (o, NULL);
2362 switch (val2->val_class)
2363 {
2364 case dw_val_class_const:
2365 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2366 dw2_asm_output_data (1, l, NULL);
2367 dw2_asm_output_data (l, val2->v.val_int, NULL);
2368 break;
2369 case dw_val_class_vec:
2370 {
2371 unsigned int elt_size = val2->v.val_vec.elt_size;
2372 unsigned int len = val2->v.val_vec.length;
2373 unsigned int i;
2374 unsigned char *p;
2375
2376 l = len * elt_size;
2377 dw2_asm_output_data (1, l, NULL);
2378 if (elt_size > sizeof (HOST_WIDE_INT))
2379 {
2380 elt_size /= 2;
2381 len *= 2;
2382 }
2383 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2384 i < len;
2385 i++, p += elt_size)
2386 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2387 "fp or vector constant word %u", i);
2388 }
2389 break;
2390 case dw_val_class_const_double:
2391 {
2392 unsigned HOST_WIDE_INT first, second;
2393 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2394
2395 dw2_asm_output_data (1, 2 * l, NULL);
2396 if (WORDS_BIG_ENDIAN)
2397 {
2398 first = val2->v.val_double.high;
2399 second = val2->v.val_double.low;
2400 }
2401 else
2402 {
2403 first = val2->v.val_double.low;
2404 second = val2->v.val_double.high;
2405 }
2406 dw2_asm_output_data (l, first, NULL);
2407 dw2_asm_output_data (l, second, NULL);
2408 }
2409 break;
2410 case dw_val_class_wide_int:
2411 {
2412 int i;
2413 int len = get_full_len (*val2->v.val_wide);
2414 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2415
2416 dw2_asm_output_data (1, len * l, NULL);
2417 if (WORDS_BIG_ENDIAN)
2418 for (i = len - 1; i >= 0; --i)
2419 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2420 else
2421 for (i = 0; i < len; ++i)
2422 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2423 }
2424 break;
2425 default:
2426 gcc_unreachable ();
2427 }
2428 }
2429 break;
2430 case DW_OP_regval_type:
2431 case DW_OP_GNU_regval_type:
2432 {
2433 unsigned r = val1->v.val_unsigned;
2434 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2435 gcc_assert (o);
2436 if (for_eh_or_skip >= 0)
2437 {
2438 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2439 gcc_assert (size_of_uleb128 (r)
2440 == size_of_uleb128 (val1->v.val_unsigned));
2441 }
2442 dw2_asm_output_data_uleb128 (r, NULL);
2443 dw2_asm_output_data_uleb128 (o, NULL);
2444 }
2445 break;
2446 case DW_OP_deref_type:
2447 case DW_OP_GNU_deref_type:
2448 {
2449 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2450 gcc_assert (o);
2451 dw2_asm_output_data (1, val1->v.val_int, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_convert:
2456 case DW_OP_reinterpret:
2457 case DW_OP_GNU_convert:
2458 case DW_OP_GNU_reinterpret:
2459 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2460 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2461 else
2462 {
2463 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2464 gcc_assert (o);
2465 dw2_asm_output_data_uleb128 (o, NULL);
2466 }
2467 break;
2468
2469 case DW_OP_GNU_parameter_ref:
2470 {
2471 unsigned long o;
2472 gcc_assert (val1->val_class == dw_val_class_die_ref);
2473 o = get_ref_die_offset (val1->v.val_die_ref.die);
2474 dw2_asm_output_data (4, o, NULL);
2475 }
2476 break;
2477
2478 default:
2479 /* Other codes have no operands. */
2480 break;
2481 }
2482 }
2483
2484 /* Output a sequence of location operations.
2485 The for_eh_or_skip parameter controls whether register numbers are
2486 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2487 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2488 info). This should be suppressed for the cases that have not been converted
2489 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2490
2491 void
2492 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2493 {
2494 for (; loc != NULL; loc = loc->dw_loc_next)
2495 {
2496 enum dwarf_location_atom opc = loc->dw_loc_opc;
2497 /* Output the opcode. */
2498 if (for_eh_or_skip >= 0
2499 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2500 {
2501 unsigned r = (opc - DW_OP_breg0);
2502 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2503 gcc_assert (r <= 31);
2504 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2505 }
2506 else if (for_eh_or_skip >= 0
2507 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2508 {
2509 unsigned r = (opc - DW_OP_reg0);
2510 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2511 gcc_assert (r <= 31);
2512 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2513 }
2514
2515 dw2_asm_output_data (1, opc,
2516 "%s", dwarf_stack_op_name (opc));
2517
2518 /* Output the operand(s) (if any). */
2519 output_loc_operands (loc, for_eh_or_skip);
2520 }
2521 }
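
/* Informal example of the byte stream this produces: the one-operation
   expression DW_OP_fbreg -16 (sixteen bytes below the frame base) comes
   out as the opcode byte 0x91 followed by the single SLEB128 operand
   byte 0x70.  Register-based opcodes may additionally be remapped
   through DWARF2_FRAME_REG_OUT when FOR_EH_OR_SKIP is nonnegative.  */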
2522
2523 /* Output location description stack opcode's operands (if any).
2524 The output is single bytes on a line, suitable for .cfi_escape. */
2525
2526 static void
2527 output_loc_operands_raw (dw_loc_descr_ref loc)
2528 {
2529 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2530 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2531
2532 switch (loc->dw_loc_opc)
2533 {
2534 case DW_OP_addr:
2535 case DW_OP_GNU_addr_index:
2536 case DW_OP_addrx:
2537 case DW_OP_GNU_const_index:
2538 case DW_OP_constx:
2539 case DW_OP_implicit_value:
2540 /* We cannot output addresses in .cfi_escape, only bytes. */
2541 gcc_unreachable ();
2542
2543 case DW_OP_const1u:
2544 case DW_OP_const1s:
2545 case DW_OP_pick:
2546 case DW_OP_deref_size:
2547 case DW_OP_xderef_size:
2548 fputc (',', asm_out_file);
2549 dw2_asm_output_data_raw (1, val1->v.val_int);
2550 break;
2551
2552 case DW_OP_const2u:
2553 case DW_OP_const2s:
2554 fputc (',', asm_out_file);
2555 dw2_asm_output_data_raw (2, val1->v.val_int);
2556 break;
2557
2558 case DW_OP_const4u:
2559 case DW_OP_const4s:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_raw (4, val1->v.val_int);
2562 break;
2563
2564 case DW_OP_const8u:
2565 case DW_OP_const8s:
2566 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_raw (8, val1->v.val_int);
2569 break;
2570
2571 case DW_OP_skip:
2572 case DW_OP_bra:
2573 {
2574 int offset;
2575
2576 gcc_assert (val1->val_class == dw_val_class_loc);
2577 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2578
2579 fputc (',', asm_out_file);
2580 dw2_asm_output_data_raw (2, offset);
2581 }
2582 break;
2583
2584 case DW_OP_regx:
2585 {
2586 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2587 gcc_assert (size_of_uleb128 (r)
2588 == size_of_uleb128 (val1->v.val_unsigned));
2589 fputc (',', asm_out_file);
2590 dw2_asm_output_data_uleb128_raw (r);
2591 }
2592 break;
2593
2594 case DW_OP_constu:
2595 case DW_OP_plus_uconst:
2596 case DW_OP_piece:
2597 fputc (',', asm_out_file);
2598 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2599 break;
2600
2601 case DW_OP_bit_piece:
2602 fputc (',', asm_out_file);
2603 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2604 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2605 break;
2606
2607 case DW_OP_consts:
2608 case DW_OP_breg0:
2609 case DW_OP_breg1:
2610 case DW_OP_breg2:
2611 case DW_OP_breg3:
2612 case DW_OP_breg4:
2613 case DW_OP_breg5:
2614 case DW_OP_breg6:
2615 case DW_OP_breg7:
2616 case DW_OP_breg8:
2617 case DW_OP_breg9:
2618 case DW_OP_breg10:
2619 case DW_OP_breg11:
2620 case DW_OP_breg12:
2621 case DW_OP_breg13:
2622 case DW_OP_breg14:
2623 case DW_OP_breg15:
2624 case DW_OP_breg16:
2625 case DW_OP_breg17:
2626 case DW_OP_breg18:
2627 case DW_OP_breg19:
2628 case DW_OP_breg20:
2629 case DW_OP_breg21:
2630 case DW_OP_breg22:
2631 case DW_OP_breg23:
2632 case DW_OP_breg24:
2633 case DW_OP_breg25:
2634 case DW_OP_breg26:
2635 case DW_OP_breg27:
2636 case DW_OP_breg28:
2637 case DW_OP_breg29:
2638 case DW_OP_breg30:
2639 case DW_OP_breg31:
2640 case DW_OP_fbreg:
2641 fputc (',', asm_out_file);
2642 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2643 break;
2644
2645 case DW_OP_bregx:
2646 {
2647 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2648 gcc_assert (size_of_uleb128 (r)
2649 == size_of_uleb128 (val1->v.val_unsigned));
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_uleb128_raw (r);
2652 fputc (',', asm_out_file);
2653 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2654 }
2655 break;
2656
2657 case DW_OP_implicit_pointer:
2658 case DW_OP_entry_value:
2659 case DW_OP_const_type:
2660 case DW_OP_regval_type:
2661 case DW_OP_deref_type:
2662 case DW_OP_convert:
2663 case DW_OP_reinterpret:
2664 case DW_OP_GNU_implicit_pointer:
2665 case DW_OP_GNU_entry_value:
2666 case DW_OP_GNU_const_type:
2667 case DW_OP_GNU_regval_type:
2668 case DW_OP_GNU_deref_type:
2669 case DW_OP_GNU_convert:
2670 case DW_OP_GNU_reinterpret:
2671 case DW_OP_GNU_parameter_ref:
2672 gcc_unreachable ();
2673 break;
2674
2675 default:
2676 /* Other codes have no operands. */
2677 break;
2678 }
2679 }
2680
2681 void
2682 output_loc_sequence_raw (dw_loc_descr_ref loc)
2683 {
2684 while (1)
2685 {
2686 enum dwarf_location_atom opc = loc->dw_loc_opc;
2687 /* Output the opcode. */
2688 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2689 {
2690 unsigned r = (opc - DW_OP_breg0);
2691 r = DWARF2_FRAME_REG_OUT (r, 1);
2692 gcc_assert (r <= 31);
2693 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2694 }
2695 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2696 {
2697 unsigned r = (opc - DW_OP_reg0);
2698 r = DWARF2_FRAME_REG_OUT (r, 1);
2699 gcc_assert (r <= 31);
2700 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2701 }
2702 /* Output the opcode. */
2703 fprintf (asm_out_file, "%#x", opc);
2704 output_loc_operands_raw (loc);
2705
2706 if (!loc->dw_loc_next)
2707 break;
2708 loc = loc->dw_loc_next;
2709
2710 fputc (',', asm_out_file);
2711 }
2712 }
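
/* Informal example of the raw form: the same DW_OP_fbreg -16 expression
   from the example above is printed as "0x91,0x70", comma-separated
   byte values suitable for a .cfi_escape directive, with no assembler
   relocations and with register numbers already run through
   DWARF2_FRAME_REG_OUT.  */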
2713
2714 /* This function builds a dwarf location descriptor sequence from a
2715 dw_cfa_location, adding the given OFFSET to the result of the
2716 expression. */
2717
2718 struct dw_loc_descr_node *
2719 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2720 {
2721 struct dw_loc_descr_node *head, *tmp;
2722
2723 offset += cfa->offset;
2724
2725 if (cfa->indirect)
2726 {
2727 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2728 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2729 head->dw_loc_oprnd1.val_entry = NULL;
2730 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2731 add_loc_descr (&head, tmp);
2732 loc_descr_plus_const (&head, offset);
2733 }
2734 else
2735 head = new_reg_loc_descr (cfa->reg, offset);
2736
2737 return head;
2738 }
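
/* Informal example: for a non-indirect CFA of register 7 with offset 16
   and an extra OFFSET of 8, the result is the single descriptor
   DW_OP_breg7 24.  For an indirect CFA the sequence is roughly
   DW_OP_breg<reg> <base_offset>; DW_OP_deref; DW_OP_plus_uconst <offset>,
   with the final addition adjusted or omitted by loc_descr_plus_const
   when the offset is negative or zero.  */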
2739
2740 /* This function builds a dwarf location descriptor sequence for
2741 the address at OFFSET from the CFA when the stack is aligned to
2742 ALIGNMENT bytes. */
2743
2744 struct dw_loc_descr_node *
2745 build_cfa_aligned_loc (dw_cfa_location *cfa,
2746 poly_int64 offset, HOST_WIDE_INT alignment)
2747 {
2748 struct dw_loc_descr_node *head;
2749 unsigned int dwarf_fp
2750 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2751
2752 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2753 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2754 {
2755 head = new_reg_loc_descr (dwarf_fp, 0);
2756 add_loc_descr (&head, int_loc_descriptor (alignment));
2757 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2758 loc_descr_plus_const (&head, offset);
2759 }
2760 else
2761 head = new_reg_loc_descr (dwarf_fp, offset);
2762 return head;
2763 }
2764 \f
2765 /* And now, the support for symbolic debugging information. */
2766
2767 /* .debug_str support. */
2768
2769 static void dwarf2out_init (const char *);
2770 static void dwarf2out_finish (const char *);
2771 static void dwarf2out_early_finish (const char *);
2772 static void dwarf2out_assembly_start (void);
2773 static void dwarf2out_define (unsigned int, const char *);
2774 static void dwarf2out_undef (unsigned int, const char *);
2775 static void dwarf2out_start_source_file (unsigned, const char *);
2776 static void dwarf2out_end_source_file (unsigned);
2777 static void dwarf2out_function_decl (tree);
2778 static void dwarf2out_begin_block (unsigned, unsigned);
2779 static void dwarf2out_end_block (unsigned, unsigned);
2780 static bool dwarf2out_ignore_block (const_tree);
2781 static void dwarf2out_early_global_decl (tree);
2782 static void dwarf2out_late_global_decl (tree);
2783 static void dwarf2out_type_decl (tree, int);
2784 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2785 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2786 dw_die_ref);
2787 static void dwarf2out_abstract_function (tree);
2788 static void dwarf2out_var_location (rtx_insn *);
2789 static void dwarf2out_inline_entry (tree);
2790 static void dwarf2out_size_function (tree);
2791 static void dwarf2out_begin_function (tree);
2792 static void dwarf2out_end_function (unsigned int);
2793 static void dwarf2out_register_main_translation_unit (tree unit);
2794 static void dwarf2out_set_name (tree, tree);
2795 static void dwarf2out_register_external_die (tree decl, const char *sym,
2796 unsigned HOST_WIDE_INT off);
2797 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2798 unsigned HOST_WIDE_INT *off);
2799
2800 /* The debug hooks structure. */
2801
2802 const struct gcc_debug_hooks dwarf2_debug_hooks =
2803 {
2804 dwarf2out_init,
2805 dwarf2out_finish,
2806 dwarf2out_early_finish,
2807 dwarf2out_assembly_start,
2808 dwarf2out_define,
2809 dwarf2out_undef,
2810 dwarf2out_start_source_file,
2811 dwarf2out_end_source_file,
2812 dwarf2out_begin_block,
2813 dwarf2out_end_block,
2814 dwarf2out_ignore_block,
2815 dwarf2out_source_line,
2816 dwarf2out_begin_prologue,
2817 #if VMS_DEBUGGING_INFO
2818 dwarf2out_vms_end_prologue,
2819 dwarf2out_vms_begin_epilogue,
2820 #else
2821 debug_nothing_int_charstar,
2822 debug_nothing_int_charstar,
2823 #endif
2824 dwarf2out_end_epilogue,
2825 dwarf2out_begin_function,
2826 dwarf2out_end_function, /* end_function */
2827 dwarf2out_register_main_translation_unit,
2828 dwarf2out_function_decl, /* function_decl */
2829 dwarf2out_early_global_decl,
2830 dwarf2out_late_global_decl,
2831 dwarf2out_type_decl, /* type_decl */
2832 dwarf2out_imported_module_or_decl,
2833 dwarf2out_die_ref_for_decl,
2834 dwarf2out_register_external_die,
2835 debug_nothing_tree, /* deferred_inline_function */
2836 /* The DWARF 2 backend tries to reduce debugging bloat by not
2837 emitting the abstract description of inline functions until
2838 something tries to reference them. */
2839 dwarf2out_abstract_function, /* outlining_inline_function */
2840 debug_nothing_rtx_code_label, /* label */
2841 debug_nothing_int, /* handle_pch */
2842 dwarf2out_var_location,
2843 dwarf2out_inline_entry, /* inline_entry */
2844 dwarf2out_size_function, /* size_function */
2845 dwarf2out_switch_text_section,
2846 dwarf2out_set_name,
2847 1, /* start_end_main_source_file */
2848 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2849 };
2850
2851 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2852 {
2853 dwarf2out_init,
2854 debug_nothing_charstar,
2855 debug_nothing_charstar,
2856 dwarf2out_assembly_start,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int_charstar,
2860 debug_nothing_int,
2861 debug_nothing_int_int, /* begin_block */
2862 debug_nothing_int_int, /* end_block */
2863 debug_true_const_tree, /* ignore_block */
2864 dwarf2out_source_line, /* source_line */
2865 debug_nothing_int_int_charstar, /* begin_prologue */
2866 debug_nothing_int_charstar, /* end_prologue */
2867 debug_nothing_int_charstar, /* begin_epilogue */
2868 debug_nothing_int_charstar, /* end_epilogue */
2869 debug_nothing_tree, /* begin_function */
2870 debug_nothing_int, /* end_function */
2871 debug_nothing_tree, /* register_main_translation_unit */
2872 debug_nothing_tree, /* function_decl */
2873 debug_nothing_tree, /* early_global_decl */
2874 debug_nothing_tree, /* late_global_decl */
2875 debug_nothing_tree_int, /* type_decl */
2876 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2877 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2878 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2879 debug_nothing_tree, /* deferred_inline_function */
2880 debug_nothing_tree, /* outlining_inline_function */
2881 debug_nothing_rtx_code_label, /* label */
2882 debug_nothing_int, /* handle_pch */
2883 debug_nothing_rtx_insn, /* var_location */
2884 debug_nothing_tree, /* inline_entry */
2885 debug_nothing_tree, /* size_function */
2886 debug_nothing_void, /* switch_text_section */
2887 debug_nothing_tree_tree, /* set_name */
2888 0, /* start_end_main_source_file */
2889 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2890 };
2891 \f
2892 /* NOTE: In the comments in this file, many references are made to
2893 "Debugging Information Entries". This term is abbreviated as `DIE'
2894 throughout the remainder of this file. */
2895
2896 /* An internal representation of the DWARF output is built, and then
2897 walked to generate the DWARF debugging info. The walk of the internal
2898 representation is done after the entire program has been compiled.
2899 The types below are used to describe the internal representation. */
2900
2901 /* Whether to put type DIEs into their own section .debug_types instead
2902 of making them part of the .debug_info section. This is only supported
2903 for Dwarf V4 or higher, and only if the user didn't disable it through
2904 -fno-debug-types-section. It is more efficient to put type DIEs in
2905 separate comdat sections since the linker will then be able to
2906 remove duplicates. But not all tools support .debug_types sections
2907 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2908 the type units are emitted as DW_UT_type units in .debug_info. */
2909
2910 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2911
2912 /* Various DIEs use offsets relative to the beginning of the
2913 .debug_info section to refer to each other. */
2914
2915 typedef long int dw_offset;
2916
2917 struct comdat_type_node;
2918
2919 /* The entries in the line_info table more-or-less mirror the opcodes
2920 that are used in the real dwarf line table. Arrays of these entries
2921 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2922 supported. */
2923
2924 enum dw_line_info_opcode {
2925 /* Emit DW_LNE_set_address; the operand is the label index. */
2926 LI_set_address,
2927
2928 /* Emit a row to the matrix with the given line. This may be done
2929 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2930 special opcodes. */
2931 LI_set_line,
2932
2933 /* Emit a DW_LNS_set_file. */
2934 LI_set_file,
2935
2936 /* Emit a DW_LNS_set_column. */
2937 LI_set_column,
2938
2939 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2940 LI_negate_stmt,
2941
2942 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2943 LI_set_prologue_end,
2944 LI_set_epilogue_begin,
2945
2946 /* Emit a DW_LNE_set_discriminator. */
2947 LI_set_discriminator,
2948
2949 /* Output a Fixed Advance PC; the target PC is the label index; the
2950 base PC is the previous LI_adv_address or LI_set_address entry.
2951 We only use this when emitting debug views without assembler
2952 support, at explicit user request. Ideally, we should only use
2953 it when the offset might be zero but we can't tell: it's the only
2954 way to maybe change the PC without resetting the view number. */
2955 LI_adv_address
2956 };
2957
2958 typedef struct GTY(()) dw_line_info_struct {
2959 enum dw_line_info_opcode opcode;
2960 unsigned int val;
2961 } dw_line_info_entry;
2962
2963
2964 struct GTY(()) dw_line_info_table {
2965 /* The label that marks the end of this section. */
2966 const char *end_label;
2967
2968 /* The values for the last row of the matrix, as collected in the table.
2969 These are used to minimize the changes to the next row. */
2970 unsigned int file_num;
2971 unsigned int line_num;
2972 unsigned int column_num;
2973 int discrim_num;
2974 bool is_stmt;
2975 bool in_use;
2976
2977 /* This denotes the NEXT view number.
2978
2979 If it is 0, it is known that the NEXT view will be the first view
2980 at the given PC.
2981
2982 If it is -1, we're forcing the view number to be reset, e.g. at a
2983 function entry.
2984
2985 The meaning of other nonzero values depends on whether we're
2986 computing views internally or leaving it for the assembler to do
2987 so. If we're emitting them internally, view denotes the view
2988 number since the last known advance of PC. If we're leaving it
2989 for the assembler, it denotes the LVU label number that we're
2990 going to ask the assembler to assign. */
2991 var_loc_view view;
2992
2993 /* This counts the number of symbolic views emitted in this table
2994 since the latest view reset. Its max value, over all tables,
2995 sets symview_upper_bound. */
2996 var_loc_view symviews_since_reset;
2997
2998 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2999 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3000 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3001 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3002
3003 vec<dw_line_info_entry, va_gc> *entries;
3004 };
3005
3006 /* This is an upper bound for view numbers that the assembler may
3007 assign to symbolic views output in this translation unit. It is used to
3008 decide how big a field to use to represent view numbers in
3009 symview-classed attributes. */
3010
3011 static var_loc_view symview_upper_bound;
3012
3013 /* If we're keeping track of location views and their reset points, and
3014 INSN is a reset point (i.e., it necessarily advances the PC), mark
3015 the next view in TABLE as reset. */
3016
3017 static void
3018 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3019 {
3020 if (!debug_internal_reset_location_views)
3021 return;
3022
3023 /* Maybe turn (part of?) this test into a default target hook. */
3024 int reset = 0;
3025
3026 if (targetm.reset_location_view)
3027 reset = targetm.reset_location_view (insn);
3028
3029 if (reset)
3030 ;
3031 else if (JUMP_TABLE_DATA_P (insn))
3032 reset = 1;
3033 else if (GET_CODE (insn) == USE
3034 || GET_CODE (insn) == CLOBBER
3035 || GET_CODE (insn) == ASM_INPUT
3036 || asm_noperands (insn) >= 0)
3037 ;
3038 else if (get_attr_min_length (insn) > 0)
3039 reset = 1;
3040
3041 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3042 RESET_NEXT_VIEW (table->view);
3043 }
3044
3045 /* Each DIE attribute has a field specifying the attribute kind,
3046 a link to the next attribute in the chain, and an attribute value.
3047 Attributes are typically linked below the DIE they modify. */
3048
3049 typedef struct GTY(()) dw_attr_struct {
3050 enum dwarf_attribute dw_attr;
3051 dw_val_node dw_attr_val;
3052 }
3053 dw_attr_node;
3054
3055
3056 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3057 The children of each node form a circular list linked by
3058 die_sib. die_child points to the node *before* the "first" child node. */
3059
3060 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3061 union die_symbol_or_type_node
3062 {
3063 const char * GTY ((tag ("0"))) die_symbol;
3064 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3065 }
3066 GTY ((desc ("%0.comdat_type_p"))) die_id;
3067 vec<dw_attr_node, va_gc> *die_attr;
3068 dw_die_ref die_parent;
3069 dw_die_ref die_child;
3070 dw_die_ref die_sib;
3071 dw_die_ref die_definition; /* ref from a specification to its definition */
3072 dw_offset die_offset;
3073 unsigned long die_abbrev;
3074 int die_mark;
3075 unsigned int decl_id;
3076 enum dwarf_tag die_tag;
3077 /* Die is used and must not be pruned as unused. */
3078 BOOL_BITFIELD die_perennial_p : 1;
3079 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3080 /* For an external reference to die_symbol: set if die_offset contains
3081 an extra offset relative to that symbol. */
3082 BOOL_BITFIELD with_offset : 1;
3083 /* Whether this DIE was removed from the DIE tree, for example via
3084 prune_unused_types. The DIE lookup routines do not consider such
3085 DIEs present. */
3086 BOOL_BITFIELD removed : 1;
3087 /* Lots of spare bits. */
3088 }
3089 die_node;
3090
3091 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3092 static bool early_dwarf;
3093 static bool early_dwarf_finished;
3094 struct set_early_dwarf {
3095 bool saved;
3096 set_early_dwarf () : saved(early_dwarf)
3097 {
3098 gcc_assert (! early_dwarf_finished);
3099 early_dwarf = true;
3100 }
3101 ~set_early_dwarf () { early_dwarf = saved; }
3102 };
3103
3104 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3105 #define FOR_EACH_CHILD(die, c, expr) do { \
3106 c = die->die_child; \
3107 if (c) do { \
3108 c = c->die_sib; \
3109 expr; \
3110 } while (c != die->die_child); \
3111 } while (0)
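
/* Informal usage sketch: EXPR is evaluated once per child, in sibling
   order.  For instance, counting the children of DIE:

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);  */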
3112
3113 /* The pubname structure */
3114
3115 typedef struct GTY(()) pubname_struct {
3116 dw_die_ref die;
3117 const char *name;
3118 }
3119 pubname_entry;
3120
3121
3122 struct GTY(()) dw_ranges {
3123 const char *label;
3124 /* If this is positive, it's a block number, otherwise it's a
3125 bitwise-negated index into dw_ranges_by_label. */
3126 int num;
3127 /* Index for the range list for DW_FORM_rnglistx. */
3128 unsigned int idx : 31;
3129 /* True if this range might possibly be in a different section
3130 from the previous entry. */
3131 unsigned int maybe_new_sec : 1;
3132 };
3133
3134 /* A structure to hold a macinfo entry. */
3135
3136 typedef struct GTY(()) macinfo_struct {
3137 unsigned char code;
3138 unsigned HOST_WIDE_INT lineno;
3139 const char *info;
3140 }
3141 macinfo_entry;
3142
3143
3144 struct GTY(()) dw_ranges_by_label {
3145 const char *begin;
3146 const char *end;
3147 };
3148
3149 /* The comdat type node structure. */
3150 struct GTY(()) comdat_type_node
3151 {
3152 dw_die_ref root_die;
3153 dw_die_ref type_die;
3154 dw_die_ref skeleton_die;
3155 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3156 comdat_type_node *next;
3157 };
3158
3159 /* A list of DIEs for which we can't determine ancestry (parent_die
3160 field) just yet. Later in dwarf2out_finish we will fill in the
3161 missing bits. */
3162 typedef struct GTY(()) limbo_die_struct {
3163 dw_die_ref die;
3164 /* The tree for which this DIE was created. We use this to
3165 determine ancestry later. */
3166 tree created_for;
3167 struct limbo_die_struct *next;
3168 }
3169 limbo_die_node;
3170
3171 typedef struct skeleton_chain_struct
3172 {
3173 dw_die_ref old_die;
3174 dw_die_ref new_die;
3175 struct skeleton_chain_struct *parent;
3176 }
3177 skeleton_chain_node;
3178
3179 /* Define a macro which returns nonzero for a TYPE_DECL which was
3180 implicitly generated for a type.
3181
3182 Note that, unlike the C front-end (which generates a NULL named
3183 TYPE_DECL node for each complete tagged type, each array type,
3184 and each function type node created) the C++ front-end generates
3185 a _named_ TYPE_DECL node for each tagged type node created.
3186 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3187 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3188 front-end, but for each type, tagged or not. */
3189
3190 #define TYPE_DECL_IS_STUB(decl) \
3191 (DECL_NAME (decl) == NULL_TREE \
3192 || (DECL_ARTIFICIAL (decl) \
3193 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3194 /* This is necessary for stub decls that \
3195 appear in nested inline functions. */ \
3196 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3197 && (decl_ultimate_origin (decl) \
3198 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3199
3200 /* Information concerning the compilation unit's programming
3201 language, and compiler version. */
3202
3203 /* Fixed size portion of the DWARF compilation unit header. */
3204 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3205 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3206 + (dwarf_version >= 5 ? 4 : 3))
3207
3208 /* Fixed size portion of the DWARF comdat type unit header. */
3209 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3210 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3211 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
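
/* Worked numbers, assuming 32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE == 4,
   DWARF_OFFSET_SIZE == 4): the compile unit header is 4 + 4 + 3 == 11
   bytes for DWARF 2-4 (version, debug_abbrev offset, address size) and
   4 + 4 + 4 == 12 bytes for DWARF 5, which also carries a unit type
   byte.  The comdat type unit header adds the 8-byte type signature
   and a 4-byte type offset on top of that.  */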
3212
3213 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3214 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3215 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3216
3217 /* Fixed size portion of public names info. */
3218 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3219
3220 /* Fixed size portion of the address range info. */
3221 #define DWARF_ARANGES_HEADER_SIZE \
3222 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3223 DWARF2_ADDR_SIZE * 2) \
3224 - DWARF_INITIAL_LENGTH_SIZE)
3225
3226 /* Size of padding portion in the address range info. It must be
3227 aligned to twice the pointer size. */
3228 #define DWARF_ARANGES_PAD_SIZE \
3229 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3230 DWARF2_ADDR_SIZE * 2) \
3231 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3232
3233 /* Use assembler line directives if available. */
3234 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3235 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3236 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3237 #else
3238 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3239 #endif
3240 #endif
3241
3242 /* Use assembler views in line directives if available. */
3243 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3244 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3245 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3246 #else
3247 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3248 #endif
3249 #endif
3250
3251 /* Return true if GCC configure detected assembler support for .loc. */
3252
3253 bool
3254 dwarf2out_default_as_loc_support (void)
3255 {
3256 return DWARF2_ASM_LINE_DEBUG_INFO;
3257 #if (GCC_VERSION >= 3000)
3258 # undef DWARF2_ASM_LINE_DEBUG_INFO
3259 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3260 #endif
3261 }
3262
3263 /* Return true if GCC configure detected assembler support for views
3264 in .loc directives. */
3265
3266 bool
3267 dwarf2out_default_as_locview_support (void)
3268 {
3269 return DWARF2_ASM_VIEW_DEBUG_INFO;
3270 #if (GCC_VERSION >= 3000)
3271 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3272 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3273 #endif
3274 }
3275
3276 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3277 view computation, and it refers to a view identifier for which we
3278 will not emit a label because it is known to map to a view number
3279 zero. We won't allocate the bitmap if we're not using assembler
3280 support for location views, but we have to make the variable
3281 visible for GGC and for code that will be optimized out for lack of
3282 support but that's still parsed and compiled. We could abstract it
3283 out with macros, but it's not worth it. */
3284 static GTY(()) bitmap zero_view_p;
3285
3286 /* Evaluate to TRUE iff N is known to identify the first location view
3287 at its PC. When not using assembler location view computation,
3288 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3289 and the view label numbers recorded in it are the ones known to be
3290 zero. */
3291 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3292 || (N) == (var_loc_view)-1 \
3293 || (zero_view_p \
3294 && bitmap_bit_p (zero_view_p, (N))))
3295
3296 /* Return true iff we're to emit .loc directives for the assembler to
3297 generate line number sections.
3298
3299 When we're not emitting views, all we need from the assembler is
3300 support for .loc directives.
3301
3302 If we are emitting views, we can only use the assembler's .loc
3303 support if it also supports views.
3304
3305 When the compiler is emitting the line number programs and
3306 computing view numbers itself, it resets view numbers at known PC
3307 changes and counts from that, and then it emits view numbers as
3308 literal constants in locviewlists. There are cases in which the
3309 compiler is not sure about PC changes, e.g. when extra alignment is
3310 requested for a label. In these cases, the compiler may not reset
3311 the view counter, and the potential PC advance in the line number
3312 program will use an opcode that does not reset the view counter
3313 even if the PC actually changes, so that compiler and debug info
3314 consumer can keep view numbers in sync.
3315
3316 When the compiler defers view computation to the assembler, it
3317 emits symbolic view numbers in locviewlists, with the exception of
3318 views known to be zero (forced resets, or reset after
3319 compiler-visible PC changes): instead of emitting symbols for
3320 these, we emit literal zero and assert the assembler agrees with
3321 the compiler's assessment. We could use symbolic views everywhere,
3322 instead of special-casing zero views, but then we'd be unable to
3323 optimize out locviewlists that contain only zeros. */
3324
3325 static bool
3326 output_asm_line_debug_info (void)
3327 {
3328 return (dwarf2out_as_loc_support
3329 && (dwarf2out_as_locview_support
3330 || !debug_variable_location_views));
3331 }
3332
3333 /* Minimum line offset in a special line info. opcode.
3334 This value was chosen to give a reasonable range of values. */
3335 #define DWARF_LINE_BASE -10
3336
3337 /* First special line opcode - leave room for the standard opcodes. */
3338 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3339
3340 /* Range of line offsets in a special line info. opcode. */
3341 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
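
/* These three parameters feed the standard DWARF special opcode formula,
   opcode = (line_delta - DWARF_LINE_BASE)
            + DWARF_LINE_RANGE * addr_advance
            + DWARF_LINE_OPCODE_BASE.
   With the values above, a zero address advance and a line delta of +1
   encode as the single opcode byte (1 - (-10)) + 242 * 0 + 13 == 24;
   deltas outside the representable window fall back to the standard
   DW_LNS_advance_line opcode.  */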
3342
3343 /* Flag that indicates the initial value of the is_stmt_start flag.
3344 In the present implementation, we do not mark any lines as
3345 the beginning of a source statement, because that information
3346 is not made available by the GCC front-end. */
3347 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3348
3349 /* Maximum number of operations per instruction bundle. */
3350 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3351 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3352 #endif
3353
3354 /* This location is used by calc_die_sizes() to keep track of
3355 the offset of each DIE within the .debug_info section. */
3356 static unsigned long next_die_offset;
3357
3358 /* Record the root of the DIEs built for the current compilation unit. */
3359 static GTY(()) dw_die_ref single_comp_unit_die;
3360
3361 /* A list of type DIEs that have been separated into comdat sections. */
3362 static GTY(()) comdat_type_node *comdat_type_list;
3363
3364 /* A list of CU DIEs that have been separated. */
3365 static GTY(()) limbo_die_node *cu_die_list;
3366
3367 /* A list of DIEs with a NULL parent waiting to be relocated. */
3368 static GTY(()) limbo_die_node *limbo_die_list;
3369
3370 /* A list of DIEs for which we may have to generate
3371 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3372 static GTY(()) limbo_die_node *deferred_asm_name;
3373
3374 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3375 {
3376 typedef const char *compare_type;
3377
3378 static hashval_t hash (dwarf_file_data *);
3379 static bool equal (dwarf_file_data *, const char *);
3380 };
3381
3382 /* Filenames referenced by this compilation unit. */
3383 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3384
3385 struct decl_die_hasher : ggc_ptr_hash<die_node>
3386 {
3387 typedef tree compare_type;
3388
3389 static hashval_t hash (die_node *);
3390 static bool equal (die_node *, tree);
3391 };
3392 /* A hash table of references to DIEs that describe declarations.
3393 The key is a DECL_UID() which is a unique number identifying each decl. */
3394 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3395
3396 struct GTY ((for_user)) variable_value_struct {
3397 unsigned int decl_id;
3398 vec<dw_die_ref, va_gc> *dies;
3399 };
3400
3401 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3402 {
3403 typedef tree compare_type;
3404
3405 static hashval_t hash (variable_value_struct *);
3406 static bool equal (variable_value_struct *, tree);
3407 };
3408 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3409 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3410 the DECL_CONTEXT of the referenced VAR_DECLs. */
3411 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3412
3413 struct block_die_hasher : ggc_ptr_hash<die_struct>
3414 {
3415 static hashval_t hash (die_struct *);
3416 static bool equal (die_struct *, die_struct *);
3417 };
3418
3419 /* A hash table of references to DIEs that describe COMMON blocks.
3420 The key is DECL_UID() ^ die_parent. */
3421 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3422
3423 typedef struct GTY(()) die_arg_entry_struct {
3424 dw_die_ref die;
3425 tree arg;
3426 } die_arg_entry;
3427
3428
3429 /* Node of the variable location list. */
3430 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3431 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3432 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3433 in mode of the EXPR_LIST node and first EXPR_LIST operand
3434 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3435 location or NULL for padding. For larger bitsizes,
3436 mode is 0 and first operand is a CONCAT with bitsize
3437 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3438 NULL as second operand. */
3439 rtx GTY (()) loc;
3440 const char * GTY (()) label;
3441 struct var_loc_node * GTY (()) next;
3442 var_loc_view view;
3443 };
3444
3445 /* Variable location list. */
3446 struct GTY ((for_user)) var_loc_list_def {
3447 struct var_loc_node * GTY (()) first;
3448
3449 /* Pointer to the last or last-but-one element of the
3450 chained list. If the list is empty, both first and
3451 last are NULL. If the list contains just one node,
3452 or the last node is certainly not redundant, this points
3453 to the last node; otherwise it points to the last but one.
3454 Do not mark it for GC because it is marked through the chain. */
3455 struct var_loc_node * GTY ((skip ("%h"))) last;
3456
3457 /* Pointer to the last element before section switch,
3458 if NULL, either sections weren't switched or first
3459 is after section switch. */
3460 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3461
3462 /* DECL_UID of the variable decl. */
3463 unsigned int decl_id;
3464 };
3465 typedef struct var_loc_list_def var_loc_list;
3466
3467 /* Call argument location list. */
3468 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3469 rtx GTY (()) call_arg_loc_note;
3470 const char * GTY (()) label;
3471 tree GTY (()) block;
3472 bool tail_call_p;
3473 rtx GTY (()) symbol_ref;
3474 struct call_arg_loc_node * GTY (()) next;
3475 };
3476
3477
3478 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3479 {
3480 typedef const_tree compare_type;
3481
3482 static hashval_t hash (var_loc_list *);
3483 static bool equal (var_loc_list *, const_tree);
3484 };
3485
3486 /* Table of decl location linked lists. */
3487 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3488
3489 /* Head and tail of call_arg_loc chain. */
3490 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3491 static struct call_arg_loc_node *call_arg_loc_last;
3492
3493 /* Number of call sites in the current function. */
3494 static int call_site_count = -1;
3495 /* Number of tail call sites in the current function. */
3496 static int tail_call_site_count = -1;
3497
3498 /* A cached location list. */
3499 struct GTY ((for_user)) cached_dw_loc_list_def {
3500 /* The DECL_UID of the decl that this entry describes. */
3501 unsigned int decl_id;
3502
3503 /* The cached location list. */
3504 dw_loc_list_ref loc_list;
3505 };
3506 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3507
3508 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3509 {
3510
3511 typedef const_tree compare_type;
3512
3513 static hashval_t hash (cached_dw_loc_list *);
3514 static bool equal (cached_dw_loc_list *, const_tree);
3515 };
3516
3517 /* Table of cached location lists. */
3518 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3519
3520 /* A vector of references to DIEs that are uniquely identified by their tag,
3521 presence/absence of child DIEs, and list of attribute/value pairs. */
3522 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3523
3524 /* A hash map to remember the stack usage for DWARF procedures. The value
3525 stored is the difference in stack size from just before the DWARF
3526 procedure is invoked to just after it returns. In other words, for a
3527 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3528 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3529
3530 /* A global counter for generating labels for line number data. */
3531 static unsigned int line_info_label_num;
3532
3533 /* The current table to which we should emit line number information
3534 for the current function. This will be set up at the beginning of
3535 assembly for the function. */
3536 static GTY(()) dw_line_info_table *cur_line_info_table;
3537
3538 /* The two default tables of line number info. */
3539 static GTY(()) dw_line_info_table *text_section_line_info;
3540 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3541
3542 /* The set of all non-default tables of line number info. */
3543 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3544
3545 /* A flag to tell the pubnames/pubtypes export whether there is an info
3546 section to refer to. */
3547 static bool info_section_emitted;
3548
3549 /* A pointer to the base of a table that contains a list of publicly
3550 accessible names. */
3551 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3552
3553 /* A pointer to the base of a table that contains a list of publicly
3554 accessible types. */
3555 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3556
3557 /* A pointer to the base of a table that contains a list of macro
3558 defines/undefines (and file start/end markers). */
3559 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3560
3561 /* True if .debug_macinfo or .debug_macros section is going to be
3562 emitted. */
3563 #define have_macinfo \
3564 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3565 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3566 && !macinfo_table->is_empty ())
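/* Illustration (added comment, not part of the original source): with -g3
   the debug level is DINFO_LEVEL_VERBOSE, so have_macinfo becomes true as
   soon as at least one define/undefine has been recorded in macinfo_table;
   with plain -g the level is only DINFO_LEVEL_NORMAL and no macro section
   is emitted. */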
3567
3568 /* Vector of dies for which we should generate .debug_ranges info. */
3569 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3570
3571 /* Vector of pairs of labels referenced in ranges_table. */
3572 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3573
3574 /* Whether we have location lists that need outputting. */
3575 static GTY(()) bool have_location_lists;
3576
3577 /* Unique label counter. */
3578 static GTY(()) unsigned int loclabel_num;
3579
3580 /* Unique label counter for point-of-call tables. */
3581 static GTY(()) unsigned int poc_label_num;
3582
3583 /* The last file entry emitted by maybe_emit_file(). */
3584 static GTY(()) struct dwarf_file_data * last_emitted_file;
3585
3586 /* Number of internal labels generated by gen_internal_sym(). */
3587 static GTY(()) int label_num;
3588
3589 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3590
3591 /* Instances of generic types for which we need to generate debug
3592 info that describes their generic parameters and arguments. That
3593 generation needs to happen once all types are properly laid out, so
3594 we do it at the end of compilation. */
3595 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3596
3597 /* Offset from the "steady-state frame pointer" to the frame base,
3598 within the current function. */
3599 static poly_int64 frame_pointer_fb_offset;
3600 static bool frame_pointer_fb_offset_valid;
3601
3602 static vec<dw_die_ref> base_types;
3603
3604 /* Flags to represent a set of attribute classes for attributes that represent
3605 a scalar value (bounds, pointers, ...). */
3606 enum dw_scalar_form
3607 {
3608 dw_scalar_form_constant = 0x01,
3609 dw_scalar_form_exprloc = 0x02,
3610 dw_scalar_form_reference = 0x04
3611 };
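/* Illustration (added comment, not part of the original source): helpers
   such as add_scalar_info take an OR'ed mask of these flags saying which
   representations are acceptable for an attribute, e.g.

     dw_scalar_form_constant | dw_scalar_form_exprloc

   meaning the value may be emitted either as a constant or as a DWARF
   expression. */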
3612
3613 /* Forward declarations for functions defined in this file. */
3614
3615 static int is_pseudo_reg (const_rtx);
3616 static tree type_main_variant (tree);
3617 static int is_tagged_type (const_tree);
3618 static const char *dwarf_tag_name (unsigned);
3619 static const char *dwarf_attr_name (unsigned);
3620 static const char *dwarf_form_name (unsigned);
3621 static tree decl_ultimate_origin (const_tree);
3622 static tree decl_class_context (tree);
3623 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3624 static inline enum dw_val_class AT_class (dw_attr_node *);
3625 static inline unsigned int AT_index (dw_attr_node *);
3626 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3627 static inline unsigned AT_flag (dw_attr_node *);
3628 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3629 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3630 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3631 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3632 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3633 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3634 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3635 unsigned int, unsigned char *);
3636 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3637 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3638 static inline const char *AT_string (dw_attr_node *);
3639 static enum dwarf_form AT_string_form (dw_attr_node *);
3640 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3641 static void add_AT_specification (dw_die_ref, dw_die_ref);
3642 static inline dw_die_ref AT_ref (dw_attr_node *);
3643 static inline int AT_ref_external (dw_attr_node *);
3644 static inline void set_AT_ref_external (dw_attr_node *, int);
3645 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3646 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3647 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3648 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3649 dw_loc_list_ref);
3650 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3651 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3652 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3653 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3654 static void remove_addr_table_entry (addr_table_entry *);
3655 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3656 static inline rtx AT_addr (dw_attr_node *);
3657 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3661 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3662 const char *);
3663 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3664 unsigned HOST_WIDE_INT);
3665 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3666 unsigned long, bool);
3667 static inline const char *AT_lbl (dw_attr_node *);
3668 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3669 static const char *get_AT_low_pc (dw_die_ref);
3670 static const char *get_AT_hi_pc (dw_die_ref);
3671 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3672 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3673 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3674 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3675 static bool is_cxx (void);
3676 static bool is_cxx (const_tree);
3677 static bool is_fortran (void);
3678 static bool is_ada (void);
3679 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3680 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3681 static void add_child_die (dw_die_ref, dw_die_ref);
3682 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3683 static dw_die_ref lookup_type_die (tree);
3684 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3685 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3686 static void equate_type_number_to_die (tree, dw_die_ref);
3687 static dw_die_ref lookup_decl_die (tree);
3688 static var_loc_list *lookup_decl_loc (const_tree);
3689 static void equate_decl_number_to_die (tree, dw_die_ref);
3690 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3691 static void print_spaces (FILE *);
3692 static void print_die (dw_die_ref, FILE *);
3693 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3694 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3695 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3696 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3697 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3698 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3699 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3700 struct md5_ctx *, int *);
3701 struct checksum_attributes;
3702 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3703 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3704 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3705 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3706 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3707 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3708 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3709 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3710 static int is_type_die (dw_die_ref);
3711 static int is_comdat_die (dw_die_ref);
3712 static inline bool is_template_instantiation (dw_die_ref);
3713 static int is_declaration_die (dw_die_ref);
3714 static int should_move_die_to_comdat (dw_die_ref);
3715 static dw_die_ref clone_as_declaration (dw_die_ref);
3716 static dw_die_ref clone_die (dw_die_ref);
3717 static dw_die_ref clone_tree (dw_die_ref);
3718 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3719 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3720 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3721 static dw_die_ref generate_skeleton (dw_die_ref);
3722 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3723 dw_die_ref,
3724 dw_die_ref);
3725 static void break_out_comdat_types (dw_die_ref);
3726 static void copy_decls_for_unworthy_types (dw_die_ref);
3727
3728 static void add_sibling_attributes (dw_die_ref);
3729 static void output_location_lists (dw_die_ref);
3730 static int constant_size (unsigned HOST_WIDE_INT);
3731 static unsigned long size_of_die (dw_die_ref);
3732 static void calc_die_sizes (dw_die_ref);
3733 static void calc_base_type_die_sizes (void);
3734 static void mark_dies (dw_die_ref);
3735 static void unmark_dies (dw_die_ref);
3736 static void unmark_all_dies (dw_die_ref);
3737 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3738 static unsigned long size_of_aranges (void);
3739 static enum dwarf_form value_format (dw_attr_node *);
3740 static void output_value_format (dw_attr_node *);
3741 static void output_abbrev_section (void);
3742 static void output_die_abbrevs (unsigned long, dw_die_ref);
3743 static void output_die (dw_die_ref);
3744 static void output_compilation_unit_header (enum dwarf_unit_type);
3745 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3746 static void output_comdat_type_unit (comdat_type_node *);
3747 static const char *dwarf2_name (tree, int);
3748 static void add_pubname (tree, dw_die_ref);
3749 static void add_enumerator_pubname (const char *, dw_die_ref);
3750 static void add_pubname_string (const char *, dw_die_ref);
3751 static void add_pubtype (tree, dw_die_ref);
3752 static void output_pubnames (vec<pubname_entry, va_gc> *);
3753 static void output_aranges (void);
3754 static unsigned int add_ranges (const_tree, bool = false);
3755 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3756 bool *, bool);
3757 static void output_ranges (void);
3758 static dw_line_info_table *new_line_info_table (void);
3759 static void output_line_info (bool);
3760 static void output_file_names (void);
3761 static dw_die_ref base_type_die (tree, bool);
3762 static int is_base_type (tree);
3763 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3764 static int decl_quals (const_tree);
3765 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3766 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3767 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3768 static int type_is_enum (const_tree);
3769 static unsigned int dbx_reg_number (const_rtx);
3770 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3771 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3772 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3773 enum var_init_status);
3774 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3775 enum var_init_status);
3776 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3777 enum var_init_status);
3778 static int is_based_loc (const_rtx);
3779 static bool resolve_one_addr (rtx *);
3780 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3783 enum var_init_status);
3784 struct loc_descr_context;
3785 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3786 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3787 static dw_loc_list_ref loc_list_from_tree (tree, int,
3788 struct loc_descr_context *);
3789 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3792 static tree field_type (const_tree);
3793 static unsigned int simple_type_align_in_bits (const_tree);
3794 static unsigned int simple_decl_align_in_bits (const_tree);
3795 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3796 struct vlr_context;
3797 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3798 HOST_WIDE_INT *);
3799 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3800 dw_loc_list_ref);
3801 static void add_data_member_location_attribute (dw_die_ref, tree,
3802 struct vlr_context *);
3803 static bool add_const_value_attribute (dw_die_ref, rtx);
3804 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3805 static void insert_wide_int (const wide_int &, unsigned char *, int);
3806 static void insert_float (const_rtx, unsigned char *);
3807 static rtx rtl_for_decl_location (tree);
3808 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3809 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3810 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3811 static void add_name_attribute (dw_die_ref, const char *);
3812 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3813 static void add_comp_dir_attribute (dw_die_ref);
3814 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3815 struct loc_descr_context *);
3816 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3817 struct loc_descr_context *);
3818 static void add_subscript_info (dw_die_ref, tree, bool);
3819 static void add_byte_size_attribute (dw_die_ref, tree);
3820 static void add_alignment_attribute (dw_die_ref, tree);
3821 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3822 struct vlr_context *);
3823 static void add_bit_size_attribute (dw_die_ref, tree);
3824 static void add_prototyped_attribute (dw_die_ref, tree);
3825 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3826 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3827 static void add_src_coords_attributes (dw_die_ref, tree);
3828 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3829 static void add_discr_value (dw_die_ref, dw_discr_value *);
3830 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3831 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3832 static dw_die_ref scope_die_for (tree, dw_die_ref);
3833 static inline int local_scope_p (dw_die_ref);
3834 static inline int class_scope_p (dw_die_ref);
3835 static inline int class_or_namespace_scope_p (dw_die_ref);
3836 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3837 static void add_calling_convention_attribute (dw_die_ref, tree);
3838 static const char *type_tag (const_tree);
3839 static tree member_declared_type (const_tree);
3840 #if 0
3841 static const char *decl_start_label (tree);
3842 #endif
3843 static void gen_array_type_die (tree, dw_die_ref);
3844 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3845 #if 0
3846 static void gen_entry_point_die (tree, dw_die_ref);
3847 #endif
3848 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3850 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3851 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3852 static void gen_formal_types_die (tree, dw_die_ref);
3853 static void gen_subprogram_die (tree, dw_die_ref);
3854 static void gen_variable_die (tree, tree, dw_die_ref);
3855 static void gen_const_die (tree, dw_die_ref);
3856 static void gen_label_die (tree, dw_die_ref);
3857 static void gen_lexical_block_die (tree, dw_die_ref);
3858 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3859 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3860 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3861 static dw_die_ref gen_compile_unit_die (const char *);
3862 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3863 static void gen_member_die (tree, dw_die_ref);
3864 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3865 enum debug_info_usage);
3866 static void gen_subroutine_type_die (tree, dw_die_ref);
3867 static void gen_typedef_die (tree, dw_die_ref);
3868 static void gen_type_die (tree, dw_die_ref);
3869 static void gen_block_die (tree, dw_die_ref);
3870 static void decls_for_scope (tree, dw_die_ref);
3871 static bool is_naming_typedef_decl (const_tree);
3872 static inline dw_die_ref get_context_die (tree);
3873 static void gen_namespace_die (tree, dw_die_ref);
3874 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3875 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3876 static dw_die_ref force_decl_die (tree);
3877 static dw_die_ref force_type_die (tree);
3878 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3879 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3880 static struct dwarf_file_data * lookup_filename (const char *);
3881 static void retry_incomplete_types (void);
3882 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3883 static void gen_generic_params_dies (tree);
3884 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3885 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3886 static void splice_child_die (dw_die_ref, dw_die_ref);
3887 static int file_info_cmp (const void *, const void *);
3888 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3889 const char *, var_loc_view, const char *);
3890 static void output_loc_list (dw_loc_list_ref);
3891 static char *gen_internal_sym (const char *);
3892 static bool want_pubnames (void);
3893
3894 static void prune_unmark_dies (dw_die_ref);
3895 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3896 static void prune_unused_types_mark (dw_die_ref, int);
3897 static void prune_unused_types_walk (dw_die_ref);
3898 static void prune_unused_types_walk_attribs (dw_die_ref);
3899 static void prune_unused_types_prune (dw_die_ref);
3900 static void prune_unused_types (void);
3901 static int maybe_emit_file (struct dwarf_file_data *fd);
3902 static inline const char *AT_vms_delta1 (dw_attr_node *);
3903 static inline const char *AT_vms_delta2 (dw_attr_node *);
3904 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3905 const char *, const char *);
3906 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3907 static void gen_remaining_tmpl_value_param_die_attribute (void);
3908 static bool generic_type_p (tree);
3909 static void schedule_generic_params_dies_gen (tree t);
3910 static void gen_scheduled_generic_parms_dies (void);
3911 static void resolve_variable_values (void);
3912
3913 static const char *comp_dir_string (void);
3914
3915 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3916
3917 /* enum for tracking thread-local variables whose address is really an offset
3918 relative to the TLS pointer, which will need link-time relocation, but will
3919 not need relocation by the DWARF consumer. */
3920
3921 enum dtprel_bool
3922 {
3923 dtprel_false = 0,
3924 dtprel_true = 1
3925 };
3926
3927 /* Return the operator to use for an address of a variable. For dtprel_true, we
3928 use DW_OP_const*. For regular variables, which need both link-time
3929 relocation and consumer-level relocation (e.g., to account for shared objects
3930 loaded at a random address), we use DW_OP_addr*. */
3931
3932 static inline enum dwarf_location_atom
3933 dw_addr_op (enum dtprel_bool dtprel)
3934 {
3935 if (dtprel == dtprel_true)
3936 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3937 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3938 else
3939 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3940 }
3941
3942 /* Return a pointer to a newly allocated address location description. If
3943 dwarf_split_debug_info is true, then record the address with the appropriate
3944 relocation. */
3945 static inline dw_loc_descr_ref
3946 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3947 {
3948 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3949
3950 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3951 ref->dw_loc_oprnd1.v.val_addr = addr;
3952 ref->dtprel = dtprel;
3953 if (dwarf_split_debug_info)
3954 ref->dw_loc_oprnd1.val_entry
3955 = add_addr_table_entry (addr,
3956 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3957 else
3958 ref->dw_loc_oprnd1.val_entry = NULL;
3959
3960 return ref;
3961 }
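/* Usage sketch (added comment, not part of the original source): a caller
   that wants a location description for the address of a variable whose
   DECL_RTL is a MEM might do something like

     dw_loc_descr_ref descr
       = new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false);

   yielding DW_OP_addr <address>, or DW_OP_addrx plus a .debug_addr entry
   when -gsplit-dwarf is in effect; "decl" here is a hypothetical VAR_DECL. */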
3962
3963 /* Section names used to hold DWARF debugging information. */
3964
3965 #ifndef DEBUG_INFO_SECTION
3966 #define DEBUG_INFO_SECTION ".debug_info"
3967 #endif
3968 #ifndef DEBUG_DWO_INFO_SECTION
3969 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3970 #endif
3971 #ifndef DEBUG_LTO_INFO_SECTION
3972 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3973 #endif
3974 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3975 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_ABBREV_SECTION
3978 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3979 #endif
3980 #ifndef DEBUG_LTO_ABBREV_SECTION
3981 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3982 #endif
3983 #ifndef DEBUG_DWO_ABBREV_SECTION
3984 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3985 #endif
3986 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3987 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3988 #endif
3989 #ifndef DEBUG_ARANGES_SECTION
3990 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3991 #endif
3992 #ifndef DEBUG_ADDR_SECTION
3993 #define DEBUG_ADDR_SECTION ".debug_addr"
3994 #endif
3995 #ifndef DEBUG_MACINFO_SECTION
3996 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3997 #endif
3998 #ifndef DEBUG_LTO_MACINFO_SECTION
3999 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4000 #endif
4001 #ifndef DEBUG_DWO_MACINFO_SECTION
4002 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4003 #endif
4004 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4005 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4006 #endif
4007 #ifndef DEBUG_MACRO_SECTION
4008 #define DEBUG_MACRO_SECTION ".debug_macro"
4009 #endif
4010 #ifndef DEBUG_LTO_MACRO_SECTION
4011 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4012 #endif
4013 #ifndef DEBUG_DWO_MACRO_SECTION
4014 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4015 #endif
4016 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4017 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4018 #endif
4019 #ifndef DEBUG_LINE_SECTION
4020 #define DEBUG_LINE_SECTION ".debug_line"
4021 #endif
4022 #ifndef DEBUG_LTO_LINE_SECTION
4023 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4024 #endif
4025 #ifndef DEBUG_DWO_LINE_SECTION
4026 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4027 #endif
4028 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4029 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4030 #endif
4031 #ifndef DEBUG_LOC_SECTION
4032 #define DEBUG_LOC_SECTION ".debug_loc"
4033 #endif
4034 #ifndef DEBUG_DWO_LOC_SECTION
4035 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4036 #endif
4037 #ifndef DEBUG_LOCLISTS_SECTION
4038 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4039 #endif
4040 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4041 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4042 #endif
4043 #ifndef DEBUG_PUBNAMES_SECTION
4044 #define DEBUG_PUBNAMES_SECTION \
4045 ((debug_generate_pub_sections == 2) \
4046 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4047 #endif
4048 #ifndef DEBUG_PUBTYPES_SECTION
4049 #define DEBUG_PUBTYPES_SECTION \
4050 ((debug_generate_pub_sections == 2) \
4051 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4052 #endif
4053 #ifndef DEBUG_STR_OFFSETS_SECTION
4054 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4055 #endif
4056 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4057 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4058 #endif
4059 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4060 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4061 #endif
4062 #ifndef DEBUG_STR_SECTION
4063 #define DEBUG_STR_SECTION ".debug_str"
4064 #endif
4065 #ifndef DEBUG_LTO_STR_SECTION
4066 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4067 #endif
4068 #ifndef DEBUG_STR_DWO_SECTION
4069 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_DWO_SECTION
4072 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4073 #endif
4074 #ifndef DEBUG_RANGES_SECTION
4075 #define DEBUG_RANGES_SECTION ".debug_ranges"
4076 #endif
4077 #ifndef DEBUG_RNGLISTS_SECTION
4078 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4079 #endif
4080 #ifndef DEBUG_LINE_STR_SECTION
4081 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4082 #endif
4083 #ifndef DEBUG_LTO_LINE_STR_SECTION
4084 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4085 #endif
4086
4087 /* Standard ELF section names for compiled code and data. */
4088 #ifndef TEXT_SECTION_NAME
4089 #define TEXT_SECTION_NAME ".text"
4090 #endif
4091
4092 /* Section flags for .debug_str section. */
4093 #define DEBUG_STR_SECTION_FLAGS \
4094 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4095 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4096 : SECTION_DEBUG)
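/* Added note (not part of the original source): the trailing "| 1" above
   records the entity size (1 byte) of the mergeable string section in the
   low SECTION_ENTSIZE bits of the flags word. */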
4097
4098 /* Section flags for .debug_str.dwo section. */
4099 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4100
4101 /* Attribute used to refer to the macro section. */
4102 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4103 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
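/* Added note (not part of the original source): this resolves to
   DW_AT_macros for DWARF 5 and later, to DW_AT_macro_info when
   -gstrict-dwarf is given with an earlier DWARF version, and to the GNU
   extension DW_AT_GNU_macros otherwise. */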
4104
4105 /* Labels we insert at the beginning of sections so that we can refer to
4106 them instead of the section names themselves. */
4107
4108 #ifndef TEXT_SECTION_LABEL
4109 #define TEXT_SECTION_LABEL "Ltext"
4110 #endif
4111 #ifndef COLD_TEXT_SECTION_LABEL
4112 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4113 #endif
4114 #ifndef DEBUG_LINE_SECTION_LABEL
4115 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4116 #endif
4117 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4118 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4119 #endif
4120 #ifndef DEBUG_INFO_SECTION_LABEL
4121 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4122 #endif
4123 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4124 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4125 #endif
4126 #ifndef DEBUG_ABBREV_SECTION_LABEL
4127 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4128 #endif
4129 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4130 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4131 #endif
4132 #ifndef DEBUG_ADDR_SECTION_LABEL
4133 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4134 #endif
4135 #ifndef DEBUG_LOC_SECTION_LABEL
4136 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4137 #endif
4138 #ifndef DEBUG_RANGES_SECTION_LABEL
4139 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4140 #endif
4141 #ifndef DEBUG_MACINFO_SECTION_LABEL
4142 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4143 #endif
4144 #ifndef DEBUG_MACRO_SECTION_LABEL
4145 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4146 #endif
4147 #define SKELETON_COMP_DIE_ABBREV 1
4148 #define SKELETON_TYPE_DIE_ABBREV 2
4149
4150 /* Definitions of defaults for formats and names of various special
4151 (artificial) labels which may be generated within this file (when the -g
4152 option is used and DWARF2_DEBUGGING_INFO is in effect).
4153 If necessary, these may be overridden from within the tm.h file, but
4154 typically, overriding these defaults is unnecessary. */
4155
4156 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4171
4172 #ifndef TEXT_END_LABEL
4173 #define TEXT_END_LABEL "Letext"
4174 #endif
4175 #ifndef COLD_END_LABEL
4176 #define COLD_END_LABEL "Letext_cold"
4177 #endif
4178 #ifndef BLOCK_BEGIN_LABEL
4179 #define BLOCK_BEGIN_LABEL "LBB"
4180 #endif
4181 #ifndef BLOCK_INLINE_ENTRY_LABEL
4182 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4183 #endif
4184 #ifndef BLOCK_END_LABEL
4185 #define BLOCK_END_LABEL "LBE"
4186 #endif
4187 #ifndef LINE_CODE_LABEL
4188 #define LINE_CODE_LABEL "LM"
4189 #endif
4190
4191 \f
4192 /* Return the root of the DIEs built for the current compilation unit. */
4193 static dw_die_ref
4194 comp_unit_die (void)
4195 {
4196 if (!single_comp_unit_die)
4197 single_comp_unit_die = gen_compile_unit_die (NULL);
4198 return single_comp_unit_die;
4199 }
4200
4201 /* We allow a language front-end to designate a function that is to be
4202 called to "demangle" any name before it is put into a DIE. */
4203
4204 static const char *(*demangle_name_func) (const char *);
4205
4206 void
4207 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4208 {
4209 demangle_name_func = func;
4210 }
4211
4212 /* Test if rtl node points to a pseudo register. */
4213
4214 static inline int
4215 is_pseudo_reg (const_rtx rtl)
4216 {
4217 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4218 || (GET_CODE (rtl) == SUBREG
4219 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4220 }
4221
4222 /* Return a reference to a type, with its const and volatile qualifiers
4223 removed. */
4224
4225 static inline tree
4226 type_main_variant (tree type)
4227 {
4228 type = TYPE_MAIN_VARIANT (type);
4229
4230 /* ??? There really should be only one main variant among any group of
4231 variants of a given type (and all of the MAIN_VARIANT values for all
4232 members of the group should point to that one type) but sometimes the C
4233 front-end messes this up for array types, so we work around that bug
4234 here. */
4235 if (TREE_CODE (type) == ARRAY_TYPE)
4236 while (type != TYPE_MAIN_VARIANT (type))
4237 type = TYPE_MAIN_VARIANT (type);
4238
4239 return type;
4240 }
4241
4242 /* Return nonzero if the given type node represents a tagged type. */
4243
4244 static inline int
4245 is_tagged_type (const_tree type)
4246 {
4247 enum tree_code code = TREE_CODE (type);
4248
4249 return (code == RECORD_TYPE || code == UNION_TYPE
4250 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4251 }
4252
4253 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4254
4255 static void
4256 get_ref_die_offset_label (char *label, dw_die_ref ref)
4257 {
4258 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4259 }
4260
4261 /* Return die_offset of a DIE reference to a base type. */
4262
4263 static unsigned long int
4264 get_base_type_offset (dw_die_ref ref)
4265 {
4266 if (ref->die_offset)
4267 return ref->die_offset;
4268 if (comp_unit_die ()->die_abbrev)
4269 {
4270 calc_base_type_die_sizes ();
4271 gcc_assert (ref->die_offset);
4272 }
4273 return ref->die_offset;
4274 }
4275
4276 /* Return die_offset of a DIE reference other than base type. */
4277
4278 static unsigned long int
4279 get_ref_die_offset (dw_die_ref ref)
4280 {
4281 gcc_assert (ref->die_offset);
4282 return ref->die_offset;
4283 }
4284
4285 /* Convert a DIE tag into its string name. */
4286
4287 static const char *
4288 dwarf_tag_name (unsigned int tag)
4289 {
4290 const char *name = get_DW_TAG_name (tag);
4291
4292 if (name != NULL)
4293 return name;
4294
4295 return "DW_TAG_<unknown>";
4296 }
4297
4298 /* Convert a DWARF attribute code into its string name. */
4299
4300 static const char *
4301 dwarf_attr_name (unsigned int attr)
4302 {
4303 const char *name;
4304
4305 switch (attr)
4306 {
4307 #if VMS_DEBUGGING_INFO
4308 case DW_AT_HP_prologue:
4309 return "DW_AT_HP_prologue";
4310 #else
4311 case DW_AT_MIPS_loop_unroll_factor:
4312 return "DW_AT_MIPS_loop_unroll_factor";
4313 #endif
4314
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_epilogue:
4317 return "DW_AT_HP_epilogue";
4318 #else
4319 case DW_AT_MIPS_stride:
4320 return "DW_AT_MIPS_stride";
4321 #endif
4322 }
4323
4324 name = get_DW_AT_name (attr);
4325
4326 if (name != NULL)
4327 return name;
4328
4329 return "DW_AT_<unknown>";
4330 }
4331
4332 /* Convert a DWARF value form code into its string name. */
4333
4334 static const char *
4335 dwarf_form_name (unsigned int form)
4336 {
4337 const char *name = get_DW_FORM_name (form);
4338
4339 if (name != NULL)
4340 return name;
4341
4342 return "DW_FORM_<unknown>";
4343 }
4344 \f
4345 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4346 instance of an inlined instance of a decl which is local to an inline
4347 function, so we have to trace all of the way back through the origin chain
4348 to find out what sort of node actually served as the original seed for the
4349 given block. */
4350
4351 static tree
4352 decl_ultimate_origin (const_tree decl)
4353 {
4354 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4355 return NULL_TREE;
4356
4357 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4358 we're trying to output the abstract instance of this function. */
4359 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4360 return NULL_TREE;
4361
4362 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4363 most distant ancestor, this should never happen. */
4364 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4365
4366 return DECL_ABSTRACT_ORIGIN (decl);
4367 }
4368
4369 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4370 of a virtual function may refer to a base class, so we check the 'this'
4371 parameter. */
4372
4373 static tree
4374 decl_class_context (tree decl)
4375 {
4376 tree context = NULL_TREE;
4377
4378 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4379 context = DECL_CONTEXT (decl);
4380 else
4381 context = TYPE_MAIN_VARIANT
4382 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4383
4384 if (context && !TYPE_P (context))
4385 context = NULL_TREE;
4386
4387 return context;
4388 }
4389 \f
4390 /* Add an attribute/value pair to a DIE. */
4391
4392 static inline void
4393 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4394 {
4395 /* Maybe this should be an assert? */
4396 if (die == NULL)
4397 return;
4398
4399 if (flag_checking)
4400 {
4401 /* Check we do not add duplicate attrs. Can't use get_AT here
4402 because that recurses to the specification/abstract origin DIE. */
4403 dw_attr_node *a;
4404 unsigned ix;
4405 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4406 gcc_assert (a->dw_attr != attr->dw_attr);
4407 }
4408
4409 vec_safe_reserve (die->die_attr, 1);
4410 vec_safe_push (die->die_attr, *attr);
4411 }
4412
4413 static inline enum dw_val_class
4414 AT_class (dw_attr_node *a)
4415 {
4416 return a->dw_attr_val.val_class;
4417 }
4418
4419 /* Return the index for any attribute that will be referenced with a
4420 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4421 indices are stored in dw_attr_val.v.val_str for reference counting
4422 pruning. */
4423
4424 static inline unsigned int
4425 AT_index (dw_attr_node *a)
4426 {
4427 if (AT_class (a) == dw_val_class_str)
4428 return a->dw_attr_val.v.val_str->index;
4429 else if (a->dw_attr_val.val_entry != NULL)
4430 return a->dw_attr_val.val_entry->index;
4431 return NOT_INDEXED;
4432 }
4433
4434 /* Add a flag value attribute to a DIE. */
4435
4436 static inline void
4437 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4438 {
4439 dw_attr_node attr;
4440
4441 attr.dw_attr = attr_kind;
4442 attr.dw_attr_val.val_class = dw_val_class_flag;
4443 attr.dw_attr_val.val_entry = NULL;
4444 attr.dw_attr_val.v.val_flag = flag;
4445 add_dwarf_attr (die, &attr);
4446 }
4447
4448 static inline unsigned
4449 AT_flag (dw_attr_node *a)
4450 {
4451 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4452 return a->dw_attr_val.v.val_flag;
4453 }
4454
4455 /* Add a signed integer attribute value to a DIE. */
4456
4457 static inline void
4458 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_const;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_int = int_val;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline HOST_WIDE_INT
4470 AT_int (dw_attr_node *a)
4471 {
4472 gcc_assert (a && (AT_class (a) == dw_val_class_const
4473 || AT_class (a) == dw_val_class_const_implicit));
4474 return a->dw_attr_val.v.val_int;
4475 }
4476
4477 /* Add an unsigned integer attribute value to a DIE. */
4478
4479 static inline void
4480 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4481 unsigned HOST_WIDE_INT unsigned_val)
4482 {
4483 dw_attr_node attr;
4484
4485 attr.dw_attr = attr_kind;
4486 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4487 attr.dw_attr_val.val_entry = NULL;
4488 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4489 add_dwarf_attr (die, &attr);
4490 }
4491
4492 static inline unsigned HOST_WIDE_INT
4493 AT_unsigned (dw_attr_node *a)
4494 {
4495 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4496 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4497 return a->dw_attr_val.v.val_unsigned;
4498 }
4499
4500 /* Add an unsigned wide integer attribute value to a DIE. */
4501
4502 static inline void
4503 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4504 const wide_int& w)
4505 {
4506 dw_attr_node attr;
4507
4508 attr.dw_attr = attr_kind;
4509 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4510 attr.dw_attr_val.val_entry = NULL;
4511 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4512 *attr.dw_attr_val.v.val_wide = w;
4513 add_dwarf_attr (die, &attr);
4514 }
4515
4516 /* Add an unsigned double integer attribute value to a DIE. */
4517
4518 static inline void
4519 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4520 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4521 {
4522 dw_attr_node attr;
4523
4524 attr.dw_attr = attr_kind;
4525 attr.dw_attr_val.val_class = dw_val_class_const_double;
4526 attr.dw_attr_val.val_entry = NULL;
4527 attr.dw_attr_val.v.val_double.high = high;
4528 attr.dw_attr_val.v.val_double.low = low;
4529 add_dwarf_attr (die, &attr);
4530 }
4531
4532 /* Add a floating point (byte vector) attribute value to a DIE. */
4533
4534 static inline void
4535 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4536 unsigned int length, unsigned int elt_size, unsigned char *array)
4537 {
4538 dw_attr_node attr;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_vec;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_vec.length = length;
4544 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4545 attr.dw_attr_val.v.val_vec.array = array;
4546 add_dwarf_attr (die, &attr);
4547 }
4548
4549 /* Add an 8-byte data attribute value to a DIE. */
4550
4551 static inline void
4552 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4553 unsigned char data8[8])
4554 {
4555 dw_attr_node attr;
4556
4557 attr.dw_attr = attr_kind;
4558 attr.dw_attr_val.val_class = dw_val_class_data8;
4559 attr.dw_attr_val.val_entry = NULL;
4560 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4561 add_dwarf_attr (die, &attr);
4562 }
4563
4564 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4565 dwarf_split_debug_info, address attributes in dies destined for the
4566 final executable have force_direct set to avoid using indexed
4567 references. */
4568
4569 static inline void
4570 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4571 bool force_direct)
4572 {
4573 dw_attr_node attr;
4574 char * lbl_id;
4575
4576 lbl_id = xstrdup (lbl_low);
4577 attr.dw_attr = DW_AT_low_pc;
4578 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4579 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4580 if (dwarf_split_debug_info && !force_direct)
4581 attr.dw_attr_val.val_entry
4582 = add_addr_table_entry (lbl_id, ate_kind_label);
4583 else
4584 attr.dw_attr_val.val_entry = NULL;
4585 add_dwarf_attr (die, &attr);
4586
4587 attr.dw_attr = DW_AT_high_pc;
4588 if (dwarf_version < 4)
4589 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4590 else
4591 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4592 lbl_id = xstrdup (lbl_high);
4593 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4594 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4595 && dwarf_split_debug_info && !force_direct)
4596 attr.dw_attr_val.val_entry
4597 = add_addr_table_entry (lbl_id, ate_kind_label);
4598 else
4599 attr.dw_attr_val.val_entry = NULL;
4600 add_dwarf_attr (die, &attr);
4601 }
4602
4603 /* Hash and equality functions for debug_str_hash. */
4604
4605 hashval_t
4606 indirect_string_hasher::hash (indirect_string_node *x)
4607 {
4608 return htab_hash_string (x->str);
4609 }
4610
4611 bool
4612 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4613 {
4614 return strcmp (x1->str, x2) == 0;
4615 }
4616
4617 /* Add STR to the given string hash table. */
4618
4619 static struct indirect_string_node *
4620 find_AT_string_in_table (const char *str,
4621 hash_table<indirect_string_hasher> *table)
4622 {
4623 struct indirect_string_node *node;
4624
4625 indirect_string_node **slot
4626 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4627 if (*slot == NULL)
4628 {
4629 node = ggc_cleared_alloc<indirect_string_node> ();
4630 node->str = ggc_strdup (str);
4631 *slot = node;
4632 }
4633 else
4634 node = *slot;
4635
4636 node->refcount++;
4637 return node;
4638 }
4639
4640 /* Add STR to the indirect string hash table. */
4641
4642 static struct indirect_string_node *
4643 find_AT_string (const char *str)
4644 {
4645 if (! debug_str_hash)
4646 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4647
4648 return find_AT_string_in_table (str, debug_str_hash);
4649 }
4650
4651 /* Add a string attribute value to a DIE. */
4652
4653 static inline void
4654 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4655 {
4656 dw_attr_node attr;
4657 struct indirect_string_node *node;
4658
4659 node = find_AT_string (str);
4660
4661 attr.dw_attr = attr_kind;
4662 attr.dw_attr_val.val_class = dw_val_class_str;
4663 attr.dw_attr_val.val_entry = NULL;
4664 attr.dw_attr_val.v.val_str = node;
4665 add_dwarf_attr (die, &attr);
4666 }
4667
4668 static inline const char *
4669 AT_string (dw_attr_node *a)
4670 {
4671 gcc_assert (a && AT_class (a) == dw_val_class_str);
4672 return a->dw_attr_val.v.val_str->str;
4673 }
4674
4675 /* Call this function directly to bypass AT_string_form's logic to put
4676 the string inline in the die. */
4677
4678 static void
4679 set_indirect_string (struct indirect_string_node *node)
4680 {
4681 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4682 /* Already indirect is a no op. */
4683 if (node->form == DW_FORM_strp
4684 || node->form == DW_FORM_line_strp
4685 || node->form == dwarf_FORM (DW_FORM_strx))
4686 {
4687 gcc_assert (node->label);
4688 return;
4689 }
4690 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4691 ++dw2_string_counter;
4692 node->label = xstrdup (label);
4693
4694 if (!dwarf_split_debug_info)
4695 {
4696 node->form = DW_FORM_strp;
4697 node->index = NOT_INDEXED;
4698 }
4699 else
4700 {
4701 node->form = dwarf_FORM (DW_FORM_strx);
4702 node->index = NO_INDEX_ASSIGNED;
4703 }
4704 }
4705
4706 /* A helper function for dwarf2out_finish, called to reset the indirect
4707 string decisions made for early LTO DWARF output before emitting the
4708 fat object's DWARF output. */
4709
4710 int
4711 reset_indirect_string (indirect_string_node **h, void *)
4712 {
4713 struct indirect_string_node *node = *h;
4714 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4715 {
4716 free (node->label);
4717 node->label = NULL;
4718 node->form = (dwarf_form) 0;
4719 node->index = 0;
4720 }
4721 return 1;
4722 }
4723
4724 /* Find out whether a string should be output inline in DIE
4725 or out-of-line in .debug_str section. */
4726
4727 static enum dwarf_form
4728 find_string_form (struct indirect_string_node *node)
4729 {
4730 unsigned int len;
4731
4732 if (node->form)
4733 return node->form;
4734
4735 len = strlen (node->str) + 1;
4736
4737 /* If the string is shorter than or equal to the size of the reference,
4738 it is always better to put it inline. */
4739 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4740 return node->form = DW_FORM_string;
4741
4742 /* If we cannot expect the linker to merge strings in the .debug_str
4743 section, only put it into .debug_str if it is worthwhile even in
4744 this single module. */
4745 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4746 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4747 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4748 return node->form = DW_FORM_string;
4749
4750 set_indirect_string (node);
4751
4752 return node->form;
4753 }
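/* Worked example (added comment, not part of the original source): with
   DWARF_OFFSET_SIZE == 4 and no mergeable-string support, a 20-character
   string (len == 21 including the terminating NUL) referenced once stays
   inline, since (21 - 4) * 1 <= 21, while the same string referenced twice
   is moved to .debug_str, since (21 - 4) * 2 > 21. */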
4754
4755 /* Find out whether the string referenced from the attribute should be
4756 output inline in DIE or out-of-line in .debug_str section. */
4757
4758 static enum dwarf_form
4759 AT_string_form (dw_attr_node *a)
4760 {
4761 gcc_assert (a && AT_class (a) == dw_val_class_str);
4762 return find_string_form (a->dw_attr_val.v.val_str);
4763 }
4764
4765 /* Add a DIE reference attribute value to a DIE. */
4766
4767 static inline void
4768 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4769 {
4770 dw_attr_node attr;
4771 gcc_checking_assert (targ_die != NULL);
4772
4773 /* With LTO we can end up trying to reference something we didn't create
4774 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4775 if (targ_die == NULL)
4776 return;
4777
4778 attr.dw_attr = attr_kind;
4779 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4780 attr.dw_attr_val.val_entry = NULL;
4781 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4782 attr.dw_attr_val.v.val_die_ref.external = 0;
4783 add_dwarf_attr (die, &attr);
4784 }
4785
4786 /* Change DIE reference REF to point to NEW_DIE instead. */
4787
4788 static inline void
4789 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4790 {
4791 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4792 ref->dw_attr_val.v.val_die_ref.die = new_die;
4793 ref->dw_attr_val.v.val_die_ref.external = 0;
4794 }
4795
4796 /* Add an AT_specification attribute to a DIE, and also make the back
4797 pointer from the specification to the definition. */
4798
4799 static inline void
4800 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4801 {
4802 add_AT_die_ref (die, DW_AT_specification, targ_die);
4803 gcc_assert (!targ_die->die_definition);
4804 targ_die->die_definition = die;
4805 }
4806
4807 static inline dw_die_ref
4808 AT_ref (dw_attr_node *a)
4809 {
4810 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4811 return a->dw_attr_val.v.val_die_ref.die;
4812 }
4813
4814 static inline int
4815 AT_ref_external (dw_attr_node *a)
4816 {
4817 if (a && AT_class (a) == dw_val_class_die_ref)
4818 return a->dw_attr_val.v.val_die_ref.external;
4819
4820 return 0;
4821 }
4822
4823 static inline void
4824 set_AT_ref_external (dw_attr_node *a, int i)
4825 {
4826 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4827 a->dw_attr_val.v.val_die_ref.external = i;
4828 }
4829
4830 /* Add an FDE reference attribute value to a DIE. */
4831
4832 static inline void
4833 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4834 {
4835 dw_attr_node attr;
4836
4837 attr.dw_attr = attr_kind;
4838 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4839 attr.dw_attr_val.val_entry = NULL;
4840 attr.dw_attr_val.v.val_fde_index = targ_fde;
4841 add_dwarf_attr (die, &attr);
4842 }
4843
4844 /* Add a location description attribute value to a DIE. */
4845
4846 static inline void
4847 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4848 {
4849 dw_attr_node attr;
4850
4851 attr.dw_attr = attr_kind;
4852 attr.dw_attr_val.val_class = dw_val_class_loc;
4853 attr.dw_attr_val.val_entry = NULL;
4854 attr.dw_attr_val.v.val_loc = loc;
4855 add_dwarf_attr (die, &attr);
4856 }
4857
4858 static inline dw_loc_descr_ref
4859 AT_loc (dw_attr_node *a)
4860 {
4861 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4862 return a->dw_attr_val.v.val_loc;
4863 }
4864
4865 static inline void
4866 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4867 {
4868 dw_attr_node attr;
4869
4870 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4871 return;
4872
4873 attr.dw_attr = attr_kind;
4874 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4875 attr.dw_attr_val.val_entry = NULL;
4876 attr.dw_attr_val.v.val_loc_list = loc_list;
4877 add_dwarf_attr (die, &attr);
4878 have_location_lists = true;
4879 }
4880
4881 static inline dw_loc_list_ref
4882 AT_loc_list (dw_attr_node *a)
4883 {
4884 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4885 return a->dw_attr_val.v.val_loc_list;
4886 }
4887
4888 /* Add a view list attribute to DIE. It must have a DW_AT_location
4889 attribute, because the view list complements the location list. */
4890
4891 static inline void
4892 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4893 {
4894 dw_attr_node attr;
4895
4896 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4897 return;
4898
4899 attr.dw_attr = attr_kind;
4900 attr.dw_attr_val.val_class = dw_val_class_view_list;
4901 attr.dw_attr_val.val_entry = NULL;
4902 attr.dw_attr_val.v.val_view_list = die;
4903 add_dwarf_attr (die, &attr);
4904 gcc_checking_assert (get_AT (die, DW_AT_location));
4905 gcc_assert (have_location_lists);
4906 }
4907
4908 /* Return a pointer to the location list referenced by the attribute.
4909 If the named attribute is a view list, look up the corresponding
4910 DW_AT_location attribute and return its location list. */
4911
4912 static inline dw_loc_list_ref *
4913 AT_loc_list_ptr (dw_attr_node *a)
4914 {
4915 gcc_assert (a);
4916 switch (AT_class (a))
4917 {
4918 case dw_val_class_loc_list:
4919 return &a->dw_attr_val.v.val_loc_list;
4920 case dw_val_class_view_list:
4921 {
4922 dw_attr_node *l;
4923 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4924 if (!l)
4925 return NULL;
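/* Added comment (not part of the original source): the view-list attribute
   is expected to be added immediately after the corresponding
   DW_AT_location attribute, so the location attribute should sit directly
   before it in the DIE's attribute vector. */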
4926 gcc_checking_assert (l + 1 == a);
4927 return AT_loc_list_ptr (l);
4928 }
4929 default:
4930 gcc_unreachable ();
4931 }
4932 }
4933
4934 /* Return the location attribute value associated with a view list
4935 attribute value. */
4936
4937 static inline dw_val_node *
4938 view_list_to_loc_list_val_node (dw_val_node *val)
4939 {
4940 gcc_assert (val->val_class == dw_val_class_view_list);
4941 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4942 if (!loc)
4943 return NULL;
4944 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4945 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4946 return &loc->dw_attr_val;
4947 }
4948
4949 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4950 {
4951 static hashval_t hash (addr_table_entry *);
4952 static bool equal (addr_table_entry *, addr_table_entry *);
4953 };
4954
4955 /* Table of entries into the .debug_addr section. */
4956
4957 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4958
4959 /* Hash an address_table_entry. */
4960
4961 hashval_t
4962 addr_hasher::hash (addr_table_entry *a)
4963 {
4964 inchash::hash hstate;
4965 switch (a->kind)
4966 {
4967 case ate_kind_rtx:
4968 hstate.add_int (0);
4969 break;
4970 case ate_kind_rtx_dtprel:
4971 hstate.add_int (1);
4972 break;
4973 case ate_kind_label:
4974 return htab_hash_string (a->addr.label);
4975 default:
4976 gcc_unreachable ();
4977 }
4978 inchash::add_rtx (a->addr.rtl, hstate);
4979 return hstate.end ();
4980 }
4981
4982 /* Determine equality for two address_table_entries. */
4983
4984 bool
4985 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4986 {
4987 if (a1->kind != a2->kind)
4988 return 0;
4989 switch (a1->kind)
4990 {
4991 case ate_kind_rtx:
4992 case ate_kind_rtx_dtprel:
4993 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4994 case ate_kind_label:
4995 return strcmp (a1->addr.label, a2->addr.label) == 0;
4996 default:
4997 gcc_unreachable ();
4998 }
4999 }
5000
5001 /* Initialize an addr_table_entry. */
5002
5003 void
5004 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5005 {
5006 e->kind = kind;
5007 switch (kind)
5008 {
5009 case ate_kind_rtx:
5010 case ate_kind_rtx_dtprel:
5011 e->addr.rtl = (rtx) addr;
5012 break;
5013 case ate_kind_label:
5014 e->addr.label = (char *) addr;
5015 break;
5016 }
5017 e->refcount = 0;
5018 e->index = NO_INDEX_ASSIGNED;
5019 }
5020
5021 /* Add an address table entry for ADDR of kind KIND to the table, creating
5022 it if it is not already present. Defer setting an index until output time. */
5023
5024 static addr_table_entry *
5025 add_addr_table_entry (void *addr, enum ate_kind kind)
5026 {
5027 addr_table_entry *node;
5028 addr_table_entry finder;
5029
5030 gcc_assert (dwarf_split_debug_info);
5031 if (! addr_index_table)
5032 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5033 init_addr_table_entry (&finder, kind, addr);
5034 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5035
5036 if (*slot == HTAB_EMPTY_ENTRY)
5037 {
5038 node = ggc_cleared_alloc<addr_table_entry> ();
5039 init_addr_table_entry (node, kind, addr);
5040 *slot = node;
5041 }
5042 else
5043 node = *slot;
5044
5045 node->refcount++;
5046 return node;
5047 }
5048
5049 /* Remove an entry from the addr table by decrementing its refcount.
5050 Strictly, decrementing the refcount would be enough, but the
5051 assertion that the entry is actually in the table has found
5052 bugs. */
5053
5054 static void
5055 remove_addr_table_entry (addr_table_entry *entry)
5056 {
5057 gcc_assert (dwarf_split_debug_info && addr_index_table);
5058 /* After an index is assigned, the table is frozen. */
5059 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5060 entry->refcount--;
5061 }
5062
5063 /* Given a location list, remove all addresses it refers to from the
5064 address_table. */
5065
5066 static void
5067 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5068 {
5069 for (; descr; descr = descr->dw_loc_next)
5070 if (descr->dw_loc_oprnd1.val_entry != NULL)
5071 {
5072 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5073 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5074 }
5075 }
5076
5077 /* A helper function for dwarf2out_finish called through
5078 htab_traverse. Assign an addr_table_entry its index. All entries
5079 must be collected into the table when this function is called,
5080 because the indexing code relies on htab_traverse to traverse nodes
5081 in the same order for each run. */
5082
5083 int
5084 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5085 {
5086 addr_table_entry *node = *h;
5087
5088 /* Don't index unreferenced nodes. */
5089 if (node->refcount == 0)
5090 return 1;
5091
5092 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5093 node->index = *index;
5094 *index += 1;
5095
5096 return 1;
5097 }
5098
5099 /* Add an address constant attribute value to a DIE. When using
5100 dwarf_split_debug_info, address attributes in dies destined for the
5101 final executable should be direct references--setting the parameter
5102 force_direct ensures this behavior. */
5103
5104 static inline void
5105 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5106 bool force_direct)
5107 {
5108 dw_attr_node attr;
5109
5110 attr.dw_attr = attr_kind;
5111 attr.dw_attr_val.val_class = dw_val_class_addr;
5112 attr.dw_attr_val.v.val_addr = addr;
5113 if (dwarf_split_debug_info && !force_direct)
5114 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5115 else
5116 attr.dw_attr_val.val_entry = NULL;
5117 add_dwarf_attr (die, &attr);
5118 }
5119
5120 /* Get the RTX from an address DIE attribute. */
5121
5122 static inline rtx
5123 AT_addr (dw_attr_node *a)
5124 {
5125 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5126 return a->dw_attr_val.v.val_addr;
5127 }
5128
5129 /* Add a file attribute value to a DIE. */
5130
5131 static inline void
5132 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5133 struct dwarf_file_data *fd)
5134 {
5135 dw_attr_node attr;
5136
5137 attr.dw_attr = attr_kind;
5138 attr.dw_attr_val.val_class = dw_val_class_file;
5139 attr.dw_attr_val.val_entry = NULL;
5140 attr.dw_attr_val.v.val_file = fd;
5141 add_dwarf_attr (die, &attr);
5142 }
5143
5144 /* Get the dwarf_file_data from a file DIE attribute. */
5145
5146 static inline struct dwarf_file_data *
5147 AT_file (dw_attr_node *a)
5148 {
5149 gcc_assert (a && (AT_class (a) == dw_val_class_file
5150 || AT_class (a) == dw_val_class_file_implicit));
5151 return a->dw_attr_val.v.val_file;
5152 }
5153
5154 /* Add a vms delta attribute value to a DIE. */
5155
5156 static inline void
5157 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5158 const char *lbl1, const char *lbl2)
5159 {
5160 dw_attr_node attr;
5161
5162 attr.dw_attr = attr_kind;
5163 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5164 attr.dw_attr_val.val_entry = NULL;
5165 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5166 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5167 add_dwarf_attr (die, &attr);
5168 }
5169
5170 /* Add a symbolic view identifier attribute value to a DIE. */
5171
5172 static inline void
5173 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5174 const char *view_label)
5175 {
5176 dw_attr_node attr;
5177
5178 attr.dw_attr = attr_kind;
5179 attr.dw_attr_val.val_class = dw_val_class_symview;
5180 attr.dw_attr_val.val_entry = NULL;
5181 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5182 add_dwarf_attr (die, &attr);
5183 }
5184
5185 /* Add a label identifier attribute value to a DIE. */
5186
5187 static inline void
5188 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5189 const char *lbl_id)
5190 {
5191 dw_attr_node attr;
5192
5193 attr.dw_attr = attr_kind;
5194 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5195 attr.dw_attr_val.val_entry = NULL;
5196 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
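/* With split debug info the label is also registered in the .debug_addr
table so the attribute can be emitted as an index into that table. */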
5197 if (dwarf_split_debug_info)
5198 attr.dw_attr_val.val_entry
5199 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5200 ate_kind_label);
5201 add_dwarf_attr (die, &attr);
5202 }
5203
5204 /* Add a section offset attribute value to a DIE, an offset into the
5205 debug_line section. */
5206
5207 static inline void
5208 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5209 const char *label)
5210 {
5211 dw_attr_node attr;
5212
5213 attr.dw_attr = attr_kind;
5214 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5215 attr.dw_attr_val.val_entry = NULL;
5216 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5217 add_dwarf_attr (die, &attr);
5218 }
5219
5220 /* Add a section offset attribute value to a DIE, an offset into the
5221 debug_loclists section. */
5222
5223 static inline void
5224 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5225 const char *label)
5226 {
5227 dw_attr_node attr;
5228
5229 attr.dw_attr = attr_kind;
5230 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5231 attr.dw_attr_val.val_entry = NULL;
5232 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5233 add_dwarf_attr (die, &attr);
5234 }
5235
5236 /* Add a section offset attribute value to a DIE, an offset into the
5237 debug_macinfo section. */
5238
5239 static inline void
5240 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5241 const char *label)
5242 {
5243 dw_attr_node attr;
5244
5245 attr.dw_attr = attr_kind;
5246 attr.dw_attr_val.val_class = dw_val_class_macptr;
5247 attr.dw_attr_val.val_entry = NULL;
5248 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5249 add_dwarf_attr (die, &attr);
5250 }
5251
5252 /* Add an offset attribute value to a DIE. */
5253
5254 static inline void
5255 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5256 unsigned HOST_WIDE_INT offset)
5257 {
5258 dw_attr_node attr;
5259
5260 attr.dw_attr = attr_kind;
5261 attr.dw_attr_val.val_class = dw_val_class_offset;
5262 attr.dw_attr_val.val_entry = NULL;
5263 attr.dw_attr_val.v.val_offset = offset;
5264 add_dwarf_attr (die, &attr);
5265 }
5266
5267 /* Add a range_list attribute value to a DIE. When using
5268 dwarf_split_debug_info, address attributes in dies destined for the
5269 final executable should be direct references--setting the parameter
5270 force_direct ensures this behavior. */
5271
5272 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5273 #define RELOCATED_OFFSET (NULL)
5274
5275 static void
5276 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5277 long unsigned int offset, bool force_direct)
5278 {
5279 dw_attr_node attr;
5280
5281 attr.dw_attr = attr_kind;
5282 attr.dw_attr_val.val_class = dw_val_class_range_list;
5283 /* For the range_list attribute, use val_entry to store whether the
5284 offset should follow split-debug-info or normal semantics. This
5285 value is read in output_range_list_offset. */
5286 if (dwarf_split_debug_info && !force_direct)
5287 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5288 else
5289 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5290 attr.dw_attr_val.v.val_offset = offset;
5291 add_dwarf_attr (die, &attr);
5292 }
5293
5294 /* Return the start label of a delta attribute. */
5295
5296 static inline const char *
5297 AT_vms_delta1 (dw_attr_node *a)
5298 {
5299 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5300 return a->dw_attr_val.v.val_vms_delta.lbl1;
5301 }
5302
5303 /* Return the end label of a delta attribute. */
5304
5305 static inline const char *
5306 AT_vms_delta2 (dw_attr_node *a)
5307 {
5308 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5309 return a->dw_attr_val.v.val_vms_delta.lbl2;
5310 }
5311
5312 static inline const char *
5313 AT_lbl (dw_attr_node *a)
5314 {
5315 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5316 || AT_class (a) == dw_val_class_lineptr
5317 || AT_class (a) == dw_val_class_macptr
5318 || AT_class (a) == dw_val_class_loclistsptr
5319 || AT_class (a) == dw_val_class_high_pc));
5320 return a->dw_attr_val.v.val_lbl_id;
5321 }
5322
5323 /* Get the attribute of type attr_kind. */
5324
5325 static dw_attr_node *
5326 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5327 {
5328 dw_attr_node *a;
5329 unsigned ix;
5330 dw_die_ref spec = NULL;
5331
5332 if (! die)
5333 return NULL;
5334
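/* Scan DIE's own attributes first; if ATTR_KIND is not there, follow a
DW_AT_specification or DW_AT_abstract_origin link (remembered in SPEC)
and retry the lookup on the referenced DIE. */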
5335 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5336 if (a->dw_attr == attr_kind)
5337 return a;
5338 else if (a->dw_attr == DW_AT_specification
5339 || a->dw_attr == DW_AT_abstract_origin)
5340 spec = AT_ref (a);
5341
5342 if (spec)
5343 return get_AT (spec, attr_kind);
5344
5345 return NULL;
5346 }
5347
5348 /* Returns the parent of the declaration of DIE. */
5349
5350 static dw_die_ref
5351 get_die_parent (dw_die_ref die)
5352 {
5353 dw_die_ref t;
5354
5355 if (!die)
5356 return NULL;
5357
5358 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5359 || (t = get_AT_ref (die, DW_AT_specification)))
5360 die = t;
5361
5362 return die->die_parent;
5363 }
5364
5365 /* Return the "low pc" attribute value, typically associated with a subprogram
5366 DIE. Return null if the "low pc" attribute is either not present, or if it
5367 cannot be represented as an assembler label identifier. */
5368
5369 static inline const char *
5370 get_AT_low_pc (dw_die_ref die)
5371 {
5372 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5373
5374 return a ? AT_lbl (a) : NULL;
5375 }
5376
5377 /* Return the "high pc" attribute value, typically associated with a subprogram
5378 DIE. Return null if the "high pc" attribute is either not present, or if it
5379 cannot be represented as an assembler label identifier. */
5380
5381 static inline const char *
5382 get_AT_hi_pc (dw_die_ref die)
5383 {
5384 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5385
5386 return a ? AT_lbl (a) : NULL;
5387 }
5388
5389 /* Return the value of the string attribute designated by ATTR_KIND, or
5390 NULL if it is not present. */
5391
5392 static inline const char *
5393 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5394 {
5395 dw_attr_node *a = get_AT (die, attr_kind);
5396
5397 return a ? AT_string (a) : NULL;
5398 }
5399
5400 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5401 if it is not present. */
5402
5403 static inline int
5404 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5405 {
5406 dw_attr_node *a = get_AT (die, attr_kind);
5407
5408 return a ? AT_flag (a) : 0;
5409 }
5410
5411 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5412 if it is not present. */
5413
5414 static inline unsigned
5415 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5416 {
5417 dw_attr_node *a = get_AT (die, attr_kind);
5418
5419 return a ? AT_unsigned (a) : 0;
5420 }
5421
5422 static inline dw_die_ref
5423 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5424 {
5425 dw_attr_node *a = get_AT (die, attr_kind);
5426
5427 return a ? AT_ref (a) : NULL;
5428 }
5429
5430 static inline struct dwarf_file_data *
5431 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5432 {
5433 dw_attr_node *a = get_AT (die, attr_kind);
5434
5435 return a ? AT_file (a) : NULL;
5436 }
5437
5438 /* Return TRUE if the language is C++. */
5439
5440 static inline bool
5441 is_cxx (void)
5442 {
5443 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5444
5445 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5446 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5447 }
5448
5449 /* Return TRUE if DECL was created by the C++ frontend. */
5450
5451 static bool
5452 is_cxx (const_tree decl)
5453 {
5454 if (in_lto_p)
5455 {
5456 const_tree context = get_ultimate_context (decl);
5457 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5458 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5459 }
5460 return is_cxx ();
5461 }
5462
5463 /* Return TRUE if the language is Fortran. */
5464
5465 static inline bool
5466 is_fortran (void)
5467 {
5468 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5469
5470 return (lang == DW_LANG_Fortran77
5471 || lang == DW_LANG_Fortran90
5472 || lang == DW_LANG_Fortran95
5473 || lang == DW_LANG_Fortran03
5474 || lang == DW_LANG_Fortran08);
5475 }
5476
5477 static inline bool
5478 is_fortran (const_tree decl)
5479 {
5480 if (in_lto_p)
5481 {
5482 const_tree context = get_ultimate_context (decl);
5483 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5484 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5485 "GNU Fortran", 11) == 0
5486 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5487 "GNU F77") == 0);
5488 }
5489 return is_fortran ();
5490 }
5491
5492 /* Return TRUE if the language is Ada. */
5493
5494 static inline bool
5495 is_ada (void)
5496 {
5497 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5498
5499 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5500 }
5501
5502 /* Remove the specified attribute if present. Return TRUE if removal
5503 was successful. */
5504
5505 static bool
5506 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5507 {
5508 dw_attr_node *a;
5509 unsigned ix;
5510
5511 if (! die)
5512 return false;
5513
5514 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5515 if (a->dw_attr == attr_kind)
5516 {
5517 if (AT_class (a) == dw_val_class_str)
5518 if (a->dw_attr_val.v.val_str->refcount)
5519 a->dw_attr_val.v.val_str->refcount--;
5520
5521 /* vec::ordered_remove should help reduce the number of abbrevs
5522 that are needed. */
5523 die->die_attr->ordered_remove (ix);
5524 return true;
5525 }
5526 return false;
5527 }
5528
5529 /* Remove CHILD from its parent. PREV must have the property that
5530 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but not its die_parent. */
5531
5532 static void
5533 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5534 {
5535 gcc_assert (child->die_parent == prev->die_parent);
5536 gcc_assert (prev->die_sib == child);
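/* PREV == CHILD means CHILD is the only element of the circular sibling
list, so the parent is left with no children at all. */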
5537 if (prev == child)
5538 {
5539 gcc_assert (child->die_parent->die_child == child);
5540 prev = NULL;
5541 }
5542 else
5543 prev->die_sib = child->die_sib;
5544 if (child->die_parent->die_child == child)
5545 child->die_parent->die_child = prev;
5546 child->die_sib = NULL;
5547 }
5548
5549 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5550 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but not its die_parent. */
5551
5552 static void
5553 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5554 {
5555 dw_die_ref parent = old_child->die_parent;
5556
5557 gcc_assert (parent == prev->die_parent);
5558 gcc_assert (prev->die_sib == old_child);
5559
5560 new_child->die_parent = parent;
5561 if (prev == old_child)
5562 {
5563 gcc_assert (parent->die_child == old_child);
5564 new_child->die_sib = new_child;
5565 }
5566 else
5567 {
5568 prev->die_sib = new_child;
5569 new_child->die_sib = old_child->die_sib;
5570 }
5571 if (old_child->die_parent->die_child == old_child)
5572 old_child->die_parent->die_child = new_child;
5573 old_child->die_sib = NULL;
5574 }
5575
5576 /* Move all children from OLD_PARENT to NEW_PARENT. */
5577
5578 static void
5579 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5580 {
5581 dw_die_ref c;
5582 new_parent->die_child = old_parent->die_child;
5583 old_parent->die_child = NULL;
5584 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5585 }
5586
5587 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no
5588 child matches TAG. */
5589
5590 static void
5591 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5592 {
5593 dw_die_ref c;
5594
5595 c = die->die_child;
5596 if (c) do {
5597 dw_die_ref prev = c;
5598 c = c->die_sib;
5599 while (c->die_tag == tag)
5600 {
5601 remove_child_with_prev (c, prev);
5602 c->die_parent = NULL;
5603 /* Might have removed every child. */
5604 if (die->die_child == NULL)
5605 return;
5606 c = prev->die_sib;
5607 }
5608 } while (c != die->die_child);
5609 }
5610
5611 /* Add a CHILD_DIE as the last child of DIE. */
5612
5613 static void
5614 add_child_die (dw_die_ref die, dw_die_ref child_die)
5615 {
5616 /* FIXME this should probably be an assert. */
5617 if (! die || ! child_die)
5618 return;
5619 gcc_assert (die != child_die);
5620
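/* Children hang off DIE on a circular singly-linked list: DIE->die_child
points to the most recently added (last) child, and that child's die_sib
points back around to the first child. */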
5621 child_die->die_parent = die;
5622 if (die->die_child)
5623 {
5624 child_die->die_sib = die->die_child->die_sib;
5625 die->die_child->die_sib = child_die;
5626 }
5627 else
5628 child_die->die_sib = child_die;
5629 die->die_child = child_die;
5630 }
5631
5632 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5633
5634 static void
5635 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5636 dw_die_ref after_die)
5637 {
5638 gcc_assert (die
5639 && child_die
5640 && after_die
5641 && die->die_child
5642 && die != child_die);
5643
5644 child_die->die_parent = die;
5645 child_die->die_sib = after_die->die_sib;
5646 after_die->die_sib = child_die;
5647 if (die->die_child == after_die)
5648 die->die_child = child_die;
5649 }
5650
5651 /* Unassociate CHILD from its parent, and make its parent be
5652 NEW_PARENT. */
5653
5654 static void
5655 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5656 {
5657 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5658 if (p->die_sib == child)
5659 {
5660 remove_child_with_prev (child, p);
5661 break;
5662 }
5663 add_child_die (new_parent, child);
5664 }
5665
5666 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5667 is the specification, to the end of PARENT's list of children.
5668 This is done by removing and re-adding it. */
5669
5670 static void
5671 splice_child_die (dw_die_ref parent, dw_die_ref child)
5672 {
5673 /* We want the declaration DIE from inside the class, not the
5674 specification DIE at toplevel. */
5675 if (child->die_parent != parent)
5676 {
5677 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5678
5679 if (tmp)
5680 child = tmp;
5681 }
5682
5683 gcc_assert (child->die_parent == parent
5684 || (child->die_parent
5685 == get_AT_ref (parent, DW_AT_specification)));
5686
5687 reparent_child (child, parent);
5688 }
5689
5690 /* Create and return a new die with TAG_VALUE as tag. */
5691
5692 static inline dw_die_ref
5693 new_die_raw (enum dwarf_tag tag_value)
5694 {
5695 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5696 die->die_tag = tag_value;
5697 return die;
5698 }
5699
5700 /* Create and return a new die with a parent of PARENT_DIE. If
5701 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5702 associated tree T must be supplied to determine parenthood
5703 later. */
5704
5705 static inline dw_die_ref
5706 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5707 {
5708 dw_die_ref die = new_die_raw (tag_value);
5709
5710 if (parent_die != NULL)
5711 add_child_die (parent_die, die);
5712 else
5713 {
5714 limbo_die_node *limbo_node;
5715
5716 /* No DIEs created after early dwarf should end up in limbo,
5717 because the limbo list should not persist past LTO
5718 streaming. */
5719 if (tag_value != DW_TAG_compile_unit
5720 /* These are allowed because they're generated while
5721 breaking out COMDAT units late. */
5722 && tag_value != DW_TAG_type_unit
5723 && tag_value != DW_TAG_skeleton_unit
5724 && !early_dwarf
5725 /* Allow nested functions to live in limbo because they will
5726 only temporarily live there, as decls_for_scope will fix
5727 them up. */
5728 && (TREE_CODE (t) != FUNCTION_DECL
5729 || !decl_function_context (t))
5730 /* Same as nested functions above but for types. Types that
5731 are local to a function will be fixed in
5732 decls_for_scope. */
5733 && (!RECORD_OR_UNION_TYPE_P (t)
5734 || !TYPE_CONTEXT (t)
5735 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5736 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5737 especially in the ltrans stage, but once we implement LTO
5738 dwarf streaming, we should remove this exception. */
5739 && !in_lto_p)
5740 {
5741 fprintf (stderr, "symbol ended up in limbo too late:");
5742 debug_generic_stmt (t);
5743 gcc_unreachable ();
5744 }
5745
5746 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5747 limbo_node->die = die;
5748 limbo_node->created_for = t;
5749 limbo_node->next = limbo_die_list;
5750 limbo_die_list = limbo_node;
5751 }
5752
5753 return die;
5754 }
5755
5756 /* Return the DIE associated with the given type specifier. */
5757
5758 static inline dw_die_ref
5759 lookup_type_die (tree type)
5760 {
5761 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5762 if (die && die->removed)
5763 {
5764 TYPE_SYMTAB_DIE (type) = NULL;
5765 return NULL;
5766 }
5767 return die;
5768 }
5769
5770 /* Given TYPE_DIE, a DIE for the type TYPE, if TYPE is an anonymous
5771 type named by a typedef and TYPE_DIE is the DIE of that naming
5772 typedef, return the DIE of the anonymous type instead. */
5773
5774 static inline dw_die_ref
5775 strip_naming_typedef (tree type, dw_die_ref type_die)
5776 {
5777 if (type
5778 && TREE_CODE (type) == RECORD_TYPE
5779 && type_die
5780 && type_die->die_tag == DW_TAG_typedef
5781 && is_naming_typedef_decl (TYPE_NAME (type)))
5782 type_die = get_AT_ref (type_die, DW_AT_type);
5783 return type_die;
5784 }
5785
5786 /* Like lookup_type_die, but if type is an anonymous type named by a
5787 typedef[1], return the DIE of the anonymous type instead of the one
5788 of the naming typedef. This is because in gen_typedef_die, we
5789 equated the anonymous struct named by the typedef with the DIE of
5790 the naming typedef. So by default, lookup_type_die on an anonymous
5791 struct yields the DIE of the naming typedef.
5792
5793 [1]: Read the comment of is_naming_typedef_decl to learn about what
5794 a naming typedef is. */
5795
5796 static inline dw_die_ref
5797 lookup_type_die_strip_naming_typedef (tree type)
5798 {
5799 dw_die_ref die = lookup_type_die (type);
5800 return strip_naming_typedef (type, die);
5801 }
5802
5803 /* Equate a DIE to a given type specifier. */
5804
5805 static inline void
5806 equate_type_number_to_die (tree type, dw_die_ref type_die)
5807 {
5808 TYPE_SYMTAB_DIE (type) = type_die;
5809 }
5810
5811 /* Returns a hash value for X (which really is a die_struct). */
5812
5813 inline hashval_t
5814 decl_die_hasher::hash (die_node *x)
5815 {
5816 return (hashval_t) x->decl_id;
5817 }
5818
5819 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5820
5821 inline bool
5822 decl_die_hasher::equal (die_node *x, tree y)
5823 {
5824 return (x->decl_id == DECL_UID (y));
5825 }
5826
5827 /* Return the DIE associated with a given declaration. */
5828
5829 static inline dw_die_ref
5830 lookup_decl_die (tree decl)
5831 {
5832 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5833 NO_INSERT);
5834 if (!die)
5835 return NULL;
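/* The DIE may have been removed after it was recorded here; drop the
stale hash table entry lazily and report that there is no DIE. */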
5836 if ((*die)->removed)
5837 {
5838 decl_die_table->clear_slot (die);
5839 return NULL;
5840 }
5841 return *die;
5842 }
5843
5844
5845 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5846 style reference. Return true if we found one referring to a DIE for
5847 DECL, otherwise return false. */
5848
5849 static bool
5850 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5851 unsigned HOST_WIDE_INT *off)
5852 {
5853 dw_die_ref die;
5854
5855 if (in_lto_p && !decl_die_table)
5856 return false;
5857
5858 if (TREE_CODE (decl) == BLOCK)
5859 die = BLOCK_DIE (decl);
5860 else
5861 die = lookup_decl_die (decl);
5862 if (!die)
5863 return false;
5864
5865 /* During WPA stage and incremental linking we currently use DIEs
5866 to store the decl <-> label + offset map. That's quite inefficient
5867 but it works for now. */
5868 if (in_lto_p)
5869 {
5870 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5871 if (!ref)
5872 {
5873 gcc_assert (die == comp_unit_die ());
5874 return false;
5875 }
5876 *off = ref->die_offset;
5877 *sym = ref->die_id.die_symbol;
5878 return true;
5879 }
5880
5881 /* Similar to get_ref_die_offset_label, but using the "correct"
5882 label. */
5883 *off = die->die_offset;
5884 while (die->die_parent)
5885 die = die->die_parent;
5886 /* For the containing CU DIE we compute a die_symbol in
5887 compute_comp_unit_symbol. */
5888 gcc_assert (die->die_tag == DW_TAG_compile_unit
5889 && die->die_id.die_symbol != NULL);
5890 *sym = die->die_id.die_symbol;
5891 return true;
5892 }
5893
5894 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5895
5896 static void
5897 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5898 const char *symbol, HOST_WIDE_INT offset)
5899 {
5900 /* Create a fake DIE that contains the reference. Don't use
5901 new_die because we don't want to end up in the limbo list. */
5902 dw_die_ref ref = new_die_raw (die->die_tag);
5903 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5904 ref->die_offset = offset;
5905 ref->with_offset = 1;
5906 add_AT_die_ref (die, attr_kind, ref);
5907 }
5908
5909 /* Create a DIE for DECL if required and add a reference to a DIE
5910 at SYMBOL + OFFSET which contains attributes dumped early. */
5911
5912 static void
5913 dwarf2out_register_external_die (tree decl, const char *sym,
5914 unsigned HOST_WIDE_INT off)
5915 {
5916 if (debug_info_level == DINFO_LEVEL_NONE)
5917 return;
5918
5919 if ((flag_wpa
5920 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5921 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5922
5923 dw_die_ref die
5924 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5925 gcc_assert (!die);
5926
5927 tree ctx;
5928 dw_die_ref parent = NULL;
5929 /* Need to look up a DIE for the decl's context - the containing
5930 function or translation unit. */
5931 if (TREE_CODE (decl) == BLOCK)
5932 {
5933 ctx = BLOCK_SUPERCONTEXT (decl);
5934 /* ??? We do not output DIEs for all scopes thus skip as
5935 many DIEs as needed. */
5936 while (TREE_CODE (ctx) == BLOCK
5937 && !BLOCK_DIE (ctx))
5938 ctx = BLOCK_SUPERCONTEXT (ctx);
5939 }
5940 else
5941 ctx = DECL_CONTEXT (decl);
5942 /* Peel types in the context stack. */
5943 while (ctx && TYPE_P (ctx))
5944 ctx = TYPE_CONTEXT (ctx);
5945 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5946 if (debug_info_level <= DINFO_LEVEL_TERSE)
5947 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5948 ctx = DECL_CONTEXT (ctx);
5949 if (ctx)
5950 {
5951 if (TREE_CODE (ctx) == BLOCK)
5952 parent = BLOCK_DIE (ctx);
5953 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5954 /* Keep the 1:1 association during WPA. */
5955 && !flag_wpa
5956 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5957 /* Otherwise all late annotations go to the main CU which
5958 imports the original CUs. */
5959 parent = comp_unit_die ();
5960 else if (TREE_CODE (ctx) == FUNCTION_DECL
5961 && TREE_CODE (decl) != FUNCTION_DECL
5962 && TREE_CODE (decl) != PARM_DECL
5963 && TREE_CODE (decl) != RESULT_DECL
5964 && TREE_CODE (decl) != BLOCK)
5965 /* Leave function local entities parent determination to when
5966 we process scope vars. */
5967 ;
5968 else
5969 parent = lookup_decl_die (ctx);
5970 }
5971 else
5972 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5973 Handle this case gracefully by parenting such entities in the CU DIE. */
5974 parent = comp_unit_die ();
5975 /* Create a DIE "stub". */
5976 switch (TREE_CODE (decl))
5977 {
5978 case TRANSLATION_UNIT_DECL:
5979 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5980 {
5981 die = comp_unit_die ();
5982 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5983 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5984 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5985 to create a DIE for the original CUs. */
5986 return;
5987 }
5988 /* Keep the 1:1 association during WPA. */
5989 die = new_die (DW_TAG_compile_unit, NULL, decl);
5990 break;
5991 case NAMESPACE_DECL:
5992 if (is_fortran (decl))
5993 die = new_die (DW_TAG_module, parent, decl);
5994 else
5995 die = new_die (DW_TAG_namespace, parent, decl);
5996 break;
5997 case FUNCTION_DECL:
5998 die = new_die (DW_TAG_subprogram, parent, decl);
5999 break;
6000 case VAR_DECL:
6001 die = new_die (DW_TAG_variable, parent, decl);
6002 break;
6003 case RESULT_DECL:
6004 die = new_die (DW_TAG_variable, parent, decl);
6005 break;
6006 case PARM_DECL:
6007 die = new_die (DW_TAG_formal_parameter, parent, decl);
6008 break;
6009 case CONST_DECL:
6010 die = new_die (DW_TAG_constant, parent, decl);
6011 break;
6012 case LABEL_DECL:
6013 die = new_die (DW_TAG_label, parent, decl);
6014 break;
6015 case BLOCK:
6016 die = new_die (DW_TAG_lexical_block, parent, decl);
6017 break;
6018 default:
6019 gcc_unreachable ();
6020 }
6021 if (TREE_CODE (decl) == BLOCK)
6022 BLOCK_DIE (decl) = die;
6023 else
6024 equate_decl_number_to_die (decl, die);
6025
6026 /* Add a reference to the DIE providing early debug at SYM + OFF. */
6027 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6028 }
6029
6030 /* Returns a hash value for X (which really is a var_loc_list). */
6031
6032 inline hashval_t
6033 decl_loc_hasher::hash (var_loc_list *x)
6034 {
6035 return (hashval_t) x->decl_id;
6036 }
6037
6038 /* Return nonzero if decl_id of var_loc_list X is the same as
6039 UID of decl *Y. */
6040
6041 inline bool
6042 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6043 {
6044 return (x->decl_id == DECL_UID (y));
6045 }
6046
6047 /* Return the var_loc list associated with a given declaration. */
6048
6049 static inline var_loc_list *
6050 lookup_decl_loc (const_tree decl)
6051 {
6052 if (!decl_loc_table)
6053 return NULL;
6054 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6055 }
6056
6057 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6058
6059 inline hashval_t
6060 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6061 {
6062 return (hashval_t) x->decl_id;
6063 }
6064
6065 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6066 UID of decl *Y. */
6067
6068 inline bool
6069 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6070 {
6071 return (x->decl_id == DECL_UID (y));
6072 }
6073
6074 /* Equate a DIE to a particular declaration. */
6075
6076 static void
6077 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6078 {
6079 unsigned int decl_id = DECL_UID (decl);
6080
6081 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6082 decl_die->decl_id = decl_id;
6083 }
6084
6085 /* Return how many bits the PIECE EXPR_LIST covers. */
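/* Small sizes (up to MAX_MACHINE_MODE) are stored directly in the
EXPR_LIST's mode field; larger ones are stored as a CONST_INT inside a
CONCAT wrapping the location note (see decl_piece_node below). */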
6086
6087 static HOST_WIDE_INT
6088 decl_piece_bitsize (rtx piece)
6089 {
6090 int ret = (int) GET_MODE (piece);
6091 if (ret)
6092 return ret;
6093 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6094 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6095 return INTVAL (XEXP (XEXP (piece, 0), 0));
6096 }
6097
6098 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6099
6100 static rtx *
6101 decl_piece_varloc_ptr (rtx piece)
6102 {
6103 if ((int) GET_MODE (piece))
6104 return &XEXP (piece, 0);
6105 else
6106 return &XEXP (XEXP (piece, 0), 1);
6107 }
6108
6109 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6110 NEXT is the chain of following piece nodes. */
6111
6112 static rtx_expr_list *
6113 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6114 {
6115 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6116 return alloc_EXPR_LIST (bitsize, loc_note, next);
6117 else
6118 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6119 GEN_INT (bitsize),
6120 loc_note), next);
6121 }
6122
6123 /* Return rtx that should be stored into loc field for
6124 LOC_NOTE and BITPOS/BITSIZE. */
6125
6126 static rtx
6127 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6128 HOST_WIDE_INT bitsize)
6129 {
6130 if (bitsize != -1)
6131 {
6132 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6133 if (bitpos != 0)
6134 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6135 }
6136 return loc_note;
6137 }
6138
6139 /* This function either modifies location piece list *DEST in
6140 place (if SRC and INNER are NULL), or copies location piece list
6141 *SRC to *DEST while modifying it. The piece at BITPOS is changed
6142 to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6143 copying, not copied) and some padding around it is added if needed.
6144 When modifying in place, DEST should point to EXPR_LIST where
6145 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6146 to the start of the whole list and INNER points to the EXPR_LIST
6147 where earlier pieces cover PIECE_BITPOS bits. */
6148
6149 static void
6150 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6151 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6152 HOST_WIDE_INT bitsize, rtx loc_note)
6153 {
6154 HOST_WIDE_INT diff;
6155 bool copy = inner != NULL;
6156
6157 if (copy)
6158 {
6159 /* First copy all nodes preceding the current bitpos. */
6160 while (src != inner)
6161 {
6162 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6163 decl_piece_bitsize (*src), NULL_RTX);
6164 dest = &XEXP (*dest, 1);
6165 src = &XEXP (*src, 1);
6166 }
6167 }
6168 /* Add padding if needed. */
6169 if (bitpos != piece_bitpos)
6170 {
6171 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6172 copy ? NULL_RTX : *dest);
6173 dest = &XEXP (*dest, 1);
6174 }
6175 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6176 {
6177 gcc_assert (!copy);
6178 /* A piece with the correct bitpos and bitsize already exists;
6179 just update its location and return. */
6180 *decl_piece_varloc_ptr (*dest) = loc_note;
6181 return;
6182 }
6183 /* Add the piece that changed. */
6184 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6185 dest = &XEXP (*dest, 1);
6186 /* Skip over pieces that overlap it. */
6187 diff = bitpos - piece_bitpos + bitsize;
6188 if (!copy)
6189 src = dest;
6190 while (diff > 0 && *src)
6191 {
6192 rtx piece = *src;
6193 diff -= decl_piece_bitsize (piece);
6194 if (copy)
6195 src = &XEXP (piece, 1);
6196 else
6197 {
6198 *src = XEXP (piece, 1);
6199 free_EXPR_LIST_node (piece);
6200 }
6201 }
6202 /* Add padding if needed. */
6203 if (diff < 0 && *src)
6204 {
6205 if (!copy)
6206 dest = src;
6207 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6208 dest = &XEXP (*dest, 1);
6209 }
6210 if (!copy)
6211 return;
6212 /* Finally copy all nodes following it. */
6213 while (*src)
6214 {
6215 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6216 decl_piece_bitsize (*src), NULL_RTX);
6217 dest = &XEXP (*dest, 1);
6218 src = &XEXP (*src, 1);
6219 }
6220 }
6221
6222 /* Add a variable location node to the linked list for DECL. */
6223
6224 static struct var_loc_node *
6225 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6226 {
6227 unsigned int decl_id;
6228 var_loc_list *temp;
6229 struct var_loc_node *loc = NULL;
6230 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6231
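/* If DECL is a fragment of another variable (its DEBUG_EXPR points into
that variable), record the location against the underlying declaration
as a bit-range piece; give up if the base cannot be identified or the
range does not fit within the first 256 bits. */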
6232 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6233 {
6234 tree realdecl = DECL_DEBUG_EXPR (decl);
6235 if (handled_component_p (realdecl)
6236 || (TREE_CODE (realdecl) == MEM_REF
6237 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6238 {
6239 bool reverse;
6240 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6241 &bitsize, &reverse);
6242 if (!innerdecl
6243 || !DECL_P (innerdecl)
6244 || DECL_IGNORED_P (innerdecl)
6245 || TREE_STATIC (innerdecl)
6246 || bitsize == 0
6247 || bitpos + bitsize > 256)
6248 return NULL;
6249 decl = innerdecl;
6250 }
6251 }
6252
6253 decl_id = DECL_UID (decl);
6254 var_loc_list **slot
6255 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6256 if (*slot == NULL)
6257 {
6258 temp = ggc_cleared_alloc<var_loc_list> ();
6259 temp->decl_id = decl_id;
6260 *slot = temp;
6261 }
6262 else
6263 temp = *slot;
6264
6265 /* For PARM_DECLs try to keep around the original incoming value,
6266 even if that means we'll emit a zero-range .debug_loc entry. */
6267 if (temp->last
6268 && temp->first == temp->last
6269 && TREE_CODE (decl) == PARM_DECL
6270 && NOTE_P (temp->first->loc)
6271 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6272 && DECL_INCOMING_RTL (decl)
6273 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6274 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6275 == GET_CODE (DECL_INCOMING_RTL (decl))
6276 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6277 && (bitsize != -1
6278 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6279 NOTE_VAR_LOCATION_LOC (loc_note))
6280 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6281 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6282 {
6283 loc = ggc_cleared_alloc<var_loc_node> ();
6284 temp->first->next = loc;
6285 temp->last = loc;
6286 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6287 }
6288 else if (temp->last)
6289 {
6290 struct var_loc_node *last = temp->last, *unused = NULL;
6291 rtx *piece_loc = NULL, last_loc_note;
6292 HOST_WIDE_INT piece_bitpos = 0;
6293 if (last->next)
6294 {
6295 last = last->next;
6296 gcc_assert (last->next == NULL);
6297 }
6298 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6299 {
6300 piece_loc = &last->loc;
6301 do
6302 {
6303 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6304 if (piece_bitpos + cur_bitsize > bitpos)
6305 break;
6306 piece_bitpos += cur_bitsize;
6307 piece_loc = &XEXP (*piece_loc, 1);
6308 }
6309 while (*piece_loc);
6310 }
6311 /* TEMP->LAST here is a pointer either to the last but one or to the
6312 last element of the chained list; LAST is a pointer to the
6313 last element. */
6314 if (label && strcmp (last->label, label) == 0 && last->view == view)
6315 {
6316 /* For SRA optimized variables, if there weren't any real
6317 insns since the last note, just modify the last node. */
6318 if (piece_loc != NULL)
6319 {
6320 adjust_piece_list (piece_loc, NULL, NULL,
6321 bitpos, piece_bitpos, bitsize, loc_note);
6322 return NULL;
6323 }
6324 /* If the last note doesn't cover any instructions, remove it. */
6325 if (temp->last != last)
6326 {
6327 temp->last->next = NULL;
6328 unused = last;
6329 last = temp->last;
6330 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6331 }
6332 else
6333 {
6334 gcc_assert (temp->first == temp->last
6335 || (temp->first->next == temp->last
6336 && TREE_CODE (decl) == PARM_DECL));
6337 memset (temp->last, '\0', sizeof (*temp->last));
6338 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6339 return temp->last;
6340 }
6341 }
6342 if (bitsize == -1 && NOTE_P (last->loc))
6343 last_loc_note = last->loc;
6344 else if (piece_loc != NULL
6345 && *piece_loc != NULL_RTX
6346 && piece_bitpos == bitpos
6347 && decl_piece_bitsize (*piece_loc) == bitsize)
6348 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6349 else
6350 last_loc_note = NULL_RTX;
6351 /* If the current location is the same as the end of the list,
6352 and either both or neither of the locations is uninitialized,
6353 we have nothing to do. */
6354 if (last_loc_note == NULL_RTX
6355 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6356 NOTE_VAR_LOCATION_LOC (loc_note)))
6357 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6358 != NOTE_VAR_LOCATION_STATUS (loc_note))
6359 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6360 == VAR_INIT_STATUS_UNINITIALIZED)
6361 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6362 == VAR_INIT_STATUS_UNINITIALIZED))))
6363 {
6364 /* Add LOC to the end of list and update LAST. If the last
6365 element of the list has been removed above, reuse its
6366 memory for the new node, otherwise allocate a new one. */
6367 if (unused)
6368 {
6369 loc = unused;
6370 memset (loc, '\0', sizeof (*loc));
6371 }
6372 else
6373 loc = ggc_cleared_alloc<var_loc_node> ();
6374 if (bitsize == -1 || piece_loc == NULL)
6375 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6376 else
6377 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6378 bitpos, piece_bitpos, bitsize, loc_note);
6379 last->next = loc;
6380 /* Ensure TEMP->LAST will point either to the new last but one
6381 element of the chain, or to the last element in it. */
6382 if (last != temp->last)
6383 temp->last = last;
6384 }
6385 else if (unused)
6386 ggc_free (unused);
6387 }
6388 else
6389 {
6390 loc = ggc_cleared_alloc<var_loc_node> ();
6391 temp->first = loc;
6392 temp->last = loc;
6393 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6394 }
6395 return loc;
6396 }
6397 \f
6398 /* Keep track of the number of spaces used to indent the
6399 output of the debugging routines that print the structure of
6400 the DIE internal representation. */
6401 static int print_indent;
6402
6403 /* Indent the line the number of spaces given by print_indent. */
6404
6405 static inline void
6406 print_spaces (FILE *outfile)
6407 {
6408 fprintf (outfile, "%*s", print_indent, "");
6409 }
6410
6411 /* Print a type signature in hex. */
6412
6413 static inline void
6414 print_signature (FILE *outfile, char *sig)
6415 {
6416 int i;
6417
6418 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6419 fprintf (outfile, "%02x", sig[i] & 0xff);
6420 }
6421
6422 static inline void
6423 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6424 {
6425 if (discr_value->pos)
6426 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6427 else
6428 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6429 }
6430
6431 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6432
6433 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6434 RECURSE, output location descriptor operations. */
6435
6436 static void
6437 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6438 {
6439 switch (val->val_class)
6440 {
6441 case dw_val_class_addr:
6442 fprintf (outfile, "address");
6443 break;
6444 case dw_val_class_offset:
6445 fprintf (outfile, "offset");
6446 break;
6447 case dw_val_class_loc:
6448 fprintf (outfile, "location descriptor");
6449 if (val->v.val_loc == NULL)
6450 fprintf (outfile, " -> <null>\n");
6451 else if (recurse)
6452 {
6453 fprintf (outfile, ":\n");
6454 print_indent += 4;
6455 print_loc_descr (val->v.val_loc, outfile);
6456 print_indent -= 4;
6457 }
6458 else
6459 {
6460 if (flag_dump_noaddr || flag_dump_unnumbered)
6461 fprintf (outfile, " #\n");
6462 else
6463 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6464 }
6465 break;
6466 case dw_val_class_loc_list:
6467 fprintf (outfile, "location list -> label:%s",
6468 val->v.val_loc_list->ll_symbol);
6469 break;
6470 case dw_val_class_view_list:
6471 val = view_list_to_loc_list_val_node (val);
6472 fprintf (outfile, "location list with views -> labels:%s and %s",
6473 val->v.val_loc_list->ll_symbol,
6474 val->v.val_loc_list->vl_symbol);
6475 break;
6476 case dw_val_class_range_list:
6477 fprintf (outfile, "range list");
6478 break;
6479 case dw_val_class_const:
6480 case dw_val_class_const_implicit:
6481 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6482 break;
6483 case dw_val_class_unsigned_const:
6484 case dw_val_class_unsigned_const_implicit:
6485 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6486 break;
6487 case dw_val_class_const_double:
6488 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6489 HOST_WIDE_INT_PRINT_UNSIGNED")",
6490 val->v.val_double.high,
6491 val->v.val_double.low);
6492 break;
6493 case dw_val_class_wide_int:
6494 {
6495 int i = val->v.val_wide->get_len ();
6496 fprintf (outfile, "constant (");
6497 gcc_assert (i > 0);
6498 if (val->v.val_wide->elt (i - 1) == 0)
6499 fprintf (outfile, "0x");
6500 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6501 val->v.val_wide->elt (--i));
6502 while (--i >= 0)
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6504 val->v.val_wide->elt (i));
6505 fprintf (outfile, ")");
6506 break;
6507 }
6508 case dw_val_class_vec:
6509 fprintf (outfile, "floating-point or vector constant");
6510 break;
6511 case dw_val_class_flag:
6512 fprintf (outfile, "%u", val->v.val_flag);
6513 break;
6514 case dw_val_class_die_ref:
6515 if (val->v.val_die_ref.die != NULL)
6516 {
6517 dw_die_ref die = val->v.val_die_ref.die;
6518
6519 if (die->comdat_type_p)
6520 {
6521 fprintf (outfile, "die -> signature: ");
6522 print_signature (outfile,
6523 die->die_id.die_type_node->signature);
6524 }
6525 else if (die->die_id.die_symbol)
6526 {
6527 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6528 if (die->with_offset)
6529 fprintf (outfile, " + %ld", die->die_offset);
6530 }
6531 else
6532 fprintf (outfile, "die -> %ld", die->die_offset);
6533 if (flag_dump_noaddr || flag_dump_unnumbered)
6534 fprintf (outfile, " #");
6535 else
6536 fprintf (outfile, " (%p)", (void *) die);
6537 }
6538 else
6539 fprintf (outfile, "die -> <null>");
6540 break;
6541 case dw_val_class_vms_delta:
6542 fprintf (outfile, "delta: @slotcount(%s-%s)",
6543 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6544 break;
6545 case dw_val_class_symview:
6546 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6547 break;
6548 case dw_val_class_lbl_id:
6549 case dw_val_class_lineptr:
6550 case dw_val_class_macptr:
6551 case dw_val_class_loclistsptr:
6552 case dw_val_class_high_pc:
6553 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6554 break;
6555 case dw_val_class_str:
6556 if (val->v.val_str->str != NULL)
6557 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6558 else
6559 fprintf (outfile, "<null>");
6560 break;
6561 case dw_val_class_file:
6562 case dw_val_class_file_implicit:
6563 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6564 val->v.val_file->emitted_number);
6565 break;
6566 case dw_val_class_data8:
6567 {
6568 int i;
6569
6570 for (i = 0; i < 8; i++)
6571 fprintf (outfile, "%02x", val->v.val_data8[i]);
6572 break;
6573 }
6574 case dw_val_class_discr_value:
6575 print_discr_value (outfile, &val->v.val_discr_value);
6576 break;
6577 case dw_val_class_discr_list:
6578 for (dw_discr_list_ref node = val->v.val_discr_list;
6579 node != NULL;
6580 node = node->dw_discr_next)
6581 {
6582 if (node->dw_discr_range)
6583 {
6584 print_discr_value (outfile, &node->dw_discr_lower_bound);
6585 fprintf (outfile, " .. ");
6586 print_discr_value (outfile, &node->dw_discr_upper_bound);
6587 }
6588 else
6589 print_discr_value (outfile, &node->dw_discr_lower_bound);
6590
6591 if (node->dw_discr_next != NULL)
6592 fprintf (outfile, " | ");
6593 }
6594 default:
6595 break;
6596 }
6597 }
6598
6599 /* Likewise, for a DIE attribute. */
6600
6601 static void
6602 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6603 {
6604 print_dw_val (&a->dw_attr_val, recurse, outfile);
6605 }
6606
6607
6608 /* Print the list of operands in the LOC location description to OUTFILE. This
6609 routine is a debugging aid only. */
6610
6611 static void
6612 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6613 {
6614 dw_loc_descr_ref l = loc;
6615
6616 if (loc == NULL)
6617 {
6618 print_spaces (outfile);
6619 fprintf (outfile, "<null>\n");
6620 return;
6621 }
6622
6623 for (l = loc; l != NULL; l = l->dw_loc_next)
6624 {
6625 print_spaces (outfile);
6626 if (flag_dump_noaddr || flag_dump_unnumbered)
6627 fprintf (outfile, "#");
6628 else
6629 fprintf (outfile, "(%p)", (void *) l);
6630 fprintf (outfile, " %s",
6631 dwarf_stack_op_name (l->dw_loc_opc));
6632 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6633 {
6634 fprintf (outfile, " ");
6635 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6636 }
6637 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6638 {
6639 fprintf (outfile, ", ");
6640 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6641 }
6642 fprintf (outfile, "\n");
6643 }
6644 }
6645
6646 /* Print the information associated with a given DIE, and its children.
6647 This routine is a debugging aid only. */
6648
6649 static void
6650 print_die (dw_die_ref die, FILE *outfile)
6651 {
6652 dw_attr_node *a;
6653 dw_die_ref c;
6654 unsigned ix;
6655
6656 print_spaces (outfile);
6657 fprintf (outfile, "DIE %4ld: %s ",
6658 die->die_offset, dwarf_tag_name (die->die_tag));
6659 if (flag_dump_noaddr || flag_dump_unnumbered)
6660 fprintf (outfile, "#\n");
6661 else
6662 fprintf (outfile, "(%p)\n", (void*) die);
6663 print_spaces (outfile);
6664 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6665 fprintf (outfile, " offset: %ld", die->die_offset);
6666 fprintf (outfile, " mark: %d\n", die->die_mark);
6667
6668 if (die->comdat_type_p)
6669 {
6670 print_spaces (outfile);
6671 fprintf (outfile, " signature: ");
6672 print_signature (outfile, die->die_id.die_type_node->signature);
6673 fprintf (outfile, "\n");
6674 }
6675
6676 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6677 {
6678 print_spaces (outfile);
6679 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6680
6681 print_attribute (a, true, outfile);
6682 fprintf (outfile, "\n");
6683 }
6684
6685 if (die->die_child != NULL)
6686 {
6687 print_indent += 4;
6688 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6689 print_indent -= 4;
6690 }
6691 if (print_indent == 0)
6692 fprintf (outfile, "\n");
6693 }
6694
6695 /* Print the list of operations in the LOC location description. */
6696
6697 DEBUG_FUNCTION void
6698 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6699 {
6700 print_loc_descr (loc, stderr);
6701 }
6702
6703 /* Print the information collected for a given DIE. */
6704
6705 DEBUG_FUNCTION void
6706 debug_dwarf_die (dw_die_ref die)
6707 {
6708 print_die (die, stderr);
6709 }
6710
6711 DEBUG_FUNCTION void
6712 debug (die_struct &ref)
6713 {
6714 print_die (&ref, stderr);
6715 }
6716
6717 DEBUG_FUNCTION void
6718 debug (die_struct *ptr)
6719 {
6720 if (ptr)
6721 debug (*ptr);
6722 else
6723 fprintf (stderr, "<nil>\n");
6724 }
6725
6726
6727 /* Print all DWARF information collected for the compilation unit.
6728 This routine is a debugging aid only. */
6729
6730 DEBUG_FUNCTION void
6731 debug_dwarf (void)
6732 {
6733 print_indent = 0;
6734 print_die (comp_unit_die (), stderr);
6735 }
6736
6737 /* Verify the DIE tree structure. */
6738
6739 DEBUG_FUNCTION void
6740 verify_die (dw_die_ref die)
6741 {
6742 gcc_assert (!die->die_mark);
6743 if (die->die_parent == NULL
6744 && die->die_sib == NULL)
6745 return;
6746 /* Verify the die_sib list is cyclic. */
6747 dw_die_ref x = die;
6748 do
6749 {
6750 x->die_mark = 1;
6751 x = x->die_sib;
6752 }
6753 while (x && !x->die_mark);
6754 gcc_assert (x == die);
6755 x = die;
6756 do
6757 {
6758 /* Verify all dies have the same parent. */
6759 gcc_assert (x->die_parent == die->die_parent);
6760 if (x->die_child)
6761 {
6762 /* Verify the child has the proper parent and recurse. */
6763 gcc_assert (x->die_child->die_parent == x);
6764 verify_die (x->die_child);
6765 }
6766 x->die_mark = 0;
6767 x = x->die_sib;
6768 }
6769 while (x && x->die_mark);
6770 }
6771
6772 /* Sanity checks on DIEs. */
6773
6774 static void
6775 check_die (dw_die_ref die)
6776 {
6777 unsigned ix;
6778 dw_attr_node *a;
6779 bool inline_found = false;
6780 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6781 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6782 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6783 {
6784 switch (a->dw_attr)
6785 {
6786 case DW_AT_inline:
6787 if (a->dw_attr_val.v.val_unsigned)
6788 inline_found = true;
6789 break;
6790 case DW_AT_location:
6791 ++n_location;
6792 break;
6793 case DW_AT_low_pc:
6794 ++n_low_pc;
6795 break;
6796 case DW_AT_high_pc:
6797 ++n_high_pc;
6798 break;
6799 case DW_AT_artificial:
6800 ++n_artificial;
6801 break;
6802 case DW_AT_decl_column:
6803 ++n_decl_column;
6804 break;
6805 case DW_AT_decl_line:
6806 ++n_decl_line;
6807 break;
6808 case DW_AT_decl_file:
6809 ++n_decl_file;
6810 break;
6811 default:
6812 break;
6813 }
6814 }
6815 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6816 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6817 {
6818 fprintf (stderr, "Duplicate attributes in DIE:\n");
6819 debug_dwarf_die (die);
6820 gcc_unreachable ();
6821 }
6822 if (inline_found)
6823 {
6824 /* A debugging information entry that is a member of an abstract
6825 instance tree [that has DW_AT_inline] should not contain any
6826 attributes which describe aspects of the subroutine which vary
6827 between distinct inlined expansions or distinct out-of-line
6828 expansions. */
6829 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6830 gcc_assert (a->dw_attr != DW_AT_low_pc
6831 && a->dw_attr != DW_AT_high_pc
6832 && a->dw_attr != DW_AT_location
6833 && a->dw_attr != DW_AT_frame_base
6834 && a->dw_attr != DW_AT_call_all_calls
6835 && a->dw_attr != DW_AT_GNU_all_call_sites);
6836 }
6837 }
6838 \f
6839 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6840 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6841 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6842
6843 /* Calculate the checksum of a location expression. */
6844
6845 static inline void
6846 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6847 {
6848 int tem;
6849 inchash::hash hstate;
6850 hashval_t hash;
6851
6852 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6853 CHECKSUM (tem);
6854 hash_loc_operands (loc, hstate);
6855 hash = hstate.end();
6856 CHECKSUM (hash);
6857 }
6858
6859 /* Calculate the checksum of an attribute. */
6860
6861 static void
6862 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6863 {
6864 dw_loc_descr_ref loc;
6865 rtx r;
6866
6867 CHECKSUM (at->dw_attr);
6868
6869 /* We don't care that this was compiled with a different compiler
6870 snapshot; if the output is the same, that's what matters. */
6871 if (at->dw_attr == DW_AT_producer)
6872 return;
6873
6874 switch (AT_class (at))
6875 {
6876 case dw_val_class_const:
6877 case dw_val_class_const_implicit:
6878 CHECKSUM (at->dw_attr_val.v.val_int);
6879 break;
6880 case dw_val_class_unsigned_const:
6881 case dw_val_class_unsigned_const_implicit:
6882 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6883 break;
6884 case dw_val_class_const_double:
6885 CHECKSUM (at->dw_attr_val.v.val_double);
6886 break;
6887 case dw_val_class_wide_int:
6888 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6889 get_full_len (*at->dw_attr_val.v.val_wide)
6890 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6891 break;
6892 case dw_val_class_vec:
6893 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6894 (at->dw_attr_val.v.val_vec.length
6895 * at->dw_attr_val.v.val_vec.elt_size));
6896 break;
6897 case dw_val_class_flag:
6898 CHECKSUM (at->dw_attr_val.v.val_flag);
6899 break;
6900 case dw_val_class_str:
6901 CHECKSUM_STRING (AT_string (at));
6902 break;
6903
6904 case dw_val_class_addr:
6905 r = AT_addr (at);
6906 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6907 CHECKSUM_STRING (XSTR (r, 0));
6908 break;
6909
6910 case dw_val_class_offset:
6911 CHECKSUM (at->dw_attr_val.v.val_offset);
6912 break;
6913
6914 case dw_val_class_loc:
6915 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6916 loc_checksum (loc, ctx);
6917 break;
6918
6919 case dw_val_class_die_ref:
6920 die_checksum (AT_ref (at), ctx, mark);
6921 break;
6922
6923 case dw_val_class_fde_ref:
6924 case dw_val_class_vms_delta:
6925 case dw_val_class_symview:
6926 case dw_val_class_lbl_id:
6927 case dw_val_class_lineptr:
6928 case dw_val_class_macptr:
6929 case dw_val_class_loclistsptr:
6930 case dw_val_class_high_pc:
6931 break;
6932
6933 case dw_val_class_file:
6934 case dw_val_class_file_implicit:
6935 CHECKSUM_STRING (AT_file (at)->filename);
6936 break;
6937
6938 case dw_val_class_data8:
6939 CHECKSUM (at->dw_attr_val.v.val_data8);
6940 break;
6941
6942 default:
6943 break;
6944 }
6945 }
6946
6947 /* Calculate the checksum of a DIE. */
6948
6949 static void
6950 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6951 {
6952 dw_die_ref c;
6953 dw_attr_node *a;
6954 unsigned ix;
6955
6956 /* To avoid infinite recursion. */
6957 if (die->die_mark)
6958 {
6959 CHECKSUM (die->die_mark);
6960 return;
6961 }
6962 die->die_mark = ++(*mark);
6963
6964 CHECKSUM (die->die_tag);
6965
6966 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6967 attr_checksum (a, ctx, mark);
6968
6969 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6970 }
6971
6972 #undef CHECKSUM
6973 #undef CHECKSUM_BLOCK
6974 #undef CHECKSUM_STRING
6975
6976 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6977 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6978 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6979 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6980 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6981 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6982 #define CHECKSUM_ATTR(FOO) \
6983 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6984
6985 /* Calculate the checksum of a number in signed LEB128 format. */
6986
6987 static void
6988 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6989 {
6990 unsigned char byte;
6991 bool more;
6992
6993 while (1)
6994 {
6995 byte = (value & 0x7f);
6996 value >>= 7;
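/* We are done once the remaining value is pure sign extension and its sign
   agrees with the sign bit (0x40) of the byte just produced.  */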
6997 more = !((value == 0 && (byte & 0x40) == 0)
6998 || (value == -1 && (byte & 0x40) != 0));
6999 if (more)
7000 byte |= 0x80;
7001 CHECKSUM (byte);
7002 if (!more)
7003 break;
7004 }
7005 }
7006
7007 /* Calculate the checksum of a number in unsigned LEB128 format. */
7008
7009 static void
7010 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7011 {
7012 while (1)
7013 {
7014 unsigned char byte = (value & 0x7f);
7015 value >>= 7;
7016 if (value != 0)
7017 /* More bytes to follow. */
7018 byte |= 0x80;
7019 CHECKSUM (byte);
7020 if (value == 0)
7021 break;
7022 }
7023 }
7024
7025 /* Checksum the context of the DIE. This adds the names of any
7026 surrounding namespaces or structures to the checksum. */
7027
7028 static void
7029 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7030 {
7031 const char *name;
7032 dw_die_ref spec;
7033 int tag = die->die_tag;
7034
7035 if (tag != DW_TAG_namespace
7036 && tag != DW_TAG_structure_type
7037 && tag != DW_TAG_class_type)
7038 return;
7039
7040 name = get_AT_string (die, DW_AT_name);
7041
7042 spec = get_AT_ref (die, DW_AT_specification);
7043 if (spec != NULL)
7044 die = spec;
7045
7046 if (die->die_parent != NULL)
7047 checksum_die_context (die->die_parent, ctx);
7048
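/* The letter codes ('C' here; 'D', 'A', 'N', 'R', 'T', 'S' elsewhere) follow
   the DWARF 4 type signature computation algorithm.  */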
7049 CHECKSUM_ULEB128 ('C');
7050 CHECKSUM_ULEB128 (tag);
7051 if (name != NULL)
7052 CHECKSUM_STRING (name);
7053 }
7054
7055 /* Calculate the checksum of a location expression. */
7056
7057 static inline void
7058 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7059 {
7060 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7061 were emitted as a DW_FORM_sdata instead of a location expression. */
7062 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7063 {
7064 CHECKSUM_ULEB128 (DW_FORM_sdata);
7065 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7066 return;
7067 }
7068
7069 /* Otherwise, just checksum the raw location expression. */
7070 while (loc != NULL)
7071 {
7072 inchash::hash hstate;
7073 hashval_t hash;
7074
7075 CHECKSUM_ULEB128 (loc->dtprel);
7076 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7077 hash_loc_operands (loc, hstate);
7078 hash = hstate.end ();
7079 CHECKSUM (hash);
7080 loc = loc->dw_loc_next;
7081 }
7082 }
7083
7084 /* Calculate the checksum of an attribute. */
7085
7086 static void
7087 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7088 struct md5_ctx *ctx, int *mark)
7089 {
7090 dw_loc_descr_ref loc;
7091 rtx r;
7092
7093 if (AT_class (at) == dw_val_class_die_ref)
7094 {
7095 dw_die_ref target_die = AT_ref (at);
7096
7097 /* For pointer and reference types, we checksum only the (qualified)
7098 name of the target type (if there is a name). For friend entries,
7099 we checksum only the (qualified) name of the target type or function.
7100 This allows the checksum to remain the same whether the target type
7101 is complete or not. */
7102 if ((at->dw_attr == DW_AT_type
7103 && (tag == DW_TAG_pointer_type
7104 || tag == DW_TAG_reference_type
7105 || tag == DW_TAG_rvalue_reference_type
7106 || tag == DW_TAG_ptr_to_member_type))
7107 || (at->dw_attr == DW_AT_friend
7108 && tag == DW_TAG_friend))
7109 {
7110 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7111
7112 if (name_attr != NULL)
7113 {
7114 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7115
7116 if (decl == NULL)
7117 decl = target_die;
7118 CHECKSUM_ULEB128 ('N');
7119 CHECKSUM_ULEB128 (at->dw_attr);
7120 if (decl->die_parent != NULL)
7121 checksum_die_context (decl->die_parent, ctx);
7122 CHECKSUM_ULEB128 ('E');
7123 CHECKSUM_STRING (AT_string (name_attr));
7124 return;
7125 }
7126 }
7127
7128 /* For all other references to another DIE, we check to see if the
7129 target DIE has already been visited. If it has, we emit a
7130 backward reference; if not, we descend recursively. */
7131 if (target_die->die_mark > 0)
7132 {
7133 CHECKSUM_ULEB128 ('R');
7134 CHECKSUM_ULEB128 (at->dw_attr);
7135 CHECKSUM_ULEB128 (target_die->die_mark);
7136 }
7137 else
7138 {
7139 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7140
7141 if (decl == NULL)
7142 decl = target_die;
7143 target_die->die_mark = ++(*mark);
7144 CHECKSUM_ULEB128 ('T');
7145 CHECKSUM_ULEB128 (at->dw_attr);
7146 if (decl->die_parent != NULL)
7147 checksum_die_context (decl->die_parent, ctx);
7148 die_checksum_ordered (target_die, ctx, mark);
7149 }
7150 return;
7151 }
7152
7153 CHECKSUM_ULEB128 ('A');
7154 CHECKSUM_ULEB128 (at->dw_attr);
7155
7156 switch (AT_class (at))
7157 {
7158 case dw_val_class_const:
7159 case dw_val_class_const_implicit:
7160 CHECKSUM_ULEB128 (DW_FORM_sdata);
7161 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7162 break;
7163
7164 case dw_val_class_unsigned_const:
7165 case dw_val_class_unsigned_const_implicit:
7166 CHECKSUM_ULEB128 (DW_FORM_sdata);
7167 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7168 break;
7169
7170 case dw_val_class_const_double:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7173 CHECKSUM (at->dw_attr_val.v.val_double);
7174 break;
7175
7176 case dw_val_class_wide_int:
7177 CHECKSUM_ULEB128 (DW_FORM_block);
7178 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7179 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7180 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7181 get_full_len (*at->dw_attr_val.v.val_wide)
7182 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7183 break;
7184
7185 case dw_val_class_vec:
7186 CHECKSUM_ULEB128 (DW_FORM_block);
7187 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7188 * at->dw_attr_val.v.val_vec.elt_size);
7189 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7190 (at->dw_attr_val.v.val_vec.length
7191 * at->dw_attr_val.v.val_vec.elt_size));
7192 break;
7193
7194 case dw_val_class_flag:
7195 CHECKSUM_ULEB128 (DW_FORM_flag);
7196 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7197 break;
7198
7199 case dw_val_class_str:
7200 CHECKSUM_ULEB128 (DW_FORM_string);
7201 CHECKSUM_STRING (AT_string (at));
7202 break;
7203
7204 case dw_val_class_addr:
7205 r = AT_addr (at);
7206 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7207 CHECKSUM_ULEB128 (DW_FORM_string);
7208 CHECKSUM_STRING (XSTR (r, 0));
7209 break;
7210
7211 case dw_val_class_offset:
7212 CHECKSUM_ULEB128 (DW_FORM_sdata);
7213 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7214 break;
7215
7216 case dw_val_class_loc:
7217 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7218 loc_checksum_ordered (loc, ctx);
7219 break;
7220
7221 case dw_val_class_fde_ref:
7222 case dw_val_class_symview:
7223 case dw_val_class_lbl_id:
7224 case dw_val_class_lineptr:
7225 case dw_val_class_macptr:
7226 case dw_val_class_loclistsptr:
7227 case dw_val_class_high_pc:
7228 break;
7229
7230 case dw_val_class_file:
7231 case dw_val_class_file_implicit:
7232 CHECKSUM_ULEB128 (DW_FORM_string);
7233 CHECKSUM_STRING (AT_file (at)->filename);
7234 break;
7235
7236 case dw_val_class_data8:
7237 CHECKSUM (at->dw_attr_val.v.val_data8);
7238 break;
7239
7240 default:
7241 break;
7242 }
7243 }
7244
7245 struct checksum_attributes
7246 {
7247 dw_attr_node *at_name;
7248 dw_attr_node *at_type;
7249 dw_attr_node *at_friend;
7250 dw_attr_node *at_accessibility;
7251 dw_attr_node *at_address_class;
7252 dw_attr_node *at_alignment;
7253 dw_attr_node *at_allocated;
7254 dw_attr_node *at_artificial;
7255 dw_attr_node *at_associated;
7256 dw_attr_node *at_binary_scale;
7257 dw_attr_node *at_bit_offset;
7258 dw_attr_node *at_bit_size;
7259 dw_attr_node *at_bit_stride;
7260 dw_attr_node *at_byte_size;
7261 dw_attr_node *at_byte_stride;
7262 dw_attr_node *at_const_value;
7263 dw_attr_node *at_containing_type;
7264 dw_attr_node *at_count;
7265 dw_attr_node *at_data_location;
7266 dw_attr_node *at_data_member_location;
7267 dw_attr_node *at_decimal_scale;
7268 dw_attr_node *at_decimal_sign;
7269 dw_attr_node *at_default_value;
7270 dw_attr_node *at_digit_count;
7271 dw_attr_node *at_discr;
7272 dw_attr_node *at_discr_list;
7273 dw_attr_node *at_discr_value;
7274 dw_attr_node *at_encoding;
7275 dw_attr_node *at_endianity;
7276 dw_attr_node *at_explicit;
7277 dw_attr_node *at_is_optional;
7278 dw_attr_node *at_location;
7279 dw_attr_node *at_lower_bound;
7280 dw_attr_node *at_mutable;
7281 dw_attr_node *at_ordering;
7282 dw_attr_node *at_picture_string;
7283 dw_attr_node *at_prototyped;
7284 dw_attr_node *at_small;
7285 dw_attr_node *at_segment;
7286 dw_attr_node *at_string_length;
7287 dw_attr_node *at_string_length_bit_size;
7288 dw_attr_node *at_string_length_byte_size;
7289 dw_attr_node *at_threads_scaled;
7290 dw_attr_node *at_upper_bound;
7291 dw_attr_node *at_use_location;
7292 dw_attr_node *at_use_UTF8;
7293 dw_attr_node *at_variable_parameter;
7294 dw_attr_node *at_virtuality;
7295 dw_attr_node *at_visibility;
7296 dw_attr_node *at_vtable_elem_location;
7297 };
7298
7299 /* Collect the attributes that we will want to use for the checksum. */
7300
7301 static void
7302 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7303 {
7304 dw_attr_node *a;
7305 unsigned ix;
7306
7307 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7308 {
7309 switch (a->dw_attr)
7310 {
7311 case DW_AT_name:
7312 attrs->at_name = a;
7313 break;
7314 case DW_AT_type:
7315 attrs->at_type = a;
7316 break;
7317 case DW_AT_friend:
7318 attrs->at_friend = a;
7319 break;
7320 case DW_AT_accessibility:
7321 attrs->at_accessibility = a;
7322 break;
7323 case DW_AT_address_class:
7324 attrs->at_address_class = a;
7325 break;
7326 case DW_AT_alignment:
7327 attrs->at_alignment = a;
7328 break;
7329 case DW_AT_allocated:
7330 attrs->at_allocated = a;
7331 break;
7332 case DW_AT_artificial:
7333 attrs->at_artificial = a;
7334 break;
7335 case DW_AT_associated:
7336 attrs->at_associated = a;
7337 break;
7338 case DW_AT_binary_scale:
7339 attrs->at_binary_scale = a;
7340 break;
7341 case DW_AT_bit_offset:
7342 attrs->at_bit_offset = a;
7343 break;
7344 case DW_AT_bit_size:
7345 attrs->at_bit_size = a;
7346 break;
7347 case DW_AT_bit_stride:
7348 attrs->at_bit_stride = a;
7349 break;
7350 case DW_AT_byte_size:
7351 attrs->at_byte_size = a;
7352 break;
7353 case DW_AT_byte_stride:
7354 attrs->at_byte_stride = a;
7355 break;
7356 case DW_AT_const_value:
7357 attrs->at_const_value = a;
7358 break;
7359 case DW_AT_containing_type:
7360 attrs->at_containing_type = a;
7361 break;
7362 case DW_AT_count:
7363 attrs->at_count = a;
7364 break;
7365 case DW_AT_data_location:
7366 attrs->at_data_location = a;
7367 break;
7368 case DW_AT_data_member_location:
7369 attrs->at_data_member_location = a;
7370 break;
7371 case DW_AT_decimal_scale:
7372 attrs->at_decimal_scale = a;
7373 break;
7374 case DW_AT_decimal_sign:
7375 attrs->at_decimal_sign = a;
7376 break;
7377 case DW_AT_default_value:
7378 attrs->at_default_value = a;
7379 break;
7380 case DW_AT_digit_count:
7381 attrs->at_digit_count = a;
7382 break;
7383 case DW_AT_discr:
7384 attrs->at_discr = a;
7385 break;
7386 case DW_AT_discr_list:
7387 attrs->at_discr_list = a;
7388 break;
7389 case DW_AT_discr_value:
7390 attrs->at_discr_value = a;
7391 break;
7392 case DW_AT_encoding:
7393 attrs->at_encoding = a;
7394 break;
7395 case DW_AT_endianity:
7396 attrs->at_endianity = a;
7397 break;
7398 case DW_AT_explicit:
7399 attrs->at_explicit = a;
7400 break;
7401 case DW_AT_is_optional:
7402 attrs->at_is_optional = a;
7403 break;
7404 case DW_AT_location:
7405 attrs->at_location = a;
7406 break;
7407 case DW_AT_lower_bound:
7408 attrs->at_lower_bound = a;
7409 break;
7410 case DW_AT_mutable:
7411 attrs->at_mutable = a;
7412 break;
7413 case DW_AT_ordering:
7414 attrs->at_ordering = a;
7415 break;
7416 case DW_AT_picture_string:
7417 attrs->at_picture_string = a;
7418 break;
7419 case DW_AT_prototyped:
7420 attrs->at_prototyped = a;
7421 break;
7422 case DW_AT_small:
7423 attrs->at_small = a;
7424 break;
7425 case DW_AT_segment:
7426 attrs->at_segment = a;
7427 break;
7428 case DW_AT_string_length:
7429 attrs->at_string_length = a;
7430 break;
7431 case DW_AT_string_length_bit_size:
7432 attrs->at_string_length_bit_size = a;
7433 break;
7434 case DW_AT_string_length_byte_size:
7435 attrs->at_string_length_byte_size = a;
7436 break;
7437 case DW_AT_threads_scaled:
7438 attrs->at_threads_scaled = a;
7439 break;
7440 case DW_AT_upper_bound:
7441 attrs->at_upper_bound = a;
7442 break;
7443 case DW_AT_use_location:
7444 attrs->at_use_location = a;
7445 break;
7446 case DW_AT_use_UTF8:
7447 attrs->at_use_UTF8 = a;
7448 break;
7449 case DW_AT_variable_parameter:
7450 attrs->at_variable_parameter = a;
7451 break;
7452 case DW_AT_virtuality:
7453 attrs->at_virtuality = a;
7454 break;
7455 case DW_AT_visibility:
7456 attrs->at_visibility = a;
7457 break;
7458 case DW_AT_vtable_elem_location:
7459 attrs->at_vtable_elem_location = a;
7460 break;
7461 default:
7462 break;
7463 }
7464 }
7465 }
7466
7467 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7468
7469 static void
7470 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7471 {
7472 dw_die_ref c;
7473 dw_die_ref decl;
7474 struct checksum_attributes attrs;
7475
7476 CHECKSUM_ULEB128 ('D');
7477 CHECKSUM_ULEB128 (die->die_tag);
7478
7479 memset (&attrs, 0, sizeof (attrs));
7480
7481 decl = get_AT_ref (die, DW_AT_specification);
7482 if (decl != NULL)
7483 collect_checksum_attributes (&attrs, decl);
7484 collect_checksum_attributes (&attrs, die);
7485
7486 CHECKSUM_ATTR (attrs.at_name);
7487 CHECKSUM_ATTR (attrs.at_accessibility);
7488 CHECKSUM_ATTR (attrs.at_address_class);
7489 CHECKSUM_ATTR (attrs.at_allocated);
7490 CHECKSUM_ATTR (attrs.at_artificial);
7491 CHECKSUM_ATTR (attrs.at_associated);
7492 CHECKSUM_ATTR (attrs.at_binary_scale);
7493 CHECKSUM_ATTR (attrs.at_bit_offset);
7494 CHECKSUM_ATTR (attrs.at_bit_size);
7495 CHECKSUM_ATTR (attrs.at_bit_stride);
7496 CHECKSUM_ATTR (attrs.at_byte_size);
7497 CHECKSUM_ATTR (attrs.at_byte_stride);
7498 CHECKSUM_ATTR (attrs.at_const_value);
7499 CHECKSUM_ATTR (attrs.at_containing_type);
7500 CHECKSUM_ATTR (attrs.at_count);
7501 CHECKSUM_ATTR (attrs.at_data_location);
7502 CHECKSUM_ATTR (attrs.at_data_member_location);
7503 CHECKSUM_ATTR (attrs.at_decimal_scale);
7504 CHECKSUM_ATTR (attrs.at_decimal_sign);
7505 CHECKSUM_ATTR (attrs.at_default_value);
7506 CHECKSUM_ATTR (attrs.at_digit_count);
7507 CHECKSUM_ATTR (attrs.at_discr);
7508 CHECKSUM_ATTR (attrs.at_discr_list);
7509 CHECKSUM_ATTR (attrs.at_discr_value);
7510 CHECKSUM_ATTR (attrs.at_encoding);
7511 CHECKSUM_ATTR (attrs.at_endianity);
7512 CHECKSUM_ATTR (attrs.at_explicit);
7513 CHECKSUM_ATTR (attrs.at_is_optional);
7514 CHECKSUM_ATTR (attrs.at_location);
7515 CHECKSUM_ATTR (attrs.at_lower_bound);
7516 CHECKSUM_ATTR (attrs.at_mutable);
7517 CHECKSUM_ATTR (attrs.at_ordering);
7518 CHECKSUM_ATTR (attrs.at_picture_string);
7519 CHECKSUM_ATTR (attrs.at_prototyped);
7520 CHECKSUM_ATTR (attrs.at_small);
7521 CHECKSUM_ATTR (attrs.at_segment);
7522 CHECKSUM_ATTR (attrs.at_string_length);
7523 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7524 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7525 CHECKSUM_ATTR (attrs.at_threads_scaled);
7526 CHECKSUM_ATTR (attrs.at_upper_bound);
7527 CHECKSUM_ATTR (attrs.at_use_location);
7528 CHECKSUM_ATTR (attrs.at_use_UTF8);
7529 CHECKSUM_ATTR (attrs.at_variable_parameter);
7530 CHECKSUM_ATTR (attrs.at_virtuality);
7531 CHECKSUM_ATTR (attrs.at_visibility);
7532 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7533 CHECKSUM_ATTR (attrs.at_type);
7534 CHECKSUM_ATTR (attrs.at_friend);
7535 CHECKSUM_ATTR (attrs.at_alignment);
7536
7537 /* Checksum the child DIEs. */
7538 c = die->die_child;
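/* Children are kept on a circular list: die_child points to the last child,
   whose die_sib wraps back around to the first.  */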
7539 if (c) do {
7540 dw_attr_node *name_attr;
7541
7542 c = c->die_sib;
7543 name_attr = get_AT (c, DW_AT_name);
7544 if (is_template_instantiation (c))
7545 {
7546 /* Ignore instantiations of member type and function templates. */
7547 }
7548 else if (name_attr != NULL
7549 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7550 {
7551 /* Use a shallow checksum for named nested types and member
7552 functions. */
7553 CHECKSUM_ULEB128 ('S');
7554 CHECKSUM_ULEB128 (c->die_tag);
7555 CHECKSUM_STRING (AT_string (name_attr));
7556 }
7557 else
7558 {
7559 /* Use a deep checksum for other children. */
7560 /* Mark this DIE so it gets processed when unmarking. */
7561 if (c->die_mark == 0)
7562 c->die_mark = -1;
7563 die_checksum_ordered (c, ctx, mark);
7564 }
7565 } while (c != die->die_child);
7566
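/* A trailing zero marks the end of the children, as in the DWARF 4 type
   signature algorithm.  */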
7567 CHECKSUM_ULEB128 (0);
7568 }
7569
7570 /* Add a type name and tag to a hash. */
7571 static void
7572 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7573 {
7574 CHECKSUM_ULEB128 (tag);
7575 CHECKSUM_STRING (name);
7576 }
7577
7578 #undef CHECKSUM
7579 #undef CHECKSUM_STRING
7580 #undef CHECKSUM_ATTR
7581 #undef CHECKSUM_SLEB128
7582 #undef CHECKSUM_ULEB128
7583
7584 /* Generate the type signature for DIE. This is computed by generating an
7585 MD5 checksum over the DIE's tag, its relevant attributes, and its
7586 children. Attributes that are references to other DIEs are processed
7587 by recursion, using the MARK field to prevent infinite recursion.
7588 If the DIE is nested inside a namespace or another type, we also
7589 need to include that context in the signature. The lower 64 bits
7590 of the resulting MD5 checksum comprise the signature. */
7591
7592 static void
7593 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7594 {
7595 int mark;
7596 const char *name;
7597 unsigned char checksum[16];
7598 struct md5_ctx ctx;
7599 dw_die_ref decl;
7600 dw_die_ref parent;
7601
7602 name = get_AT_string (die, DW_AT_name);
7603 decl = get_AT_ref (die, DW_AT_specification);
7604 parent = get_die_parent (die);
7605
7606 /* First, compute a signature for just the type name (and its surrounding
7607 context, if any. This is stored in the type unit DIE for link-time
7608 ODR (one-definition rule) checking. */
7609
7610 if (is_cxx () && name != NULL)
7611 {
7612 md5_init_ctx (&ctx);
7613
7614 /* Checksum the names of surrounding namespaces and structures. */
7615 if (parent != NULL)
7616 checksum_die_context (parent, &ctx);
7617
7618 /* Checksum the current DIE. */
7619 die_odr_checksum (die->die_tag, name, &ctx);
7620 md5_finish_ctx (&ctx, checksum);
7621
7622 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7623 }
7624
7625 /* Next, compute the complete type signature. */
7626
7627 md5_init_ctx (&ctx);
7628 mark = 1;
7629 die->die_mark = mark;
7630
7631 /* Checksum the names of surrounding namespaces and structures. */
7632 if (parent != NULL)
7633 checksum_die_context (parent, &ctx);
7634
7635 /* Checksum the DIE and its children. */
7636 die_checksum_ordered (die, &ctx, &mark);
7637 unmark_all_dies (die);
7638 md5_finish_ctx (&ctx, checksum);
7639
7640 /* Store the signature in the type node and link the type DIE and the
7641 type node together. */
7642 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7643 DWARF_TYPE_SIGNATURE_SIZE);
7644 die->comdat_type_p = true;
7645 die->die_id.die_type_node = type_node;
7646 type_node->type_die = die;
7647
7648 /* If the DIE is a specification, link its declaration to the type node
7649 as well. */
7650 if (decl != NULL)
7651 {
7652 decl->comdat_type_p = true;
7653 decl->die_id.die_type_node = type_node;
7654 }
7655 }
7656
7657 /* Do the location expressions look the same? */
7658 static inline int
7659 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7660 {
7661 return loc1->dw_loc_opc == loc2->dw_loc_opc
7662 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7663 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7664 }
7665
7666 /* Do the values look the same? */
7667 static int
7668 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7669 {
7670 dw_loc_descr_ref loc1, loc2;
7671 rtx r1, r2;
7672
7673 if (v1->val_class != v2->val_class)
7674 return 0;
7675
7676 switch (v1->val_class)
7677 {
7678 case dw_val_class_const:
7679 case dw_val_class_const_implicit:
7680 return v1->v.val_int == v2->v.val_int;
7681 case dw_val_class_unsigned_const:
7682 case dw_val_class_unsigned_const_implicit:
7683 return v1->v.val_unsigned == v2->v.val_unsigned;
7684 case dw_val_class_const_double:
7685 return v1->v.val_double.high == v2->v.val_double.high
7686 && v1->v.val_double.low == v2->v.val_double.low;
7687 case dw_val_class_wide_int:
7688 return *v1->v.val_wide == *v2->v.val_wide;
7689 case dw_val_class_vec:
7690 if (v1->v.val_vec.length != v2->v.val_vec.length
7691 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7692 return 0;
7693 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7694 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7695 return 0;
7696 return 1;
7697 case dw_val_class_flag:
7698 return v1->v.val_flag == v2->v.val_flag;
7699 case dw_val_class_str:
7700 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7701
7702 case dw_val_class_addr:
7703 r1 = v1->v.val_addr;
7704 r2 = v2->v.val_addr;
7705 if (GET_CODE (r1) != GET_CODE (r2))
7706 return 0;
7707 return rtx_equal_p (r1, r2);
7708
7709 case dw_val_class_offset:
7710 return v1->v.val_offset == v2->v.val_offset;
7711
7712 case dw_val_class_loc:
7713 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7714 loc1 && loc2;
7715 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7716 if (!same_loc_p (loc1, loc2, mark))
7717 return 0;
7718 return !loc1 && !loc2;
7719
7720 case dw_val_class_die_ref:
7721 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7722
7723 case dw_val_class_symview:
7724 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7725
7726 case dw_val_class_fde_ref:
7727 case dw_val_class_vms_delta:
7728 case dw_val_class_lbl_id:
7729 case dw_val_class_lineptr:
7730 case dw_val_class_macptr:
7731 case dw_val_class_loclistsptr:
7732 case dw_val_class_high_pc:
7733 return 1;
7734
7735 case dw_val_class_file:
7736 case dw_val_class_file_implicit:
7737 return v1->v.val_file == v2->v.val_file;
7738
7739 case dw_val_class_data8:
7740 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7741
7742 default:
7743 return 1;
7744 }
7745 }
7746
7747 /* Do the attributes look the same? */
7748
7749 static int
7750 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7751 {
7752 if (at1->dw_attr != at2->dw_attr)
7753 return 0;
7754
7755 /* We don't care that this was compiled with a different compiler
7756 snapshot; if the output is the same, that's what matters. */
7757 if (at1->dw_attr == DW_AT_producer)
7758 return 1;
7759
7760 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7761 }
7762
7763 /* Do the dies look the same? */
7764
7765 static int
7766 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7767 {
7768 dw_die_ref c1, c2;
7769 dw_attr_node *a1;
7770 unsigned ix;
7771
7772 /* To avoid infinite recursion. */
7773 if (die1->die_mark)
7774 return die1->die_mark == die2->die_mark;
7775 die1->die_mark = die2->die_mark = ++(*mark);
7776
7777 if (die1->die_tag != die2->die_tag)
7778 return 0;
7779
7780 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7781 return 0;
7782
7783 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7784 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7785 return 0;
7786
7787 c1 = die1->die_child;
7788 c2 = die2->die_child;
7789 if (! c1)
7790 {
7791 if (c2)
7792 return 0;
7793 }
7794 else
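/* Compare the children pairwise around both circular sibling lists; the
   DIEs match only if both lists wrap back to the start at the same point.  */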
7795 for (;;)
7796 {
7797 if (!same_die_p (c1, c2, mark))
7798 return 0;
7799 c1 = c1->die_sib;
7800 c2 = c2->die_sib;
7801 if (c1 == die1->die_child)
7802 {
7803 if (c2 == die2->die_child)
7804 break;
7805 else
7806 return 0;
7807 }
7808 }
7809
7810 return 1;
7811 }
7812
7813 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7814 children, and set die_symbol. */
7815
7816 static void
7817 compute_comp_unit_symbol (dw_die_ref unit_die)
7818 {
7819 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7820 const char *base = die_name ? lbasename (die_name) : "anonymous";
7821 char *name = XALLOCAVEC (char, strlen (base) + 64);
7822 char *p;
7823 int i, mark;
7824 unsigned char checksum[16];
7825 struct md5_ctx ctx;
7826
7827 /* Compute the checksum of the DIE, then append part of it as hex digits to
7828 the base filename of the unit. */
7829
7830 md5_init_ctx (&ctx);
7831 mark = 0;
7832 die_checksum (unit_die, &ctx, &mark);
7833 unmark_all_dies (unit_die);
7834 md5_finish_ctx (&ctx, checksum);
7835
7836 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7837 not start with a letter but with anything valid for filenames and
7838 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7839 character is not a letter. */
7840 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7841 clean_symbol_name (name);
7842
7843 p = name + strlen (name);
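/* Append the first four bytes of the checksum as eight hex digits.  */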
7844 for (i = 0; i < 4; i++)
7845 {
7846 sprintf (p, "%.2x", checksum[i]);
7847 p += 2;
7848 }
7849
7850 unit_die->die_id.die_symbol = xstrdup (name);
7851 }
7852
7853 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7854
7855 static int
7856 is_type_die (dw_die_ref die)
7857 {
7858 switch (die->die_tag)
7859 {
7860 case DW_TAG_array_type:
7861 case DW_TAG_class_type:
7862 case DW_TAG_interface_type:
7863 case DW_TAG_enumeration_type:
7864 case DW_TAG_pointer_type:
7865 case DW_TAG_reference_type:
7866 case DW_TAG_rvalue_reference_type:
7867 case DW_TAG_string_type:
7868 case DW_TAG_structure_type:
7869 case DW_TAG_subroutine_type:
7870 case DW_TAG_union_type:
7871 case DW_TAG_ptr_to_member_type:
7872 case DW_TAG_set_type:
7873 case DW_TAG_subrange_type:
7874 case DW_TAG_base_type:
7875 case DW_TAG_const_type:
7876 case DW_TAG_file_type:
7877 case DW_TAG_packed_type:
7878 case DW_TAG_volatile_type:
7879 case DW_TAG_typedef:
7880 return 1;
7881 default:
7882 return 0;
7883 }
7884 }
7885
7886 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7887 Basically, we want to choose the bits that are likely to be shared between
7888 compilations (types) and leave out the bits that are specific to individual
7889 compilations (functions). */
7890
7891 static int
7892 is_comdat_die (dw_die_ref c)
7893 {
7894 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7895 we do for stabs. The advantage is a greater likelihood of sharing between
7896 objects that don't include headers in the same order (and therefore would
7897 put the base types in a different comdat). jason 8/28/00 */
7898
7899 if (c->die_tag == DW_TAG_base_type)
7900 return 0;
7901
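/* For modified types (pointers, references, cv-qualified types), follow the
   decision made for the type they modify.  */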
7902 if (c->die_tag == DW_TAG_pointer_type
7903 || c->die_tag == DW_TAG_reference_type
7904 || c->die_tag == DW_TAG_rvalue_reference_type
7905 || c->die_tag == DW_TAG_const_type
7906 || c->die_tag == DW_TAG_volatile_type)
7907 {
7908 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7909
7910 return t ? is_comdat_die (t) : 0;
7911 }
7912
7913 return is_type_die (c);
7914 }
7915
7916 /* Returns true iff C is a compile-unit DIE. */
7917
7918 static inline bool
7919 is_cu_die (dw_die_ref c)
7920 {
7921 return c && (c->die_tag == DW_TAG_compile_unit
7922 || c->die_tag == DW_TAG_skeleton_unit);
7923 }
7924
7925 /* Returns true iff C is a unit DIE of some sort. */
7926
7927 static inline bool
7928 is_unit_die (dw_die_ref c)
7929 {
7930 return c && (c->die_tag == DW_TAG_compile_unit
7931 || c->die_tag == DW_TAG_partial_unit
7932 || c->die_tag == DW_TAG_type_unit
7933 || c->die_tag == DW_TAG_skeleton_unit);
7934 }
7935
7936 /* Returns true iff C is a namespace DIE. */
7937
7938 static inline bool
7939 is_namespace_die (dw_die_ref c)
7940 {
7941 return c && c->die_tag == DW_TAG_namespace;
7942 }
7943
7944 /* Returns true iff C is a class or structure DIE. */
7945
7946 static inline bool
7947 is_class_die (dw_die_ref c)
7948 {
7949 return c && (c->die_tag == DW_TAG_class_type
7950 || c->die_tag == DW_TAG_structure_type);
7951 }
7952
7953 /* Return non-zero if this DIE is a template parameter. */
7954
7955 static inline bool
7956 is_template_parameter (dw_die_ref die)
7957 {
7958 switch (die->die_tag)
7959 {
7960 case DW_TAG_template_type_param:
7961 case DW_TAG_template_value_param:
7962 case DW_TAG_GNU_template_template_param:
7963 case DW_TAG_GNU_template_parameter_pack:
7964 return true;
7965 default:
7966 return false;
7967 }
7968 }
7969
7970 /* Return non-zero if this DIE represents a template instantiation. */
7971
7972 static inline bool
7973 is_template_instantiation (dw_die_ref die)
7974 {
7975 dw_die_ref c;
7976
7977 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7978 return false;
7979 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7980 return false;
7981 }
7982
7983 static char *
7984 gen_internal_sym (const char *prefix)
7985 {
7986 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7987
7988 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7989 return xstrdup (buf);
7990 }
7991
7992 /* Return non-zero if this DIE is a declaration. */
7993
7994 static int
7995 is_declaration_die (dw_die_ref die)
7996 {
7997 dw_attr_node *a;
7998 unsigned ix;
7999
8000 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8001 if (a->dw_attr == DW_AT_declaration)
8002 return 1;
8003
8004 return 0;
8005 }
8006
8007 /* Return non-zero if this DIE is nested inside a subprogram. */
8008
8009 static int
8010 is_nested_in_subprogram (dw_die_ref die)
8011 {
8012 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8013
8014 if (decl == NULL)
8015 decl = die;
8016 return local_scope_p (decl);
8017 }
8018
8019 /* Return non-zero if this DIE contains a defining declaration of a
8020 subprogram. */
8021
8022 static int
8023 contains_subprogram_definition (dw_die_ref die)
8024 {
8025 dw_die_ref c;
8026
8027 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8028 return 1;
8029 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8030 return 0;
8031 }
8032
8033 /* Return non-zero if this is a type DIE that should be moved to a
8034 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8035 unit type. */
8036
8037 static int
8038 should_move_die_to_comdat (dw_die_ref die)
8039 {
8040 switch (die->die_tag)
8041 {
8042 case DW_TAG_class_type:
8043 case DW_TAG_structure_type:
8044 case DW_TAG_enumeration_type:
8045 case DW_TAG_union_type:
8046 /* Don't move declarations, inlined instances, types nested in a
8047 subprogram, or types that contain subprogram definitions. */
8048 if (is_declaration_die (die)
8049 || get_AT (die, DW_AT_abstract_origin)
8050 || is_nested_in_subprogram (die)
8051 || contains_subprogram_definition (die))
8052 return 0;
8053 return 1;
8054 case DW_TAG_array_type:
8055 case DW_TAG_interface_type:
8056 case DW_TAG_pointer_type:
8057 case DW_TAG_reference_type:
8058 case DW_TAG_rvalue_reference_type:
8059 case DW_TAG_string_type:
8060 case DW_TAG_subroutine_type:
8061 case DW_TAG_ptr_to_member_type:
8062 case DW_TAG_set_type:
8063 case DW_TAG_subrange_type:
8064 case DW_TAG_base_type:
8065 case DW_TAG_const_type:
8066 case DW_TAG_file_type:
8067 case DW_TAG_packed_type:
8068 case DW_TAG_volatile_type:
8069 case DW_TAG_typedef:
8070 default:
8071 return 0;
8072 }
8073 }
8074
8075 /* Make a clone of DIE. */
8076
8077 static dw_die_ref
8078 clone_die (dw_die_ref die)
8079 {
8080 dw_die_ref clone = new_die_raw (die->die_tag);
8081 dw_attr_node *a;
8082 unsigned ix;
8083
8084 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8085 add_dwarf_attr (clone, a);
8086
8087 return clone;
8088 }
8089
8090 /* Make a clone of the tree rooted at DIE. */
8091
8092 static dw_die_ref
8093 clone_tree (dw_die_ref die)
8094 {
8095 dw_die_ref c;
8096 dw_die_ref clone = clone_die (die);
8097
8098 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8099
8100 return clone;
8101 }
8102
8103 /* Make a clone of DIE as a declaration. */
8104
8105 static dw_die_ref
8106 clone_as_declaration (dw_die_ref die)
8107 {
8108 dw_die_ref clone;
8109 dw_die_ref decl;
8110 dw_attr_node *a;
8111 unsigned ix;
8112
8113 /* If the DIE is already a declaration, just clone it. */
8114 if (is_declaration_die (die))
8115 return clone_die (die);
8116
8117 /* If the DIE is a specification, just clone its declaration DIE. */
8118 decl = get_AT_ref (die, DW_AT_specification);
8119 if (decl != NULL)
8120 {
8121 clone = clone_die (decl);
8122 if (die->comdat_type_p)
8123 add_AT_die_ref (clone, DW_AT_signature, die);
8124 return clone;
8125 }
8126
8127 clone = new_die_raw (die->die_tag);
8128
8129 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8130 {
8131 /* We don't want to copy over all attributes.
8132 For example we don't want DW_AT_byte_size because otherwise we will no
8133 longer have a declaration and GDB will treat it as a definition. */
8134
8135 switch (a->dw_attr)
8136 {
8137 case DW_AT_abstract_origin:
8138 case DW_AT_artificial:
8139 case DW_AT_containing_type:
8140 case DW_AT_external:
8141 case DW_AT_name:
8142 case DW_AT_type:
8143 case DW_AT_virtuality:
8144 case DW_AT_linkage_name:
8145 case DW_AT_MIPS_linkage_name:
8146 add_dwarf_attr (clone, a);
8147 break;
8148 case DW_AT_byte_size:
8149 case DW_AT_alignment:
8150 default:
8151 break;
8152 }
8153 }
8154
8155 if (die->comdat_type_p)
8156 add_AT_die_ref (clone, DW_AT_signature, die);
8157
8158 add_AT_flag (clone, DW_AT_declaration, 1);
8159 return clone;
8160 }
8161
8162
8163 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8164
8165 struct decl_table_entry
8166 {
8167 dw_die_ref orig;
8168 dw_die_ref copy;
8169 };
8170
8171 /* Helpers to manipulate hash table of copied declarations. */
8172
8173 /* Hashtable helpers. */
8174
8175 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8176 {
8177 typedef die_struct *compare_type;
8178 static inline hashval_t hash (const decl_table_entry *);
8179 static inline bool equal (const decl_table_entry *, const die_struct *);
8180 };
8181
8182 inline hashval_t
8183 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8184 {
8185 return htab_hash_pointer (entry->orig);
8186 }
8187
8188 inline bool
8189 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8190 const die_struct *entry2)
8191 {
8192 return entry1->orig == entry2;
8193 }
8194
8195 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8196
8197 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8198 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8199 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8200 to check if the ancestor has already been copied into UNIT. */
8201
8202 static dw_die_ref
8203 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8204 decl_hash_type *decl_table)
8205 {
8206 dw_die_ref parent = die->die_parent;
8207 dw_die_ref new_parent = unit;
8208 dw_die_ref copy;
8209 decl_table_entry **slot = NULL;
8210 struct decl_table_entry *entry = NULL;
8211
8212 if (decl_table)
8213 {
8214 /* Check if the entry has already been copied to UNIT. */
8215 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8216 INSERT);
8217 if (*slot != HTAB_EMPTY_ENTRY)
8218 {
8219 entry = *slot;
8220 return entry->copy;
8221 }
8222
8223 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8224 entry = XCNEW (struct decl_table_entry);
8225 entry->orig = die;
8226 entry->copy = NULL;
8227 *slot = entry;
8228 }
8229
8230 if (parent != NULL)
8231 {
8232 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8233 if (spec != NULL)
8234 parent = spec;
8235 if (!is_unit_die (parent))
8236 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8237 }
8238
8239 copy = clone_as_declaration (die);
8240 add_child_die (new_parent, copy);
8241
8242 if (decl_table)
8243 {
8244 /* Record the pointer to the copy. */
8245 entry->copy = copy;
8246 }
8247
8248 return copy;
8249 }
8250 /* Copy the declaration context to the new type unit DIE. This includes
8251 any surrounding namespace or type declarations. If the DIE has an
8252 DW_AT_specification attribute, this also copies the attributes and children
8253 attached to the specification, and returns a pointer to the original
8254 parent of the declaration DIE. Returns NULL otherwise. */
8255
8256 static dw_die_ref
8257 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8258 {
8259 dw_die_ref decl;
8260 dw_die_ref new_decl;
8261 dw_die_ref orig_parent = NULL;
8262
8263 decl = get_AT_ref (die, DW_AT_specification);
8264 if (decl == NULL)
8265 decl = die;
8266 else
8267 {
8268 unsigned ix;
8269 dw_die_ref c;
8270 dw_attr_node *a;
8271
8272 /* The original DIE will be changed to a declaration, and must
8273 be moved to be a child of the original declaration DIE. */
8274 orig_parent = decl->die_parent;
8275
8276 /* Copy the type node pointer from the new DIE to the original
8277 declaration DIE so we can forward references later. */
8278 decl->comdat_type_p = true;
8279 decl->die_id.die_type_node = die->die_id.die_type_node;
8280
8281 remove_AT (die, DW_AT_specification);
8282
8283 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8284 {
8285 if (a->dw_attr != DW_AT_name
8286 && a->dw_attr != DW_AT_declaration
8287 && a->dw_attr != DW_AT_external)
8288 add_dwarf_attr (die, a);
8289 }
8290
8291 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8292 }
8293
8294 if (decl->die_parent != NULL
8295 && !is_unit_die (decl->die_parent))
8296 {
8297 new_decl = copy_ancestor_tree (unit, decl, NULL);
8298 if (new_decl != NULL)
8299 {
8300 remove_AT (new_decl, DW_AT_signature);
8301 add_AT_specification (die, new_decl);
8302 }
8303 }
8304
8305 return orig_parent;
8306 }
8307
8308 /* Generate the skeleton ancestor tree for the given NODE, then clone
8309 the DIE and add the clone into the tree. */
8310
8311 static void
8312 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8313 {
8314 if (node->new_die != NULL)
8315 return;
8316
8317 node->new_die = clone_as_declaration (node->old_die);
8318
8319 if (node->parent != NULL)
8320 {
8321 generate_skeleton_ancestor_tree (node->parent);
8322 add_child_die (node->parent->new_die, node->new_die);
8323 }
8324 }
8325
8326 /* Generate a skeleton tree of DIEs containing any declarations that are
8327 found in the original tree. We traverse the tree looking for declaration
8328 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8329
8330 static void
8331 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8332 {
8333 skeleton_chain_node node;
8334 dw_die_ref c;
8335 dw_die_ref first;
8336 dw_die_ref prev = NULL;
8337 dw_die_ref next = NULL;
8338
8339 node.parent = parent;
8340
8341 first = c = parent->old_die->die_child;
8342 if (c)
8343 next = c->die_sib;
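/* Walk the circular child list, computing NEXT up front and tracking PREV,
   because the current child may be removed or replaced below.  */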
8344 if (c) do {
8345 if (prev == NULL || prev->die_sib == c)
8346 prev = c;
8347 c = next;
8348 next = (c == first ? NULL : c->die_sib);
8349 node.old_die = c;
8350 node.new_die = NULL;
8351 if (is_declaration_die (c))
8352 {
8353 if (is_template_instantiation (c))
8354 {
8355 /* Instantiated templates do not need to be cloned into the
8356 type unit. Just move the DIE and its children back to
8357 the skeleton tree (in the main CU). */
8358 remove_child_with_prev (c, prev);
8359 add_child_die (parent->new_die, c);
8360 c = prev;
8361 }
8362 else if (c->comdat_type_p)
8363 {
8364 /* This is the skeleton left by an earlier break_out_comdat_types
8365 pass for this type. Clone the existing DIE, but keep the children
8366 under the original (which is in the main CU). */
8367 dw_die_ref clone = clone_die (c);
8368
8369 replace_child (c, clone, prev);
8370 generate_skeleton_ancestor_tree (parent);
8371 add_child_die (parent->new_die, c);
8372 c = clone;
8373 continue;
8374 }
8375 else
8376 {
8377 /* Clone the existing DIE, move the original to the skeleton
8378 tree (which is in the main CU), and put the clone, with
8379 all the original's children, where the original came from
8380 (which is about to be moved to the type unit). */
8381 dw_die_ref clone = clone_die (c);
8382 move_all_children (c, clone);
8383
8384 /* If the original has a DW_AT_object_pointer attribute,
8385 it would now point to a child DIE just moved to the
8386 cloned tree, so we need to remove that attribute from
8387 the original. */
8388 remove_AT (c, DW_AT_object_pointer);
8389
8390 replace_child (c, clone, prev);
8391 generate_skeleton_ancestor_tree (parent);
8392 add_child_die (parent->new_die, c);
8393 node.old_die = clone;
8394 node.new_die = c;
8395 c = clone;
8396 }
8397 }
8398 generate_skeleton_bottom_up (&node);
8399 } while (next != NULL);
8400 }
8401
8402 /* Wrapper function for generate_skeleton_bottom_up. */
8403
8404 static dw_die_ref
8405 generate_skeleton (dw_die_ref die)
8406 {
8407 skeleton_chain_node node;
8408
8409 node.old_die = die;
8410 node.new_die = NULL;
8411 node.parent = NULL;
8412
8413 /* If this type definition is nested inside another type,
8414 and is not an instantiation of a template, always leave
8415 at least a declaration in its place. */
8416 if (die->die_parent != NULL
8417 && is_type_die (die->die_parent)
8418 && !is_template_instantiation (die))
8419 node.new_die = clone_as_declaration (die);
8420
8421 generate_skeleton_bottom_up (&node);
8422 return node.new_die;
8423 }
8424
8425 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8426 declaration. The original DIE is moved to a new compile unit so that
8427 existing references to it follow it to the new location. If any of the
8428 original DIE's descendants is a declaration, we need to replace the
8429 original DIE with a skeleton tree and move the declarations back into the
8430 skeleton tree. */
8431
8432 static dw_die_ref
8433 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8434 dw_die_ref prev)
8435 {
8436 dw_die_ref skeleton, orig_parent;
8437
8438 /* Copy the declaration context to the type unit DIE. If the returned
8439 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8440 that DIE. */
8441 orig_parent = copy_declaration_context (unit, child);
8442
8443 skeleton = generate_skeleton (child);
8444 if (skeleton == NULL)
8445 remove_child_with_prev (child, prev);
8446 else
8447 {
8448 skeleton->comdat_type_p = true;
8449 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8450
8451 /* If the original DIE was a specification, we need to put
8452 the skeleton under the parent DIE of the declaration.
8453 This leaves the original declaration in the tree, but
8454 it will be pruned later since there are no longer any
8455 references to it. */
8456 if (orig_parent != NULL)
8457 {
8458 remove_child_with_prev (child, prev);
8459 add_child_die (orig_parent, skeleton);
8460 }
8461 else
8462 replace_child (child, skeleton, prev);
8463 }
8464
8465 return skeleton;
8466 }
8467
8468 static void
8469 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8470 comdat_type_node *type_node,
8471 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8472
8473 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8474 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8475 DWARF procedure references in the DW_AT_location attribute. */
8476
8477 static dw_die_ref
8478 copy_dwarf_procedure (dw_die_ref die,
8479 comdat_type_node *type_node,
8480 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8481 {
8482 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8483
8484 /* DWARF procedures are not supposed to have children... */
8485 gcc_assert (die->die_child == NULL);
8486
8487 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8488 gcc_assert (vec_safe_length (die->die_attr) == 1
8489 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8490
8491 /* Do not copy DWARF procedures more than once. */
8492 bool existed;
8493 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8494 if (existed)
8495 return die_copy;
8496
8497 die_copy = clone_die (die);
8498 add_child_die (type_node->root_die, die_copy);
8499 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8500 return die_copy;
8501 }
8502
8503 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8504 procedures in DIE's attributes. */
8505
8506 static void
8507 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8508 comdat_type_node *type_node,
8509 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8510 {
8511 dw_attr_node *a;
8512 unsigned i;
8513
8514 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8515 {
8516 dw_loc_descr_ref loc;
8517
8518 if (a->dw_attr_val.val_class != dw_val_class_loc)
8519 continue;
8520
8521 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8522 {
8523 switch (loc->dw_loc_opc)
8524 {
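/* These operators carry a DIE reference to a DWARF procedure; redirect the
   reference to the procedure's copy in the type unit.  */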
8525 case DW_OP_call2:
8526 case DW_OP_call4:
8527 case DW_OP_call_ref:
8528 gcc_assert (loc->dw_loc_oprnd1.val_class
8529 == dw_val_class_die_ref);
8530 loc->dw_loc_oprnd1.v.val_die_ref.die
8531 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8532 type_node,
8533 copied_dwarf_procs);
8534
8535 default:
8536 break;
8537 }
8538 }
8539 }
8540 }
8541
8542 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8543 rewrite references to point to the copies.
8544
8545 References are looked for in DIE's attributes and, recursively, in the
8546 attributes of its children that are location descriptions. COPIED_DWARF_PROCS
8547 is a mapping from old DWARF procedures to their copies; it is used to avoid
8548 copying the same DWARF procedure twice under TYPE_NODE. */
8549
8550 static void
8551 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8552 comdat_type_node *type_node,
8553 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8554 {
8555 dw_die_ref c;
8556
8557 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8558 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8559 type_node,
8560 copied_dwarf_procs));
8561 }
8562
8563 /* Traverse the DIE and set up additional .debug_types or .debug_info
8564 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8565 section. */
8566
8567 static void
8568 break_out_comdat_types (dw_die_ref die)
8569 {
8570 dw_die_ref c;
8571 dw_die_ref first;
8572 dw_die_ref prev = NULL;
8573 dw_die_ref next = NULL;
8574 dw_die_ref unit = NULL;
8575
8576 first = c = die->die_child;
8577 if (c)
8578 next = c->die_sib;
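/* Compute NEXT before processing C: C may be moved to a new type unit or
   replaced by a skeleton below.  */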
8579 if (c) do {
8580 if (prev == NULL || prev->die_sib == c)
8581 prev = c;
8582 c = next;
8583 next = (c == first ? NULL : c->die_sib);
8584 if (should_move_die_to_comdat (c))
8585 {
8586 dw_die_ref replacement;
8587 comdat_type_node *type_node;
8588
8589 /* Break out nested types into their own type units. */
8590 break_out_comdat_types (c);
8591
8592 /* Create a new type unit DIE as the root for the new tree, and
8593 add it to the list of comdat types. */
8594 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8595 add_AT_unsigned (unit, DW_AT_language,
8596 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8597 type_node = ggc_cleared_alloc<comdat_type_node> ();
8598 type_node->root_die = unit;
8599 type_node->next = comdat_type_list;
8600 comdat_type_list = type_node;
8601
8602 /* Generate the type signature. */
8603 generate_type_signature (c, type_node);
8604
8605 /* Copy the declaration context, attributes, and children of the
8606 declaration into the new type unit DIE, then remove this DIE
8607 from the main CU (or replace it with a skeleton if necessary). */
8608 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8609 type_node->skeleton_die = replacement;
8610
8611 /* Add the DIE to the new compunit. */
8612 add_child_die (unit, c);
8613
8614 /* Types can reference DWARF procedures for type size or data location
8615 expressions. Calls in DWARF expressions cannot target procedures
8616 that are not in the same section. So we must copy DWARF procedures
8617 along with this type and then rewrite references to them. */
8618 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8619 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8620
8621 if (replacement != NULL)
8622 c = replacement;
8623 }
8624 else if (c->die_tag == DW_TAG_namespace
8625 || c->die_tag == DW_TAG_class_type
8626 || c->die_tag == DW_TAG_structure_type
8627 || c->die_tag == DW_TAG_union_type)
8628 {
8629 /* Look for nested types that can be broken out. */
8630 break_out_comdat_types (c);
8631 }
8632 } while (next != NULL);
8633 }
8634
8635 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8636 Enter all the cloned children into the hash table decl_table. */
8637
8638 static dw_die_ref
8639 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8640 {
8641 dw_die_ref c;
8642 dw_die_ref clone;
8643 struct decl_table_entry *entry;
8644 decl_table_entry **slot;
8645
8646 if (die->die_tag == DW_TAG_subprogram)
8647 clone = clone_as_declaration (die);
8648 else
8649 clone = clone_die (die);
8650
8651 slot = decl_table->find_slot_with_hash (die,
8652 htab_hash_pointer (die), INSERT);
8653
8654 /* Assert that DIE isn't in the hash table yet. If it were already
8655 there, its ancestors would necessarily be there as well, and
8656 clone_tree_partial wouldn't have been called. */
8657 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8658
8659 entry = XCNEW (struct decl_table_entry);
8660 entry->orig = die;
8661 entry->copy = clone;
8662 *slot = entry;
8663
8664 if (die->die_tag != DW_TAG_subprogram)
8665 FOR_EACH_CHILD (die, c,
8666 add_child_die (clone, clone_tree_partial (c, decl_table)));
8667
8668 return clone;
8669 }
8670
8671 /* Walk the DIE and its children, looking for references to incomplete
8672 or trivial types that are unmarked (i.e., that are not in the current
8673 type_unit). */
8674
8675 static void
8676 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8677 {
8678 dw_die_ref c;
8679 dw_attr_node *a;
8680 unsigned ix;
8681
8682 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8683 {
8684 if (AT_class (a) == dw_val_class_die_ref)
8685 {
8686 dw_die_ref targ = AT_ref (a);
8687 decl_table_entry **slot;
8688 struct decl_table_entry *entry;
8689
8690 if (targ->die_mark != 0 || targ->comdat_type_p)
8691 continue;
8692
8693 slot = decl_table->find_slot_with_hash (targ,
8694 htab_hash_pointer (targ),
8695 INSERT);
8696
8697 if (*slot != HTAB_EMPTY_ENTRY)
8698 {
8699 /* TARG has already been copied, so we just need to
8700 modify the reference to point to the copy. */
8701 entry = *slot;
8702 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8703 }
8704 else
8705 {
8706 dw_die_ref parent = unit;
8707 dw_die_ref copy = clone_die (targ);
8708
8709 /* Record in DECL_TABLE that TARG has been copied.
8710 Need to do this now, before the recursive call,
8711 because DECL_TABLE may be expanded and SLOT
8712 would no longer be a valid pointer. */
8713 entry = XCNEW (struct decl_table_entry);
8714 entry->orig = targ;
8715 entry->copy = copy;
8716 *slot = entry;
8717
8718 /* If TARG is not a declaration DIE, we need to copy its
8719 children. */
8720 if (!is_declaration_die (targ))
8721 {
8722 FOR_EACH_CHILD (
8723 targ, c,
8724 add_child_die (copy,
8725 clone_tree_partial (c, decl_table)));
8726 }
8727
8728 /* Make sure the cloned tree is marked as part of the
8729 type unit. */
8730 mark_dies (copy);
8731
8732 /* If TARG has surrounding context, copy its ancestor tree
8733 into the new type unit. */
8734 if (targ->die_parent != NULL
8735 && !is_unit_die (targ->die_parent))
8736 parent = copy_ancestor_tree (unit, targ->die_parent,
8737 decl_table);
8738
8739 add_child_die (parent, copy);
8740 a->dw_attr_val.v.val_die_ref.die = copy;
8741
8742 /* Make sure the newly-copied DIE is walked. If it was
8743 installed in a previously-added context, it won't
8744 get visited otherwise. */
8745 if (parent != unit)
8746 {
8747 /* Find the highest point of the newly-added tree,
8748 mark each node along the way, and walk from there. */
8749 parent->die_mark = 1;
8750 while (parent->die_parent
8751 && parent->die_parent->die_mark == 0)
8752 {
8753 parent = parent->die_parent;
8754 parent->die_mark = 1;
8755 }
8756 copy_decls_walk (unit, parent, decl_table);
8757 }
8758 }
8759 }
8760 }
8761
8762 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8763 }
8764
8765 /* Copy declarations for "unworthy" types into the new comdat section.
8766 Incomplete types, modified types, and certain other types aren't broken
8767 out into comdat sections of their own, so they don't have a signature,
8768 and we need to copy the declaration into the same section so that we
8769 don't have an external reference. */
8770
8771 static void
8772 copy_decls_for_unworthy_types (dw_die_ref unit)
8773 {
8774 mark_dies (unit);
8775 decl_hash_type decl_table (10);
8776 copy_decls_walk (unit, unit, &decl_table);
8777 unmark_dies (unit);
8778 }
8779
8780 /* Traverse the DIE and add a sibling attribute if it may have the
8781 effect of speeding up access to siblings. To save some space,
8782 avoid generating sibling attributes for DIEs without children. */
8783
8784 static void
8785 add_sibling_attributes (dw_die_ref die)
8786 {
8787 dw_die_ref c;
8788
8789 if (! die->die_child)
8790 return;
8791
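/* die_child points to the parent's last child; that child has no following
   sibling, so it gets no DW_AT_sibling attribute.  */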
8792 if (die->die_parent && die != die->die_parent->die_child)
8793 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8794
8795 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8796 }
8797
8798 /* Output all location lists for the DIE and its children. */
8799
8800 static void
8801 output_location_lists (dw_die_ref die)
8802 {
8803 dw_die_ref c;
8804 dw_attr_node *a;
8805 unsigned ix;
8806
8807 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8808 if (AT_class (a) == dw_val_class_loc_list)
8809 output_loc_list (AT_loc_list (a));
8810
8811 FOR_EACH_CHILD (die, c, output_location_lists (c));
8812 }
8813
8814 /* During assign_location_list_indexes and output_loclists_offsets this is
8815 the current index; afterwards it holds the number of assigned indexes
8816 (i.e. how large the .debug_loclists* offset table should be). */
8817 static unsigned int loc_list_idx;
8818
8819 /* Output all location list offsets for the DIE and its children. */
8820
8821 static void
8822 output_loclists_offsets (dw_die_ref die)
8823 {
8824 dw_die_ref c;
8825 dw_attr_node *a;
8826 unsigned ix;
8827
8828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8829 if (AT_class (a) == dw_val_class_loc_list)
8830 {
8831 dw_loc_list_ref l = AT_loc_list (a);
8832 if (l->offset_emitted)
8833 continue;
8834 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8835 loc_section_label, NULL);
8836 gcc_assert (l->hash == loc_list_idx);
8837 loc_list_idx++;
8838 l->offset_emitted = true;
8839 }
8840
8841 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8842 }
8843
8844 /* Recursively set indexes of location lists. */
8845
8846 static void
8847 assign_location_list_indexes (dw_die_ref die)
8848 {
8849 dw_die_ref c;
8850 dw_attr_node *a;
8851 unsigned ix;
8852
8853 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8854 if (AT_class (a) == dw_val_class_loc_list)
8855 {
8856 dw_loc_list_ref list = AT_loc_list (a);
8857 if (!list->num_assigned)
8858 {
8859 list->num_assigned = true;
8860 list->hash = loc_list_idx++;
8861 }
8862 }
8863
8864 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8865 }
8866
8867 /* We want to limit the number of external references, because they are
8868 larger than local references: a relocation takes multiple words, and
8869 even a sig8 reference is always eight bytes, whereas a local reference
8870 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8871 So if we encounter multiple external references to the same type DIE, we
8872 make a local typedef stub for it and redirect all references there.
8873
8874 This is the element of the hash table for keeping track of these
8875 references. */
8876
8877 struct external_ref
8878 {
8879 dw_die_ref type;
8880 dw_die_ref stub;
8881 unsigned n_refs;
8882 };
8883
8884 /* Hashtable helpers. */
8885
8886 struct external_ref_hasher : free_ptr_hash <external_ref>
8887 {
8888 static inline hashval_t hash (const external_ref *);
8889 static inline bool equal (const external_ref *, const external_ref *);
8890 };
8891
8892 inline hashval_t
8893 external_ref_hasher::hash (const external_ref *r)
8894 {
8895 dw_die_ref die = r->type;
8896 hashval_t h = 0;
8897
8898 /* We can't use the address of the DIE for hashing, because
8899 that will make the order of the stub DIEs non-deterministic. */
8900 if (! die->comdat_type_p)
8901 /* We have a symbol; use it to compute a hash. */
8902 h = htab_hash_string (die->die_id.die_symbol);
8903 else
8904 {
8905 /* We have a type signature; use a subset of the bits as the hash.
8906 The 8-byte signature is at least as large as hashval_t. */
8907 comdat_type_node *type_node = die->die_id.die_type_node;
8908 memcpy (&h, type_node->signature, sizeof (h));
8909 }
8910 return h;
8911 }
8912
8913 inline bool
8914 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8915 {
8916 return r1->type == r2->type;
8917 }
8918
8919 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8920
8921 /* Return a pointer to the external_ref for references to DIE. */
8922
8923 static struct external_ref *
8924 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8925 {
8926 struct external_ref ref, *ref_p;
8927 external_ref **slot;
8928
8929 ref.type = die;
8930 slot = map->find_slot (&ref, INSERT);
8931 if (*slot != HTAB_EMPTY_ENTRY)
8932 return *slot;
8933
8934 ref_p = XCNEW (struct external_ref);
8935 ref_p->type = die;
8936 *slot = ref_p;
8937 return ref_p;
8938 }
8939
8940 /* Subroutine of optimize_external_refs, below.
8941
8942 If we see a type skeleton, record it as our stub. If we see external
8943 references, remember how many we've seen. */
8944
8945 static void
8946 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8947 {
8948 dw_die_ref c;
8949 dw_attr_node *a;
8950 unsigned ix;
8951 struct external_ref *ref_p;
8952
8953 if (is_type_die (die)
8954 && (c = get_AT_ref (die, DW_AT_signature)))
8955 {
8956 /* This is a local skeleton; use it for local references. */
8957 ref_p = lookup_external_ref (map, c);
8958 ref_p->stub = die;
8959 }
8960
8961 /* Scan the DIE references, and remember any that refer to DIEs from
8962 other CUs (i.e. those which are not marked). */
8963 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8964 if (AT_class (a) == dw_val_class_die_ref
8965 && (c = AT_ref (a))->die_mark == 0
8966 && is_type_die (c))
8967 {
8968 ref_p = lookup_external_ref (map, c);
8969 ref_p->n_refs++;
8970 }
8971
8972 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8973 }
8974
8975 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8976 points to an external_ref, DATA is the CU we're processing. If we don't
8977 already have a local stub, and we have multiple refs, build a stub. */
8978
8979 int
8980 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8981 {
8982 struct external_ref *ref_p = *slot;
8983
8984 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8985 {
8986 /* We have multiple references to this type, so build a small stub.
8987 Both of these forms are a bit dodgy from the perspective of the
8988 DWARF standard, since technically they should have names. */
8989 dw_die_ref cu = data;
8990 dw_die_ref type = ref_p->type;
8991 dw_die_ref stub = NULL;
8992
8993 if (type->comdat_type_p)
8994 {
8995 /* If we refer to this type via sig8, use AT_signature. */
8996 stub = new_die (type->die_tag, cu, NULL_TREE);
8997 add_AT_die_ref (stub, DW_AT_signature, type);
8998 }
8999 else
9000 {
9001 /* Otherwise, use a typedef with no name. */
9002 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9003 add_AT_die_ref (stub, DW_AT_type, type);
9004 }
9005
9006 stub->die_mark++;
9007 ref_p->stub = stub;
9008 }
9009 return 1;
9010 }
9011
9012 /* DIE is a unit; look through all the DIE references to see if there are
9013 any external references to types, and if so, create local stubs for
9014 them which will be applied in build_abbrev_table. This is useful because
9015 references to local DIEs are smaller. */
9016
9017 static external_ref_hash_type *
9018 optimize_external_refs (dw_die_ref die)
9019 {
9020 external_ref_hash_type *map = new external_ref_hash_type (10);
9021 optimize_external_refs_1 (die, map);
9022 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9023 return map;
9024 }
9025
9026 /* The following 4 variables are temporaries that are computed only during the
9027 build_abbrev_table call and used and released during the following
9028 optimize_abbrev_table call. */
9029
9030 /* First abbrev_id that can be optimized based on usage. */
9031 static unsigned int abbrev_opt_start;
9032
9033 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9034 abbrev_id smaller than this, because they must be already sized
9035 during build_abbrev_table). */
9036 static unsigned int abbrev_opt_base_type_end;
9037
9038 /* Vector of usage counts during build_abbrev_table. Indexed by
9039 abbrev_id - abbrev_opt_start. */
9040 static vec<unsigned int> abbrev_usage_count;
9041
9042 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9043 static vec<dw_die_ref> sorted_abbrev_dies;
9044
9045 /* The format of each DIE (and its attribute value pairs) is encoded in an
9046 abbreviation table. This routine builds the abbreviation table and assigns
9047 a unique abbreviation id for each abbreviation entry. The children of each
9048 die are visited recursively. */
9049
9050 static void
9051 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9052 {
9053 unsigned int abbrev_id = 0;
9054 dw_die_ref c;
9055 dw_attr_node *a;
9056 unsigned ix;
9057 dw_die_ref abbrev;
9058
9059 /* Scan the DIE references, and replace any that refer to
9060 DIEs from other CUs (i.e. those which are not marked) with
9061 the local stubs we built in optimize_external_refs. */
9062 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9063 if (AT_class (a) == dw_val_class_die_ref
9064 && (c = AT_ref (a))->die_mark == 0)
9065 {
9066 struct external_ref *ref_p;
9067 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9068
9069 ref_p = lookup_external_ref (extern_map, c);
9070 if (ref_p->stub && ref_p->stub != die)
9071 change_AT_die_ref (a, ref_p->stub);
9072 else
9073 /* We aren't changing this reference, so mark it external. */
9074 set_AT_ref_external (a, 1);
9075 }
9076
9077 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9078 {
9079 dw_attr_node *die_a, *abbrev_a;
9080 unsigned ix;
9081 bool ok = true;
9082
9083 if (abbrev_id == 0)
9084 continue;
9085 if (abbrev->die_tag != die->die_tag)
9086 continue;
9087 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9088 continue;
9089
9090 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9091 continue;
9092
9093 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9094 {
9095 abbrev_a = &(*abbrev->die_attr)[ix];
9096 if ((abbrev_a->dw_attr != die_a->dw_attr)
9097 || (value_format (abbrev_a) != value_format (die_a)))
9098 {
9099 ok = false;
9100 break;
9101 }
9102 }
9103 if (ok)
9104 break;
9105 }
9106
9107 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9108 {
9109 vec_safe_push (abbrev_die_table, die);
9110 if (abbrev_opt_start)
9111 abbrev_usage_count.safe_push (0);
9112 }
9113 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9114 {
9115 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9116 sorted_abbrev_dies.safe_push (die);
9117 }
9118
9119 die->die_abbrev = abbrev_id;
9120 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9121 }
9122
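/* Illustrative sketch only, not literal compiler output: the table built
   above is later emitted by output_die_abbrevs / output_abbrev_section as a
   sequence of entries of roughly this shape (made-up values):

       .uleb128 0x3        # abbrev code
       .uleb128 0x2e       # DW_TAG_subprogram
       .byte    0x1        # DW_children_yes
       .uleb128 0x3        # DW_AT_name
       .uleb128 0xe        # DW_FORM_strp
       .byte    0x0        # attribute terminator
       .byte    0x0

   Every DIE in .debug_info then starts with the uleb128 abbrev code that
   selects one of these entries.  */
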
9123 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9124 by die_abbrev's usage count, from the most commonly used
9125 abbreviation to the least. */
9126
9127 static int
9128 die_abbrev_cmp (const void *p1, const void *p2)
9129 {
9130 dw_die_ref die1 = *(const dw_die_ref *) p1;
9131 dw_die_ref die2 = *(const dw_die_ref *) p2;
9132
9133 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9134 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9135
9136 if (die1->die_abbrev >= abbrev_opt_base_type_end
9137 && die2->die_abbrev >= abbrev_opt_base_type_end)
9138 {
9139 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9140 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9141 return -1;
9142 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9143 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9144 return 1;
9145 }
9146
9147 /* Stabilize the sort. */
9148 if (die1->die_abbrev < die2->die_abbrev)
9149 return -1;
9150 if (die1->die_abbrev > die2->die_abbrev)
9151 return 1;
9152
9153 return 0;
9154 }
9155
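/* Why the sort above matters, as a sketch: abbrev codes are emitted as
   uleb128, so codes 1-127 cost a single byte in every DIE that uses them,
   while larger codes cost two or more.  Giving the most frequently used
   abbreviations the smallest codes (see optimize_abbrev_table below) is a
   net size win once the table has grown past 127 entries; for DWARF 5 the
   pass runs even below that threshold, to enable DW_FORM_implicit_const.  */
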
9156 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9157    class attributes of DIEs between sorted_abbrev_dies[first_id] and
9158    sorted_abbrev_dies[end - 1] into the corresponding
9159    dw_val_class_*_implicit classes.  */
9160
9161 static void
9162 optimize_implicit_const (unsigned int first_id, unsigned int end,
9163 vec<bool> &implicit_consts)
9164 {
9165 /* Converting to an implicit const never pays off if just one DIE uses the abbreviation.  */
9166 if (end < first_id + 2)
9167 return;
9168
9169 dw_attr_node *a;
9170 unsigned ix, i;
9171 dw_die_ref die = sorted_abbrev_dies[first_id];
9172 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9173 if (implicit_consts[ix])
9174 {
9175 enum dw_val_class new_class = dw_val_class_none;
9176 switch (AT_class (a))
9177 {
9178 case dw_val_class_unsigned_const:
9179 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9180 continue;
9181
9182 /* The .debug_abbrev section will grow by
9183 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9184 in all the DIEs using that abbreviation. */
9185 if (constant_size (AT_unsigned (a)) * (end - first_id)
9186 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9187 continue;
9188
9189 new_class = dw_val_class_unsigned_const_implicit;
9190 break;
9191
9192 case dw_val_class_const:
9193 new_class = dw_val_class_const_implicit;
9194 break;
9195
9196 case dw_val_class_file:
9197 new_class = dw_val_class_file_implicit;
9198 break;
9199
9200 default:
9201 continue;
9202 }
9203 for (i = first_id; i < end; i++)
9204 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9205 = new_class;
9206 }
9207 }
9208
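/* Rough idea of the payoff, assuming DWARF 5: with DW_FORM_implicit_const
   the attribute's value is stored once, as an sleb128 in the .debug_abbrev
   entry, and contributes zero bytes to every DIE in .debug_info that shares
   the abbreviation (see the dw_val_class_*_implicit cases in size_of_die
   and output_die_abbrevs below).  */
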
9209 /* Attempt to optimize the abbreviation table, starting with the
9210    abbrev_opt_start abbreviation and those above it.  */
9211
9212 static void
9213 optimize_abbrev_table (void)
9214 {
9215 if (abbrev_opt_start
9216 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9217 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9218 {
9219 auto_vec<bool, 32> implicit_consts;
9220 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9221
9222 unsigned int abbrev_id = abbrev_opt_start - 1;
9223 unsigned int first_id = ~0U;
9224 unsigned int last_abbrev_id = 0;
9225 unsigned int i;
9226 dw_die_ref die;
9227 if (abbrev_opt_base_type_end > abbrev_opt_start)
9228 abbrev_id = abbrev_opt_base_type_end - 1;
9229 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9230 most commonly used abbreviations come first. */
9231 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9232 {
9233 dw_attr_node *a;
9234 unsigned ix;
9235
9236 /* If calc_base_type_die_sizes has been called, the CU and
9237 base types after it can't be optimized, because we've already
9238 calculated their DIE offsets. We've sorted them first. */
9239 if (die->die_abbrev < abbrev_opt_base_type_end)
9240 continue;
9241 if (die->die_abbrev != last_abbrev_id)
9242 {
9243 last_abbrev_id = die->die_abbrev;
9244 if (dwarf_version >= 5 && first_id != ~0U)
9245 optimize_implicit_const (first_id, i, implicit_consts);
9246 abbrev_id++;
9247 (*abbrev_die_table)[abbrev_id] = die;
9248 if (dwarf_version >= 5)
9249 {
9250 first_id = i;
9251 implicit_consts.truncate (0);
9252
9253 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9254 switch (AT_class (a))
9255 {
9256 case dw_val_class_const:
9257 case dw_val_class_unsigned_const:
9258 case dw_val_class_file:
9259 implicit_consts.safe_push (true);
9260 break;
9261 default:
9262 implicit_consts.safe_push (false);
9263 break;
9264 }
9265 }
9266 }
9267 else if (dwarf_version >= 5)
9268 {
9269 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9270 if (!implicit_consts[ix])
9271 continue;
9272 else
9273 {
9274 dw_attr_node *other_a
9275 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9276 if (!dw_val_equal_p (&a->dw_attr_val,
9277 &other_a->dw_attr_val))
9278 implicit_consts[ix] = false;
9279 }
9280 }
9281 die->die_abbrev = abbrev_id;
9282 }
9283 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9284 if (dwarf_version >= 5 && first_id != ~0U)
9285 optimize_implicit_const (first_id, i, implicit_consts);
9286 }
9287
9288 abbrev_opt_start = 0;
9289 abbrev_opt_base_type_end = 0;
9290 abbrev_usage_count.release ();
9291 sorted_abbrev_dies.release ();
9292 }
9293 \f
9294 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9295
9296 static int
9297 constant_size (unsigned HOST_WIDE_INT value)
9298 {
9299 int log;
9300
9301 if (value == 0)
9302 log = 0;
9303 else
9304 log = floor_log2 (value);
9305
9306 log = log / 8;
9307 log = 1 << (floor_log2 (log) + 1);
9308
9309 return log;
9310 }
9311
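/* For example (a sketch, not an exhaustive spec): constant_size (0x4c) == 1,
   constant_size (0x1234) == 2, constant_size (0x12345678) == 4, and anything
   needing more than 32 bits yields 8, matching the DW_FORM_data1/2/4/8
   choices made in value_format below.  */
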
9312 /* Return the size of a DIE as it is represented in the
9313 .debug_info section. */
9314
9315 static unsigned long
9316 size_of_die (dw_die_ref die)
9317 {
9318 unsigned long size = 0;
9319 dw_attr_node *a;
9320 unsigned ix;
9321 enum dwarf_form form;
9322
9323 size += size_of_uleb128 (die->die_abbrev);
9324 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9325 {
9326 switch (AT_class (a))
9327 {
9328 case dw_val_class_addr:
9329 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9330 {
9331 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9332 size += size_of_uleb128 (AT_index (a));
9333 }
9334 else
9335 size += DWARF2_ADDR_SIZE;
9336 break;
9337 case dw_val_class_offset:
9338 size += DWARF_OFFSET_SIZE;
9339 break;
9340 case dw_val_class_loc:
9341 {
9342 unsigned long lsize = size_of_locs (AT_loc (a));
9343
9344 /* Block length. */
9345 if (dwarf_version >= 4)
9346 size += size_of_uleb128 (lsize);
9347 else
9348 size += constant_size (lsize);
9349 size += lsize;
9350 }
9351 break;
9352 case dw_val_class_loc_list:
9353 case dw_val_class_view_list:
9354 if (dwarf_split_debug_info && dwarf_version >= 5)
9355 {
9356 gcc_assert (AT_loc_list (a)->num_assigned);
9357 size += size_of_uleb128 (AT_loc_list (a)->hash);
9358 }
9359 else
9360 size += DWARF_OFFSET_SIZE;
9361 break;
9362 case dw_val_class_range_list:
9363 if (value_format (a) == DW_FORM_rnglistx)
9364 {
9365 gcc_assert (rnglist_idx);
9366 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9367 size += size_of_uleb128 (r->idx);
9368 }
9369 else
9370 size += DWARF_OFFSET_SIZE;
9371 break;
9372 case dw_val_class_const:
9373 size += size_of_sleb128 (AT_int (a));
9374 break;
9375 case dw_val_class_unsigned_const:
9376 {
9377 int csize = constant_size (AT_unsigned (a));
9378 if (dwarf_version == 3
9379 && a->dw_attr == DW_AT_data_member_location
9380 && csize >= 4)
9381 size += size_of_uleb128 (AT_unsigned (a));
9382 else
9383 size += csize;
9384 }
9385 break;
9386 case dw_val_class_symview:
9387 if (symview_upper_bound <= 0xff)
9388 size += 1;
9389 else if (symview_upper_bound <= 0xffff)
9390 size += 2;
9391 else if (symview_upper_bound <= 0xffffffff)
9392 size += 4;
9393 else
9394 size += 8;
9395 break;
9396 case dw_val_class_const_implicit:
9397 case dw_val_class_unsigned_const_implicit:
9398 case dw_val_class_file_implicit:
9399 /* These occupy no size in the DIE, just an extra sleb128 in
9400 .debug_abbrev. */
9401 break;
9402 case dw_val_class_const_double:
9403 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9404 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9405 size++; /* block */
9406 break;
9407 case dw_val_class_wide_int:
9408 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9409 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9410 if (get_full_len (*a->dw_attr_val.v.val_wide)
9411 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9412 size++; /* block */
9413 break;
9414 case dw_val_class_vec:
9415 size += constant_size (a->dw_attr_val.v.val_vec.length
9416 * a->dw_attr_val.v.val_vec.elt_size)
9417 + a->dw_attr_val.v.val_vec.length
9418 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9419 break;
9420 case dw_val_class_flag:
9421 if (dwarf_version >= 4)
9422 /* Currently all add_AT_flag calls pass in 1 as last argument,
9423 so DW_FORM_flag_present can be used. If that ever changes,
9424 we'll need to use DW_FORM_flag and have some optimization
9425 in build_abbrev_table that will change those to
9426 DW_FORM_flag_present if it is set to 1 in all DIEs using
9427 the same abbrev entry. */
9428 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9429 else
9430 size += 1;
9431 break;
9432 case dw_val_class_die_ref:
9433 if (AT_ref_external (a))
9434 {
9435 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9436 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9437 is sized by target address length, whereas in DWARF3
9438 it's always sized as an offset. */
9439 if (use_debug_types)
9440 size += DWARF_TYPE_SIGNATURE_SIZE;
9441 else if (dwarf_version == 2)
9442 size += DWARF2_ADDR_SIZE;
9443 else
9444 size += DWARF_OFFSET_SIZE;
9445 }
9446 else
9447 size += DWARF_OFFSET_SIZE;
9448 break;
9449 case dw_val_class_fde_ref:
9450 size += DWARF_OFFSET_SIZE;
9451 break;
9452 case dw_val_class_lbl_id:
9453 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9454 {
9455 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9456 size += size_of_uleb128 (AT_index (a));
9457 }
9458 else
9459 size += DWARF2_ADDR_SIZE;
9460 break;
9461 case dw_val_class_lineptr:
9462 case dw_val_class_macptr:
9463 case dw_val_class_loclistsptr:
9464 size += DWARF_OFFSET_SIZE;
9465 break;
9466 case dw_val_class_str:
9467 form = AT_string_form (a);
9468 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9469 size += DWARF_OFFSET_SIZE;
9470 else if (form == dwarf_FORM (DW_FORM_strx))
9471 size += size_of_uleb128 (AT_index (a));
9472 else
9473 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9474 break;
9475 case dw_val_class_file:
9476 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9477 break;
9478 case dw_val_class_data8:
9479 size += 8;
9480 break;
9481 case dw_val_class_vms_delta:
9482 size += DWARF_OFFSET_SIZE;
9483 break;
9484 case dw_val_class_high_pc:
9485 size += DWARF2_ADDR_SIZE;
9486 break;
9487 case dw_val_class_discr_value:
9488 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9489 break;
9490 case dw_val_class_discr_list:
9491 {
9492 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9493
9494 /* This is a block, so we have the block length and then its
9495 data. */
9496 size += constant_size (block_size) + block_size;
9497 }
9498 break;
9499 default:
9500 gcc_unreachable ();
9501 }
9502 }
9503
9504 return size;
9505 }
9506
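/* A worked example, assuming 32-bit DWARF offsets (DWARF_OFFSET_SIZE == 4):
   a DIE whose attributes are a DW_FORM_strp name and a one-byte unsigned
   constant, with an abbrev code below 128, is sized as
   1 (abbrev code uleb128) + 4 (strp offset) + 1 (data1) = 6 bytes.  */
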
9507 /* Size the debugging information associated with a given DIE. Visits the
9508    DIE's children recursively.  Updates the global variable next_die_offset
9509    each time through.  Uses the current value of next_die_offset to update the
9510 die_offset field in each DIE. */
9511
9512 static void
9513 calc_die_sizes (dw_die_ref die)
9514 {
9515 dw_die_ref c;
9516
9517 gcc_assert (die->die_offset == 0
9518 || (unsigned long int) die->die_offset == next_die_offset);
9519 die->die_offset = next_die_offset;
9520 next_die_offset += size_of_die (die);
9521
9522 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9523
9524 if (die->die_child != NULL)
9525 /* Count the null byte used to terminate sibling lists. */
9526 next_die_offset += 1;
9527 }
9528
9529 /* Size just the base type children at the start of the CU.
9530    This is needed because build_abbrev_table needs to size locs,
9531    and sizing of type-based stack ops needs to know die_offset
9532 values for the base types. */
9533
9534 static void
9535 calc_base_type_die_sizes (void)
9536 {
9537 unsigned long die_offset = (dwarf_split_debug_info
9538 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9539 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9540 unsigned int i;
9541 dw_die_ref base_type;
9542 #if ENABLE_ASSERT_CHECKING
9543 dw_die_ref prev = comp_unit_die ()->die_child;
9544 #endif
9545
9546 die_offset += size_of_die (comp_unit_die ());
9547 for (i = 0; base_types.iterate (i, &base_type); i++)
9548 {
9549 #if ENABLE_ASSERT_CHECKING
9550 gcc_assert (base_type->die_offset == 0
9551 && prev->die_sib == base_type
9552 && base_type->die_child == NULL
9553 && base_type->die_abbrev);
9554 prev = base_type;
9555 #endif
9556 if (abbrev_opt_start
9557 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9558 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9559 base_type->die_offset = die_offset;
9560 die_offset += size_of_die (base_type);
9561 }
9562 }
9563
9564 /* Set the marks for a die and its children. We do this so
9565 that we know whether or not a reference needs to use FORM_ref_addr; only
9566 DIEs in the same CU will be marked. We used to clear out the offset
9567 and use that as the flag, but ran into ordering problems. */
9568
9569 static void
9570 mark_dies (dw_die_ref die)
9571 {
9572 dw_die_ref c;
9573
9574 gcc_assert (!die->die_mark);
9575
9576 die->die_mark = 1;
9577 FOR_EACH_CHILD (die, c, mark_dies (c));
9578 }
9579
9580 /* Clear the marks for a die and its children. */
9581
9582 static void
9583 unmark_dies (dw_die_ref die)
9584 {
9585 dw_die_ref c;
9586
9587 if (! use_debug_types)
9588 gcc_assert (die->die_mark);
9589
9590 die->die_mark = 0;
9591 FOR_EACH_CHILD (die, c, unmark_dies (c));
9592 }
9593
9594 /* Clear the marks for a die, its children and referred dies. */
9595
9596 static void
9597 unmark_all_dies (dw_die_ref die)
9598 {
9599 dw_die_ref c;
9600 dw_attr_node *a;
9601 unsigned ix;
9602
9603 if (!die->die_mark)
9604 return;
9605 die->die_mark = 0;
9606
9607 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9608
9609 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9610 if (AT_class (a) == dw_val_class_die_ref)
9611 unmark_all_dies (AT_ref (a));
9612 }
9613
9614 /* Calculate if the entry should appear in the final output file. It may be
9615    from a pruned type.  */
9616
9617 static bool
9618 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9619 {
9620 /* By limiting gnu pubnames to definitions only, gold can generate a
9621 gdb index without entries for declarations, which don't include
9622 enough information to be useful. */
9623 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9624 return false;
9625
9626 if (table == pubname_table)
9627 {
9628 /* Enumerator names are part of the pubname table, but the
9629 parent DW_TAG_enumeration_type die may have been pruned.
9630 Don't output them if that is the case. */
9631 if (p->die->die_tag == DW_TAG_enumerator &&
9632 (p->die->die_parent == NULL
9633 || !p->die->die_parent->die_perennial_p))
9634 return false;
9635
9636 /* Everything else in the pubname table is included. */
9637 return true;
9638 }
9639
9640 /* The pubtypes table shouldn't include types that have been
9641 pruned. */
9642 return (p->die->die_offset != 0
9643 || !flag_eliminate_unused_debug_types);
9644 }
9645
9646 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9647 generated for the compilation unit. */
9648
9649 static unsigned long
9650 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9651 {
9652 unsigned long size;
9653 unsigned i;
9654 pubname_entry *p;
9655 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9656
9657 size = DWARF_PUBNAMES_HEADER_SIZE;
9658 FOR_EACH_VEC_ELT (*names, i, p)
9659 if (include_pubname_in_output (names, p))
9660 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9661
9662 size += DWARF_OFFSET_SIZE;
9663 return size;
9664 }
9665
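/* Sketch of the per-entry arithmetic above, assuming 32-bit DWARF offsets
   and debug_generate_pub_sections == 2 (the case that adds a one-byte flags
   field via space_for_flags): an entry for the name "foo" costs
   4 (DIE offset) + 3 (name) + 1 (NUL) + 1 (flags) = 9 bytes, on top of the
   fixed header and terminating offset counted separately.  */
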
9666 /* Return the size of the information in the .debug_aranges section. */
9667
9668 static unsigned long
9669 size_of_aranges (void)
9670 {
9671 unsigned long size;
9672
9673 size = DWARF_ARANGES_HEADER_SIZE;
9674
9675 /* Count the address/length pair for this compilation unit. */
9676 if (text_section_used)
9677 size += 2 * DWARF2_ADDR_SIZE;
9678 if (cold_text_section_used)
9679 size += 2 * DWARF2_ADDR_SIZE;
9680 if (have_multiple_function_sections)
9681 {
9682 unsigned fde_idx;
9683 dw_fde_ref fde;
9684
9685 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9686 {
9687 if (DECL_IGNORED_P (fde->decl))
9688 continue;
9689 if (!fde->in_std_section)
9690 size += 2 * DWARF2_ADDR_SIZE;
9691 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9692 size += 2 * DWARF2_ADDR_SIZE;
9693 }
9694 }
9695
9696 /* Count the two zero words used to terminate the address range table.  */
9697 size += 2 * DWARF2_ADDR_SIZE;
9698 return size;
9699 }
9700 \f
9701 /* Select the encoding of an attribute value. */
9702
9703 static enum dwarf_form
9704 value_format (dw_attr_node *a)
9705 {
9706 switch (AT_class (a))
9707 {
9708 case dw_val_class_addr:
9709 /* Only very few attributes allow DW_FORM_addr. */
9710 switch (a->dw_attr)
9711 {
9712 case DW_AT_low_pc:
9713 case DW_AT_high_pc:
9714 case DW_AT_entry_pc:
9715 case DW_AT_trampoline:
9716 return (AT_index (a) == NOT_INDEXED
9717 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9718 default:
9719 break;
9720 }
9721 switch (DWARF2_ADDR_SIZE)
9722 {
9723 case 1:
9724 return DW_FORM_data1;
9725 case 2:
9726 return DW_FORM_data2;
9727 case 4:
9728 return DW_FORM_data4;
9729 case 8:
9730 return DW_FORM_data8;
9731 default:
9732 gcc_unreachable ();
9733 }
9734 case dw_val_class_loc_list:
9735 case dw_val_class_view_list:
9736 if (dwarf_split_debug_info
9737 && dwarf_version >= 5
9738 && AT_loc_list (a)->num_assigned)
9739 return DW_FORM_loclistx;
9740 /* FALLTHRU */
9741 case dw_val_class_range_list:
9742 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9743 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9744 care about the sizes of .debug* sections in shared libraries and
9745 executables and don't take into account relocations that affect only
9746 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9747 table in the .debug_rnglists section.  */
9748 if (dwarf_split_debug_info
9749 && dwarf_version >= 5
9750 && AT_class (a) == dw_val_class_range_list
9751 && rnglist_idx
9752 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9753 return DW_FORM_rnglistx;
9754 if (dwarf_version >= 4)
9755 return DW_FORM_sec_offset;
9756 /* FALLTHRU */
9757 case dw_val_class_vms_delta:
9758 case dw_val_class_offset:
9759 switch (DWARF_OFFSET_SIZE)
9760 {
9761 case 4:
9762 return DW_FORM_data4;
9763 case 8:
9764 return DW_FORM_data8;
9765 default:
9766 gcc_unreachable ();
9767 }
9768 case dw_val_class_loc:
9769 if (dwarf_version >= 4)
9770 return DW_FORM_exprloc;
9771 switch (constant_size (size_of_locs (AT_loc (a))))
9772 {
9773 case 1:
9774 return DW_FORM_block1;
9775 case 2:
9776 return DW_FORM_block2;
9777 case 4:
9778 return DW_FORM_block4;
9779 default:
9780 gcc_unreachable ();
9781 }
9782 case dw_val_class_const:
9783 return DW_FORM_sdata;
9784 case dw_val_class_unsigned_const:
9785 switch (constant_size (AT_unsigned (a)))
9786 {
9787 case 1:
9788 return DW_FORM_data1;
9789 case 2:
9790 return DW_FORM_data2;
9791 case 4:
9792 /* In DWARF3 DW_AT_data_member_location with
9793 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9794 constant, so we need to use DW_FORM_udata if we need
9795 a large constant. */
9796 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9797 return DW_FORM_udata;
9798 return DW_FORM_data4;
9799 case 8:
9800 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9801 return DW_FORM_udata;
9802 return DW_FORM_data8;
9803 default:
9804 gcc_unreachable ();
9805 }
9806 case dw_val_class_const_implicit:
9807 case dw_val_class_unsigned_const_implicit:
9808 case dw_val_class_file_implicit:
9809 return DW_FORM_implicit_const;
9810 case dw_val_class_const_double:
9811 switch (HOST_BITS_PER_WIDE_INT)
9812 {
9813 case 8:
9814 return DW_FORM_data2;
9815 case 16:
9816 return DW_FORM_data4;
9817 case 32:
9818 return DW_FORM_data8;
9819 case 64:
9820 if (dwarf_version >= 5)
9821 return DW_FORM_data16;
9822 /* FALLTHRU */
9823 default:
9824 return DW_FORM_block1;
9825 }
9826 case dw_val_class_wide_int:
9827 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9828 {
9829 case 8:
9830 return DW_FORM_data1;
9831 case 16:
9832 return DW_FORM_data2;
9833 case 32:
9834 return DW_FORM_data4;
9835 case 64:
9836 return DW_FORM_data8;
9837 case 128:
9838 if (dwarf_version >= 5)
9839 return DW_FORM_data16;
9840 /* FALLTHRU */
9841 default:
9842 return DW_FORM_block1;
9843 }
9844 case dw_val_class_symview:
9845 /* ??? We might use uleb128, but then we'd have to compute
9846 .debug_info offsets in the assembler. */
9847 if (symview_upper_bound <= 0xff)
9848 return DW_FORM_data1;
9849 else if (symview_upper_bound <= 0xffff)
9850 return DW_FORM_data2;
9851 else if (symview_upper_bound <= 0xffffffff)
9852 return DW_FORM_data4;
9853 else
9854 return DW_FORM_data8;
9855 case dw_val_class_vec:
9856 switch (constant_size (a->dw_attr_val.v.val_vec.length
9857 * a->dw_attr_val.v.val_vec.elt_size))
9858 {
9859 case 1:
9860 return DW_FORM_block1;
9861 case 2:
9862 return DW_FORM_block2;
9863 case 4:
9864 return DW_FORM_block4;
9865 default:
9866 gcc_unreachable ();
9867 }
9868 case dw_val_class_flag:
9869 if (dwarf_version >= 4)
9870 {
9871 /* Currently all add_AT_flag calls pass in 1 as last argument,
9872 so DW_FORM_flag_present can be used. If that ever changes,
9873 we'll need to use DW_FORM_flag and have some optimization
9874 in build_abbrev_table that will change those to
9875 DW_FORM_flag_present if it is set to 1 in all DIEs using
9876 the same abbrev entry. */
9877 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9878 return DW_FORM_flag_present;
9879 }
9880 return DW_FORM_flag;
9881 case dw_val_class_die_ref:
9882 if (AT_ref_external (a))
9883 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9884 else
9885 return DW_FORM_ref;
9886 case dw_val_class_fde_ref:
9887 return DW_FORM_data;
9888 case dw_val_class_lbl_id:
9889 return (AT_index (a) == NOT_INDEXED
9890 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9891 case dw_val_class_lineptr:
9892 case dw_val_class_macptr:
9893 case dw_val_class_loclistsptr:
9894 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9895 case dw_val_class_str:
9896 return AT_string_form (a);
9897 case dw_val_class_file:
9898 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9899 {
9900 case 1:
9901 return DW_FORM_data1;
9902 case 2:
9903 return DW_FORM_data2;
9904 case 4:
9905 return DW_FORM_data4;
9906 default:
9907 gcc_unreachable ();
9908 }
9909
9910 case dw_val_class_data8:
9911 return DW_FORM_data8;
9912
9913 case dw_val_class_high_pc:
9914 switch (DWARF2_ADDR_SIZE)
9915 {
9916 case 1:
9917 return DW_FORM_data1;
9918 case 2:
9919 return DW_FORM_data2;
9920 case 4:
9921 return DW_FORM_data4;
9922 case 8:
9923 return DW_FORM_data8;
9924 default:
9925 gcc_unreachable ();
9926 }
9927
9928 case dw_val_class_discr_value:
9929 return (a->dw_attr_val.v.val_discr_value.pos
9930 ? DW_FORM_udata
9931 : DW_FORM_sdata);
9932 case dw_val_class_discr_list:
9933 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9934 {
9935 case 1:
9936 return DW_FORM_block1;
9937 case 2:
9938 return DW_FORM_block2;
9939 case 4:
9940 return DW_FORM_block4;
9941 default:
9942 gcc_unreachable ();
9943 }
9944
9945 default:
9946 gcc_unreachable ();
9947 }
9948 }
9949
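/* A small example of the mapping above: an attribute such as DW_AT_byte_size
   with value 4, classed as dw_val_class_unsigned_const, has
   constant_size (4) == 1 and so is encoded as DW_FORM_data1; the same value
   classed as dw_val_class_const would get DW_FORM_sdata instead.  */
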
9950 /* Output the encoding of an attribute value. */
9951
9952 static void
9953 output_value_format (dw_attr_node *a)
9954 {
9955 enum dwarf_form form = value_format (a);
9956
9957 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9958 }
9959
9960 /* Given a die and id, produce the appropriate abbreviations. */
9961
9962 static void
9963 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9964 {
9965 unsigned ix;
9966 dw_attr_node *a_attr;
9967
9968 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9969 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9970 dwarf_tag_name (abbrev->die_tag));
9971
9972 if (abbrev->die_child != NULL)
9973 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9974 else
9975 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9976
9977 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9978 {
9979 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9980 dwarf_attr_name (a_attr->dw_attr));
9981 output_value_format (a_attr);
9982 if (value_format (a_attr) == DW_FORM_implicit_const)
9983 {
9984 if (AT_class (a_attr) == dw_val_class_file_implicit)
9985 {
9986 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9987 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9988 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9989 }
9990 else
9991 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9992 }
9993 }
9994
9995 dw2_asm_output_data (1, 0, NULL);
9996 dw2_asm_output_data (1, 0, NULL);
9997 }
9998
9999
10000 /* Output the .debug_abbrev section which defines the DIE abbreviation
10001 table. */
10002
10003 static void
10004 output_abbrev_section (void)
10005 {
10006 unsigned int abbrev_id;
10007 dw_die_ref abbrev;
10008
10009 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10010 if (abbrev_id != 0)
10011 output_die_abbrevs (abbrev_id, abbrev);
10012
10013 /* Terminate the table. */
10014 dw2_asm_output_data (1, 0, NULL);
10015 }
10016
10017 /* Return a new location list, given the begin and end range, and the
10018 expression. */
10019
10020 static inline dw_loc_list_ref
10021 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10022 const char *end, var_loc_view vend,
10023 const char *section)
10024 {
10025 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10026
10027 retlist->begin = begin;
10028 retlist->begin_entry = NULL;
10029 retlist->end = end;
10030 retlist->expr = expr;
10031 retlist->section = section;
10032 retlist->vbegin = vbegin;
10033 retlist->vend = vend;
10034
10035 return retlist;
10036 }
10037
10038 /* Return true iff there's any nonzero view number in the loc list.
10039
10040 ??? When views are not enabled, we'll often extend a single range
10041 to the entire function, so that we emit a single location
10042 expression rather than a location list. With views, even with a
10043 single range, we'll output a list if start or end have a nonzero
10044 view. If we change this, we may want to stop splitting a single
10045 range in dw_loc_list just because of a nonzero view, even if it
10046 straddles across hot/cold partitions. */
10047
10048 static bool
10049 loc_list_has_views (dw_loc_list_ref list)
10050 {
10051 if (!debug_variable_location_views)
10052 return false;
10053
10054 for (dw_loc_list_ref loc = list;
10055 loc != NULL; loc = loc->dw_loc_next)
10056 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10057 return true;
10058
10059 return false;
10060 }
10061
10062 /* Generate a new internal symbol for this location list node, if it
10063 hasn't got one yet. */
10064
10065 static inline void
10066 gen_llsym (dw_loc_list_ref list)
10067 {
10068 gcc_assert (!list->ll_symbol);
10069 list->ll_symbol = gen_internal_sym ("LLST");
10070
10071 if (!loc_list_has_views (list))
10072 return;
10073
10074 if (dwarf2out_locviews_in_attribute ())
10075 {
10076 /* Use the same label_num for the view list. */
10077 label_num--;
10078 list->vl_symbol = gen_internal_sym ("LVUS");
10079 }
10080 else
10081 list->vl_symbol = list->ll_symbol;
10082 }
10083
10084 /* Generate a symbol for the list, but only if we really want to emit
10085 it as a list. */
10086
10087 static inline void
10088 maybe_gen_llsym (dw_loc_list_ref list)
10089 {
10090 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10091 return;
10092
10093 gen_llsym (list);
10094 }
10095
10096 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10097 NULL, don't consider size of the location expression. If we're not
10098 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10099 representation in *SIZEP. */
10100
10101 static bool
10102 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10103 {
10104 /* Don't output an entry that starts and ends at the same address. */
10105 if (strcmp (curr->begin, curr->end) == 0
10106 && curr->vbegin == curr->vend && !curr->force)
10107 return true;
10108
10109 if (!sizep)
10110 return false;
10111
10112 unsigned long size = size_of_locs (curr->expr);
10113
10114 /* If the expression is too large, drop it on the floor. We could
10115 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10116 in the expression, but >= 64KB expressions for a single value
10117 in a single range are unlikely to be very useful.  */
10118 if (dwarf_version < 5 && size > 0xffff)
10119 return true;
10120
10121 *sizep = size;
10122
10123 return false;
10124 }
10125
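/* Note on the 64KB limit above: before DWARF 5 each location list entry
   carries a 2-byte expression length (see the gcc_assert (size <= 0xffff)
   in output_loc_list below), so oversized expressions have to be dropped
   rather than emitted.  */
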
10126 /* Output a view pair loclist entry for CURR, if it requires one. */
10127
10128 static void
10129 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10130 {
10131 if (!dwarf2out_locviews_in_loclist ())
10132 return;
10133
10134 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10135 return;
10136
10137 #ifdef DW_LLE_view_pair
10138 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10139
10140 if (dwarf2out_as_locview_support)
10141 {
10142 if (ZERO_VIEW_P (curr->vbegin))
10143 dw2_asm_output_data_uleb128 (0, "Location view begin");
10144 else
10145 {
10146 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10147 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10148 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10149 }
10150
10151 if (ZERO_VIEW_P (curr->vend))
10152 dw2_asm_output_data_uleb128 (0, "Location view end");
10153 else
10154 {
10155 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10156 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10157 dw2_asm_output_symname_uleb128 (label, "Location view end");
10158 }
10159 }
10160 else
10161 {
10162 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10163 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10164 }
10165 #endif /* DW_LLE_view_pair */
10166
10167 return;
10168 }
10169
10170 /* Output the location list given to us. */
10171
10172 static void
10173 output_loc_list (dw_loc_list_ref list_head)
10174 {
10175 int vcount = 0, lcount = 0;
10176
10177 if (list_head->emitted)
10178 return;
10179 list_head->emitted = true;
10180
10181 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10182 {
10183 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10184
10185 for (dw_loc_list_ref curr = list_head; curr != NULL;
10186 curr = curr->dw_loc_next)
10187 {
10188 unsigned long size;
10189
10190 if (skip_loc_list_entry (curr, &size))
10191 continue;
10192
10193 vcount++;
10194
10195 /* ?? dwarf_split_debug_info? */
10196 if (dwarf2out_as_locview_support)
10197 {
10198 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10199
10200 if (!ZERO_VIEW_P (curr->vbegin))
10201 {
10202 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10203 dw2_asm_output_symname_uleb128 (label,
10204 "View list begin (%s)",
10205 list_head->vl_symbol);
10206 }
10207 else
10208 dw2_asm_output_data_uleb128 (0,
10209 "View list begin (%s)",
10210 list_head->vl_symbol);
10211
10212 if (!ZERO_VIEW_P (curr->vend))
10213 {
10214 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10215 dw2_asm_output_symname_uleb128 (label,
10216 "View list end (%s)",
10217 list_head->vl_symbol);
10218 }
10219 else
10220 dw2_asm_output_data_uleb128 (0,
10221 "View list end (%s)",
10222 list_head->vl_symbol);
10223 }
10224 else
10225 {
10226 dw2_asm_output_data_uleb128 (curr->vbegin,
10227 "View list begin (%s)",
10228 list_head->vl_symbol);
10229 dw2_asm_output_data_uleb128 (curr->vend,
10230 "View list end (%s)",
10231 list_head->vl_symbol);
10232 }
10233 }
10234 }
10235
10236 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10237
10238 const char *last_section = NULL;
10239 const char *base_label = NULL;
10240
10241 /* Walk the location list, and output each range + expression. */
10242 for (dw_loc_list_ref curr = list_head; curr != NULL;
10243 curr = curr->dw_loc_next)
10244 {
10245 unsigned long size;
10246
10247 /* Skip this entry? If we skip it here, we must skip it in the
10248 view list above as well. */
10249 if (skip_loc_list_entry (curr, &size))
10250 continue;
10251
10252 lcount++;
10253
10254 if (dwarf_version >= 5)
10255 {
10256 if (dwarf_split_debug_info)
10257 {
10258 dwarf2out_maybe_output_loclist_view_pair (curr);
10259 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10260 uleb128 index into .debug_addr and uleb128 length. */
10261 dw2_asm_output_data (1, DW_LLE_startx_length,
10262 "DW_LLE_startx_length (%s)",
10263 list_head->ll_symbol);
10264 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10265 "Location list range start index "
10266 "(%s)", curr->begin);
10267 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10268 For that case we probably need to emit DW_LLE_startx_endx,
10269 but we'd need 2 .debug_addr entries rather than just one. */
10270 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10271 "Location list length (%s)",
10272 list_head->ll_symbol);
10273 }
10274 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10275 {
10276 dwarf2out_maybe_output_loclist_view_pair (curr);
10277 /* If all code is in .text section, the base address is
10278 already provided by the CU attributes. Use
10279 DW_LLE_offset_pair where both addresses are uleb128 encoded
10280 offsets against that base. */
10281 dw2_asm_output_data (1, DW_LLE_offset_pair,
10282 "DW_LLE_offset_pair (%s)",
10283 list_head->ll_symbol);
10284 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10285 "Location list begin address (%s)",
10286 list_head->ll_symbol);
10287 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10288 "Location list end address (%s)",
10289 list_head->ll_symbol);
10290 }
10291 else if (HAVE_AS_LEB128)
10292 {
10293 /* Otherwise, find out how many consecutive entries could share
10294 the same base entry. If just one, emit DW_LLE_start_length,
10295 otherwise emit DW_LLE_base_address for the base address
10296 followed by a series of DW_LLE_offset_pair. */
10297 if (last_section == NULL || curr->section != last_section)
10298 {
10299 dw_loc_list_ref curr2;
10300 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10301 curr2 = curr2->dw_loc_next)
10302 {
10303 if (strcmp (curr2->begin, curr2->end) == 0
10304 && !curr2->force)
10305 continue;
10306 break;
10307 }
10308 if (curr2 == NULL || curr->section != curr2->section)
10309 last_section = NULL;
10310 else
10311 {
10312 last_section = curr->section;
10313 base_label = curr->begin;
10314 dw2_asm_output_data (1, DW_LLE_base_address,
10315 "DW_LLE_base_address (%s)",
10316 list_head->ll_symbol);
10317 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10318 "Base address (%s)",
10319 list_head->ll_symbol);
10320 }
10321 }
10322 /* Only one entry with the same base address. Use
10323 DW_LLE_start_length with absolute address and uleb128
10324 length. */
10325 if (last_section == NULL)
10326 {
10327 dwarf2out_maybe_output_loclist_view_pair (curr);
10328 dw2_asm_output_data (1, DW_LLE_start_length,
10329 "DW_LLE_start_length (%s)",
10330 list_head->ll_symbol);
10331 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10332 "Location list begin address (%s)",
10333 list_head->ll_symbol);
10334 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10335 "Location list length "
10336 "(%s)", list_head->ll_symbol);
10337 }
10338 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10339 DW_LLE_base_address. */
10340 else
10341 {
10342 dwarf2out_maybe_output_loclist_view_pair (curr);
10343 dw2_asm_output_data (1, DW_LLE_offset_pair,
10344 "DW_LLE_offset_pair (%s)",
10345 list_head->ll_symbol);
10346 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10347 "Location list begin address "
10348 "(%s)", list_head->ll_symbol);
10349 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10350 "Location list end address "
10351 "(%s)", list_head->ll_symbol);
10352 }
10353 }
10354 /* The assembler does not support the .uleb128 directive.  Emit
10355 DW_LLE_start_end with a pair of absolute addresses. */
10356 else
10357 {
10358 dwarf2out_maybe_output_loclist_view_pair (curr);
10359 dw2_asm_output_data (1, DW_LLE_start_end,
10360 "DW_LLE_start_end (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10363 "Location list begin address (%s)",
10364 list_head->ll_symbol);
10365 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10366 "Location list end address (%s)",
10367 list_head->ll_symbol);
10368 }
10369 }
10370 else if (dwarf_split_debug_info)
10371 {
10372 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10373 and 4 byte length. */
10374 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10375 "Location list start/length entry (%s)",
10376 list_head->ll_symbol);
10377 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10378 "Location list range start index (%s)",
10379 curr->begin);
10380 /* The length field is 4 bytes. If we ever need to support
10381 an 8-byte length, we can add a new DW_LLE code or fall back
10382 to DW_LLE_GNU_start_end_entry. */
10383 dw2_asm_output_delta (4, curr->end, curr->begin,
10384 "Location list range length (%s)",
10385 list_head->ll_symbol);
10386 }
10387 else if (!have_multiple_function_sections)
10388 {
10389 /* Pair of relative addresses against start of text section. */
10390 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10391 "Location list begin address (%s)",
10392 list_head->ll_symbol);
10393 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10394 "Location list end address (%s)",
10395 list_head->ll_symbol);
10396 }
10397 else
10398 {
10399 /* Pair of absolute addresses. */
10400 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10401 "Location list begin address (%s)",
10402 list_head->ll_symbol);
10403 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10404 "Location list end address (%s)",
10405 list_head->ll_symbol);
10406 }
10407
10408 /* Output the block length for this list of location operations. */
10409 if (dwarf_version >= 5)
10410 dw2_asm_output_data_uleb128 (size, "Location expression size");
10411 else
10412 {
10413 gcc_assert (size <= 0xffff);
10414 dw2_asm_output_data (2, size, "Location expression size");
10415 }
10416
10417 output_loc_sequence (curr->expr, -1);
10418 }
10419
10420 /* And finally list termination. */
10421 if (dwarf_version >= 5)
10422 dw2_asm_output_data (1, DW_LLE_end_of_list,
10423 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10424 else if (dwarf_split_debug_info)
10425 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10426 "Location list terminator (%s)",
10427 list_head->ll_symbol);
10428 else
10429 {
10430 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10431 "Location list terminator begin (%s)",
10432 list_head->ll_symbol);
10433 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10434 "Location list terminator end (%s)",
10435 list_head->ll_symbol);
10436 }
10437
10438 gcc_assert (!list_head->vl_symbol
10439 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10440 }
10441
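/* Illustrative sketch only (DWARF 5, a single .text section, an assembler
   with .uleb128 support, made-up label names): one range of a list emitted
   by output_loc_list comes out roughly as

       .byte    0x4                  # DW_LLE_offset_pair
       .uleb128 .LVL3-.Ltext0        # begin, relative to the CU base address
       .uleb128 .LVL7-.Ltext0        # end
       .uleb128 0x1                  # location expression size
       .byte    0x55                 # DW_OP_reg5
       ...
       .byte    0                    # DW_LLE_end_of_list  */
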
10442 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10443 section. Emit a relocated reference if val_entry is NULL, otherwise,
10444 emit an indirect reference. */
10445
10446 static void
10447 output_range_list_offset (dw_attr_node *a)
10448 {
10449 const char *name = dwarf_attr_name (a->dw_attr);
10450
10451 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10452 {
10453 if (dwarf_version >= 5)
10454 {
10455 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10456 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10457 debug_ranges_section, "%s", name);
10458 }
10459 else
10460 {
10461 char *p = strchr (ranges_section_label, '\0');
10462 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10463 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10464 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10465 debug_ranges_section, "%s", name);
10466 *p = '\0';
10467 }
10468 }
10469 else if (dwarf_version >= 5)
10470 {
10471 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10472 gcc_assert (rnglist_idx);
10473 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10474 }
10475 else
10476 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10477 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10478 "%s (offset from %s)", name, ranges_section_label);
10479 }
10480
10481 /* Output the offset into the debug_loc section. */
10482
10483 static void
10484 output_loc_list_offset (dw_attr_node *a)
10485 {
10486 char *sym = AT_loc_list (a)->ll_symbol;
10487
10488 gcc_assert (sym);
10489 if (!dwarf_split_debug_info)
10490 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10491 "%s", dwarf_attr_name (a->dw_attr));
10492 else if (dwarf_version >= 5)
10493 {
10494 gcc_assert (AT_loc_list (a)->num_assigned);
10495 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10496 dwarf_attr_name (a->dw_attr),
10497 sym);
10498 }
10499 else
10500 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10501 "%s", dwarf_attr_name (a->dw_attr));
10502 }
10503
10504 /* Output the offset of the view list into the debug_loc section.  */
10505
10506 static void
10507 output_view_list_offset (dw_attr_node *a)
10508 {
10509 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10510
10511 gcc_assert (sym);
10512 if (dwarf_split_debug_info)
10513 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10514 "%s", dwarf_attr_name (a->dw_attr));
10515 else
10516 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10517 "%s", dwarf_attr_name (a->dw_attr));
10518 }
10519
10520 /* Output an attribute's index or value appropriately. */
10521
10522 static void
10523 output_attr_index_or_value (dw_attr_node *a)
10524 {
10525 const char *name = dwarf_attr_name (a->dw_attr);
10526
10527 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10528 {
10529 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10530 return;
10531 }
10532 switch (AT_class (a))
10533 {
10534 case dw_val_class_addr:
10535 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10536 break;
10537 case dw_val_class_high_pc:
10538 case dw_val_class_lbl_id:
10539 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10540 break;
10541 default:
10542 gcc_unreachable ();
10543 }
10544 }
10545
10546 /* Output a type signature. */
10547
10548 static inline void
10549 output_signature (const char *sig, const char *name)
10550 {
10551 int i;
10552
10553 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10554 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10555 }
10556
10557 /* Output a discriminant value. */
10558
10559 static inline void
10560 output_discr_value (dw_discr_value *discr_value, const char *name)
10561 {
10562 if (discr_value->pos)
10563 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10564 else
10565 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10566 }
10567
10568 /* Output the DIE and its attributes. Called recursively to generate
10569 the definitions of each child DIE. */
10570
10571 static void
10572 output_die (dw_die_ref die)
10573 {
10574 dw_attr_node *a;
10575 dw_die_ref c;
10576 unsigned long size;
10577 unsigned ix;
10578
10579 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10580 (unsigned long)die->die_offset,
10581 dwarf_tag_name (die->die_tag));
10582
10583 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10584 {
10585 const char *name = dwarf_attr_name (a->dw_attr);
10586
10587 switch (AT_class (a))
10588 {
10589 case dw_val_class_addr:
10590 output_attr_index_or_value (a);
10591 break;
10592
10593 case dw_val_class_offset:
10594 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10595 "%s", name);
10596 break;
10597
10598 case dw_val_class_range_list:
10599 output_range_list_offset (a);
10600 break;
10601
10602 case dw_val_class_loc:
10603 size = size_of_locs (AT_loc (a));
10604
10605 /* Output the block length for this list of location operations. */
10606 if (dwarf_version >= 4)
10607 dw2_asm_output_data_uleb128 (size, "%s", name);
10608 else
10609 dw2_asm_output_data (constant_size (size), size, "%s", name);
10610
10611 output_loc_sequence (AT_loc (a), -1);
10612 break;
10613
10614 case dw_val_class_const:
10615 /* ??? It would be slightly more efficient to use a scheme like the one
10616 used for unsigned constants below, but gdb 4.x does not sign
10617 extend. Gdb 5.x does sign extend. */
10618 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10619 break;
10620
10621 case dw_val_class_unsigned_const:
10622 {
10623 int csize = constant_size (AT_unsigned (a));
10624 if (dwarf_version == 3
10625 && a->dw_attr == DW_AT_data_member_location
10626 && csize >= 4)
10627 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10628 else
10629 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10630 }
10631 break;
10632
10633 case dw_val_class_symview:
10634 {
10635 int vsize;
10636 if (symview_upper_bound <= 0xff)
10637 vsize = 1;
10638 else if (symview_upper_bound <= 0xffff)
10639 vsize = 2;
10640 else if (symview_upper_bound <= 0xffffffff)
10641 vsize = 4;
10642 else
10643 vsize = 8;
10644 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10645 "%s", name);
10646 }
10647 break;
10648
10649 case dw_val_class_const_implicit:
10650 if (flag_debug_asm)
10651 fprintf (asm_out_file, "\t\t\t%s %s ("
10652 HOST_WIDE_INT_PRINT_DEC ")\n",
10653 ASM_COMMENT_START, name, AT_int (a));
10654 break;
10655
10656 case dw_val_class_unsigned_const_implicit:
10657 if (flag_debug_asm)
10658 fprintf (asm_out_file, "\t\t\t%s %s ("
10659 HOST_WIDE_INT_PRINT_HEX ")\n",
10660 ASM_COMMENT_START, name, AT_unsigned (a));
10661 break;
10662
10663 case dw_val_class_const_double:
10664 {
10665 unsigned HOST_WIDE_INT first, second;
10666
10667 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10668 dw2_asm_output_data (1,
10669 HOST_BITS_PER_DOUBLE_INT
10670 / HOST_BITS_PER_CHAR,
10671 NULL);
10672
10673 if (WORDS_BIG_ENDIAN)
10674 {
10675 first = a->dw_attr_val.v.val_double.high;
10676 second = a->dw_attr_val.v.val_double.low;
10677 }
10678 else
10679 {
10680 first = a->dw_attr_val.v.val_double.low;
10681 second = a->dw_attr_val.v.val_double.high;
10682 }
10683
10684 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10685 first, "%s", name);
10686 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10687 second, NULL);
10688 }
10689 break;
10690
10691 case dw_val_class_wide_int:
10692 {
10693 int i;
10694 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10695 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10696 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10697 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10698 * l, NULL);
10699
10700 if (WORDS_BIG_ENDIAN)
10701 for (i = len - 1; i >= 0; --i)
10702 {
10703 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10704 "%s", name);
10705 name = "";
10706 }
10707 else
10708 for (i = 0; i < len; ++i)
10709 {
10710 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10711 "%s", name);
10712 name = "";
10713 }
10714 }
10715 break;
10716
10717 case dw_val_class_vec:
10718 {
10719 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10720 unsigned int len = a->dw_attr_val.v.val_vec.length;
10721 unsigned int i;
10722 unsigned char *p;
10723
10724 dw2_asm_output_data (constant_size (len * elt_size),
10725 len * elt_size, "%s", name);
10726 if (elt_size > sizeof (HOST_WIDE_INT))
10727 {
10728 elt_size /= 2;
10729 len *= 2;
10730 }
10731 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10732 i < len;
10733 i++, p += elt_size)
10734 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10735 "fp or vector constant word %u", i);
10736 break;
10737 }
10738
10739 case dw_val_class_flag:
10740 if (dwarf_version >= 4)
10741 {
10742 /* Currently all add_AT_flag calls pass in 1 as last argument,
10743 so DW_FORM_flag_present can be used. If that ever changes,
10744 we'll need to use DW_FORM_flag and have some optimization
10745 in build_abbrev_table that will change those to
10746 DW_FORM_flag_present if it is set to 1 in all DIEs using
10747 the same abbrev entry. */
10748 gcc_assert (AT_flag (a) == 1);
10749 if (flag_debug_asm)
10750 fprintf (asm_out_file, "\t\t\t%s %s\n",
10751 ASM_COMMENT_START, name);
10752 break;
10753 }
10754 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10755 break;
10756
10757 case dw_val_class_loc_list:
10758 output_loc_list_offset (a);
10759 break;
10760
10761 case dw_val_class_view_list:
10762 output_view_list_offset (a);
10763 break;
10764
10765 case dw_val_class_die_ref:
10766 if (AT_ref_external (a))
10767 {
10768 if (AT_ref (a)->comdat_type_p)
10769 {
10770 comdat_type_node *type_node
10771 = AT_ref (a)->die_id.die_type_node;
10772
10773 gcc_assert (type_node);
10774 output_signature (type_node->signature, name);
10775 }
10776 else
10777 {
10778 const char *sym = AT_ref (a)->die_id.die_symbol;
10779 int size;
10780
10781 gcc_assert (sym);
10782 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10783 length, whereas in DWARF3 it's always sized as an
10784 offset. */
10785 if (dwarf_version == 2)
10786 size = DWARF2_ADDR_SIZE;
10787 else
10788 size = DWARF_OFFSET_SIZE;
10789 /* ??? We cannot unconditionally output die_offset if
10790 non-zero - others might create references to those
10791 DIEs via symbols.
10792 And we do not clear its DIE offset after outputting it
10793 (and the label refers to the actual DIEs, not the
10794 DWARF CU unit header, which is where using label + offset
10795 would be the correct thing to do).
10796 ??? This is the reason for the with_offset flag. */
10797 if (AT_ref (a)->with_offset)
10798 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10799 debug_info_section, "%s", name);
10800 else
10801 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10802 name);
10803 }
10804 }
10805 else
10806 {
10807 gcc_assert (AT_ref (a)->die_offset);
10808 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10809 "%s", name);
10810 }
10811 break;
10812
10813 case dw_val_class_fde_ref:
10814 {
10815 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10816
10817 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10818 a->dw_attr_val.v.val_fde_index * 2);
10819 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10820 "%s", name);
10821 }
10822 break;
10823
10824 case dw_val_class_vms_delta:
10825 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10826 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10827 AT_vms_delta2 (a), AT_vms_delta1 (a),
10828 "%s", name);
10829 #else
10830 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10831 AT_vms_delta2 (a), AT_vms_delta1 (a),
10832 "%s", name);
10833 #endif
10834 break;
10835
10836 case dw_val_class_lbl_id:
10837 output_attr_index_or_value (a);
10838 break;
10839
10840 case dw_val_class_lineptr:
10841 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10842 debug_line_section, "%s", name);
10843 break;
10844
10845 case dw_val_class_macptr:
10846 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10847 debug_macinfo_section, "%s", name);
10848 break;
10849
10850 case dw_val_class_loclistsptr:
10851 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10852 debug_loc_section, "%s", name);
10853 break;
10854
10855 case dw_val_class_str:
10856 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10857 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10858 a->dw_attr_val.v.val_str->label,
10859 debug_str_section,
10860 "%s: \"%s\"", name, AT_string (a));
10861 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10862 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10863 a->dw_attr_val.v.val_str->label,
10864 debug_line_str_section,
10865 "%s: \"%s\"", name, AT_string (a));
10866 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10867 dw2_asm_output_data_uleb128 (AT_index (a),
10868 "%s: \"%s\"", name, AT_string (a));
10869 else
10870 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10871 break;
10872
10873 case dw_val_class_file:
10874 {
10875 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10876
10877 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10878 a->dw_attr_val.v.val_file->filename);
10879 break;
10880 }
10881
10882 case dw_val_class_file_implicit:
10883 if (flag_debug_asm)
10884 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10885 ASM_COMMENT_START, name,
10886 maybe_emit_file (a->dw_attr_val.v.val_file),
10887 a->dw_attr_val.v.val_file->filename);
10888 break;
10889
10890 case dw_val_class_data8:
10891 {
10892 int i;
10893
10894 for (i = 0; i < 8; i++)
10895 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10896 i == 0 ? "%s" : NULL, name);
10897 break;
10898 }
10899
10900 case dw_val_class_high_pc:
10901 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10902 get_AT_low_pc (die), "DW_AT_high_pc");
10903 break;
10904
10905 case dw_val_class_discr_value:
10906 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10907 break;
10908
10909 case dw_val_class_discr_list:
10910 {
10911 dw_discr_list_ref list = AT_discr_list (a);
10912 const int size = size_of_discr_list (list);
10913
10914 /* This is a block, so output its length first. */
10915 dw2_asm_output_data (constant_size (size), size,
10916 "%s: block size", name);
10917
10918 for (; list != NULL; list = list->dw_discr_next)
10919 {
10920 /* One byte for the discriminant value descriptor, and then as
10921 many LEB128 numbers as required. */
10922 if (list->dw_discr_range)
10923 dw2_asm_output_data (1, DW_DSC_range,
10924 "%s: DW_DSC_range", name);
10925 else
10926 dw2_asm_output_data (1, DW_DSC_label,
10927 "%s: DW_DSC_label", name);
10928
10929 output_discr_value (&list->dw_discr_lower_bound, name);
10930 if (list->dw_discr_range)
10931 output_discr_value (&list->dw_discr_upper_bound, name);
10932 }
10933 break;
10934 }
10935
10936 default:
10937 gcc_unreachable ();
10938 }
10939 }
10940
10941 FOR_EACH_CHILD (die, c, output_die (c));
10942
10943 /* Add null byte to terminate sibling list. */
10944 if (die->die_child != NULL)
10945 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10946 (unsigned long) die->die_offset);
10947 }
10948
10949 /* Output the dwarf version number. */
10950
10951 static void
10952 output_dwarf_version ()
10953 {
10954 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10955 views in loclist. That will change eventually. */
10956 if (dwarf_version == 6)
10957 {
10958 static bool once;
10959 if (!once)
10960 {
10961 warning (0,
10962 "-gdwarf-6 is output as version 5 with incompatibilities");
10963 once = true;
10964 }
10965 dw2_asm_output_data (2, 5, "DWARF version number");
10966 }
10967 else
10968 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10969 }
10970
10971 /* Output the compilation unit that appears at the beginning of the
10972 .debug_info section, and precedes the DIE descriptions. */
10973
10974 static void
10975 output_compilation_unit_header (enum dwarf_unit_type ut)
10976 {
10977 if (!XCOFF_DEBUGGING_INFO)
10978 {
10979 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10980 dw2_asm_output_data (4, 0xffffffff,
10981 "Initial length escape value indicating 64-bit DWARF extension");
10982 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10983 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10984 "Length of Compilation Unit Info");
10985 }
10986
10987 output_dwarf_version ();
10988 if (dwarf_version >= 5)
10989 {
10990 const char *name;
10991 switch (ut)
10992 {
10993 case DW_UT_compile: name = "DW_UT_compile"; break;
10994 case DW_UT_type: name = "DW_UT_type"; break;
10995 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10996 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10997 default: gcc_unreachable ();
10998 }
10999 dw2_asm_output_data (1, ut, "%s", name);
11000 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11001 }
11002 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11003 debug_abbrev_section,
11004 "Offset Into Abbrev. Section");
11005 if (dwarf_version < 5)
11006 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11007 }
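/* Illustrative sketch, not part of the original source: the unit header
   emitted above orders its fields differently before and after DWARF 5.
   The structs below (never compiled, struct padding ignored, 32-bit DWARF
   assumed, so 4-byte section offsets and no 0xffffffff initial-length
   escape) only picture that layout.  */
#if 0
struct dwarf_cu_header_v2_to_v4		/* dwarf_version < 5 */
{
  uint32_t unit_length;			/* Length of Compilation Unit Info */
  uint16_t version;			/* DWARF version number */
  uint32_t debug_abbrev_offset;		/* Offset Into Abbrev. Section */
  uint8_t  address_size;		/* Pointer Size (in bytes) */
};

struct dwarf_cu_header_v5		/* dwarf_version >= 5 */
{
  uint32_t unit_length;
  uint16_t version;
  uint8_t  unit_type;			/* DW_UT_compile, DW_UT_type, ... */
  uint8_t  address_size;
  uint32_t debug_abbrev_offset;
};
#endif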
11008
11009 /* Output the compilation unit DIE and its children. */
11010
11011 static void
11012 output_comp_unit (dw_die_ref die, int output_if_empty,
11013 const unsigned char *dwo_id)
11014 {
11015 const char *secname, *oldsym;
11016 char *tmp;
11017
11018 /* Unless we are outputting the main CU, we may throw away empty ones. */
11019 if (!output_if_empty && die->die_child == NULL)
11020 return;
11021
11022 /* Even if there are no children of this DIE, we must output the information
11023 about the compilation unit. Otherwise, on an empty translation unit, we
11024 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11025 will then complain when examining the file. First mark all the DIEs in
11026 this CU so we know which get local refs. */
11027 mark_dies (die);
11028
11029 external_ref_hash_type *extern_map = optimize_external_refs (die);
11030
11031 /* For now, optimize only the main CU; in order to optimize the rest
11032 we'd need to see all of them earlier. Leave the rest for post-linking
11033 tools like DWZ. */
11034 if (die == comp_unit_die ())
11035 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11036
11037 build_abbrev_table (die, extern_map);
11038
11039 optimize_abbrev_table ();
11040
11041 delete extern_map;
11042
11043 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11044 next_die_offset = (dwo_id
11045 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11046 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11047 calc_die_sizes (die);
11048
11049 oldsym = die->die_id.die_symbol;
11050 if (oldsym && die->comdat_type_p)
11051 {
11052 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11053
11054 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11055 secname = tmp;
11056 die->die_id.die_symbol = NULL;
11057 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11058 }
11059 else
11060 {
11061 switch_to_section (debug_info_section);
11062 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11063 info_section_emitted = true;
11064 }
11065
11066 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11067 debug info section, not on the CU DIE. */
11068 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11069 {
11070 /* ??? No way to get visibility assembled without a decl. */
11071 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11072 get_identifier (oldsym), char_type_node);
11073 TREE_PUBLIC (decl) = true;
11074 TREE_STATIC (decl) = true;
11075 DECL_ARTIFICIAL (decl) = true;
11076 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11077 DECL_VISIBILITY_SPECIFIED (decl) = true;
11078 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11079 #ifdef ASM_WEAKEN_LABEL
11080 /* We prefer a .weak because that handles duplicates from duplicate
11081 archive members in a graceful way. */
11082 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11083 #else
11084 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11085 #endif
11086 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11087 }
11088
11089 /* Output debugging information. */
11090 output_compilation_unit_header (dwo_id
11091 ? DW_UT_split_compile : DW_UT_compile);
11092 if (dwarf_version >= 5)
11093 {
11094 if (dwo_id != NULL)
11095 for (int i = 0; i < 8; i++)
11096 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11097 }
11098 output_die (die);
11099
11100 /* Leave the marks on the main CU, so we can check them in
11101 output_pubnames. */
11102 if (oldsym)
11103 {
11104 unmark_dies (die);
11105 die->die_id.die_symbol = oldsym;
11106 }
11107 }
11108
11109 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11110 and .debug_pubtypes. This is configured per-target, but can be
11111 overridden by the -gpubnames or -gno-pubnames options. */
11112
11113 static inline bool
11114 want_pubnames (void)
11115 {
11116 if (debug_info_level <= DINFO_LEVEL_TERSE)
11117 return false;
11118 if (debug_generate_pub_sections != -1)
11119 return debug_generate_pub_sections;
11120 return targetm.want_debug_pub_sections;
11121 }
11122
11123 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11124
11125 static void
11126 add_AT_pubnames (dw_die_ref die)
11127 {
11128 if (want_pubnames ())
11129 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11130 }
11131
11132 /* Add a string attribute value to a skeleton DIE. */
11133
11134 static inline void
11135 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11136 const char *str)
11137 {
11138 dw_attr_node attr;
11139 struct indirect_string_node *node;
11140
11141 if (! skeleton_debug_str_hash)
11142 skeleton_debug_str_hash
11143 = hash_table<indirect_string_hasher>::create_ggc (10);
11144
11145 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11146 find_string_form (node);
11147 if (node->form == dwarf_FORM (DW_FORM_strx))
11148 node->form = DW_FORM_strp;
11149
11150 attr.dw_attr = attr_kind;
11151 attr.dw_attr_val.val_class = dw_val_class_str;
11152 attr.dw_attr_val.val_entry = NULL;
11153 attr.dw_attr_val.v.val_str = node;
11154 add_dwarf_attr (die, &attr);
11155 }
11156
11157 /* Helper function to generate top-level dies for skeleton debug_info and
11158 debug_types. */
11159
11160 static void
11161 add_top_level_skeleton_die_attrs (dw_die_ref die)
11162 {
11163 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11164 const char *comp_dir = comp_dir_string ();
11165
11166 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11167 if (comp_dir != NULL)
11168 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11169 add_AT_pubnames (die);
11170 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11171 }
11172
11173 /* Output skeleton debug sections that point to the dwo file. */
11174
11175 static void
11176 output_skeleton_debug_sections (dw_die_ref comp_unit,
11177 const unsigned char *dwo_id)
11178 {
11179 /* These attributes will be found in the full debug_info section. */
11180 remove_AT (comp_unit, DW_AT_producer);
11181 remove_AT (comp_unit, DW_AT_language);
11182
11183 switch_to_section (debug_skeleton_info_section);
11184 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11185
11186 /* Produce the skeleton compilation-unit header. This one differs enough from
11187 a normal CU header that it's better not to call
11188 output_compilation_unit_header. */
11189 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11190 dw2_asm_output_data (4, 0xffffffff,
11191 "Initial length escape value indicating 64-bit "
11192 "DWARF extension");
11193
11194 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11195 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11196 - DWARF_INITIAL_LENGTH_SIZE
11197 + size_of_die (comp_unit),
11198 "Length of Compilation Unit Info");
11199 output_dwarf_version ();
11200 if (dwarf_version >= 5)
11201 {
11202 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11203 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11204 }
11205 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11206 debug_skeleton_abbrev_section,
11207 "Offset Into Abbrev. Section");
11208 if (dwarf_version < 5)
11209 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11210 else
11211 for (int i = 0; i < 8; i++)
11212 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11213
11214 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11215 output_die (comp_unit);
11216
11217 /* Build the skeleton debug_abbrev section. */
11218 switch_to_section (debug_skeleton_abbrev_section);
11219 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11220
11221 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11222
11223 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11224 }
11225
11226 /* Output a comdat type unit DIE and its children. */
11227
11228 static void
11229 output_comdat_type_unit (comdat_type_node *node)
11230 {
11231 const char *secname;
11232 char *tmp;
11233 int i;
11234 #if defined (OBJECT_FORMAT_ELF)
11235 tree comdat_key;
11236 #endif
11237
11238 /* First mark all the DIEs in this CU so we know which get local refs. */
11239 mark_dies (node->root_die);
11240
11241 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11242
11243 build_abbrev_table (node->root_die, extern_map);
11244
11245 delete extern_map;
11246 extern_map = NULL;
11247
11248 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11249 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11250 calc_die_sizes (node->root_die);
11251
11252 #if defined (OBJECT_FORMAT_ELF)
11253 if (dwarf_version >= 5)
11254 {
11255 if (!dwarf_split_debug_info)
11256 secname = ".debug_info";
11257 else
11258 secname = ".debug_info.dwo";
11259 }
11260 else if (!dwarf_split_debug_info)
11261 secname = ".debug_types";
11262 else
11263 secname = ".debug_types.dwo";
11264
11265 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11266 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11267 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11268 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11269 comdat_key = get_identifier (tmp);
11270 targetm.asm_out.named_section (secname,
11271 SECTION_DEBUG | SECTION_LINKONCE,
11272 comdat_key);
11273 #else
11274 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11275 sprintf (tmp, (dwarf_version >= 5
11276 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11277 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11278 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11279 secname = tmp;
11280 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11281 #endif
11282
11283 /* Output debugging information. */
11284 output_compilation_unit_header (dwarf_split_debug_info
11285 ? DW_UT_split_type : DW_UT_type);
11286 output_signature (node->signature, "Type Signature");
11287 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11288 "Offset to Type DIE");
11289 output_die (node->root_die);
11290
11291 unmark_dies (node->root_die);
11292 }
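/* Illustrative note, not part of the original source: on ELF targets the
   type unit above goes into a COMDAT group whose key is "wi." (or "wt."
   before DWARF 5) followed by the 8-byte type signature in hex, e.g. the
   signature bytes { 0xde, 0xad, 0xbe, 0xef, 0x01, 0x23, 0x45, 0x67 } give
   the key "wi.deadbeef01234567".  Non-ELF targets instead switch to a
   section named ".gnu.linkonce.wi.deadbeef01234567" (or ".wt." likewise).  */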
11293
11294 /* Return the DWARF2/3 pubname associated with a decl. */
11295
11296 static const char *
11297 dwarf2_name (tree decl, int scope)
11298 {
11299 if (DECL_NAMELESS (decl))
11300 return NULL;
11301 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11302 }
11303
11304 /* Add a new entry to .debug_pubnames if appropriate. */
11305
11306 static void
11307 add_pubname_string (const char *str, dw_die_ref die)
11308 {
11309 pubname_entry e;
11310
11311 e.die = die;
11312 e.name = xstrdup (str);
11313 vec_safe_push (pubname_table, e);
11314 }
11315
11316 static void
11317 add_pubname (tree decl, dw_die_ref die)
11318 {
11319 if (!want_pubnames ())
11320 return;
11321
11322 /* Don't add items to the table when we expect that the consumer will have
11323 just read the enclosing die. For example, if the consumer is looking at a
11324 class_member, it will either be inside the class already, or will have just
11325 looked up the class to find the member. Either way, searching the class is
11326 faster than searching the index. */
11327 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11328 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11329 {
11330 const char *name = dwarf2_name (decl, 1);
11331
11332 if (name)
11333 add_pubname_string (name, die);
11334 }
11335 }
11336
11337 /* Add an enumerator to the pubnames section. */
11338
11339 static void
11340 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11341 {
11342 pubname_entry e;
11343
11344 gcc_assert (scope_name);
11345 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11346 e.die = die;
11347 vec_safe_push (pubname_table, e);
11348 }
11349
11350 /* Add a new entry to .debug_pubtypes if appropriate. */
11351
11352 static void
11353 add_pubtype (tree decl, dw_die_ref die)
11354 {
11355 pubname_entry e;
11356
11357 if (!want_pubnames ())
11358 return;
11359
11360 if ((TREE_PUBLIC (decl)
11361 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11362 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11363 {
11364 tree scope = NULL;
11365 const char *scope_name = "";
11366 const char *sep = is_cxx () ? "::" : ".";
11367 const char *name;
11368
11369 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11370 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11371 {
11372 scope_name = lang_hooks.dwarf_name (scope, 1);
11373 if (scope_name != NULL && scope_name[0] != '\0')
11374 scope_name = concat (scope_name, sep, NULL);
11375 else
11376 scope_name = "";
11377 }
11378
11379 if (TYPE_P (decl))
11380 name = type_tag (decl);
11381 else
11382 name = lang_hooks.dwarf_name (decl, 1);
11383
11384 /* If we don't have a name for the type, there's no point in adding
11385 it to the table. */
11386 if (name != NULL && name[0] != '\0')
11387 {
11388 e.die = die;
11389 e.name = concat (scope_name, name, NULL);
11390 vec_safe_push (pubtype_table, e);
11391 }
11392
11393 /* Although it might be more consistent to add the pubinfo for the
11394 enumerators as their dies are created, they should only be added if the
11395 enum type meets the criteria above. So rather than re-check the parent
11396 enum type whenever an enumerator die is created, just output them all
11397 here. This isn't protected by the name conditional because anonymous
11398 enums don't have names. */
11399 if (die->die_tag == DW_TAG_enumeration_type)
11400 {
11401 dw_die_ref c;
11402
11403 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11404 }
11405 }
11406 }
11407
11408 /* Output a single entry in the pubnames table. */
11409
11410 static void
11411 output_pubname (dw_offset die_offset, pubname_entry *entry)
11412 {
11413 dw_die_ref die = entry->die;
11414 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11415
11416 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11417
11418 if (debug_generate_pub_sections == 2)
11419 {
11420 /* This logic follows gdb's method for determining the value of the flag
11421 byte. */
11422 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11423 switch (die->die_tag)
11424 {
11425 case DW_TAG_typedef:
11426 case DW_TAG_base_type:
11427 case DW_TAG_subrange_type:
11428 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11429 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11430 break;
11431 case DW_TAG_enumerator:
11432 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11433 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11434 if (!is_cxx ())
11435 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11436 break;
11437 case DW_TAG_subprogram:
11438 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11439 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11440 if (!is_ada ())
11441 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11442 break;
11443 case DW_TAG_constant:
11444 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11445 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11446 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11447 break;
11448 case DW_TAG_variable:
11449 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11450 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11451 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11452 break;
11453 case DW_TAG_namespace:
11454 case DW_TAG_imported_declaration:
11455 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11456 break;
11457 case DW_TAG_class_type:
11458 case DW_TAG_interface_type:
11459 case DW_TAG_structure_type:
11460 case DW_TAG_union_type:
11461 case DW_TAG_enumeration_type:
11462 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11463 if (!is_cxx ())
11464 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11465 break;
11466 default:
11467 /* An unusual tag. Leave the flag-byte empty. */
11468 break;
11469 }
11470 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11471 "GDB-index flags");
11472 }
11473
11474 dw2_asm_output_nstring (entry->name, -1, "external name");
11475 }
11476
11477
11478 /* Output the public names table used to speed up access to externally
11479 visible names; or the public types table used to find type definitions. */
11480
11481 static void
11482 output_pubnames (vec<pubname_entry, va_gc> *names)
11483 {
11484 unsigned i;
11485 unsigned long pubnames_length = size_of_pubnames (names);
11486 pubname_entry *pub;
11487
11488 if (!XCOFF_DEBUGGING_INFO)
11489 {
11490 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11491 dw2_asm_output_data (4, 0xffffffff,
11492 "Initial length escape value indicating 64-bit DWARF extension");
11493 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11494 "Pub Info Length");
11495 }
11496
11497 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11498 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11499
11500 if (dwarf_split_debug_info)
11501 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11502 debug_skeleton_info_section,
11503 "Offset of Compilation Unit Info");
11504 else
11505 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11506 debug_info_section,
11507 "Offset of Compilation Unit Info");
11508 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11509 "Compilation Unit Length");
11510
11511 FOR_EACH_VEC_ELT (*names, i, pub)
11512 {
11513 if (include_pubname_in_output (names, pub))
11514 {
11515 dw_offset die_offset = pub->die->die_offset;
11516
11517 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11518 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11519 gcc_assert (pub->die->die_mark);
11520
11521 /* If we're putting types in their own .debug_types sections,
11522 the .debug_pubtypes table will still point to the compile
11523 unit (not the type unit), so we want to use the offset of
11524 the skeleton DIE (if there is one). */
11525 if (pub->die->comdat_type_p && names == pubtype_table)
11526 {
11527 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11528
11529 if (type_node != NULL)
11530 die_offset = (type_node->skeleton_die != NULL
11531 ? type_node->skeleton_die->die_offset
11532 : comp_unit_die ()->die_offset);
11533 }
11534
11535 output_pubname (die_offset, pub);
11536 }
11537 }
11538
11539 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11540 }
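/* Illustrative note, not part of the original source: each pubnames or
   pubtypes set written above is a header (length, version 2, offset and
   length of the associated compilation unit) followed by pairs of DIE
   offset and NUL-terminated name, closed by a zero offset.  When
   debug_generate_pub_sections == 2 (GNU-style pubnames, as requested by
   -ggnu-pubnames) a one-byte GDB-index flag is inserted between each
   offset and its name.  */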
11541
11542 /* Output public names and types tables if necessary. */
11543
11544 static void
11545 output_pubtables (void)
11546 {
11547 if (!want_pubnames () || !info_section_emitted)
11548 return;
11549
11550 switch_to_section (debug_pubnames_section);
11551 output_pubnames (pubname_table);
11552 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11553 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11554 simply won't look for the section. */
11555 switch_to_section (debug_pubtypes_section);
11556 output_pubnames (pubtype_table);
11557 }
11558
11559
11560 /* Output the information that goes into the .debug_aranges table.
11561 Namely, define the beginning and ending address range of the
11562 text section generated for this compilation unit. */
11563
11564 static void
11565 output_aranges (void)
11566 {
11567 unsigned i;
11568 unsigned long aranges_length = size_of_aranges ();
11569
11570 if (!XCOFF_DEBUGGING_INFO)
11571 {
11572 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11573 dw2_asm_output_data (4, 0xffffffff,
11574 "Initial length escape value indicating 64-bit DWARF extension");
11575 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11576 "Length of Address Ranges Info");
11577 }
11578
11579 /* Version number for aranges is still 2, even up to DWARF5. */
11580 dw2_asm_output_data (2, 2, "DWARF aranges version");
11581 if (dwarf_split_debug_info)
11582 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11583 debug_skeleton_info_section,
11584 "Offset of Compilation Unit Info");
11585 else
11586 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11587 debug_info_section,
11588 "Offset of Compilation Unit Info");
11589 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11590 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11591
11592 /* We need to align to twice the pointer size here. */
11593 if (DWARF_ARANGES_PAD_SIZE)
11594 {
11595 /* Pad using 2-byte words so that padding is correct for any
11596 pointer size. */
11597 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11598 2 * DWARF2_ADDR_SIZE);
11599 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11600 dw2_asm_output_data (2, 0, NULL);
11601 }
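      /* Illustrative example, not part of the original source: with 32-bit
	 DWARF the header written so far is 4 (length) + 2 (version)
	 + 4 (CU offset) + 1 (address size) + 1 (segment size) = 12 bytes,
	 so with 8-byte addresses two 2-byte zero words pad the header up to
	 the required 16-byte boundary before the first address/length
	 pair.  */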
11602
11603 /* It is necessary not to output these entries if the sections were
11604 not used; in that case the length will be 0 and
11605 the address may end up as 0 if the section is discarded by ld
11606 --gc-sections, leaving an invalid (0, 0) entry that can be
11607 confused with the terminator. */
11608 if (text_section_used)
11609 {
11610 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11611 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11612 text_section_label, "Length");
11613 }
11614 if (cold_text_section_used)
11615 {
11616 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11617 "Address");
11618 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11619 cold_text_section_label, "Length");
11620 }
11621
11622 if (have_multiple_function_sections)
11623 {
11624 unsigned fde_idx;
11625 dw_fde_ref fde;
11626
11627 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11628 {
11629 if (DECL_IGNORED_P (fde->decl))
11630 continue;
11631 if (!fde->in_std_section)
11632 {
11633 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11634 "Address");
11635 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11636 fde->dw_fde_begin, "Length");
11637 }
11638 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11639 {
11640 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11641 "Address");
11642 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11643 fde->dw_fde_second_begin, "Length");
11644 }
11645 }
11646 }
11647
11648 /* Output the terminator words. */
11649 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11650 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11651 }
11652
11653 /* Add a new entry to .debug_ranges. Return its index into
11654 ranges_table vector. */
11655
11656 static unsigned int
11657 add_ranges_num (int num, bool maybe_new_sec)
11658 {
11659 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11660 vec_safe_push (ranges_table, r);
11661 return vec_safe_length (ranges_table) - 1;
11662 }
11663
11664 /* Add a new entry to .debug_ranges corresponding to a block, or a
11665 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11666 this entry might be in a different section from previous range. */
11667
11668 static unsigned int
11669 add_ranges (const_tree block, bool maybe_new_sec)
11670 {
11671 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11672 }
11673
11674 /* Note that (*ranges_table)[offset] is either a head of a rnglist
11675 chain, or middle entry of a chain that will be directly referred to. */
11676
11677 static void
11678 note_rnglist_head (unsigned int offset)
11679 {
11680 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11681 return;
11682 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11683 }
11684
11685 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11686 When using dwarf_split_debug_info, address attributes in dies destined
11687 for the final executable should be direct references--setting the
11688 parameter force_direct ensures this behavior. */
11689
11690 static void
11691 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11692 bool *added, bool force_direct)
11693 {
11694 unsigned int in_use = vec_safe_length (ranges_by_label);
11695 unsigned int offset;
11696 dw_ranges_by_label rbl = { begin, end };
11697 vec_safe_push (ranges_by_label, rbl);
11698 offset = add_ranges_num (-(int)in_use - 1, true);
11699 if (!*added)
11700 {
11701 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11702 *added = true;
11703 note_rnglist_head (offset);
11704 }
11705 }
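/* Illustrative note, not part of the original source: label-pair entries are
   recorded in ranges_table with a negative num that encodes their slot in
   ranges_by_label, num = -(int) in_use - 1, and is decoded on output as
   lab_idx = -num - 1.  So the first pushed pair gets num == -1 and reads
   back as index 0, the second num == -2 and index 1, and so on; positive
   nums remain BLOCK numbers and num == 0 stays the list terminator.  */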
11706
11707 /* Emit .debug_ranges section. */
11708
11709 static void
11710 output_ranges (void)
11711 {
11712 unsigned i;
11713 static const char *const start_fmt = "Offset %#x";
11714 const char *fmt = start_fmt;
11715 dw_ranges *r;
11716
11717 switch_to_section (debug_ranges_section);
11718 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11719 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11720 {
11721 int block_num = r->num;
11722
11723 if (block_num > 0)
11724 {
11725 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11726 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11727
11728 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11729 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11730
11731 /* If all code is in the text section, then the compilation
11732 unit base address defaults to DW_AT_low_pc, which is the
11733 base of the text section. */
11734 if (!have_multiple_function_sections)
11735 {
11736 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11737 text_section_label,
11738 fmt, i * 2 * DWARF2_ADDR_SIZE);
11739 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11740 text_section_label, NULL);
11741 }
11742
11743 /* Otherwise, the compilation unit base address is zero,
11744 which allows us to use absolute addresses, and not worry
11745 about whether the target supports cross-section
11746 arithmetic. */
11747 else
11748 {
11749 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11750 fmt, i * 2 * DWARF2_ADDR_SIZE);
11751 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11752 }
11753
11754 fmt = NULL;
11755 }
11756
11757 /* Negative block_num stands for an index into ranges_by_label. */
11758 else if (block_num < 0)
11759 {
11760 int lab_idx = - block_num - 1;
11761
11762 if (!have_multiple_function_sections)
11763 {
11764 gcc_unreachable ();
11765 #if 0
11766 /* If we ever use add_ranges_by_labels () for a single
11767 function section, all we have to do is to take out
11768 the #if 0 above. */
11769 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11770 (*ranges_by_label)[lab_idx].begin,
11771 text_section_label,
11772 fmt, i * 2 * DWARF2_ADDR_SIZE);
11773 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11774 (*ranges_by_label)[lab_idx].end,
11775 text_section_label, NULL);
11776 #endif
11777 }
11778 else
11779 {
11780 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11781 (*ranges_by_label)[lab_idx].begin,
11782 fmt, i * 2 * DWARF2_ADDR_SIZE);
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11784 (*ranges_by_label)[lab_idx].end,
11785 NULL);
11786 }
11787 }
11788 else
11789 {
11790 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11791 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11792 fmt = start_fmt;
11793 }
11794 }
11795 }
11796
11797 /* Non-zero if .debug_line_str should be used for .debug_line section
11798 strings or strings that are likely shareable with those. */
11799 #define DWARF5_USE_DEBUG_LINE_STR \
11800 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11801 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11802 /* FIXME: there is no .debug_line_str.dwo section, \
11803 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11804 && !dwarf_split_debug_info)
11805
11806 /* Assign .debug_rnglists indexes. */
11807
11808 static void
11809 index_rnglists (void)
11810 {
11811 unsigned i;
11812 dw_ranges *r;
11813
11814 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11815 if (r->label)
11816 r->idx = rnglist_idx++;
11817 }
11818
11819 /* Emit .debug_rnglists section. */
11820
11821 static void
11822 output_rnglists (unsigned generation)
11823 {
11824 unsigned i;
11825 dw_ranges *r;
11826 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11827 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11828 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11829
11830 switch_to_section (debug_ranges_section);
11831 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11832 /* There are up to 4 unique ranges labels per generation.
11833 See also init_sections_and_labels. */
11834 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11835 2 + generation * 4);
11836 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11837 3 + generation * 4);
11838 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11839 dw2_asm_output_data (4, 0xffffffff,
11840 "Initial length escape value indicating "
11841 "64-bit DWARF extension");
11842 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11843 "Length of Range Lists");
11844 ASM_OUTPUT_LABEL (asm_out_file, l1);
11845 output_dwarf_version ();
11846 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11847 dw2_asm_output_data (1, 0, "Segment Size");
11848 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11849 about relocation sizes and primarily care about the size of .debug*
11850 sections in linked shared libraries and executables, then
11851 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11852 into it are usually larger than just DW_FORM_sec_offset offsets
11853 into the .debug_rnglists section. */
11854 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11855 "Offset Entry Count");
11856 if (dwarf_split_debug_info)
11857 {
11858 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11859 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11860 if (r->label)
11861 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11862 ranges_base_label, NULL);
11863 }
11864
11865 const char *lab = "";
11866 unsigned int len = vec_safe_length (ranges_table);
11867 const char *base = NULL;
11868 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11869 {
11870 int block_num = r->num;
11871
11872 if (r->label)
11873 {
11874 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11875 lab = r->label;
11876 }
11877 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11878 base = NULL;
11879 if (block_num > 0)
11880 {
11881 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11882 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11883
11884 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11885 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11886
11887 if (HAVE_AS_LEB128)
11888 {
11889 /* If all code is in the text section, then the compilation
11890 unit base address defaults to DW_AT_low_pc, which is the
11891 base of the text section. */
11892 if (!have_multiple_function_sections)
11893 {
11894 dw2_asm_output_data (1, DW_RLE_offset_pair,
11895 "DW_RLE_offset_pair (%s)", lab);
11896 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11897 "Range begin address (%s)", lab);
11898 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11899 "Range end address (%s)", lab);
11900 continue;
11901 }
11902 if (base == NULL)
11903 {
11904 dw_ranges *r2 = NULL;
11905 if (i < len - 1)
11906 r2 = &(*ranges_table)[i + 1];
11907 if (r2
11908 && r2->num != 0
11909 && r2->label == NULL
11910 && !r2->maybe_new_sec)
11911 {
11912 dw2_asm_output_data (1, DW_RLE_base_address,
11913 "DW_RLE_base_address (%s)", lab);
11914 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11915 "Base address (%s)", lab);
11916 strcpy (basebuf, blabel);
11917 base = basebuf;
11918 }
11919 }
11920 if (base)
11921 {
11922 dw2_asm_output_data (1, DW_RLE_offset_pair,
11923 "DW_RLE_offset_pair (%s)", lab);
11924 dw2_asm_output_delta_uleb128 (blabel, base,
11925 "Range begin address (%s)", lab);
11926 dw2_asm_output_delta_uleb128 (elabel, base,
11927 "Range end address (%s)", lab);
11928 continue;
11929 }
11930 dw2_asm_output_data (1, DW_RLE_start_length,
11931 "DW_RLE_start_length (%s)", lab);
11932 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11933 "Range begin address (%s)", lab);
11934 dw2_asm_output_delta_uleb128 (elabel, blabel,
11935 "Range length (%s)", lab);
11936 }
11937 else
11938 {
11939 dw2_asm_output_data (1, DW_RLE_start_end,
11940 "DW_RLE_start_end (%s)", lab);
11941 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11942 "Range begin address (%s)", lab);
11943 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11944 "Range end address (%s)", lab);
11945 }
11946 }
11947
11948 /* Negative block_num stands for an index into ranges_by_label. */
11949 else if (block_num < 0)
11950 {
11951 int lab_idx = - block_num - 1;
11952 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11953 const char *elabel = (*ranges_by_label)[lab_idx].end;
11954
11955 if (!have_multiple_function_sections)
11956 gcc_unreachable ();
11957 if (HAVE_AS_LEB128)
11958 {
11959 dw2_asm_output_data (1, DW_RLE_start_length,
11960 "DW_RLE_start_length (%s)", lab);
11961 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11962 "Range begin address (%s)", lab);
11963 dw2_asm_output_delta_uleb128 (elabel, blabel,
11964 "Range length (%s)", lab);
11965 }
11966 else
11967 {
11968 dw2_asm_output_data (1, DW_RLE_start_end,
11969 "DW_RLE_start_end (%s)", lab);
11970 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11971 "Range begin address (%s)", lab);
11972 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11973 "Range end address (%s)", lab);
11974 }
11975 }
11976 else
11977 dw2_asm_output_data (1, DW_RLE_end_of_list,
11978 "DW_RLE_end_of_list (%s)", lab);
11979 }
11980 ASM_OUTPUT_LABEL (asm_out_file, l2);
11981 }
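/* Illustrative example, not part of the original source: with an assembler
   that supports .uleb128 (HAVE_AS_LEB128) and multiple function sections, a
   range list emitted above typically has the shape

     DW_RLE_base_address  <address of the first sub-range's begin label>
     DW_RLE_offset_pair   <uleb128 begin - base> <uleb128 end - base>
     DW_RLE_offset_pair   ...
     DW_RLE_start_length  <address> <uleb128 length>  (no base in effect)
     DW_RLE_end_of_list

   whereas without LEB128 support every sub-range falls back to the
   fixed-size DW_RLE_start_end <begin address> <end address> form.  */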
11982
11983 /* Data structure containing information about input files. */
11984 struct file_info
11985 {
11986 const char *path; /* Complete file name. */
11987 const char *fname; /* File name part. */
11988 int length; /* Length of entire string. */
11989 struct dwarf_file_data * file_idx; /* Index in input file table. */
11990 int dir_idx; /* Index in directory table. */
11991 };
11992
11993 /* Data structure containing information about directories with source
11994 files. */
11995 struct dir_info
11996 {
11997 const char *path; /* Path including directory name. */
11998 int length; /* Path length. */
11999 int prefix; /* Index of directory entry which is a prefix. */
12000 int count; /* Number of files in this directory. */
12001 int dir_idx; /* Index of directory used as base. */
12002 };
12003
12004 /* Callback function for file_info comparison. We sort by looking at
12005 the directories in the path. */
12006
12007 static int
12008 file_info_cmp (const void *p1, const void *p2)
12009 {
12010 const struct file_info *const s1 = (const struct file_info *) p1;
12011 const struct file_info *const s2 = (const struct file_info *) p2;
12012 const unsigned char *cp1;
12013 const unsigned char *cp2;
12014
12015 /* Take care of file names without directories. We need to make sure that
12016 we return consistent values to qsort since some implementations get confused if
12017 we return the same value when identical operands are passed in opposite
12018 orders. So if neither has a directory, return 0 and otherwise return
12019 1 or -1 depending on which one has the directory. We want the one with
12020 the directory to sort after the one without, so all no directory files
12021 are at the start (normally only the compilation unit file). */
12022 if ((s1->path == s1->fname || s2->path == s2->fname))
12023 return (s2->path == s2->fname) - (s1->path == s1->fname);
12024
12025 cp1 = (const unsigned char *) s1->path;
12026 cp2 = (const unsigned char *) s2->path;
12027
12028 while (1)
12029 {
12030 ++cp1;
12031 ++cp2;
12032 /* Reached the end of the first path? If so, handle like above,
12033 but now we want longer directory prefixes before shorter ones. */
12034 if ((cp1 == (const unsigned char *) s1->fname)
12035 || (cp2 == (const unsigned char *) s2->fname))
12036 return ((cp1 == (const unsigned char *) s1->fname)
12037 - (cp2 == (const unsigned char *) s2->fname));
12038
12039 /* Character of current path component the same? */
12040 else if (*cp1 != *cp2)
12041 return *cp1 - *cp2;
12042 }
12043 }
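/* Illustrative example, not part of the original source: for the paths
   "main.c", "src/a.c" and "src/util/b.c" this comparator yields the order
   main.c, src/util/b.c, src/a.c - entries with no directory sort first,
   and where one directory is a prefix of another the longer prefix sorts
   earlier, so files sharing a directory end up adjacent for the
   directory-table construction below.  */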
12044
12045 struct file_name_acquire_data
12046 {
12047 struct file_info *files;
12048 int used_files;
12049 int max_files;
12050 };
12051
12052 /* Traversal function for the hash table. */
12053
12054 int
12055 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12056 {
12057 struct dwarf_file_data *d = *slot;
12058 struct file_info *fi;
12059 const char *f;
12060
12061 gcc_assert (fnad->max_files >= d->emitted_number);
12062
12063 if (! d->emitted_number)
12064 return 1;
12065
12066 gcc_assert (fnad->max_files != fnad->used_files);
12067
12068 fi = fnad->files + fnad->used_files++;
12069
12070 /* Skip all leading "./". */
12071 f = d->filename;
12072 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12073 f += 2;
12074
12075 /* Create a new array entry. */
12076 fi->path = f;
12077 fi->length = strlen (f);
12078 fi->file_idx = d;
12079
12080 /* Search for the file name part. */
12081 f = strrchr (f, DIR_SEPARATOR);
12082 #if defined (DIR_SEPARATOR_2)
12083 {
12084 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12085
12086 if (g != NULL)
12087 {
12088 if (f == NULL || f < g)
12089 f = g;
12090 }
12091 }
12092 #endif
12093
12094 fi->fname = f == NULL ? fi->path : f + 1;
12095 return 1;
12096 }
12097
12098 /* Helper function for output_file_names. Emit a FORM-encoded
12099 string STR, with assembly comment start ENTRY_KIND and
12100 index IDX. */
12101
12102 static void
12103 output_line_string (enum dwarf_form form, const char *str,
12104 const char *entry_kind, unsigned int idx)
12105 {
12106 switch (form)
12107 {
12108 case DW_FORM_string:
12109 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12110 break;
12111 case DW_FORM_line_strp:
12112 if (!debug_line_str_hash)
12113 debug_line_str_hash
12114 = hash_table<indirect_string_hasher>::create_ggc (10);
12115
12116 struct indirect_string_node *node;
12117 node = find_AT_string_in_table (str, debug_line_str_hash);
12118 set_indirect_string (node);
12119 node->form = form;
12120 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12121 debug_line_str_section, "%s: %#x: \"%s\"",
12122 entry_kind, 0, node->str);
12123 break;
12124 default:
12125 gcc_unreachable ();
12126 }
12127 }
12128
12129 /* Output the directory table and the file name table. We try to minimize
12130 the total amount of memory needed. A heuristic is used to avoid large
12131 slowdowns with many input files. */
12132
12133 static void
12134 output_file_names (void)
12135 {
12136 struct file_name_acquire_data fnad;
12137 int numfiles;
12138 struct file_info *files;
12139 struct dir_info *dirs;
12140 int *saved;
12141 int *savehere;
12142 int *backmap;
12143 int ndirs;
12144 int idx_offset;
12145 int i;
12146
12147 if (!last_emitted_file)
12148 {
12149 if (dwarf_version >= 5)
12150 {
12151 dw2_asm_output_data (1, 0, "Directory entry format count");
12152 dw2_asm_output_data_uleb128 (0, "Directories count");
12153 dw2_asm_output_data (1, 0, "File name entry format count");
12154 dw2_asm_output_data_uleb128 (0, "File names count");
12155 }
12156 else
12157 {
12158 dw2_asm_output_data (1, 0, "End directory table");
12159 dw2_asm_output_data (1, 0, "End file name table");
12160 }
12161 return;
12162 }
12163
12164 numfiles = last_emitted_file->emitted_number;
12165
12166 /* Allocate the various arrays we need. */
12167 files = XALLOCAVEC (struct file_info, numfiles);
12168 dirs = XALLOCAVEC (struct dir_info, numfiles);
12169
12170 fnad.files = files;
12171 fnad.used_files = 0;
12172 fnad.max_files = numfiles;
12173 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12174 gcc_assert (fnad.used_files == fnad.max_files);
12175
12176 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12177
12178 /* Find all the different directories used. */
12179 dirs[0].path = files[0].path;
12180 dirs[0].length = files[0].fname - files[0].path;
12181 dirs[0].prefix = -1;
12182 dirs[0].count = 1;
12183 dirs[0].dir_idx = 0;
12184 files[0].dir_idx = 0;
12185 ndirs = 1;
12186
12187 for (i = 1; i < numfiles; i++)
12188 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12189 && memcmp (dirs[ndirs - 1].path, files[i].path,
12190 dirs[ndirs - 1].length) == 0)
12191 {
12192 /* Same directory as last entry. */
12193 files[i].dir_idx = ndirs - 1;
12194 ++dirs[ndirs - 1].count;
12195 }
12196 else
12197 {
12198 int j;
12199
12200 /* This is a new directory. */
12201 dirs[ndirs].path = files[i].path;
12202 dirs[ndirs].length = files[i].fname - files[i].path;
12203 dirs[ndirs].count = 1;
12204 dirs[ndirs].dir_idx = ndirs;
12205 files[i].dir_idx = ndirs;
12206
12207 /* Search for a prefix. */
12208 dirs[ndirs].prefix = -1;
12209 for (j = 0; j < ndirs; j++)
12210 if (dirs[j].length < dirs[ndirs].length
12211 && dirs[j].length > 1
12212 && (dirs[ndirs].prefix == -1
12213 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12214 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12215 dirs[ndirs].prefix = j;
12216
12217 ++ndirs;
12218 }
12219
12220 /* Now to the actual work. We have to find a subset of the directories which
12221 allows expressing the file names using references to the directory table
12222 with the fewest characters. We do not do an exhaustive search
12223 where we would have to check out every combination of every single
12224 possible prefix. Instead we use a heuristic which provides nearly optimal
12225 results in most cases and is never far off. */
12226 saved = XALLOCAVEC (int, ndirs);
12227 savehere = XALLOCAVEC (int, ndirs);
12228
12229 memset (saved, '\0', ndirs * sizeof (saved[0]));
12230 for (i = 0; i < ndirs; i++)
12231 {
12232 int j;
12233 int total;
12234
12235 /* We can always save some space for the current directory. But this
12236 does not mean it will be enough to justify adding the directory. */
12237 savehere[i] = dirs[i].length;
12238 total = (savehere[i] - saved[i]) * dirs[i].count;
12239
12240 for (j = i + 1; j < ndirs; j++)
12241 {
12242 savehere[j] = 0;
12243 if (saved[j] < dirs[i].length)
12244 {
12245 /* Determine whether the dirs[i] path is a prefix of the
12246 dirs[j] path. */
12247 int k;
12248
12249 k = dirs[j].prefix;
12250 while (k != -1 && k != (int) i)
12251 k = dirs[k].prefix;
12252
12253 if (k == (int) i)
12254 {
12255 /* Yes it is. We can possibly save some memory by
12256 writing the filenames in dirs[j] relative to
12257 dirs[i]. */
12258 savehere[j] = dirs[i].length;
12259 total += (savehere[j] - saved[j]) * dirs[j].count;
12260 }
12261 }
12262 }
12263
12264 /* Check whether we can save enough to justify adding the dirs[i]
12265 directory. */
12266 if (total > dirs[i].length + 1)
12267 {
12268 /* It's worthwhile adding. */
12269 for (j = i; j < ndirs; j++)
12270 if (savehere[j] > 0)
12271 {
12272 /* Remember how much we saved for this directory so far. */
12273 saved[j] = savehere[j];
12274
12275 /* Remember the prefix directory. */
12276 dirs[j].dir_idx = i;
12277 }
12278 }
12279 }
12280
12281 /* Emit the directory name table. */
12282 idx_offset = dirs[0].length > 0 ? 1 : 0;
12283 enum dwarf_form str_form = DW_FORM_string;
12284 enum dwarf_form idx_form = DW_FORM_udata;
12285 if (dwarf_version >= 5)
12286 {
12287 const char *comp_dir = comp_dir_string ();
12288 if (comp_dir == NULL)
12289 comp_dir = "";
12290 dw2_asm_output_data (1, 1, "Directory entry format count");
12291 if (DWARF5_USE_DEBUG_LINE_STR)
12292 str_form = DW_FORM_line_strp;
12293 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12294 dw2_asm_output_data_uleb128 (str_form, "%s",
12295 get_DW_FORM_name (str_form));
12296 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12297 if (str_form == DW_FORM_string)
12298 {
12299 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12300 for (i = 1 - idx_offset; i < ndirs; i++)
12301 dw2_asm_output_nstring (dirs[i].path,
12302 dirs[i].length
12303 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12304 "Directory Entry: %#x", i + idx_offset);
12305 }
12306 else
12307 {
12308 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12309 for (i = 1 - idx_offset; i < ndirs; i++)
12310 {
12311 const char *str
12312 = ggc_alloc_string (dirs[i].path,
12313 dirs[i].length
12314 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12315 output_line_string (str_form, str, "Directory Entry",
12316 (unsigned) i + idx_offset);
12317 }
12318 }
12319 }
12320 else
12321 {
12322 for (i = 1 - idx_offset; i < ndirs; i++)
12323 dw2_asm_output_nstring (dirs[i].path,
12324 dirs[i].length
12325 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12326 "Directory Entry: %#x", i + idx_offset);
12327
12328 dw2_asm_output_data (1, 0, "End directory table");
12329 }
12330
12331 /* We have to emit them in the order of emitted_number since that's
12332 used in the debug info generation. To do this efficiently we
12333 generate a back-mapping of the indices first. */
12334 backmap = XALLOCAVEC (int, numfiles);
12335 for (i = 0; i < numfiles; i++)
12336 backmap[files[i].file_idx->emitted_number - 1] = i;
12337
12338 if (dwarf_version >= 5)
12339 {
12340 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12341 if (filename0 == NULL)
12342 filename0 = "";
12343 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12344 DW_FORM_data2. Choose one based on the number of directories
12345 and how much space they would occupy in each encoding.
12346 If we have at most 256 directories, all indexes fit into
12347 a single byte, so DW_FORM_data1 is most compact (if there
12348 are at most 128 directories, DW_FORM_udata would be just as
12349 compact, but no shorter and slower to decode). */
12350 if (ndirs + idx_offset <= 256)
12351 idx_form = DW_FORM_data1;
12352 /* If there are more than 65536 directories, we have to use
12353 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12354 Otherwise, compute how much space the indexes would occupy if they all
12355 used DW_FORM_udata - sum - and compare that to how large the
12356 DW_FORM_data2 encoding would be, and pick the more efficient one. */
12357 else if (ndirs + idx_offset <= 65536)
12358 {
12359 unsigned HOST_WIDE_INT sum = 1;
12360 for (i = 0; i < numfiles; i++)
12361 {
12362 int file_idx = backmap[i];
12363 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12364 sum += size_of_uleb128 (dir_idx);
12365 }
12366 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12367 idx_form = DW_FORM_data2;
12368 }
12369 #ifdef VMS_DEBUGGING_INFO
12370 dw2_asm_output_data (1, 4, "File name entry format count");
12371 #else
12372 dw2_asm_output_data (1, 2, "File name entry format count");
12373 #endif
12374 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12375 dw2_asm_output_data_uleb128 (str_form, "%s",
12376 get_DW_FORM_name (str_form));
12377 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12378 "DW_LNCT_directory_index");
12379 dw2_asm_output_data_uleb128 (idx_form, "%s",
12380 get_DW_FORM_name (idx_form));
12381 #ifdef VMS_DEBUGGING_INFO
12382 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12383 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12384 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12385 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12386 #endif
12387 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12388
12389 output_line_string (str_form, filename0, "File Entry", 0);
12390
12391 /* Include directory index. */
12392 if (idx_form != DW_FORM_udata)
12393 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12394 0, NULL);
12395 else
12396 dw2_asm_output_data_uleb128 (0, NULL);
12397
12398 #ifdef VMS_DEBUGGING_INFO
12399 dw2_asm_output_data_uleb128 (0, NULL);
12400 dw2_asm_output_data_uleb128 (0, NULL);
12401 #endif
12402 }
12403
12404 /* Now write all the file names. */
12405 for (i = 0; i < numfiles; i++)
12406 {
12407 int file_idx = backmap[i];
12408 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12409
12410 #ifdef VMS_DEBUGGING_INFO
12411 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12412
12413 /* Setting these fields can lead to debugger miscomparisons,
12414 but VMS Debug requires them to be set correctly. */
12415
12416 int ver;
12417 long long cdt;
12418 long siz;
12419 int maxfilelen = (strlen (files[file_idx].path)
12420 + dirs[dir_idx].length
12421 + MAX_VMS_VERSION_LEN + 1);
12422 char *filebuf = XALLOCAVEC (char, maxfilelen);
12423
12424 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12425 snprintf (filebuf, maxfilelen, "%s;%d",
12426 files[file_idx].path + dirs[dir_idx].length, ver);
12427
12428 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12429
12430 /* Include directory index. */
12431 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12432 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12433 dir_idx + idx_offset, NULL);
12434 else
12435 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12436
12437 /* Modification time. */
12438 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12439 &cdt, 0, 0, 0) == 0)
12440 ? cdt : 0, NULL);
12441
12442 /* File length in bytes. */
12443 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12444 0, &siz, 0, 0) == 0)
12445 ? siz : 0, NULL);
12446 #else
12447 output_line_string (str_form,
12448 files[file_idx].path + dirs[dir_idx].length,
12449 "File Entry", (unsigned) i + 1);
12450
12451 /* Include directory index. */
12452 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12453 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12454 dir_idx + idx_offset, NULL);
12455 else
12456 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12457
12458 if (dwarf_version >= 5)
12459 continue;
12460
12461 /* Modification time. */
12462 dw2_asm_output_data_uleb128 (0, NULL);
12463
12464 /* File length in bytes. */
12465 dw2_asm_output_data_uleb128 (0, NULL);
12466 #endif /* VMS_DEBUGGING_INFO */
12467 }
12468
12469 if (dwarf_version < 5)
12470 dw2_asm_output_data (1, 0, "End file name table");
12471 }
12472
12473
12474 /* Output one line number table into the .debug_line section. */
12475
12476 static void
12477 output_one_line_info_table (dw_line_info_table *table)
12478 {
12479 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12480 unsigned int current_line = 1;
12481 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12482 dw_line_info_entry *ent, *prev_addr;
12483 size_t i;
12484 unsigned int view;
12485
12486 view = 0;
12487
12488 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12489 {
12490 switch (ent->opcode)
12491 {
12492 case LI_set_address:
12493 /* ??? Unfortunately, we have little choice here currently, and
12494 must always use the most general form. GCC does not know the
12495 address delta itself, so we can't use DW_LNS_advance_pc. Many
12496 ports do have length attributes which will give an upper bound
12497 on the address range. We could perhaps use length attributes
12498 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12499 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12500
12501 view = 0;
12502
12503 /* This can handle any delta. This takes
12504 4+DWARF2_ADDR_SIZE bytes. */
12505 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12506 debug_variable_location_views
12507 ? ", reset view to 0" : "");
12508 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12509 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12510 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12511
12512 prev_addr = ent;
12513 break;
12514
12515 case LI_adv_address:
12516 {
12517 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12518 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12519 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12520
12521 view++;
12522
12523 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12524 dw2_asm_output_delta (2, line_label, prev_label,
12525 "from %s to %s", prev_label, line_label);
12526
12527 prev_addr = ent;
12528 break;
12529 }
12530
12531 case LI_set_line:
12532 if (ent->val == current_line)
12533 {
12534 /* We still need to start a new row, so output a copy insn. */
12535 dw2_asm_output_data (1, DW_LNS_copy,
12536 "copy line %u", current_line);
12537 }
12538 else
12539 {
12540 int line_offset = ent->val - current_line;
12541 int line_delta = line_offset - DWARF_LINE_BASE;
12542
12543 current_line = ent->val;
12544 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12545 {
12546 /* This can handle deltas from -10 to 234, using the current
12547 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12548 This takes 1 byte. */
12549 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12550 "line %u", current_line);
12551 }
12552 else
12553 {
12554 /* This can handle any delta. This takes at least 4 bytes,
12555 depending on the value being encoded. */
12556 dw2_asm_output_data (1, DW_LNS_advance_line,
12557 "advance to line %u", current_line);
12558 dw2_asm_output_data_sleb128 (line_offset, NULL);
12559 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12560 }
12561 }
12562 break;
12563
12564 case LI_set_file:
12565 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12566 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12567 break;
12568
12569 case LI_set_column:
12570 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12571 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12572 break;
12573
12574 case LI_negate_stmt:
12575 current_is_stmt = !current_is_stmt;
12576 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12577 "is_stmt %d", current_is_stmt);
12578 break;
12579
12580 case LI_set_prologue_end:
12581 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12582 "set prologue end");
12583 break;
12584
12585 case LI_set_epilogue_begin:
12586 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12587 "set epilogue begin");
12588 break;
12589
12590 case LI_set_discriminator:
12591 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12592 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12593 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12594 dw2_asm_output_data_uleb128 (ent->val, NULL);
12595 break;
12596 }
12597 }
12598
12599 /* Emit debug info for the address of the end of the table. */
12600 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12601 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12602 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12603 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12604
12605 dw2_asm_output_data (1, 0, "end sequence");
12606 dw2_asm_output_data_uleb128 (1, NULL);
12607 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12608 }
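/* For a table whose entries are, say, { LI_set_address L1, LI_set_line 42,
LI_adv_address L2, LI_set_line 44 }, the loop above emits roughly:
DW_LNE_set_address L1            (start a new address, view 0)
special opcode or DW_LNS_advance_line + DW_LNS_copy   (line 42)
DW_LNS_fixed_advance_pc L2-L1    (advance the PC, bump the view)
special opcode or DW_LNS_advance_line + DW_LNS_copy   (line 44)
followed by the DW_LNE_set_address / DW_LNE_end_sequence pair emitted
after the loop for table->end_label.  */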
12609
12610 /* Output the source line number correspondence information. This
12611 information goes into the .debug_line section. */
12612
12613 static void
12614 output_line_info (bool prologue_only)
12615 {
12616 static unsigned int generation;
12617 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12618 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12619 bool saw_one = false;
12620 int opc;
12621
12622 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12623 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12624 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12625 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12626
12627 if (!XCOFF_DEBUGGING_INFO)
12628 {
12629 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12630 dw2_asm_output_data (4, 0xffffffff,
12631 "Initial length escape value indicating 64-bit DWARF extension");
12632 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12633 "Length of Source Line Info");
12634 }
12635
12636 ASM_OUTPUT_LABEL (asm_out_file, l1);
12637
12638 output_dwarf_version ();
12639 if (dwarf_version >= 5)
12640 {
12641 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12642 dw2_asm_output_data (1, 0, "Segment Size");
12643 }
12644 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12645 ASM_OUTPUT_LABEL (asm_out_file, p1);
12646
12647 /* Define the architecture-dependent minimum instruction length (in bytes).
12648 In this implementation of DWARF, this field is used for information
12649 purposes only. Since GCC generates assembly language, we have no
12650 a priori knowledge of how many instruction bytes are generated for each
12651 source line, and therefore can use only the DW_LNE_set_address and
12652 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12653 this as '1', which is "correct enough" for all architectures,
12654 and don't let the target override. */
12655 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12656
12657 if (dwarf_version >= 4)
12658 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12659 "Maximum Operations Per Instruction");
12660 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12661 "Default is_stmt_start flag");
12662 dw2_asm_output_data (1, DWARF_LINE_BASE,
12663 "Line Base Value (Special Opcodes)");
12664 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12665 "Line Range Value (Special Opcodes)");
12666 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12667 "Special Opcode Base");
12668
12669 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12670 {
12671 int n_op_args;
12672 switch (opc)
12673 {
12674 case DW_LNS_advance_pc:
12675 case DW_LNS_advance_line:
12676 case DW_LNS_set_file:
12677 case DW_LNS_set_column:
12678 case DW_LNS_fixed_advance_pc:
12679 case DW_LNS_set_isa:
12680 n_op_args = 1;
12681 break;
12682 default:
12683 n_op_args = 0;
12684 break;
12685 }
12686
12687 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12688 opc, n_op_args);
12689 }
12690
12691 /* Write out the information about the files we use. */
12692 output_file_names ();
12693 ASM_OUTPUT_LABEL (asm_out_file, p2);
12694 if (prologue_only)
12695 {
12696 /* Output the marker for the end of the line number info. */
12697 ASM_OUTPUT_LABEL (asm_out_file, l2);
12698 return;
12699 }
12700
12701 if (separate_line_info)
12702 {
12703 dw_line_info_table *table;
12704 size_t i;
12705
12706 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12707 if (table->in_use)
12708 {
12709 output_one_line_info_table (table);
12710 saw_one = true;
12711 }
12712 }
12713 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12714 {
12715 output_one_line_info_table (cold_text_section_line_info);
12716 saw_one = true;
12717 }
12718
12719 /* ??? Some Darwin linkers crash on a .debug_line section with no
12720 sequences. Further, merely a DW_LNE_end_sequence entry is not
12721 sufficient -- the address column must also be initialized.
12722 Make sure to output at least one set_address/end_sequence pair,
12723 choosing .text since that section is always present. */
12724 if (text_section_line_info->in_use || !saw_one)
12725 output_one_line_info_table (text_section_line_info);
12726
12727 /* Output the marker for the end of the line number info. */
12728 ASM_OUTPUT_LABEL (asm_out_file, l2);
12729 }
12730 \f
12731 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12732
12733 static inline bool
12734 need_endianity_attribute_p (bool reverse)
12735 {
12736 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12737 }
12738
12739 /* Given a pointer to a tree node for some base type, return a pointer to
12740 a DIE that describes the given type. REVERSE is true if the type is
12741 to be interpreted in the reverse storage order wrt the target order.
12742
12743 This routine must only be called for GCC type nodes that correspond to
12744 Dwarf base (fundamental) types. */
12745
12746 static dw_die_ref
12747 base_type_die (tree type, bool reverse)
12748 {
12749 dw_die_ref base_type_result;
12750 enum dwarf_type encoding;
12751 bool fpt_used = false;
12752 struct fixed_point_type_info fpt_info;
12753 tree type_bias = NULL_TREE;
12754
12755 /* If this is a subtype that should not be emitted as a subrange type,
12756 use the base type. See subrange_type_for_debug_p. */
12757 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12758 type = TREE_TYPE (type);
12759
12760 switch (TREE_CODE (type))
12761 {
12762 case INTEGER_TYPE:
12763 if ((dwarf_version >= 4 || !dwarf_strict)
12764 && TYPE_NAME (type)
12765 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12766 && DECL_IS_BUILTIN (TYPE_NAME (type))
12767 && DECL_NAME (TYPE_NAME (type)))
12768 {
12769 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12770 if (strcmp (name, "char16_t") == 0
12771 || strcmp (name, "char32_t") == 0)
12772 {
12773 encoding = DW_ATE_UTF;
12774 break;
12775 }
12776 }
12777 if ((dwarf_version >= 3 || !dwarf_strict)
12778 && lang_hooks.types.get_fixed_point_type_info)
12779 {
12780 memset (&fpt_info, 0, sizeof (fpt_info));
12781 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12782 {
12783 fpt_used = true;
12784 encoding = ((TYPE_UNSIGNED (type))
12785 ? DW_ATE_unsigned_fixed
12786 : DW_ATE_signed_fixed);
12787 break;
12788 }
12789 }
12790 if (TYPE_STRING_FLAG (type))
12791 {
12792 if (TYPE_UNSIGNED (type))
12793 encoding = DW_ATE_unsigned_char;
12794 else
12795 encoding = DW_ATE_signed_char;
12796 }
12797 else if (TYPE_UNSIGNED (type))
12798 encoding = DW_ATE_unsigned;
12799 else
12800 encoding = DW_ATE_signed;
12801
12802 if (!dwarf_strict
12803 && lang_hooks.types.get_type_bias)
12804 type_bias = lang_hooks.types.get_type_bias (type);
12805 break;
12806
12807 case REAL_TYPE:
12808 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12809 {
12810 if (dwarf_version >= 3 || !dwarf_strict)
12811 encoding = DW_ATE_decimal_float;
12812 else
12813 encoding = DW_ATE_lo_user;
12814 }
12815 else
12816 encoding = DW_ATE_float;
12817 break;
12818
12819 case FIXED_POINT_TYPE:
12820 if (!(dwarf_version >= 3 || !dwarf_strict))
12821 encoding = DW_ATE_lo_user;
12822 else if (TYPE_UNSIGNED (type))
12823 encoding = DW_ATE_unsigned_fixed;
12824 else
12825 encoding = DW_ATE_signed_fixed;
12826 break;
12827
12828 /* Dwarf2 doesn't know anything about complex ints, so use
12829 a user defined type for it. */
12830 case COMPLEX_TYPE:
12831 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12832 encoding = DW_ATE_complex_float;
12833 else
12834 encoding = DW_ATE_lo_user;
12835 break;
12836
12837 case BOOLEAN_TYPE:
12838 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12839 encoding = DW_ATE_boolean;
12840 break;
12841
12842 default:
12843 /* No other TREE_CODEs are Dwarf fundamental types. */
12844 gcc_unreachable ();
12845 }
12846
12847 base_type_result = new_die_raw (DW_TAG_base_type);
12848
12849 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12850 int_size_in_bytes (type));
12851 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12852
12853 if (need_endianity_attribute_p (reverse))
12854 add_AT_unsigned (base_type_result, DW_AT_endianity,
12855 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12856
12857 add_alignment_attribute (base_type_result, type);
12858
12859 if (fpt_used)
12860 {
12861 switch (fpt_info.scale_factor_kind)
12862 {
12863 case fixed_point_scale_factor_binary:
12864 add_AT_int (base_type_result, DW_AT_binary_scale,
12865 fpt_info.scale_factor.binary);
12866 break;
12867
12868 case fixed_point_scale_factor_decimal:
12869 add_AT_int (base_type_result, DW_AT_decimal_scale,
12870 fpt_info.scale_factor.decimal);
12871 break;
12872
12873 case fixed_point_scale_factor_arbitrary:
12874 /* Arbitrary scale factors cannot be described in standard DWARF,
12875 yet. */
12876 if (!dwarf_strict)
12877 {
12878 /* Describe the scale factor as a rational constant. */
12879 const dw_die_ref scale_factor
12880 = new_die (DW_TAG_constant, comp_unit_die (), type);
12881
12882 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12883 fpt_info.scale_factor.arbitrary.numerator);
12884 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12885 fpt_info.scale_factor.arbitrary.denominator);
12886
12887 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12888 }
12889 break;
12890
12891 default:
12892 gcc_unreachable ();
12893 }
12894 }
12895
12896 if (type_bias)
12897 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12898 dw_scalar_form_constant
12899 | dw_scalar_form_exprloc
12900 | dw_scalar_form_reference,
12901 NULL);
12902
12903 return base_type_result;
12904 }
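/* For example, for the C type "unsigned int" on a target with 32-bit int,
this produces a DW_TAG_base_type DIE carrying DW_AT_byte_size 4 and
DW_AT_encoding DW_ATE_unsigned (plus DW_AT_alignment when alignment
attributes are being emitted); the DW_AT_name attribute is attached
later, in modified_type_die.  */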
12905
12906 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12907 named 'auto' in its type: return true for it, false otherwise. */
12908
12909 static inline bool
12910 is_cxx_auto (tree type)
12911 {
12912 if (is_cxx ())
12913 {
12914 tree name = TYPE_IDENTIFIER (type);
12915 if (name == get_identifier ("auto")
12916 || name == get_identifier ("decltype(auto)"))
12917 return true;
12918 }
12919 return false;
12920 }
12921
12922 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12924 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12924
12925 static inline int
12926 is_base_type (tree type)
12927 {
12928 switch (TREE_CODE (type))
12929 {
12930 case INTEGER_TYPE:
12931 case REAL_TYPE:
12932 case FIXED_POINT_TYPE:
12933 case COMPLEX_TYPE:
12934 case BOOLEAN_TYPE:
12935 return 1;
12936
12937 case VOID_TYPE:
12938 case ARRAY_TYPE:
12939 case RECORD_TYPE:
12940 case UNION_TYPE:
12941 case QUAL_UNION_TYPE:
12942 case ENUMERAL_TYPE:
12943 case FUNCTION_TYPE:
12944 case METHOD_TYPE:
12945 case POINTER_TYPE:
12946 case REFERENCE_TYPE:
12947 case NULLPTR_TYPE:
12948 case OFFSET_TYPE:
12949 case LANG_TYPE:
12950 case VECTOR_TYPE:
12951 return 0;
12952
12953 default:
12954 if (is_cxx_auto (type))
12955 return 0;
12956 gcc_unreachable ();
12957 }
12958
12959 return 0;
12960 }
12961
12962 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12963 node, return the size in bits for the type if it is a constant, or else
12964 return the alignment for the type if the type's size is not constant, or
12965 else return BITS_PER_WORD if the type actually turns out to be an
12966 ERROR_MARK node. */
12967
12968 static inline unsigned HOST_WIDE_INT
12969 simple_type_size_in_bits (const_tree type)
12970 {
12971 if (TREE_CODE (type) == ERROR_MARK)
12972 return BITS_PER_WORD;
12973 else if (TYPE_SIZE (type) == NULL_TREE)
12974 return 0;
12975 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12976 return tree_to_uhwi (TYPE_SIZE (type));
12977 else
12978 return TYPE_ALIGN (type);
12979 }
12980
12981 /* Similarly, but return an offset_int instead of UHWI. */
12982
12983 static inline offset_int
12984 offset_int_type_size_in_bits (const_tree type)
12985 {
12986 if (TREE_CODE (type) == ERROR_MARK)
12987 return BITS_PER_WORD;
12988 else if (TYPE_SIZE (type) == NULL_TREE)
12989 return 0;
12990 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12991 return wi::to_offset (TYPE_SIZE (type));
12992 else
12993 return TYPE_ALIGN (type);
12994 }
12995
12996 /* Given a pointer to a tree node for a subrange type, return a pointer
12997 to a DIE that describes the given type. */
12998
12999 static dw_die_ref
13000 subrange_type_die (tree type, tree low, tree high, tree bias,
13001 dw_die_ref context_die)
13002 {
13003 dw_die_ref subrange_die;
13004 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13005
13006 if (context_die == NULL)
13007 context_die = comp_unit_die ();
13008
13009 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13010
13011 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13012 {
13013 /* The size of the subrange type and its base type do not match,
13014 so we need to generate a size attribute for the subrange type. */
13015 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13016 }
13017
13018 add_alignment_attribute (subrange_die, type);
13019
13020 if (low)
13021 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13022 if (high)
13023 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13024 if (bias && !dwarf_strict)
13025 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13026 dw_scalar_form_constant
13027 | dw_scalar_form_exprloc
13028 | dw_scalar_form_reference,
13029 NULL);
13030
13031 return subrange_die;
13032 }
13033
13034 /* Returns the (const and/or volatile) cv_qualifiers associated with
13035 the decl node. This will normally be augmented with the
13036 cv_qualifiers of the underlying type in add_type_attribute. */
13037
13038 static int
13039 decl_quals (const_tree decl)
13040 {
13041 return ((TREE_READONLY (decl)
13042 /* The C++ front-end correctly marks reference-typed
13043 variables as readonly, but from a language (and debug
13044 info) standpoint they are not const-qualified. */
13045 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13046 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13047 | (TREE_THIS_VOLATILE (decl)
13048 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13049 }
13050
13051 /* Determine the TYPE whose qualifiers match the largest strict subset
13052 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13053 qualifiers outside QUAL_MASK. */
13054
13055 static int
13056 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13057 {
13058 tree t;
13059 int best_rank = 0, best_qual = 0, max_rank;
13060
13061 type_quals &= qual_mask;
13062 max_rank = popcount_hwi (type_quals) - 1;
13063
13064 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13065 t = TYPE_NEXT_VARIANT (t))
13066 {
13067 int q = TYPE_QUALS (t) & qual_mask;
13068
13069 if ((q & type_quals) == q && q != type_quals
13070 && check_base_type (t, type))
13071 {
13072 int rank = popcount_hwi (q);
13073
13074 if (rank > best_rank)
13075 {
13076 best_rank = rank;
13077 best_qual = q;
13078 }
13079 }
13080 }
13081
13082 return best_qual;
13083 }
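/* For instance, if TYPE is "const volatile int" and a plain "const int"
variant already exists, this returns TYPE_QUAL_CONST, so the caller only
has to wrap the existing const DIE in a new DW_TAG_volatile_type instead
of rebuilding the whole qualifier chain from "int".  */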
13084
13085 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13086 static const dwarf_qual_info_t dwarf_qual_info[] =
13087 {
13088 { TYPE_QUAL_CONST, DW_TAG_const_type },
13089 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13090 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13091 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13092 };
13093 static const unsigned int dwarf_qual_info_size
13094 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13095
13096 /* If DIE is a qualified DIE of some base DIE with the same parent,
13097 return the base DIE, otherwise return NULL. Set MASK to the
13098 qualifiers added compared to the returned DIE. */
13099
13100 static dw_die_ref
13101 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13102 {
13103 unsigned int i;
13104 for (i = 0; i < dwarf_qual_info_size; i++)
13105 if (die->die_tag == dwarf_qual_info[i].t)
13106 break;
13107 if (i == dwarf_qual_info_size)
13108 return NULL;
13109 if (vec_safe_length (die->die_attr) != 1)
13110 return NULL;
13111 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13112 if (type == NULL || type->die_parent != die->die_parent)
13113 return NULL;
13114 *mask |= dwarf_qual_info[i].q;
13115 if (depth)
13116 {
13117 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13118 if (ret)
13119 return ret;
13120 }
13121 return type;
13122 }
13123
13124 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13125 entry that chains the modifiers specified by CV_QUALS in front of the
13126 given type. REVERSE is true if the type is to be interpreted in the
13127 reverse storage order wrt the target order. */
13128
13129 static dw_die_ref
13130 modified_type_die (tree type, int cv_quals, bool reverse,
13131 dw_die_ref context_die)
13132 {
13133 enum tree_code code = TREE_CODE (type);
13134 dw_die_ref mod_type_die;
13135 dw_die_ref sub_die = NULL;
13136 tree item_type = NULL;
13137 tree qualified_type;
13138 tree name, low, high;
13139 dw_die_ref mod_scope;
13140 /* Only these cv-qualifiers are currently handled. */
13141 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13142 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13143 ENCODE_QUAL_ADDR_SPACE(~0U));
13144 const bool reverse_base_type
13145 = need_endianity_attribute_p (reverse) && is_base_type (type);
13146
13147 if (code == ERROR_MARK)
13148 return NULL;
13149
13150 if (lang_hooks.types.get_debug_type)
13151 {
13152 tree debug_type = lang_hooks.types.get_debug_type (type);
13153
13154 if (debug_type != NULL_TREE && debug_type != type)
13155 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13156 }
13157
13158 cv_quals &= cv_qual_mask;
13159
13160 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13161 tag modifier (and not an attribute), old consumers won't be able
13162 to handle it. */
13163 if (dwarf_version < 3)
13164 cv_quals &= ~TYPE_QUAL_RESTRICT;
13165
13166 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13167 if (dwarf_version < 5)
13168 cv_quals &= ~TYPE_QUAL_ATOMIC;
13169
13170 /* See if we already have the appropriately qualified variant of
13171 this type. */
13172 qualified_type = get_qualified_type (type, cv_quals);
13173
13174 if (qualified_type == sizetype)
13175 {
13176 /* Try not to expose the internal sizetype type's name. */
13177 if (TYPE_NAME (qualified_type)
13178 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13179 {
13180 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13181
13182 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13183 && (TYPE_PRECISION (t)
13184 == TYPE_PRECISION (qualified_type))
13185 && (TYPE_UNSIGNED (t)
13186 == TYPE_UNSIGNED (qualified_type)));
13187 qualified_type = t;
13188 }
13189 else if (qualified_type == sizetype
13190 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13191 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13192 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13193 qualified_type = size_type_node;
13194 }
13195
13196 /* If we do, then we can just use its DIE, if it exists. */
13197 if (qualified_type)
13198 {
13199 mod_type_die = lookup_type_die (qualified_type);
13200
13201 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13202 dealt with specially: the DIE with the attribute, if it exists, is
13203 placed immediately after the regular DIE for the same base type. */
13204 if (mod_type_die
13205 && (!reverse_base_type
13206 || ((mod_type_die = mod_type_die->die_sib) != NULL
13207 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13208 return mod_type_die;
13209 }
13210
13211 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13212
13213 /* Handle C typedef types. */
13214 if (name
13215 && TREE_CODE (name) == TYPE_DECL
13216 && DECL_ORIGINAL_TYPE (name)
13217 && !DECL_ARTIFICIAL (name))
13218 {
13219 tree dtype = TREE_TYPE (name);
13220
13221 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13222 if (qualified_type == dtype && !reverse_base_type)
13223 {
13224 tree origin = decl_ultimate_origin (name);
13225
13226 /* Typedef variants that have an abstract origin don't get their own
13227 type DIE (see gen_typedef_die), so fall back on the ultimate
13228 abstract origin instead. */
13229 if (origin != NULL && origin != name)
13230 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13231 context_die);
13232
13233 /* For a named type, use the typedef. */
13234 gen_type_die (qualified_type, context_die);
13235 return lookup_type_die (qualified_type);
13236 }
13237 else
13238 {
13239 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13240 dquals &= cv_qual_mask;
13241 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13242 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13243 /* cv-unqualified version of named type. Just use
13244 the unnamed type to which it refers. */
13245 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13246 reverse, context_die);
13247 /* Else cv-qualified version of named type; fall through. */
13248 }
13249 }
13250
13251 mod_scope = scope_die_for (type, context_die);
13252
13253 if (cv_quals)
13254 {
13255 int sub_quals = 0, first_quals = 0;
13256 unsigned i;
13257 dw_die_ref first = NULL, last = NULL;
13258
13259 /* Determine a lesser qualified type that most closely matches
13260 this one. Then generate DW_TAG_* entries for the remaining
13261 qualifiers. */
13262 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13263 cv_qual_mask);
13264 if (sub_quals && use_debug_types)
13265 {
13266 bool needed = false;
13267 /* If emitting type units, make sure the order of qualifiers
13268 is canonical. Thus, start from unqualified type if
13269 an earlier qualifier is missing in sub_quals, but some later
13270 one is present there. */
13271 for (i = 0; i < dwarf_qual_info_size; i++)
13272 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13273 needed = true;
13274 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13275 {
13276 sub_quals = 0;
13277 break;
13278 }
13279 }
13280 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13281 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13282 {
13283 /* As not all intermediate qualified DIEs have corresponding
13284 tree types, ensure that qualified DIEs in the same scope
13285 as their DW_AT_type are emitted after their DW_AT_type,
13286 only with other qualified DIEs for the same type possibly
13287 in between them. Determine the range of such qualified
13288 DIEs now (first being the base type, last being corresponding
13289 last qualified DIE for it). */
13290 unsigned int count = 0;
13291 first = qualified_die_p (mod_type_die, &first_quals,
13292 dwarf_qual_info_size);
13293 if (first == NULL)
13294 first = mod_type_die;
13295 gcc_assert ((first_quals & ~sub_quals) == 0);
13296 for (count = 0, last = first;
13297 count < (1U << dwarf_qual_info_size);
13298 count++, last = last->die_sib)
13299 {
13300 int quals = 0;
13301 if (last == mod_scope->die_child)
13302 break;
13303 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13304 != first)
13305 break;
13306 }
13307 }
13308
13309 for (i = 0; i < dwarf_qual_info_size; i++)
13310 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13311 {
13312 dw_die_ref d;
13313 if (first && first != last)
13314 {
13315 for (d = first->die_sib; ; d = d->die_sib)
13316 {
13317 int quals = 0;
13318 qualified_die_p (d, &quals, dwarf_qual_info_size);
13319 if (quals == (first_quals | dwarf_qual_info[i].q))
13320 break;
13321 if (d == last)
13322 {
13323 d = NULL;
13324 break;
13325 }
13326 }
13327 if (d)
13328 {
13329 mod_type_die = d;
13330 continue;
13331 }
13332 }
13333 if (first)
13334 {
13335 d = new_die_raw (dwarf_qual_info[i].t);
13336 add_child_die_after (mod_scope, d, last);
13337 last = d;
13338 }
13339 else
13340 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13341 if (mod_type_die)
13342 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13343 mod_type_die = d;
13344 first_quals |= dwarf_qual_info[i].q;
13345 }
13346 }
13347 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13348 {
13349 dwarf_tag tag = DW_TAG_pointer_type;
13350 if (code == REFERENCE_TYPE)
13351 {
13352 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13353 tag = DW_TAG_rvalue_reference_type;
13354 else
13355 tag = DW_TAG_reference_type;
13356 }
13357 mod_type_die = new_die (tag, mod_scope, type);
13358
13359 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13360 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13361 add_alignment_attribute (mod_type_die, type);
13362 item_type = TREE_TYPE (type);
13363
13364 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13365 if (!ADDR_SPACE_GENERIC_P (as))
13366 {
13367 int action = targetm.addr_space.debug (as);
13368 if (action >= 0)
13369 {
13370 /* Positive values indicate an address_class. */
13371 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13372 }
13373 else
13374 {
13375 /* Negative values indicate an (inverted) segment base reg. */
13376 dw_loc_descr_ref d
13377 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13378 add_AT_loc (mod_type_die, DW_AT_segment, d);
13379 }
13380 }
13381 }
13382 else if (code == INTEGER_TYPE
13383 && TREE_TYPE (type) != NULL_TREE
13384 && subrange_type_for_debug_p (type, &low, &high))
13385 {
13386 tree bias = NULL_TREE;
13387 if (lang_hooks.types.get_type_bias)
13388 bias = lang_hooks.types.get_type_bias (type);
13389 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13390 item_type = TREE_TYPE (type);
13391 }
13392 else if (is_base_type (type))
13393 {
13394 mod_type_die = base_type_die (type, reverse);
13395
13396 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13397 if (reverse_base_type)
13398 {
13399 dw_die_ref after_die
13400 = modified_type_die (type, cv_quals, false, context_die);
13401 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13402 }
13403 else
13404 add_child_die (comp_unit_die (), mod_type_die);
13405
13406 add_pubtype (type, mod_type_die);
13407 }
13408 else
13409 {
13410 gen_type_die (type, context_die);
13411
13412 /* We have to get the type_main_variant here (and pass that to the
13413 `lookup_type_die' routine) because the ..._TYPE node we have
13414 might simply be a *copy* of some original type node (where the
13415 copy was created to help us keep track of typedef names) and
13416 that copy might have a different TYPE_UID from the original
13417 ..._TYPE node. */
13418 if (TREE_CODE (type) == FUNCTION_TYPE
13419 || TREE_CODE (type) == METHOD_TYPE)
13420 {
13421 /* For function/method types, can't just use type_main_variant here,
13422 because that can have different ref-qualifiers for C++,
13423 but try to canonicalize. */
13424 tree main = TYPE_MAIN_VARIANT (type);
13425 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13426 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13427 && check_base_type (t, main)
13428 && check_lang_type (t, type))
13429 return lookup_type_die (t);
13430 return lookup_type_die (type);
13431 }
13432 else if (TREE_CODE (type) != VECTOR_TYPE
13433 && TREE_CODE (type) != ARRAY_TYPE)
13434 return lookup_type_die (type_main_variant (type));
13435 else
13436 /* Vectors have the debugging information in the type,
13437 not the main variant. */
13438 return lookup_type_die (type);
13439 }
13440
13441 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13442 don't output a DW_TAG_typedef, since there isn't one in the
13443 user's program; just attach a DW_AT_name to the type.
13444 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13445 if the base type already has the same name. */
13446 if (name
13447 && ((TREE_CODE (name) != TYPE_DECL
13448 && (qualified_type == TYPE_MAIN_VARIANT (type)
13449 || (cv_quals == TYPE_UNQUALIFIED)))
13450 || (TREE_CODE (name) == TYPE_DECL
13451 && TREE_TYPE (name) == qualified_type
13452 && DECL_NAME (name))))
13453 {
13454 if (TREE_CODE (name) == TYPE_DECL)
13455 /* Could just call add_name_and_src_coords_attributes here,
13456 but since this is a builtin type it doesn't have any
13457 useful source coordinates anyway. */
13458 name = DECL_NAME (name);
13459 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13460 }
13461 /* This probably indicates a bug. */
13462 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13463 {
13464 name = TYPE_IDENTIFIER (type);
13465 add_name_attribute (mod_type_die,
13466 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13467 }
13468
13469 if (qualified_type && !reverse_base_type)
13470 equate_type_number_to_die (qualified_type, mod_type_die);
13471
13472 if (item_type)
13473 /* We must do this after the equate_type_number_to_die call, in case
13474 this is a recursive type. This ensures that the modified_type_die
13475 recursion will terminate even if the type is recursive. Recursive
13476 types are possible in Ada. */
13477 sub_die = modified_type_die (item_type,
13478 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13479 reverse,
13480 context_die);
13481
13482 if (sub_die != NULL)
13483 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13484
13485 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13486 if (TYPE_ARTIFICIAL (type))
13487 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13488
13489 return mod_type_die;
13490 }
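/* As an example of the above, for the type of "const char *p" this builds
(or reuses) the chain
DW_TAG_pointer_type -> DW_TAG_const_type -> DW_TAG_base_type "char"
linked through DW_AT_type, where the pointer DIE carries DW_AT_byte_size
and the const DIE is one of the qualifier wrappers from dwarf_qual_info.  */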
13491
13492 /* Generate DIEs for the generic parameters of T.
13493 T must be either a generic type or a generic function.
13494 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13495
13496 static void
13497 gen_generic_params_dies (tree t)
13498 {
13499 tree parms, args;
13500 int parms_num, i;
13501 dw_die_ref die = NULL;
13502 int non_default;
13503
13504 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13505 return;
13506
13507 if (TYPE_P (t))
13508 die = lookup_type_die (t);
13509 else if (DECL_P (t))
13510 die = lookup_decl_die (t);
13511
13512 gcc_assert (die);
13513
13514 parms = lang_hooks.get_innermost_generic_parms (t);
13515 if (!parms)
13516 /* T has no generic parameters. It means T is neither a generic type
13517 nor a generic function. End of story. */
13518 return;
13519
13520 parms_num = TREE_VEC_LENGTH (parms);
13521 args = lang_hooks.get_innermost_generic_args (t);
13522 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13523 non_default = int_cst_value (TREE_CHAIN (args));
13524 else
13525 non_default = TREE_VEC_LENGTH (args);
13526 for (i = 0; i < parms_num; i++)
13527 {
13528 tree parm, arg, arg_pack_elems;
13529 dw_die_ref parm_die;
13530
13531 parm = TREE_VEC_ELT (parms, i);
13532 arg = TREE_VEC_ELT (args, i);
13533 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13534 gcc_assert (parm && TREE_VALUE (parm) && arg);
13535
13536 if (parm && TREE_VALUE (parm) && arg)
13537 {
13538 /* If PARM represents a template parameter pack,
13539 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13540 by DW_TAG_template_*_parameter DIEs for the argument
13541 pack elements of ARG. Note that ARG would then be
13542 an argument pack. */
13543 if (arg_pack_elems)
13544 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13545 arg_pack_elems,
13546 die);
13547 else
13548 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13549 true /* emit name */, die);
13550 if (i >= non_default)
13551 add_AT_flag (parm_die, DW_AT_default_value, 1);
13552 }
13553 }
13554 }
13555
13556 /* Create and return a DIE for PARM which should be
13557 the representation of a generic type parameter.
13558 For instance, in the C++ front end, PARM would be a template parameter.
13559 ARG is the argument to PARM.
13560 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13561 the name of PARM.
13562 PARENT_DIE is the parent DIE to which the newly created DIE should be
13563 added as a child node. */
13564
13565 static dw_die_ref
13566 generic_parameter_die (tree parm, tree arg,
13567 bool emit_name_p,
13568 dw_die_ref parent_die)
13569 {
13570 dw_die_ref tmpl_die = NULL;
13571 const char *name = NULL;
13572
13573 if (!parm || !DECL_NAME (parm) || !arg)
13574 return NULL;
13575
13576 /* We support non-type generic parameters and arguments,
13577 type generic parameters and arguments, as well as
13578 generic generic parameters (a.k.a. template template parameters in C++)
13579 and arguments. */
13580 if (TREE_CODE (parm) == PARM_DECL)
13581 /* PARM is a non-type generic parameter. */
13582 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13583 else if (TREE_CODE (parm) == TYPE_DECL)
13584 /* PARM is a type generic parameter. */
13585 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13586 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13587 /* PARM is a generic generic parameter.
13588 Its DIE is a GNU extension. It shall have a
13589 DW_AT_name attribute to represent the name of the template template
13590 parameter, and a DW_AT_GNU_template_name attribute to represent the
13591 name of the template template argument. */
13592 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13593 parent_die, parm);
13594 else
13595 gcc_unreachable ();
13596
13597 if (tmpl_die)
13598 {
13599 tree tmpl_type;
13600
13601 /* If PARM is a generic parameter pack, it means we are
13602 emitting debug info for a template argument pack element.
13603 In other words, ARG is a template argument pack element.
13604 In that case, we don't emit any DW_AT_name attribute for
13605 the DIE. */
13606 if (emit_name_p)
13607 {
13608 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13609 gcc_assert (name);
13610 add_AT_string (tmpl_die, DW_AT_name, name);
13611 }
13612
13613 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13614 {
13615 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13616 TMPL_DIE should have a child DW_AT_type attribute that is set
13617 to the type of the argument to PARM, which is ARG.
13618 If PARM is a type generic parameter, TMPL_DIE should have a
13619 child DW_AT_type that is set to ARG. */
13620 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13621 add_type_attribute (tmpl_die, tmpl_type,
13622 (TREE_THIS_VOLATILE (tmpl_type)
13623 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13624 false, parent_die);
13625 }
13626 else
13627 {
13628 /* So TMPL_DIE is a DIE representing a
13629 generic generic parameter, a.k.a. a template template
13630 parameter in C++, and ARG is a template. */
13631
13632 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13633 to the name of the argument. */
13634 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13635 if (name)
13636 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13637 }
13638
13639 if (TREE_CODE (parm) == PARM_DECL)
13640 /* So PARM is a non-type generic parameter.
13641 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13642 attribute of TMPL_DIE whose value represents the value
13643 of ARG.
13644 We must be careful here:
13645 the value of ARG might reference some function decls.
13646 We might currently be emitting debug info for a generic
13647 type, and types are emitted before function decls; we don't
13648 know if the function decls referenced by ARG will actually be
13649 emitted after cgraph computations.
13650 So we must defer the generation of the DW_AT_const_value to
13651 after cgraph is ready. */
13652 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13653 }
13654
13655 return tmpl_die;
13656 }
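/* For instance, for the C++ instantiation S<int, 3> of
"template <typename T, int N> struct S", gen_generic_params_dies calls
this twice, yielding a DW_TAG_template_type_param DIE (name "T",
DW_AT_type referring to int) and a DW_TAG_template_value_param DIE
(name "N", DW_AT_type referring to int, with its DW_AT_const_value of 3
filled in once cgraph is ready).  */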
13657
13658 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13659 PARM_PACK, which must be a template parameter pack. The returned DIE
13660 will be a child DIE of PARENT_DIE. */
13661
13662 static dw_die_ref
13663 template_parameter_pack_die (tree parm_pack,
13664 tree parm_pack_args,
13665 dw_die_ref parent_die)
13666 {
13667 dw_die_ref die;
13668 int j;
13669
13670 gcc_assert (parent_die && parm_pack);
13671
13672 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13673 add_name_and_src_coords_attributes (die, parm_pack);
13674 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13675 generic_parameter_die (parm_pack,
13676 TREE_VEC_ELT (parm_pack_args, j),
13677 false /* Don't emit DW_AT_name */,
13678 die);
13679 return die;
13680 }
13681
13682 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13683 an enumerated type. */
13684
13685 static inline int
13686 type_is_enum (const_tree type)
13687 {
13688 return TREE_CODE (type) == ENUMERAL_TYPE;
13689 }
13690
13691 /* Return the DBX register number described by a given RTL node. */
13692
13693 static unsigned int
13694 dbx_reg_number (const_rtx rtl)
13695 {
13696 unsigned regno = REGNO (rtl);
13697
13698 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13699
13700 #ifdef LEAF_REG_REMAP
13701 if (crtl->uses_only_leaf_regs)
13702 {
13703 int leaf_reg = LEAF_REG_REMAP (regno);
13704 if (leaf_reg != -1)
13705 regno = (unsigned) leaf_reg;
13706 }
13707 #endif
13708
13709 regno = DBX_REGISTER_NUMBER (regno);
13710 gcc_assert (regno != INVALID_REGNUM);
13711 return regno;
13712 }
13713
13714 /* Optionally add a DW_OP_piece term to a location description expression.
13715 DW_OP_piece is only added if the location description expression
13716 doesn't already end with DW_OP_piece. */
13717
13718 static void
13719 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13720 {
13721 dw_loc_descr_ref loc;
13722
13723 if (*list_head != NULL)
13724 {
13725 /* Find the end of the chain. */
13726 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13727 ;
13728
13729 if (loc->dw_loc_opc != DW_OP_piece)
13730 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13731 }
13732 }
13733
13734 /* Return a location descriptor that designates a machine register or
13735 zero if there is none. */
13736
13737 static dw_loc_descr_ref
13738 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13739 {
13740 rtx regs;
13741
13742 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13743 return 0;
13744
13745 /* We only use "frame base" when we're sure we're talking about the
13746 post-prologue local stack frame. We do this by *not* running
13747 register elimination until this point, and recognizing the special
13748 argument pointer and soft frame pointer rtx's.
13749 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13750 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13751 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13752 {
13753 dw_loc_descr_ref result = NULL;
13754
13755 if (dwarf_version >= 4 || !dwarf_strict)
13756 {
13757 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13758 initialized);
13759 if (result)
13760 add_loc_descr (&result,
13761 new_loc_descr (DW_OP_stack_value, 0, 0));
13762 }
13763 return result;
13764 }
13765
13766 regs = targetm.dwarf_register_span (rtl);
13767
13768 if (REG_NREGS (rtl) > 1 || regs)
13769 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13770 else
13771 {
13772 unsigned int dbx_regnum = dbx_reg_number (rtl);
13773 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13774 return 0;
13775 return one_reg_loc_descriptor (dbx_regnum, initialized);
13776 }
13777 }
13778
13779 /* Return a location descriptor that designates a machine register for
13780 a given hard register number. */
13781
13782 static dw_loc_descr_ref
13783 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13784 {
13785 dw_loc_descr_ref reg_loc_descr;
13786
13787 if (regno <= 31)
13788 reg_loc_descr
13789 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13790 else
13791 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13792
13793 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13794 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13795
13796 return reg_loc_descr;
13797 }
13798
13799 /* Given an RTL of a register, return a location descriptor that
13800 designates a value that spans more than one register. */
13801
13802 static dw_loc_descr_ref
13803 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13804 enum var_init_status initialized)
13805 {
13806 int size, i;
13807 dw_loc_descr_ref loc_result = NULL;
13808
13809 /* Simple, contiguous registers. */
13810 if (regs == NULL_RTX)
13811 {
13812 unsigned reg = REGNO (rtl);
13813 int nregs;
13814
13815 #ifdef LEAF_REG_REMAP
13816 if (crtl->uses_only_leaf_regs)
13817 {
13818 int leaf_reg = LEAF_REG_REMAP (reg);
13819 if (leaf_reg != -1)
13820 reg = (unsigned) leaf_reg;
13821 }
13822 #endif
13823
13824 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13825 nregs = REG_NREGS (rtl);
13826
13827 /* At present we only track constant-sized pieces. */
13828 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13829 return NULL;
13830 size /= nregs;
13831
13832 loc_result = NULL;
13833 while (nregs--)
13834 {
13835 dw_loc_descr_ref t;
13836
13837 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13838 VAR_INIT_STATUS_INITIALIZED);
13839 add_loc_descr (&loc_result, t);
13840 add_loc_descr_op_piece (&loc_result, size);
13841 ++reg;
13842 }
13843 return loc_result;
13844 }
13845
13846 /* Now onto stupid register sets in non-contiguous locations. */
13847
13848 gcc_assert (GET_CODE (regs) == PARALLEL);
13849
13850 /* At present we only track constant-sized pieces. */
13851 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13852 return NULL;
13853 loc_result = NULL;
13854
13855 for (i = 0; i < XVECLEN (regs, 0); ++i)
13856 {
13857 dw_loc_descr_ref t;
13858
13859 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13860 VAR_INIT_STATUS_INITIALIZED);
13861 add_loc_descr (&loc_result, t);
13862 add_loc_descr_op_piece (&loc_result, size);
13863 }
13864
13865 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13866 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13867 return loc_result;
13868 }
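/* For instance, a 16-byte value held in two consecutive 8-byte hard
registers (and no target-specific span) comes out as
DW_OP_regx <r>  DW_OP_piece 8  DW_OP_regx <r+1>  DW_OP_piece 8
with DW_OP_reg0..DW_OP_reg31 used instead of DW_OP_regx for low
register numbers.  */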
13869
13870 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13871
13872 /* Return a location descriptor that designates a constant i,
13873 as a compound operation from constant (i >> shift), constant shift
13874 and DW_OP_shl. */
13875
13876 static dw_loc_descr_ref
13877 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13878 {
13879 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13880 add_loc_descr (&ret, int_loc_descriptor (shift));
13881 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13882 return ret;
13883 }
13884
13885 /* Return a location descriptor that designates constant POLY_I. */
13886
13887 static dw_loc_descr_ref
13888 int_loc_descriptor (poly_int64 poly_i)
13889 {
13890 enum dwarf_location_atom op;
13891
13892 HOST_WIDE_INT i;
13893 if (!poly_i.is_constant (&i))
13894 {
13895 /* Create location descriptions for the non-constant part and
13896 add any constant offset at the end. */
13897 dw_loc_descr_ref ret = NULL;
13898 HOST_WIDE_INT constant = poly_i.coeffs[0];
13899 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13900 {
13901 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13902 if (coeff != 0)
13903 {
13904 dw_loc_descr_ref start = ret;
13905 unsigned int factor;
13906 int bias;
13907 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13908 (j, &factor, &bias);
13909
13910 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13911 add COEFF * (REGNO / FACTOR) now and subtract
13912 COEFF * BIAS from the final constant part. */
13913 constant -= coeff * bias;
13914 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13915 if (coeff % factor == 0)
13916 coeff /= factor;
13917 else
13918 {
13919 int amount = exact_log2 (factor);
13920 gcc_assert (amount >= 0);
13921 add_loc_descr (&ret, int_loc_descriptor (amount));
13922 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13923 }
13924 if (coeff != 1)
13925 {
13926 add_loc_descr (&ret, int_loc_descriptor (coeff));
13927 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13928 }
13929 if (start)
13930 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13931 }
13932 }
13933 loc_descr_plus_const (&ret, constant);
13934 return ret;
13935 }
13936
13937 /* Pick the smallest representation of a constant, rather than just
13938 defaulting to the LEB encoding. */
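/* For instance, on a host with 64-bit HOST_WIDE_INT, i = 0x80000 has
clz = 44 and ctz = 19, so the first shift case below applies and the
value is emitted as DW_OP_lit16 DW_OP_lit15 DW_OP_shl (3 bytes) rather
than DW_OP_const4u (5 bytes).  */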
13939 if (i >= 0)
13940 {
13941 int clz = clz_hwi (i);
13942 int ctz = ctz_hwi (i);
13943 if (i <= 31)
13944 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13945 else if (i <= 0xff)
13946 op = DW_OP_const1u;
13947 else if (i <= 0xffff)
13948 op = DW_OP_const2u;
13949 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13950 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13951 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13952 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13953 while DW_OP_const4u is 5 bytes. */
13954 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13955 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13956 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13957 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13958 while DW_OP_const4u is 5 bytes. */
13959 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13960
13961 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13962 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13963 <= 4)
13964 {
13965 /* As i >= 2**31, the double cast above will yield a negative number.
13966 Since wrapping is defined in DWARF expressions we can output big
13967 positive integers as small negative ones, regardless of the size
13968 of host wide ints.
13969
13970 Here, since the evaluator will handle 32-bit values and since i >=
13971 2**31, we know it's going to be interpreted as a negative literal:
13972 store it that way if it does better than the 5-byte encoding. */
13973 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13974 }
13975 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13976 op = DW_OP_const4u;
13977
13978 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13979 least 6 bytes: see if we can do better before falling back to it. */
13980 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13981 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13982 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13983 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13984 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13985 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13986 >= HOST_BITS_PER_WIDE_INT)
13987 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13988 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13989 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13990 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13991 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13992 && size_of_uleb128 (i) > 6)
13993 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13994 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13995 else
13996 op = DW_OP_constu;
13997 }
13998 else
13999 {
14000 if (i >= -0x80)
14001 op = DW_OP_const1s;
14002 else if (i >= -0x8000)
14003 op = DW_OP_const2s;
14004 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14005 {
14006 if (size_of_int_loc_descriptor (i) < 5)
14007 {
14008 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14009 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14010 return ret;
14011 }
14012 op = DW_OP_const4s;
14013 }
14014 else
14015 {
14016 if (size_of_int_loc_descriptor (i)
14017 < (unsigned long) 1 + size_of_sleb128 (i))
14018 {
14019 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14020 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14021 return ret;
14022 }
14023 op = DW_OP_consts;
14024 }
14025 }
14026
14027 return new_loc_descr (op, i, 0);
14028 }
14029
14030 /* Likewise, for unsigned constants. */
14031
14032 static dw_loc_descr_ref
14033 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14034 {
14035 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14036 const unsigned HOST_WIDE_INT max_uint
14037 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14038
14039 /* If possible, use the clever signed constants handling. */
14040 if (i <= max_int)
14041 return int_loc_descriptor ((HOST_WIDE_INT) i);
14042
14043 /* Here, we are left with positive numbers that cannot be represented as
14044 HOST_WIDE_INT, i.e.:
14045 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14046
14047 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
14048 lot of bytes, whereas it may be better to output a negative integer:
14049 thanks to integer wrapping, we know that on the DWARF stack:
14050 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14051 = x - 2 * (max (HOST_WIDE_INT) + 1)
14052 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14053 small negative integers. Let's try that in cases where it will clearly improve
14054 the encoding: there is no gain turning DW_OP_const4u into
14055 DW_OP_const4s. */
14056 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14057 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14058 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14059 {
14060 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14061
14062 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14063 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14064 const HOST_WIDE_INT second_shift
14065 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14066
14067 /* So we finally have:
14068 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14069 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14070 return int_loc_descriptor (second_shift);
14071 }
14072
14073 /* Last chance: fallback to a simple constant operation. */
14074 return new_loc_descr
14075 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14076 ? DW_OP_const4u
14077 : DW_OP_const8u,
14078 i, 0);
14079 }
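/* For instance, with DWARF2_ADDR_SIZE == 8 and 64-bit HOST_WIDE_INT,
i = 0xfffffffffffffffb (i.e. 2**64 - 5) falls in the range handled
above: second_shift works out to -5, so the value is emitted as
DW_OP_const1s -5 (2 bytes) instead of DW_OP_const8u (9 bytes).  */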
14080
14081 /* Generate and return a location description that computes the unsigned
14082 comparison of the two stack top entries (a OP b where b is the top-most
14083 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14084 LE_EXPR, GT_EXPR or GE_EXPR. */
14085
14086 static dw_loc_descr_ref
14087 uint_comparison_loc_list (enum tree_code kind)
14088 {
14089 enum dwarf_location_atom op, flip_op;
14090 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14091
14092 switch (kind)
14093 {
14094 case LT_EXPR:
14095 op = DW_OP_lt;
14096 break;
14097 case LE_EXPR:
14098 op = DW_OP_le;
14099 break;
14100 case GT_EXPR:
14101 op = DW_OP_gt;
14102 break;
14103 case GE_EXPR:
14104 op = DW_OP_ge;
14105 break;
14106 default:
14107 gcc_unreachable ();
14108 }
14109
14110 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14111 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14112
14113 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14114 possible to perform unsigned comparisons: we just have to distinguish
14115 two cases:
14116
14117 1. when a and b have the same sign (as signed integers); then we should
14118 return: a OP(signed) b;
14119
14120 2. when a is a negative signed integer while b is a positive one, then a
14121 is a greater unsigned integer than b; likewise when a and b's roles
14122 are flipped.
14123
14124 So first, compare the sign of the two operands. */
14125 ret = new_loc_descr (DW_OP_over, 0, 0);
14126 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14127 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14128 /* If they have different signs (i.e. they have different sign bits), then
14129 the stack top value now has the sign bit set and is thus smaller than
14130 zero.  */
14131 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14132 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14133 add_loc_descr (&ret, bra_node);
14134
14135 /* We are in case 1. At this point, we know both operands have the same
14136 sign, so it's safe to use the built-in signed comparison.  */
14137 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14138 add_loc_descr (&ret, jmp_node);
14139
14140 /* We are in case 2. Here, we know both operands do not have the same sign,
14141 so we have to flip the signed comparison. */
14142 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14143 tmp = new_loc_descr (flip_op, 0, 0);
14144 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14145 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14146 add_loc_descr (&ret, tmp);
14147
14148 /* This dummy operation is necessary to make the two branches join. */
14149 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14150 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14151 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14152 add_loc_descr (&ret, tmp);
14153
14154 return ret;
14155 }
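/* For reference, the sequence built above is roughly (with a and b already
   on the stack, b on top):

     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     <signed OP> DW_OP_skip <L2>
     L1: <flipped OP>
     L2: DW_OP_nop

   The xor followed by the signed "< 0" test checks whether the sign bits
   of a and b differ.  */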
14156
14157 /* Likewise, but takes the location description lists (might be destructive on
14158 them). Return NULL if either is NULL or if concatenation fails. */
14159
14160 static dw_loc_list_ref
14161 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14162 enum tree_code kind)
14163 {
14164 if (left == NULL || right == NULL)
14165 return NULL;
14166
14167 add_loc_list (&left, right);
14168 if (left == NULL)
14169 return NULL;
14170
14171 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14172 return left;
14173 }
14174
14175 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14176 without actually allocating it. */
14177
14178 static unsigned long
14179 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14180 {
14181 return size_of_int_loc_descriptor (i >> shift)
14182 + size_of_int_loc_descriptor (shift)
14183 + 1;
14184 }
14185
14186 /* Return size_of_locs (int_loc_descriptor (i)) without
14187 actually allocating it. */
14188
14189 static unsigned long
14190 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14191 {
14192 unsigned long s;
14193
14194 if (i >= 0)
14195 {
14196 int clz, ctz;
14197 if (i <= 31)
14198 return 1;
14199 else if (i <= 0xff)
14200 return 2;
14201 else if (i <= 0xffff)
14202 return 3;
14203 clz = clz_hwi (i);
14204 ctz = ctz_hwi (i);
14205 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14206 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14207 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14208 - clz - 5);
14209 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14210 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14211 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14212 - clz - 8);
14213 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14214 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14215 <= 4)
14216 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14217 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14218 return 5;
14219 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14220 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14221 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14222 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14223 - clz - 8);
14224 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14225 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14226 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14227 - clz - 16);
14228 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14229 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14230 && s > 6)
14231 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14232 - clz - 32);
14233 else
14234 return 1 + s;
14235 }
14236 else
14237 {
14238 if (i >= -0x80)
14239 return 2;
14240 else if (i >= -0x8000)
14241 return 3;
14242 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14243 {
14244 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14245 {
14246 s = size_of_int_loc_descriptor (-i) + 1;
14247 if (s < 5)
14248 return s;
14249 }
14250 return 5;
14251 }
14252 else
14253 {
14254 unsigned long r = 1 + size_of_sleb128 (i);
14255 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14256 {
14257 s = size_of_int_loc_descriptor (-i) + 1;
14258 if (s < r)
14259 return s;
14260 }
14261 return r;
14262 }
14263 }
14264 }
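/* A few illustrative sizes (assuming HOST_BITS_PER_WIDE_INT == 64):
     i = 17            -> 1 byte  (DW_OP_lit17)
     i = 200           -> 2 bytes (DW_OP_const1u)
     i = 40000         -> 3 bytes (DW_OP_const2u)
     i = -1            -> 2 bytes (DW_OP_const1s)
     i = -40000        -> 4 bytes (DW_OP_const2u <40000> DW_OP_neg beats
                                   DW_OP_const4s <-40000>)
     i = 0x1f00000000  -> 4 bytes (DW_OP_lit31 DW_OP_const1u <32> DW_OP_shl).  */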
14265
14266 /* Return a location description representing the "address" of an integer
14267 value.  This can appear only as a top-level expression.  */
14268
14269 static dw_loc_descr_ref
14270 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14271 {
14272 int litsize;
14273 dw_loc_descr_ref loc_result = NULL;
14274
14275 if (!(dwarf_version >= 4 || !dwarf_strict))
14276 return NULL;
14277
14278 litsize = size_of_int_loc_descriptor (i);
14279 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14280 is more compact. For DW_OP_stack_value we need:
14281 litsize + 1 (DW_OP_stack_value)
14282 and for DW_OP_implicit_value:
14283 1 (DW_OP_implicit_value) + 1 (length) + size. */
14284 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14285 {
14286 loc_result = int_loc_descriptor (i);
14287 add_loc_descr (&loc_result,
14288 new_loc_descr (DW_OP_stack_value, 0, 0));
14289 return loc_result;
14290 }
14291
14292 loc_result = new_loc_descr (DW_OP_implicit_value,
14293 size, 0);
14294 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14295 loc_result->dw_loc_oprnd2.v.val_int = i;
14296 return loc_result;
14297 }
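/* For instance, with DWARF2_ADDR_SIZE == 8, SIZE == 4 and I == 5,
   DW_OP_lit5 DW_OP_stack_value takes 2 bytes while
   DW_OP_implicit_value <4> <5> would take 6, so the stack-value form wins.
   For I == 0x12345678 both forms take 6 bytes and the "<=" test above still
   prefers the stack-value form.  */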
14298
14299 /* Return a location descriptor that designates a base+offset location. */
14300
14301 static dw_loc_descr_ref
14302 based_loc_descr (rtx reg, poly_int64 offset,
14303 enum var_init_status initialized)
14304 {
14305 unsigned int regno;
14306 dw_loc_descr_ref result;
14307 dw_fde_ref fde = cfun->fde;
14308
14309 /* We only use "frame base" when we're sure we're talking about the
14310 post-prologue local stack frame. We do this by *not* running
14311 register elimination until this point, and recognizing the special
14312 argument pointer and soft frame pointer rtx's. */
14313 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14314 {
14315 rtx elim = (ira_use_lra_p
14316 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14317 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14318
14319 if (elim != reg)
14320 {
14321 elim = strip_offset_and_add (elim, &offset);
14322 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14323 && (elim == hard_frame_pointer_rtx
14324 || elim == stack_pointer_rtx))
14325 || elim == (frame_pointer_needed
14326 ? hard_frame_pointer_rtx
14327 : stack_pointer_rtx));
14328
14329 /* If drap register is used to align stack, use frame
14330 pointer + offset to access stack variables. If stack
14331 is aligned without drap, use stack pointer + offset to
14332 access stack variables. */
14333 if (crtl->stack_realign_tried
14334 && reg == frame_pointer_rtx)
14335 {
14336 int base_reg
14337 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14338 ? HARD_FRAME_POINTER_REGNUM
14339 : REGNO (elim));
14340 return new_reg_loc_descr (base_reg, offset);
14341 }
14342
14343 gcc_assert (frame_pointer_fb_offset_valid);
14344 offset += frame_pointer_fb_offset;
14345 HOST_WIDE_INT const_offset;
14346 if (offset.is_constant (&const_offset))
14347 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14348 else
14349 {
14350 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14351 loc_descr_plus_const (&ret, offset);
14352 return ret;
14353 }
14354 }
14355 }
14356
14357 regno = REGNO (reg);
14358 #ifdef LEAF_REG_REMAP
14359 if (crtl->uses_only_leaf_regs)
14360 {
14361 int leaf_reg = LEAF_REG_REMAP (regno);
14362 if (leaf_reg != -1)
14363 regno = (unsigned) leaf_reg;
14364 }
14365 #endif
14366 regno = DWARF_FRAME_REGNUM (regno);
14367
14368 HOST_WIDE_INT const_offset;
14369 if (!optimize && fde
14370 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14371 && offset.is_constant (&const_offset))
14372 {
14373 /* Use cfa+offset to represent the location of arguments passed
14374 on the stack when the drap register is used to align the stack.
14375 Only do this when not optimizing: for optimized code, var-tracking
14376 is supposed to track where the arguments live, and the register
14377 used as vdrap or drap in some spot might be used for something
14378 else in another part of the routine.  */
14379 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14380 }
14381
14382 result = new_reg_loc_descr (regno, offset);
14383
14384 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14385 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14386
14387 return result;
14388 }
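/* For example, a local living 8 bytes below the frame base typically ends up
   as DW_OP_fbreg <-8>, while a slot 16 bytes above some other (non-eliminable)
   base register is emitted through new_reg_loc_descr as DW_OP_breg<N> <16>
   (or DW_OP_bregx for high register numbers).  */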
14389
14390 /* Return true if this RTL expression describes a base+offset calculation. */
14391
14392 static inline int
14393 is_based_loc (const_rtx rtl)
14394 {
14395 return (GET_CODE (rtl) == PLUS
14396 && ((REG_P (XEXP (rtl, 0))
14397 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14398 && CONST_INT_P (XEXP (rtl, 1)))));
14399 }
14400
14401 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14402 failed. */
14403
14404 static dw_loc_descr_ref
14405 tls_mem_loc_descriptor (rtx mem)
14406 {
14407 tree base;
14408 dw_loc_descr_ref loc_result;
14409
14410 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14411 return NULL;
14412
14413 base = get_base_address (MEM_EXPR (mem));
14414 if (base == NULL
14415 || !VAR_P (base)
14416 || !DECL_THREAD_LOCAL_P (base))
14417 return NULL;
14418
14419 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14420 if (loc_result == NULL)
14421 return NULL;
14422
14423 if (maybe_ne (MEM_OFFSET (mem), 0))
14424 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14425
14426 return loc_result;
14427 }
14428
14429 /* Output debug info about the reason why we failed to expand an expression
14430 as a DWARF expression.  */
14431
14432 static void
14433 expansion_failed (tree expr, rtx rtl, char const *reason)
14434 {
14435 if (dump_file && (dump_flags & TDF_DETAILS))
14436 {
14437 fprintf (dump_file, "Failed to expand as dwarf: ");
14438 if (expr)
14439 print_generic_expr (dump_file, expr, dump_flags);
14440 if (rtl)
14441 {
14442 fprintf (dump_file, "\n");
14443 print_rtl (dump_file, rtl);
14444 }
14445 fprintf (dump_file, "\nReason: %s\n", reason);
14446 }
14447 }
14448
14449 /* Helper function for const_ok_for_output. */
14450
14451 static bool
14452 const_ok_for_output_1 (rtx rtl)
14453 {
14454 if (targetm.const_not_ok_for_debug_p (rtl))
14455 {
14456 if (GET_CODE (rtl) != UNSPEC)
14457 {
14458 expansion_failed (NULL_TREE, rtl,
14459 "Expression rejected for debug by the backend.\n");
14460 return false;
14461 }
14462
14463 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14464 the target hook doesn't explicitly allow it in debug info, assume
14465 we can't express it in the debug info. */
14466 /* Don't complain about TLS UNSPECs, those are just too hard to
14467 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14468 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14469 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14470 if (flag_checking
14471 && (XVECLEN (rtl, 0) == 0
14472 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14473 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14474 inform (current_function_decl
14475 ? DECL_SOURCE_LOCATION (current_function_decl)
14476 : UNKNOWN_LOCATION,
14477 #if NUM_UNSPEC_VALUES > 0
14478 "non-delegitimized UNSPEC %s (%d) found in variable location",
14479 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14480 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14481 XINT (rtl, 1));
14482 #else
14483 "non-delegitimized UNSPEC %d found in variable location",
14484 XINT (rtl, 1));
14485 #endif
14486 expansion_failed (NULL_TREE, rtl,
14487 "UNSPEC hasn't been delegitimized.\n");
14488 return false;
14489 }
14490
14491 if (CONST_POLY_INT_P (rtl))
14492 return false;
14493
14494 if (targetm.const_not_ok_for_debug_p (rtl))
14495 {
14496 expansion_failed (NULL_TREE, rtl,
14497 "Expression rejected for debug by the backend.\n");
14498 return false;
14499 }
14500
14501 /* FIXME: Refer to PR60655. It is possible for simplification
14502 of rtl expressions in var tracking to produce such expressions.
14503 We should really identify / validate expressions
14504 enclosed in CONST that can be handled by assemblers on various
14505 targets and only handle legitimate cases here. */
14506 switch (GET_CODE (rtl))
14507 {
14508 case SYMBOL_REF:
14509 break;
14510 case NOT:
14511 case NEG:
14512 return false;
14513 default:
14514 return true;
14515 }
14516
14517 if (CONSTANT_POOL_ADDRESS_P (rtl))
14518 {
14519 bool marked;
14520 get_pool_constant_mark (rtl, &marked);
14521 /* If all references to this pool constant were optimized away,
14522 it was not output and thus we can't represent it. */
14523 if (!marked)
14524 {
14525 expansion_failed (NULL_TREE, rtl,
14526 "Constant was removed from constant pool.\n");
14527 return false;
14528 }
14529 }
14530
14531 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14532 return false;
14533
14534 /* Avoid references to external symbols in debug info: on several targets
14535 the linker might even refuse to link when linking a shared library,
14536 and in many other cases the relocations for .debug_info/.debug_loc are
14537 dropped, so the address becomes zero anyway.  Hidden symbols, which are
14538 guaranteed to be defined within the same shared library or executable, are fine.  */
14539 if (SYMBOL_REF_EXTERNAL_P (rtl))
14540 {
14541 tree decl = SYMBOL_REF_DECL (rtl);
14542
14543 if (decl == NULL || !targetm.binds_local_p (decl))
14544 {
14545 expansion_failed (NULL_TREE, rtl,
14546 "Symbol not defined in current TU.\n");
14547 return false;
14548 }
14549 }
14550
14551 return true;
14552 }
14553
14554 /* Return true if constant RTL can be emitted in DW_OP_addr or
14555 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14556 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14557
14558 static bool
14559 const_ok_for_output (rtx rtl)
14560 {
14561 if (GET_CODE (rtl) == SYMBOL_REF)
14562 return const_ok_for_output_1 (rtl);
14563
14564 if (GET_CODE (rtl) == CONST)
14565 {
14566 subrtx_var_iterator::array_type array;
14567 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14568 if (!const_ok_for_output_1 (*iter))
14569 return false;
14570 return true;
14571 }
14572
14573 return true;
14574 }
14575
14576 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14577 if possible, NULL otherwise. */
14578
14579 static dw_die_ref
14580 base_type_for_mode (machine_mode mode, bool unsignedp)
14581 {
14582 dw_die_ref type_die;
14583 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14584
14585 if (type == NULL)
14586 return NULL;
14587 switch (TREE_CODE (type))
14588 {
14589 case INTEGER_TYPE:
14590 case REAL_TYPE:
14591 break;
14592 default:
14593 return NULL;
14594 }
14595 type_die = lookup_type_die (type);
14596 if (!type_die)
14597 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14598 comp_unit_die ());
14599 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14600 return NULL;
14601 return type_die;
14602 }
14603
14604 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14605 type matching MODE, or, if MODE is narrower than or as wide as
14606 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14607 possible. */
14608
14609 static dw_loc_descr_ref
14610 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14611 {
14612 machine_mode outer_mode = mode;
14613 dw_die_ref type_die;
14614 dw_loc_descr_ref cvt;
14615
14616 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14617 {
14618 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14619 return op;
14620 }
14621 type_die = base_type_for_mode (outer_mode, 1);
14622 if (type_die == NULL)
14623 return NULL;
14624 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14625 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14626 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14627 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14628 add_loc_descr (&op, cvt);
14629 return op;
14630 }
14631
14632 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14633
14634 static dw_loc_descr_ref
14635 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14636 dw_loc_descr_ref op1)
14637 {
14638 dw_loc_descr_ref ret = op0;
14639 add_loc_descr (&ret, op1);
14640 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14641 if (STORE_FLAG_VALUE != 1)
14642 {
14643 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14644 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14645 }
14646 return ret;
14647 }
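/* On most targets STORE_FLAG_VALUE is 1 and the result is simply
   <op0> <op1> <OP>.  Where STORE_FLAG_VALUE is -1, the flag is rescaled,
   e.g. <op0> <op1> DW_OP_lt DW_OP_const1s <-1> DW_OP_mul.  */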
14648
14649 /* Subroutine of scompare_loc_descriptor for the case in which we're
14650 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14651 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14652
14653 static dw_loc_descr_ref
14654 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14655 scalar_int_mode op_mode,
14656 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14657 {
14658 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14659 dw_loc_descr_ref cvt;
14660
14661 if (type_die == NULL)
14662 return NULL;
14663 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14664 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14665 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14666 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14667 add_loc_descr (&op0, cvt);
14668 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14669 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14670 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14671 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14672 add_loc_descr (&op1, cvt);
14673 return compare_loc_descriptor (op, op0, op1);
14674 }
14675
14676 /* Subroutine of scompare_loc_descriptor for the case in which we're
14677 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14678 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14679
14680 static dw_loc_descr_ref
14681 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14682 scalar_int_mode op_mode,
14683 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14684 {
14685 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14686 /* For eq/ne, if the operands are known to be zero-extended,
14687 there is no need to do the fancy shifting up. */
14688 if (op == DW_OP_eq || op == DW_OP_ne)
14689 {
14690 dw_loc_descr_ref last0, last1;
14691 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14692 ;
14693 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14694 ;
14695 /* deref_size zero extends, and for constants we can check
14696 whether they are zero extended or not. */
14697 if (((last0->dw_loc_opc == DW_OP_deref_size
14698 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14699 || (CONST_INT_P (XEXP (rtl, 0))
14700 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14701 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14702 && ((last1->dw_loc_opc == DW_OP_deref_size
14703 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14704 || (CONST_INT_P (XEXP (rtl, 1))
14705 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14706 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14707 return compare_loc_descriptor (op, op0, op1);
14708
14709 /* EQ/NE comparison against constant in narrower type than
14710 DWARF2_ADDR_SIZE can be performed either as
14711 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14712 DW_OP_{eq,ne}
14713 or
14714 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14715 DW_OP_{eq,ne}. Pick whatever is shorter. */
14716 if (CONST_INT_P (XEXP (rtl, 1))
14717 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14718 && (size_of_int_loc_descriptor (shift) + 1
14719 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14720 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14721 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14722 & GET_MODE_MASK (op_mode))))
14723 {
14724 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14725 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14726 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14727 & GET_MODE_MASK (op_mode));
14728 return compare_loc_descriptor (op, op0, op1);
14729 }
14730 }
14731 add_loc_descr (&op0, int_loc_descriptor (shift));
14732 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14733 if (CONST_INT_P (XEXP (rtl, 1)))
14734 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14735 else
14736 {
14737 add_loc_descr (&op1, int_loc_descriptor (shift));
14738 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14739 }
14740 return compare_loc_descriptor (op, op0, op1);
14741 }
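/* For example, comparing two HImode operands with DWARF2_ADDR_SIZE == 4 uses
   shift == 16: each operand is shifted left by 16 so that its sign bit lines
   up with the sign bit of the address-sized stack slot, after which the
   ordinary signed DW_OP_lt/le/gt/ge gives the correct answer.  */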
14742
14743 /* Return location descriptor for signed comparison OP RTL.  */
14744
14745 static dw_loc_descr_ref
14746 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14747 machine_mode mem_mode)
14748 {
14749 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14750 dw_loc_descr_ref op0, op1;
14751
14752 if (op_mode == VOIDmode)
14753 op_mode = GET_MODE (XEXP (rtl, 1));
14754 if (op_mode == VOIDmode)
14755 return NULL;
14756
14757 scalar_int_mode int_op_mode;
14758 if (dwarf_strict
14759 && dwarf_version < 5
14760 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14761 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14762 return NULL;
14763
14764 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14765 VAR_INIT_STATUS_INITIALIZED);
14766 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14767 VAR_INIT_STATUS_INITIALIZED);
14768
14769 if (op0 == NULL || op1 == NULL)
14770 return NULL;
14771
14772 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14773 {
14774 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14775 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14776
14777 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14778 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14779 }
14780 return compare_loc_descriptor (op, op0, op1);
14781 }
14782
14783 /* Return location descriptor for unsigned comparison OP RTL. */
14784
14785 static dw_loc_descr_ref
14786 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14787 machine_mode mem_mode)
14788 {
14789 dw_loc_descr_ref op0, op1;
14790
14791 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14792 if (test_op_mode == VOIDmode)
14793 test_op_mode = GET_MODE (XEXP (rtl, 1));
14794
14795 scalar_int_mode op_mode;
14796 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14797 return NULL;
14798
14799 if (dwarf_strict
14800 && dwarf_version < 5
14801 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14802 return NULL;
14803
14804 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14805 VAR_INIT_STATUS_INITIALIZED);
14806 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14807 VAR_INIT_STATUS_INITIALIZED);
14808
14809 if (op0 == NULL || op1 == NULL)
14810 return NULL;
14811
14812 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14813 {
14814 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14815 dw_loc_descr_ref last0, last1;
14816 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14817 ;
14818 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14819 ;
14820 if (CONST_INT_P (XEXP (rtl, 0)))
14821 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14822 /* deref_size zero extends, so no need to mask it again. */
14823 else if (last0->dw_loc_opc != DW_OP_deref_size
14824 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14825 {
14826 add_loc_descr (&op0, int_loc_descriptor (mask));
14827 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14828 }
14829 if (CONST_INT_P (XEXP (rtl, 1)))
14830 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14831 /* deref_size zero extends, so no need to mask it again. */
14832 else if (last1->dw_loc_opc != DW_OP_deref_size
14833 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14834 {
14835 add_loc_descr (&op1, int_loc_descriptor (mask));
14836 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14837 }
14838 }
14839 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14840 {
14841 HOST_WIDE_INT bias = 1;
14842 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14843 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14844 if (CONST_INT_P (XEXP (rtl, 1)))
14845 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14846 + INTVAL (XEXP (rtl, 1)));
14847 else
14848 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14849 bias, 0));
14850 }
14851 return compare_loc_descriptor (op, op0, op1);
14852 }
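/* The bias trick used above for OP_MODE as wide as DWARF2_ADDR_SIZE maps
   unsigned order onto signed order by adding 2**(N-1) (modulo 2**N) to both
   operands.  E.g. with N == 32, a == 0xffffffff and b == 1 become
   0x7fffffff and 0x80000001 (i.e. -2147483647), and the signed comparison
   0x7fffffff > -2147483647 matches the unsigned fact a > b.  */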
14853
14854 /* Return location descriptor for {U,S}{MIN,MAX}. */
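/* Roughly, the sequence built below is (DW_OP_lt for MIN, DW_OP_gt for MAX):
     <op0> DW_OP_dup [fixup0] <op1> DW_OP_swap DW_OP_over [fixup1]
     DW_OP_lt/DW_OP_gt DW_OP_bra <L1> DW_OP_swap
     L1: DW_OP_drop
   where the optional fixups mask, bias, shift or convert the copies being
   compared while the original values stay underneath.  */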
14855
14856 static dw_loc_descr_ref
14857 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14858 machine_mode mem_mode)
14859 {
14860 enum dwarf_location_atom op;
14861 dw_loc_descr_ref op0, op1, ret;
14862 dw_loc_descr_ref bra_node, drop_node;
14863
14864 scalar_int_mode int_mode;
14865 if (dwarf_strict
14866 && dwarf_version < 5
14867 && (!is_a <scalar_int_mode> (mode, &int_mode)
14868 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14869 return NULL;
14870
14871 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14872 VAR_INIT_STATUS_INITIALIZED);
14873 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14874 VAR_INIT_STATUS_INITIALIZED);
14875
14876 if (op0 == NULL || op1 == NULL)
14877 return NULL;
14878
14879 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14880 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14881 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14882 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14883 {
14884 /* Checked by the caller. */
14885 int_mode = as_a <scalar_int_mode> (mode);
14886 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14887 {
14888 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14889 add_loc_descr (&op0, int_loc_descriptor (mask));
14890 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14891 add_loc_descr (&op1, int_loc_descriptor (mask));
14892 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14893 }
14894 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14895 {
14896 HOST_WIDE_INT bias = 1;
14897 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14898 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14899 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14900 }
14901 }
14902 else if (is_a <scalar_int_mode> (mode, &int_mode)
14903 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14904 {
14905 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14906 add_loc_descr (&op0, int_loc_descriptor (shift));
14907 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14908 add_loc_descr (&op1, int_loc_descriptor (shift));
14909 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14910 }
14911 else if (is_a <scalar_int_mode> (mode, &int_mode)
14912 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14913 {
14914 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14915 dw_loc_descr_ref cvt;
14916 if (type_die == NULL)
14917 return NULL;
14918 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14919 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14920 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14921 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14922 add_loc_descr (&op0, cvt);
14923 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14924 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14925 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14926 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14927 add_loc_descr (&op1, cvt);
14928 }
14929
14930 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14931 op = DW_OP_lt;
14932 else
14933 op = DW_OP_gt;
14934 ret = op0;
14935 add_loc_descr (&ret, op1);
14936 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14937 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14938 add_loc_descr (&ret, bra_node);
14939 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14940 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14941 add_loc_descr (&ret, drop_node);
14942 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14943 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14944 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14945 && is_a <scalar_int_mode> (mode, &int_mode)
14946 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14947 ret = convert_descriptor_to_mode (int_mode, ret);
14948 return ret;
14949 }
14950
14951 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP,
14952 but only after converting both arguments to TYPE_DIE; afterwards, convert
14953 the result back to an unsigned (or untyped) value of MODE.  */
14954
14955 static dw_loc_descr_ref
14956 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14957 scalar_int_mode mode, machine_mode mem_mode)
14958 {
14959 dw_loc_descr_ref cvt, op0, op1;
14960
14961 if (type_die == NULL)
14962 return NULL;
14963 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14964 VAR_INIT_STATUS_INITIALIZED);
14965 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14966 VAR_INIT_STATUS_INITIALIZED);
14967 if (op0 == NULL || op1 == NULL)
14968 return NULL;
14969 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14970 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14971 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14972 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14973 add_loc_descr (&op0, cvt);
14974 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14975 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14976 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14977 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14978 add_loc_descr (&op1, cvt);
14979 add_loc_descr (&op0, op1);
14980 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14981 return convert_descriptor_to_mode (mode, op0);
14982 }
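/* The result has the shape
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
     DW_OP_convert <back to MODE, or to untyped if MODE fits in an address>
   so an operation that must be carried out in a typed mode still yields a
   value usable by the surrounding untyped expression.  */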
14983
14984 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14985 const0 is DW_OP_lit0 or corresponding typed constant,
14986 const1 is DW_OP_lit1 or corresponding typed constant
14987 and constMSB is constant with just the MSB bit set
14988 for the mode):
14989 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14990 L1: const0 DW_OP_swap
14991 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14992 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14993 L3: DW_OP_drop
14994 L4: DW_OP_nop
14995
14996 CTZ is similar:
14997 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14998 L1: const0 DW_OP_swap
14999 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15000 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15001 L3: DW_OP_drop
15002 L4: DW_OP_nop
15003
15004 FFS is similar:
15005 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15006 L1: const1 DW_OP_swap
15007 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15008 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15009 L3: DW_OP_drop
15010 L4: DW_OP_nop */
15011
15012 static dw_loc_descr_ref
15013 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15014 machine_mode mem_mode)
15015 {
15016 dw_loc_descr_ref op0, ret, tmp;
15017 HOST_WIDE_INT valv;
15018 dw_loc_descr_ref l1jump, l1label;
15019 dw_loc_descr_ref l2jump, l2label;
15020 dw_loc_descr_ref l3jump, l3label;
15021 dw_loc_descr_ref l4jump, l4label;
15022 rtx msb;
15023
15024 if (GET_MODE (XEXP (rtl, 0)) != mode)
15025 return NULL;
15026
15027 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15028 VAR_INIT_STATUS_INITIALIZED);
15029 if (op0 == NULL)
15030 return NULL;
15031 ret = op0;
15032 if (GET_CODE (rtl) == CLZ)
15033 {
15034 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15035 valv = GET_MODE_BITSIZE (mode);
15036 }
15037 else if (GET_CODE (rtl) == FFS)
15038 valv = 0;
15039 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15040 valv = GET_MODE_BITSIZE (mode);
15041 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15042 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15043 add_loc_descr (&ret, l1jump);
15044 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15045 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15046 VAR_INIT_STATUS_INITIALIZED);
15047 if (tmp == NULL)
15048 return NULL;
15049 add_loc_descr (&ret, tmp);
15050 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15051 add_loc_descr (&ret, l4jump);
15052 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15053 ? const1_rtx : const0_rtx,
15054 mode, mem_mode,
15055 VAR_INIT_STATUS_INITIALIZED);
15056 if (l1label == NULL)
15057 return NULL;
15058 add_loc_descr (&ret, l1label);
15059 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15060 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15061 add_loc_descr (&ret, l2label);
15062 if (GET_CODE (rtl) != CLZ)
15063 msb = const1_rtx;
15064 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15065 msb = GEN_INT (HOST_WIDE_INT_1U
15066 << (GET_MODE_BITSIZE (mode) - 1));
15067 else
15068 msb = immed_wide_int_const
15069 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15070 GET_MODE_PRECISION (mode)), mode);
15071 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15072 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15073 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15074 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15075 else
15076 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15077 VAR_INIT_STATUS_INITIALIZED);
15078 if (tmp == NULL)
15079 return NULL;
15080 add_loc_descr (&ret, tmp);
15081 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15082 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15083 add_loc_descr (&ret, l3jump);
15084 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15085 VAR_INIT_STATUS_INITIALIZED);
15086 if (tmp == NULL)
15087 return NULL;
15088 add_loc_descr (&ret, tmp);
15089 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15090 ? DW_OP_shl : DW_OP_shr, 0, 0));
15091 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15092 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15093 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15094 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15095 add_loc_descr (&ret, l2jump);
15096 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15097 add_loc_descr (&ret, l3label);
15098 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15099 add_loc_descr (&ret, l4label);
15100 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15101 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15102 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15103 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15104 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15105 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15106 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15107 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15108 return ret;
15109 }
15110
15111 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15112 const1 is DW_OP_lit1 or corresponding typed constant):
15113 const0 DW_OP_swap
15114 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15115 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15116 L2: DW_OP_drop
15117
15118 PARITY is similar:
15119 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15120 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15121 L2: DW_OP_drop */
15122
15123 static dw_loc_descr_ref
15124 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15125 machine_mode mem_mode)
15126 {
15127 dw_loc_descr_ref op0, ret, tmp;
15128 dw_loc_descr_ref l1jump, l1label;
15129 dw_loc_descr_ref l2jump, l2label;
15130
15131 if (GET_MODE (XEXP (rtl, 0)) != mode)
15132 return NULL;
15133
15134 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15135 VAR_INIT_STATUS_INITIALIZED);
15136 if (op0 == NULL)
15137 return NULL;
15138 ret = op0;
15139 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15140 VAR_INIT_STATUS_INITIALIZED);
15141 if (tmp == NULL)
15142 return NULL;
15143 add_loc_descr (&ret, tmp);
15144 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15145 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15146 add_loc_descr (&ret, l1label);
15147 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15148 add_loc_descr (&ret, l2jump);
15149 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15151 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15152 VAR_INIT_STATUS_INITIALIZED);
15153 if (tmp == NULL)
15154 return NULL;
15155 add_loc_descr (&ret, tmp);
15156 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15157 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15158 ? DW_OP_plus : DW_OP_xor, 0, 0));
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15160 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15161 VAR_INIT_STATUS_INITIALIZED);
15162 add_loc_descr (&ret, tmp);
15163 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15164 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15165 add_loc_descr (&ret, l1jump);
15166 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15167 add_loc_descr (&ret, l2label);
15168 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15169 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15170 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15171 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15172 return ret;
15173 }
15174
15175 /* BSWAP (constS is initial shift count, either 56 or 24):
15176 constS const0
15177 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15178 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15179 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15180 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15181 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15182
15183 static dw_loc_descr_ref
15184 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15185 machine_mode mem_mode)
15186 {
15187 dw_loc_descr_ref op0, ret, tmp;
15188 dw_loc_descr_ref l1jump, l1label;
15189 dw_loc_descr_ref l2jump, l2label;
15190
15191 if (BITS_PER_UNIT != 8
15192 || (GET_MODE_BITSIZE (mode) != 32
15193 && GET_MODE_BITSIZE (mode) != 64))
15194 return NULL;
15195
15196 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15197 VAR_INIT_STATUS_INITIALIZED);
15198 if (op0 == NULL)
15199 return NULL;
15200
15201 ret = op0;
15202 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15203 mode, mem_mode,
15204 VAR_INIT_STATUS_INITIALIZED);
15205 if (tmp == NULL)
15206 return NULL;
15207 add_loc_descr (&ret, tmp);
15208 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15209 VAR_INIT_STATUS_INITIALIZED);
15210 if (tmp == NULL)
15211 return NULL;
15212 add_loc_descr (&ret, tmp);
15213 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15214 add_loc_descr (&ret, l1label);
15215 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15216 mode, mem_mode,
15217 VAR_INIT_STATUS_INITIALIZED);
15218 add_loc_descr (&ret, tmp);
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15222 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15223 VAR_INIT_STATUS_INITIALIZED);
15224 if (tmp == NULL)
15225 return NULL;
15226 add_loc_descr (&ret, tmp);
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15228 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15229 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15232 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15233 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15234 VAR_INIT_STATUS_INITIALIZED);
15235 add_loc_descr (&ret, tmp);
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15237 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15238 add_loc_descr (&ret, l2jump);
15239 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15240 VAR_INIT_STATUS_INITIALIZED);
15241 add_loc_descr (&ret, tmp);
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15243 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15244 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15245 add_loc_descr (&ret, l1jump);
15246 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15247 add_loc_descr (&ret, l2label);
15248 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15249 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15250 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15251 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15252 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15253 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15254 return ret;
15255 }
15256
15257 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15258 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15259 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15260 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15261
15262 ROTATERT is similar:
15263 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15264 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15265 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15266
15267 static dw_loc_descr_ref
15268 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15269 machine_mode mem_mode)
15270 {
15271 rtx rtlop1 = XEXP (rtl, 1);
15272 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15273 int i;
15274
15275 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15276 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15277 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15278 VAR_INIT_STATUS_INITIALIZED);
15279 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15280 VAR_INIT_STATUS_INITIALIZED);
15281 if (op0 == NULL || op1 == NULL)
15282 return NULL;
15283 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15284 for (i = 0; i < 2; i++)
15285 {
15286 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15287 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15288 mode, mem_mode,
15289 VAR_INIT_STATUS_INITIALIZED);
15290 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15291 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15292 ? DW_OP_const4u
15293 : HOST_BITS_PER_WIDE_INT == 64
15294 ? DW_OP_const8u : DW_OP_constu,
15295 GET_MODE_MASK (mode), 0);
15296 else
15297 mask[i] = NULL;
15298 if (mask[i] == NULL)
15299 return NULL;
15300 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15301 }
15302 ret = op0;
15303 add_loc_descr (&ret, op1);
15304 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15306 if (GET_CODE (rtl) == ROTATERT)
15307 {
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15310 GET_MODE_BITSIZE (mode), 0));
15311 }
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15313 if (mask[0] != NULL)
15314 add_loc_descr (&ret, mask[0]);
15315 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15316 if (mask[1] != NULL)
15317 {
15318 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15319 add_loc_descr (&ret, mask[1]);
15320 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15321 }
15322 if (GET_CODE (rtl) == ROTATE)
15323 {
15324 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15325 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15326 GET_MODE_BITSIZE (mode), 0));
15327 }
15328 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15329 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15330 return ret;
15331 }
15332
15333 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15334 for DEBUG_PARAMETER_REF RTL. */
15335
15336 static dw_loc_descr_ref
15337 parameter_ref_descriptor (rtx rtl)
15338 {
15339 dw_loc_descr_ref ret;
15340 dw_die_ref ref;
15341
15342 if (dwarf_strict)
15343 return NULL;
15344 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15345 /* With LTO during LTRANS we get the late DIE that refers to the early
15346 DIE, thus we add another indirection here. This seems to confuse
15347 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15348 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15349 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15350 if (ref)
15351 {
15352 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15353 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15354 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15355 }
15356 else
15357 {
15358 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15359 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15360 }
15361 return ret;
15362 }
15363
15364 /* The following routine converts the RTL for a variable or parameter
15365 (resident in memory) into an equivalent Dwarf representation of a
15366 mechanism for getting the address of that same variable onto the top of a
15367 hypothetical "address evaluation" stack.
15368
15369 When creating memory location descriptors, we are effectively transforming
15370 the RTL for a memory-resident object into its Dwarf postfix expression
15371 equivalent. This routine recursively descends an RTL tree, turning
15372 it into Dwarf postfix code as it goes.
15373
15374 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15375
15376 MEM_MODE is the mode of the memory reference, needed to handle some
15377 autoincrement addressing modes.
15378
15379 Return 0 if we can't represent the location. */
15380
15381 dw_loc_descr_ref
15382 mem_loc_descriptor (rtx rtl, machine_mode mode,
15383 machine_mode mem_mode,
15384 enum var_init_status initialized)
15385 {
15386 dw_loc_descr_ref mem_loc_result = NULL;
15387 enum dwarf_location_atom op;
15388 dw_loc_descr_ref op0, op1;
15389 rtx inner = NULL_RTX;
15390 poly_int64 offset;
15391
15392 if (mode == VOIDmode)
15393 mode = GET_MODE (rtl);
15394
15395 /* Note that for a dynamically sized array, the location we will generate a
15396 description of here will be the lowest numbered location which is
15397 actually within the array. That's *not* necessarily the same as the
15398 zeroth element of the array. */
15399
15400 rtl = targetm.delegitimize_address (rtl);
15401
15402 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15403 return NULL;
15404
15405 scalar_int_mode int_mode, inner_mode, op1_mode;
15406 switch (GET_CODE (rtl))
15407 {
15408 case POST_INC:
15409 case POST_DEC:
15410 case POST_MODIFY:
15411 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15412
15413 case SUBREG:
15414 /* The case of a subreg may arise when we have a local (register)
15415 variable or a formal (register) parameter which doesn't quite fill
15416 up an entire register. For now, just assume that it is
15417 legitimate to make the Dwarf info refer to the whole register which
15418 contains the given subreg. */
15419 if (!subreg_lowpart_p (rtl))
15420 break;
15421 inner = SUBREG_REG (rtl);
15422 /* FALLTHRU */
15423 case TRUNCATE:
15424 if (inner == NULL_RTX)
15425 inner = XEXP (rtl, 0);
15426 if (is_a <scalar_int_mode> (mode, &int_mode)
15427 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15428 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15429 #ifdef POINTERS_EXTEND_UNSIGNED
15430 || (int_mode == Pmode && mem_mode != VOIDmode)
15431 #endif
15432 )
15433 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15434 {
15435 mem_loc_result = mem_loc_descriptor (inner,
15436 inner_mode,
15437 mem_mode, initialized);
15438 break;
15439 }
15440 if (dwarf_strict && dwarf_version < 5)
15441 break;
15442 if (is_a <scalar_int_mode> (mode, &int_mode)
15443 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15444 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15445 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15446 {
15447 dw_die_ref type_die;
15448 dw_loc_descr_ref cvt;
15449
15450 mem_loc_result = mem_loc_descriptor (inner,
15451 GET_MODE (inner),
15452 mem_mode, initialized);
15453 if (mem_loc_result == NULL)
15454 break;
15455 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15456 if (type_die == NULL)
15457 {
15458 mem_loc_result = NULL;
15459 break;
15460 }
15461 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15462 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15463 else
15464 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15465 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15466 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15467 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15468 add_loc_descr (&mem_loc_result, cvt);
15469 if (is_a <scalar_int_mode> (mode, &int_mode)
15470 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15471 {
15472 /* Convert it to untyped afterwards. */
15473 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15474 add_loc_descr (&mem_loc_result, cvt);
15475 }
15476 }
15477 break;
15478
15479 case REG:
15480 if (!is_a <scalar_int_mode> (mode, &int_mode)
15481 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15482 && rtl != arg_pointer_rtx
15483 && rtl != frame_pointer_rtx
15484 #ifdef POINTERS_EXTEND_UNSIGNED
15485 && (int_mode != Pmode || mem_mode == VOIDmode)
15486 #endif
15487 ))
15488 {
15489 dw_die_ref type_die;
15490 unsigned int dbx_regnum;
15491
15492 if (dwarf_strict && dwarf_version < 5)
15493 break;
15494 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15495 break;
15496 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15497 if (type_die == NULL)
15498 break;
15499
15500 dbx_regnum = dbx_reg_number (rtl);
15501 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15502 break;
15503 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15504 dbx_regnum, 0);
15505 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15506 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15507 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15508 break;
15509 }
15510 /* Whenever a register number forms a part of the description of the
15511 method for calculating the (dynamic) address of a memory resident
15512 object, DWARF rules require the register number be referred to as
15513 a "base register". This distinction is not based in any way upon
15514 what category of register the hardware believes the given register
15515 belongs to. This is strictly DWARF terminology we're dealing with
15516 here. Note that in cases where the location of a memory-resident
15517 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15518 OP_CONST (0)) the actual DWARF location descriptor that we generate
15519 may just be OP_BASEREG (basereg). This may look deceptively like
15520 the object in question was allocated to a register (rather than in
15521 memory) so DWARF consumers need to be aware of the subtle
15522 distinction between OP_REG and OP_BASEREG. */
15523 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15524 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15525 else if (stack_realign_drap
15526 && crtl->drap_reg
15527 && crtl->args.internal_arg_pointer == rtl
15528 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15529 {
15530 /* If RTL is internal_arg_pointer, which has been optimized
15531 out, use DRAP instead. */
15532 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15533 VAR_INIT_STATUS_INITIALIZED);
15534 }
15535 break;
15536
15537 case SIGN_EXTEND:
15538 case ZERO_EXTEND:
15539 if (!is_a <scalar_int_mode> (mode, &int_mode)
15540 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15541 break;
15542 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15543 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15544 if (op0 == 0)
15545 break;
15546 else if (GET_CODE (rtl) == ZERO_EXTEND
15547 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15548 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15549 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15550 to expand zero extend as two shifts instead of
15551 masking. */
15552 && GET_MODE_SIZE (inner_mode) <= 4)
15553 {
15554 mem_loc_result = op0;
15555 add_loc_descr (&mem_loc_result,
15556 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15557 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15558 }
15559 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15560 {
15561 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15562 shift *= BITS_PER_UNIT;
15563 if (GET_CODE (rtl) == SIGN_EXTEND)
15564 op = DW_OP_shra;
15565 else
15566 op = DW_OP_shr;
15567 mem_loc_result = op0;
15568 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15569 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15570 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15571 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15572 }
15573 else if (!dwarf_strict || dwarf_version >= 5)
15574 {
15575 dw_die_ref type_die1, type_die2;
15576 dw_loc_descr_ref cvt;
15577
15578 type_die1 = base_type_for_mode (inner_mode,
15579 GET_CODE (rtl) == ZERO_EXTEND);
15580 if (type_die1 == NULL)
15581 break;
15582 type_die2 = base_type_for_mode (int_mode, 1);
15583 if (type_die2 == NULL)
15584 break;
15585 mem_loc_result = op0;
15586 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15587 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15588 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15589 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15590 add_loc_descr (&mem_loc_result, cvt);
15591 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15592 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15593 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15594 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15595 add_loc_descr (&mem_loc_result, cvt);
15596 }
15597 break;
15598
15599 case MEM:
15600 {
15601 rtx new_rtl = avoid_constant_pool_reference (rtl);
15602 if (new_rtl != rtl)
15603 {
15604 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15605 initialized);
15606 if (mem_loc_result != NULL)
15607 return mem_loc_result;
15608 }
15609 }
15610 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15611 get_address_mode (rtl), mode,
15612 VAR_INIT_STATUS_INITIALIZED);
15613 if (mem_loc_result == NULL)
15614 mem_loc_result = tls_mem_loc_descriptor (rtl);
15615 if (mem_loc_result != NULL)
15616 {
15617 if (!is_a <scalar_int_mode> (mode, &int_mode)
15618 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15619 {
15620 dw_die_ref type_die;
15621 dw_loc_descr_ref deref;
15622 HOST_WIDE_INT size;
15623
15624 if (dwarf_strict && dwarf_version < 5)
15625 return NULL;
15626 if (!GET_MODE_SIZE (mode).is_constant (&size))
15627 return NULL;
15628 type_die
15629 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15630 if (type_die == NULL)
15631 return NULL;
15632 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15633 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15634 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15635 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15636 add_loc_descr (&mem_loc_result, deref);
15637 }
15638 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15639 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15640 else
15641 add_loc_descr (&mem_loc_result,
15642 new_loc_descr (DW_OP_deref_size,
15643 GET_MODE_SIZE (int_mode), 0));
15644 }
15645 break;
15646
15647 case LO_SUM:
15648 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15649
15650 case LABEL_REF:
15651 /* Some ports can transform a symbol ref into a label ref, because
15652 the symbol ref is too far away and has to be dumped into a constant
15653 pool. */
15654 case CONST:
15655 case SYMBOL_REF:
15656 if (!is_a <scalar_int_mode> (mode, &int_mode)
15657 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15658 #ifdef POINTERS_EXTEND_UNSIGNED
15659 && (int_mode != Pmode || mem_mode == VOIDmode)
15660 #endif
15661 ))
15662 break;
15663 if (GET_CODE (rtl) == SYMBOL_REF
15664 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15665 {
15666 dw_loc_descr_ref temp;
15667
15668 /* If this is not defined, we have no way to emit the data. */
15669 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15670 break;
15671
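/* Push the DTP-relative offset of the symbol (emitted through
   targetm.asm_out.output_dwarf_dtprel); the TLS operation added below
   turns it into an address in the running thread's TLS block.  */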
15672 temp = new_addr_loc_descr (rtl, dtprel_true);
15673
15674 /* We check for DWARF 5 here because gdb did not implement
15675 DW_OP_form_tls_address until after 7.12. */
15676 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15677 ? DW_OP_form_tls_address
15678 : DW_OP_GNU_push_tls_address),
15679 0, 0);
15680 add_loc_descr (&mem_loc_result, temp);
15681
15682 break;
15683 }
15684
15685 if (!const_ok_for_output (rtl))
15686 {
15687 if (GET_CODE (rtl) == CONST)
15688 switch (GET_CODE (XEXP (rtl, 0)))
15689 {
15690 case NOT:
15691 op = DW_OP_not;
15692 goto try_const_unop;
15693 case NEG:
15694 op = DW_OP_neg;
15695 goto try_const_unop;
15696 try_const_unop:
15697 rtx arg;
15698 arg = XEXP (XEXP (rtl, 0), 0);
15699 if (!CONSTANT_P (arg))
15700 arg = gen_rtx_CONST (int_mode, arg);
15701 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15702 initialized);
15703 if (op0)
15704 {
15705 mem_loc_result = op0;
15706 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15707 }
15708 break;
15709 default:
15710 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15711 mem_mode, initialized);
15712 break;
15713 }
15714 break;
15715 }
15716
15717 symref:
15718 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15719 vec_safe_push (used_rtx_array, rtl);
15720 break;
15721
15722 case CONCAT:
15723 case CONCATN:
15724 case VAR_LOCATION:
15725 case DEBUG_IMPLICIT_PTR:
15726 expansion_failed (NULL_TREE, rtl,
15727 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15728 return 0;
15729
15730 case ENTRY_VALUE:
15731 if (dwarf_strict && dwarf_version < 5)
15732 return NULL;
15733 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15734 {
15735 if (!is_a <scalar_int_mode> (mode, &int_mode)
15736 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15737 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15738 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15739 else
15740 {
15741 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15742 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15743 return NULL;
15744 op0 = one_reg_loc_descriptor (dbx_regnum,
15745 VAR_INIT_STATUS_INITIALIZED);
15746 }
15747 }
15748 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15749 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15750 {
15751 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15752 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15753 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15754 return NULL;
15755 }
15756 else
15757 gcc_unreachable ();
15758 if (op0 == NULL)
15759 return NULL;
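/* Wrap the register or memory expression in DW_OP_entry_value so that
   the consumer evaluates it with the value the expression had on entry
   to the current function.  */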
15760 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15761 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15762 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15763 break;
15764
15765 case DEBUG_PARAMETER_REF:
15766 mem_loc_result = parameter_ref_descriptor (rtl);
15767 break;
15768
15769 case PRE_MODIFY:
15770 /* Extract the PLUS expression nested inside and fall into
15771 PLUS code below. */
15772 rtl = XEXP (rtl, 1);
15773 goto plus;
15774
15775 case PRE_INC:
15776 case PRE_DEC:
15777 /* Turn these into a PLUS expression and fall into the PLUS code
15778 below. */
15779 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15780 gen_int_mode (GET_CODE (rtl) == PRE_INC
15781 ? GET_MODE_UNIT_SIZE (mem_mode)
15782 : -GET_MODE_UNIT_SIZE (mem_mode),
15783 mode));
15784
15785 /* fall through */
15786
15787 case PLUS:
15788 plus:
15789 if (is_based_loc (rtl)
15790 && is_a <scalar_int_mode> (mode, &int_mode)
15791 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15792 || XEXP (rtl, 0) == arg_pointer_rtx
15793 || XEXP (rtl, 0) == frame_pointer_rtx))
15794 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15795 INTVAL (XEXP (rtl, 1)),
15796 VAR_INIT_STATUS_INITIALIZED);
15797 else
15798 {
15799 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15800 VAR_INIT_STATUS_INITIALIZED);
15801 if (mem_loc_result == 0)
15802 break;
15803
15804 if (CONST_INT_P (XEXP (rtl, 1))
15805 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15806 <= DWARF2_ADDR_SIZE))
15807 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15808 else
15809 {
15810 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15811 VAR_INIT_STATUS_INITIALIZED);
15812 if (op1 == 0)
15813 return NULL;
15814 add_loc_descr (&mem_loc_result, op1);
15815 add_loc_descr (&mem_loc_result,
15816 new_loc_descr (DW_OP_plus, 0, 0));
15817 }
15818 }
15819 break;
15820
15821 /* If a pseudo-reg is optimized away, it is possible for it to
15822 be replaced with a MEM containing a multiply or shift. */
15823 case MINUS:
15824 op = DW_OP_minus;
15825 goto do_binop;
15826
15827 case MULT:
15828 op = DW_OP_mul;
15829 goto do_binop;
15830
15831 case DIV:
15832 if ((!dwarf_strict || dwarf_version >= 5)
15833 && is_a <scalar_int_mode> (mode, &int_mode)
15834 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15835 {
15836 mem_loc_result = typed_binop (DW_OP_div, rtl,
15837 base_type_for_mode (mode, 0),
15838 int_mode, mem_mode);
15839 break;
15840 }
15841 op = DW_OP_div;
15842 goto do_binop;
15843
15844 case UMOD:
15845 op = DW_OP_mod;
15846 goto do_binop;
15847
15848 case ASHIFT:
15849 op = DW_OP_shl;
15850 goto do_shift;
15851
15852 case ASHIFTRT:
15853 op = DW_OP_shra;
15854 goto do_shift;
15855
15856 case LSHIFTRT:
15857 op = DW_OP_shr;
15858 goto do_shift;
15859
15860 do_shift:
15861 if (!is_a <scalar_int_mode> (mode, &int_mode))
15862 break;
15863 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15864 VAR_INIT_STATUS_INITIALIZED);
15865 {
15866 rtx rtlop1 = XEXP (rtl, 1);
15867 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15868 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15869 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15870 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15871 VAR_INIT_STATUS_INITIALIZED);
15872 }
15873
15874 if (op0 == 0 || op1 == 0)
15875 break;
15876
15877 mem_loc_result = op0;
15878 add_loc_descr (&mem_loc_result, op1);
15879 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15880 break;
15881
15882 case AND:
15883 op = DW_OP_and;
15884 goto do_binop;
15885
15886 case IOR:
15887 op = DW_OP_or;
15888 goto do_binop;
15889
15890 case XOR:
15891 op = DW_OP_xor;
15892 goto do_binop;
15893
15894 do_binop:
15895 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15896 VAR_INIT_STATUS_INITIALIZED);
15897 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15898 VAR_INIT_STATUS_INITIALIZED);
15899
15900 if (op0 == 0 || op1 == 0)
15901 break;
15902
15903 mem_loc_result = op0;
15904 add_loc_descr (&mem_loc_result, op1);
15905 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15906 break;
15907
15908 case MOD:
15909 if ((!dwarf_strict || dwarf_version >= 5)
15910 && is_a <scalar_int_mode> (mode, &int_mode)
15911 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15912 {
15913 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15914 base_type_for_mode (mode, 0),
15915 int_mode, mem_mode);
15916 break;
15917 }
15918
15919 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15920 VAR_INIT_STATUS_INITIALIZED);
15921 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15922 VAR_INIT_STATUS_INITIALIZED);
15923
15924 if (op0 == 0 || op1 == 0)
15925 break;
15926
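/* DW_OP_mod is used for the unsigned modulus (see the UMOD case above),
   so for signed MOD compute op0 - (op0 / op1) * op1 on the DWARF stack:
   the two DW_OP_over operations duplicate both operands, DW_OP_div and
   DW_OP_mul form (op0 / op1) * op1, and DW_OP_minus subtracts that
   from op0.  */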
15927 mem_loc_result = op0;
15928 add_loc_descr (&mem_loc_result, op1);
15929 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15930 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15931 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15932 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15933 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15934 break;
15935
15936 case UDIV:
15937 if ((!dwarf_strict || dwarf_version >= 5)
15938 && is_a <scalar_int_mode> (mode, &int_mode))
15939 {
15940 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15941 {
15942 op = DW_OP_div;
15943 goto do_binop;
15944 }
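/* DW_OP_div is a signed operation, so perform the division through
   typed_binop on an unsigned base type to preserve UDIV's unsigned
   semantics.  */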
15945 mem_loc_result = typed_binop (DW_OP_div, rtl,
15946 base_type_for_mode (int_mode, 1),
15947 int_mode, mem_mode);
15948 }
15949 break;
15950
15951 case NOT:
15952 op = DW_OP_not;
15953 goto do_unop;
15954
15955 case ABS:
15956 op = DW_OP_abs;
15957 goto do_unop;
15958
15959 case NEG:
15960 op = DW_OP_neg;
15961 goto do_unop;
15962
15963 do_unop:
15964 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15965 VAR_INIT_STATUS_INITIALIZED);
15966
15967 if (op0 == 0)
15968 break;
15969
15970 mem_loc_result = op0;
15971 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15972 break;
15973
15974 case CONST_INT:
15975 if (!is_a <scalar_int_mode> (mode, &int_mode)
15976 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15977 #ifdef POINTERS_EXTEND_UNSIGNED
15978 || (int_mode == Pmode
15979 && mem_mode != VOIDmode
15980 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15981 #endif
15982 )
15983 {
15984 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15985 break;
15986 }
15987 if ((!dwarf_strict || dwarf_version >= 5)
15988 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15989 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15990 {
15991 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15992 scalar_int_mode amode;
15993 if (type_die == NULL)
15994 return NULL;
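/* A non-negative value that fits in an address-sized mode can be pushed
   as an ordinary constant and then converted to TYPE_DIE; do that when
   the untyped constant plus DW_OP_convert encodes in fewer bytes than
   DW_OP_const_type, whose operand always takes GET_MODE_SIZE (int_mode)
   bytes (one-byte references to the type DIE are assumed here).  */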
15995 if (INTVAL (rtl) >= 0
15996 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15997 .exists (&amode))
15998 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15999 /* const DW_OP_convert <XXX> vs.
16000 DW_OP_const_type <XXX, 1, const>. */
16001 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16002 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16003 {
16004 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16005 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16006 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16007 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16008 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16009 add_loc_descr (&mem_loc_result, op0);
16010 return mem_loc_result;
16011 }
16012 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16013 INTVAL (rtl));
16014 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16015 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16016 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16017 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16018 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16019 else
16020 {
16021 mem_loc_result->dw_loc_oprnd2.val_class
16022 = dw_val_class_const_double;
16023 mem_loc_result->dw_loc_oprnd2.v.val_double
16024 = double_int::from_shwi (INTVAL (rtl));
16025 }
16026 }
16027 break;
16028
16029 case CONST_DOUBLE:
16030 if (!dwarf_strict || dwarf_version >= 5)
16031 {
16032 dw_die_ref type_die;
16033
16034 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16035 CONST_DOUBLE rtx could represent either a large integer
16036 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16037 the value is always a floating point constant.
16038
16039 When it is an integer, a CONST_DOUBLE is used whenever
16040 the constant requires 2 HWIs to be adequately represented.
16041 We output CONST_DOUBLEs as blocks. */
16042 if (mode == VOIDmode
16043 || (GET_MODE (rtl) == VOIDmode
16044 && maybe_ne (GET_MODE_BITSIZE (mode),
16045 HOST_BITS_PER_DOUBLE_INT)))
16046 break;
16047 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16048 if (type_die == NULL)
16049 return NULL;
16050 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16051 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16052 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16053 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16054 #if TARGET_SUPPORTS_WIDE_INT == 0
16055 if (!SCALAR_FLOAT_MODE_P (mode))
16056 {
16057 mem_loc_result->dw_loc_oprnd2.val_class
16058 = dw_val_class_const_double;
16059 mem_loc_result->dw_loc_oprnd2.v.val_double
16060 = rtx_to_double_int (rtl);
16061 }
16062 else
16063 #endif
16064 {
16065 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16066 unsigned int length = GET_MODE_SIZE (float_mode);
16067 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16068
16069 insert_float (rtl, array);
16070 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16071 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16072 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16073 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16074 }
16075 }
16076 break;
16077
16078 case CONST_WIDE_INT:
16079 if (!dwarf_strict || dwarf_version >= 5)
16080 {
16081 dw_die_ref type_die;
16082
16083 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16084 if (type_die == NULL)
16085 return NULL;
16086 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16087 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16088 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16089 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16090 mem_loc_result->dw_loc_oprnd2.val_class
16091 = dw_val_class_wide_int;
16092 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16093 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16094 }
16095 break;
16096
16097 case CONST_POLY_INT:
16098 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16099 break;
16100
16101 case EQ:
16102 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16103 break;
16104
16105 case GE:
16106 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16107 break;
16108
16109 case GT:
16110 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16111 break;
16112
16113 case LE:
16114 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16115 break;
16116
16117 case LT:
16118 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16119 break;
16120
16121 case NE:
16122 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16123 break;
16124
16125 case GEU:
16126 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16127 break;
16128
16129 case GTU:
16130 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16131 break;
16132
16133 case LEU:
16134 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16135 break;
16136
16137 case LTU:
16138 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16139 break;
16140
16141 case UMIN:
16142 case UMAX:
16143 if (!SCALAR_INT_MODE_P (mode))
16144 break;
16145 /* FALLTHRU */
16146 case SMIN:
16147 case SMAX:
16148 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16149 break;
16150
16151 case ZERO_EXTRACT:
16152 case SIGN_EXTRACT:
16153 if (CONST_INT_P (XEXP (rtl, 1))
16154 && CONST_INT_P (XEXP (rtl, 2))
16155 && is_a <scalar_int_mode> (mode, &int_mode)
16156 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16157 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16158 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16159 && ((unsigned) INTVAL (XEXP (rtl, 1))
16160 + (unsigned) INTVAL (XEXP (rtl, 2))
16161 <= GET_MODE_BITSIZE (int_mode)))
16162 {
16163 int shift, size;
16164 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16165 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16166 if (op0 == 0)
16167 break;
16168 if (GET_CODE (rtl) == SIGN_EXTRACT)
16169 op = DW_OP_shra;
16170 else
16171 op = DW_OP_shr;
16172 mem_loc_result = op0;
16173 size = INTVAL (XEXP (rtl, 1));
16174 shift = INTVAL (XEXP (rtl, 2));
16175 if (BITS_BIG_ENDIAN)
16176 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
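/* The DW_OP_shl below pushes the extracted field toward the most
   significant bits; the following DW_OP_shra (SIGN_EXTRACT) or
   DW_OP_shr (ZERO_EXTRACT) brings it back down so the field is
   properly sign- or zero-extended.  */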
16177 if (shift + size != (int) DWARF2_ADDR_SIZE)
16178 {
16179 add_loc_descr (&mem_loc_result,
16180 int_loc_descriptor (DWARF2_ADDR_SIZE
16181 - shift - size));
16182 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16183 }
16184 if (size != (int) DWARF2_ADDR_SIZE)
16185 {
16186 add_loc_descr (&mem_loc_result,
16187 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16188 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16189 }
16190 }
16191 break;
16192
16193 case IF_THEN_ELSE:
16194 {
16195 dw_loc_descr_ref op2, bra_node, drop_node;
16196 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16197 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16198 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16199 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16200 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16201 VAR_INIT_STATUS_INITIALIZED);
16202 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16203 VAR_INIT_STATUS_INITIALIZED);
16204 if (op0 == NULL || op1 == NULL || op2 == NULL)
16205 break;
16206
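/* Push the "then" value, the "else" value and then the condition.
   DW_OP_bra pops the condition: if it is non-zero, control jumps past
   the DW_OP_swap straight to DW_OP_drop, discarding the "else" value;
   otherwise DW_OP_swap followed by DW_OP_drop discards the "then"
   value instead.  */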
16207 mem_loc_result = op1;
16208 add_loc_descr (&mem_loc_result, op2);
16209 add_loc_descr (&mem_loc_result, op0);
16210 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16211 add_loc_descr (&mem_loc_result, bra_node);
16212 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16213 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16214 add_loc_descr (&mem_loc_result, drop_node);
16215 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16216 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16217 }
16218 break;
16219
16220 case FLOAT_EXTEND:
16221 case FLOAT_TRUNCATE:
16222 case FLOAT:
16223 case UNSIGNED_FLOAT:
16224 case FIX:
16225 case UNSIGNED_FIX:
16226 if (!dwarf_strict || dwarf_version >= 5)
16227 {
16228 dw_die_ref type_die;
16229 dw_loc_descr_ref cvt;
16230
16231 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16232 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16233 if (op0 == NULL)
16234 break;
16235 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16236 && (GET_CODE (rtl) == FLOAT
16237 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16238 {
16239 type_die = base_type_for_mode (int_mode,
16240 GET_CODE (rtl) == UNSIGNED_FLOAT);
16241 if (type_die == NULL)
16242 break;
16243 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16244 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16245 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16246 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16247 add_loc_descr (&op0, cvt);
16248 }
16249 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16250 if (type_die == NULL)
16251 break;
16252 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16253 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16254 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16255 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16256 add_loc_descr (&op0, cvt);
16257 if (is_a <scalar_int_mode> (mode, &int_mode)
16258 && (GET_CODE (rtl) == FIX
16259 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16260 {
16261 op0 = convert_descriptor_to_mode (int_mode, op0);
16262 if (op0 == NULL)
16263 break;
16264 }
16265 mem_loc_result = op0;
16266 }
16267 break;
16268
16269 case CLZ:
16270 case CTZ:
16271 case FFS:
16272 if (is_a <scalar_int_mode> (mode, &int_mode))
16273 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16274 break;
16275
16276 case POPCOUNT:
16277 case PARITY:
16278 if (is_a <scalar_int_mode> (mode, &int_mode))
16279 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16280 break;
16281
16282 case BSWAP:
16283 if (is_a <scalar_int_mode> (mode, &int_mode))
16284 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16285 break;
16286
16287 case ROTATE:
16288 case ROTATERT:
16289 if (is_a <scalar_int_mode> (mode, &int_mode))
16290 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16291 break;
16292
16293 case COMPARE:
16294 /* In theory, we could implement the above. */
16295 /* DWARF cannot represent the unsigned compare operations
16296 natively. */
16297 case SS_MULT:
16298 case US_MULT:
16299 case SS_DIV:
16300 case US_DIV:
16301 case SS_PLUS:
16302 case US_PLUS:
16303 case SS_MINUS:
16304 case US_MINUS:
16305 case SS_NEG:
16306 case US_NEG:
16307 case SS_ABS:
16308 case SS_ASHIFT:
16309 case US_ASHIFT:
16310 case SS_TRUNCATE:
16311 case US_TRUNCATE:
16312 case UNORDERED:
16313 case ORDERED:
16314 case UNEQ:
16315 case UNGE:
16316 case UNGT:
16317 case UNLE:
16318 case UNLT:
16319 case LTGT:
16320 case FRACT_CONVERT:
16321 case UNSIGNED_FRACT_CONVERT:
16322 case SAT_FRACT:
16323 case UNSIGNED_SAT_FRACT:
16324 case SQRT:
16325 case ASM_OPERANDS:
16326 case VEC_MERGE:
16327 case VEC_SELECT:
16328 case VEC_CONCAT:
16329 case VEC_DUPLICATE:
16330 case VEC_SERIES:
16331 case UNSPEC:
16332 case HIGH:
16333 case FMA:
16334 case STRICT_LOW_PART:
16335 case CONST_VECTOR:
16336 case CONST_FIXED:
16337 case CLRSB:
16338 case CLOBBER:
16339 case CLOBBER_HIGH:
16340 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16341 can't express it in the debug info. This can happen e.g. with some
16342 TLS UNSPECs. */
16343 break;
16344
16345 case CONST_STRING:
16346 resolve_one_addr (&rtl);
16347 goto symref;
16348
16349 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16350 the expression. An UNSPEC rtx represents a raw DWARF operation;
16351 new_loc_descr is called for it to build the operation directly.
16352 Otherwise mem_loc_descriptor is called recursively. */
16353 case PARALLEL:
16354 {
16355 int index = 0;
16356 dw_loc_descr_ref exp_result = NULL;
16357
16358 for (; index < XVECLEN (rtl, 0); index++)
16359 {
16360 rtx elem = XVECEXP (rtl, 0, index);
16361 if (GET_CODE (elem) == UNSPEC)
16362 {
16363 /* Each DWARF operation UNSPEC contains two operands; if
16364 one operand is not used for the operation, const0_rtx is
16365 passed. */
16366 gcc_assert (XVECLEN (elem, 0) == 2);
16367
16368 HOST_WIDE_INT dw_op = XINT (elem, 1);
16369 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16370 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16371 exp_result
16372 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16373 oprnd2);
16374 }
16375 else
16376 exp_result
16377 = mem_loc_descriptor (elem, mode, mem_mode,
16378 VAR_INIT_STATUS_INITIALIZED);
16379
16380 if (!mem_loc_result)
16381 mem_loc_result = exp_result;
16382 else
16383 add_loc_descr (&mem_loc_result, exp_result);
16384 }
16385
16386 break;
16387 }
16388
16389 default:
16390 if (flag_checking)
16391 {
16392 print_rtl (stderr, rtl);
16393 gcc_unreachable ();
16394 }
16395 break;
16396 }
16397
16398 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16399 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16400
16401 return mem_loc_result;
16402 }
16403
16404 /* Return a descriptor that describes the concatenation of two locations.
16405 This is typically a complex variable. */
16406
16407 static dw_loc_descr_ref
16408 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16409 {
16410 /* At present we only track constant-sized pieces. */
16411 unsigned int size0, size1;
16412 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16413 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16414 return 0;
16415
16416 dw_loc_descr_ref cc_loc_result = NULL;
16417 dw_loc_descr_ref x0_ref
16418 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16419 dw_loc_descr_ref x1_ref
16420 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16421
16422 if (x0_ref == 0 || x1_ref == 0)
16423 return 0;
16424
16425 cc_loc_result = x0_ref;
16426 add_loc_descr_op_piece (&cc_loc_result, size0);
16427
16428 add_loc_descr (&cc_loc_result, x1_ref);
16429 add_loc_descr_op_piece (&cc_loc_result, size1);
16430
16431 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16432 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16433
16434 return cc_loc_result;
16435 }
16436
16437 /* Return a descriptor that describes the concatenation of N
16438 locations. */
16439
16440 static dw_loc_descr_ref
16441 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16442 {
16443 unsigned int i;
16444 dw_loc_descr_ref cc_loc_result = NULL;
16445 unsigned int n = XVECLEN (concatn, 0);
16446 unsigned int size;
16447
16448 for (i = 0; i < n; ++i)
16449 {
16450 dw_loc_descr_ref ref;
16451 rtx x = XVECEXP (concatn, 0, i);
16452
16453 /* At present we only track constant-sized pieces. */
16454 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16455 return NULL;
16456
16457 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16458 if (ref == NULL)
16459 return NULL;
16460
16461 add_loc_descr (&cc_loc_result, ref);
16462 add_loc_descr_op_piece (&cc_loc_result, size);
16463 }
16464
16465 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16466 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16467
16468 return cc_loc_result;
16469 }
16470
16471 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16472 for DEBUG_IMPLICIT_PTR RTL. */
16473
16474 static dw_loc_descr_ref
16475 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16476 {
16477 dw_loc_descr_ref ret;
16478 dw_die_ref ref;
16479
16480 if (dwarf_strict && dwarf_version < 5)
16481 return NULL;
16482 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16483 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16484 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16485 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16486 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16487 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16488 if (ref)
16489 {
16490 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16491 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16492 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16493 }
16494 else
16495 {
16496 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16497 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16498 }
16499 return ret;
16500 }
16501
16502 /* Output a proper Dwarf location descriptor for a variable or parameter
16503 which is either allocated in a register or in a memory location. For a
16504 register, we just generate an OP_REG and the register number. For a
16505 memory location we provide a Dwarf postfix expression describing how to
16506 generate the (dynamic) address of the object onto the address stack.
16507
16508 MODE is mode of the decl if this loc_descriptor is going to be used in
16509 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16510 allowed, VOIDmode otherwise.
16511
16512 If we don't know how to describe it, return 0. */
16513
16514 static dw_loc_descr_ref
16515 loc_descriptor (rtx rtl, machine_mode mode,
16516 enum var_init_status initialized)
16517 {
16518 dw_loc_descr_ref loc_result = NULL;
16519 scalar_int_mode int_mode;
16520
16521 switch (GET_CODE (rtl))
16522 {
16523 case SUBREG:
16524 /* The case of a subreg may arise when we have a local (register)
16525 variable or a formal (register) parameter which doesn't quite fill
16526 up an entire register. For now, just assume that it is
16527 legitimate to make the Dwarf info refer to the whole register which
16528 contains the given subreg. */
16529 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16530 loc_result = loc_descriptor (SUBREG_REG (rtl),
16531 GET_MODE (SUBREG_REG (rtl)), initialized);
16532 else
16533 goto do_default;
16534 break;
16535
16536 case REG:
16537 loc_result = reg_loc_descriptor (rtl, initialized);
16538 break;
16539
16540 case MEM:
16541 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16542 GET_MODE (rtl), initialized);
16543 if (loc_result == NULL)
16544 loc_result = tls_mem_loc_descriptor (rtl);
16545 if (loc_result == NULL)
16546 {
16547 rtx new_rtl = avoid_constant_pool_reference (rtl);
16548 if (new_rtl != rtl)
16549 loc_result = loc_descriptor (new_rtl, mode, initialized);
16550 }
16551 break;
16552
16553 case CONCAT:
16554 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16555 initialized);
16556 break;
16557
16558 case CONCATN:
16559 loc_result = concatn_loc_descriptor (rtl, initialized);
16560 break;
16561
16562 case VAR_LOCATION:
16563 /* Single part. */
16564 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16565 {
16566 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16567 if (GET_CODE (loc) == EXPR_LIST)
16568 loc = XEXP (loc, 0);
16569 loc_result = loc_descriptor (loc, mode, initialized);
16570 break;
16571 }
16572
16573 rtl = XEXP (rtl, 1);
16574 /* FALLTHRU */
16575
16576 case PARALLEL:
16577 {
16578 rtvec par_elems = XVEC (rtl, 0);
16579 int num_elem = GET_NUM_ELEM (par_elems);
16580 machine_mode mode;
16581 int i, size;
16582
16583 /* Create the first one, so we have something to add to. */
16584 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16585 VOIDmode, initialized);
16586 if (loc_result == NULL)
16587 return NULL;
16588 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16589 /* At present we only track constant-sized pieces. */
16590 if (!GET_MODE_SIZE (mode).is_constant (&size))
16591 return NULL;
16592 add_loc_descr_op_piece (&loc_result, size);
16593 for (i = 1; i < num_elem; i++)
16594 {
16595 dw_loc_descr_ref temp;
16596
16597 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16598 VOIDmode, initialized);
16599 if (temp == NULL)
16600 return NULL;
16601 add_loc_descr (&loc_result, temp);
16602 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16603 /* At present we only track constant-sized pieces. */
16604 if (!GET_MODE_SIZE (mode).is_constant (&size))
16605 return NULL;
16606 add_loc_descr_op_piece (&loc_result, size);
16607 }
16608 }
16609 break;
16610
16611 case CONST_INT:
16612 if (mode != VOIDmode && mode != BLKmode)
16613 {
16614 int_mode = as_a <scalar_int_mode> (mode);
16615 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16616 INTVAL (rtl));
16617 }
16618 break;
16619
16620 case CONST_DOUBLE:
16621 if (mode == VOIDmode)
16622 mode = GET_MODE (rtl);
16623
16624 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16625 {
16626 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16627
16628 /* Note that a CONST_DOUBLE rtx could represent either an integer
16629 or a floating-point constant. A CONST_DOUBLE is used whenever
16630 the constant requires more than one word in order to be
16631 adequately represented. We output CONST_DOUBLEs as blocks. */
16632 scalar_mode smode = as_a <scalar_mode> (mode);
16633 loc_result = new_loc_descr (DW_OP_implicit_value,
16634 GET_MODE_SIZE (smode), 0);
16635 #if TARGET_SUPPORTS_WIDE_INT == 0
16636 if (!SCALAR_FLOAT_MODE_P (smode))
16637 {
16638 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16639 loc_result->dw_loc_oprnd2.v.val_double
16640 = rtx_to_double_int (rtl);
16641 }
16642 else
16643 #endif
16644 {
16645 unsigned int length = GET_MODE_SIZE (smode);
16646 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16647
16648 insert_float (rtl, array);
16649 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16650 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16651 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16652 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16653 }
16654 }
16655 break;
16656
16657 case CONST_WIDE_INT:
16658 if (mode == VOIDmode)
16659 mode = GET_MODE (rtl);
16660
16661 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16662 {
16663 int_mode = as_a <scalar_int_mode> (mode);
16664 loc_result = new_loc_descr (DW_OP_implicit_value,
16665 GET_MODE_SIZE (int_mode), 0);
16666 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16667 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16668 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16669 }
16670 break;
16671
16672 case CONST_VECTOR:
16673 if (mode == VOIDmode)
16674 mode = GET_MODE (rtl);
16675
16676 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16677 {
16678 unsigned int length;
16679 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16680 return NULL;
16681
16682 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16683 unsigned char *array
16684 = ggc_vec_alloc<unsigned char> (length * elt_size);
16685 unsigned int i;
16686 unsigned char *p;
16687 machine_mode imode = GET_MODE_INNER (mode);
16688
16689 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16690 switch (GET_MODE_CLASS (mode))
16691 {
16692 case MODE_VECTOR_INT:
16693 for (i = 0, p = array; i < length; i++, p += elt_size)
16694 {
16695 rtx elt = CONST_VECTOR_ELT (rtl, i);
16696 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16697 }
16698 break;
16699
16700 case MODE_VECTOR_FLOAT:
16701 for (i = 0, p = array; i < length; i++, p += elt_size)
16702 {
16703 rtx elt = CONST_VECTOR_ELT (rtl, i);
16704 insert_float (elt, p);
16705 }
16706 break;
16707
16708 default:
16709 gcc_unreachable ();
16710 }
16711
16712 loc_result = new_loc_descr (DW_OP_implicit_value,
16713 length * elt_size, 0);
16714 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16715 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16716 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16717 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16718 }
16719 break;
16720
16721 case CONST:
16722 if (mode == VOIDmode
16723 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16724 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16725 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16726 {
16727 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16728 break;
16729 }
16730 /* FALLTHROUGH */
16731 case SYMBOL_REF:
16732 if (!const_ok_for_output (rtl))
16733 break;
16734 /* FALLTHROUGH */
16735 case LABEL_REF:
16736 if (is_a <scalar_int_mode> (mode, &int_mode)
16737 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16738 && (dwarf_version >= 4 || !dwarf_strict))
16739 {
16740 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16741 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16742 vec_safe_push (used_rtx_array, rtl);
16743 }
16744 break;
16745
16746 case DEBUG_IMPLICIT_PTR:
16747 loc_result = implicit_ptr_descriptor (rtl, 0);
16748 break;
16749
16750 case PLUS:
16751 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16752 && CONST_INT_P (XEXP (rtl, 1)))
16753 {
16754 loc_result
16755 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16756 break;
16757 }
16758 /* FALLTHRU */
16759 do_default:
16760 default:
16761 if ((is_a <scalar_int_mode> (mode, &int_mode)
16762 && GET_MODE (rtl) == int_mode
16763 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16764 && dwarf_version >= 4)
16765 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16766 {
16767 /* Value expression. */
16768 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16769 if (loc_result)
16770 add_loc_descr (&loc_result,
16771 new_loc_descr (DW_OP_stack_value, 0, 0));
16772 }
16773 break;
16774 }
16775
16776 return loc_result;
16777 }
16778
16779 /* We need to figure out what section we should use as the base for the
16780 address ranges where a given location is valid.
16781 1. If this particular DECL has a section associated with it, use that.
16782 2. If this function has a section associated with it, use that.
16783 3. Otherwise, use the text section.
16784 XXX: If you split a variable across multiple sections, we won't notice. */
16785
16786 static const char *
16787 secname_for_decl (const_tree decl)
16788 {
16789 const char *secname;
16790
16791 if (VAR_OR_FUNCTION_DECL_P (decl)
16792 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16793 && DECL_SECTION_NAME (decl))
16794 secname = DECL_SECTION_NAME (decl);
16795 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16796 secname = DECL_SECTION_NAME (current_function_decl);
16797 else if (cfun && in_cold_section_p)
16798 secname = crtl->subsections.cold_section_label;
16799 else
16800 secname = text_section_label;
16801
16802 return secname;
16803 }
16804
16805 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16806
16807 static bool
16808 decl_by_reference_p (tree decl)
16809 {
16810 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16811 || VAR_P (decl))
16812 && DECL_BY_REFERENCE (decl));
16813 }
16814
16815 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16816 for VARLOC. */
16817
16818 static dw_loc_descr_ref
16819 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16820 enum var_init_status initialized)
16821 {
16822 int have_address = 0;
16823 dw_loc_descr_ref descr;
16824 machine_mode mode;
16825
16826 if (want_address != 2)
16827 {
16828 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16829 /* Single part. */
16830 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16831 {
16832 varloc = PAT_VAR_LOCATION_LOC (varloc);
16833 if (GET_CODE (varloc) == EXPR_LIST)
16834 varloc = XEXP (varloc, 0);
16835 mode = GET_MODE (varloc);
16836 if (MEM_P (varloc))
16837 {
16838 rtx addr = XEXP (varloc, 0);
16839 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16840 mode, initialized);
16841 if (descr)
16842 have_address = 1;
16843 else
16844 {
16845 rtx x = avoid_constant_pool_reference (varloc);
16846 if (x != varloc)
16847 descr = mem_loc_descriptor (x, mode, VOIDmode,
16848 initialized);
16849 }
16850 }
16851 else
16852 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16853 }
16854 else
16855 return 0;
16856 }
16857 else
16858 {
16859 if (GET_CODE (varloc) == VAR_LOCATION)
16860 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16861 else
16862 mode = DECL_MODE (loc);
16863 descr = loc_descriptor (varloc, mode, initialized);
16864 have_address = 1;
16865 }
16866
16867 if (!descr)
16868 return 0;
16869
16870 if (want_address == 2 && !have_address
16871 && (dwarf_version >= 4 || !dwarf_strict))
16872 {
16873 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16874 {
16875 expansion_failed (loc, NULL_RTX,
16876 "DWARF address size mismatch");
16877 return 0;
16878 }
16879 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16880 have_address = 1;
16881 }
16882 /* Show if we can't fill the request for an address. */
16883 if (want_address && !have_address)
16884 {
16885 expansion_failed (loc, NULL_RTX,
16886 "Want address and only have value");
16887 return 0;
16888 }
16889
16890 /* If we've got an address and don't want one, dereference. */
16891 if (!want_address && have_address)
16892 {
16893 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16894 enum dwarf_location_atom op;
16895
16896 if (size > DWARF2_ADDR_SIZE || size == -1)
16897 {
16898 expansion_failed (loc, NULL_RTX,
16899 "DWARF address size mismatch");
16900 return 0;
16901 }
16902 else if (size == DWARF2_ADDR_SIZE)
16903 op = DW_OP_deref;
16904 else
16905 op = DW_OP_deref_size;
16906
16907 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16908 }
16909
16910 return descr;
16911 }
16912
16913 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16914 if it is not possible. */
16915
16916 static dw_loc_descr_ref
16917 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16918 {
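/* DW_OP_piece can only describe whole bytes starting at bit offset zero;
   anything else needs DW_OP_bit_piece, which is only emitted for
   DWARF 3 and later or when strict DWARF is not requested.  */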
16919 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16920 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16921 else if (dwarf_version >= 3 || !dwarf_strict)
16922 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16923 else
16924 return NULL;
16925 }
16926
16927 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16928 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16929
16930 static dw_loc_descr_ref
16931 dw_sra_loc_expr (tree decl, rtx loc)
16932 {
16933 rtx p;
16934 unsigned HOST_WIDE_INT padsize = 0;
16935 dw_loc_descr_ref descr, *descr_tail;
16936 unsigned HOST_WIDE_INT decl_size;
16937 rtx varloc;
16938 enum var_init_status initialized;
16939
16940 if (DECL_SIZE (decl) == NULL
16941 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16942 return NULL;
16943
16944 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16945 descr = NULL;
16946 descr_tail = &descr;
16947
16948 for (p = loc; p; p = XEXP (p, 1))
16949 {
16950 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16951 rtx loc_note = *decl_piece_varloc_ptr (p);
16952 dw_loc_descr_ref cur_descr;
16953 dw_loc_descr_ref *tail, last = NULL;
16954 unsigned HOST_WIDE_INT opsize = 0;
16955
16956 if (loc_note == NULL_RTX
16957 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16958 {
16959 padsize += bitsize;
16960 continue;
16961 }
16962 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16963 varloc = NOTE_VAR_LOCATION (loc_note);
16964 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16965 if (cur_descr == NULL)
16966 {
16967 padsize += bitsize;
16968 continue;
16969 }
16970
16971 /* Check that cur_descr either doesn't use
16972 DW_OP_*piece operations, or their sum is equal
16973 to bitsize. Otherwise we can't embed it. */
16974 for (tail = &cur_descr; *tail != NULL;
16975 tail = &(*tail)->dw_loc_next)
16976 if ((*tail)->dw_loc_opc == DW_OP_piece)
16977 {
16978 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16979 * BITS_PER_UNIT;
16980 last = *tail;
16981 }
16982 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16983 {
16984 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16985 last = *tail;
16986 }
16987
16988 if (last != NULL && opsize != bitsize)
16989 {
16990 padsize += bitsize;
16991 /* Discard the current piece of the descriptor and release any
16992 addr_table entries it uses. */
16993 remove_loc_list_addr_table_entries (cur_descr);
16994 continue;
16995 }
16996
16997 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16998 expression, which means that those bits are optimized out. */
16999 if (padsize)
17000 {
17001 if (padsize > decl_size)
17002 {
17003 remove_loc_list_addr_table_entries (cur_descr);
17004 goto discard_descr;
17005 }
17006 decl_size -= padsize;
17007 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17008 if (*descr_tail == NULL)
17009 {
17010 remove_loc_list_addr_table_entries (cur_descr);
17011 goto discard_descr;
17012 }
17013 descr_tail = &(*descr_tail)->dw_loc_next;
17014 padsize = 0;
17015 }
17016 *descr_tail = cur_descr;
17017 descr_tail = tail;
17018 if (bitsize > decl_size)
17019 goto discard_descr;
17020 decl_size -= bitsize;
17021 if (last == NULL)
17022 {
17023 HOST_WIDE_INT offset = 0;
17024 if (GET_CODE (varloc) == VAR_LOCATION
17025 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17026 {
17027 varloc = PAT_VAR_LOCATION_LOC (varloc);
17028 if (GET_CODE (varloc) == EXPR_LIST)
17029 varloc = XEXP (varloc, 0);
17030 }
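/* Strip wrappers that do not change where the value lives (CONST,
   extensions, SUBREGs) so that the MEM_P test below sees the
   underlying memory reference, if there is one.  */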
17031 do
17032 {
17033 if (GET_CODE (varloc) == CONST
17034 || GET_CODE (varloc) == SIGN_EXTEND
17035 || GET_CODE (varloc) == ZERO_EXTEND)
17036 varloc = XEXP (varloc, 0);
17037 else if (GET_CODE (varloc) == SUBREG)
17038 varloc = SUBREG_REG (varloc);
17039 else
17040 break;
17041 }
17042 while (1);
17043 /* The DW_OP_bit_piece offset should be zero for register
17044 or implicit location descriptions and for empty location
17045 descriptions, but for memory addresses it needs big-endian
17046 adjustment. */
17047 if (MEM_P (varloc))
17048 {
17049 unsigned HOST_WIDE_INT memsize;
17050 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17051 goto discard_descr;
17052 memsize *= BITS_PER_UNIT;
17053 if (memsize != bitsize)
17054 {
17055 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17056 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17057 goto discard_descr;
17058 if (memsize < bitsize)
17059 goto discard_descr;
17060 if (BITS_BIG_ENDIAN)
17061 offset = memsize - bitsize;
17062 }
17063 }
17064
17065 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17066 if (*descr_tail == NULL)
17067 goto discard_descr;
17068 descr_tail = &(*descr_tail)->dw_loc_next;
17069 }
17070 }
17071
17072 /* If there were any non-empty expressions, add padding till the end of
17073 the decl. */
17074 if (descr != NULL && decl_size != 0)
17075 {
17076 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17077 if (*descr_tail == NULL)
17078 goto discard_descr;
17079 }
17080 return descr;
17081
17082 discard_descr:
17083 /* Discard the descriptor and release any addr_table entries it uses. */
17084 remove_loc_list_addr_table_entries (descr);
17085 return NULL;
17086 }
17087
17088 /* Return the dwarf representation of the location list LOC_LIST of
17089 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17090 function. */
17091
17092 static dw_loc_list_ref
17093 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17094 {
17095 const char *endname, *secname;
17096 var_loc_view endview;
17097 rtx varloc;
17098 enum var_init_status initialized;
17099 struct var_loc_node *node;
17100 dw_loc_descr_ref descr;
17101 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17102 dw_loc_list_ref list = NULL;
17103 dw_loc_list_ref *listp = &list;
17104
17105 /* Now that we know what section we are using for a base,
17106 actually construct the list of locations.
17107 The first location information is what is passed to the
17108 function that creates the location list, and the remaining
17109 locations just get added on to that list.
17110 Note that we only know the start address for a location
17111 (i.e. location changes), so to build the range, we use
17112 the range [current location start, next location start].
17113 This means we have to special case the last node, and generate
17114 a range of [last location start, end of function label]. */
17115
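/* secname_for_decl consults in_cold_section_p, so temporarily set it so
   that SECNAME reflects the partition in which this variable's first
   range lives, then restore it.  */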
17116 if (cfun && crtl->has_bb_partition)
17117 {
17118 bool save_in_cold_section_p = in_cold_section_p;
17119 in_cold_section_p = first_function_block_is_cold;
17120 if (loc_list->last_before_switch == NULL)
17121 in_cold_section_p = !in_cold_section_p;
17122 secname = secname_for_decl (decl);
17123 in_cold_section_p = save_in_cold_section_p;
17124 }
17125 else
17126 secname = secname_for_decl (decl);
17127
17128 for (node = loc_list->first; node; node = node->next)
17129 {
17130 bool range_across_switch = false;
17131 if (GET_CODE (node->loc) == EXPR_LIST
17132 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17133 {
17134 if (GET_CODE (node->loc) == EXPR_LIST)
17135 {
17136 descr = NULL;
17137 /* This requires DW_OP_{,bit_}piece, which is not usable
17138 inside DWARF expressions. */
17139 if (want_address == 2)
17140 descr = dw_sra_loc_expr (decl, node->loc);
17141 }
17142 else
17143 {
17144 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17145 varloc = NOTE_VAR_LOCATION (node->loc);
17146 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17147 }
17148 if (descr)
17149 {
17150 /* If section switch happens in between node->label
17151 and node->next->label (or end of function) and
17152 we can't emit it as a single entry list,
17153 emit two ranges, first one ending at the end
17154 of first partition and second one starting at the
17155 beginning of second partition. */
17156 if (node == loc_list->last_before_switch
17157 && (node != loc_list->first || loc_list->first->next
17158 /* If we are to emit a view number, we will emit
17159 a loclist rather than a single location
17160 expression for the entire function (see
17161 loc_list_has_views), so we have to split the
17162 range that straddles across partitions. */
17163 || !ZERO_VIEW_P (node->view))
17164 && current_function_decl)
17165 {
17166 endname = cfun->fde->dw_fde_end;
17167 endview = 0;
17168 range_across_switch = true;
17169 }
17170 /* The variable has a location between NODE->LABEL and
17171 NODE->NEXT->LABEL. */
17172 else if (node->next)
17173 endname = node->next->label, endview = node->next->view;
17174 /* If the variable has a location at the last label
17175 it keeps its location until the end of function. */
17176 else if (!current_function_decl)
17177 endname = text_end_label, endview = 0;
17178 else
17179 {
17180 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17181 current_function_funcdef_no);
17182 endname = ggc_strdup (label_id);
17183 endview = 0;
17184 }
17185
17186 *listp = new_loc_list (descr, node->label, node->view,
17187 endname, endview, secname);
17188 if (TREE_CODE (decl) == PARM_DECL
17189 && node == loc_list->first
17190 && NOTE_P (node->loc)
17191 && strcmp (node->label, endname) == 0)
17192 (*listp)->force = true;
17193 listp = &(*listp)->dw_loc_next;
17194 }
17195 }
17196
17197 if (cfun
17198 && crtl->has_bb_partition
17199 && node == loc_list->last_before_switch)
17200 {
17201 bool save_in_cold_section_p = in_cold_section_p;
17202 in_cold_section_p = !first_function_block_is_cold;
17203 secname = secname_for_decl (decl);
17204 in_cold_section_p = save_in_cold_section_p;
17205 }
17206
17207 if (range_across_switch)
17208 {
17209 if (GET_CODE (node->loc) == EXPR_LIST)
17210 descr = dw_sra_loc_expr (decl, node->loc);
17211 else
17212 {
17213 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17214 varloc = NOTE_VAR_LOCATION (node->loc);
17215 descr = dw_loc_list_1 (decl, varloc, want_address,
17216 initialized);
17217 }
17218 gcc_assert (descr);
17219 /* The variable has a location between NODE->LABEL and
17220 NODE->NEXT->LABEL. */
17221 if (node->next)
17222 endname = node->next->label, endview = node->next->view;
17223 else
17224 endname = cfun->fde->dw_fde_second_end, endview = 0;
17225 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17226 endname, endview, secname);
17227 listp = &(*listp)->dw_loc_next;
17228 }
17229 }
17230
17231 /* Try to avoid the overhead of a location list by emitting a location
17232 expression instead, but only if we didn't have more than one
17233 location entry in the first place. If some entries were not
17234 representable, we don't want to pretend that the one entry that was
17235 representable applies to the entire scope in which the variable is
17236 available. */
17237 if (list && loc_list->first->next)
17238 gen_llsym (list);
17239 else
17240 maybe_gen_llsym (list);
17241
17242 return list;
17243 }
17244
17245 /* Return true if the loc_list has only a single element and thus can be
17246 represented as a location description. */
17247
17248 static bool
17249 single_element_loc_list_p (dw_loc_list_ref list)
17250 {
17251 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17252 return !list->ll_symbol;
17253 }
17254
17255 /* Duplicate a single element of location list. */
17256
17257 static inline dw_loc_descr_ref
17258 copy_loc_descr (dw_loc_descr_ref ref)
17259 {
17260 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17261 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17262 return copy;
17263 }
17264
17265 /* To each location in list LIST append loc descr REF. */
17266
17267 static void
17268 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17269 {
17270 dw_loc_descr_ref copy;
17271 add_loc_descr (&list->expr, ref);
17272 list = list->dw_loc_next;
17273 while (list)
17274 {
17275 copy = copy_loc_descr (ref);
17276 add_loc_descr (&list->expr, copy);
17277 while (copy->dw_loc_next)
17278 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17279 list = list->dw_loc_next;
17280 }
17281 }
17282
17283 /* To each location in list LIST prepend loc descr REF. */
17284
17285 static void
17286 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17287 {
17288 dw_loc_descr_ref copy;
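/* Remember the head of the first location's original expression; for the
   remaining locations the prepended prefix REF is copied only up to that
   point, and each location's own expression is then spliced back in.  */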
17289 dw_loc_descr_ref ref_end = list->expr;
17290 add_loc_descr (&ref, list->expr);
17291 list->expr = ref;
17292 list = list->dw_loc_next;
17293 while (list)
17294 {
17295 dw_loc_descr_ref end = list->expr;
17296 list->expr = copy = copy_loc_descr (ref);
17297 while (copy->dw_loc_next != ref_end)
17298 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17299 copy->dw_loc_next = end;
17300 list = list->dw_loc_next;
17301 }
17302 }
17303
17304 /* Given two lists RET and LIST,
17305 produce a location list that is the result of adding the expression in LIST
17306 to the expression in RET at each position in the program.
17307 This might be destructive on both RET and LIST.
17308
17309 TODO: We handle only the simple cases of RET or LIST having at most one
17310 element. The general case would involve sorting the lists in program order
17311 and merging them, which will need some additional work.
17312 Adding that will improve the quality of debug info, especially for SRA-ed
17313 structures. */
17314
17315 static void
17316 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17317 {
17318 if (!list)
17319 return;
17320 if (!*ret)
17321 {
17322 *ret = list;
17323 return;
17324 }
17325 if (!list->dw_loc_next)
17326 {
17327 add_loc_descr_to_each (*ret, list->expr);
17328 return;
17329 }
17330 if (!(*ret)->dw_loc_next)
17331 {
17332 prepend_loc_descr_to_each (list, (*ret)->expr);
17333 *ret = list;
17334 return;
17335 }
17336 expansion_failed (NULL_TREE, NULL_RTX,
17337 "Don't know how to merge two non-trivial"
17338 " location lists.\n");
17339 *ret = NULL;
17340 return;
17341 }
17342
17343 /* LOC is a constant expression. Try our luck: look it up in the constant
17344 pool and return a loc_descr for its address. */
17345
17346 static dw_loc_descr_ref
17347 cst_pool_loc_descr (tree loc)
17348 {
17349 /* Get an RTL for this, if something has been emitted. */
17350 rtx rtl = lookup_constant_def (loc);
17351
17352 if (!rtl || !MEM_P (rtl))
17353 {
17354 gcc_assert (!rtl);
17355 return 0;
17356 }
17357 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17358
17359 /* TODO: We might get more coverage if we actually delayed expansion
17360 of all expressions till the end of compilation, when constant pools are
17361 fully populated. */
17362 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17363 {
17364 expansion_failed (loc, NULL_RTX,
17365 "CST value in contant pool but not marked.");
17366 return 0;
17367 }
17368 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17369 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17370 }
17371
17372 /* Return a dw_loc_list representing the address of addr_expr LOC
17373 by looking for an inner INDIRECT_REF expression and turning
17374 it into simple arithmetic.
17375
17376 See loc_list_from_tree for the meaning of CONTEXT. */
17377
17378 static dw_loc_list_ref
17379 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17380 loc_descr_context *context)
17381 {
17382 tree obj, offset;
17383 poly_int64 bitsize, bitpos, bytepos;
17384 machine_mode mode;
17385 int unsignedp, reversep, volatilep = 0;
17386 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17387
17388 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17389 &bitsize, &bitpos, &offset, &mode,
17390 &unsignedp, &reversep, &volatilep);
17391 STRIP_NOPS (obj);
17392 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17393 {
17394 expansion_failed (loc, NULL_RTX, "bitfield access");
17395 return 0;
17396 }
17397 if (!INDIRECT_REF_P (obj))
17398 {
17399 expansion_failed (obj,
17400 NULL_RTX, "no indirect ref in inner reference");
17401 return 0;
17402 }
17403 if (!offset && known_eq (bitpos, 0))
17404 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17405 context);
17406 else if (toplev
17407 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17408 && (dwarf_version >= 4 || !dwarf_strict))
17409 {
17410 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17411 if (!list_ret)
17412 return 0;
17413 if (offset)
17414 {
17415 /* Variable offset. */
17416 list_ret1 = loc_list_from_tree (offset, 0, context);
17417 if (list_ret1 == 0)
17418 return 0;
17419 add_loc_list (&list_ret, list_ret1);
17420 if (!list_ret)
17421 return 0;
17422 add_loc_descr_to_each (list_ret,
17423 new_loc_descr (DW_OP_plus, 0, 0));
17424 }
17425 HOST_WIDE_INT value;
17426 if (bytepos.is_constant (&value) && value > 0)
17427 add_loc_descr_to_each (list_ret,
17428 new_loc_descr (DW_OP_plus_uconst, value, 0));
17429 else if (maybe_ne (bytepos, 0))
17430 loc_list_plus_const (list_ret, bytepos);
17431 add_loc_descr_to_each (list_ret,
17432 new_loc_descr (DW_OP_stack_value, 0, 0));
17433 }
17434 return list_ret;
17435 }
17436
17437 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17438 all operations from LOC are nops, move to the last one. Insert into NOPS all
17439 operations that are skipped. */
17440
17441 static void
17442 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17443 hash_set<dw_loc_descr_ref> &nops)
17444 {
17445 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17446 {
17447 nops.add (loc);
17448 loc = loc->dw_loc_next;
17449 }
17450 }
17451
17452 /* Helper for loc_descr_without_nops: free the location description operation
17453 P. */
17454
17455 bool
17456 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17457 {
17458 ggc_free (loc);
17459 return true;
17460 }
17461
17462 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17463 finishes LOC. */
17464
17465 static void
17466 loc_descr_without_nops (dw_loc_descr_ref &loc)
17467 {
17468 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17469 return;
17470
17471 /* Set of all DW_OP_nop operations we remove. */
17472 hash_set<dw_loc_descr_ref> nops;
17473
17474 /* First, strip all prefix NOP operations in order to keep the head of the
17475 operations list. */
17476 loc_descr_to_next_no_nop (loc, nops);
17477
17478 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17479 {
17480 /* For control flow operations: strip "prefix" nops in destination
17481 labels. */
17482 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17483 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17484 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17485 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17486
17487 /* Do the same for the operations that follow, then move to the next
17488 iteration. */
17489 if (cur->dw_loc_next != NULL)
17490 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17491 cur = cur->dw_loc_next;
17492 }
17493
17494 nops.traverse<void *, free_loc_descr> (NULL);
17495 }
17496
17497
17498 struct dwarf_procedure_info;
17499
17500 /* Helper structure for location descriptions generation. */
17501 struct loc_descr_context
17502 {
17503 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17504 NULL_TREE if DW_OP_push_object_address is invalid for this location
17505 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17506 tree context_type;
17507 /* The ..._DECL node that should be translated as a
17508 DW_OP_push_object_address operation. */
17509 tree base_decl;
17510 /* Information about the DWARF procedure we are currently generating. NULL if
17511 we are not generating a DWARF procedure. */
17512 struct dwarf_procedure_info *dpi;
17513 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17514 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17515 bool placeholder_arg;
17516 /* True if PLACEHOLDER_EXPR has been seen. */
17517 bool placeholder_seen;
17518 };
17519
17520 /* DWARF procedures generation
17521
17522 DWARF expressions (aka. location descriptions) are used to encode variable
17523 quantities such as sizes or offsets. Such computations can have redundant
17524 parts that can be factorized in order to reduce the size of the output debug
17525 information. This is the whole point of DWARF procedures.
17526
17527 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17528 already factorized into functions ("size functions") in order to handle very
17529 big and complex types. Such functions are quite simple: they have integral
17530 arguments, they return an integral result and their body contains only a
17531 return statement with arithmetic expressions. This is the only kind of
17532 function we are interested in translating into DWARF procedures, here.
17533
17534 DWARF expressions and DWARF procedures are executed using a stack, so we have
17535 to define some calling convention for them to interact. Let's say that:
17536
17537 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17538 all arguments in reverse order (right-to-left) so that when the DWARF
17539 procedure execution starts, the first argument is the top of the stack.
17540
17541 - Then, when returning, the DWARF procedure must have consumed all arguments
17542 on the stack, must have pushed the result and touched nothing else.
17543
17544 - Each integral argument and the result have integral types and can be held
17545 in a single stack slot.
17546
17547 - We call "frame offset" the number of stack slots that are "under DWARF
17548 procedure control": it includes the argument slots, the temporaries and
17549 the result slot. Thus, it is equal to the number of arguments when the
17550 procedure execution starts and must be equal to one (the result) when it
17551 returns. */
17552
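/* As an illustrative sketch only (not a literal excerpt of what this file
   emits; the exact opcodes depend on loc_descriptor_from_tree), a size
   function such as

       size_t size (int n) { return n * 4; }

   could become a DW_TAG_dwarf_procedure whose DW_AT_location is

       DW_OP_dup; DW_OP_lit4; DW_OP_mul;    <- push N * 4 on top of N
       DW_OP_swap; DW_OP_drop               <- epilogue: drop the argument

   and a caller needing size (8) would emit

       DW_OP_lit8; DW_OP_call4 <DIE of the procedure above>

   leaving a single stack slot holding 32, as required by the convention
   described above.  */
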
17553 /* Helper structure used when generating operations for a DWARF procedure. */
17554 struct dwarf_procedure_info
17555 {
17556 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17557 currently translated. */
17558 tree fndecl;
17559 /* The number of arguments FNDECL takes. */
17560 unsigned args_count;
17561 };
17562
17563 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17564 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17565 equate it to this DIE. */
17566
17567 static dw_die_ref
17568 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17569 dw_die_ref parent_die)
17570 {
17571 dw_die_ref dwarf_proc_die;
17572
17573 if ((dwarf_version < 3 && dwarf_strict)
17574 || location == NULL)
17575 return NULL;
17576
17577 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17578 if (fndecl)
17579 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17580 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17581 return dwarf_proc_die;
17582 }
17583
17584 /* Return whether TYPE is a supported type as a DWARF procedure argument
17585 type or return type (we handle only scalar types and pointer types that
17586 aren't wider than the DWARF expression evaluation stack). */
17587
17588 static bool
17589 is_handled_procedure_type (tree type)
17590 {
17591 return ((INTEGRAL_TYPE_P (type)
17592 || TREE_CODE (type) == OFFSET_TYPE
17593 || TREE_CODE (type) == POINTER_TYPE)
17594 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17595 }
17596
17597 /* Helper for resolve_args_picking: do the same but stop when coming across
17598 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17599 offset *before* evaluating the corresponding operation. */
17600
17601 static bool
17602 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17603 struct dwarf_procedure_info *dpi,
17604 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17605 {
17606 /* The "frame_offset" identifier is already used to name a macro... */
17607 unsigned frame_offset_ = initial_frame_offset;
17608 dw_loc_descr_ref l;
17609
17610 for (l = loc; l != NULL;)
17611 {
17612 bool existed;
17613 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17614
17615 /* If we already met this node, there is nothing to compute anymore. */
17616 if (existed)
17617 {
17618 /* Make sure that the stack size is consistent wherever the execution
17619 flow comes from. */
17620 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17621 break;
17622 }
17623 l_frame_offset = frame_offset_;
17624
17625 /* If needed, relocate the picking offset with respect to the frame
17626 offset. */
17627 if (l->frame_offset_rel)
17628 {
17629 unsigned HOST_WIDE_INT off;
17630 switch (l->dw_loc_opc)
17631 {
17632 case DW_OP_pick:
17633 off = l->dw_loc_oprnd1.v.val_unsigned;
17634 break;
17635 case DW_OP_dup:
17636 off = 0;
17637 break;
17638 case DW_OP_over:
17639 off = 1;
17640 break;
17641 default:
17642 gcc_unreachable ();
17643 }
17644 /* frame_offset_ is the size of the current stack frame, including
17645 incoming arguments. Besides, the arguments are pushed
17646 right-to-left. Thus, in order to access the Nth argument from
17647 this operation node, the picking has to skip temporaries *plus*
17648 one stack slot per argument (0 for the first one, 1 for the second
17649 one, etc.).
17650
17651 The targeted argument number (N) is already set as the operand,
17652 and the number of temporaries can be computed with:
17653 frame_offset_ - dpi->args_count */
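/* For instance (illustrative): with args_count == 2 and one temporary
   already pushed (frame_offset_ == 3), accessing the second argument
   (whose 0-based index 1 was set as the operand) yields
   off = 1 + 3 - 2 = 2, i.e. DW_OP_pick 2.  */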
17654 off += frame_offset_ - dpi->args_count;
17655
17656 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17657 if (off > 255)
17658 return false;
17659
17660 if (off == 0)
17661 {
17662 l->dw_loc_opc = DW_OP_dup;
17663 l->dw_loc_oprnd1.v.val_unsigned = 0;
17664 }
17665 else if (off == 1)
17666 {
17667 l->dw_loc_opc = DW_OP_over;
17668 l->dw_loc_oprnd1.v.val_unsigned = 0;
17669 }
17670 else
17671 {
17672 l->dw_loc_opc = DW_OP_pick;
17673 l->dw_loc_oprnd1.v.val_unsigned = off;
17674 }
17675 }
17676
17677 /* Update frame_offset according to the effect the current operation has
17678 on the stack. */
17679 switch (l->dw_loc_opc)
17680 {
17681 case DW_OP_deref:
17682 case DW_OP_swap:
17683 case DW_OP_rot:
17684 case DW_OP_abs:
17685 case DW_OP_neg:
17686 case DW_OP_not:
17687 case DW_OP_plus_uconst:
17688 case DW_OP_skip:
17689 case DW_OP_reg0:
17690 case DW_OP_reg1:
17691 case DW_OP_reg2:
17692 case DW_OP_reg3:
17693 case DW_OP_reg4:
17694 case DW_OP_reg5:
17695 case DW_OP_reg6:
17696 case DW_OP_reg7:
17697 case DW_OP_reg8:
17698 case DW_OP_reg9:
17699 case DW_OP_reg10:
17700 case DW_OP_reg11:
17701 case DW_OP_reg12:
17702 case DW_OP_reg13:
17703 case DW_OP_reg14:
17704 case DW_OP_reg15:
17705 case DW_OP_reg16:
17706 case DW_OP_reg17:
17707 case DW_OP_reg18:
17708 case DW_OP_reg19:
17709 case DW_OP_reg20:
17710 case DW_OP_reg21:
17711 case DW_OP_reg22:
17712 case DW_OP_reg23:
17713 case DW_OP_reg24:
17714 case DW_OP_reg25:
17715 case DW_OP_reg26:
17716 case DW_OP_reg27:
17717 case DW_OP_reg28:
17718 case DW_OP_reg29:
17719 case DW_OP_reg30:
17720 case DW_OP_reg31:
17721 case DW_OP_bregx:
17722 case DW_OP_piece:
17723 case DW_OP_deref_size:
17724 case DW_OP_nop:
17725 case DW_OP_bit_piece:
17726 case DW_OP_implicit_value:
17727 case DW_OP_stack_value:
17728 break;
17729
17730 case DW_OP_addr:
17731 case DW_OP_const1u:
17732 case DW_OP_const1s:
17733 case DW_OP_const2u:
17734 case DW_OP_const2s:
17735 case DW_OP_const4u:
17736 case DW_OP_const4s:
17737 case DW_OP_const8u:
17738 case DW_OP_const8s:
17739 case DW_OP_constu:
17740 case DW_OP_consts:
17741 case DW_OP_dup:
17742 case DW_OP_over:
17743 case DW_OP_pick:
17744 case DW_OP_lit0:
17745 case DW_OP_lit1:
17746 case DW_OP_lit2:
17747 case DW_OP_lit3:
17748 case DW_OP_lit4:
17749 case DW_OP_lit5:
17750 case DW_OP_lit6:
17751 case DW_OP_lit7:
17752 case DW_OP_lit8:
17753 case DW_OP_lit9:
17754 case DW_OP_lit10:
17755 case DW_OP_lit11:
17756 case DW_OP_lit12:
17757 case DW_OP_lit13:
17758 case DW_OP_lit14:
17759 case DW_OP_lit15:
17760 case DW_OP_lit16:
17761 case DW_OP_lit17:
17762 case DW_OP_lit18:
17763 case DW_OP_lit19:
17764 case DW_OP_lit20:
17765 case DW_OP_lit21:
17766 case DW_OP_lit22:
17767 case DW_OP_lit23:
17768 case DW_OP_lit24:
17769 case DW_OP_lit25:
17770 case DW_OP_lit26:
17771 case DW_OP_lit27:
17772 case DW_OP_lit28:
17773 case DW_OP_lit29:
17774 case DW_OP_lit30:
17775 case DW_OP_lit31:
17776 case DW_OP_breg0:
17777 case DW_OP_breg1:
17778 case DW_OP_breg2:
17779 case DW_OP_breg3:
17780 case DW_OP_breg4:
17781 case DW_OP_breg5:
17782 case DW_OP_breg6:
17783 case DW_OP_breg7:
17784 case DW_OP_breg8:
17785 case DW_OP_breg9:
17786 case DW_OP_breg10:
17787 case DW_OP_breg11:
17788 case DW_OP_breg12:
17789 case DW_OP_breg13:
17790 case DW_OP_breg14:
17791 case DW_OP_breg15:
17792 case DW_OP_breg16:
17793 case DW_OP_breg17:
17794 case DW_OP_breg18:
17795 case DW_OP_breg19:
17796 case DW_OP_breg20:
17797 case DW_OP_breg21:
17798 case DW_OP_breg22:
17799 case DW_OP_breg23:
17800 case DW_OP_breg24:
17801 case DW_OP_breg25:
17802 case DW_OP_breg26:
17803 case DW_OP_breg27:
17804 case DW_OP_breg28:
17805 case DW_OP_breg29:
17806 case DW_OP_breg30:
17807 case DW_OP_breg31:
17808 case DW_OP_fbreg:
17809 case DW_OP_push_object_address:
17810 case DW_OP_call_frame_cfa:
17811 case DW_OP_GNU_variable_value:
17812 ++frame_offset_;
17813 break;
17814
17815 case DW_OP_drop:
17816 case DW_OP_xderef:
17817 case DW_OP_and:
17818 case DW_OP_div:
17819 case DW_OP_minus:
17820 case DW_OP_mod:
17821 case DW_OP_mul:
17822 case DW_OP_or:
17823 case DW_OP_plus:
17824 case DW_OP_shl:
17825 case DW_OP_shr:
17826 case DW_OP_shra:
17827 case DW_OP_xor:
17828 case DW_OP_bra:
17829 case DW_OP_eq:
17830 case DW_OP_ge:
17831 case DW_OP_gt:
17832 case DW_OP_le:
17833 case DW_OP_lt:
17834 case DW_OP_ne:
17835 case DW_OP_regx:
17836 case DW_OP_xderef_size:
17837 --frame_offset_;
17838 break;
17839
17840 case DW_OP_call2:
17841 case DW_OP_call4:
17842 case DW_OP_call_ref:
17843 {
17844 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17845 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17846
17847 if (stack_usage == NULL)
17848 return false;
17849 frame_offset_ += *stack_usage;
17850 break;
17851 }
17852
17853 case DW_OP_implicit_pointer:
17854 case DW_OP_entry_value:
17855 case DW_OP_const_type:
17856 case DW_OP_regval_type:
17857 case DW_OP_deref_type:
17858 case DW_OP_convert:
17859 case DW_OP_reinterpret:
17860 case DW_OP_form_tls_address:
17861 case DW_OP_GNU_push_tls_address:
17862 case DW_OP_GNU_uninit:
17863 case DW_OP_GNU_encoded_addr:
17864 case DW_OP_GNU_implicit_pointer:
17865 case DW_OP_GNU_entry_value:
17866 case DW_OP_GNU_const_type:
17867 case DW_OP_GNU_regval_type:
17868 case DW_OP_GNU_deref_type:
17869 case DW_OP_GNU_convert:
17870 case DW_OP_GNU_reinterpret:
17871 case DW_OP_GNU_parameter_ref:
17872 /* loc_list_from_tree will probably not output these operations for
17873 size functions, so assume they will not appear here. */
17874 /* Fall through... */
17875
17876 default:
17877 gcc_unreachable ();
17878 }
17879
17880 /* Now, follow the control flow (except subroutine calls). */
17881 switch (l->dw_loc_opc)
17882 {
17883 case DW_OP_bra:
17884 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17885 frame_offsets))
17886 return false;
17887 /* Fall through. */
17888
17889 case DW_OP_skip:
17890 l = l->dw_loc_oprnd1.v.val_loc;
17891 break;
17892
17893 case DW_OP_stack_value:
17894 return true;
17895
17896 default:
17897 l = l->dw_loc_next;
17898 break;
17899 }
17900 }
17901
17902 return true;
17903 }
17904
17905 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17906 operations) in order to resolve the operand of DW_OP_pick operations that
17907 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17908 offset *before* LOC is executed. Return whether all relocations were
17909 successful. */
17910
17911 static bool
17912 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17913 struct dwarf_procedure_info *dpi)
17914 {
17915 /* Associate to all visited operations the frame offset *before* evaluating
17916 this operation. */
17917 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17918
17919 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17920 frame_offsets);
17921 }
17922
17923 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17924 Return NULL if it is not possible. */
17925
17926 static dw_die_ref
17927 function_to_dwarf_procedure (tree fndecl)
17928 {
17929 struct loc_descr_context ctx;
17930 struct dwarf_procedure_info dpi;
17931 dw_die_ref dwarf_proc_die;
17932 tree tree_body = DECL_SAVED_TREE (fndecl);
17933 dw_loc_descr_ref loc_body, epilogue;
17934
17935 tree cursor;
17936 unsigned i;
17937
17938 /* Do not generate multiple DWARF procedures for the same function
17939 declaration. */
17940 dwarf_proc_die = lookup_decl_die (fndecl);
17941 if (dwarf_proc_die != NULL)
17942 return dwarf_proc_die;
17943
17944 /* DWARF procedures are available starting with the DWARFv3 standard. */
17945 if (dwarf_version < 3 && dwarf_strict)
17946 return NULL;
17947
17948 /* We handle only functions for which we still have a body, that return a
17949 supported type and that take arguments with supported types. Note that
17950 there is no point translating functions that return nothing. */
17951 if (tree_body == NULL_TREE
17952 || DECL_RESULT (fndecl) == NULL_TREE
17953 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17954 return NULL;
17955
17956 for (cursor = DECL_ARGUMENTS (fndecl);
17957 cursor != NULL_TREE;
17958 cursor = TREE_CHAIN (cursor))
17959 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17960 return NULL;
17961
17962 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17963 if (TREE_CODE (tree_body) != RETURN_EXPR)
17964 return NULL;
17965 tree_body = TREE_OPERAND (tree_body, 0);
17966 if (TREE_CODE (tree_body) != MODIFY_EXPR
17967 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17968 return NULL;
17969 tree_body = TREE_OPERAND (tree_body, 1);
17970
17971 /* Try to translate the body expression itself. Note that this will probably
17972 cause an infinite recursion if its call graph has a cycle. This is very
17973 unlikely for size functions, however, so don't bother with such things at
17974 the moment. */
17975 ctx.context_type = NULL_TREE;
17976 ctx.base_decl = NULL_TREE;
17977 ctx.dpi = &dpi;
17978 ctx.placeholder_arg = false;
17979 ctx.placeholder_seen = false;
17980 dpi.fndecl = fndecl;
17981 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17982 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17983 if (!loc_body)
17984 return NULL;
17985
17986 /* After evaluating all operands in "loc_body", we should still have on the
17987 stack all arguments plus the desired function result (top of the stack).
17988 Generate code in order to keep only the result in our stack frame. */
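/* For example (illustrative), with two arguments the epilogue built below is
   DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop, which turns a stack holding
   ARG2, ARG1 and the result (on top) into a stack holding only the result.  */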
17989 epilogue = NULL;
17990 for (i = 0; i < dpi.args_count; ++i)
17991 {
17992 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17993 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17994 op_couple->dw_loc_next->dw_loc_next = epilogue;
17995 epilogue = op_couple;
17996 }
17997 add_loc_descr (&loc_body, epilogue);
17998 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17999 return NULL;
18000
18001 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18002 earlier because they were considered useful. Now that there is an epilogue,
18003 they no longer are, so give it another try. */
18004 loc_descr_without_nops (loc_body);
18005
18006 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18007 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
18008 though, given that size functions do not come from source, so they should
18009 not have a dedicated DW_TAG_subprogram DIE. */
18010 dwarf_proc_die
18011 = new_dwarf_proc_die (loc_body, fndecl,
18012 get_context_die (DECL_CONTEXT (fndecl)));
18013
18014 /* The called DWARF procedure consumes one stack slot per argument and
18015 returns one stack slot. */
18016 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18017
18018 return dwarf_proc_die;
18019 }
18020
18021
18022 /* Generate a DWARF location list representing LOC.
18023 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
18024 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
18025 If WANT_ADDRESS is 2, an expression computing an address usable in a
18026 location description will be returned (i.e. DW_OP_reg can be used
18027 to refer to register values).
18028
18029 CONTEXT provides information to customize the location descriptions
18030 generation. Its context_type field specifies what type is implicitly
18031 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18032 will not be generated.
18033
18034 Its DPI field determines whether we are generating a DWARF expression for a
18035 DWARF procedure, in which case PARM_DECL references are processed specially.
18036
18037 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18038 and dpi fields were null. */
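
/* Illustrative sketch (assuming an 8-byte DWARF address size and a 4-byte
   global G living at symbol "g", with no location list recorded for it):
   WANT_ADDRESS == 1 would yield "DW_OP_addr g", whereas WANT_ADDRESS == 0
   would yield "DW_OP_addr g; DW_OP_deref_size 4" thanks to the dereference
   logic at the end of this function.  */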
18039
18040 static dw_loc_list_ref
18041 loc_list_from_tree_1 (tree loc, int want_address,
18042 struct loc_descr_context *context)
18043 {
18044 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18045 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18046 int have_address = 0;
18047 enum dwarf_location_atom op;
18048
18049 /* ??? Most of the time we do not take proper care of sign/zero
18050 extending the values. Hopefully this won't be a real
18051 problem... */
18052
18053 if (context != NULL
18054 && context->base_decl == loc
18055 && want_address == 0)
18056 {
18057 if (dwarf_version >= 3 || !dwarf_strict)
18058 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18059 NULL, 0, NULL, 0, NULL);
18060 else
18061 return NULL;
18062 }
18063
18064 switch (TREE_CODE (loc))
18065 {
18066 case ERROR_MARK:
18067 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18068 return 0;
18069
18070 case PLACEHOLDER_EXPR:
18071 /* This case involves extracting fields from an object to determine the
18072 position of other fields. It is supposed to appear only as the first
18073 operand of COMPONENT_REF nodes and to reference precisely the type
18074 that the context allows. */
18075 if (context != NULL
18076 && TREE_TYPE (loc) == context->context_type
18077 && want_address >= 1)
18078 {
18079 if (dwarf_version >= 3 || !dwarf_strict)
18080 {
18081 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18082 have_address = 1;
18083 break;
18084 }
18085 else
18086 return NULL;
18087 }
18088 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18089 the single argument passed by consumer. */
18090 else if (context != NULL
18091 && context->placeholder_arg
18092 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18093 && want_address == 0)
18094 {
18095 ret = new_loc_descr (DW_OP_pick, 0, 0);
18096 ret->frame_offset_rel = 1;
18097 context->placeholder_seen = true;
18098 break;
18099 }
18100 else
18101 expansion_failed (loc, NULL_RTX,
18102 "PLACEHOLDER_EXPR for an unexpected type");
18103 break;
18104
18105 case CALL_EXPR:
18106 {
18107 const int nargs = call_expr_nargs (loc);
18108 tree callee = get_callee_fndecl (loc);
18109 int i;
18110 dw_die_ref dwarf_proc;
18111
18112 if (callee == NULL_TREE)
18113 goto call_expansion_failed;
18114
18115 /* We handle only functions that return an integer. */
18116 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18117 goto call_expansion_failed;
18118
18119 dwarf_proc = function_to_dwarf_procedure (callee);
18120 if (dwarf_proc == NULL)
18121 goto call_expansion_failed;
18122
18123 /* Evaluate arguments right-to-left so that the first argument will
18124 be the top-most one on the stack. */
18125 for (i = nargs - 1; i >= 0; --i)
18126 {
18127 dw_loc_descr_ref loc_descr
18128 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18129 context);
18130
18131 if (loc_descr == NULL)
18132 goto call_expansion_failed;
18133
18134 add_loc_descr (&ret, loc_descr);
18135 }
18136
18137 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18138 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18139 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18140 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18141 add_loc_descr (&ret, ret1);
18142 break;
18143
18144 call_expansion_failed:
18145 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18146 /* There are no opcodes for these operations. */
18147 return 0;
18148 }
18149
18150 case PREINCREMENT_EXPR:
18151 case PREDECREMENT_EXPR:
18152 case POSTINCREMENT_EXPR:
18153 case POSTDECREMENT_EXPR:
18154 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18155 /* There are no opcodes for these operations. */
18156 return 0;
18157
18158 case ADDR_EXPR:
18159 /* If we already want an address, see if there is an INDIRECT_REF inside,
18160 e.g. for &this->field. */
18161 if (want_address)
18162 {
18163 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18164 (loc, want_address == 2, context);
18165 if (list_ret)
18166 have_address = 1;
18167 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18168 && (ret = cst_pool_loc_descr (loc)))
18169 have_address = 1;
18170 }
18171 /* Otherwise, process the argument and look for the address. */
18172 if (!list_ret && !ret)
18173 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18174 else
18175 {
18176 if (want_address)
18177 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18178 return NULL;
18179 }
18180 break;
18181
18182 case VAR_DECL:
18183 if (DECL_THREAD_LOCAL_P (loc))
18184 {
18185 rtx rtl;
18186 enum dwarf_location_atom tls_op;
18187 enum dtprel_bool dtprel = dtprel_false;
18188
18189 if (targetm.have_tls)
18190 {
18191 /* If this is not defined, we have no way to emit the
18192 data. */
18193 if (!targetm.asm_out.output_dwarf_dtprel)
18194 return 0;
18195
18196 /* The way DW_OP_GNU_push_tls_address is specified, we
18197 can only look up addresses of objects in the current
18198 module. We used DW_OP_addr as first op, but that's
18199 wrong, because DW_OP_addr is relocated by the debug
18200 info consumer, while DW_OP_GNU_push_tls_address
18201 operand shouldn't be. */
18202 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18203 return 0;
18204 dtprel = dtprel_true;
18205 /* We check for DWARF 5 here because gdb did not implement
18206 DW_OP_form_tls_address until after 7.12. */
18207 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18208 : DW_OP_GNU_push_tls_address);
18209 }
18210 else
18211 {
18212 if (!targetm.emutls.debug_form_tls_address
18213 || !(dwarf_version >= 3 || !dwarf_strict))
18214 return 0;
18215 /* We stuffed the control variable into the DECL_VALUE_EXPR
18216 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18217 no longer appear in gimple code. We used the control
18218 variable specifically so that we could pick it up here. */
18219 loc = DECL_VALUE_EXPR (loc);
18220 tls_op = DW_OP_form_tls_address;
18221 }
18222
18223 rtl = rtl_for_decl_location (loc);
18224 if (rtl == NULL_RTX)
18225 return 0;
18226
18227 if (!MEM_P (rtl))
18228 return 0;
18229 rtl = XEXP (rtl, 0);
18230 if (! CONSTANT_P (rtl))
18231 return 0;
18232
18233 ret = new_addr_loc_descr (rtl, dtprel);
18234 ret1 = new_loc_descr (tls_op, 0, 0);
18235 add_loc_descr (&ret, ret1);
18236
18237 have_address = 1;
18238 break;
18239 }
18240 /* FALLTHRU */
18241
18242 case PARM_DECL:
18243 if (context != NULL && context->dpi != NULL
18244 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18245 {
18246 /* We are generating code for a DWARF procedure and we want to access
18247 one of its arguments: find the appropriate argument offset and let
18248 the resolve_args_picking pass compute the offset that complies
18249 with the stack frame size. */
18250 unsigned i = 0;
18251 tree cursor;
18252
18253 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18254 cursor != NULL_TREE && cursor != loc;
18255 cursor = TREE_CHAIN (cursor), ++i)
18256 ;
18257 /* If we are translating a DWARF procedure, all referenced parameters
18258 must belong to the current function. */
18259 gcc_assert (cursor != NULL_TREE);
18260
18261 ret = new_loc_descr (DW_OP_pick, i, 0);
18262 ret->frame_offset_rel = 1;
18263 break;
18264 }
18265 /* FALLTHRU */
18266
18267 case RESULT_DECL:
18268 if (DECL_HAS_VALUE_EXPR_P (loc))
18269 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18270 want_address, context);
18271 /* FALLTHRU */
18272
18273 case FUNCTION_DECL:
18274 {
18275 rtx rtl;
18276 var_loc_list *loc_list = lookup_decl_loc (loc);
18277
18278 if (loc_list && loc_list->first)
18279 {
18280 list_ret = dw_loc_list (loc_list, loc, want_address);
18281 have_address = want_address != 0;
18282 break;
18283 }
18284 rtl = rtl_for_decl_location (loc);
18285 if (rtl == NULL_RTX)
18286 {
18287 if (TREE_CODE (loc) != FUNCTION_DECL
18288 && early_dwarf
18289 && current_function_decl
18290 && want_address != 1
18291 && ! DECL_IGNORED_P (loc)
18292 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18293 || POINTER_TYPE_P (TREE_TYPE (loc)))
18294 && DECL_CONTEXT (loc) == current_function_decl
18295 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18296 <= DWARF2_ADDR_SIZE))
18297 {
18298 dw_die_ref ref = lookup_decl_die (loc);
18299 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18300 if (ref)
18301 {
18302 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18303 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18304 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18305 }
18306 else
18307 {
18308 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18309 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18310 }
18311 break;
18312 }
18313 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18314 return 0;
18315 }
18316 else if (CONST_INT_P (rtl))
18317 {
18318 HOST_WIDE_INT val = INTVAL (rtl);
18319 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18320 val &= GET_MODE_MASK (DECL_MODE (loc));
18321 ret = int_loc_descriptor (val);
18322 }
18323 else if (GET_CODE (rtl) == CONST_STRING)
18324 {
18325 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18326 return 0;
18327 }
18328 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18329 ret = new_addr_loc_descr (rtl, dtprel_false);
18330 else
18331 {
18332 machine_mode mode, mem_mode;
18333
18334 /* Certain constructs can only be represented at top-level. */
18335 if (want_address == 2)
18336 {
18337 ret = loc_descriptor (rtl, VOIDmode,
18338 VAR_INIT_STATUS_INITIALIZED);
18339 have_address = 1;
18340 }
18341 else
18342 {
18343 mode = GET_MODE (rtl);
18344 mem_mode = VOIDmode;
18345 if (MEM_P (rtl))
18346 {
18347 mem_mode = mode;
18348 mode = get_address_mode (rtl);
18349 rtl = XEXP (rtl, 0);
18350 have_address = 1;
18351 }
18352 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18353 VAR_INIT_STATUS_INITIALIZED);
18354 }
18355 if (!ret)
18356 expansion_failed (loc, rtl,
18357 "failed to produce loc descriptor for rtl");
18358 }
18359 }
18360 break;
18361
18362 case MEM_REF:
18363 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18364 {
18365 have_address = 1;
18366 goto do_plus;
18367 }
18368 /* Fallthru. */
18369 case INDIRECT_REF:
18370 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18371 have_address = 1;
18372 break;
18373
18374 case TARGET_MEM_REF:
18375 case SSA_NAME:
18376 case DEBUG_EXPR_DECL:
18377 return NULL;
18378
18379 case COMPOUND_EXPR:
18380 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18381 context);
18382
18383 CASE_CONVERT:
18384 case VIEW_CONVERT_EXPR:
18385 case SAVE_EXPR:
18386 case MODIFY_EXPR:
18387 case NON_LVALUE_EXPR:
18388 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18389 context);
18390
18391 case COMPONENT_REF:
18392 case BIT_FIELD_REF:
18393 case ARRAY_REF:
18394 case ARRAY_RANGE_REF:
18395 case REALPART_EXPR:
18396 case IMAGPART_EXPR:
18397 {
18398 tree obj, offset;
18399 poly_int64 bitsize, bitpos, bytepos;
18400 machine_mode mode;
18401 int unsignedp, reversep, volatilep = 0;
18402
18403 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18404 &unsignedp, &reversep, &volatilep);
18405
18406 gcc_assert (obj != loc);
18407
18408 list_ret = loc_list_from_tree_1 (obj,
18409 want_address == 2
18410 && known_eq (bitpos, 0)
18411 && !offset ? 2 : 1,
18412 context);
18413 /* TODO: We could extract the value of a small expression via shifting even
18414 for a nonzero bitpos. */
18415 if (list_ret == 0)
18416 return 0;
18417 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18418 || !multiple_p (bitsize, BITS_PER_UNIT))
18419 {
18420 expansion_failed (loc, NULL_RTX,
18421 "bitfield access");
18422 return 0;
18423 }
18424
18425 if (offset != NULL_TREE)
18426 {
18427 /* Variable offset. */
18428 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18429 if (list_ret1 == 0)
18430 return 0;
18431 add_loc_list (&list_ret, list_ret1);
18432 if (!list_ret)
18433 return 0;
18434 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18435 }
18436
18437 HOST_WIDE_INT value;
18438 if (bytepos.is_constant (&value) && value > 0)
18439 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18440 value, 0));
18441 else if (maybe_ne (bytepos, 0))
18442 loc_list_plus_const (list_ret, bytepos);
18443
18444 have_address = 1;
18445 break;
18446 }
18447
18448 case INTEGER_CST:
18449 if ((want_address || !tree_fits_shwi_p (loc))
18450 && (ret = cst_pool_loc_descr (loc)))
18451 have_address = 1;
18452 else if (want_address == 2
18453 && tree_fits_shwi_p (loc)
18454 && (ret = address_of_int_loc_descriptor
18455 (int_size_in_bytes (TREE_TYPE (loc)),
18456 tree_to_shwi (loc))))
18457 have_address = 1;
18458 else if (tree_fits_shwi_p (loc))
18459 ret = int_loc_descriptor (tree_to_shwi (loc));
18460 else if (tree_fits_uhwi_p (loc))
18461 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18462 else
18463 {
18464 expansion_failed (loc, NULL_RTX,
18465 "Integer operand is not host integer");
18466 return 0;
18467 }
18468 break;
18469
18470 case CONSTRUCTOR:
18471 case REAL_CST:
18472 case STRING_CST:
18473 case COMPLEX_CST:
18474 if ((ret = cst_pool_loc_descr (loc)))
18475 have_address = 1;
18476 else if (TREE_CODE (loc) == CONSTRUCTOR)
18477 {
18478 tree type = TREE_TYPE (loc);
18479 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18480 unsigned HOST_WIDE_INT offset = 0;
18481 unsigned HOST_WIDE_INT cnt;
18482 constructor_elt *ce;
18483
18484 if (TREE_CODE (type) == RECORD_TYPE)
18485 {
18486 /* This is very limited, but it's enough to output
18487 pointers to member functions, as long as the
18488 referenced function is defined in the current
18489 translation unit. */
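/* Illustrative sketch: a pointer-to-member-function value, which the C++
   front end represents roughly as a { pointer, adjustment } pair, would
   typically make the loop below emit
   <value of pointer>; DW_OP_piece <its size>;
   <value of adjustment>; DW_OP_piece <its size>,
   with extra DW_OP_piece operations covering any gaps or trailing
   padding.  */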
18490 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18491 {
18492 tree val = ce->value;
18493
18494 tree field = ce->index;
18495
18496 if (val)
18497 STRIP_NOPS (val);
18498
18499 if (!field || DECL_BIT_FIELD (field))
18500 {
18501 expansion_failed (loc, NULL_RTX,
18502 "bitfield in record type constructor");
18503 size = offset = (unsigned HOST_WIDE_INT)-1;
18504 ret = NULL;
18505 break;
18506 }
18507
18508 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18509 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18510 gcc_assert (pos + fieldsize <= size);
18511 if (pos < offset)
18512 {
18513 expansion_failed (loc, NULL_RTX,
18514 "out-of-order fields in record constructor");
18515 size = offset = (unsigned HOST_WIDE_INT)-1;
18516 ret = NULL;
18517 break;
18518 }
18519 if (pos > offset)
18520 {
18521 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18522 add_loc_descr (&ret, ret1);
18523 offset = pos;
18524 }
18525 if (val && fieldsize != 0)
18526 {
18527 ret1 = loc_descriptor_from_tree (val, want_address, context);
18528 if (!ret1)
18529 {
18530 expansion_failed (loc, NULL_RTX,
18531 "unsupported expression in field");
18532 size = offset = (unsigned HOST_WIDE_INT)-1;
18533 ret = NULL;
18534 break;
18535 }
18536 add_loc_descr (&ret, ret1);
18537 }
18538 if (fieldsize)
18539 {
18540 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18541 add_loc_descr (&ret, ret1);
18542 offset = pos + fieldsize;
18543 }
18544 }
18545
18546 if (offset != size)
18547 {
18548 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18549 add_loc_descr (&ret, ret1);
18550 offset = size;
18551 }
18552
18553 have_address = !!want_address;
18554 }
18555 else
18556 expansion_failed (loc, NULL_RTX,
18557 "constructor of non-record type");
18558 }
18559 else
18560 /* We can construct small constants here using int_loc_descriptor. */
18561 expansion_failed (loc, NULL_RTX,
18562 "constructor or constant not in constant pool");
18563 break;
18564
18565 case TRUTH_AND_EXPR:
18566 case TRUTH_ANDIF_EXPR:
18567 case BIT_AND_EXPR:
18568 op = DW_OP_and;
18569 goto do_binop;
18570
18571 case TRUTH_XOR_EXPR:
18572 case BIT_XOR_EXPR:
18573 op = DW_OP_xor;
18574 goto do_binop;
18575
18576 case TRUTH_OR_EXPR:
18577 case TRUTH_ORIF_EXPR:
18578 case BIT_IOR_EXPR:
18579 op = DW_OP_or;
18580 goto do_binop;
18581
18582 case FLOOR_DIV_EXPR:
18583 case CEIL_DIV_EXPR:
18584 case ROUND_DIV_EXPR:
18585 case TRUNC_DIV_EXPR:
18586 case EXACT_DIV_EXPR:
18587 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18588 return 0;
18589 op = DW_OP_div;
18590 goto do_binop;
18591
18592 case MINUS_EXPR:
18593 op = DW_OP_minus;
18594 goto do_binop;
18595
18596 case FLOOR_MOD_EXPR:
18597 case CEIL_MOD_EXPR:
18598 case ROUND_MOD_EXPR:
18599 case TRUNC_MOD_EXPR:
18600 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18601 {
18602 op = DW_OP_mod;
18603 goto do_binop;
18604 }
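/* For signed operand types the code below emulates A % B as
   A - (A / B) * B instead of using DW_OP_mod (illustrative trace): with A
   and B pushed, DW_OP_over; DW_OP_over duplicates them, DW_OP_div computes
   A / B, DW_OP_mul computes (A / B) * B and DW_OP_minus leaves
   A - (A / B) * B on the stack.  */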
18605 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18606 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18607 if (list_ret == 0 || list_ret1 == 0)
18608 return 0;
18609
18610 add_loc_list (&list_ret, list_ret1);
18611 if (list_ret == 0)
18612 return 0;
18613 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18614 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18615 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18616 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18617 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18618 break;
18619
18620 case MULT_EXPR:
18621 op = DW_OP_mul;
18622 goto do_binop;
18623
18624 case LSHIFT_EXPR:
18625 op = DW_OP_shl;
18626 goto do_binop;
18627
18628 case RSHIFT_EXPR:
18629 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18630 goto do_binop;
18631
18632 case POINTER_PLUS_EXPR:
18633 case PLUS_EXPR:
18634 do_plus:
18635 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18636 {
18637 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18638 smarter to encode their opposite. The DW_OP_plus_uconst operation
18639 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18640 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18641 bytes, Y being the size of the operation that pushes the opposite
18642 of the addend. So let's choose the smallest representation. */
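/* Illustrative arithmetic (assuming an 8-byte DWARF address size): an addend
   of -4, seen as 0xfffffffffffffffc, needs a 10-byte ULEB128 operand, so
   DW_OP_plus_uconst would take 11 bytes, whereas DW_OP_lit4; DW_OP_minus
   takes only 2 bytes.  */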
18643 const tree tree_addend = TREE_OPERAND (loc, 1);
18644 offset_int wi_addend;
18645 HOST_WIDE_INT shwi_addend;
18646 dw_loc_descr_ref loc_naddend;
18647
18648 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18649 if (list_ret == 0)
18650 return 0;
18651
18652 /* Try to get the literal to push. It is the opposite of the addend,
18653 so as we rely on wrapping during DWARF evaluation, first decode
18654 the literal as a "DWARF-sized" signed number. */
18655 wi_addend = wi::to_offset (tree_addend);
18656 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18657 shwi_addend = wi_addend.to_shwi ();
18658 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18659 ? int_loc_descriptor (-shwi_addend)
18660 : NULL;
18661
18662 if (loc_naddend != NULL
18663 && ((unsigned) size_of_uleb128 (shwi_addend)
18664 > size_of_loc_descr (loc_naddend)))
18665 {
18666 add_loc_descr_to_each (list_ret, loc_naddend);
18667 add_loc_descr_to_each (list_ret,
18668 new_loc_descr (DW_OP_minus, 0, 0));
18669 }
18670 else
18671 {
18672 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18673 {
18674 loc_naddend = loc_cur;
18675 loc_cur = loc_cur->dw_loc_next;
18676 ggc_free (loc_naddend);
18677 }
18678 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18679 }
18680 break;
18681 }
18682
18683 op = DW_OP_plus;
18684 goto do_binop;
18685
18686 case LE_EXPR:
18687 op = DW_OP_le;
18688 goto do_comp_binop;
18689
18690 case GE_EXPR:
18691 op = DW_OP_ge;
18692 goto do_comp_binop;
18693
18694 case LT_EXPR:
18695 op = DW_OP_lt;
18696 goto do_comp_binop;
18697
18698 case GT_EXPR:
18699 op = DW_OP_gt;
18700 goto do_comp_binop;
18701
18702 do_comp_binop:
18703 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18704 {
18705 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18706 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18707 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18708 TREE_CODE (loc));
18709 break;
18710 }
18711 else
18712 goto do_binop;
18713
18714 case EQ_EXPR:
18715 op = DW_OP_eq;
18716 goto do_binop;
18717
18718 case NE_EXPR:
18719 op = DW_OP_ne;
18720 goto do_binop;
18721
18722 do_binop:
18723 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18724 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18725 if (list_ret == 0 || list_ret1 == 0)
18726 return 0;
18727
18728 add_loc_list (&list_ret, list_ret1);
18729 if (list_ret == 0)
18730 return 0;
18731 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18732 break;
18733
18734 case TRUTH_NOT_EXPR:
18735 case BIT_NOT_EXPR:
18736 op = DW_OP_not;
18737 goto do_unop;
18738
18739 case ABS_EXPR:
18740 op = DW_OP_abs;
18741 goto do_unop;
18742
18743 case NEGATE_EXPR:
18744 op = DW_OP_neg;
18745 goto do_unop;
18746
18747 do_unop:
18748 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18749 if (list_ret == 0)
18750 return 0;
18751
18752 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18753 break;
18754
18755 case MIN_EXPR:
18756 case MAX_EXPR:
18757 {
18758 const enum tree_code code =
18759 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18760
18761 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18762 build2 (code, integer_type_node,
18763 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18764 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18765 }
18766
18767 /* fall through */
18768
18769 case COND_EXPR:
18770 {
18771 dw_loc_descr_ref lhs
18772 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18773 dw_loc_list_ref rhs
18774 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18775 dw_loc_descr_ref bra_node, jump_node, tmp;
18776
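/* The layout built below is, schematically (illustrative sketch):

       <cond>   DW_OP_bra  -> THEN
       <else>   DW_OP_skip -> END
   THEN: <then>
   END:  DW_OP_nop

   i.e. the "else" value (operand 2) is evaluated on the fall-through path
   and the "then" value (operand 1) is the branch target.  */
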
18777 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18778 if (list_ret == 0 || lhs == 0 || rhs == 0)
18779 return 0;
18780
18781 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18782 add_loc_descr_to_each (list_ret, bra_node);
18783
18784 add_loc_list (&list_ret, rhs);
18785 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18786 add_loc_descr_to_each (list_ret, jump_node);
18787
18788 add_loc_descr_to_each (list_ret, lhs);
18789 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18790 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18791
18792 /* ??? Need a node to point the skip at. Use a nop. */
18793 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18794 add_loc_descr_to_each (list_ret, tmp);
18795 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18796 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18797 }
18798 break;
18799
18800 case FIX_TRUNC_EXPR:
18801 return 0;
18802
18803 default:
18804 /* Leave front-end specific codes as simply unknown. This comes
18805 up, for instance, with the C STMT_EXPR. */
18806 if ((unsigned int) TREE_CODE (loc)
18807 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18808 {
18809 expansion_failed (loc, NULL_RTX,
18810 "language specific tree node");
18811 return 0;
18812 }
18813
18814 /* Otherwise this is a generic tree code; we should just list all of
18815 these explicitly. We forgot one. */
18816 if (flag_checking)
18817 gcc_unreachable ();
18818
18819 /* In a release build, we want to degrade gracefully: better to
18820 generate incomplete debugging information than to crash. */
18821 return NULL;
18822 }
18823
18824 if (!ret && !list_ret)
18825 return 0;
18826
18827 if (want_address == 2 && !have_address
18828 && (dwarf_version >= 4 || !dwarf_strict))
18829 {
18830 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18831 {
18832 expansion_failed (loc, NULL_RTX,
18833 "DWARF address size mismatch");
18834 return 0;
18835 }
18836 if (ret)
18837 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18838 else
18839 add_loc_descr_to_each (list_ret,
18840 new_loc_descr (DW_OP_stack_value, 0, 0));
18841 have_address = 1;
18842 }
18843 /* Complain if we cannot fulfill the request for an address. */
18844 if (want_address && !have_address)
18845 {
18846 expansion_failed (loc, NULL_RTX,
18847 "Want address and only have value");
18848 return 0;
18849 }
18850
18851 gcc_assert (!ret || !list_ret);
18852
18853 /* If we've got an address and don't want one, dereference. */
18854 if (!want_address && have_address)
18855 {
18856 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18857
18858 if (size > DWARF2_ADDR_SIZE || size == -1)
18859 {
18860 expansion_failed (loc, NULL_RTX,
18861 "DWARF address size mismatch");
18862 return 0;
18863 }
18864 else if (size == DWARF2_ADDR_SIZE)
18865 op = DW_OP_deref;
18866 else
18867 op = DW_OP_deref_size;
18868
18869 if (ret)
18870 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18871 else
18872 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18873 }
18874 if (ret)
18875 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18876
18877 return list_ret;
18878 }
18879
18880 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18881 expressions. */
18882
18883 static dw_loc_list_ref
18884 loc_list_from_tree (tree loc, int want_address,
18885 struct loc_descr_context *context)
18886 {
18887 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18888
18889 for (dw_loc_list_ref loc_cur = result;
18890 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18891 loc_descr_without_nops (loc_cur->expr);
18892 return result;
18893 }
18894
18895 /* Same as above, but return only a single location expression. */
18896 static dw_loc_descr_ref
18897 loc_descriptor_from_tree (tree loc, int want_address,
18898 struct loc_descr_context *context)
18899 {
18900 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18901 if (!ret)
18902 return NULL;
18903 if (ret->dw_loc_next)
18904 {
18905 expansion_failed (loc, NULL_RTX,
18906 "Location list where only loc descriptor needed");
18907 return NULL;
18908 }
18909 return ret->expr;
18910 }
18911
18912 /* Given a value, round it up to the lowest multiple of `boundary'
18913 which is not less than the value itself. */
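/* For example, ceiling (5, 4) == 8 and ceiling (8, 4) == 8.  */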
18914
18915 static inline HOST_WIDE_INT
18916 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18917 {
18918 return (((value + boundary - 1) / boundary) * boundary);
18919 }
18920
18921 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18922 pointer to the declared type for the relevant field variable, or return
18923 `integer_type_node' if the given node turns out to be an
18924 ERROR_MARK node. */
18925
18926 static inline tree
18927 field_type (const_tree decl)
18928 {
18929 tree type;
18930
18931 if (TREE_CODE (decl) == ERROR_MARK)
18932 return integer_type_node;
18933
18934 type = DECL_BIT_FIELD_TYPE (decl);
18935 if (type == NULL_TREE)
18936 type = TREE_TYPE (decl);
18937
18938 return type;
18939 }
18940
18941 /* Given a pointer to a tree node, return the alignment in bits for
18942 it, or else return BITS_PER_WORD if the node actually turns out to
18943 be an ERROR_MARK node. */
18944
18945 static inline unsigned
18946 simple_type_align_in_bits (const_tree type)
18947 {
18948 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18949 }
18950
18951 static inline unsigned
18952 simple_decl_align_in_bits (const_tree decl)
18953 {
18954 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18955 }
18956
18957 /* Return the result of rounding T up to ALIGN. */
18958
18959 static inline offset_int
18960 round_up_to_align (const offset_int &t, unsigned int align)
18961 {
18962 return wi::udiv_trunc (t + align - 1, align) * align;
18963 }
18964
18965 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18966 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18967 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18968 if we fail to return the size in one of these two forms. */
18969
18970 static dw_loc_descr_ref
18971 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18972 {
18973 tree tree_size;
18974 struct loc_descr_context ctx;
18975
18976 /* Prefer returning a constant integer, if possible. */
18977 *cst_size = int_size_in_bytes (type);
18978 if (*cst_size != -1)
18979 return NULL;
18980
18981 ctx.context_type = const_cast<tree> (type);
18982 ctx.base_decl = NULL_TREE;
18983 ctx.dpi = NULL;
18984 ctx.placeholder_arg = false;
18985 ctx.placeholder_seen = false;
18986
18987 type = TYPE_MAIN_VARIANT (type);
18988 tree_size = TYPE_SIZE_UNIT (type);
18989 return ((tree_size != NULL_TREE)
18990 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18991 : NULL);
18992 }
18993
18994 /* Helper structure for RECORD_TYPE processing. */
18995 struct vlr_context
18996 {
18997 /* Root RECORD_TYPE. It is needed to generate data member location
18998 descriptions in variable-length records (VLR), but also to cope with
18999 variants, which are composed of nested structures multiplexed with
19000 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19001 function processing a FIELD_DECL, it is required to be non null. */
19002 tree struct_type;
19003 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19004 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19005 this variant part as part of the root record (in storage units). For
19006 regular records, it must be NULL_TREE. */
19007 tree variant_part_offset;
19008 };
19009
19010 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19011 addressed byte of the "containing object" for the given FIELD_DECL. If
19012 possible, return a native constant through CST_OFFSET (in which case NULL is
19013 returned); otherwise return a DWARF expression that computes the offset.
19014
19015 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19016 that offset is, either because the argument turns out to be a pointer to an
19017 ERROR_MARK node, or because the offset expression is too complex for us.
19018
19019 CTX is required: see the comment for VLR_CONTEXT. */
19020
19021 static dw_loc_descr_ref
19022 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19023 HOST_WIDE_INT *cst_offset)
19024 {
19025 tree tree_result;
19026 dw_loc_list_ref loc_result;
19027
19028 *cst_offset = 0;
19029
19030 if (TREE_CODE (decl) == ERROR_MARK)
19031 return NULL;
19032 else
19033 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19034
19035 /* We cannot handle variable bit offsets at the moment, so give up if that is
19036 the case. */
19037 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19038 return NULL;
19039
19040 #ifdef PCC_BITFIELD_TYPE_MATTERS
19041 /* We used to handle only constant offsets in all cases. Now, we handle
19042 dynamic byte offsets properly only when the PCC bitfield type layout doesn't
19043 matter. */
19044 if (PCC_BITFIELD_TYPE_MATTERS
19045 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19046 {
19047 offset_int object_offset_in_bits;
19048 offset_int object_offset_in_bytes;
19049 offset_int bitpos_int;
19050 tree type;
19051 tree field_size_tree;
19052 offset_int deepest_bitpos;
19053 offset_int field_size_in_bits;
19054 unsigned int type_align_in_bits;
19055 unsigned int decl_align_in_bits;
19056 offset_int type_size_in_bits;
19057
19058 bitpos_int = wi::to_offset (bit_position (decl));
19059 type = field_type (decl);
19060 type_size_in_bits = offset_int_type_size_in_bits (type);
19061 type_align_in_bits = simple_type_align_in_bits (type);
19062
19063 field_size_tree = DECL_SIZE (decl);
19064
19065 /* The size could be unspecified if there was an error, or for
19066 a flexible array member. */
19067 if (!field_size_tree)
19068 field_size_tree = bitsize_zero_node;
19069
19070 /* If the size of the field is not constant, use the type size. */
19071 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19072 field_size_in_bits = wi::to_offset (field_size_tree);
19073 else
19074 field_size_in_bits = type_size_in_bits;
19075
19076 decl_align_in_bits = simple_decl_align_in_bits (decl);
19077
19078 /* The GCC front-end doesn't make any attempt to keep track of the
19079 starting bit offset (relative to the start of the containing
19080 structure type) of the hypothetical "containing object" for a
19081 bit-field. Thus, when computing the byte offset value for the
19082 start of the "containing object" of a bit-field, we must deduce
19083 this information on our own. This can be rather tricky to do in
19084 some cases. For example, handling the following structure type
19085 definition when compiling for an i386/i486 target (which only
19086 aligns long long's to 32-bit boundaries) can be very tricky:
19087
19088 struct S { int field1; long long field2:31; };
19089
19090 Fortunately, there is a simple rule-of-thumb which can be used
19091 in such cases. When compiling for an i386/i486, GCC will
19092 allocate 8 bytes for the structure shown above. It decides to
19093 do this based upon one simple rule for bit-field allocation.
19094 GCC allocates each "containing object" for each bit-field at
19095 the first (i.e. lowest addressed) legitimate alignment boundary
19096 (based upon the required minimum alignment for the declared
19097 type of the field) which it can possibly use, subject to the
19098 condition that there is still enough available space remaining
19099 in the containing object (when allocated at the selected point)
19100 to fully accommodate all of the bits of the bit-field itself.
19101
19102 This simple rule makes it obvious why GCC allocates 8 bytes for
19103 each object of the structure type shown above. When looking
19104 for a place to allocate the "containing object" for `field2',
19105 the compiler simply tries to allocate a 64-bit "containing
19106 object" at each successive 32-bit boundary (starting at zero)
19107 until it finds a place to allocate that 64-bit field such that
19108 at least 31 contiguous (and previously unallocated) bits remain
19109 within that selected 64 bit field. (As it turns out, for the
19110 example above, the compiler finds it is OK to allocate the
19111 "containing object" 64-bit field at bit-offset zero within the
19112 structure type.)
19113
19114 Here we attempt to work backwards from the limited set of facts
19115 we're given, and we try to deduce from those facts, where GCC
19116 must have believed that the containing object started (within
19117 the structure type). The value we deduce is then used (by the
19118 callers of this routine) to generate DW_AT_location and
19119 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19120 the case of DW_AT_location, regular fields as well). */
19121
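/* Worked illustration for the i386/i486 example above (all values in bits):
   field2 starts at bitpos 32 and is 31 bits wide, its declared type
   (long long) is 64 bits wide with a 32-bit alignment requirement, so
   deepest_bitpos = 32 + 31 = 63 and the first guess 63 - 64 = -1 rounds up
   to 0; 0 is not greater than bitpos, so the containing object is deduced
   to start at bit 0, i.e. byte offset 0.  */
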
19122 /* Figure out the bit-distance from the start of the structure to
19123 the "deepest" bit of the bit-field. */
19124 deepest_bitpos = bitpos_int + field_size_in_bits;
19125
19126 /* This is the tricky part. Use some fancy footwork to deduce
19127 where the lowest addressed bit of the containing object must
19128 be. */
19129 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19130
19131 /* Round up to type_align by default. This works best for
19132 bitfields. */
19133 object_offset_in_bits
19134 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19135
19136 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19137 {
19138 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19139
19140 /* Round up to decl_align instead. */
19141 object_offset_in_bits
19142 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19143 }
19144
19145 object_offset_in_bytes
19146 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19147 if (ctx->variant_part_offset == NULL_TREE)
19148 {
19149 *cst_offset = object_offset_in_bytes.to_shwi ();
19150 return NULL;
19151 }
19152 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19153 }
19154 else
19155 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19156 tree_result = byte_position (decl);
19157
19158 if (ctx->variant_part_offset != NULL_TREE)
19159 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19160 ctx->variant_part_offset, tree_result);
19161
19162 /* If the byte offset is a constant, it's simpler to handle a native
19163 constant rather than a DWARF expression. */
19164 if (TREE_CODE (tree_result) == INTEGER_CST)
19165 {
19166 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19167 return NULL;
19168 }
19169 struct loc_descr_context loc_ctx = {
19170 ctx->struct_type, /* context_type */
19171 NULL_TREE, /* base_decl */
19172 NULL, /* dpi */
19173 false, /* placeholder_arg */
19174 false /* placeholder_seen */
19175 };
19176 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19177
19178 /* We want a DWARF expression: abort if we only have a location list with
19179 multiple elements. */
19180 if (!loc_result || !single_element_loc_list_p (loc_result))
19181 return NULL;
19182 else
19183 return loc_result->expr;
19184 }
19185 \f
19186 /* The following routines define various Dwarf attributes and any data
19187 associated with them. */
19188
19189 /* Add a location description attribute value to a DIE.
19190
19191 This emits location attributes suitable for whole variables and
19192 whole parameters. Note that the location attributes for struct fields are
19193 generated by the routine `data_member_location_attribute' below. */
19194
19195 static inline void
19196 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19197 dw_loc_list_ref descr)
19198 {
19199 bool check_no_locviews = true;
19200 if (descr == 0)
19201 return;
19202 if (single_element_loc_list_p (descr))
19203 add_AT_loc (die, attr_kind, descr->expr);
19204 else
19205 {
19206 add_AT_loc_list (die, attr_kind, descr);
19207 gcc_assert (descr->ll_symbol);
19208 if (attr_kind == DW_AT_location && descr->vl_symbol
19209 && dwarf2out_locviews_in_attribute ())
19210 {
19211 add_AT_view_list (die, DW_AT_GNU_locviews);
19212 check_no_locviews = false;
19213 }
19214 }
19215
19216 if (check_no_locviews)
19217 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19218 }
19219
19220 /* Add DW_AT_accessibility attribute to DIE if needed. */
19221
19222 static void
19223 add_accessibility_attribute (dw_die_ref die, tree decl)
19224 {
19225 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19226 children, otherwise the default is DW_ACCESS_public. In DWARF2
19227 the default has always been DW_ACCESS_public. */
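/* For illustration (a sketch, not normative): with DWARF 3+, a `private'
   member declared inside a C++ `class' needs no DW_AT_accessibility at
   all (private is the default there), whereas the same member inside a
   `struct' still gets DW_ACCESS_private, and a `protected' member always
   gets DW_ACCESS_protected.  */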
19228 if (TREE_PROTECTED (decl))
19229 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19230 else if (TREE_PRIVATE (decl))
19231 {
19232 if (dwarf_version == 2
19233 || die->die_parent == NULL
19234 || die->die_parent->die_tag != DW_TAG_class_type)
19235 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19236 }
19237 else if (dwarf_version > 2
19238 && die->die_parent
19239 && die->die_parent->die_tag == DW_TAG_class_type)
19240 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19241 }
19242
19243 /* Attach the specialized form of location attribute used for data members of
19244 struct and union types. In the special case of a FIELD_DECL node which
19245 represents a bit-field, the "offset" part of this special location
19246 descriptor must indicate the distance in bytes from the lowest-addressed
19247 byte of the containing struct or union type to the lowest-addressed byte of
19248 the "containing object" for the bit-field. (See the `field_byte_offset'
19249 function above).
19250
19251 For any given bit-field, the "containing object" is a hypothetical object
19252 (of some integral or enum type) within which the given bit-field lives. The
19253 type of this hypothetical "containing object" is always the same as the
19254 declared type of the individual bit-field itself (for GCC anyway... the
19255 DWARF spec doesn't actually mandate this). Note that it is the size (in
19256 bytes) of the hypothetical "containing object" which will be given in the
19257 DW_AT_byte_size attribute for this bit-field. (See the
19258 `byte_size_attribute' function below.) It is also used when calculating the
19259 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19260 function below.)
19261
19262 CTX is required: see the comment for VLR_CONTEXT. */
19263
19264 static void
19265 add_data_member_location_attribute (dw_die_ref die,
19266 tree decl,
19267 struct vlr_context *ctx)
19268 {
19269 HOST_WIDE_INT offset;
19270 dw_loc_descr_ref loc_descr = 0;
19271
19272 if (TREE_CODE (decl) == TREE_BINFO)
19273 {
19274 /* We're working on the TAG_inheritance for a base class. */
19275 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19276 {
19277 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19278 aren't at a fixed offset from all (sub)objects of the same
19279 type. We need to extract the appropriate offset from our
19280 vtable. The following dwarf expression means
19281
19282 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19283
19284 This is specific to the V3 ABI, of course. */
19285
19286 dw_loc_descr_ref tmp;
19287
19288 /* Make a copy of the object address. */
19289 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19290 add_loc_descr (&loc_descr, tmp);
19291
19292 /* Extract the vtable address. */
19293 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19294 add_loc_descr (&loc_descr, tmp);
19295
19296 /* Calculate the address of the offset. */
19297 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19298 gcc_assert (offset < 0);
19299
19300 tmp = int_loc_descriptor (-offset);
19301 add_loc_descr (&loc_descr, tmp);
19302 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19303 add_loc_descr (&loc_descr, tmp);
19304
19305 /* Extract the offset. */
19306 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19307 add_loc_descr (&loc_descr, tmp);
19308
19309 /* Add it to the object address. */
19310 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19311 add_loc_descr (&loc_descr, tmp);
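/* At this point LOC_DESCR holds, as a sketch of what the code above just
   built, the opcode sequence: DW_OP_dup, DW_OP_deref, a DW_OP_lit*/
/* DW_OP_const* push of -Offset, DW_OP_minus, DW_OP_deref, DW_OP_plus --
   i.e. the expression BaseAddr = ObAddr + *((*ObAddr) - Offset)
   described above.  */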
19312 }
19313 else
19314 offset = tree_to_shwi (BINFO_OFFSET (decl));
19315 }
19316 else
19317 {
19318 loc_descr = field_byte_offset (decl, ctx, &offset);
19319
19320 /* If loc_descr is available then we know the field offset is dynamic.
19321 However, GDB does not handle dynamic field offsets very well at the
19322 moment. */
19323 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19324 {
19325 loc_descr = NULL;
19326 offset = 0;
19327 }
19328
19329 /* Data member location evaluation starts with the base address on the
19330 stack. Compute the field offset and add it to this base address. */
19331 else if (loc_descr != NULL)
19332 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19333 }
19334
19335 if (! loc_descr)
19336 {
19337 /* While DW_AT_data_bit_offset was already added in DWARF4,
19338 e.g. GDB only added support for it in November 2016. For DWARF5
19339 we need newer debug info consumers anyway. We might change this
19340 to dwarf_version >= 4 once most consumers have caught up. */
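/* Purely as an illustrative sketch (hypothetical declaration): for
   something like `struct S { unsigned a : 3; unsigned b : 5; };', the
   DIE for `b' would then typically carry DW_AT_data_bit_offset 3 in
   place of DW_AT_byte_size and DW_AT_bit_offset, assuming the usual
   layout on which bit_position (b) is 3.  */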
19341 if (dwarf_version >= 5
19342 && TREE_CODE (decl) == FIELD_DECL
19343 && DECL_BIT_FIELD_TYPE (decl))
19344 {
19345 tree off = bit_position (decl);
19346 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19347 {
19348 remove_AT (die, DW_AT_byte_size);
19349 remove_AT (die, DW_AT_bit_offset);
19350 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19351 return;
19352 }
19353 }
19354 if (dwarf_version > 2)
19355 {
19356 /* Don't need to output a location expression, just the constant. */
19357 if (offset < 0)
19358 add_AT_int (die, DW_AT_data_member_location, offset);
19359 else
19360 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19361 return;
19362 }
19363 else
19364 {
19365 enum dwarf_location_atom op;
19366
19367 /* The DWARF2 standard says that we should assume that the structure
19368 address is already on the stack, so we can specify a structure
19369 field address by using DW_OP_plus_uconst. */
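/* Illustrative only: a member at byte offset 8 would thus get a
   DW_AT_data_member_location whose expression is the single operation
   DW_OP_plus_uconst 8.  */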
19370 op = DW_OP_plus_uconst;
19371 loc_descr = new_loc_descr (op, offset, 0);
19372 }
19373 }
19374
19375 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19376 }
19377
19378 /* Writes integer values to dw_vec_const array. */
19379
19380 static void
19381 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19382 {
19383 while (size != 0)
19384 {
19385 *dest++ = val & 0xff;
19386 val >>= 8;
19387 --size;
19388 }
19389 }
19390
19391 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19392
19393 static HOST_WIDE_INT
19394 extract_int (const unsigned char *src, unsigned int size)
19395 {
19396 HOST_WIDE_INT val = 0;
19397
19398 src += size;
19399 while (size != 0)
19400 {
19401 val <<= 8;
19402 val |= *--src & 0xff;
19403 --size;
19404 }
19405 return val;
19406 }
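/* Illustrative round trip: insert_int stores the least significant byte
   first, so insert_int (0x1234, 2, buf) yields buf[] = { 0x34, 0x12 },
   and extract_int (buf, 2) recovers 0x1234.  */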
19407
19408 /* Writes wide_int values to dw_vec_const array. */
19409
19410 static void
19411 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19412 {
19413 int i;
19414
19415 if (elt_size <= HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
19416 {
19417 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19418 return;
19419 }
19420
19421 /* We'd have to extend this code to support odd sizes. */
19422 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19423
19424 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19425
19426 if (WORDS_BIG_ENDIAN)
19427 for (i = n - 1; i >= 0; i--)
19428 {
19429 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19430 dest += sizeof (HOST_WIDE_INT);
19431 }
19432 else
19433 for (i = 0; i < n; i++)
19434 {
19435 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19436 dest += sizeof (HOST_WIDE_INT);
19437 }
19438 }
19439
19440 /* Writes floating point values to dw_vec_const array. */
19441
19442 static void
19443 insert_float (const_rtx rtl, unsigned char *array)
19444 {
19445 long val[4];
19446 int i;
19447 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19448
19449 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19450
19451 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19452 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19453 {
19454 insert_int (val[i], 4, array);
19455 array += 4;
19456 }
19457 }
19458
19459 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19460 does not have a "location" either in memory or in a register. These
19461 things can arise in GNU C when a constant is passed as an actual parameter
19462 to an inlined function. They can also arise in C++ where declared
19463 constants do not necessarily get memory "homes". */
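/* A hypothetical illustration: if `static inline void f (int x) { ... }'
   is called as `f (42)' and X ends up with no runtime location after
   inlining and optimization, its DW_TAG_formal_parameter DIE may get
   DW_AT_const_value 42 via the CONST_INT case below.  */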
19464
19465 static bool
19466 add_const_value_attribute (dw_die_ref die, rtx rtl)
19467 {
19468 switch (GET_CODE (rtl))
19469 {
19470 case CONST_INT:
19471 {
19472 HOST_WIDE_INT val = INTVAL (rtl);
19473
19474 if (val < 0)
19475 add_AT_int (die, DW_AT_const_value, val);
19476 else
19477 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19478 }
19479 return true;
19480
19481 case CONST_WIDE_INT:
19482 {
19483 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19484 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19485 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19486 wide_int w = wi::zext (w1, prec);
19487 add_AT_wide (die, DW_AT_const_value, w);
19488 }
19489 return true;
19490
19491 case CONST_DOUBLE:
19492 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19493 floating-point constant. A CONST_DOUBLE is used whenever the
19494 constant requires more than one word in order to be adequately
19495 represented. */
19496 if (TARGET_SUPPORTS_WIDE_INT == 0
19497 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19498 add_AT_double (die, DW_AT_const_value,
19499 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19500 else
19501 {
19502 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19503 unsigned int length = GET_MODE_SIZE (mode);
19504 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19505
19506 insert_float (rtl, array);
19507 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19508 }
19509 return true;
19510
19511 case CONST_VECTOR:
19512 {
19513 unsigned int length;
19514 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19515 return false;
19516
19517 machine_mode mode = GET_MODE (rtl);
19518 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19519 unsigned char *array
19520 = ggc_vec_alloc<unsigned char> (length * elt_size);
19521 unsigned int i;
19522 unsigned char *p;
19523 machine_mode imode = GET_MODE_INNER (mode);
19524
19525 switch (GET_MODE_CLASS (mode))
19526 {
19527 case MODE_VECTOR_INT:
19528 for (i = 0, p = array; i < length; i++, p += elt_size)
19529 {
19530 rtx elt = CONST_VECTOR_ELT (rtl, i);
19531 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19532 }
19533 break;
19534
19535 case MODE_VECTOR_FLOAT:
19536 for (i = 0, p = array; i < length; i++, p += elt_size)
19537 {
19538 rtx elt = CONST_VECTOR_ELT (rtl, i);
19539 insert_float (elt, p);
19540 }
19541 break;
19542
19543 default:
19544 gcc_unreachable ();
19545 }
19546
19547 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19548 }
19549 return true;
19550
19551 case CONST_STRING:
19552 if (dwarf_version >= 4 || !dwarf_strict)
19553 {
19554 dw_loc_descr_ref loc_result;
19555 resolve_one_addr (&rtl);
19556 rtl_addr:
19557 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19558 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19559 add_AT_loc (die, DW_AT_location, loc_result);
19560 vec_safe_push (used_rtx_array, rtl);
19561 return true;
19562 }
19563 return false;
19564
19565 case CONST:
19566 if (CONSTANT_P (XEXP (rtl, 0)))
19567 return add_const_value_attribute (die, XEXP (rtl, 0));
19568 /* FALLTHROUGH */
19569 case SYMBOL_REF:
19570 if (!const_ok_for_output (rtl))
19571 return false;
19572 /* FALLTHROUGH */
19573 case LABEL_REF:
19574 if (dwarf_version >= 4 || !dwarf_strict)
19575 goto rtl_addr;
19576 return false;
19577
19578 case PLUS:
19579 /* In cases where an inlined instance of an inline function is passed
19580 the address of an `auto' variable (which is local to the caller) we
19581 can get a situation where the DECL_RTL of the artificial local
19582 variable (for the inlining) which acts as a stand-in for the
19583 corresponding formal parameter (of the inline function) will look
19584 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19585 exactly a compile-time constant expression, but it isn't the address
19586 of the (artificial) local variable either. Rather, it represents the
19587 *value* which the artificial local variable always has during its
19588 lifetime. We currently have no way to represent such quasi-constant
19589 values in Dwarf, so for now we just punt and generate nothing. */
19590 return false;
19591
19592 case HIGH:
19593 case CONST_FIXED:
19594 return false;
19595
19596 case MEM:
19597 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19598 && MEM_READONLY_P (rtl)
19599 && GET_MODE (rtl) == BLKmode)
19600 {
19601 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19602 return true;
19603 }
19604 return false;
19605
19606 default:
19607 /* No other kinds of rtx should be possible here. */
19608 gcc_unreachable ();
19609 }
19610 return false;
19611 }
19612
19613 /* Determine whether the evaluation of EXPR references any variables
19614 or functions which aren't otherwise used (and therefore may not be
19615 output). */
19616 static tree
19617 reference_to_unused (tree * tp, int * walk_subtrees,
19618 void * data ATTRIBUTE_UNUSED)
19619 {
19620 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19621 *walk_subtrees = 0;
19622
19623 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19624 && ! TREE_ASM_WRITTEN (*tp))
19625 return *tp;
19626 /* ??? The C++ FE emits debug information for using decls, so
19627 putting gcc_unreachable here falls over. See PR31899. For now
19628 be conservative. */
19629 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19630 return *tp;
19631 else if (VAR_P (*tp))
19632 {
19633 varpool_node *node = varpool_node::get (*tp);
19634 if (!node || !node->definition)
19635 return *tp;
19636 }
19637 else if (TREE_CODE (*tp) == FUNCTION_DECL
19638 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19639 {
19640 /* The call graph machinery must have finished analyzing,
19641 optimizing and gimplifying the CU by now.
19642 So if *TP has no call graph node associated
19643 to it, it means *TP will not be emitted. */
19644 if (!cgraph_node::get (*tp))
19645 return *tp;
19646 }
19647 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19648 return *tp;
19649
19650 return NULL_TREE;
19651 }
19652
19653 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19654 for use in a later add_const_value_attribute call. */
19655
19656 static rtx
19657 rtl_for_decl_init (tree init, tree type)
19658 {
19659 rtx rtl = NULL_RTX;
19660
19661 STRIP_NOPS (init);
19662
19663 /* If a variable is initialized with a string constant without embedded
19664 zeros, build CONST_STRING. */
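/* For instance (illustrative only), something like
   `static const char msg[6] = "hello";' satisfies the checks below and
   yields a read-only BLKmode MEM wrapping a CONST_STRING.  */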
19665 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19666 {
19667 tree enttype = TREE_TYPE (type);
19668 tree domain = TYPE_DOMAIN (type);
19669 scalar_int_mode mode;
19670
19671 if (is_int_mode (TYPE_MODE (enttype), &mode)
19672 && GET_MODE_SIZE (mode) == 1
19673 && domain
19674 && TYPE_MAX_VALUE (domain)
19675 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19676 && integer_zerop (TYPE_MIN_VALUE (domain))
19677 && compare_tree_int (TYPE_MAX_VALUE (domain),
19678 TREE_STRING_LENGTH (init) - 1) == 0
19679 && ((size_t) TREE_STRING_LENGTH (init)
19680 == strlen (TREE_STRING_POINTER (init)) + 1))
19681 {
19682 rtl = gen_rtx_CONST_STRING (VOIDmode,
19683 ggc_strdup (TREE_STRING_POINTER (init)));
19684 rtl = gen_rtx_MEM (BLKmode, rtl);
19685 MEM_READONLY_P (rtl) = 1;
19686 }
19687 }
19688 /* Other aggregates, and complex values, could be represented using
19689 CONCAT: FIXME! */
19690 else if (AGGREGATE_TYPE_P (type)
19691 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19692 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19693 || TREE_CODE (type) == COMPLEX_TYPE)
19694 ;
19695 /* Vectors only work if their mode is supported by the target.
19696 FIXME: generic vectors ought to work too. */
19697 else if (TREE_CODE (type) == VECTOR_TYPE
19698 && !VECTOR_MODE_P (TYPE_MODE (type)))
19699 ;
19700 /* If the initializer is something that we know will expand into an
19701 immediate RTL constant, expand it now. We must be careful not to
19702 reference variables which won't be output. */
19703 else if (initializer_constant_valid_p (init, type)
19704 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19705 {
19706 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19707 possible. */
19708 if (TREE_CODE (type) == VECTOR_TYPE)
19709 switch (TREE_CODE (init))
19710 {
19711 case VECTOR_CST:
19712 break;
19713 case CONSTRUCTOR:
19714 if (TREE_CONSTANT (init))
19715 {
19716 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19717 bool constant_p = true;
19718 tree value;
19719 unsigned HOST_WIDE_INT ix;
19720
19721 /* Even when ctor is constant, it might contain non-*_CST
19722 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19723 belong into VECTOR_CST nodes. */
19724 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19725 if (!CONSTANT_CLASS_P (value))
19726 {
19727 constant_p = false;
19728 break;
19729 }
19730
19731 if (constant_p)
19732 {
19733 init = build_vector_from_ctor (type, elts);
19734 break;
19735 }
19736 }
19737 /* FALLTHRU */
19738
19739 default:
19740 return NULL;
19741 }
19742
19743 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19744
19745 /* If expand_expr returns a MEM, it wasn't immediate. */
19746 gcc_assert (!rtl || !MEM_P (rtl));
19747 }
19748
19749 return rtl;
19750 }
19751
19752 /* Generate RTL for the variable DECL to represent its location. */
19753
19754 static rtx
19755 rtl_for_decl_location (tree decl)
19756 {
19757 rtx rtl;
19758
19759 /* Here we have to decide where we are going to say the parameter "lives"
19760 (as far as the debugger is concerned). We only have a couple of
19761 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19762
19763 DECL_RTL normally indicates where the parameter lives during most of the
19764 activation of the function. If optimization is enabled however, this
19765 could be either NULL or else a pseudo-reg. Both of those cases indicate
19766 that the parameter doesn't really live anywhere (as far as the code
19767 generation parts of GCC are concerned) during most of the function's
19768 activation. That will happen (for example) if the parameter is never
19769 referenced within the function.
19770
19771 We could just generate a location descriptor here for all non-NULL
19772 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19773 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19774 where DECL_RTL is NULL or is a pseudo-reg.
19775
19776 Note however that we can only get away with using DECL_INCOMING_RTL as
19777 a backup substitute for DECL_RTL in certain limited cases. In cases
19778 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19779 we can be sure that the parameter was passed using the same type as it is
19780 declared to have within the function, and that its DECL_INCOMING_RTL
19781 points us to a place where a value of that type is passed.
19782
19783 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19784 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19785 because in these cases DECL_INCOMING_RTL points us to a value of some
19786 type which is *different* from the type of the parameter itself. Thus,
19787 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19788 such cases, the debugger would end up (for example) trying to fetch a
19789 `float' from a place which actually contains the first part of a
19790 `double'. That would lead to really incorrect and confusing
19791 output at debug-time.
19792
19793 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19794 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19795 are a couple of exceptions however. On little-endian machines we can
19796 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19797 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19798 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19799 when (on a little-endian machine) a non-prototyped function has a
19800 parameter declared to be of type `short' or `char'. In such cases,
19801 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19802 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19803 passed `int' value. If the debugger then uses that address to fetch
19804 a `short' or a `char' (on a little-endian machine) the result will be
19805 the correct data, so we allow for such exceptional cases below.
19806
19807 Note that our goal here is to describe the place where the given formal
19808 parameter lives during most of the function's activation (i.e. between the
19809 end of the prologue and the start of the epilogue). We'll do that as best
19810 as we can. Note however that if the given formal parameter is modified
19811 sometime during the execution of the function, then a stack backtrace (at
19812 debug-time) will show the function as having been called with the *new*
19813 value rather than the value which was originally passed in. This happens
19814 rarely enough that it is not a major problem, but it *is* a problem, and
19815 I'd like to fix it.
19816
19817 A future version of dwarf2out.c may generate two additional attributes for
19818 any given DW_TAG_formal_parameter DIE which will describe the "passed
19819 type" and the "passed location" for the given formal parameter in addition
19820 to the attributes we now generate to indicate the "declared type" and the
19821 "active location" for each parameter. This additional set of attributes
19822 could be used by debuggers for stack backtraces. Separately, note that
19823 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19824 This happens (for example) for inlined-instances of inline function formal
19825 parameters which are never referenced. This really shouldn't be
19826 happening. All PARM_DECL nodes should get valid non-NULL
19827 DECL_INCOMING_RTL values. FIXME. */
19828
19829 /* Use DECL_RTL as the "location" unless we find something better. */
19830 rtl = DECL_RTL_IF_SET (decl);
19831
19832 /* When generating abstract instances, ignore everything except
19833 constants, symbols living in memory, and symbols living in
19834 fixed registers. */
19835 if (! reload_completed)
19836 {
19837 if (rtl
19838 && (CONSTANT_P (rtl)
19839 || (MEM_P (rtl)
19840 && CONSTANT_P (XEXP (rtl, 0)))
19841 || (REG_P (rtl)
19842 && VAR_P (decl)
19843 && TREE_STATIC (decl))))
19844 {
19845 rtl = targetm.delegitimize_address (rtl);
19846 return rtl;
19847 }
19848 rtl = NULL_RTX;
19849 }
19850 else if (TREE_CODE (decl) == PARM_DECL)
19851 {
19852 if (rtl == NULL_RTX
19853 || is_pseudo_reg (rtl)
19854 || (MEM_P (rtl)
19855 && is_pseudo_reg (XEXP (rtl, 0))
19856 && DECL_INCOMING_RTL (decl)
19857 && MEM_P (DECL_INCOMING_RTL (decl))
19858 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19859 {
19860 tree declared_type = TREE_TYPE (decl);
19861 tree passed_type = DECL_ARG_TYPE (decl);
19862 machine_mode dmode = TYPE_MODE (declared_type);
19863 machine_mode pmode = TYPE_MODE (passed_type);
19864
19865 /* This decl represents a formal parameter which was optimized out.
19866 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19867 all cases where (rtl == NULL_RTX) just below. */
19868 if (dmode == pmode)
19869 rtl = DECL_INCOMING_RTL (decl);
19870 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19871 && SCALAR_INT_MODE_P (dmode)
19872 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19873 && DECL_INCOMING_RTL (decl))
19874 {
19875 rtx inc = DECL_INCOMING_RTL (decl);
19876 if (REG_P (inc))
19877 rtl = inc;
19878 else if (MEM_P (inc))
19879 {
19880 if (BYTES_BIG_ENDIAN)
19881 rtl = adjust_address_nv (inc, dmode,
19882 GET_MODE_SIZE (pmode)
19883 - GET_MODE_SIZE (dmode));
19884 else
19885 rtl = inc;
19886 }
19887 }
19888 }
19889
19890 /* If the parm was passed in registers, but lives on the stack, then
19891 make a big endian correction if the mode of the type of the
19892 parameter is not the same as the mode of the rtl. */
19893 /* ??? This is the same series of checks that are made in dbxout.c before
19894 we reach the big endian correction code there. It isn't clear if all
19895 of these checks are necessary here, but keeping them all is the safe
19896 thing to do. */
19897 else if (MEM_P (rtl)
19898 && XEXP (rtl, 0) != const0_rtx
19899 && ! CONSTANT_P (XEXP (rtl, 0))
19900 /* Not passed in memory. */
19901 && !MEM_P (DECL_INCOMING_RTL (decl))
19902 /* Not passed by invisible reference. */
19903 && (!REG_P (XEXP (rtl, 0))
19904 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19905 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19906 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19907 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19908 #endif
19909 )
19910 /* Big endian correction check. */
19911 && BYTES_BIG_ENDIAN
19912 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19913 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19914 UNITS_PER_WORD))
19915 {
19916 machine_mode addr_mode = get_address_mode (rtl);
19917 poly_int64 offset = (UNITS_PER_WORD
19918 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19919
19920 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19921 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19922 }
19923 }
19924 else if (VAR_P (decl)
19925 && rtl
19926 && MEM_P (rtl)
19927 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19928 {
19929 machine_mode addr_mode = get_address_mode (rtl);
19930 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19931 GET_MODE (rtl));
19932
19933 /* If a variable is declared "register" yet is smaller than
19934 a register, then if we store the variable to memory, it
19935 looks like we're storing a register-sized value, when in
19936 fact we are not. We need to adjust the offset of the
19937 storage location to reflect the actual value's bytes,
19938 else gdb will not be able to display it. */
19939 if (maybe_ne (offset, 0))
19940 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19941 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19942 }
19943
19944 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19945 and will have been substituted directly into all expressions that use it.
19946 C does not have such a concept, but C++ and other languages do. */
19947 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19948 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19949
19950 if (rtl)
19951 rtl = targetm.delegitimize_address (rtl);
19952
19953 /* If we don't look past the constant pool, we risk emitting a
19954 reference to a constant pool entry that isn't referenced from
19955 code, and thus is not emitted. */
19956 if (rtl)
19957 rtl = avoid_constant_pool_reference (rtl);
19958
19959 /* Try harder to get a rtl. If this symbol ends up not being emitted
19960 in the current CU, resolve_addr will remove the expression referencing
19961 it. */
19962 if (rtl == NULL_RTX
19963 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19964 && VAR_P (decl)
19965 && !DECL_EXTERNAL (decl)
19966 && TREE_STATIC (decl)
19967 && DECL_NAME (decl)
19968 && !DECL_HARD_REGISTER (decl)
19969 && DECL_MODE (decl) != VOIDmode)
19970 {
19971 rtl = make_decl_rtl_for_debug (decl);
19972 if (!MEM_P (rtl)
19973 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19974 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19975 rtl = NULL_RTX;
19976 }
19977
19978 return rtl;
19979 }
19980
19981 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19982 returned. If so, the decl for the COMMON block is returned, and the
19983 value is the offset into the common block for the symbol. */
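/* A sketch of the expected shape (illustrative, depends on how the
   Fortran FE lowers COMMON): for `COMMON /blk/ i, r', the member `r'
   typically has a DECL_VALUE_EXPR of the form `blk.r' (a COMPONENT_REF);
   this function would then return the decl for `blk' and set *VALUE to
   the byte offset of `r' within the common block.  */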
19984
19985 static tree
19986 fortran_common (tree decl, HOST_WIDE_INT *value)
19987 {
19988 tree val_expr, cvar;
19989 machine_mode mode;
19990 poly_int64 bitsize, bitpos;
19991 tree offset;
19992 HOST_WIDE_INT cbitpos;
19993 int unsignedp, reversep, volatilep = 0;
19994
19995 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19996 it does not have a value (the offset into the common area), or if it
19997 is thread local (as opposed to global) then it isn't common, and shouldn't
19998 be handled as such. */
19999 if (!VAR_P (decl)
20000 || !TREE_STATIC (decl)
20001 || !DECL_HAS_VALUE_EXPR_P (decl)
20002 || !is_fortran ())
20003 return NULL_TREE;
20004
20005 val_expr = DECL_VALUE_EXPR (decl);
20006 if (TREE_CODE (val_expr) != COMPONENT_REF)
20007 return NULL_TREE;
20008
20009 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20010 &unsignedp, &reversep, &volatilep);
20011
20012 if (cvar == NULL_TREE
20013 || !VAR_P (cvar)
20014 || DECL_ARTIFICIAL (cvar)
20015 || !TREE_PUBLIC (cvar)
20016 /* We don't expect to have to cope with variable offsets,
20017 since at present all static data must have a constant size. */
20018 || !bitpos.is_constant (&cbitpos))
20019 return NULL_TREE;
20020
20021 *value = 0;
20022 if (offset != NULL)
20023 {
20024 if (!tree_fits_shwi_p (offset))
20025 return NULL_TREE;
20026 *value = tree_to_shwi (offset);
20027 }
20028 if (cbitpos != 0)
20029 *value += cbitpos / BITS_PER_UNIT;
20030
20031 return cvar;
20032 }
20033
20034 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20035 data attribute for a variable or a parameter. We generate the
20036 DW_AT_const_value attribute only in those cases where the given variable
20037 or parameter does not have a true "location" either in memory or in a
20038 register. This can happen (for example) when a constant is passed as an
20039 actual argument in a call to an inline function. (It's possible that
20040 these things can crop up in other ways also.) Note that one type of
20041 constant value which can be passed into an inlined function is a constant
20042 pointer. This can happen for example if an actual argument in an inlined
20043 function call evaluates to a compile-time constant address.
20044
20045 CACHE_P is true if it is worth caching the location list for DECL,
20046 so that future calls can reuse it rather than regenerate it from scratch.
20047 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20048 since we will need to refer to them each time the function is inlined. */
20049
20050 static bool
20051 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20052 {
20053 rtx rtl;
20054 dw_loc_list_ref list;
20055 var_loc_list *loc_list;
20056 cached_dw_loc_list *cache;
20057
20058 if (early_dwarf)
20059 return false;
20060
20061 if (TREE_CODE (decl) == ERROR_MARK)
20062 return false;
20063
20064 if (get_AT (die, DW_AT_location)
20065 || get_AT (die, DW_AT_const_value))
20066 return true;
20067
20068 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20069 || TREE_CODE (decl) == RESULT_DECL);
20070
20071 /* Try to get some constant RTL for this decl, and use that as the value of
20072 the location. */
20073
20074 rtl = rtl_for_decl_location (decl);
20075 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20076 && add_const_value_attribute (die, rtl))
20077 return true;
20078
20079 /* See if we have a single-element location list that is equivalent to
20080 a constant value. In that case it is better to use add_const_value_attribute
20081 rather than expanding the constant value equivalent. */
20082 loc_list = lookup_decl_loc (decl);
20083 if (loc_list
20084 && loc_list->first
20085 && loc_list->first->next == NULL
20086 && NOTE_P (loc_list->first->loc)
20087 && NOTE_VAR_LOCATION (loc_list->first->loc)
20088 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20089 {
20090 struct var_loc_node *node;
20091
20092 node = loc_list->first;
20093 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20094 if (GET_CODE (rtl) == EXPR_LIST)
20095 rtl = XEXP (rtl, 0);
20096 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20097 && add_const_value_attribute (die, rtl))
20098 return true;
20099 }
20100 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20101 list several times. See if we've already cached the contents. */
20102 list = NULL;
20103 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20104 cache_p = false;
20105 if (cache_p)
20106 {
20107 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20108 if (cache)
20109 list = cache->loc_list;
20110 }
20111 if (list == NULL)
20112 {
20113 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20114 NULL);
20115 /* It is usually worth caching this result if the decl is from
20116 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20117 if (cache_p && list && list->dw_loc_next)
20118 {
20119 cached_dw_loc_list **slot
20120 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20121 DECL_UID (decl),
20122 INSERT);
20123 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20124 cache->decl_id = DECL_UID (decl);
20125 cache->loc_list = list;
20126 *slot = cache;
20127 }
20128 }
20129 if (list)
20130 {
20131 add_AT_location_description (die, DW_AT_location, list);
20132 return true;
20133 }
20134 /* None of that worked, so it must not really have a location;
20135 try adding a constant value attribute from the DECL_INITIAL. */
20136 return tree_add_const_value_attribute_for_decl (die, decl);
20137 }
20138
20139 /* Helper function for tree_add_const_value_attribute. Natively encode
20140 initializer INIT into an array. Return true if successful. */
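/* A small illustrative sketch (assuming a little-endian target with
   16-bit shorts): for `struct { short a; short b; } x = { 1, 2 };' the
   CONSTRUCTOR case below would fill a 4-byte ARRAY with the bytes
   01 00 02 00.  */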
20141
20142 static bool
20143 native_encode_initializer (tree init, unsigned char *array, int size)
20144 {
20145 tree type;
20146
20147 if (init == NULL_TREE)
20148 return false;
20149
20150 STRIP_NOPS (init);
20151 switch (TREE_CODE (init))
20152 {
20153 case STRING_CST:
20154 type = TREE_TYPE (init);
20155 if (TREE_CODE (type) == ARRAY_TYPE)
20156 {
20157 tree enttype = TREE_TYPE (type);
20158 scalar_int_mode mode;
20159
20160 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20161 || GET_MODE_SIZE (mode) != 1)
20162 return false;
20163 if (int_size_in_bytes (type) != size)
20164 return false;
20165 if (size > TREE_STRING_LENGTH (init))
20166 {
20167 memcpy (array, TREE_STRING_POINTER (init),
20168 TREE_STRING_LENGTH (init));
20169 memset (array + TREE_STRING_LENGTH (init),
20170 '\0', size - TREE_STRING_LENGTH (init));
20171 }
20172 else
20173 memcpy (array, TREE_STRING_POINTER (init), size);
20174 return true;
20175 }
20176 return false;
20177 case CONSTRUCTOR:
20178 type = TREE_TYPE (init);
20179 if (int_size_in_bytes (type) != size)
20180 return false;
20181 if (TREE_CODE (type) == ARRAY_TYPE)
20182 {
20183 HOST_WIDE_INT min_index;
20184 unsigned HOST_WIDE_INT cnt;
20185 int curpos = 0, fieldsize;
20186 constructor_elt *ce;
20187
20188 if (TYPE_DOMAIN (type) == NULL_TREE
20189 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20190 return false;
20191
20192 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20193 if (fieldsize <= 0)
20194 return false;
20195
20196 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20197 memset (array, '\0', size);
20198 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20199 {
20200 tree val = ce->value;
20201 tree index = ce->index;
20202 int pos = curpos;
20203 if (index && TREE_CODE (index) == RANGE_EXPR)
20204 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20205 * fieldsize;
20206 else if (index)
20207 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20208
20209 if (val)
20210 {
20211 STRIP_NOPS (val);
20212 if (!native_encode_initializer (val, array + pos, fieldsize))
20213 return false;
20214 }
20215 curpos = pos + fieldsize;
20216 if (index && TREE_CODE (index) == RANGE_EXPR)
20217 {
20218 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20219 - tree_to_shwi (TREE_OPERAND (index, 0));
20220 while (count-- > 0)
20221 {
20222 if (val)
20223 memcpy (array + curpos, array + pos, fieldsize);
20224 curpos += fieldsize;
20225 }
20226 }
20227 gcc_assert (curpos <= size);
20228 }
20229 return true;
20230 }
20231 else if (TREE_CODE (type) == RECORD_TYPE
20232 || TREE_CODE (type) == UNION_TYPE)
20233 {
20234 tree field = NULL_TREE;
20235 unsigned HOST_WIDE_INT cnt;
20236 constructor_elt *ce;
20237
20238 if (int_size_in_bytes (type) != size)
20239 return false;
20240
20241 if (TREE_CODE (type) == RECORD_TYPE)
20242 field = TYPE_FIELDS (type);
20243
20244 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20245 {
20246 tree val = ce->value;
20247 int pos, fieldsize;
20248
20249 if (ce->index != 0)
20250 field = ce->index;
20251
20252 if (val)
20253 STRIP_NOPS (val);
20254
20255 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20256 return false;
20257
20258 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20259 && TYPE_DOMAIN (TREE_TYPE (field))
20260 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20261 return false;
20262 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20263 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20264 return false;
20265 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20266 pos = int_byte_position (field);
20267 gcc_assert (pos + fieldsize <= size);
20268 if (val && fieldsize != 0
20269 && !native_encode_initializer (val, array + pos, fieldsize))
20270 return false;
20271 }
20272 return true;
20273 }
20274 return false;
20275 case VIEW_CONVERT_EXPR:
20276 case NON_LVALUE_EXPR:
20277 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20278 default:
20279 return native_encode_expr (init, array, size) == size;
20280 }
20281 }
20282
20283 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20284 attribute is the const value T. */
20285
20286 static bool
20287 tree_add_const_value_attribute (dw_die_ref die, tree t)
20288 {
20289 tree init;
20290 tree type = TREE_TYPE (t);
20291 rtx rtl;
20292
20293 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20294 return false;
20295
20296 init = t;
20297 gcc_assert (!DECL_P (init));
20298
20299 if (TREE_CODE (init) == INTEGER_CST)
20300 {
20301 if (tree_fits_uhwi_p (init))
20302 {
20303 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20304 return true;
20305 }
20306 if (tree_fits_shwi_p (init))
20307 {
20308 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20309 return true;
20310 }
20311 }
20312 if (! early_dwarf)
20313 {
20314 rtl = rtl_for_decl_init (init, type);
20315 if (rtl)
20316 return add_const_value_attribute (die, rtl);
20317 }
20318 /* If the host and target are sane, try harder. */
20319 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20320 && initializer_constant_valid_p (init, type))
20321 {
20322 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20323 if (size > 0 && (int) size == size)
20324 {
20325 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20326
20327 if (native_encode_initializer (init, array, size))
20328 {
20329 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20330 return true;
20331 }
20332 ggc_free (array);
20333 }
20334 }
20335 return false;
20336 }
20337
20338 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20339 attribute is the const value of T, where T is an integral constant
20340 variable with static storage duration
20341 (so it can't be a PARM_DECL or a RESULT_DECL). */
20342
20343 static bool
20344 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20345 {
20346
20347 if (!decl
20348 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20349 || (VAR_P (decl) && !TREE_STATIC (decl)))
20350 return false;
20351
20352 if (TREE_READONLY (decl)
20353 && ! TREE_THIS_VOLATILE (decl)
20354 && DECL_INITIAL (decl))
20355 /* OK */;
20356 else
20357 return false;
20358
20359 /* Don't add DW_AT_const_value if abstract origin already has one. */
20360 if (get_AT (var_die, DW_AT_const_value))
20361 return false;
20362
20363 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20364 }
20365
20366 /* Convert the CFI instructions for the current function into a
20367 location list. This is used for DW_AT_frame_base when we are targeting
20368 a dwarf2 consumer that does not support the dwarf3
20369 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20370 expressions. */
20371
20372 static dw_loc_list_ref
20373 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20374 {
20375 int ix;
20376 dw_fde_ref fde;
20377 dw_loc_list_ref list, *list_tail;
20378 dw_cfi_ref cfi;
20379 dw_cfa_location last_cfa, next_cfa;
20380 const char *start_label, *last_label, *section;
20381 dw_cfa_location remember;
20382
20383 fde = cfun->fde;
20384 gcc_assert (fde != NULL);
20385
20386 section = secname_for_decl (current_function_decl);
20387 list_tail = &list;
20388 list = NULL;
20389
20390 memset (&next_cfa, 0, sizeof (next_cfa));
20391 next_cfa.reg = INVALID_REGNUM;
20392 remember = next_cfa;
20393
20394 start_label = fde->dw_fde_begin;
20395
20396 /* ??? Bald assumption that the CIE opcode list does not contain
20397 advance opcodes. */
20398 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20399 lookup_cfa_1 (cfi, &next_cfa, &remember);
20400
20401 last_cfa = next_cfa;
20402 last_label = start_label;
20403
20404 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20405 {
20406 /* If the first partition contained no CFI adjustments, the
20407 CIE opcodes apply to the whole first partition. */
20408 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20409 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20410 list_tail = &(*list_tail)->dw_loc_next;
20411 start_label = last_label = fde->dw_fde_second_begin;
20412 }
20413
20414 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20415 {
20416 switch (cfi->dw_cfi_opc)
20417 {
20418 case DW_CFA_set_loc:
20419 case DW_CFA_advance_loc1:
20420 case DW_CFA_advance_loc2:
20421 case DW_CFA_advance_loc4:
20422 if (!cfa_equal_p (&last_cfa, &next_cfa))
20423 {
20424 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20425 start_label, 0, last_label, 0, section);
20426
20427 list_tail = &(*list_tail)->dw_loc_next;
20428 last_cfa = next_cfa;
20429 start_label = last_label;
20430 }
20431 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20432 break;
20433
20434 case DW_CFA_advance_loc:
20435 /* The encoding is complex enough that we should never emit this. */
20436 gcc_unreachable ();
20437
20438 default:
20439 lookup_cfa_1 (cfi, &next_cfa, &remember);
20440 break;
20441 }
20442 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20443 {
20444 if (!cfa_equal_p (&last_cfa, &next_cfa))
20445 {
20446 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20447 start_label, 0, last_label, 0, section);
20448
20449 list_tail = &(*list_tail)->dw_loc_next;
20450 last_cfa = next_cfa;
20451 start_label = last_label;
20452 }
20453 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20454 start_label, 0, fde->dw_fde_end, 0, section);
20455 list_tail = &(*list_tail)->dw_loc_next;
20456 start_label = last_label = fde->dw_fde_second_begin;
20457 }
20458 }
20459
20460 if (!cfa_equal_p (&last_cfa, &next_cfa))
20461 {
20462 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20463 start_label, 0, last_label, 0, section);
20464 list_tail = &(*list_tail)->dw_loc_next;
20465 start_label = last_label;
20466 }
20467
20468 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20469 start_label, 0,
20470 fde->dw_fde_second_begin
20471 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20472 section);
20473
20474 maybe_gen_llsym (list);
20475
20476 return list;
20477 }
20478
20479 /* Compute a displacement from the "steady-state frame pointer" to the
20480 frame base (often the same as the CFA), and store it in
20481 frame_pointer_fb_offset. OFFSET is added to the displacement
20482 before the latter is negated. */
20483
20484 static void
20485 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20486 {
20487 rtx reg, elim;
20488
20489 #ifdef FRAME_POINTER_CFA_OFFSET
20490 reg = frame_pointer_rtx;
20491 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20492 #else
20493 reg = arg_pointer_rtx;
20494 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20495 #endif
20496
20497 elim = (ira_use_lra_p
20498 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20499 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20500 elim = strip_offset_and_add (elim, &offset);
20501
20502 frame_pointer_fb_offset = -offset;
20503
20504 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20505 in which to eliminate. This is because its stack pointer isn't
20506 directly accessible as a register within the ISA. To work around
20507 this, assume that while we cannot provide a proper value for
20508 frame_pointer_fb_offset, we won't need one either. */
20509 frame_pointer_fb_offset_valid
20510 = ((SUPPORTS_STACK_ALIGNMENT
20511 && (elim == hard_frame_pointer_rtx
20512 || elim == stack_pointer_rtx))
20513 || elim == (frame_pointer_needed
20514 ? hard_frame_pointer_rtx
20515 : stack_pointer_rtx));
20516 }
20517
20518 /* Generate a DW_AT_name attribute given some string value to be included as
20519 the value of the attribute. */
20520
20521 static void
20522 add_name_attribute (dw_die_ref die, const char *name_string)
20523 {
20524 if (name_string != NULL && *name_string != 0)
20525 {
20526 if (demangle_name_func)
20527 name_string = (*demangle_name_func) (name_string);
20528
20529 add_AT_string (die, DW_AT_name, name_string);
20530 }
20531 }
20532
20533 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20534 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20535 of TYPE accordingly.
20536
20537 ??? This is a temporary measure until after we're able to generate
20538 regular DWARF for the complex Ada type system. */
20539
20540 static void
20541 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20542 dw_die_ref context_die)
20543 {
20544 tree dtype;
20545 dw_die_ref dtype_die;
20546
20547 if (!lang_hooks.types.descriptive_type)
20548 return;
20549
20550 dtype = lang_hooks.types.descriptive_type (type);
20551 if (!dtype)
20552 return;
20553
20554 dtype_die = lookup_type_die (dtype);
20555 if (!dtype_die)
20556 {
20557 gen_type_die (dtype, context_die);
20558 dtype_die = lookup_type_die (dtype);
20559 gcc_assert (dtype_die);
20560 }
20561
20562 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20563 }
20564
20565 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20566
20567 static const char *
20568 comp_dir_string (void)
20569 {
20570 const char *wd;
20571 char *wd1;
20572 static const char *cached_wd = NULL;
20573
20574 if (cached_wd != NULL)
20575 return cached_wd;
20576
20577 wd = get_src_pwd ();
20578 if (wd == NULL)
20579 return NULL;
20580
20581 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20582 {
20583 int wdlen;
20584
20585 wdlen = strlen (wd);
20586 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20587 strcpy (wd1, wd);
20588 wd1 [wdlen] = DIR_SEPARATOR;
20589 wd1 [wdlen + 1] = 0;
20590 wd = wd1;
20591 }
20592
20593 cached_wd = remap_debug_filename (wd);
20594 return cached_wd;
20595 }
20596
20597 /* Generate a DW_AT_comp_dir attribute for DIE. */
20598
20599 static void
20600 add_comp_dir_attribute (dw_die_ref die)
20601 {
20602 const char * wd = comp_dir_string ();
20603 if (wd != NULL)
20604 add_AT_string (die, DW_AT_comp_dir, wd);
20605 }
20606
20607 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20608 pointer computation, ...), output a representation for that bound according
20609 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20610 loc_list_from_tree for the meaning of CONTEXT. */
20611
20612 static void
20613 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20614 int forms, struct loc_descr_context *context)
20615 {
20616 dw_die_ref context_die, decl_die = NULL;
20617 dw_loc_list_ref list;
20618 bool strip_conversions = true;
20619 bool placeholder_seen = false;
20620
20621 while (strip_conversions)
20622 switch (TREE_CODE (value))
20623 {
20624 case ERROR_MARK:
20625 case SAVE_EXPR:
20626 return;
20627
20628 CASE_CONVERT:
20629 case VIEW_CONVERT_EXPR:
20630 value = TREE_OPERAND (value, 0);
20631 break;
20632
20633 default:
20634 strip_conversions = false;
20635 break;
20636 }
20637
20638 /* If possible and permitted, output the attribute as a constant. */
20639 if ((forms & dw_scalar_form_constant) != 0
20640 && TREE_CODE (value) == INTEGER_CST)
20641 {
20642 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20643
20644 /* If HOST_WIDE_INT is big enough then represent the bound as
20645 a constant value. We need to choose a form based on
20646 whether the type is signed or unsigned. We cannot just
20647 call add_AT_unsigned if the value itself is positive
20648 (add_AT_unsigned might add the unsigned value encoded as
20649 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20650 bounds type and then sign extend any unsigned values found
20651 for signed types. This is needed only for
20652 DW_AT_{lower,upper}_bound, since for most other attributes,
20653 consumers will treat DW_FORM_data[1248] as unsigned values,
20654 regardless of the underlying type. */
20655 if (prec <= HOST_BITS_PER_WIDE_INT
20656 || tree_fits_uhwi_p (value))
20657 {
20658 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20659 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20660 else
20661 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20662 }
20663 else
20664 /* Otherwise represent the bound as an unsigned value with
20665 the precision of its type. The precision and signedness
20666 of the type will be necessary to re-interpret it
20667 unambiguously. */
20668 add_AT_wide (die, attr, wi::to_wide (value));
20669 return;
20670 }
20671
20672 /* Otherwise, if it's possible and permitted too, output a reference to
20673 another DIE. */
20674 if ((forms & dw_scalar_form_reference) != 0)
20675 {
20676 tree decl = NULL_TREE;
20677
20678 /* Some type attributes reference an outer type. For instance, the upper
20679 bound of an array may reference an embedding record (this happens in
20680 Ada). */
20681 if (TREE_CODE (value) == COMPONENT_REF
20682 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20683 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20684 decl = TREE_OPERAND (value, 1);
20685
20686 else if (VAR_P (value)
20687 || TREE_CODE (value) == PARM_DECL
20688 || TREE_CODE (value) == RESULT_DECL)
20689 decl = value;
20690
20691 if (decl != NULL_TREE)
20692 {
20693 decl_die = lookup_decl_die (decl);
20694
20695 /* ??? Can this happen, or should the variable have been bound
20696 first? Probably it can, since I imagine that we try to create
20697 the types of parameters in the order in which they exist in
20698 the list, and won't have created a forward reference to a
20699 later parameter. */
20700 if (decl_die != NULL)
20701 {
20702 if (get_AT (decl_die, DW_AT_location)
20703 || get_AT (decl_die, DW_AT_const_value))
20704 {
20705 add_AT_die_ref (die, attr, decl_die);
20706 return;
20707 }
20708 }
20709 }
20710 }
20711
20712 /* Last chance: try to create a stack operation procedure to evaluate the
20713 value. Do nothing if even that is not possible or permitted. */
20714 if ((forms & dw_scalar_form_exprloc) == 0)
20715 return;
20716
20717 list = loc_list_from_tree (value, 2, context);
20718 if (context && context->placeholder_arg)
20719 {
20720 placeholder_seen = context->placeholder_seen;
20721 context->placeholder_seen = false;
20722 }
20723 if (list == NULL || single_element_loc_list_p (list))
20724 {
20725 /* If this attribute is neither a reference nor a constant, it is
20726 a DWARF expression rather than a location description. For that,
20727 loc_list_from_tree (value, 0, context) is needed. */
20728 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20729 if (list2 && single_element_loc_list_p (list2))
20730 {
20731 if (placeholder_seen)
20732 {
20733 struct dwarf_procedure_info dpi;
20734 dpi.fndecl = NULL_TREE;
20735 dpi.args_count = 1;
20736 if (!resolve_args_picking (list2->expr, 1, &dpi))
20737 return;
20738 }
20739 add_AT_loc (die, attr, list2->expr);
20740 return;
20741 }
20742 }
20743
20744 /* If that failed to give a single element location list, fall back to
20745 outputting this as a reference... still if permitted. */
20746 if (list == NULL
20747 || (forms & dw_scalar_form_reference) == 0
20748 || placeholder_seen)
20749 return;
20750
20751 if (!decl_die)
20752 {
20753 if (current_function_decl == 0)
20754 context_die = comp_unit_die ();
20755 else
20756 context_die = lookup_decl_die (current_function_decl);
20757
20758 decl_die = new_die (DW_TAG_variable, context_die, value);
20759 add_AT_flag (decl_die, DW_AT_artificial, 1);
20760 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20761 context_die);
20762 }
20763
20764 add_AT_location_description (decl_die, DW_AT_location, list);
20765 add_AT_die_ref (die, attr, decl_die);
20766 }
20767
20768 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20769 default. */
20770
20771 static int
20772 lower_bound_default (void)
20773 {
20774 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20775 {
20776 case DW_LANG_C:
20777 case DW_LANG_C89:
20778 case DW_LANG_C99:
20779 case DW_LANG_C11:
20780 case DW_LANG_C_plus_plus:
20781 case DW_LANG_C_plus_plus_11:
20782 case DW_LANG_C_plus_plus_14:
20783 case DW_LANG_ObjC:
20784 case DW_LANG_ObjC_plus_plus:
20785 return 0;
20786 case DW_LANG_Fortran77:
20787 case DW_LANG_Fortran90:
20788 case DW_LANG_Fortran95:
20789 case DW_LANG_Fortran03:
20790 case DW_LANG_Fortran08:
20791 return 1;
20792 case DW_LANG_UPC:
20793 case DW_LANG_D:
20794 case DW_LANG_Python:
20795 return dwarf_version >= 4 ? 0 : -1;
20796 case DW_LANG_Ada95:
20797 case DW_LANG_Ada83:
20798 case DW_LANG_Cobol74:
20799 case DW_LANG_Cobol85:
20800 case DW_LANG_Modula2:
20801 case DW_LANG_PLI:
20802 return dwarf_version >= 4 ? 1 : -1;
20803 default:
20804 return -1;
20805 }
20806 }
20807
20808 /* Given a tree node describing an array bound (either lower or upper) output
20809 a representation for that bound. */
20810
20811 static void
20812 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20813 tree bound, struct loc_descr_context *context)
20814 {
20815 int dflt;
20816
20817 while (1)
20818 switch (TREE_CODE (bound))
20819 {
20820 /* Strip all conversions. */
20821 CASE_CONVERT:
20822 case VIEW_CONVERT_EXPR:
20823 bound = TREE_OPERAND (bound, 0);
20824 break;
20825
20826 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20827 are even omitted when they are the default. */
20828 case INTEGER_CST:
20829 /* If the value for this bound is the default one, we can even omit the
20830 attribute. */
20831 if (bound_attr == DW_AT_lower_bound
20832 && tree_fits_shwi_p (bound)
20833 && (dflt = lower_bound_default ()) != -1
20834 && tree_to_shwi (bound) == dflt)
20835 return;
20836
20837 /* FALLTHRU */
20838
20839 default:
20840 /* Because of the complex interactions there can be with other GNAT
20841 encodings, GDB isn't ready yet to handle a proper DWARF description
20842 for self-referential subrange bounds: let GNAT encodings do the
20843 magic in such a case. */
20844 if (is_ada ()
20845 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20846 && contains_placeholder_p (bound))
20847 return;
20848
20849 add_scalar_info (subrange_die, bound_attr, bound,
20850 dw_scalar_form_constant
20851 | dw_scalar_form_exprloc
20852 | dw_scalar_form_reference,
20853 context);
20854 return;
20855 }
20856 }
20857
20858 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20859 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20860 Note that the block of subscript information for an array type also
20861 includes information about the element type of the given array type.
20862
20863 This function reuses previously set type and bound information if
20864 available. */
20865
20866 static void
20867 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20868 {
20869 unsigned dimension_number;
20870 tree lower, upper;
20871 dw_die_ref child = type_die->die_child;
20872
20873 for (dimension_number = 0;
20874 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20875 type = TREE_TYPE (type), dimension_number++)
20876 {
20877 tree domain = TYPE_DOMAIN (type);
20878
20879 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20880 break;
20881
20882 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20883 and (in GNU C only) variable bounds. Handle all three forms
20884 here. */
20885
20886 /* Find and reuse a previously generated DW_TAG_subrange_type if
20887 available.
20888
20889 For multi-dimensional arrays, as we iterate through the
20890 various dimensions in the enclosing for loop above, we also
20891 	 iterate through the DIE children and pick up each
20892 DW_TAG_subrange_type previously generated (if available).
20893 Each child DW_TAG_subrange_type DIE describes the range of
20894 the current dimension. At this point we should have as many
20895 DW_TAG_subrange_type's as we have dimensions in the
20896 array. */
20897 dw_die_ref subrange_die = NULL;
20898 if (child)
20899 while (1)
20900 {
20901 child = child->die_sib;
20902 if (child->die_tag == DW_TAG_subrange_type)
20903 subrange_die = child;
20904 if (child == type_die->die_child)
20905 {
20906 /* If we wrapped around, stop looking next time. */
20907 child = NULL;
20908 break;
20909 }
20910 if (child->die_tag == DW_TAG_subrange_type)
20911 break;
20912 }
20913 if (!subrange_die)
20914 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20915
20916 if (domain)
20917 {
20918 /* We have an array type with specified bounds. */
20919 lower = TYPE_MIN_VALUE (domain);
20920 upper = TYPE_MAX_VALUE (domain);
20921
20922 /* Define the index type. */
20923 if (TREE_TYPE (domain)
20924 && !get_AT (subrange_die, DW_AT_type))
20925 {
20926 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20927 TREE_TYPE field. We can't emit debug info for this
20928 because it is an unnamed integral type. */
20929 if (TREE_CODE (domain) == INTEGER_TYPE
20930 && TYPE_NAME (domain) == NULL_TREE
20931 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20932 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20933 ;
20934 else
20935 add_type_attribute (subrange_die, TREE_TYPE (domain),
20936 TYPE_UNQUALIFIED, false, type_die);
20937 }
20938
20939 /* ??? If upper is NULL, the array has unspecified length,
20940 but it does have a lower bound. This happens with Fortran
20941 dimension arr(N:*)
20942 Since the debugger is definitely going to need to know N
20943 to produce useful results, go ahead and output the lower
20944 bound solo, and hope the debugger can cope. */
20945
20946 if (!get_AT (subrange_die, DW_AT_lower_bound))
20947 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20948 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20949 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20950 }
20951
20952 /* Otherwise we have an array type with an unspecified length. The
20953 DWARF-2 spec does not say how to handle this; let's just leave out the
20954 bounds. */
20955 }
20956 }
20957
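/* Illustration: with COLLAPSE_P true, a C declaration such as

       int m[2][3];

   is described by a single DW_TAG_array_type DIE with two
   DW_TAG_subrange_type children whose upper bounds are 1 and 2 (lower
   bounds are omitted because 0 is the C default).  With COLLAPSE_P false,
   as for Ada, each dimension keeps its own array type DIE.  */
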
20958 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20959
20960 static void
20961 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20962 {
20963 dw_die_ref decl_die;
20964 HOST_WIDE_INT size;
20965 dw_loc_descr_ref size_expr = NULL;
20966
20967 switch (TREE_CODE (tree_node))
20968 {
20969 case ERROR_MARK:
20970 size = 0;
20971 break;
20972 case ENUMERAL_TYPE:
20973 case RECORD_TYPE:
20974 case UNION_TYPE:
20975 case QUAL_UNION_TYPE:
20976 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20977 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20978 {
20979 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20980 return;
20981 }
20982 size_expr = type_byte_size (tree_node, &size);
20983 break;
20984 case FIELD_DECL:
20985 /* For a data member of a struct or union, the DW_AT_byte_size is
20986 generally given as the number of bytes normally allocated for an
20987 object of the *declared* type of the member itself. This is true
20988 even for bit-fields. */
20989 size = int_size_in_bytes (field_type (tree_node));
20990 break;
20991 default:
20992 gcc_unreachable ();
20993 }
20994
20995 /* Support for dynamically-sized objects was introduced by DWARFv3.
20996 At the moment, GDB does not handle variable byte sizes very well,
20997 though. */
20998 if ((dwarf_version >= 3 || !dwarf_strict)
20999 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21000 && size_expr != NULL)
21001 add_AT_loc (die, DW_AT_byte_size, size_expr);
21002
21003 /* Note that `size' might be -1 when we get to this point. If it is, that
21004 indicates that the byte size of the entity in question is variable and
21005 that we could not generate a DWARF expression that computes it. */
21006 if (size >= 0)
21007 add_AT_unsigned (die, DW_AT_byte_size, size);
21008 }
21009
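/* Illustration (sizes are target-dependent; this is only a sketch): for

       struct s { char c; int i; };

   on a typical target with 4-byte int and 4-byte alignment, the record DIE
   would get DW_AT_byte_size 8 (one byte for 'c', three bytes of padding,
   four bytes for 'i').  When the size is only known at run time, the
   location expression computed by type_byte_size above is used instead,
   provided the DWARF version allows it.  */
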
21010 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21011 alignment. */
21012
21013 static void
21014 add_alignment_attribute (dw_die_ref die, tree tree_node)
21015 {
21016 if (dwarf_version < 5 && dwarf_strict)
21017 return;
21018
21019 unsigned align;
21020
21021 if (DECL_P (tree_node))
21022 {
21023 if (!DECL_USER_ALIGN (tree_node))
21024 return;
21025
21026 align = DECL_ALIGN_UNIT (tree_node);
21027 }
21028 else if (TYPE_P (tree_node))
21029 {
21030 if (!TYPE_USER_ALIGN (tree_node))
21031 return;
21032
21033 align = TYPE_ALIGN_UNIT (tree_node);
21034 }
21035 else
21036 gcc_unreachable ();
21037
21038 add_AT_unsigned (die, DW_AT_alignment, align);
21039 }
21040
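/* Illustration: only explicitly requested alignment is reported here.  For

       struct s { int i; } __attribute__ ((aligned (16)));

   the type DIE would get DW_AT_alignment 16, whereas a plain "struct s"
   relying on the target's natural alignment gets no such attribute.
   DW_AT_alignment is a DWARF 5 addition, hence the dwarf_strict check
   above.  */
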
21041 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21042 which specifies the distance in bits from the highest order bit of the
21043 "containing object" for the bit-field to the highest order bit of the
21044 bit-field itself.
21045
21046 For any given bit-field, the "containing object" is a hypothetical object
21047 (of some integral or enum type) within which the given bit-field lives. The
21048 type of this hypothetical "containing object" is always the same as the
21049 declared type of the individual bit-field itself. The determination of the
21050 exact location of the "containing object" for a bit-field is rather
21051 complicated. It's handled by the `field_byte_offset' function (above).
21052
21053 CTX is required: see the comment for VLR_CONTEXT.
21054
21055 Note that it is the size (in bytes) of the hypothetical "containing object"
21056 which will be given in the DW_AT_byte_size attribute for this bit-field.
21057 (See `byte_size_attribute' above). */
21058
21059 static inline void
21060 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21061 {
21062 HOST_WIDE_INT object_offset_in_bytes;
21063 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21064 HOST_WIDE_INT bitpos_int;
21065 HOST_WIDE_INT highest_order_object_bit_offset;
21066 HOST_WIDE_INT highest_order_field_bit_offset;
21067 HOST_WIDE_INT bit_offset;
21068
21069 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21070
21071 /* Must be a field and a bit field. */
21072 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21073
21074 /* We can't yet handle bit-fields whose offsets are variable, so if we
21075 encounter such things, just return without generating any attribute
21076 whatsoever. Likewise for variable or too large size. */
21077 if (! tree_fits_shwi_p (bit_position (decl))
21078 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21079 return;
21080
21081 bitpos_int = int_bit_position (decl);
21082
21083 /* Note that the bit offset is always the distance (in bits) from the
21084 highest-order bit of the "containing object" to the highest-order bit of
21085 the bit-field itself. Since the "high-order end" of any object or field
21086 is different on big-endian and little-endian machines, the computation
21087 below must take account of these differences. */
21088 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21089 highest_order_field_bit_offset = bitpos_int;
21090
21091 if (! BYTES_BIG_ENDIAN)
21092 {
21093 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21094 highest_order_object_bit_offset +=
21095 simple_type_size_in_bits (original_type);
21096 }
21097
21098 bit_offset
21099 = (! BYTES_BIG_ENDIAN
21100 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21101 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21102
21103 if (bit_offset < 0)
21104 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21105 else
21106 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21107 }
21108
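/* Worked example (assuming a 32-bit containing object and that both fields
   share one container; actual layout is target-dependent):

       struct s { unsigned a : 3; unsigned b : 5; };

   Field 'b' starts at bit position 3 and is 5 bits wide.  On a
   little-endian target the code above computes 0*8 + 32 = 32 for the
   object and 3 + 5 = 8 for the field, giving DW_AT_bit_offset 32 - 8 = 24.
   On a big-endian target the result is simply 3 - 0 = 3.  Either way the
   value is the distance from the most significant bit of the container to
   the most significant bit of 'b', which is how DW_AT_bit_offset is
   defined before DWARF 4.  */
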
21109 /* For a FIELD_DECL node which represents a bit field, output an attribute
21110 which specifies the length in bits of the given field. */
21111
21112 static inline void
21113 add_bit_size_attribute (dw_die_ref die, tree decl)
21114 {
21115 /* Must be a field and a bit field. */
21116 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21117 && DECL_BIT_FIELD_TYPE (decl));
21118
21119 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21120 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21121 }
21122
21123 /* If the compiled language is ANSI C, then add a 'prototyped'
21124    attribute if argument types are given for the parameters of a function.  */
21125
21126 static inline void
21127 add_prototyped_attribute (dw_die_ref die, tree func_type)
21128 {
21129 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21130 {
21131 case DW_LANG_C:
21132 case DW_LANG_C89:
21133 case DW_LANG_C99:
21134 case DW_LANG_C11:
21135 case DW_LANG_ObjC:
21136 if (prototype_p (func_type))
21137 add_AT_flag (die, DW_AT_prototyped, 1);
21138 break;
21139 default:
21140 break;
21141 }
21142 }
21143
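/* Illustration: in C, "int f (void);" is a prototype, so the subprogram DIE
   would get DW_AT_prototyped 1, whereas an old-style declaration "int g ();"
   is unprototyped and gets no such attribute.  Only the C-family languages
   listed above are checked, since the notion of an unprototyped function
   does not apply elsewhere.  */
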
21144 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21145 by looking in the type declaration, the object declaration equate table or
21146 the block mapping. */
21147
21148 static inline dw_die_ref
21149 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21150 {
21151 dw_die_ref origin_die = NULL;
21152
21153 if (DECL_P (origin))
21154 {
21155 dw_die_ref c;
21156 origin_die = lookup_decl_die (origin);
21157       /* "Unwrap" the decl's DIE which we put in the imported unit context.
21158 We are looking for the abstract copy here. */
21159 if (in_lto_p
21160 && origin_die
21161 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21162 /* ??? Identify this better. */
21163 && c->with_offset)
21164 origin_die = c;
21165 }
21166 else if (TYPE_P (origin))
21167 origin_die = lookup_type_die (origin);
21168 else if (TREE_CODE (origin) == BLOCK)
21169 origin_die = BLOCK_DIE (origin);
21170
21171 /* XXX: Functions that are never lowered don't always have correct block
21172      trees (in the case of Java they simply have no block tree; so do some
21173      other languages).  For these functions, there is nothing we can really do to
21174 output correct debug info for inlined functions in all cases. Rather
21175 than die, we'll just produce deficient debug info now, in that we will
21176 have variables without a proper abstract origin. In the future, when all
21177 functions are lowered, we should re-add a gcc_assert (origin_die)
21178 here. */
21179
21180 if (origin_die)
21181 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21182 return origin_die;
21183 }
21184
21185 /* We do not currently support the pure_virtual attribute. */
21186
21187 static inline void
21188 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21189 {
21190 if (DECL_VINDEX (func_decl))
21191 {
21192 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21193
21194 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21195 add_AT_loc (die, DW_AT_vtable_elem_location,
21196 new_loc_descr (DW_OP_constu,
21197 tree_to_shwi (DECL_VINDEX (func_decl)),
21198 0));
21199
21200 /* GNU extension: Record what type this method came from originally. */
21201 if (debug_info_level > DINFO_LEVEL_TERSE
21202 && DECL_CONTEXT (func_decl))
21203 add_AT_die_ref (die, DW_AT_containing_type,
21204 lookup_type_die (DECL_CONTEXT (func_decl)));
21205 }
21206 }
21207 \f
21208 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21209    given decl.  This used to be a vendor extension until DWARF 4
21210    standardized it.  */
21211
21212 static void
21213 add_linkage_attr (dw_die_ref die, tree decl)
21214 {
21215 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21216
21217 /* Mimic what assemble_name_raw does with a leading '*'. */
21218 if (name[0] == '*')
21219 name = &name[1];
21220
21221 if (dwarf_version >= 4)
21222 add_AT_string (die, DW_AT_linkage_name, name);
21223 else
21224 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21225 }
21226
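/* Illustration: for a C++ function "void foo ();" the assembler name is the
   mangled symbol, "_Z3foov" under the common Itanium C++ ABI, and that
   string would be emitted as DW_AT_linkage_name (or DW_AT_MIPS_linkage_name
   before DWARF 4).  A leading '*', which tells the assembler output code to
   skip the user-label prefix, is stripped first as shown above.  */
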
21227 /* Add source coordinate attributes for the given decl. */
21228
21229 static void
21230 add_src_coords_attributes (dw_die_ref die, tree decl)
21231 {
21232 expanded_location s;
21233
21234 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21235 return;
21236 s = expand_location (DECL_SOURCE_LOCATION (decl));
21237 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21238 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21239 if (debug_column_info && s.column)
21240 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21241 }
21242
21243 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21244
21245 static void
21246 add_linkage_name_raw (dw_die_ref die, tree decl)
21247 {
21248 /* Defer until we have an assembler name set. */
21249 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21250 {
21251 limbo_die_node *asm_name;
21252
21253 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21254 asm_name->die = die;
21255 asm_name->created_for = decl;
21256 asm_name->next = deferred_asm_name;
21257 deferred_asm_name = asm_name;
21258 }
21259 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21260 add_linkage_attr (die, decl);
21261 }
21262
21263 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21264
21265 static void
21266 add_linkage_name (dw_die_ref die, tree decl)
21267 {
21268 if (debug_info_level > DINFO_LEVEL_NONE
21269 && VAR_OR_FUNCTION_DECL_P (decl)
21270 && TREE_PUBLIC (decl)
21271 && !(VAR_P (decl) && DECL_REGISTER (decl))
21272 && die->die_tag != DW_TAG_member)
21273 add_linkage_name_raw (die, decl);
21274 }
21275
21276 /* Add a DW_AT_name attribute and source coordinate attribute for the
21277 given decl, but only if it actually has a name. */
21278
21279 static void
21280 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21281 bool no_linkage_name)
21282 {
21283 tree decl_name;
21284
21285 decl_name = DECL_NAME (decl);
21286 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21287 {
21288 const char *name = dwarf2_name (decl, 0);
21289 if (name)
21290 add_name_attribute (die, name);
21291 if (! DECL_ARTIFICIAL (decl))
21292 add_src_coords_attributes (die, decl);
21293
21294 if (!no_linkage_name)
21295 add_linkage_name (die, decl);
21296 }
21297
21298 #ifdef VMS_DEBUGGING_INFO
21299 /* Get the function's name, as described by its RTL. This may be different
21300 from the DECL_NAME name used in the source file. */
21301 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21302 {
21303 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21304 XEXP (DECL_RTL (decl), 0), false);
21305 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21306 }
21307 #endif /* VMS_DEBUGGING_INFO */
21308 }
21309
21310 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21311
21312 static void
21313 add_discr_value (dw_die_ref die, dw_discr_value *value)
21314 {
21315 dw_attr_node attr;
21316
21317 attr.dw_attr = DW_AT_discr_value;
21318 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21319 attr.dw_attr_val.val_entry = NULL;
21320 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21321 if (value->pos)
21322 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21323 else
21324 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21325 add_dwarf_attr (die, &attr);
21326 }
21327
21328 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21329
21330 static void
21331 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21332 {
21333 dw_attr_node attr;
21334
21335 attr.dw_attr = DW_AT_discr_list;
21336 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21337 attr.dw_attr_val.val_entry = NULL;
21338 attr.dw_attr_val.v.val_discr_list = discr_list;
21339 add_dwarf_attr (die, &attr);
21340 }
21341
21342 static inline dw_discr_list_ref
21343 AT_discr_list (dw_attr_node *attr)
21344 {
21345 return attr->dw_attr_val.v.val_discr_list;
21346 }
21347
21348 #ifdef VMS_DEBUGGING_INFO
21349 /* Output the debug main pointer die for VMS.  */
21350
21351 void
21352 dwarf2out_vms_debug_main_pointer (void)
21353 {
21354 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21355 dw_die_ref die;
21356
21357 /* Allocate the VMS debug main subprogram die. */
21358 die = new_die_raw (DW_TAG_subprogram);
21359 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21360 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21361 current_function_funcdef_no);
21362 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21363
21364 /* Make it the first child of comp_unit_die (). */
21365 die->die_parent = comp_unit_die ();
21366 if (comp_unit_die ()->die_child)
21367 {
21368 die->die_sib = comp_unit_die ()->die_child->die_sib;
21369 comp_unit_die ()->die_child->die_sib = die;
21370 }
21371 else
21372 {
21373 die->die_sib = die;
21374 comp_unit_die ()->die_child = die;
21375 }
21376 }
21377 #endif /* VMS_DEBUGGING_INFO */
21378
21379 /* walk_tree helper function for uses_local_type, below. */
21380
21381 static tree
21382 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21383 {
21384 if (!TYPE_P (*tp))
21385 *walk_subtrees = 0;
21386 else
21387 {
21388 tree name = TYPE_NAME (*tp);
21389 if (name && DECL_P (name) && decl_function_context (name))
21390 return *tp;
21391 }
21392 return NULL_TREE;
21393 }
21394
21395 /* If TYPE involves a function-local type (including a local typedef to a
21396 non-local type), returns that type; otherwise returns NULL_TREE. */
21397
21398 static tree
21399 uses_local_type (tree type)
21400 {
21401 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21402 return used;
21403 }
21404
21405 /* Return the DIE for the scope that immediately contains this type.
21406 Non-named types that do not involve a function-local type get global
21407 scope. Named types nested in namespaces or other types get their
21408 containing scope. All other types (i.e. function-local named types) get
21409 the current active scope. */
21410
21411 static dw_die_ref
21412 scope_die_for (tree t, dw_die_ref context_die)
21413 {
21414 dw_die_ref scope_die = NULL;
21415 tree containing_scope;
21416
21417 /* Non-types always go in the current scope. */
21418 gcc_assert (TYPE_P (t));
21419
21420 /* Use the scope of the typedef, rather than the scope of the type
21421 it refers to. */
21422 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21423 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21424 else
21425 containing_scope = TYPE_CONTEXT (t);
21426
21427 /* Use the containing namespace if there is one. */
21428 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21429 {
21430 if (context_die == lookup_decl_die (containing_scope))
21431 /* OK */;
21432 else if (debug_info_level > DINFO_LEVEL_TERSE)
21433 context_die = get_context_die (containing_scope);
21434 else
21435 containing_scope = NULL_TREE;
21436 }
21437
21438 /* Ignore function type "scopes" from the C frontend. They mean that
21439 a tagged type is local to a parmlist of a function declarator, but
21440 that isn't useful to DWARF. */
21441 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21442 containing_scope = NULL_TREE;
21443
21444 if (SCOPE_FILE_SCOPE_P (containing_scope))
21445 {
21446 /* If T uses a local type keep it local as well, to avoid references
21447 to function-local DIEs from outside the function. */
21448 if (current_function_decl && uses_local_type (t))
21449 scope_die = context_die;
21450 else
21451 scope_die = comp_unit_die ();
21452 }
21453 else if (TYPE_P (containing_scope))
21454 {
21455 /* For types, we can just look up the appropriate DIE. */
21456 if (debug_info_level > DINFO_LEVEL_TERSE)
21457 scope_die = get_context_die (containing_scope);
21458 else
21459 {
21460 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21461 if (scope_die == NULL)
21462 scope_die = comp_unit_die ();
21463 }
21464 }
21465 else
21466 scope_die = context_die;
21467
21468 return scope_die;
21469 }
21470
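/* Illustration (a sketch of typical placement): for

       namespace N { struct S { struct T { }; }; }

   the DIE for T is placed under the DIE for S, which in turn sits under the
   DW_TAG_namespace DIE for N.  By contrast, a tag that only appears in a C
   parameter list, as in "void f (struct q *p);", has a FUNCTION_TYPE
   "scope" that is ignored above, so its DIE is emitted at file scope (or in
   the current active scope) instead.  */
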
21471 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21472
21473 static inline int
21474 local_scope_p (dw_die_ref context_die)
21475 {
21476 for (; context_die; context_die = context_die->die_parent)
21477 if (context_die->die_tag == DW_TAG_inlined_subroutine
21478 || context_die->die_tag == DW_TAG_subprogram)
21479 return 1;
21480
21481 return 0;
21482 }
21483
21484 /* Returns nonzero if CONTEXT_DIE is a class. */
21485
21486 static inline int
21487 class_scope_p (dw_die_ref context_die)
21488 {
21489 return (context_die
21490 && (context_die->die_tag == DW_TAG_structure_type
21491 || context_die->die_tag == DW_TAG_class_type
21492 || context_die->die_tag == DW_TAG_interface_type
21493 || context_die->die_tag == DW_TAG_union_type));
21494 }
21495
21496 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21497 whether or not to treat a DIE in this context as a declaration. */
21498
21499 static inline int
21500 class_or_namespace_scope_p (dw_die_ref context_die)
21501 {
21502 return (class_scope_p (context_die)
21503 || (context_die && context_die->die_tag == DW_TAG_namespace));
21504 }
21505
21506 /* Many forms of DIEs require a "type description" attribute. This
21507 routine locates the proper "type descriptor" die for the type given
21508 by 'type' plus any additional qualifiers given by 'cv_quals', and
21509 adds a DW_AT_type attribute below the given die. */
21510
21511 static void
21512 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21513 bool reverse, dw_die_ref context_die)
21514 {
21515 enum tree_code code = TREE_CODE (type);
21516 dw_die_ref type_die = NULL;
21517
21518 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21519 or fixed-point type, use the inner type. This is because we have no
21520 support for unnamed types in base_type_die. This can happen if this is
21521 an Ada subrange type. Correct solution is emit a subrange type die. */
21522 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21523 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21524 type = TREE_TYPE (type), code = TREE_CODE (type);
21525
21526 if (code == ERROR_MARK
21527 /* Handle a special case. For functions whose return type is void, we
21528 generate *no* type attribute. (Note that no object may have type
21529 `void', so this only applies to function return types). */
21530 || code == VOID_TYPE)
21531 return;
21532
21533 type_die = modified_type_die (type,
21534 cv_quals | TYPE_QUALS (type),
21535 reverse,
21536 context_die);
21537
21538 if (type_die != NULL)
21539 add_AT_die_ref (object_die, DW_AT_type, type_die);
21540 }
21541
21542 /* Given an object die, add the calling convention attribute for the
21543 function call type. */
21544 static void
21545 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21546 {
21547 enum dwarf_calling_convention value = DW_CC_normal;
21548
21549 value = ((enum dwarf_calling_convention)
21550 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21551
21552 if (is_fortran ()
21553 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21554 {
21555 /* DWARF 2 doesn't provide a way to identify a program's source-level
21556 entry point. DW_AT_calling_convention attributes are only meant
21557 to describe functions' calling conventions. However, lacking a
21558 better way to signal the Fortran main program, we used this for
21559 a long time, following existing custom. Now, DWARF 4 has
21560 DW_AT_main_subprogram, which we add below, but some tools still
21561 rely on the old way, which we thus keep. */
21562 value = DW_CC_program;
21563
21564 if (dwarf_version >= 4 || !dwarf_strict)
21565 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21566 }
21567
21568   /* Only add the attribute if the backend requests it, and the value
21569 is not DW_CC_normal. */
21570 if (value && (value != DW_CC_normal))
21571 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21572 }
21573
21574 /* Given a tree pointer to a struct, class, union, or enum type node, return
21575 a pointer to the (string) tag name for the given type, or zero if the type
21576 was declared without a tag. */
21577
21578 static const char *
21579 type_tag (const_tree type)
21580 {
21581 const char *name = 0;
21582
21583 if (TYPE_NAME (type) != 0)
21584 {
21585 tree t = 0;
21586
21587 /* Find the IDENTIFIER_NODE for the type name. */
21588 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21589 && !TYPE_NAMELESS (type))
21590 t = TYPE_NAME (type);
21591
21592 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21593 a TYPE_DECL node, regardless of whether or not a `typedef' was
21594 involved. */
21595 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21596 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21597 {
21598 /* We want to be extra verbose. Don't call dwarf_name if
21599 DECL_NAME isn't set. The default hook for decl_printable_name
21600 doesn't like that, and in this context it's correct to return
21601 0, instead of "<anonymous>" or the like. */
21602 if (DECL_NAME (TYPE_NAME (type))
21603 && !DECL_NAMELESS (TYPE_NAME (type)))
21604 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21605 }
21606
21607 /* Now get the name as a string, or invent one. */
21608 if (!name && t != 0)
21609 name = IDENTIFIER_POINTER (t);
21610 }
21611
21612 return (name == 0 || *name == '\0') ? 0 : name;
21613 }
21614
21615 /* Return the type associated with a data member, making a special check
21616    for bit-field types.  */
21617
21618 static inline tree
21619 member_declared_type (const_tree member)
21620 {
21621 return (DECL_BIT_FIELD_TYPE (member)
21622 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21623 }
21624
21625 /* Get the decl's label, as described by its RTL. This may be different
21626 from the DECL_NAME name used in the source file. */
21627
21628 #if 0
21629 static const char *
21630 decl_start_label (tree decl)
21631 {
21632 rtx x;
21633 const char *fnname;
21634
21635 x = DECL_RTL (decl);
21636 gcc_assert (MEM_P (x));
21637
21638 x = XEXP (x, 0);
21639 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21640
21641 fnname = XSTR (x, 0);
21642 return fnname;
21643 }
21644 #endif
21645 \f
21646 /* For variable-length arrays that have been previously generated, but
21647 may be incomplete due to missing subscript info, fill the subscript
21648 info. Return TRUE if this is one of those cases. */
21649 static bool
21650 fill_variable_array_bounds (tree type)
21651 {
21652 if (TREE_ASM_WRITTEN (type)
21653 && TREE_CODE (type) == ARRAY_TYPE
21654 && variably_modified_type_p (type, NULL))
21655 {
21656 dw_die_ref array_die = lookup_type_die (type);
21657 if (!array_die)
21658 return false;
21659 add_subscript_info (array_die, type, !is_ada ());
21660 return true;
21661 }
21662 return false;
21663 }
21664
21665 /* These routines generate the internal representation of the DIE's for
21666 the compilation unit. Debugging information is collected by walking
21667 the declaration trees passed in from dwarf2out_decl(). */
21668
21669 static void
21670 gen_array_type_die (tree type, dw_die_ref context_die)
21671 {
21672 dw_die_ref array_die;
21673
21674 /* GNU compilers represent multidimensional array types as sequences of one
21675 dimensional array types whose element types are themselves array types.
21676 We sometimes squish that down to a single array_type DIE with multiple
21677 subscripts in the Dwarf debugging info. The draft Dwarf specification
21678 	says that we are allowed to do this kind of compression in C, because
21679 	there is no difference between an array of arrays and a multidimensional
21680 	array.  We don't do this for Ada, so as to remain as close as possible
21681 	to the actual representation, which is especially important given the
21682 	language's flexibility with respect to arrays of variable size.  */
21683
21684 bool collapse_nested_arrays = !is_ada ();
21685
21686 if (fill_variable_array_bounds (type))
21687 return;
21688
21689 dw_die_ref scope_die = scope_die_for (type, context_die);
21690 tree element_type;
21691
21692 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21693      DW_TAG_string_type doesn't have a DW_AT_type attribute).  */
21694 if (TYPE_STRING_FLAG (type)
21695 && TREE_CODE (type) == ARRAY_TYPE
21696 && is_fortran ()
21697 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21698 {
21699 HOST_WIDE_INT size;
21700
21701 array_die = new_die (DW_TAG_string_type, scope_die, type);
21702 add_name_attribute (array_die, type_tag (type));
21703 equate_type_number_to_die (type, array_die);
21704 size = int_size_in_bytes (type);
21705 if (size >= 0)
21706 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21707 /* ??? We can't annotate types late, but for LTO we may not
21708 generate a location early either (gfortran.dg/save_6.f90). */
21709 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21710 && TYPE_DOMAIN (type) != NULL_TREE
21711 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21712 {
21713 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21714 tree rszdecl = szdecl;
21715
21716 size = int_size_in_bytes (TREE_TYPE (szdecl));
21717 if (!DECL_P (szdecl))
21718 {
21719 if (TREE_CODE (szdecl) == INDIRECT_REF
21720 && DECL_P (TREE_OPERAND (szdecl, 0)))
21721 {
21722 rszdecl = TREE_OPERAND (szdecl, 0);
21723 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21724 != DWARF2_ADDR_SIZE)
21725 size = 0;
21726 }
21727 else
21728 size = 0;
21729 }
21730 if (size > 0)
21731 {
21732 dw_loc_list_ref loc
21733 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21734 NULL);
21735 if (loc)
21736 {
21737 add_AT_location_description (array_die, DW_AT_string_length,
21738 loc);
21739 if (size != DWARF2_ADDR_SIZE)
21740 add_AT_unsigned (array_die, dwarf_version >= 5
21741 ? DW_AT_string_length_byte_size
21742 : DW_AT_byte_size, size);
21743 }
21744 }
21745 }
21746 return;
21747 }
21748
21749 array_die = new_die (DW_TAG_array_type, scope_die, type);
21750 add_name_attribute (array_die, type_tag (type));
21751 equate_type_number_to_die (type, array_die);
21752
21753 if (TREE_CODE (type) == VECTOR_TYPE)
21754 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21755
21756 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21757 if (is_fortran ()
21758 && TREE_CODE (type) == ARRAY_TYPE
21759 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21760 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21761 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21762
21763 #if 0
21764 /* We default the array ordering. Debuggers will probably do the right
21765 things even if DW_AT_ordering is not present. It's not even an issue
21766 until we start to get into multidimensional arrays anyway. If a debugger
21767 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21768 then we'll have to put the DW_AT_ordering attribute back in. (But if
21769 and when we find out that we need to put these in, we will only do so
21770 for multidimensional arrays. */
21771 	for multidimensional arrays.)  */
21772 #endif
21773
21774 if (TREE_CODE (type) == VECTOR_TYPE)
21775 {
21776 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21777 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21778 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21779 add_bound_info (subrange_die, DW_AT_upper_bound,
21780 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21781 }
21782 else
21783 add_subscript_info (array_die, type, collapse_nested_arrays);
21784
21785 /* Add representation of the type of the elements of this array type and
21786 emit the corresponding DIE if we haven't done it already. */
21787 element_type = TREE_TYPE (type);
21788 if (collapse_nested_arrays)
21789 while (TREE_CODE (element_type) == ARRAY_TYPE)
21790 {
21791 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21792 break;
21793 element_type = TREE_TYPE (element_type);
21794 }
21795
21796 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21797 TREE_CODE (type) == ARRAY_TYPE
21798 && TYPE_REVERSE_STORAGE_ORDER (type),
21799 context_die);
21800
21801 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21802 if (TYPE_ARTIFICIAL (type))
21803 add_AT_flag (array_die, DW_AT_artificial, 1);
21804
21805 if (get_AT (array_die, DW_AT_name))
21806 add_pubtype (type, array_die);
21807
21808 add_alignment_attribute (array_die, type);
21809 }
21810
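/* Illustration: a GCC vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   is emitted by the routine above as a DW_TAG_array_type carrying the
   DW_AT_GNU_vector flag, with a single subrange whose bounds are 0 and 3
   (TYPE_VECTOR_SUBPARTS - 1).  A Fortran CHARACTER type of kind 1 instead
   takes the DW_TAG_string_type path near the top of the function.  */
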
21811 /* This routine generates a DIE for an array with a hidden descriptor;
21812    the details are filled into *info by a langhook.  */
21813
21814 static void
21815 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21816 dw_die_ref context_die)
21817 {
21818 const dw_die_ref scope_die = scope_die_for (type, context_die);
21819 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21820 struct loc_descr_context context = { type, info->base_decl, NULL,
21821 false, false };
21822 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21823 int dim;
21824
21825 add_name_attribute (array_die, type_tag (type));
21826 equate_type_number_to_die (type, array_die);
21827
21828 if (info->ndimensions > 1)
21829 switch (info->ordering)
21830 {
21831 case array_descr_ordering_row_major:
21832 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21833 break;
21834 case array_descr_ordering_column_major:
21835 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21836 break;
21837 default:
21838 break;
21839 }
21840
21841 if (dwarf_version >= 3 || !dwarf_strict)
21842 {
21843 if (info->data_location)
21844 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21845 dw_scalar_form_exprloc, &context);
21846 if (info->associated)
21847 add_scalar_info (array_die, DW_AT_associated, info->associated,
21848 dw_scalar_form_constant
21849 | dw_scalar_form_exprloc
21850 | dw_scalar_form_reference, &context);
21851 if (info->allocated)
21852 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21853 dw_scalar_form_constant
21854 | dw_scalar_form_exprloc
21855 | dw_scalar_form_reference, &context);
21856 if (info->stride)
21857 {
21858 const enum dwarf_attribute attr
21859 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21860 const int forms
21861 = (info->stride_in_bits)
21862 ? dw_scalar_form_constant
21863 : (dw_scalar_form_constant
21864 | dw_scalar_form_exprloc
21865 | dw_scalar_form_reference);
21866
21867 add_scalar_info (array_die, attr, info->stride, forms, &context);
21868 }
21869 }
21870 if (dwarf_version >= 5)
21871 {
21872 if (info->rank)
21873 {
21874 add_scalar_info (array_die, DW_AT_rank, info->rank,
21875 dw_scalar_form_constant
21876 | dw_scalar_form_exprloc, &context);
21877 subrange_tag = DW_TAG_generic_subrange;
21878 context.placeholder_arg = true;
21879 }
21880 }
21881
21882 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21883
21884 for (dim = 0; dim < info->ndimensions; dim++)
21885 {
21886 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21887
21888 if (info->dimen[dim].bounds_type)
21889 add_type_attribute (subrange_die,
21890 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21891 false, context_die);
21892 if (info->dimen[dim].lower_bound)
21893 add_bound_info (subrange_die, DW_AT_lower_bound,
21894 info->dimen[dim].lower_bound, &context);
21895 if (info->dimen[dim].upper_bound)
21896 add_bound_info (subrange_die, DW_AT_upper_bound,
21897 info->dimen[dim].upper_bound, &context);
21898 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21899 add_scalar_info (subrange_die, DW_AT_byte_stride,
21900 info->dimen[dim].stride,
21901 dw_scalar_form_constant
21902 | dw_scalar_form_exprloc
21903 | dw_scalar_form_reference,
21904 &context);
21905 }
21906
21907 gen_type_die (info->element_type, context_die);
21908 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21909 TREE_CODE (type) == ARRAY_TYPE
21910 && TYPE_REVERSE_STORAGE_ORDER (type),
21911 context_die);
21912
21913 if (get_AT (array_die, DW_AT_name))
21914 add_pubtype (type, array_die);
21915
21916 add_alignment_attribute (array_die, type);
21917 }
21918
21919 #if 0
21920 static void
21921 gen_entry_point_die (tree decl, dw_die_ref context_die)
21922 {
21923 tree origin = decl_ultimate_origin (decl);
21924 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21925
21926 if (origin != NULL)
21927 add_abstract_origin_attribute (decl_die, origin);
21928 else
21929 {
21930 add_name_and_src_coords_attributes (decl_die, decl);
21931 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21932 TYPE_UNQUALIFIED, false, context_die);
21933 }
21934
21935 if (DECL_ABSTRACT_P (decl))
21936 equate_decl_number_to_die (decl, decl_die);
21937 else
21938 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21939 }
21940 #endif
21941
21942 /* Walk through the list of incomplete types again, trying once more to
21943 emit full debugging info for them. */
21944
21945 static void
21946 retry_incomplete_types (void)
21947 {
21948 set_early_dwarf s;
21949 int i;
21950
21951 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21952 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21953 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21954 vec_safe_truncate (incomplete_types, 0);
21955 }
21956
21957 /* Determine what tag to use for a record type. */
21958
21959 static enum dwarf_tag
21960 record_type_tag (tree type)
21961 {
21962 if (! lang_hooks.types.classify_record)
21963 return DW_TAG_structure_type;
21964
21965 switch (lang_hooks.types.classify_record (type))
21966 {
21967 case RECORD_IS_STRUCT:
21968 return DW_TAG_structure_type;
21969
21970 case RECORD_IS_CLASS:
21971 return DW_TAG_class_type;
21972
21973 case RECORD_IS_INTERFACE:
21974 if (dwarf_version >= 3 || !dwarf_strict)
21975 return DW_TAG_interface_type;
21976 return DW_TAG_structure_type;
21977
21978 default:
21979 gcc_unreachable ();
21980 }
21981 }
21982
21983 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21984 include all of the information about the enumeration values also. Each
21985 enumerated type name/value is listed as a child of the enumerated type
21986 DIE. */
21987
21988 static dw_die_ref
21989 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21990 {
21991 dw_die_ref type_die = lookup_type_die (type);
21992 dw_die_ref orig_type_die = type_die;
21993
21994 if (type_die == NULL)
21995 {
21996 type_die = new_die (DW_TAG_enumeration_type,
21997 scope_die_for (type, context_die), type);
21998 equate_type_number_to_die (type, type_die);
21999 add_name_attribute (type_die, type_tag (type));
22000 if ((dwarf_version >= 4 || !dwarf_strict)
22001 && ENUM_IS_SCOPED (type))
22002 add_AT_flag (type_die, DW_AT_enum_class, 1);
22003 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22004 add_AT_flag (type_die, DW_AT_declaration, 1);
22005 if (!dwarf_strict)
22006 add_AT_unsigned (type_die, DW_AT_encoding,
22007 TYPE_UNSIGNED (type)
22008 ? DW_ATE_unsigned
22009 : DW_ATE_signed);
22010 }
22011 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22012 return type_die;
22013 else
22014 remove_AT (type_die, DW_AT_declaration);
22015
22016 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22017 given enum type is incomplete, do not generate the DW_AT_byte_size
22018 attribute or the DW_AT_element_list attribute. */
22019 if (TYPE_SIZE (type))
22020 {
22021 tree link;
22022
22023 if (!ENUM_IS_OPAQUE (type))
22024 TREE_ASM_WRITTEN (type) = 1;
22025 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22026 add_byte_size_attribute (type_die, type);
22027 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22028 add_alignment_attribute (type_die, type);
22029 if ((dwarf_version >= 3 || !dwarf_strict)
22030 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22031 {
22032 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22033 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22034 context_die);
22035 }
22036 if (TYPE_STUB_DECL (type) != NULL_TREE)
22037 {
22038 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22039 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22040 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22041 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22042 }
22043
22044 /* If the first reference to this type was as the return type of an
22045 inline function, then it may not have a parent. Fix this now. */
22046 if (type_die->die_parent == NULL)
22047 add_child_die (scope_die_for (type, context_die), type_die);
22048
22049 for (link = TYPE_VALUES (type);
22050 link != NULL; link = TREE_CHAIN (link))
22051 {
22052 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22053 tree value = TREE_VALUE (link);
22054
22055 gcc_assert (!ENUM_IS_OPAQUE (type));
22056 add_name_attribute (enum_die,
22057 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22058
22059 if (TREE_CODE (value) == CONST_DECL)
22060 value = DECL_INITIAL (value);
22061
22062 if (simple_type_size_in_bits (TREE_TYPE (value))
22063 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22064 {
22065 /* For constant forms created by add_AT_unsigned DWARF
22066 consumers (GDB, elfutils, etc.) always zero extend
22067 the value. Only when the actual value is negative
22068 do we need to use add_AT_int to generate a constant
22069 form that can represent negative values. */
22070 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22071 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22072 add_AT_unsigned (enum_die, DW_AT_const_value,
22073 (unsigned HOST_WIDE_INT) val);
22074 else
22075 add_AT_int (enum_die, DW_AT_const_value, val);
22076 }
22077 else
22078 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22079 that here. TODO: This should be re-worked to use correct
22080 signed/unsigned double tags for all cases. */
22081 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22082 }
22083
22084 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22085 if (TYPE_ARTIFICIAL (type)
22086 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22087 add_AT_flag (type_die, DW_AT_artificial, 1);
22088 }
22089 else
22090 add_AT_flag (type_die, DW_AT_declaration, 1);
22091
22092 add_pubtype (type, type_die);
22093
22094 return type_die;
22095 }
22096
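/* Illustration: for

       enum color { RED = 1, BLUE = 2 };

   the routine above produces a DW_TAG_enumeration_type DIE whose children
   are DW_TAG_enumerator DIEs named "RED" and "BLUE" with DW_AT_const_value
   1 and 2.  A C++11 scoped enumeration ("enum class") additionally gets
   DW_AT_enum_class when DWARF 4 or later (or non-strict DWARF) is in
   effect, and an incomplete enum is only marked with DW_AT_declaration.  */
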
22097 /* Generate a DIE to represent either a real live formal parameter decl or to
22098 represent just the type of some formal parameter position in some function
22099 type.
22100
22101 Note that this routine is a bit unusual because its argument may be a
22102 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22103 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22104 node. If it's the former then this function is being called to output a
22105 DIE to represent a formal parameter object (or some inlining thereof). If
22106 it's the latter, then this function is only being called to output a
22107 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22108 argument type of some subprogram type.
22109 If EMIT_NAME_P is true, name and source coordinate attributes
22110 are emitted. */
22111
22112 static dw_die_ref
22113 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22114 dw_die_ref context_die)
22115 {
22116 tree node_or_origin = node ? node : origin;
22117 tree ultimate_origin;
22118 dw_die_ref parm_die = NULL;
22119
22120 if (DECL_P (node_or_origin))
22121 {
22122 parm_die = lookup_decl_die (node);
22123
22124 /* If the contexts differ, we may not be talking about the same
22125 thing.
22126 ??? When in LTO the DIE parent is the "abstract" copy and the
22127 context_die is the specification "copy". But this whole block
22128 should eventually be no longer needed. */
22129 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22130 {
22131 if (!DECL_ABSTRACT_P (node))
22132 {
22133 /* This can happen when creating an inlined instance, in
22134 which case we need to create a new DIE that will get
22135 annotated with DW_AT_abstract_origin. */
22136 parm_die = NULL;
22137 }
22138 else
22139 gcc_unreachable ();
22140 }
22141
22142 if (parm_die && parm_die->die_parent == NULL)
22143 {
22144 /* Check that parm_die already has the right attributes that
22145 we would have added below. If any attributes are
22146 missing, fall through to add them. */
22147 if (! DECL_ABSTRACT_P (node_or_origin)
22148 && !get_AT (parm_die, DW_AT_location)
22149 && !get_AT (parm_die, DW_AT_const_value))
22150 /* We are missing location info, and are about to add it. */
22151 ;
22152 else
22153 {
22154 add_child_die (context_die, parm_die);
22155 return parm_die;
22156 }
22157 }
22158 }
22159
22160   /* If we have a previously generated DIE, use it, unless this is a
22161 concrete instance (origin != NULL), in which case we need a new
22162 DIE with a corresponding DW_AT_abstract_origin. */
22163 bool reusing_die;
22164 if (parm_die && origin == NULL)
22165 reusing_die = true;
22166 else
22167 {
22168 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22169 reusing_die = false;
22170 }
22171
22172 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22173 {
22174 case tcc_declaration:
22175 ultimate_origin = decl_ultimate_origin (node_or_origin);
22176 if (node || ultimate_origin)
22177 origin = ultimate_origin;
22178
22179 if (reusing_die)
22180 goto add_location;
22181
22182 if (origin != NULL)
22183 add_abstract_origin_attribute (parm_die, origin);
22184 else if (emit_name_p)
22185 add_name_and_src_coords_attributes (parm_die, node);
22186 if (origin == NULL
22187 || (! DECL_ABSTRACT_P (node_or_origin)
22188 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22189 decl_function_context
22190 (node_or_origin))))
22191 {
22192 tree type = TREE_TYPE (node_or_origin);
22193 if (decl_by_reference_p (node_or_origin))
22194 add_type_attribute (parm_die, TREE_TYPE (type),
22195 TYPE_UNQUALIFIED,
22196 false, context_die);
22197 else
22198 add_type_attribute (parm_die, type,
22199 decl_quals (node_or_origin),
22200 false, context_die);
22201 }
22202 if (origin == NULL && DECL_ARTIFICIAL (node))
22203 add_AT_flag (parm_die, DW_AT_artificial, 1);
22204 add_location:
22205 if (node && node != origin)
22206 equate_decl_number_to_die (node, parm_die);
22207 if (! DECL_ABSTRACT_P (node_or_origin))
22208 add_location_or_const_value_attribute (parm_die, node_or_origin,
22209 node == NULL);
22210
22211 break;
22212
22213 case tcc_type:
22214 /* We were called with some kind of a ..._TYPE node. */
22215 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22216 context_die);
22217 break;
22218
22219 default:
22220 gcc_unreachable ();
22221 }
22222
22223 return parm_die;
22224 }
22225
22226 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22227 children DW_TAG_formal_parameter DIEs representing the arguments of the
22228 parameter pack.
22229
22230 PARM_PACK must be a function parameter pack.
22231 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22232 must point to the subsequent arguments of the function PACK_ARG belongs to.
22233 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22234 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22235 following the last one for which a DIE was generated. */
22236
22237 static dw_die_ref
22238 gen_formal_parameter_pack_die (tree parm_pack,
22239 tree pack_arg,
22240 dw_die_ref subr_die,
22241 tree *next_arg)
22242 {
22243 tree arg;
22244 dw_die_ref parm_pack_die;
22245
22246 gcc_assert (parm_pack
22247 && lang_hooks.function_parameter_pack_p (parm_pack)
22248 && subr_die);
22249
22250 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22251 add_src_coords_attributes (parm_pack_die, parm_pack);
22252
22253 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22254 {
22255 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22256 parm_pack))
22257 break;
22258 gen_formal_parameter_die (arg, NULL,
22259 false /* Don't emit name attribute. */,
22260 parm_pack_die);
22261 }
22262 if (next_arg)
22263 *next_arg = arg;
22264 return parm_pack_die;
22265 }
22266
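/* Illustration: for a C++ variadic function template
   "template <typename... T> void f (T... args)" instantiated as
   f<int, char>, the pack "args" would be represented by one
   DW_TAG_GNU_formal_parameter_pack DIE (a GNU extension) containing two
   child DW_TAG_formal_parameter DIEs, one per expanded argument.  */
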
22267 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22268 at the end of an (ANSI prototyped) formal parameters list. */
22269
22270 static void
22271 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22272 {
22273 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22274 }
22275
22276 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22277 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22278 parameters as specified in some function type specification (except for
22279 those which appear as part of a function *definition*). */
22280
22281 static void
22282 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22283 {
22284 tree link;
22285 tree formal_type = NULL;
22286 tree first_parm_type;
22287 tree arg;
22288
22289 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22290 {
22291 arg = DECL_ARGUMENTS (function_or_method_type);
22292 function_or_method_type = TREE_TYPE (function_or_method_type);
22293 }
22294 else
22295 arg = NULL_TREE;
22296
22297 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22298
22299 /* Make our first pass over the list of formal parameter types and output a
22300 DW_TAG_formal_parameter DIE for each one. */
22301 for (link = first_parm_type; link; )
22302 {
22303 dw_die_ref parm_die;
22304
22305 formal_type = TREE_VALUE (link);
22306 if (formal_type == void_type_node)
22307 break;
22308
22309 /* Output a (nameless) DIE to represent the formal parameter itself. */
22310 parm_die = gen_formal_parameter_die (formal_type, NULL,
22311 true /* Emit name attribute. */,
22312 context_die);
22313 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22314 && link == first_parm_type)
22315 {
22316 add_AT_flag (parm_die, DW_AT_artificial, 1);
22317 if (dwarf_version >= 3 || !dwarf_strict)
22318 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22319 }
22320 else if (arg && DECL_ARTIFICIAL (arg))
22321 add_AT_flag (parm_die, DW_AT_artificial, 1);
22322
22323 link = TREE_CHAIN (link);
22324 if (arg)
22325 arg = DECL_CHAIN (arg);
22326 }
22327
22328 /* If this function type has an ellipsis, add a
22329 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22330 if (formal_type != void_type_node)
22331 gen_unspecified_parameters_die (function_or_method_type, context_die);
22332
22333 /* Make our second (and final) pass over the list of formal parameter types
22334 and output DIEs to represent those types (as necessary). */
22335 for (link = TYPE_ARG_TYPES (function_or_method_type);
22336 link && TREE_VALUE (link);
22337 link = TREE_CHAIN (link))
22338 gen_type_die (TREE_VALUE (link), context_die);
22339 }
22340
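/* Illustration: for a C++ method type such as "void C::f (int)", the first
   nameless DW_TAG_formal_parameter generated above stands for the implicit
   "this" pointer; it is flagged DW_AT_artificial and, for DWARF 3 or later
   (or non-strict DWARF), referenced from the context DIE via
   DW_AT_object_pointer.  A C type such as "int (*) (const char *, ...)"
   additionally ends its parameter list with a DW_TAG_unspecified_parameters
   DIE for the trailing ellipsis.  */
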
22341 /* We want to generate the DIE for TYPE so that we can generate the
22342 die for MEMBER, which has been defined; we will need to refer back
22343 to the member declaration nested within TYPE. If we're trying to
22344 generate minimal debug info for TYPE, processing TYPE won't do the
22345 trick; we need to attach the member declaration by hand. */
22346
22347 static void
22348 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22349 {
22350 gen_type_die (type, context_die);
22351
22352 /* If we're trying to avoid duplicate debug info, we may not have
22353 emitted the member decl for this function. Emit it now. */
22354 if (TYPE_STUB_DECL (type)
22355 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22356 && ! lookup_decl_die (member))
22357 {
22358 dw_die_ref type_die;
22359 gcc_assert (!decl_ultimate_origin (member));
22360
22361 type_die = lookup_type_die_strip_naming_typedef (type);
22362 if (TREE_CODE (member) == FUNCTION_DECL)
22363 gen_subprogram_die (member, type_die);
22364 else if (TREE_CODE (member) == FIELD_DECL)
22365 {
22366 /* Ignore the nameless fields that are used to skip bits but handle
22367 C++ anonymous unions and structs. */
22368 if (DECL_NAME (member) != NULL_TREE
22369 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22370 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22371 {
22372 struct vlr_context vlr_ctx = {
22373 DECL_CONTEXT (member), /* struct_type */
22374 NULL_TREE /* variant_part_offset */
22375 };
22376 gen_type_die (member_declared_type (member), type_die);
22377 gen_field_die (member, &vlr_ctx, type_die);
22378 }
22379 }
22380 else
22381 gen_variable_die (member, NULL_TREE, type_die);
22382 }
22383 }
22384 \f
22385 /* Forward declare these functions, because they are mutually recursive
22386 with their set_block_* pairing functions. */
22387 static void set_decl_origin_self (tree);
22388
22389 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22390 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22391 that it points to the node itself, thus indicating that the node is its
22392 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22393 the given node is NULL, recursively descend the decl/block tree which
22394 it is the root of, and for each other ..._DECL or BLOCK node contained
22395 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22396 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22397 values to point to themselves. */
22398
22399 static void
22400 set_block_origin_self (tree stmt)
22401 {
22402 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22403 {
22404 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22405
22406 {
22407 tree local_decl;
22408
22409 for (local_decl = BLOCK_VARS (stmt);
22410 local_decl != NULL_TREE;
22411 local_decl = DECL_CHAIN (local_decl))
22412 /* Do not recurse on nested functions since the inlining status
22413 of parent and child can be different as per the DWARF spec. */
22414 if (TREE_CODE (local_decl) != FUNCTION_DECL
22415 && !DECL_EXTERNAL (local_decl))
22416 set_decl_origin_self (local_decl);
22417 }
22418
22419 {
22420 tree subblock;
22421
22422 for (subblock = BLOCK_SUBBLOCKS (stmt);
22423 subblock != NULL_TREE;
22424 subblock = BLOCK_CHAIN (subblock))
22425 set_block_origin_self (subblock); /* Recurse. */
22426 }
22427 }
22428 }
22429
22430 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22431 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22432    node so that it points to the node itself, thus indicating that the
22433 node represents its own (abstract) origin. Additionally, if the
22434 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22435    the decl/block tree of which the given node is the root, and for
22436 each other ..._DECL or BLOCK node contained therein whose
22437 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22438 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22439 point to themselves. */
22440
22441 static void
22442 set_decl_origin_self (tree decl)
22443 {
22444 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22445 {
22446 DECL_ABSTRACT_ORIGIN (decl) = decl;
22447 if (TREE_CODE (decl) == FUNCTION_DECL)
22448 {
22449 tree arg;
22450
22451 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22452 DECL_ABSTRACT_ORIGIN (arg) = arg;
22453 if (DECL_INITIAL (decl) != NULL_TREE
22454 && DECL_INITIAL (decl) != error_mark_node)
22455 set_block_origin_self (DECL_INITIAL (decl));
22456 }
22457 }
22458 }
22459 \f
22460 /* Mark the early DIE for DECL as the abstract instance. */
22461
22462 static void
22463 dwarf2out_abstract_function (tree decl)
22464 {
22465 dw_die_ref old_die;
22466
22467 /* Make sure we have the actual abstract inline, not a clone. */
22468 decl = DECL_ORIGIN (decl);
22469
22470 if (DECL_IGNORED_P (decl))
22471 return;
22472
22473 old_die = lookup_decl_die (decl);
22474 /* With early debug we always have an old DIE unless we are in LTO
22475      and the user compiled without debug info but only linked with -g.  */
22476 if (in_lto_p && ! old_die)
22477 return;
22478 gcc_assert (old_die != NULL);
22479 if (get_AT (old_die, DW_AT_inline)
22480 || get_AT (old_die, DW_AT_abstract_origin))
22481 /* We've already generated the abstract instance. */
22482 return;
22483
22484 /* Go ahead and put DW_AT_inline on the DIE. */
22485 if (DECL_DECLARED_INLINE_P (decl))
22486 {
22487 if (cgraph_function_possibly_inlined_p (decl))
22488 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22489 else
22490 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22491 }
22492 else
22493 {
22494 if (cgraph_function_possibly_inlined_p (decl))
22495 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22496 else
22497 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22498 }
22499
22500 if (DECL_DECLARED_INLINE_P (decl)
22501 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22502 add_AT_flag (old_die, DW_AT_artificial, 1);
22503
22504 set_decl_origin_self (decl);
22505 }
22506
22507 /* Helper function of premark_used_types() which gets called through
22508    hash_set::traverse.
22509
22510 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22511 marked as unused by prune_unused_types. */
22512
22513 bool
22514 premark_used_types_helper (tree const &type, void *)
22515 {
22516 dw_die_ref die;
22517
22518 die = lookup_type_die (type);
22519 if (die != NULL)
22520 die->die_perennial_p = 1;
22521 return true;
22522 }
22523
22524 /* Helper function of premark_types_used_by_global_vars which gets called
22525    through hash_table::traverse.
22526
22527 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22528 marked as unused by prune_unused_types. The DIE of the type is marked
22529 only if the global variable using the type will actually be emitted. */
22530
22531 int
22532 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22533 void *)
22534 {
22535 struct types_used_by_vars_entry *entry;
22536 dw_die_ref die;
22537
22538 entry = (struct types_used_by_vars_entry *) *slot;
22539 gcc_assert (entry->type != NULL
22540 && entry->var_decl != NULL);
22541 die = lookup_type_die (entry->type);
22542 if (die)
22543 {
22544 /* Ask cgraph if the global variable really is to be emitted.
22545 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22546 varpool_node *node = varpool_node::get (entry->var_decl);
22547 if (node && node->definition)
22548 {
22549 die->die_perennial_p = 1;
22550 /* Keep the parent DIEs as well. */
22551 while ((die = die->die_parent) && die->die_perennial_p == 0)
22552 die->die_perennial_p = 1;
22553 }
22554 }
22555 return 1;
22556 }
22557
22558 /* Mark all members of used_types_hash as perennial. */
22559
22560 static void
22561 premark_used_types (struct function *fun)
22562 {
22563 if (fun && fun->used_types_hash)
22564 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22565 }
22566
22567 /* Mark all members of types_used_by_vars_hash as perennial.  */
22568
22569 static void
22570 premark_types_used_by_global_vars (void)
22571 {
22572 if (types_used_by_vars_hash)
22573 types_used_by_vars_hash
22574 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22575 }
22576
22577 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22578 for CA_LOC call arg loc node. */
22579
22580 static dw_die_ref
22581 gen_call_site_die (tree decl, dw_die_ref subr_die,
22582 struct call_arg_loc_node *ca_loc)
22583 {
22584 dw_die_ref stmt_die = NULL, die;
22585 tree block = ca_loc->block;
22586
22587 while (block
22588 && block != DECL_INITIAL (decl)
22589 && TREE_CODE (block) == BLOCK)
22590 {
22591 stmt_die = BLOCK_DIE (block);
22592 if (stmt_die)
22593 break;
22594 block = BLOCK_SUPERCONTEXT (block);
22595 }
22596 if (stmt_die == NULL)
22597 stmt_die = subr_die;
22598 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22599 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22600 if (ca_loc->tail_call_p)
22601 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22602 if (ca_loc->symbol_ref)
22603 {
22604 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22605 if (tdie)
22606 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22607 else
22608 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22609 false);
22610 }
22611 return die;
22612 }
22613
22614 /* Generate a DIE to represent a declared function (either file-scope or
22615 block-local). */
22616
22617 static void
22618 gen_subprogram_die (tree decl, dw_die_ref context_die)
22619 {
22620 tree origin = decl_ultimate_origin (decl);
22621 dw_die_ref subr_die;
22622 dw_die_ref old_die = lookup_decl_die (decl);
22623
22624 /* This function gets called multiple times for different stages of
22625 the debug process. For example, for func() in this code:
22626
22627 namespace S
22628 {
22629 void func() { ... }
22630 }
22631
22632 ...we get called 4 times. Twice in early debug and twice in
22633 late debug:
22634
22635 Early debug
22636 -----------
22637
22638 1. Once while generating func() within the namespace. This is
22639 the declaration. The declaration bit below is set, as the
22640 context is the namespace.
22641
22642 A new DIE will be generated with DW_AT_declaration set.
22643
22644 2. Once for func() itself. This is the specification. The
22645 declaration bit below is clear as the context is the CU.
22646
22647 We will use the cached DIE from (1) to create a new DIE with
22648 DW_AT_specification pointing to the declaration in (1).
22649
22650 Late debug via rest_of_handle_final()
22651 -------------------------------------
22652
22653      3. Once while generating func() within the namespace.  This is also the
22654 declaration, as in (1), but this time we will early exit below
22655 as we have a cached DIE and a declaration needs no additional
22656 annotations (no locations), as the source declaration line
22657 info is enough.
22658
22659 4. Once for func() itself. As in (2), this is the specification,
22660 but this time we will re-use the cached DIE, and just annotate
22661 it with the location information that should now be available.
22662
22663 For something without namespaces, but with abstract instances, we
22664    are also called multiple times:
22665
22666 class Base
22667 {
22668 public:
22669 Base (); // constructor declaration (1)
22670 };
22671
22672 Base::Base () { } // constructor specification (2)
22673
22674 Early debug
22675 -----------
22676
22677 1. Once for the Base() constructor by virtue of it being a
22678 member of the Base class. This is done via
22679 rest_of_type_compilation.
22680
22681 This is a declaration, so a new DIE will be created with
22682 DW_AT_declaration.
22683
22684 2. Once for the Base() constructor definition, but this time
22685 while generating the abstract instance of the base
22686 constructor (__base_ctor) which is being generated via early
22687 debug of reachable functions.
22688
22689 Even though we have a cached version of the declaration (1),
22690 we will create a DW_AT_specification of the declaration DIE
22691 in (1).
22692
22693 3. Once for the __base_ctor itself, but this time, we generate
22694      a DW_AT_abstract_origin version of the DW_AT_specification in
22695 (2).
22696
22697 Late debug via rest_of_handle_final
22698 -----------------------------------
22699
22700 4. One final time for the __base_ctor (which will have a cached
22701      DIE with DW_AT_abstract_origin created in (3)).  This time,
22702 we will just annotate the location information now
22703 available.
22704 */
22705 int declaration = (current_function_decl != decl
22706 || class_or_namespace_scope_p (context_die));
22707
22708 /* A declaration that has been previously dumped needs no
22709 additional information. */
22710 if (old_die && declaration)
22711 return;
22712
22713 /* Now that the C++ front end lazily declares artificial member fns, we
22714 might need to retrofit the declaration into its class. */
22715 if (!declaration && !origin && !old_die
22716 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22717 && !class_or_namespace_scope_p (context_die)
22718 && debug_info_level > DINFO_LEVEL_TERSE)
22719 old_die = force_decl_die (decl);
22720
22721 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22722 if (origin != NULL)
22723 {
22724 gcc_assert (!declaration || local_scope_p (context_die));
22725
22726 /* Fixup die_parent for the abstract instance of a nested
22727 inline function. */
22728 if (old_die && old_die->die_parent == NULL)
22729 add_child_die (context_die, old_die);
22730
22731 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22732 {
22733 /* If we have a DW_AT_abstract_origin we have a working
22734 cached version. */
22735 subr_die = old_die;
22736 }
22737 else
22738 {
22739 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22740 add_abstract_origin_attribute (subr_die, origin);
22741 /* This is where the actual code for a cloned function is.
22742 Let's emit linkage name attribute for it. This helps
22743	     debuggers to, e.g., set breakpoints into
22744 constructors/destructors when the user asks "break
22745 K::K". */
22746 add_linkage_name (subr_die, decl);
22747 }
22748 }
22749 /* A cached copy, possibly from early dwarf generation. Reuse as
22750 much as possible. */
22751 else if (old_die)
22752 {
22753 if (!get_AT_flag (old_die, DW_AT_declaration)
22754 /* We can have a normal definition following an inline one in the
22755 case of redefinition of GNU C extern inlines.
22756 It seems reasonable to use AT_specification in this case. */
22757 && !get_AT (old_die, DW_AT_inline))
22758 {
22759 /* Detect and ignore this case, where we are trying to output
22760 something we have already output. */
22761 if (get_AT (old_die, DW_AT_low_pc)
22762 || get_AT (old_die, DW_AT_ranges))
22763 return;
22764
22765 /* If we have no location information, this must be a
22766 partially generated DIE from early dwarf generation.
22767 Fall through and generate it. */
22768 }
22769
22770 /* If the definition comes from the same place as the declaration,
22771 maybe use the old DIE. We always want the DIE for this function
22772 that has the *_pc attributes to be under comp_unit_die so the
22773 debugger can find it. We also need to do this for abstract
22774 instances of inlines, since the spec requires the out-of-line copy
22775 to have the same parent. For local class methods, this doesn't
22776 apply; we just use the old DIE. */
22777 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22778 struct dwarf_file_data * file_index = lookup_filename (s.file);
22779 if (((is_unit_die (old_die->die_parent)
22780 /* This condition fixes the inconsistency/ICE with the
22781 following Fortran test (or some derivative thereof) while
22782 building libgfortran:
22783
22784 module some_m
22785 contains
22786 logical function funky (FLAG)
22787 funky = .true.
22788 end function
22789 end module
22790 */
22791 || (old_die->die_parent
22792 && old_die->die_parent->die_tag == DW_TAG_module)
22793 || local_scope_p (old_die->die_parent)
22794 || context_die == NULL)
22795 && (DECL_ARTIFICIAL (decl)
22796 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22797 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22798 == (unsigned) s.line)
22799 && (!debug_column_info
22800 || s.column == 0
22801 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22802 == (unsigned) s.column)))))
22803 /* With LTO if there's an abstract instance for
22804 the old DIE, this is a concrete instance and
22805 thus re-use the DIE. */
22806 || get_AT (old_die, DW_AT_abstract_origin))
22807 {
22808 subr_die = old_die;
22809
22810 /* Clear out the declaration attribute, but leave the
22811 parameters so they can be augmented with location
22812 information later. Unless this was a declaration, in
22813 which case, wipe out the nameless parameters and recreate
22814 them further down. */
22815 if (remove_AT (subr_die, DW_AT_declaration))
22816 {
22817
22818 remove_AT (subr_die, DW_AT_object_pointer);
22819 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22820 }
22821 }
22822 /* Make a specification pointing to the previously built
22823 declaration. */
22824 else
22825 {
22826 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22827 add_AT_specification (subr_die, old_die);
22828 add_pubname (decl, subr_die);
22829 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22830 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22831 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22832 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22833 if (debug_column_info
22834 && s.column
22835 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22836 != (unsigned) s.column))
22837 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22838
22839 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22840 emit the real type on the definition die. */
22841 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22842 {
22843 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22844 if (die == auto_die || die == decltype_auto_die)
22845 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22846 TYPE_UNQUALIFIED, false, context_die);
22847 }
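	  /* A hedged illustration of the auto-return case above (assumed
	     C++14 input, not taken from this file):
		 auto f ();			// declaration: placeholder type
		 auto f () { return 0; }	// definition: deduces int
	     so the definition DIE gets the deduced type "int" while the
	     declaration keeps the placeholder reference.  */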
22848
22849 /* When we process the method declaration, we haven't seen
22850 the out-of-class defaulted definition yet, so we have to
22851 recheck now. */
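	  /* For example (an illustrative sketch, not from this file):
		 struct A { A (); };
		 A::A () = default;
	     only the out-of-class definition reveals that the constructor
	     is defaulted, hence DW_DEFAULTED_out_of_class below.  */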
22852 if ((dwarf_version >= 5 || ! dwarf_strict)
22853 && !get_AT (subr_die, DW_AT_defaulted))
22854 {
22855 int defaulted
22856 = lang_hooks.decls.decl_dwarf_attribute (decl,
22857 DW_AT_defaulted);
22858 if (defaulted != -1)
22859 {
22860 /* Other values must have been handled before. */
22861 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22862 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22863 }
22864 }
22865 }
22866 }
22867 /* Create a fresh DIE for anything else. */
22868 else
22869 {
22870 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22871
22872 if (TREE_PUBLIC (decl))
22873 add_AT_flag (subr_die, DW_AT_external, 1);
22874
22875 add_name_and_src_coords_attributes (subr_die, decl);
22876 add_pubname (decl, subr_die);
22877 if (debug_info_level > DINFO_LEVEL_TERSE)
22878 {
22879 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22880 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22881 TYPE_UNQUALIFIED, false, context_die);
22882 }
22883
22884 add_pure_or_virtual_attribute (subr_die, decl);
22885 if (DECL_ARTIFICIAL (decl))
22886 add_AT_flag (subr_die, DW_AT_artificial, 1);
22887
22888 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22889 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22890
22891 add_alignment_attribute (subr_die, decl);
22892
22893 add_accessibility_attribute (subr_die, decl);
22894 }
22895
22896 /* Unless we have an existing non-declaration DIE, equate the new
22897 DIE. */
22898 if (!old_die || is_declaration_die (old_die))
22899 equate_decl_number_to_die (decl, subr_die);
22900
22901 if (declaration)
22902 {
22903 if (!old_die || !get_AT (old_die, DW_AT_inline))
22904 {
22905 add_AT_flag (subr_die, DW_AT_declaration, 1);
22906
22907 /* If this is an explicit function declaration then generate
22908 a DW_AT_explicit attribute. */
22909 if ((dwarf_version >= 3 || !dwarf_strict)
22910 && lang_hooks.decls.decl_dwarf_attribute (decl,
22911 DW_AT_explicit) == 1)
22912 add_AT_flag (subr_die, DW_AT_explicit, 1);
22913
22914 /* If this is a C++11 deleted special function member then generate
22915 a DW_AT_deleted attribute. */
22916 if ((dwarf_version >= 5 || !dwarf_strict)
22917 && lang_hooks.decls.decl_dwarf_attribute (decl,
22918 DW_AT_deleted) == 1)
22919 add_AT_flag (subr_die, DW_AT_deleted, 1);
22920
22921 /* If this is a C++11 defaulted special function member then
22922 generate a DW_AT_defaulted attribute. */
22923 if (dwarf_version >= 5 || !dwarf_strict)
22924 {
22925 int defaulted
22926 = lang_hooks.decls.decl_dwarf_attribute (decl,
22927 DW_AT_defaulted);
22928 if (defaulted != -1)
22929 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22930 }
22931
22932 /* If this is a C++11 non-static member function with & ref-qualifier
22933 then generate a DW_AT_reference attribute. */
22934 if ((dwarf_version >= 5 || !dwarf_strict)
22935 && lang_hooks.decls.decl_dwarf_attribute (decl,
22936 DW_AT_reference) == 1)
22937 add_AT_flag (subr_die, DW_AT_reference, 1);
22938
22939 /* If this is a C++11 non-static member function with &&
22940 ref-qualifier then generate a DW_AT_reference attribute. */
22941 if ((dwarf_version >= 5 || !dwarf_strict)
22942 && lang_hooks.decls.decl_dwarf_attribute (decl,
22943 DW_AT_rvalue_reference)
22944 == 1)
22945 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
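	  /* Illustrative C++ members that would trigger the attributes
	     added above (assumed input, not taken from this file):
		 struct A {
		   explicit A (int);	    -> DW_AT_explicit
		   A (const A &) = delete;  -> DW_AT_deleted
		   A () = default;	    -> DW_AT_defaulted
		   void f () &;		    -> DW_AT_reference
		   void g () &&;	    -> DW_AT_rvalue_reference
		 };  */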
22946 }
22947 }
22948 /* For non DECL_EXTERNALs, if range information is available, fill
22949 the DIE with it. */
22950 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22951 {
22952 HOST_WIDE_INT cfa_fb_offset;
22953
22954 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22955
22956 if (!crtl->has_bb_partition)
22957 {
22958 dw_fde_ref fde = fun->fde;
22959 if (fde->dw_fde_begin)
22960 {
22961 /* We have already generated the labels. */
22962 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22963 fde->dw_fde_end, false);
22964 }
22965 else
22966 {
22967 /* Create start/end labels and add the range. */
22968 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22969 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22970 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22971 current_function_funcdef_no);
22972 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22973 current_function_funcdef_no);
22974 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22975 false);
22976 }
22977
22978 #if VMS_DEBUGGING_INFO
22979 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22980 Section 2.3 Prologue and Epilogue Attributes:
22981 When a breakpoint is set on entry to a function, it is generally
22982 desirable for execution to be suspended, not on the very first
22983 instruction of the function, but rather at a point after the
22984 function's frame has been set up, after any language defined local
22985 declaration processing has been completed, and before execution of
22986 the first statement of the function begins. Debuggers generally
22987 cannot properly determine where this point is. Similarly for a
22988 breakpoint set on exit from a function. The prologue and epilogue
22989 attributes allow a compiler to communicate the location(s) to use. */
22990
22991 {
22992 if (fde->dw_fde_vms_end_prologue)
22993 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22994 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22995
22996 if (fde->dw_fde_vms_begin_epilogue)
22997 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22998 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22999 }
23000 #endif
23001
23002 }
23003 else
23004 {
23005 /* Generate pubnames entries for the split function code ranges. */
23006 dw_fde_ref fde = fun->fde;
23007
23008 if (fde->dw_fde_second_begin)
23009 {
23010 if (dwarf_version >= 3 || !dwarf_strict)
23011 {
23012 /* We should use ranges for non-contiguous code section
23013 addresses. Use the actual code range for the initial
23014 section, since the HOT/COLD labels might precede an
23015 alignment offset. */
23016 bool range_list_added = false;
23017 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23018 fde->dw_fde_end, &range_list_added,
23019 false);
23020 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23021 fde->dw_fde_second_end,
23022 &range_list_added, false);
23023 if (range_list_added)
23024 add_ranges (NULL);
23025 }
23026 else
23027 {
23028		  /* There is no real support in DW2 for this, so we make
23029 a work-around. First, emit the pub name for the segment
23030 containing the function label. Then make and emit a
23031 simplified subprogram DIE for the second segment with the
23032		     name prefixed by __second_sect_of_.  We use the same
23033		     linkage name for the second DIE so that gdb will find both
23034 sections when given "b foo". */
23035 const char *name = NULL;
23036 tree decl_name = DECL_NAME (decl);
23037 dw_die_ref seg_die;
23038
23039 /* Do the 'primary' section. */
23040 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23041 fde->dw_fde_end, false);
23042
23043 /* Build a minimal DIE for the secondary section. */
23044 seg_die = new_die (DW_TAG_subprogram,
23045 subr_die->die_parent, decl);
23046
23047 if (TREE_PUBLIC (decl))
23048 add_AT_flag (seg_die, DW_AT_external, 1);
23049
23050 if (decl_name != NULL
23051 && IDENTIFIER_POINTER (decl_name) != NULL)
23052 {
23053 name = dwarf2_name (decl, 1);
23054 if (! DECL_ARTIFICIAL (decl))
23055 add_src_coords_attributes (seg_die, decl);
23056
23057 add_linkage_name (seg_die, decl);
23058 }
23059 gcc_assert (name != NULL);
23060 add_pure_or_virtual_attribute (seg_die, decl);
23061 if (DECL_ARTIFICIAL (decl))
23062 add_AT_flag (seg_die, DW_AT_artificial, 1);
23063
23064 name = concat ("__second_sect_of_", name, NULL);
23065 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23066 fde->dw_fde_second_end, false);
23067 add_name_attribute (seg_die, name);
23068 if (want_pubnames ())
23069 add_pubname_string (name, seg_die);
23070 }
23071 }
23072 else
23073 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23074 false);
23075 }
23076
23077 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23078
23079 /* We define the "frame base" as the function's CFA. This is more
23080 convenient for several reasons: (1) It's stable across the prologue
23081 and epilogue, which makes it better than just a frame pointer,
23082 (2) With dwarf3, there exists a one-byte encoding that allows us
23083 to reference the .debug_frame data by proxy, but failing that,
23084 (3) We can at least reuse the code inspection and interpretation
23085 code that determines the CFA position at various points in the
23086 function. */
23087 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23088 {
23089 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23090 add_AT_loc (subr_die, DW_AT_frame_base, op);
23091 }
23092 else
23093 {
23094 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23095 if (list->dw_loc_next)
23096 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23097 else
23098 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23099 }
23100
23101 /* Compute a displacement from the "steady-state frame pointer" to
23102 the CFA. The former is what all stack slots and argument slots
23103 will reference in the rtl; the latter is what we've told the
23104 debugger about. We'll need to adjust all frame_base references
23105 by this displacement. */
23106 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23107
23108 if (fun->static_chain_decl)
23109 {
23110 /* DWARF requires here a location expression that computes the
23111 address of the enclosing subprogram's frame base. The machinery
23112 in tree-nested.c is supposed to store this specific address in the
23113 last field of the FRAME record. */
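	  /* A rough sketch of the expression built below (type and field
	     names are illustrative assumptions): conceptually
		 ((FRAME_TYPE *) static_chain)-><last field>
	     i.e. a COMPONENT_REF of an INDIRECT_REF of the static chain,
	     selecting the slot in which tree-nested.c stored the enclosing
	     frame base address.  */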
23114 const tree frame_type
23115 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23116 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23117
23118 tree fb_expr
23119 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23120 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23121 fb_expr, fb_decl, NULL_TREE);
23122
23123 add_AT_location_description (subr_die, DW_AT_static_link,
23124 loc_list_from_tree (fb_expr, 0, NULL));
23125 }
23126
23127 resolve_variable_values ();
23128 }
23129
23130   /* Generate child DIEs for template parameters.  */
23131 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23132 gen_generic_params_dies (decl);
23133
23134 /* Now output descriptions of the arguments for this function. This gets
23135 (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list
23136 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23137 `...' at the end of the formal parameter list. In order to find out if
23138 there was a trailing ellipsis or not, we must instead look at the type
23139 associated with the FUNCTION_DECL. This will be a node of type
23140 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23141 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23142 an ellipsis at the end. */
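  /* Concrete illustrations of the above (assumed C input, not from
     this file):
	 int f (int, ...);   the argument type chain does not end in
			     void_type_node, so a trailing ellipsis is implied
	 int g (int);	     the chain ends in void_type_node, no ellipsis.  */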
23143
23144 /* In the case where we are describing a mere function declaration, all we
23145 need to do here (and all we *can* do here) is to describe the *types* of
23146 its formal parameters. */
23147 if (debug_info_level <= DINFO_LEVEL_TERSE)
23148 ;
23149 else if (declaration)
23150 gen_formal_types_die (decl, subr_die);
23151 else
23152 {
23153 /* Generate DIEs to represent all known formal parameters. */
23154 tree parm = DECL_ARGUMENTS (decl);
23155 tree generic_decl = early_dwarf
23156 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23157 tree generic_decl_parm = generic_decl
23158 ? DECL_ARGUMENTS (generic_decl)
23159 : NULL;
23160
23161 /* Now we want to walk the list of parameters of the function and
23162 emit their relevant DIEs.
23163
23164 We consider the case of DECL being an instance of a generic function
23165 as well as it being a normal function.
23166
23167 If DECL is an instance of a generic function we walk the
23168 parameters of the generic function declaration _and_ the parameters of
23169 DECL itself. This is useful because we want to emit specific DIEs for
23170 function parameter packs and those are declared as part of the
23171 generic function declaration. In that particular case,
23172 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23173 That DIE has children DIEs representing the set of arguments
23174 of the pack. Note that the set of pack arguments can be empty.
23175 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23176	 child DIEs.
23177
23178 Otherwise, we just consider the parameters of DECL. */
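      /* For instance (illustrative only, not taken from this file):
	     template <typename... Args> void f (Args... args);
	 an instantiation of f yields a DW_TAG_GNU_formal_parameter_pack
	 DIE whose children describe the pack's actual arguments (possibly
	 none).  */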
23179 while (generic_decl_parm || parm)
23180 {
23181 if (generic_decl_parm
23182 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23183 gen_formal_parameter_pack_die (generic_decl_parm,
23184 parm, subr_die,
23185 &parm);
23186 else if (parm)
23187 {
23188 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23189
23190 if (early_dwarf
23191 && parm == DECL_ARGUMENTS (decl)
23192 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23193 && parm_die
23194 && (dwarf_version >= 3 || !dwarf_strict))
23195 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23196
23197 parm = DECL_CHAIN (parm);
23198 }
23199 else if (parm)
23200 parm = DECL_CHAIN (parm);
23201
23202 if (generic_decl_parm)
23203 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23204 }
23205
23206 /* Decide whether we need an unspecified_parameters DIE at the end.
23207	 There are 2 more cases to do this for: 1) the ANSI ... declaration -
23208 this is detectable when the end of the arg list is not a
23209 void_type_node 2) an unprototyped function declaration (not a
23210 definition). This just means that we have no info about the
23211 parameters at all. */
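      /* Hedged examples of the two cases (assumed C input, not from
	 this file):
	     int f (int, ...);	case 1: prototyped with a trailing ellipsis
	     int g ();		case 2: unprototyped declaration (K&R style)
	 each would get a DW_TAG_unspecified_parameters child DIE.  */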
23212 if (early_dwarf)
23213 {
23214 if (prototype_p (TREE_TYPE (decl)))
23215 {
23216	      /* This is the prototyped case; check for a trailing ellipsis.  */
23217 if (stdarg_p (TREE_TYPE (decl)))
23218 gen_unspecified_parameters_die (decl, subr_die);
23219 }
23220 else if (DECL_INITIAL (decl) == NULL_TREE)
23221 gen_unspecified_parameters_die (decl, subr_die);
23222 }
23223 }
23224
23225 if (subr_die != old_die)
23226 /* Add the calling convention attribute if requested. */
23227 add_calling_convention_attribute (subr_die, decl);
23228
23229 /* Output Dwarf info for all of the stuff within the body of the function
23230 (if it has one - it may be just a declaration).
23231
23232 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23233 a function. This BLOCK actually represents the outermost binding contour
23234 for the function, i.e. the contour in which the function's formal
23235 parameters and labels get declared. Curiously, it appears that the front
23236 end doesn't actually put the PARM_DECL nodes for the current function onto
23237      the BLOCK_VARS list for this outer scope; instead they are strung off
23238      the DECL_ARGUMENTS list for the function.
23239
23240 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23241 the LABEL_DECL nodes for the function however, and we output DWARF info
23242 for those in decls_for_scope. Just within the `outer_scope' there will be
23243 a BLOCK node representing the function's outermost pair of curly braces,
23244 and any blocks used for the base and member initializers of a C++
23245 constructor function. */
23246 tree outer_scope = DECL_INITIAL (decl);
23247 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23248 {
23249 int call_site_note_count = 0;
23250 int tail_call_site_note_count = 0;
23251
23252 /* Emit a DW_TAG_variable DIE for a named return value. */
23253 if (DECL_NAME (DECL_RESULT (decl)))
23254 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23255
23256 /* The first time through decls_for_scope we will generate the
23257 DIEs for the locals. The second time, we fill in the
23258 location info. */
23259 decls_for_scope (outer_scope, subr_die);
23260
23261 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23262 {
23263 struct call_arg_loc_node *ca_loc;
23264 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23265 {
23266 dw_die_ref die = NULL;
23267 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23268 rtx arg, next_arg;
23269
23270 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23271 ? XEXP (ca_loc->call_arg_loc_note, 0)
23272 : NULL_RTX);
23273 arg; arg = next_arg)
23274 {
23275 dw_loc_descr_ref reg, val;
23276 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23277 dw_die_ref cdie, tdie = NULL;
23278
23279 next_arg = XEXP (arg, 1);
23280 if (REG_P (XEXP (XEXP (arg, 0), 0))
23281 && next_arg
23282 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23283 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23284 && REGNO (XEXP (XEXP (arg, 0), 0))
23285 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23286 next_arg = XEXP (next_arg, 1);
23287 if (mode == VOIDmode)
23288 {
23289 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23290 if (mode == VOIDmode)
23291 mode = GET_MODE (XEXP (arg, 0));
23292 }
23293 if (mode == VOIDmode || mode == BLKmode)
23294 continue;
23295 /* Get dynamic information about call target only if we
23296 have no static information: we cannot generate both
23297 DW_AT_call_origin and DW_AT_call_target
23298 attributes. */
23299 if (ca_loc->symbol_ref == NULL_RTX)
23300 {
23301 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23302 {
23303 tloc = XEXP (XEXP (arg, 0), 1);
23304 continue;
23305 }
23306 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23307 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23308 {
23309 tlocc = XEXP (XEXP (arg, 0), 1);
23310 continue;
23311 }
23312 }
23313 reg = NULL;
23314 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23315 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23316 VAR_INIT_STATUS_INITIALIZED);
23317 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23318 {
23319 rtx mem = XEXP (XEXP (arg, 0), 0);
23320 reg = mem_loc_descriptor (XEXP (mem, 0),
23321 get_address_mode (mem),
23322 GET_MODE (mem),
23323 VAR_INIT_STATUS_INITIALIZED);
23324 }
23325 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23326 == DEBUG_PARAMETER_REF)
23327 {
23328 tree tdecl
23329 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23330 tdie = lookup_decl_die (tdecl);
23331 if (tdie == NULL)
23332 continue;
23333 }
23334 else
23335 continue;
23336 if (reg == NULL
23337 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23338 != DEBUG_PARAMETER_REF)
23339 continue;
23340 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23341 VOIDmode,
23342 VAR_INIT_STATUS_INITIALIZED);
23343 if (val == NULL)
23344 continue;
23345 if (die == NULL)
23346 die = gen_call_site_die (decl, subr_die, ca_loc);
23347 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23348 NULL_TREE);
23349 if (reg != NULL)
23350 add_AT_loc (cdie, DW_AT_location, reg);
23351 else if (tdie != NULL)
23352 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23353 tdie);
23354 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23355 if (next_arg != XEXP (arg, 1))
23356 {
23357 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23358 if (mode == VOIDmode)
23359 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23360 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23361 0), 1),
23362 mode, VOIDmode,
23363 VAR_INIT_STATUS_INITIALIZED);
23364 if (val != NULL)
23365 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23366 val);
23367 }
23368 }
23369 if (die == NULL
23370 && (ca_loc->symbol_ref || tloc))
23371 die = gen_call_site_die (decl, subr_die, ca_loc);
23372 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23373 {
23374 dw_loc_descr_ref tval = NULL;
23375
23376 if (tloc != NULL_RTX)
23377 tval = mem_loc_descriptor (tloc,
23378 GET_MODE (tloc) == VOIDmode
23379 ? Pmode : GET_MODE (tloc),
23380 VOIDmode,
23381 VAR_INIT_STATUS_INITIALIZED);
23382 if (tval)
23383 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23384 else if (tlocc != NULL_RTX)
23385 {
23386 tval = mem_loc_descriptor (tlocc,
23387 GET_MODE (tlocc) == VOIDmode
23388 ? Pmode : GET_MODE (tlocc),
23389 VOIDmode,
23390 VAR_INIT_STATUS_INITIALIZED);
23391 if (tval)
23392 add_AT_loc (die,
23393 dwarf_AT (DW_AT_call_target_clobbered),
23394 tval);
23395 }
23396 }
23397 if (die != NULL)
23398 {
23399 call_site_note_count++;
23400 if (ca_loc->tail_call_p)
23401 tail_call_site_note_count++;
23402 }
23403 }
23404 }
23405 call_arg_locations = NULL;
23406 call_arg_loc_last = NULL;
23407 if (tail_call_site_count >= 0
23408 && tail_call_site_count == tail_call_site_note_count
23409 && (!dwarf_strict || dwarf_version >= 5))
23410 {
23411 if (call_site_count >= 0
23412 && call_site_count == call_site_note_count)
23413 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23414 else
23415 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23416 }
23417 call_site_count = -1;
23418 tail_call_site_count = -1;
23419 }
23420
23421 /* Mark used types after we have created DIEs for the functions scopes. */
23422 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23423 }
23424
23425 /* Returns a hash value for X (which really is a die_struct). */
23426
23427 hashval_t
23428 block_die_hasher::hash (die_struct *d)
23429 {
23430 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23431 }
23432
23433 /* Return true if the decl_id and die_parent of die_struct X are the same
23434 as decl_id and die_parent of die_struct Y. */
23435
23436 bool
23437 block_die_hasher::equal (die_struct *x, die_struct *y)
23438 {
23439 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23440 }
23441
23442 /* Hold information about markers for inlined entry points. */
23443 struct GTY ((for_user)) inline_entry_data
23444 {
23445 /* The block that's the inlined_function_outer_scope for an inlined
23446 function. */
23447 tree block;
23448
23449 /* The label at the inlined entry point. */
23450 const char *label_pfx;
23451 unsigned int label_num;
23452
23453 /* The view number to be used as the inlined entry point. */
23454 var_loc_view view;
23455 };
23456
23457 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23458 {
23459 typedef tree compare_type;
23460 static inline hashval_t hash (const inline_entry_data *);
23461 static inline bool equal (const inline_entry_data *, const_tree);
23462 };
23463
23464 /* Hash table routines for inline_entry_data. */
23465
23466 inline hashval_t
23467 inline_entry_data_hasher::hash (const inline_entry_data *data)
23468 {
23469 return htab_hash_pointer (data->block);
23470 }
23471
23472 inline bool
23473 inline_entry_data_hasher::equal (const inline_entry_data *data,
23474 const_tree block)
23475 {
23476 return data->block == block;
23477 }
23478
23479 /* Inlined entry points pending DIE creation in this compilation unit. */
23480
23481 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23482
23483
23484 /* Return TRUE if DECL, which may have been previously generated as
23485 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23486 true if decl (or its origin) is either an extern declaration or a
23487 class/namespace scoped declaration.
23488
23489 The declare_in_namespace support causes us to get two DIEs for one
23490 variable, both of which are declarations. We want to avoid
23491 considering one to be a specification, so we must test for
23492 DECLARATION and DW_AT_declaration. */
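   /* A hedged C++ illustration (not taken from this file):
	  namespace N { extern int v; }
	  int N::v = 3;
      early declaration handling can produce more than one declaration DIE
      for v; only the DIE created for the defining declaration of N::v
      should end up carrying the DW_AT_specification.  */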
23493 static inline bool
23494 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23495 {
23496 return (old_die && TREE_STATIC (decl) && !declaration
23497 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23498 }
23499
23500 /* Return true if DECL is a local static. */
23501
23502 static inline bool
23503 local_function_static (tree decl)
23504 {
23505 gcc_assert (VAR_P (decl));
23506 return TREE_STATIC (decl)
23507 && DECL_CONTEXT (decl)
23508 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23509 }
23510
23511 /* Generate a DIE to represent a declared data object.
23512 Either DECL or ORIGIN must be non-null. */
23513
23514 static void
23515 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23516 {
23517 HOST_WIDE_INT off = 0;
23518 tree com_decl;
23519 tree decl_or_origin = decl ? decl : origin;
23520 tree ultimate_origin;
23521 dw_die_ref var_die;
23522 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23523 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23524 || class_or_namespace_scope_p (context_die));
23525 bool specialization_p = false;
23526 bool no_linkage_name = false;
23527
23528 /* While C++ inline static data members have definitions inside of the
23529 class, force the first DIE to be a declaration, then let gen_member_die
23530 reparent it to the class context and call gen_variable_die again
23531 to create the outside of the class DIE for the definition. */
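  /* A minimal illustration (assumed C++17 input, not from this file):
	 struct S { static inline int i = 0; };
     the first DIE emitted for S::i is forced to be a declaration inside S;
     a second, defining DW_TAG_variable that refers back to it via
     DW_AT_specification is created outside the class later.  */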
23532 if (!declaration
23533 && old_die == NULL
23534 && decl
23535 && DECL_CONTEXT (decl)
23536 && TYPE_P (DECL_CONTEXT (decl))
23537 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23538 {
23539 declaration = true;
23540 if (dwarf_version < 5)
23541 no_linkage_name = true;
23542 }
23543
23544 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23545 if (decl || ultimate_origin)
23546 origin = ultimate_origin;
23547 com_decl = fortran_common (decl_or_origin, &off);
23548
23549 /* Symbol in common gets emitted as a child of the common block, in the form
23550 of a data member. */
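      /* For example (illustrative Fortran input, not from this file):
	     integer :: a, b
	     common /blk/ a, b
	 produces a DW_TAG_common_block DIE for blk whose children are the
	 DW_TAG_variable DIEs for a and b.  */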
23551 if (com_decl)
23552 {
23553 dw_die_ref com_die;
23554 dw_loc_list_ref loc = NULL;
23555 die_node com_die_arg;
23556
23557 var_die = lookup_decl_die (decl_or_origin);
23558 if (var_die)
23559 {
23560 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23561 {
23562 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23563 if (loc)
23564 {
23565 if (off)
23566 {
23567 /* Optimize the common case. */
23568 if (single_element_loc_list_p (loc)
23569 && loc->expr->dw_loc_opc == DW_OP_addr
23570 && loc->expr->dw_loc_next == NULL
23571 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23572 == SYMBOL_REF)
23573 {
23574 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23575 loc->expr->dw_loc_oprnd1.v.val_addr
23576 = plus_constant (GET_MODE (x), x , off);
23577 }
23578 else
23579 loc_list_plus_const (loc, off);
23580 }
23581 add_AT_location_description (var_die, DW_AT_location, loc);
23582 remove_AT (var_die, DW_AT_declaration);
23583 }
23584 }
23585 return;
23586 }
23587
23588 if (common_block_die_table == NULL)
23589 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23590
23591 com_die_arg.decl_id = DECL_UID (com_decl);
23592 com_die_arg.die_parent = context_die;
23593 com_die = common_block_die_table->find (&com_die_arg);
23594 if (! early_dwarf)
23595 loc = loc_list_from_tree (com_decl, 2, NULL);
23596 if (com_die == NULL)
23597 {
23598 const char *cnam
23599 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23600 die_node **slot;
23601
23602 com_die = new_die (DW_TAG_common_block, context_die, decl);
23603 add_name_and_src_coords_attributes (com_die, com_decl);
23604 if (loc)
23605 {
23606 add_AT_location_description (com_die, DW_AT_location, loc);
23607 /* Avoid sharing the same loc descriptor between
23608 DW_TAG_common_block and DW_TAG_variable. */
23609 loc = loc_list_from_tree (com_decl, 2, NULL);
23610 }
23611 else if (DECL_EXTERNAL (decl_or_origin))
23612 add_AT_flag (com_die, DW_AT_declaration, 1);
23613 if (want_pubnames ())
23614 add_pubname_string (cnam, com_die); /* ??? needed? */
23615 com_die->decl_id = DECL_UID (com_decl);
23616 slot = common_block_die_table->find_slot (com_die, INSERT);
23617 *slot = com_die;
23618 }
23619 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23620 {
23621 add_AT_location_description (com_die, DW_AT_location, loc);
23622 loc = loc_list_from_tree (com_decl, 2, NULL);
23623 remove_AT (com_die, DW_AT_declaration);
23624 }
23625 var_die = new_die (DW_TAG_variable, com_die, decl);
23626 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23627 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23628 decl_quals (decl_or_origin), false,
23629 context_die);
23630 add_alignment_attribute (var_die, decl);
23631 add_AT_flag (var_die, DW_AT_external, 1);
23632 if (loc)
23633 {
23634 if (off)
23635 {
23636 /* Optimize the common case. */
23637 if (single_element_loc_list_p (loc)
23638 && loc->expr->dw_loc_opc == DW_OP_addr
23639 && loc->expr->dw_loc_next == NULL
23640 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23641 {
23642 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23643 loc->expr->dw_loc_oprnd1.v.val_addr
23644 = plus_constant (GET_MODE (x), x, off);
23645 }
23646 else
23647 loc_list_plus_const (loc, off);
23648 }
23649 add_AT_location_description (var_die, DW_AT_location, loc);
23650 }
23651 else if (DECL_EXTERNAL (decl_or_origin))
23652 add_AT_flag (var_die, DW_AT_declaration, 1);
23653 if (decl)
23654 equate_decl_number_to_die (decl, var_die);
23655 return;
23656 }
23657
23658 if (old_die)
23659 {
23660 if (declaration)
23661 {
23662	  /* A declaration that has been previously dumped needs no
23663	     further annotations, since it doesn't need location info on
23664 the second pass. */
23665 return;
23666 }
23667 else if (decl_will_get_specification_p (old_die, decl, declaration)
23668 && !get_AT (old_die, DW_AT_specification))
23669 {
23670 /* Fall-thru so we can make a new variable die along with a
23671 DW_AT_specification. */
23672 }
23673 else if (origin && old_die->die_parent != context_die)
23674 {
23675 /* If we will be creating an inlined instance, we need a
23676 new DIE that will get annotated with
23677 DW_AT_abstract_origin. */
23678 gcc_assert (!DECL_ABSTRACT_P (decl));
23679 }
23680 else
23681 {
23682 /* If a DIE was dumped early, it still needs location info.
23683 Skip to where we fill the location bits. */
23684 var_die = old_die;
23685
23686 /* ??? In LTRANS we cannot annotate early created variably
23687 modified type DIEs without copying them and adjusting all
23688 references to them. Thus we dumped them again. Also add a
23689	     reference to them, but beware of a -g0 compile and -g link,
23690	     in which case the reference will already be present.  */
23691 tree type = TREE_TYPE (decl_or_origin);
23692 if (in_lto_p
23693 && ! get_AT (var_die, DW_AT_type)
23694 && variably_modified_type_p
23695 (type, decl_function_context (decl_or_origin)))
23696 {
23697 if (decl_by_reference_p (decl_or_origin))
23698 add_type_attribute (var_die, TREE_TYPE (type),
23699 TYPE_UNQUALIFIED, false, context_die);
23700 else
23701 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23702 false, context_die);
23703 }
23704
23705 goto gen_variable_die_location;
23706 }
23707 }
23708
23709 /* For static data members, the declaration in the class is supposed
23710 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23711 also in DWARF2; the specification should still be DW_TAG_variable
23712 referencing the DW_TAG_member DIE. */
23713 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23714 var_die = new_die (DW_TAG_member, context_die, decl);
23715 else
23716 var_die = new_die (DW_TAG_variable, context_die, decl);
23717
23718 if (origin != NULL)
23719 add_abstract_origin_attribute (var_die, origin);
23720
23721 /* Loop unrolling can create multiple blocks that refer to the same
23722 static variable, so we must test for the DW_AT_declaration flag.
23723
23724 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23725 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23726 sharing them.
23727
23728 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23729 else if (decl_will_get_specification_p (old_die, decl, declaration))
23730 {
23731 /* This is a definition of a C++ class level static. */
23732 add_AT_specification (var_die, old_die);
23733 specialization_p = true;
23734 if (DECL_NAME (decl))
23735 {
23736 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23737 struct dwarf_file_data * file_index = lookup_filename (s.file);
23738
23739 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23740 add_AT_file (var_die, DW_AT_decl_file, file_index);
23741
23742 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23743 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23744
23745 if (debug_column_info
23746 && s.column
23747 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23748 != (unsigned) s.column))
23749 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23750
23751 if (old_die->die_tag == DW_TAG_member)
23752 add_linkage_name (var_die, decl);
23753 }
23754 }
23755 else
23756 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23757
23758 if ((origin == NULL && !specialization_p)
23759 || (origin != NULL
23760 && !DECL_ABSTRACT_P (decl_or_origin)
23761 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23762 decl_function_context
23763 (decl_or_origin))))
23764 {
23765 tree type = TREE_TYPE (decl_or_origin);
23766
23767 if (decl_by_reference_p (decl_or_origin))
23768 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23769 context_die);
23770 else
23771 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23772 context_die);
23773 }
23774
23775 if (origin == NULL && !specialization_p)
23776 {
23777 if (TREE_PUBLIC (decl))
23778 add_AT_flag (var_die, DW_AT_external, 1);
23779
23780 if (DECL_ARTIFICIAL (decl))
23781 add_AT_flag (var_die, DW_AT_artificial, 1);
23782
23783 add_alignment_attribute (var_die, decl);
23784
23785 add_accessibility_attribute (var_die, decl);
23786 }
23787
23788 if (declaration)
23789 add_AT_flag (var_die, DW_AT_declaration, 1);
23790
23791 if (decl && (DECL_ABSTRACT_P (decl)
23792 || !old_die || is_declaration_die (old_die)))
23793 equate_decl_number_to_die (decl, var_die);
23794
23795 gen_variable_die_location:
23796 if (! declaration
23797 && (! DECL_ABSTRACT_P (decl_or_origin)
23798 /* Local static vars are shared between all clones/inlines,
23799 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23800 already set. */
23801 || (VAR_P (decl_or_origin)
23802 && TREE_STATIC (decl_or_origin)
23803 && DECL_RTL_SET_P (decl_or_origin))))
23804 {
23805 if (early_dwarf)
23806 add_pubname (decl_or_origin, var_die);
23807 else
23808 add_location_or_const_value_attribute (var_die, decl_or_origin,
23809 decl == NULL);
23810 }
23811 else
23812 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23813
23814 if ((dwarf_version >= 4 || !dwarf_strict)
23815 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23816 DW_AT_const_expr) == 1
23817 && !get_AT (var_die, DW_AT_const_expr)
23818 && !specialization_p)
23819 add_AT_flag (var_die, DW_AT_const_expr, 1);
23820
23821 if (!dwarf_strict)
23822 {
23823 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23824 DW_AT_inline);
23825 if (inl != -1
23826 && !get_AT (var_die, DW_AT_inline)
23827 && !specialization_p)
23828 add_AT_unsigned (var_die, DW_AT_inline, inl);
23829 }
23830 }
23831
23832 /* Generate a DIE to represent a named constant. */
23833
23834 static void
23835 gen_const_die (tree decl, dw_die_ref context_die)
23836 {
23837 dw_die_ref const_die;
23838 tree type = TREE_TYPE (decl);
23839
23840 const_die = lookup_decl_die (decl);
23841 if (const_die)
23842 return;
23843
23844 const_die = new_die (DW_TAG_constant, context_die, decl);
23845 equate_decl_number_to_die (decl, const_die);
23846 add_name_and_src_coords_attributes (const_die, decl);
23847 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23848 if (TREE_PUBLIC (decl))
23849 add_AT_flag (const_die, DW_AT_external, 1);
23850 if (DECL_ARTIFICIAL (decl))
23851 add_AT_flag (const_die, DW_AT_artificial, 1);
23852 tree_add_const_value_attribute_for_decl (const_die, decl);
23853 }
23854
23855 /* Generate a DIE to represent a label identifier. */
23856
23857 static void
23858 gen_label_die (tree decl, dw_die_ref context_die)
23859 {
23860 tree origin = decl_ultimate_origin (decl);
23861 dw_die_ref lbl_die = lookup_decl_die (decl);
23862 rtx insn;
23863 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23864
23865 if (!lbl_die)
23866 {
23867 lbl_die = new_die (DW_TAG_label, context_die, decl);
23868 equate_decl_number_to_die (decl, lbl_die);
23869
23870 if (origin != NULL)
23871 add_abstract_origin_attribute (lbl_die, origin);
23872 else
23873 add_name_and_src_coords_attributes (lbl_die, decl);
23874 }
23875
23876 if (DECL_ABSTRACT_P (decl))
23877 equate_decl_number_to_die (decl, lbl_die);
23878 else if (! early_dwarf)
23879 {
23880 insn = DECL_RTL_IF_SET (decl);
23881
23882       /* Deleted labels are programmer-specified labels which have been
23883 eliminated because of various optimizations. We still emit them
23884 here so that it is possible to put breakpoints on them. */
23885 if (insn
23886 && (LABEL_P (insn)
23887 || ((NOTE_P (insn)
23888 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23889 {
23890 /* When optimization is enabled (via -O) some parts of the compiler
23891 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23892 represent source-level labels which were explicitly declared by
23893 the user. This really shouldn't be happening though, so catch
23894 it if it ever does happen. */
23895 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23896
23897 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23898 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23899 }
23900 else if (insn
23901 && NOTE_P (insn)
23902 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23903 && CODE_LABEL_NUMBER (insn) != -1)
23904 {
23905 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23906 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23907 }
23908 }
23909 }
23910
23911 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23912 attributes to the DIE for a block STMT, to describe where the inlined
23913 function was called from. This is similar to add_src_coords_attributes. */
23914
23915 static inline void
23916 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23917 {
23918 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23919
23920 if (dwarf_version >= 3 || !dwarf_strict)
23921 {
23922 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23923 add_AT_unsigned (die, DW_AT_call_line, s.line);
23924 if (debug_column_info && s.column)
23925 add_AT_unsigned (die, DW_AT_call_column, s.column);
23926 }
23927 }
23928
23929
23930 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23931 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23932
23933 static inline void
23934 add_high_low_attributes (tree stmt, dw_die_ref die)
23935 {
23936 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23937
23938 if (inline_entry_data **iedp
23939 = !inline_entry_data_table ? NULL
23940 : inline_entry_data_table->find_slot_with_hash (stmt,
23941 htab_hash_pointer (stmt),
23942 NO_INSERT))
23943 {
23944 inline_entry_data *ied = *iedp;
23945 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23946 gcc_assert (debug_inline_points);
23947 gcc_assert (inlined_function_outer_scope_p (stmt));
23948
23949 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23950 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23951
23952 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23953 && !dwarf_strict)
23954 {
23955 if (!output_asm_line_debug_info ())
23956 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23957 else
23958 {
23959 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23960 /* FIXME: this will resolve to a small number. Could we
23961 possibly emit smaller data? Ideally we'd emit a
23962 uleb128, but that would make the size of DIEs
23963 impossible for the compiler to compute, since it's
23964 the assembler that computes the value of the view
23965 label in this case. Ideally, we'd have a single form
23966 encompassing both the address and the view, and
23967 indirecting them through a table might make things
23968 easier, but even that would be more wasteful,
23969 space-wise, than what we have now. */
23970 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23971 }
23972 }
23973
23974 inline_entry_data_table->clear_slot (iedp);
23975 }
23976
23977 if (BLOCK_FRAGMENT_CHAIN (stmt)
23978 && (dwarf_version >= 3 || !dwarf_strict))
23979 {
23980 tree chain, superblock = NULL_TREE;
23981 dw_die_ref pdie;
23982 dw_attr_node *attr = NULL;
23983
23984 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23985 {
23986 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23987 BLOCK_NUMBER (stmt));
23988 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23989 }
23990
23991 /* Optimize duplicate .debug_ranges lists or even tails of
23992	 lists.  If this BLOCK has the same ranges as its supercontext,
23993	 look up the DW_AT_ranges attribute in the supercontext (and
23994	 recursively so), verify that the ranges_table contains the
23995	 right values and use it instead of adding a new .debug_ranges entry.  */
23996 for (chain = stmt, pdie = die;
23997 BLOCK_SAME_RANGE (chain);
23998 chain = BLOCK_SUPERCONTEXT (chain))
23999 {
24000 dw_attr_node *new_attr;
24001
24002 pdie = pdie->die_parent;
24003 if (pdie == NULL)
24004 break;
24005 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24006 break;
24007 new_attr = get_AT (pdie, DW_AT_ranges);
24008 if (new_attr == NULL
24009 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24010 break;
24011 attr = new_attr;
24012 superblock = BLOCK_SUPERCONTEXT (chain);
24013 }
24014 if (attr != NULL
24015 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24016 == BLOCK_NUMBER (superblock))
24017 && BLOCK_FRAGMENT_CHAIN (superblock))
24018 {
24019 unsigned long off = attr->dw_attr_val.v.val_offset;
24020 unsigned long supercnt = 0, thiscnt = 0;
24021 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24022 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24023 {
24024 ++supercnt;
24025 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24026 == BLOCK_NUMBER (chain));
24027 }
24028 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24029 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24030 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24031 ++thiscnt;
24032 gcc_assert (supercnt >= thiscnt);
24033 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24034 false);
24035 note_rnglist_head (off + supercnt - thiscnt);
24036 return;
24037 }
24038
24039 unsigned int offset = add_ranges (stmt, true);
24040 add_AT_range_list (die, DW_AT_ranges, offset, false);
24041 note_rnglist_head (offset);
24042
24043 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24044 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24045 do
24046 {
24047 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24048 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24049 chain = BLOCK_FRAGMENT_CHAIN (chain);
24050 }
24051 while (chain);
24052 add_ranges (NULL);
24053 }
24054 else
24055 {
24056 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24057 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24058 BLOCK_NUMBER (stmt));
24059 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24060 BLOCK_NUMBER (stmt));
24061 add_AT_low_high_pc (die, label, label_high, false);
24062 }
24063 }
24064
24065 /* Generate a DIE for a lexical block. */
24066
24067 static void
24068 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24069 {
24070 dw_die_ref old_die = BLOCK_DIE (stmt);
24071 dw_die_ref stmt_die = NULL;
24072 if (!old_die)
24073 {
24074 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24075 BLOCK_DIE (stmt) = stmt_die;
24076 }
24077
24078 if (BLOCK_ABSTRACT (stmt))
24079 {
24080 if (old_die)
24081 {
24082 /* This must have been generated early and it won't even
24083 need location information since it's a DW_AT_inline
24084 function. */
24085 if (flag_checking)
24086 for (dw_die_ref c = context_die; c; c = c->die_parent)
24087 if (c->die_tag == DW_TAG_inlined_subroutine
24088 || c->die_tag == DW_TAG_subprogram)
24089 {
24090 gcc_assert (get_AT (c, DW_AT_inline));
24091 break;
24092 }
24093 return;
24094 }
24095 }
24096 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24097 {
24098 /* If this is an inlined instance, create a new lexical die for
24099 anything below to attach DW_AT_abstract_origin to. */
24100 if (old_die)
24101 {
24102 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24103 BLOCK_DIE (stmt) = stmt_die;
24104 old_die = NULL;
24105 }
24106
24107 tree origin = block_ultimate_origin (stmt);
24108 if (origin != NULL_TREE && origin != stmt)
24109 add_abstract_origin_attribute (stmt_die, origin);
24110 }
24111
24112 if (old_die)
24113 stmt_die = old_die;
24114
24115 /* A non-abstract block whose blocks have already been reordered
24116 should have the instruction range for this block. If so, set the
24117 high/low attributes. */
24118 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24119 {
24120 gcc_assert (stmt_die);
24121 add_high_low_attributes (stmt, stmt_die);
24122 }
24123
24124 decls_for_scope (stmt, stmt_die);
24125 }
24126
24127 /* Generate a DIE for an inlined subprogram. */
24128
24129 static void
24130 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24131 {
24132 tree decl;
24133
24134 /* The instance of the function that is effectively being inlined shall not
24135 be abstract. */
24136 gcc_assert (! BLOCK_ABSTRACT (stmt));
24137
24138 decl = block_ultimate_origin (stmt);
24139
24140 /* Make sure any inlined functions are known to be inlineable. */
24141 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24142 || cgraph_function_possibly_inlined_p (decl));
24143
24144 if (! BLOCK_ABSTRACT (stmt))
24145 {
24146 dw_die_ref subr_die
24147 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24148
24149 if (call_arg_locations || debug_inline_points)
24150 BLOCK_DIE (stmt) = subr_die;
24151 add_abstract_origin_attribute (subr_die, decl);
24152 if (TREE_ASM_WRITTEN (stmt))
24153 add_high_low_attributes (stmt, subr_die);
24154 add_call_src_coords_attributes (stmt, subr_die);
24155
24156 decls_for_scope (stmt, subr_die);
24157 }
24158 }
24159
24160 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24161 the comment for VLR_CONTEXT. */
24162
24163 static void
24164 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24165 {
24166 dw_die_ref decl_die;
24167
24168 if (TREE_TYPE (decl) == error_mark_node)
24169 return;
24170
24171 decl_die = new_die (DW_TAG_member, context_die, decl);
24172 add_name_and_src_coords_attributes (decl_die, decl);
24173 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24174 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24175 context_die);
24176
24177 if (DECL_BIT_FIELD_TYPE (decl))
24178 {
24179 add_byte_size_attribute (decl_die, decl);
24180 add_bit_size_attribute (decl_die, decl);
24181 add_bit_offset_attribute (decl_die, decl, ctx);
24182 }
24183
24184 add_alignment_attribute (decl_die, decl);
24185
24186 /* If we have a variant part offset, then we are supposed to process a member
24187 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24188 trees. */
24189 gcc_assert (ctx->variant_part_offset == NULL_TREE
24190 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24191 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24192 add_data_member_location_attribute (decl_die, decl, ctx);
24193
24194 if (DECL_ARTIFICIAL (decl))
24195 add_AT_flag (decl_die, DW_AT_artificial, 1);
24196
24197 add_accessibility_attribute (decl_die, decl);
24198
24199 /* Equate decl number to die, so that we can look up this decl later on. */
24200 equate_decl_number_to_die (decl, decl_die);
24201 }
24202
24203 /* Generate a DIE for a pointer to a member type. TYPE can be an
24204 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24205 pointer to member function. */
24206
24207 static void
24208 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24209 {
24210 if (lookup_type_die (type))
24211 return;
24212
24213 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24214 scope_die_for (type, context_die), type);
24215
24216 equate_type_number_to_die (type, ptr_die);
24217 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24218 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24219 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24220 context_die);
24221 add_alignment_attribute (ptr_die, type);
24222
24223 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24224 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24225 {
24226 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24227 add_AT_loc (ptr_die, DW_AT_use_location, op);
24228 }
24229 }
24230
24231 static char *producer_string;
24232
24233 /* Return a heap-allocated producer string including command-line options
24234 if -grecord-gcc-switches is given. */
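/* A hedged sketch of the result (compiler version and option set purely
   illustrative): compiling with -O2 -g -grecord-gcc-switches might
   produce a DW_AT_producer string such as
       "GNU C11 8.1.0 -O2 -g"
   i.e. lang_hooks.name, a space, version_string, and then each recorded
   switch separated by single spaces; options such as -o, -I or -W* are
   filtered out below.  */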
24235
24236 static char *
24237 gen_producer_string (void)
24238 {
24239 size_t j;
24240 auto_vec<const char *> switches;
24241 const char *language_string = lang_hooks.name;
24242 char *producer, *tail;
24243 const char *p;
24244 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24245 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24246
24247 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24248 switch (save_decoded_options[j].opt_index)
24249 {
24250 case OPT_o:
24251 case OPT_d:
24252 case OPT_dumpbase:
24253 case OPT_dumpdir:
24254 case OPT_auxbase:
24255 case OPT_auxbase_strip:
24256 case OPT_quiet:
24257 case OPT_version:
24258 case OPT_v:
24259 case OPT_w:
24260 case OPT_L:
24261 case OPT_D:
24262 case OPT_I:
24263 case OPT_U:
24264 case OPT_SPECIAL_unknown:
24265 case OPT_SPECIAL_ignore:
24266 case OPT_SPECIAL_deprecated:
24267 case OPT_SPECIAL_program_name:
24268 case OPT_SPECIAL_input_file:
24269 case OPT_grecord_gcc_switches:
24270 case OPT__output_pch_:
24271 case OPT_fdiagnostics_show_location_:
24272 case OPT_fdiagnostics_show_option:
24273 case OPT_fdiagnostics_show_caret:
24274 case OPT_fdiagnostics_show_labels:
24275 case OPT_fdiagnostics_show_line_numbers:
24276 case OPT_fdiagnostics_color_:
24277 case OPT_fverbose_asm:
24278 case OPT____:
24279 case OPT__sysroot_:
24280 case OPT_nostdinc:
24281 case OPT_nostdinc__:
24282 case OPT_fpreprocessed:
24283 case OPT_fltrans_output_list_:
24284 case OPT_fresolution_:
24285 case OPT_fdebug_prefix_map_:
24286 case OPT_fmacro_prefix_map_:
24287 case OPT_ffile_prefix_map_:
24288 case OPT_fcompare_debug:
24289 case OPT_fchecking:
24290 case OPT_fchecking_:
24291 /* Ignore these. */
24292 continue;
24293 default:
24294 if (cl_options[save_decoded_options[j].opt_index].flags
24295 & CL_NO_DWARF_RECORD)
24296 continue;
24297 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24298 == '-');
24299 switch (save_decoded_options[j].canonical_option[0][1])
24300 {
24301 case 'M':
24302 case 'i':
24303 case 'W':
24304 continue;
24305 case 'f':
24306 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24307 "dump", 4) == 0)
24308 continue;
24309 break;
24310 default:
24311 break;
24312 }
24313 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24314 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24315 break;
24316 }
24317
24318 producer = XNEWVEC (char, plen + 1 + len + 1);
24319 tail = producer;
24320 sprintf (tail, "%s %s", language_string, version_string);
24321 tail += plen;
24322
24323 FOR_EACH_VEC_ELT (switches, j, p)
24324 {
24325 len = strlen (p);
24326 *tail = ' ';
24327 memcpy (tail + 1, p, len);
24328 tail += len + 1;
24329 }
24330
24331 *tail = '\0';
24332 return producer;
24333 }
24334
24335 /* Given a C and/or C++ language/version string, return the "highest".
24336 C++ is assumed to be "higher" than C in this case. Used for merging
24337 LTO translation unit languages. */
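/* For instance (illustrative only), merging a "GNU C11" unit with a
   "GNU C++14" unit yields "GNU C++14", and merging "GNU C99" with
   "GNU C17" yields "GNU C17".  */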
24338 static const char *
24339 highest_c_language (const char *lang1, const char *lang2)
24340 {
24341 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24342 return "GNU C++17";
24343 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24344 return "GNU C++14";
24345 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24346 return "GNU C++11";
24347 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24348 return "GNU C++98";
24349
24350 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24351 return "GNU C17";
24352 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24353 return "GNU C11";
24354 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24355 return "GNU C99";
24356 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24357 return "GNU C89";
24358
24359 gcc_unreachable ();
24360 }
24361
24362
24363 /* Generate the DIE for the compilation unit. */
24364
24365 static dw_die_ref
24366 gen_compile_unit_die (const char *filename)
24367 {
24368 dw_die_ref die;
24369 const char *language_string = lang_hooks.name;
24370 int language;
24371
24372 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24373
24374 if (filename)
24375 {
24376 add_name_attribute (die, filename);
24377 /* Don't add cwd for <built-in>. */
24378 if (filename[0] != '<')
24379 add_comp_dir_attribute (die);
24380 }
24381
24382 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24383
24384 /* If our producer is LTO try to figure out a common language to use
24385 from the global list of translation units. */
24386 if (strcmp (language_string, "GNU GIMPLE") == 0)
24387 {
24388 unsigned i;
24389 tree t;
24390 const char *common_lang = NULL;
24391
24392 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24393 {
24394 if (!TRANSLATION_UNIT_LANGUAGE (t))
24395 continue;
24396 if (!common_lang)
24397 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24398 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24399 ;
24400 else if (strncmp (common_lang, "GNU C", 5) == 0
24401 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24402 /* Mixing C and C++ is ok, use C++ in that case. */
24403 common_lang = highest_c_language (common_lang,
24404 TRANSLATION_UNIT_LANGUAGE (t));
24405 else
24406 {
24407 /* Fall back to C. */
24408 common_lang = NULL;
24409 break;
24410 }
24411 }
24412
24413 if (common_lang)
24414 language_string = common_lang;
24415 }
24416
24417 language = DW_LANG_C;
24418 if (strncmp (language_string, "GNU C", 5) == 0
24419 && ISDIGIT (language_string[5]))
24420 {
24421 language = DW_LANG_C89;
24422 if (dwarf_version >= 3 || !dwarf_strict)
24423 {
24424 if (strcmp (language_string, "GNU C89") != 0)
24425 language = DW_LANG_C99;
24426
24427 if (dwarf_version >= 5 /* || !dwarf_strict */)
24428 if (strcmp (language_string, "GNU C11") == 0
24429 || strcmp (language_string, "GNU C17") == 0)
24430 language = DW_LANG_C11;
24431 }
24432 }
24433 else if (strncmp (language_string, "GNU C++", 7) == 0)
24434 {
24435 language = DW_LANG_C_plus_plus;
24436 if (dwarf_version >= 5 /* || !dwarf_strict */)
24437 {
24438 if (strcmp (language_string, "GNU C++11") == 0)
24439 language = DW_LANG_C_plus_plus_11;
24440 else if (strcmp (language_string, "GNU C++14") == 0)
24441 language = DW_LANG_C_plus_plus_14;
24442 else if (strcmp (language_string, "GNU C++17") == 0)
24443 /* For now. */
24444 language = DW_LANG_C_plus_plus_14;
24445 }
24446 }
24447 else if (strcmp (language_string, "GNU F77") == 0)
24448 language = DW_LANG_Fortran77;
24449 else if (dwarf_version >= 3 || !dwarf_strict)
24450 {
24451 if (strcmp (language_string, "GNU Ada") == 0)
24452 language = DW_LANG_Ada95;
24453 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24454 {
24455 language = DW_LANG_Fortran95;
24456 if (dwarf_version >= 5 /* || !dwarf_strict */)
24457 {
24458 if (strcmp (language_string, "GNU Fortran2003") == 0)
24459 language = DW_LANG_Fortran03;
24460 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24461 language = DW_LANG_Fortran08;
24462 }
24463 }
24464 else if (strcmp (language_string, "GNU Objective-C") == 0)
24465 language = DW_LANG_ObjC;
24466 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24467 language = DW_LANG_ObjC_plus_plus;
24468 else if (dwarf_version >= 5 || !dwarf_strict)
24469 {
24470 if (strcmp (language_string, "GNU Go") == 0)
24471 language = DW_LANG_Go;
24472 }
24473 }
24474 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24475 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24476 language = DW_LANG_Fortran90;
24477 /* Likewise for Ada. */
24478 else if (strcmp (language_string, "GNU Ada") == 0)
24479 language = DW_LANG_Ada83;
24480
24481 add_AT_unsigned (die, DW_AT_language, language);
24482
24483 switch (language)
24484 {
24485 case DW_LANG_Fortran77:
24486 case DW_LANG_Fortran90:
24487 case DW_LANG_Fortran95:
24488 case DW_LANG_Fortran03:
24489 case DW_LANG_Fortran08:
24490 /* Fortran has case-insensitive identifiers and the front-end
24491 lowercases everything. */
24492 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24493 break;
24494 default:
24495 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24496 break;
24497 }
24498 return die;
24499 }
24500
24501 /* Generate the DIE for a base class. */
24502
24503 static void
24504 gen_inheritance_die (tree binfo, tree access, tree type,
24505 dw_die_ref context_die)
24506 {
24507 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24508 struct vlr_context ctx = { type, NULL };
24509
24510 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24511 context_die);
24512 add_data_member_location_attribute (die, binfo, &ctx);
24513
24514 if (BINFO_VIRTUAL_P (binfo))
24515 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24516
24517 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24518 children, otherwise the default is DW_ACCESS_public. In DWARF2
24519 the default has always been DW_ACCESS_private. */
24520 if (access == access_public_node)
24521 {
24522 if (dwarf_version == 2
24523 || context_die->die_tag == DW_TAG_class_type)
24524 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24525 }
24526 else if (access == access_protected_node)
24527 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24528 else if (dwarf_version > 2
24529 && context_die->die_tag != DW_TAG_class_type)
24530 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24531 }
24532
24533 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24534 structure. */
24535 static bool
24536 is_variant_part (tree decl)
24537 {
24538 return (TREE_CODE (decl) == FIELD_DECL
24539 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24540 }
24541
24542 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24543 return the FIELD_DECL. Return NULL_TREE otherwise. */
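/* A minimal sketch (field name invented): for the GENERIC predicate
       (PLACEHOLDER_EXPR <struct_type>).discr == 1
   the left-hand operand, possibly wrapped in conversions, is stripped
   down to the COMPONENT_REF and the FIELD_DECL for "discr" is
   returned; anything else yields NULL_TREE.  */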
24544
24545 static tree
24546 analyze_discr_in_predicate (tree operand, tree struct_type)
24547 {
24548 bool continue_stripping = true;
24549 while (continue_stripping)
24550 switch (TREE_CODE (operand))
24551 {
24552 CASE_CONVERT:
24553 operand = TREE_OPERAND (operand, 0);
24554 break;
24555 default:
24556 continue_stripping = false;
24557 break;
24558 }
24559
24560 /* Match field access to members of struct_type only. */
24561 if (TREE_CODE (operand) == COMPONENT_REF
24562 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24563 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24564 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24565 return TREE_OPERAND (operand, 1);
24566 else
24567 return NULL_TREE;
24568 }
24569
24570 /* Check that SRC is a constant integer that can be represented as a native
24571 integer constant (either signed or unsigned). If so, store it into DEST and
24572 return true. Return false otherwise. */
24573
24574 static bool
24575 get_discr_value (tree src, dw_discr_value *dest)
24576 {
24577 tree discr_type = TREE_TYPE (src);
24578
24579 if (lang_hooks.types.get_debug_type)
24580 {
24581 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24582 if (debug_type != NULL)
24583 discr_type = debug_type;
24584 }
24585
24586 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24587 return false;
24588
24589 /* Signedness can vary between the original type and the debug type. This
24590 can happen for character types in Ada for instance: the character type
24591 used for code generation can be signed, to be compatible with the C one,
24592 but from a debugger point of view, it must be unsigned. */
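/* Hedged example: an Ada character discriminant may be generated with
   a signed 8-bit type, so the source value 200 is stored as the
   INTEGER_CST -56; converting it to the unsigned debug type below
   recovers 200 before the value is emitted.  */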
24593 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24594 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24595
24596 if (is_orig_unsigned != is_debug_unsigned)
24597 src = fold_convert (discr_type, src);
24598
24599 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24600 return false;
24601
24602 dest->pos = is_debug_unsigned;
24603 if (is_debug_unsigned)
24604 dest->v.uval = tree_to_uhwi (src);
24605 else
24606 dest->v.sval = tree_to_shwi (src);
24607
24608 return true;
24609 }
24610
24611 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24612 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24613 store NULL_TREE in DISCR_DECL. Otherwise:
24614
24615 - store the discriminant field in STRUCT_TYPE that controls the variant
24616 part to *DISCR_DECL
24617
24618 - put in *DISCR_LISTS_P an array where for each variant, the item
24619 represents the corresponding matching list of discriminant values.
24620
24621 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24622 the above array.
24623
24624 Note that when the array is allocated (i.e. when the analysis is
24625 successful), it is up to the caller to free the array. */
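/* As a hedged illustration (type and field names invented), an Ada
   variant record along the lines of:

       type Rec (Discr : Integer) is record
          case Discr is
             when 1 | 3   => ...
             when 5 .. 9  => ...
             when others  => ...
          end case;
       end record;

   reaches us as a QUAL_UNION_TYPE whose members carry DECL_QUALIFIER
   predicates roughly of the form:

       variant 1:  <rec>.discr == 1 || <rec>.discr == 3
       variant 2:  <rec>.discr >= 5 && <rec>.discr <= 9
       variant 3:  boolean_true_node  (the default variant)

   The analysis below walks these TRUTH_ORIF_EXPR / EQ_EXPR /
   TRUTH_ANDIF_EXPR trees to recover the discriminant FIELD_DECL and,
   for each variant, its list of matching values and ranges.  */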
24626
24627 static void
24628 analyze_variants_discr (tree variant_part_decl,
24629 tree struct_type,
24630 tree *discr_decl,
24631 dw_discr_list_ref **discr_lists_p,
24632 unsigned *discr_lists_length)
24633 {
24634 tree variant_part_type = TREE_TYPE (variant_part_decl);
24635 tree variant;
24636 dw_discr_list_ref *discr_lists;
24637 unsigned i;
24638
24639 /* Compute how many variants there are in this variant part. */
24640 *discr_lists_length = 0;
24641 for (variant = TYPE_FIELDS (variant_part_type);
24642 variant != NULL_TREE;
24643 variant = DECL_CHAIN (variant))
24644 ++*discr_lists_length;
24645
24646 *discr_decl = NULL_TREE;
24647 *discr_lists_p
24648 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24649 sizeof (**discr_lists_p));
24650 discr_lists = *discr_lists_p;
24651
24652 /* And then analyze all variants to extract discriminant information for all
24653 of them. This analysis is conservative: as soon as we detect something we
24654 do not support, abort everything and pretend we found nothing. */
24655 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24656 variant != NULL_TREE;
24657 variant = DECL_CHAIN (variant), ++i)
24658 {
24659 tree match_expr = DECL_QUALIFIER (variant);
24660
24661 /* Now, try to analyze the predicate and deduce a discriminant for
24662 it. */
24663 if (match_expr == boolean_true_node)
24664 /* Typically happens for the default variant: it matches all cases that
24665 previous variants rejected. Don't output any matching value for
24666 this one. */
24667 continue;
24668
24669 /* The following loop tries to iterate over each discriminant
24670 possibility: single values or ranges. */
24671 while (match_expr != NULL_TREE)
24672 {
24673 tree next_round_match_expr;
24674 tree candidate_discr = NULL_TREE;
24675 dw_discr_list_ref new_node = NULL;
24676
24677 /* Possibilities are matched one after the other by nested
24678 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24679 continue with the rest at the next iteration. */
24680 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24681 {
24682 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24683 match_expr = TREE_OPERAND (match_expr, 1);
24684 }
24685 else
24686 next_round_match_expr = NULL_TREE;
24687
24688 if (match_expr == boolean_false_node)
24689 /* This sub-expression matches nothing: just wait for the next
24690 one. */
24691 ;
24692
24693 else if (TREE_CODE (match_expr) == EQ_EXPR)
24694 {
24695 /* We are matching: <discr_field> == <integer_cst>
24696 This sub-expression matches a single value. */
24697 tree integer_cst = TREE_OPERAND (match_expr, 1);
24698
24699 candidate_discr
24700 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24701 struct_type);
24702
24703 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24704 if (!get_discr_value (integer_cst,
24705 &new_node->dw_discr_lower_bound))
24706 goto abort;
24707 new_node->dw_discr_range = false;
24708 }
24709
24710 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24711 {
24712 /* We are matching:
24713 <discr_field> > <integer_cst>
24714 && <discr_field> < <integer_cst>.
24715 This sub-expression matches the range of values between the
24716 two matched integer constants. Note that comparisons can be
24717 inclusive or exclusive. */
24718 tree candidate_discr_1, candidate_discr_2;
24719 tree lower_cst, upper_cst;
24720 bool lower_cst_included, upper_cst_included;
24721 tree lower_op = TREE_OPERAND (match_expr, 0);
24722 tree upper_op = TREE_OPERAND (match_expr, 1);
24723
24724 /* When the comparison is exclusive, the integer constant is not
24725 the discriminant range bound we are looking for: we will have
24726 to increment or decrement it. */
24727 if (TREE_CODE (lower_op) == GE_EXPR)
24728 lower_cst_included = true;
24729 else if (TREE_CODE (lower_op) == GT_EXPR)
24730 lower_cst_included = false;
24731 else
24732 goto abort;
24733
24734 if (TREE_CODE (upper_op) == LE_EXPR)
24735 upper_cst_included = true;
24736 else if (TREE_CODE (upper_op) == LT_EXPR)
24737 upper_cst_included = false;
24738 else
24739 goto abort;
24740
24741 /* Extract the discriminant from the first operand and check it
24742 is consistent with the same analysis in the second
24743 operand. */
24744 candidate_discr_1
24745 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24746 struct_type);
24747 candidate_discr_2
24748 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24749 struct_type);
24750 if (candidate_discr_1 == candidate_discr_2)
24751 candidate_discr = candidate_discr_1;
24752 else
24753 goto abort;
24754
24755 /* Extract bounds from both. */
24756 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24757 lower_cst = TREE_OPERAND (lower_op, 1);
24758 upper_cst = TREE_OPERAND (upper_op, 1);
24759
24760 if (!lower_cst_included)
24761 lower_cst
24762 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24763 build_int_cst (TREE_TYPE (lower_cst), 1));
24764 if (!upper_cst_included)
24765 upper_cst
24766 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24767 build_int_cst (TREE_TYPE (upper_cst), 1));
24768
24769 if (!get_discr_value (lower_cst,
24770 &new_node->dw_discr_lower_bound)
24771 || !get_discr_value (upper_cst,
24772 &new_node->dw_discr_upper_bound))
24773 goto abort;
24774
24775 new_node->dw_discr_range = true;
24776 }
24777
24778 else
24779 /* Unsupported sub-expression: we cannot determine the set of
24780 matching discriminant values. Abort everything. */
24781 goto abort;
24782
24783 /* If the discriminant info is not consistent with what we saw so
24784 far, consider the analysis failed and abort everything. */
24785 if (candidate_discr == NULL_TREE
24786 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24787 goto abort;
24788 else
24789 *discr_decl = candidate_discr;
24790
24791 if (new_node != NULL)
24792 {
24793 new_node->dw_discr_next = discr_lists[i];
24794 discr_lists[i] = new_node;
24795 }
24796 match_expr = next_round_match_expr;
24797 }
24798 }
24799
24800 /* If we reach this point, we could match everything we were interested
24801 in. */
24802 return;
24803
24804 abort:
24805 /* Clean up all data structures and return no result. */
24806 free (*discr_lists_p);
24807 *discr_lists_p = NULL;
24808 *discr_decl = NULL_TREE;
24809 }
24810
24811 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24812 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24813 under CONTEXT_DIE.
24814
24815 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24816 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24817 this type, which are record types, represent the available variants and each
24818 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24819 values are inferred from these attributes.
24820
24821 In trees, the offsets for the fields inside these sub-records are relative
24822 to the variant part itself, whereas the corresponding DIEs should have
24823 offset attributes that are relative to the embedding record base address.
24824 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24825 must be an expression that computes the offset of the variant part to
24826 describe in DWARF. */
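/* A hedged sketch of the intended DWARF shape (tags only, attributes
   abridged), assuming the discriminant already has a DIE:

       DW_TAG_structure_type
         DW_TAG_member                   <-- discriminant field
         DW_TAG_variant_part
           DW_AT_discr                   --> reference to that member
           DW_TAG_variant
             DW_AT_discr_value or DW_AT_discr_list
             DW_TAG_member ...           <-- members of this variant
           DW_TAG_variant
             ...  */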
24827
24828 static void
24829 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24830 dw_die_ref context_die)
24831 {
24832 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24833 tree variant_part_offset = vlr_ctx->variant_part_offset;
24834 struct loc_descr_context ctx = {
24835 vlr_ctx->struct_type, /* context_type */
24836 NULL_TREE, /* base_decl */
24837 NULL, /* dpi */
24838 false, /* placeholder_arg */
24839 false /* placeholder_seen */
24840 };
24841
24842 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24843 NULL_TREE if there is no such field. */
24844 tree discr_decl = NULL_TREE;
24845 dw_discr_list_ref *discr_lists;
24846 unsigned discr_lists_length = 0;
24847 unsigned i;
24848
24849 dw_die_ref dwarf_proc_die = NULL;
24850 dw_die_ref variant_part_die
24851 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24852
24853 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24854
24855 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24856 &discr_decl, &discr_lists, &discr_lists_length);
24857
24858 if (discr_decl != NULL_TREE)
24859 {
24860 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24861
24862 if (discr_die)
24863 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24864 else
24865 /* We have no DIE for the discriminant, so just discard all
24866 discriminant information in the output. */
24867 discr_decl = NULL_TREE;
24868 }
24869
24870 /* If the offset for this variant part is more complex than a constant,
24871 create a DWARF procedure for it so that we will not have to generate DWARF
24872 expressions for it for each member. */
24873 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24874 && (dwarf_version >= 3 || !dwarf_strict))
24875 {
24876 const tree dwarf_proc_fndecl
24877 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24878 build_function_type (TREE_TYPE (variant_part_offset),
24879 NULL_TREE));
24880 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24881 const dw_loc_descr_ref dwarf_proc_body
24882 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24883
24884 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24885 dwarf_proc_fndecl, context_die);
24886 if (dwarf_proc_die != NULL)
24887 variant_part_offset = dwarf_proc_call;
24888 }
24889
24890 /* Output DIEs for all variants. */
24891 i = 0;
24892 for (tree variant = TYPE_FIELDS (variant_part_type);
24893 variant != NULL_TREE;
24894 variant = DECL_CHAIN (variant), ++i)
24895 {
24896 tree variant_type = TREE_TYPE (variant);
24897 dw_die_ref variant_die;
24898
24899 /* All variants (i.e. members of a variant part) are supposed to be
24900 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24901 under these records. */
24902 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24903
24904 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24905 equate_decl_number_to_die (variant, variant_die);
24906
24907 /* Output discriminant values this variant matches, if any. */
24908 if (discr_decl == NULL || discr_lists[i] == NULL)
24909 /* Either we have no discriminant information at all, or this is
24910 probably the default variant: as the standard says, don't
24911 output any discriminant value/list attribute. */
24912 ;
24913 else if (discr_lists[i]->dw_discr_next == NULL
24914 && !discr_lists[i]->dw_discr_range)
24915 /* If there is only one accepted value, don't bother outputting a
24916 list. */
24917 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24918 else
24919 add_discr_list (variant_die, discr_lists[i]);
24920
24921 for (tree member = TYPE_FIELDS (variant_type);
24922 member != NULL_TREE;
24923 member = DECL_CHAIN (member))
24924 {
24925 struct vlr_context vlr_sub_ctx = {
24926 vlr_ctx->struct_type, /* struct_type */
24927 NULL /* variant_part_offset */
24928 };
24929 if (is_variant_part (member))
24930 {
24931 /* All offsets for fields inside variant parts are relative to
24932 the top-level embedding RECORD_TYPE's base address. On the
24933 other hand, offsets in GCC's types are relative to the
24934 nested-most variant part. So we have to sum offsets each time
24935 we recurse. */
24936
24937 vlr_sub_ctx.variant_part_offset
24938 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24939 variant_part_offset, byte_position (member));
24940 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24941 }
24942 else
24943 {
24944 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24945 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24946 }
24947 }
24948 }
24949
24950 free (discr_lists);
24951 }
24952
24953 /* Generate a DIE for a class member. */
24954
24955 static void
24956 gen_member_die (tree type, dw_die_ref context_die)
24957 {
24958 tree member;
24959 tree binfo = TYPE_BINFO (type);
24960
24961 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24962
24963 /* If this is not an incomplete type, output descriptions of each of its
24964 members. Note that as we output the DIEs necessary to represent the
24965 members of this record or union type, we will also be trying to output
24966 DIEs to represent the *types* of those members. However the `type'
24967 function (above) will specifically avoid generating type DIEs for member
24968 types *within* the list of member DIEs for this (containing) type except
24969 for those types (of members) which are explicitly marked as also being
24970 members of this (containing) type themselves. The g++ front-end can
24971 force any given type to be treated as a member of some other (containing)
24972 type by setting the TYPE_CONTEXT of the given (member) type to point to
24973 the TREE node representing the appropriate (containing) type. */
24974
24975 /* First output info about the base classes. */
24976 if (binfo)
24977 {
24978 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24979 int i;
24980 tree base;
24981
24982 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24983 gen_inheritance_die (base,
24984 (accesses ? (*accesses)[i] : access_public_node),
24985 type,
24986 context_die);
24987 }
24988
24989 /* Now output info about the data members and type members. */
24990 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24991 {
24992 struct vlr_context vlr_ctx = { type, NULL_TREE };
24993 bool static_inline_p
24994 = (TREE_STATIC (member)
24995 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24996 != -1));
24997
24998 /* Ignore clones. */
24999 if (DECL_ABSTRACT_ORIGIN (member))
25000 continue;
25001
25002 /* If we thought we were generating minimal debug info for TYPE
25003 and then changed our minds, some of the member declarations
25004 may have already been defined. Don't define them again, but
25005 do put them in the right order. */
25006
25007 if (dw_die_ref child = lookup_decl_die (member))
25008 {
25009 /* Handle inline static data members, which only have in-class
25010 declarations. */
25011 dw_die_ref ref = NULL;
25012 if (child->die_tag == DW_TAG_variable
25013 && child->die_parent == comp_unit_die ())
25014 {
25015 ref = get_AT_ref (child, DW_AT_specification);
25016 /* For C++17 inline static data members followed by redundant
25017 out-of-class redeclaration, we might get here with
25018 child being the DIE created for the out-of-class
25019 redeclaration and with its DW_AT_specification being
25020 the DIE created for in-class definition. We want to
25021 reparent the latter, and don't want to create another
25022 DIE with DW_AT_specification in that case, because
25023 we already have one. */
25024 if (ref
25025 && static_inline_p
25026 && ref->die_tag == DW_TAG_variable
25027 && ref->die_parent == comp_unit_die ()
25028 && get_AT (ref, DW_AT_specification) == NULL)
25029 {
25030 child = ref;
25031 ref = NULL;
25032 static_inline_p = false;
25033 }
25034 }
25035
25036 if (child->die_tag == DW_TAG_variable
25037 && child->die_parent == comp_unit_die ()
25038 && ref == NULL)
25039 {
25040 reparent_child (child, context_die);
25041 if (dwarf_version < 5)
25042 child->die_tag = DW_TAG_member;
25043 }
25044 else
25045 splice_child_die (context_die, child);
25046 }
25047
25048 /* Do not generate standard DWARF for variant parts if we are generating
25049 the corresponding GNAT encodings: DIEs generated for both would
25050 conflict in our mappings. */
25051 else if (is_variant_part (member)
25052 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25053 {
25054 vlr_ctx.variant_part_offset = byte_position (member);
25055 gen_variant_part (member, &vlr_ctx, context_die);
25056 }
25057 else
25058 {
25059 vlr_ctx.variant_part_offset = NULL_TREE;
25060 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25061 }
25062
25063 /* For C++ inline static data members emit immediately a DW_TAG_variable
25064 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25065 DW_AT_specification. */
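/* Hedged example (names invented): given
       struct S { static inline int x = 1; };
   the member's in-class DIE is emitted as a child of S above, and the
   call below additionally emits a DW_TAG_variable at the
   compilation-unit level that refers back to it through
   DW_AT_specification.  */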
25066 if (static_inline_p)
25067 {
25068 int old_extern = DECL_EXTERNAL (member);
25069 DECL_EXTERNAL (member) = 0;
25070 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25071 DECL_EXTERNAL (member) = old_extern;
25072 }
25073 }
25074 }
25075
25076 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25077 is set, we pretend that the type was never defined, so we only get the
25078 member DIEs needed by later specification DIEs. */
25079
25080 static void
25081 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25082 enum debug_info_usage usage)
25083 {
25084 if (TREE_ASM_WRITTEN (type))
25085 {
25086 /* Fill in the bound of variable-length fields in late dwarf if
25087 still incomplete. */
25088 if (!early_dwarf && variably_modified_type_p (type, NULL))
25089 for (tree member = TYPE_FIELDS (type);
25090 member;
25091 member = DECL_CHAIN (member))
25092 fill_variable_array_bounds (TREE_TYPE (member));
25093 return;
25094 }
25095
25096 dw_die_ref type_die = lookup_type_die (type);
25097 dw_die_ref scope_die = 0;
25098 int nested = 0;
25099 int complete = (TYPE_SIZE (type)
25100 && (! TYPE_STUB_DECL (type)
25101 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25102 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25103 complete = complete && should_emit_struct_debug (type, usage);
25104
25105 if (type_die && ! complete)
25106 return;
25107
25108 if (TYPE_CONTEXT (type) != NULL_TREE
25109 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25110 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25111 nested = 1;
25112
25113 scope_die = scope_die_for (type, context_die);
25114
25115 /* Generate child DIEs for template parameters. */
25116 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25117 schedule_generic_params_dies_gen (type);
25118
25119 if (! type_die || (nested && is_cu_die (scope_die)))
25120 /* First occurrence of type or toplevel definition of nested class. */
25121 {
25122 dw_die_ref old_die = type_die;
25123
25124 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25125 ? record_type_tag (type) : DW_TAG_union_type,
25126 scope_die, type);
25127 equate_type_number_to_die (type, type_die);
25128 if (old_die)
25129 add_AT_specification (type_die, old_die);
25130 else
25131 add_name_attribute (type_die, type_tag (type));
25132 }
25133 else
25134 remove_AT (type_die, DW_AT_declaration);
25135
25136 /* If this type has been completed, then give it a byte_size attribute and
25137 then give a list of members. */
25138 if (complete && !ns_decl)
25139 {
25140 /* Prevent infinite recursion in cases where the type of some member of
25141 this type is expressed in terms of this type itself. */
25142 TREE_ASM_WRITTEN (type) = 1;
25143 add_byte_size_attribute (type_die, type);
25144 add_alignment_attribute (type_die, type);
25145 if (TYPE_STUB_DECL (type) != NULL_TREE)
25146 {
25147 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25148 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25149 }
25150
25151 /* If the first reference to this type was as the return type of an
25152 inline function, then it may not have a parent. Fix this now. */
25153 if (type_die->die_parent == NULL)
25154 add_child_die (scope_die, type_die);
25155
25156 gen_member_die (type, type_die);
25157
25158 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25159 if (TYPE_ARTIFICIAL (type))
25160 add_AT_flag (type_die, DW_AT_artificial, 1);
25161
25162 /* GNU extension: Record what type our vtable lives in. */
25163 if (TYPE_VFIELD (type))
25164 {
25165 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25166
25167 gen_type_die (vtype, context_die);
25168 add_AT_die_ref (type_die, DW_AT_containing_type,
25169 lookup_type_die (vtype));
25170 }
25171 }
25172 else
25173 {
25174 add_AT_flag (type_die, DW_AT_declaration, 1);
25175
25176 /* We don't need to do this for function-local types. */
25177 if (TYPE_STUB_DECL (type)
25178 && ! decl_function_context (TYPE_STUB_DECL (type)))
25179 vec_safe_push (incomplete_types, type);
25180 }
25181
25182 if (get_AT (type_die, DW_AT_name))
25183 add_pubtype (type, type_die);
25184 }
25185
25186 /* Generate a DIE for a subroutine _type_. */
25187
25188 static void
25189 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25190 {
25191 tree return_type = TREE_TYPE (type);
25192 dw_die_ref subr_die
25193 = new_die (DW_TAG_subroutine_type,
25194 scope_die_for (type, context_die), type);
25195
25196 equate_type_number_to_die (type, subr_die);
25197 add_prototyped_attribute (subr_die, type);
25198 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25199 context_die);
25200 add_alignment_attribute (subr_die, type);
25201 gen_formal_types_die (type, subr_die);
25202
25203 if (get_AT (subr_die, DW_AT_name))
25204 add_pubtype (type, subr_die);
25205 if ((dwarf_version >= 5 || !dwarf_strict)
25206 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25207 add_AT_flag (subr_die, DW_AT_reference, 1);
25208 if ((dwarf_version >= 5 || !dwarf_strict)
25209 && lang_hooks.types.type_dwarf_attribute (type,
25210 DW_AT_rvalue_reference) != -1)
25211 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25212 }
25213
25214 /* Generate a DIE for a type definition. */
25215
25216 static void
25217 gen_typedef_die (tree decl, dw_die_ref context_die)
25218 {
25219 dw_die_ref type_die;
25220 tree type;
25221
25222 if (TREE_ASM_WRITTEN (decl))
25223 {
25224 if (DECL_ORIGINAL_TYPE (decl))
25225 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25226 return;
25227 }
25228
25229 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25230 checks in process_scope_var and modified_type_die), this should be called
25231 only for original types. */
25232 gcc_assert (decl_ultimate_origin (decl) == NULL
25233 || decl_ultimate_origin (decl) == decl);
25234
25235 TREE_ASM_WRITTEN (decl) = 1;
25236 type_die = new_die (DW_TAG_typedef, context_die, decl);
25237
25238 add_name_and_src_coords_attributes (type_die, decl);
25239 if (DECL_ORIGINAL_TYPE (decl))
25240 {
25241 type = DECL_ORIGINAL_TYPE (decl);
25242 if (type == error_mark_node)
25243 return;
25244
25245 gcc_assert (type != TREE_TYPE (decl));
25246 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25247 }
25248 else
25249 {
25250 type = TREE_TYPE (decl);
25251 if (type == error_mark_node)
25252 return;
25253
25254 if (is_naming_typedef_decl (TYPE_NAME (type)))
25255 {
25256 /* Here, we are in the case of decl being a typedef naming
25257 an anonymous type, e.g.:
25258 typedef struct {...} foo;
25259 In that case TREE_TYPE (decl) is not a typedef variant
25260 type and TYPE_NAME of the anonymous type is set to the
25261 TYPE_DECL of the typedef. This construct is emitted by
25262 the C++ FE.
25263
25264 TYPE is the anonymous struct named by the typedef
25265 DECL. As we need the DW_AT_type attribute of the
25266 DW_TAG_typedef to point to the DIE of TYPE, let's
25267 generate that DIE right away. add_type_attribute
25268 called below will then pick (via lookup_type_die) that
25269 anonymous struct DIE. */
25270 if (!TREE_ASM_WRITTEN (type))
25271 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25272
25273 /* This is a GNU Extension. We are adding a
25274 DW_AT_linkage_name attribute to the DIE of the
25275 anonymous struct TYPE. The value of that attribute
25276 is the name of the typedef decl naming the anonymous
25277 struct. This greatly eases the work of consumers of
25278 this debug info. */
25279 add_linkage_name_raw (lookup_type_die (type), decl);
25280 }
25281 }
25282
25283 add_type_attribute (type_die, type, decl_quals (decl), false,
25284 context_die);
25285
25286 if (is_naming_typedef_decl (decl))
25287 /* We want that all subsequent calls to lookup_type_die with
25288 TYPE in argument yield the DW_TAG_typedef we have just
25289 created. */
25290 equate_type_number_to_die (type, type_die);
25291
25292 add_alignment_attribute (type_die, TREE_TYPE (decl));
25293
25294 add_accessibility_attribute (type_die, decl);
25295
25296 if (DECL_ABSTRACT_P (decl))
25297 equate_decl_number_to_die (decl, type_die);
25298
25299 if (get_AT (type_die, DW_AT_name))
25300 add_pubtype (decl, type_die);
25301 }
25302
25303 /* Generate a DIE for a struct, class, enum or union type. */
25304
25305 static void
25306 gen_tagged_type_die (tree type,
25307 dw_die_ref context_die,
25308 enum debug_info_usage usage)
25309 {
25310 if (type == NULL_TREE
25311 || !is_tagged_type (type))
25312 return;
25313
25314 if (TREE_ASM_WRITTEN (type))
25315 ;
25316 /* If this is a nested type whose containing class hasn't been written
25317 out yet, writing it out will cover this one, too. This does not apply
25318 to instantiations of member class templates; they need to be added to
25319 the containing class as they are generated. FIXME: This hurts the
25320 idea of combining type decls from multiple TUs, since we can't predict
25321 what set of template instantiations we'll get. */
25322 else if (TYPE_CONTEXT (type)
25323 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25324 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25325 {
25326 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25327
25328 if (TREE_ASM_WRITTEN (type))
25329 return;
25330
25331 /* If that failed, attach ourselves to the stub. */
25332 context_die = lookup_type_die (TYPE_CONTEXT (type));
25333 }
25334 else if (TYPE_CONTEXT (type) != NULL_TREE
25335 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25336 {
25337 /* If this type is local to a function that hasn't been written
25338 out yet, use a NULL context for now; it will be fixed up in
25339 decls_for_scope. */
25340 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25341 /* A declaration DIE doesn't count; nested types need to go in the
25342 specification. */
25343 if (context_die && is_declaration_die (context_die))
25344 context_die = NULL;
25345 }
25346 else
25347 context_die = declare_in_namespace (type, context_die);
25348
25349 if (TREE_CODE (type) == ENUMERAL_TYPE)
25350 {
25351 /* This might have been written out by the call to
25352 declare_in_namespace. */
25353 if (!TREE_ASM_WRITTEN (type))
25354 gen_enumeration_type_die (type, context_die);
25355 }
25356 else
25357 gen_struct_or_union_type_die (type, context_die, usage);
25358
25359 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25360 it up if it is ever completed. gen_*_type_die will set it for us
25361 when appropriate. */
25362 }
25363
25364 /* Generate a type description DIE. */
25365
25366 static void
25367 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25368 enum debug_info_usage usage)
25369 {
25370 struct array_descr_info info;
25371
25372 if (type == NULL_TREE || type == error_mark_node)
25373 return;
25374
25375 if (flag_checking && type)
25376 verify_type (type);
25377
25378 if (TYPE_NAME (type) != NULL_TREE
25379 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25380 && is_redundant_typedef (TYPE_NAME (type))
25381 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25382 /* The DECL of this type is a typedef we don't want to emit debug
25383 info for but we want debug info for its underlying typedef.
25384 This can happen, e.g., for the injected-class-name of a C++
25385 type. */
25386 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25387
25388 /* If TYPE is a typedef type variant, let's generate debug info
25389 for the parent typedef which TYPE is a type of. */
25390 if (typedef_variant_p (type))
25391 {
25392 if (TREE_ASM_WRITTEN (type))
25393 return;
25394
25395 tree name = TYPE_NAME (type);
25396 tree origin = decl_ultimate_origin (name);
25397 if (origin != NULL && origin != name)
25398 {
25399 gen_decl_die (origin, NULL, NULL, context_die);
25400 return;
25401 }
25402
25403 /* Prevent broken recursion; we can't hand off to the same type. */
25404 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25405
25406 /* Give typedefs the right scope. */
25407 context_die = scope_die_for (type, context_die);
25408
25409 TREE_ASM_WRITTEN (type) = 1;
25410
25411 gen_decl_die (name, NULL, NULL, context_die);
25412 return;
25413 }
25414
25415 /* If type is an anonymous tagged type named by a typedef, let's
25416 generate debug info for the typedef. */
25417 if (is_naming_typedef_decl (TYPE_NAME (type)))
25418 {
25419 /* Give typedefs the right scope. */
25420 context_die = scope_die_for (type, context_die);
25421
25422 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25423 return;
25424 }
25425
25426 if (lang_hooks.types.get_debug_type)
25427 {
25428 tree debug_type = lang_hooks.types.get_debug_type (type);
25429
25430 if (debug_type != NULL_TREE && debug_type != type)
25431 {
25432 gen_type_die_with_usage (debug_type, context_die, usage);
25433 return;
25434 }
25435 }
25436
25437 /* We are going to output a DIE to represent the unqualified version
25438 of this type (i.e. without any const or volatile qualifiers) so
25439 get the main variant (i.e. the unqualified version) of this type
25440 now. (Vectors and arrays are special because the debugging info is in the
25441 cloned type itself. Similarly function/method types can contain extra
25442 ref-qualification). */
25443 if (TREE_CODE (type) == FUNCTION_TYPE
25444 || TREE_CODE (type) == METHOD_TYPE)
25445 {
25446 /* For function/method types, can't use type_main_variant here,
25447 because that can have different ref-qualifiers for C++,
25448 but try to canonicalize. */
25449 tree main = TYPE_MAIN_VARIANT (type);
25450 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25451 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25452 && check_base_type (t, main)
25453 && check_lang_type (t, type))
25454 {
25455 type = t;
25456 break;
25457 }
25458 }
25459 else if (TREE_CODE (type) != VECTOR_TYPE
25460 && TREE_CODE (type) != ARRAY_TYPE)
25461 type = type_main_variant (type);
25462
25463 /* If this is an array type with hidden descriptor, handle it first. */
25464 if (!TREE_ASM_WRITTEN (type)
25465 && lang_hooks.types.get_array_descr_info)
25466 {
25467 memset (&info, 0, sizeof (info));
25468 if (lang_hooks.types.get_array_descr_info (type, &info))
25469 {
25470 /* Fortran sometimes emits array types with no dimension. */
25471 gcc_assert (info.ndimensions >= 0
25472 && (info.ndimensions
25473 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25474 gen_descr_array_type_die (type, &info, context_die);
25475 TREE_ASM_WRITTEN (type) = 1;
25476 return;
25477 }
25478 }
25479
25480 if (TREE_ASM_WRITTEN (type))
25481 {
25482 /* Variable-length types may be incomplete even if
25483 TREE_ASM_WRITTEN. For such types, fall through to
25484 gen_array_type_die() and possibly fill in
25485 DW_AT_{upper,lower}_bound attributes. */
25486 if ((TREE_CODE (type) != ARRAY_TYPE
25487 && TREE_CODE (type) != RECORD_TYPE
25488 && TREE_CODE (type) != UNION_TYPE
25489 && TREE_CODE (type) != QUAL_UNION_TYPE)
25490 || !variably_modified_type_p (type, NULL))
25491 return;
25492 }
25493
25494 switch (TREE_CODE (type))
25495 {
25496 case ERROR_MARK:
25497 break;
25498
25499 case POINTER_TYPE:
25500 case REFERENCE_TYPE:
25501 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25502 ensures that the gen_type_die recursion will terminate even if the
25503 type is recursive. Recursive types are possible in Ada. */
25504 /* ??? We could perhaps do this for all types before the switch
25505 statement. */
25506 TREE_ASM_WRITTEN (type) = 1;
25507
25508 /* For these types, all that is required is that we output a DIE (or a
25509 set of DIEs) to represent the "basis" type. */
25510 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25511 DINFO_USAGE_IND_USE);
25512 break;
25513
25514 case OFFSET_TYPE:
25515 /* This code is used for C++ pointer-to-data-member types.
25516 Output a description of the relevant class type. */
25517 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25518 DINFO_USAGE_IND_USE);
25519
25520 /* Output a description of the type of the object pointed to. */
25521 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25522 DINFO_USAGE_IND_USE);
25523
25524 /* Now output a DIE to represent this pointer-to-data-member type
25525 itself. */
25526 gen_ptr_to_mbr_type_die (type, context_die);
25527 break;
25528
25529 case FUNCTION_TYPE:
25530 /* Force out return type (in case it wasn't forced out already). */
25531 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25532 DINFO_USAGE_DIR_USE);
25533 gen_subroutine_type_die (type, context_die);
25534 break;
25535
25536 case METHOD_TYPE:
25537 /* Force out return type (in case it wasn't forced out already). */
25538 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25539 DINFO_USAGE_DIR_USE);
25540 gen_subroutine_type_die (type, context_die);
25541 break;
25542
25543 case ARRAY_TYPE:
25544 case VECTOR_TYPE:
25545 gen_array_type_die (type, context_die);
25546 break;
25547
25548 case ENUMERAL_TYPE:
25549 case RECORD_TYPE:
25550 case UNION_TYPE:
25551 case QUAL_UNION_TYPE:
25552 gen_tagged_type_die (type, context_die, usage);
25553 return;
25554
25555 case VOID_TYPE:
25556 case INTEGER_TYPE:
25557 case REAL_TYPE:
25558 case FIXED_POINT_TYPE:
25559 case COMPLEX_TYPE:
25560 case BOOLEAN_TYPE:
25561 /* No DIEs needed for fundamental types. */
25562 break;
25563
25564 case NULLPTR_TYPE:
25565 case LANG_TYPE:
25566 /* Just use DW_TAG_unspecified_type. */
25567 {
25568 dw_die_ref type_die = lookup_type_die (type);
25569 if (type_die == NULL)
25570 {
25571 tree name = TYPE_IDENTIFIER (type);
25572 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25573 type);
25574 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25575 equate_type_number_to_die (type, type_die);
25576 }
25577 }
25578 break;
25579
25580 default:
25581 if (is_cxx_auto (type))
25582 {
25583 tree name = TYPE_IDENTIFIER (type);
25584 dw_die_ref *die = (name == get_identifier ("auto")
25585 ? &auto_die : &decltype_auto_die);
25586 if (!*die)
25587 {
25588 *die = new_die (DW_TAG_unspecified_type,
25589 comp_unit_die (), NULL_TREE);
25590 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25591 }
25592 equate_type_number_to_die (type, *die);
25593 break;
25594 }
25595 gcc_unreachable ();
25596 }
25597
25598 TREE_ASM_WRITTEN (type) = 1;
25599 }
25600
25601 static void
25602 gen_type_die (tree type, dw_die_ref context_die)
25603 {
25604 if (type != error_mark_node)
25605 {
25606 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25607 if (flag_checking)
25608 {
25609 dw_die_ref die = lookup_type_die (type);
25610 if (die)
25611 check_die (die);
25612 }
25613 }
25614 }
25615
25616 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25617 things which are local to the given block. */
25618
25619 static void
25620 gen_block_die (tree stmt, dw_die_ref context_die)
25621 {
25622 int must_output_die = 0;
25623 bool inlined_func;
25624
25625 /* Ignore blocks that are NULL. */
25626 if (stmt == NULL_TREE)
25627 return;
25628
25629 inlined_func = inlined_function_outer_scope_p (stmt);
25630
25631 /* If the block is one fragment of a non-contiguous block, do not
25632 process the variables, since they will have been done by the
25633 origin block. Do process subblocks. */
25634 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25635 {
25636 tree sub;
25637
25638 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25639 gen_block_die (sub, context_die);
25640
25641 return;
25642 }
25643
25644 /* Determine if we need to output any Dwarf DIEs at all to represent this
25645 block. */
25646 if (inlined_func)
25647 /* The outer scopes for inlinings *must* always be represented. We
25648 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25649 must_output_die = 1;
25650 else if (BLOCK_DIE (stmt))
25651 /* If we already have a DIE then it was filled early. Meanwhile
25652 we might have pruned all BLOCK_VARS as optimized out but we
25653 still want to generate high/low PC attributes so output it. */
25654 must_output_die = 1;
25655 else if (TREE_USED (stmt)
25656 || TREE_ASM_WRITTEN (stmt)
25657 || BLOCK_ABSTRACT (stmt))
25658 {
25659 /* Determine if this block directly contains any "significant"
25660 local declarations which we will need to output DIEs for. */
25661 if (debug_info_level > DINFO_LEVEL_TERSE)
25662 {
25663 /* We are not in terse mode so any local declaration that
25664 is not ignored for debug purposes counts as being a
25665 "significant" one. */
25666 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25667 must_output_die = 1;
25668 else
25669 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25670 if (!DECL_IGNORED_P (var))
25671 {
25672 must_output_die = 1;
25673 break;
25674 }
25675 }
25676 else if (!dwarf2out_ignore_block (stmt))
25677 must_output_die = 1;
25678 }
25679
25680 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25681 DIE for any block which contains no significant local declarations at
25682 all. Rather, in such cases we just call `decls_for_scope' so that any
25683 needed Dwarf info for any sub-blocks will get properly generated. Note
25684 that in terse mode, our definition of what constitutes a "significant"
25685 local declaration gets restricted to include only inlined function
25686 instances and local (nested) function definitions. */
25687 if (must_output_die)
25688 {
25689 if (inlined_func)
25690 {
25691 /* If STMT block is abstract, that means we have been called
25692 indirectly from dwarf2out_abstract_function.
25693 That function rightfully marks the descendant blocks (of
25694 the abstract function it is dealing with) as being abstract,
25695 precisely to prevent us from emitting any
25696 DW_TAG_inlined_subroutine DIE as a descendant
25697 of an abstract function instance. So in that case, we should
25698 not call gen_inlined_subroutine_die.
25699
25700 Later though, when cgraph asks dwarf2out to emit info
25701 for the concrete instance of the function decl into which
25702 the concrete instance of STMT got inlined, the latter will lead
25703 to the generation of a DW_TAG_inlined_subroutine DIE. */
25704 if (! BLOCK_ABSTRACT (stmt))
25705 gen_inlined_subroutine_die (stmt, context_die);
25706 }
25707 else
25708 gen_lexical_block_die (stmt, context_die);
25709 }
25710 else
25711 decls_for_scope (stmt, context_die);
25712 }
25713
25714 /* Process variable DECL (or variable with origin ORIGIN) within
25715 block STMT and add it to CONTEXT_DIE. */
25716 static void
25717 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25718 {
25719 dw_die_ref die;
25720 tree decl_or_origin = decl ? decl : origin;
25721
25722 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25723 die = lookup_decl_die (decl_or_origin);
25724 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25725 {
25726 if (TYPE_DECL_IS_STUB (decl_or_origin))
25727 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25728 else
25729 die = lookup_decl_die (decl_or_origin);
25730 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25731 if (! die && ! early_dwarf)
25732 return;
25733 }
25734 else
25735 die = NULL;
25736
25737 /* Avoid creating DIEs for local typedefs and concrete static variables that
25738 will only be pruned later. */
25739 if ((origin || decl_ultimate_origin (decl))
25740 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25741 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25742 {
25743 origin = decl_ultimate_origin (decl_or_origin);
25744 if (decl && VAR_P (decl) && die != NULL)
25745 {
25746 die = lookup_decl_die (origin);
25747 if (die != NULL)
25748 equate_decl_number_to_die (decl, die);
25749 }
25750 return;
25751 }
25752
25753 if (die != NULL && die->die_parent == NULL)
25754 add_child_die (context_die, die);
25755 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25756 {
25757 if (early_dwarf)
25758 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25759 stmt, context_die);
25760 }
25761 else
25762 {
25763 if (decl && DECL_P (decl))
25764 {
25765 die = lookup_decl_die (decl);
25766
25767 /* Early created DIEs do not have a parent as the decls refer
25768 to the function as DECL_CONTEXT rather than the BLOCK. */
25769 if (die && die->die_parent == NULL)
25770 {
25771 gcc_assert (in_lto_p);
25772 add_child_die (context_die, die);
25773 }
25774 }
25775
25776 gen_decl_die (decl, origin, NULL, context_die);
25777 }
25778 }
25779
25780 /* Generate all of the decls declared within a given scope and (recursively)
25781 all of its sub-blocks. */
25782
25783 static void
25784 decls_for_scope (tree stmt, dw_die_ref context_die)
25785 {
25786 tree decl;
25787 unsigned int i;
25788 tree subblocks;
25789
25790 /* Ignore NULL blocks. */
25791 if (stmt == NULL_TREE)
25792 return;
25793
25794 /* Output the DIEs to represent all of the data objects and typedefs
25795 declared directly within this block but not within any nested
25796 sub-blocks. Also, nested function and tag DIEs have been
25797 generated with a parent of NULL; fix that up now. We don't
25798 have to do this if we're at -g1. */
25799 if (debug_info_level > DINFO_LEVEL_TERSE)
25800 {
25801 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25802 process_scope_var (stmt, decl, NULL_TREE, context_die);
25803 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25804 origin - avoid doing this twice as we have no good way to see
25805 if we've done it once already. */
25806 if (! early_dwarf)
25807 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25808 {
25809 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25810 if (decl == current_function_decl)
25811 /* Ignore declarations of the current function: although they
25812 are declarations, gen_subprogram_die would treat them
25813 as definitions again, because they are equal to
25814 current_function_decl, and endlessly recurse. */;
25815 else if (TREE_CODE (decl) == FUNCTION_DECL)
25816 process_scope_var (stmt, decl, NULL_TREE, context_die);
25817 else
25818 process_scope_var (stmt, NULL_TREE, decl, context_die);
25819 }
25820 }
25821
25822 /* Even if we're at -g1, we need to process the subblocks in order to get
25823 inlined call information. */
25824
25825 /* Output the DIEs to represent all sub-blocks (and the items declared
25826 therein) of this block. */
25827 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25828 subblocks != NULL;
25829 subblocks = BLOCK_CHAIN (subblocks))
25830 gen_block_die (subblocks, context_die);
25831 }
25832
25833 /* Is this a typedef we can avoid emitting? */
25834
25835 static bool
25836 is_redundant_typedef (const_tree decl)
25837 {
25838 if (TYPE_DECL_IS_STUB (decl))
25839 return true;
25840
25841 if (DECL_ARTIFICIAL (decl)
25842 && DECL_CONTEXT (decl)
25843 && is_tagged_type (DECL_CONTEXT (decl))
25844 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25845 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25846 /* Also ignore the artificial member typedef for the class name. */
25847 return true;
25848
25849 return false;
25850 }
25851
25852 /* Return TRUE if TYPE is a typedef that names a type for linkage
25853 purposes. This kind of typedef is produced by the C++ FE for
25854 constructs like:
25855
25856 typedef struct {...} foo;
25857
25858 In that case, there is no typedef variant type produced for foo.
25859 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25860 struct type. */
25861
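/* A hedged illustration (typical front-end behavior, not something this
   file guarantees): contrast

       typedef struct { int i; } foo;   // naming typedef: the anonymous
                                        // struct's TYPE_NAME is this
                                        // TYPE_DECL and DECL_ORIGINAL_TYPE
                                        // is NULL

   with

       struct bar { int i; };
       typedef struct bar baz;          // ordinary typedef: DECL_ORIGINAL_TYPE
                                        // records struct bar

   Only the first form makes the predicate below return true.  */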
25862 static bool
25863 is_naming_typedef_decl (const_tree decl)
25864 {
25865 if (decl == NULL_TREE
25866 || TREE_CODE (decl) != TYPE_DECL
25867 || DECL_NAMELESS (decl)
25868 || !is_tagged_type (TREE_TYPE (decl))
25869 || DECL_IS_BUILTIN (decl)
25870 || is_redundant_typedef (decl)
25871 /* It looks like Ada produces TYPE_DECLs that are very similar
25872 to C++ naming typedefs but that have different
25873 semantics. Let's be specific to C++ for now. */
25874 || !is_cxx (decl))
25875 return FALSE;
25876
25877 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25878 && TYPE_NAME (TREE_TYPE (decl)) == decl
25879 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25880 != TYPE_NAME (TREE_TYPE (decl))));
25881 }
25882
25883 /* Looks up the DIE for a context. */
25884
25885 static inline dw_die_ref
25886 lookup_context_die (tree context)
25887 {
25888 if (context)
25889 {
25890 /* Find die that represents this context. */
25891 if (TYPE_P (context))
25892 {
25893 context = TYPE_MAIN_VARIANT (context);
25894 dw_die_ref ctx = lookup_type_die (context);
25895 if (!ctx)
25896 return NULL;
25897 return strip_naming_typedef (context, ctx);
25898 }
25899 else
25900 return lookup_decl_die (context);
25901 }
25902 return comp_unit_die ();
25903 }
25904
25905 /* Returns the DIE for a context. */
25906
25907 static inline dw_die_ref
25908 get_context_die (tree context)
25909 {
25910 if (context)
25911 {
25912 /* Find die that represents this context. */
25913 if (TYPE_P (context))
25914 {
25915 context = TYPE_MAIN_VARIANT (context);
25916 return strip_naming_typedef (context, force_type_die (context));
25917 }
25918 else
25919 return force_decl_die (context);
25920 }
25921 return comp_unit_die ();
25922 }
25923
25924 /* Returns the DIE for decl. A DIE will always be returned. */
25925
25926 static dw_die_ref
25927 force_decl_die (tree decl)
25928 {
25929 dw_die_ref decl_die;
25930 unsigned saved_external_flag;
25931 tree save_fn = NULL_TREE;
25932 decl_die = lookup_decl_die (decl);
25933 if (!decl_die)
25934 {
25935 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25936
25937 decl_die = lookup_decl_die (decl);
25938 if (decl_die)
25939 return decl_die;
25940
25941 switch (TREE_CODE (decl))
25942 {
25943 case FUNCTION_DECL:
25944 /* Clear current_function_decl, so that gen_subprogram_die thinks
25945 that this is a declaration. At this point, we just want to force
25946 declaration die. */
25947 save_fn = current_function_decl;
25948 current_function_decl = NULL_TREE;
25949 gen_subprogram_die (decl, context_die);
25950 current_function_decl = save_fn;
25951 break;
25952
25953 case VAR_DECL:
25954 /* Set external flag to force declaration die. Restore it after
25955 gen_decl_die() call. */
25956 saved_external_flag = DECL_EXTERNAL (decl);
25957 DECL_EXTERNAL (decl) = 1;
25958 gen_decl_die (decl, NULL, NULL, context_die);
25959 DECL_EXTERNAL (decl) = saved_external_flag;
25960 break;
25961
25962 case NAMESPACE_DECL:
25963 if (dwarf_version >= 3 || !dwarf_strict)
25964 dwarf2out_decl (decl);
25965 else
25966 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25967 decl_die = comp_unit_die ();
25968 break;
25969
25970 case TRANSLATION_UNIT_DECL:
25971 decl_die = comp_unit_die ();
25972 break;
25973
25974 default:
25975 gcc_unreachable ();
25976 }
25977
25978 /* We should be able to find the DIE now. */
25979 if (!decl_die)
25980 decl_die = lookup_decl_die (decl);
25981 gcc_assert (decl_die);
25982 }
25983
25984 return decl_die;
25985 }
25986
25987 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25988 always returned. */
25989
25990 static dw_die_ref
25991 force_type_die (tree type)
25992 {
25993 dw_die_ref type_die;
25994
25995 type_die = lookup_type_die (type);
25996 if (!type_die)
25997 {
25998 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25999
26000 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26001 false, context_die);
26002 gcc_assert (type_die);
26003 }
26004 return type_die;
26005 }
26006
26007 /* Force out any required namespaces to be able to output DECL,
26008 and return the new context_die for it, if it's changed. */
26009
26010 static dw_die_ref
26011 setup_namespace_context (tree thing, dw_die_ref context_die)
26012 {
26013 tree context = (DECL_P (thing)
26014 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26015 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26016 /* Force out the namespace. */
26017 context_die = force_decl_die (context);
26018
26019 return context_die;
26020 }
26021
26022 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26023 type) within its namespace, if appropriate.
26024
26025 For compatibility with older debuggers, namespace DIEs only contain
26026 declarations; all definitions are emitted at CU scope, with
26027 DW_AT_specification pointing to the declaration (like with class
26028 members). */
26029
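/* A rough sketch of the layout described above (the exact attributes vary
   with language and options); for

       namespace N { int i = 5; }

   we get approximately

       DW_TAG_compile_unit
         DW_TAG_namespace "N"
           DW_TAG_variable "i"    <- DW_AT_declaration
         DW_TAG_variable          <- DW_AT_specification pointing at the
                                     declaration above, plus location info  */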
26030 static dw_die_ref
26031 declare_in_namespace (tree thing, dw_die_ref context_die)
26032 {
26033 dw_die_ref ns_context;
26034
26035 if (debug_info_level <= DINFO_LEVEL_TERSE)
26036 return context_die;
26037
26038 /* External declarations in the local scope only need to be emitted
26039 once, not once in the namespace and once in the scope.
26040
26041 This avoids declaring the `extern' below in the
26042 namespace DIE as well as in the innermost scope:
26043
26044 namespace S
26045 {
26046 int i=5;
26047 int foo()
26048 {
26049 int i=8;
26050 extern int i;
26051 return i;
26052 }
26053 }
26054 */
26055 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26056 return context_die;
26057
26058 /* If this decl is from an inlined function, then don't try to emit it in its
26059 namespace, as we will get confused. It would have already been emitted
26060 when the abstract instance of the inline function was emitted anyway. */
26061 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26062 return context_die;
26063
26064 ns_context = setup_namespace_context (thing, context_die);
26065
26066 if (ns_context != context_die)
26067 {
26068 if (is_fortran ())
26069 return ns_context;
26070 if (DECL_P (thing))
26071 gen_decl_die (thing, NULL, NULL, ns_context);
26072 else
26073 gen_type_die (thing, ns_context);
26074 }
26075 return context_die;
26076 }
26077
26078 /* Generate a DIE for a namespace or namespace alias. */
26079
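/* Roughly, following the two branches of the function below:

       namespace N { }      // DW_TAG_namespace (DW_TAG_module for Fortran)
       namespace M = N;     // DW_TAG_imported_declaration whose DW_AT_import
                            // refers to N's DIE  */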
26080 static void
26081 gen_namespace_die (tree decl, dw_die_ref context_die)
26082 {
26083 dw_die_ref namespace_die;
26084
26085 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26086 they are an alias of. */
26087 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26088 {
26089 /* Output a real namespace or module. */
26090 context_die = setup_namespace_context (decl, comp_unit_die ());
26091 namespace_die = new_die (is_fortran ()
26092 ? DW_TAG_module : DW_TAG_namespace,
26093 context_die, decl);
26094 /* For Fortran modules defined in different CU don't add src coords. */
26095 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26096 {
26097 const char *name = dwarf2_name (decl, 0);
26098 if (name)
26099 add_name_attribute (namespace_die, name);
26100 }
26101 else
26102 add_name_and_src_coords_attributes (namespace_die, decl);
26103 if (DECL_EXTERNAL (decl))
26104 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26105 equate_decl_number_to_die (decl, namespace_die);
26106 }
26107 else
26108 {
26109 /* Output a namespace alias. */
26110
26111 /* Force out the namespace we are an alias of, if necessary. */
26112 dw_die_ref origin_die
26113 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26114
26115 if (DECL_FILE_SCOPE_P (decl)
26116 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26117 context_die = setup_namespace_context (decl, comp_unit_die ());
26118 /* Now create the namespace alias DIE. */
26119 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26120 add_name_and_src_coords_attributes (namespace_die, decl);
26121 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26122 equate_decl_number_to_die (decl, namespace_die);
26123 }
26124 if ((dwarf_version >= 5 || !dwarf_strict)
26125 && lang_hooks.decls.decl_dwarf_attribute (decl,
26126 DW_AT_export_symbols) == 1)
26127 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26128
26129 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26130 if (want_pubnames ())
26131 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26132 }
26133
26134 /* Generate Dwarf debug information for a decl described by DECL.
26135 The return value is currently only meaningful for PARM_DECLs,
26136 for all other decls it returns NULL.
26137
26138 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26139 It can be NULL otherwise. */
26140
26141 static dw_die_ref
26142 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26143 dw_die_ref context_die)
26144 {
26145 tree decl_or_origin = decl ? decl : origin;
26146 tree class_origin = NULL, ultimate_origin;
26147
26148 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26149 return NULL;
26150
26151 switch (TREE_CODE (decl_or_origin))
26152 {
26153 case ERROR_MARK:
26154 break;
26155
26156 case CONST_DECL:
26157 if (!is_fortran () && !is_ada ())
26158 {
26159 /* The individual enumerators of an enum type get output when we output
26160 the Dwarf representation of the relevant enum type itself. */
26161 break;
26162 }
26163
26164 /* Emit its type. */
26165 gen_type_die (TREE_TYPE (decl), context_die);
26166
26167 /* And its containing namespace. */
26168 context_die = declare_in_namespace (decl, context_die);
26169
26170 gen_const_die (decl, context_die);
26171 break;
26172
26173 case FUNCTION_DECL:
26174 #if 0
26175 /* FIXME */
26176 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26177 on local redeclarations of global functions. That seems broken. */
26178 if (current_function_decl != decl)
26179 /* This is only a declaration. */;
26180 #endif
26181
26182 /* We should have abstract copies already and should not generate
26183 stray type DIEs in late LTO dumping. */
26184 if (! early_dwarf)
26185 ;
26186
26187 /* If we're emitting a clone, emit info for the abstract instance. */
26188 else if (origin || DECL_ORIGIN (decl) != decl)
26189 dwarf2out_abstract_function (origin
26190 ? DECL_ORIGIN (origin)
26191 : DECL_ABSTRACT_ORIGIN (decl));
26192
26193 /* If we're emitting a possibly inlined function emit it as
26194 abstract instance. */
26195 else if (cgraph_function_possibly_inlined_p (decl)
26196 && ! DECL_ABSTRACT_P (decl)
26197 && ! class_or_namespace_scope_p (context_die)
26198 /* dwarf2out_abstract_function won't emit a die if this is just
26199 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26200 that case, because that works only if we have a die. */
26201 && DECL_INITIAL (decl) != NULL_TREE)
26202 dwarf2out_abstract_function (decl);
26203
26204 /* Otherwise we're emitting the primary DIE for this decl. */
26205 else if (debug_info_level > DINFO_LEVEL_TERSE)
26206 {
26207 /* Before we describe the FUNCTION_DECL itself, make sure that we
26208 have its containing type. */
26209 if (!origin)
26210 origin = decl_class_context (decl);
26211 if (origin != NULL_TREE)
26212 gen_type_die (origin, context_die);
26213
26214 /* And its return type. */
26215 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26216
26217 /* And its virtual context. */
26218 if (DECL_VINDEX (decl) != NULL_TREE)
26219 gen_type_die (DECL_CONTEXT (decl), context_die);
26220
26221 /* Make sure we have a member DIE for decl. */
26222 if (origin != NULL_TREE)
26223 gen_type_die_for_member (origin, decl, context_die);
26224
26225 /* And its containing namespace. */
26226 context_die = declare_in_namespace (decl, context_die);
26227 }
26228
26229 /* Now output a DIE to represent the function itself. */
26230 if (decl)
26231 gen_subprogram_die (decl, context_die);
26232 break;
26233
26234 case TYPE_DECL:
26235 /* If we are in terse mode, don't generate any DIEs to represent any
26236 actual typedefs. */
26237 if (debug_info_level <= DINFO_LEVEL_TERSE)
26238 break;
26239
26240 /* In the special case of a TYPE_DECL node representing the declaration
26241 of some type tag, if the given TYPE_DECL is marked as having been
26242 instantiated from some other (original) TYPE_DECL node (e.g. one which
26243 was generated within the original definition of an inline function) we
26244 used to generate a special (abbreviated) DW_TAG_structure_type,
26245 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26246 should be actually referencing those DIEs, as variable DIEs with that
26247 type would be emitted already in the abstract origin, so it was always
26248 removed during unused type pruning. Don't add anything in this
26249 case. */
26250 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26251 break;
26252
26253 if (is_redundant_typedef (decl))
26254 gen_type_die (TREE_TYPE (decl), context_die);
26255 else
26256 /* Output a DIE to represent the typedef itself. */
26257 gen_typedef_die (decl, context_die);
26258 break;
26259
26260 case LABEL_DECL:
26261 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26262 gen_label_die (decl, context_die);
26263 break;
26264
26265 case VAR_DECL:
26266 case RESULT_DECL:
26267 /* If we are in terse mode, don't generate any DIEs to represent any
26268 variable declarations or definitions. */
26269 if (debug_info_level <= DINFO_LEVEL_TERSE)
26270 break;
26271
26272 /* Avoid generating stray type DIEs during late dwarf dumping.
26273 All types have been dumped early. */
26274 if (early_dwarf
26275 /* ??? But in LTRANS we cannot annotate early created variably
26276 modified type DIEs without copying them and adjusting all
26277 references to them. Dump them again as happens for inlining
26278 which copies both the decl and the types. */
26279 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26280 in VLA bound information for example. */
26281 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26282 current_function_decl)))
26283 {
26284 /* Output any DIEs that are needed to specify the type of this data
26285 object. */
26286 if (decl_by_reference_p (decl_or_origin))
26287 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26288 else
26289 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26290 }
26291
26292 if (early_dwarf)
26293 {
26294 /* And its containing type. */
26295 class_origin = decl_class_context (decl_or_origin);
26296 if (class_origin != NULL_TREE)
26297 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26298
26299 /* And its containing namespace. */
26300 context_die = declare_in_namespace (decl_or_origin, context_die);
26301 }
26302
26303 /* Now output the DIE to represent the data object itself. This gets
26304 complicated because of the possibility that the VAR_DECL really
26305 represents an inlined instance of a formal parameter for an inline
26306 function. */
26307 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26308 if (ultimate_origin != NULL_TREE
26309 && TREE_CODE (ultimate_origin) == PARM_DECL)
26310 gen_formal_parameter_die (decl, origin,
26311 true /* Emit name attribute. */,
26312 context_die);
26313 else
26314 gen_variable_die (decl, origin, context_die);
26315 break;
26316
26317 case FIELD_DECL:
26318 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26319 /* Ignore the nameless fields that are used to skip bits but handle C++
26320 anonymous unions and structs. */
26321 if (DECL_NAME (decl) != NULL_TREE
26322 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26323 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26324 {
26325 gen_type_die (member_declared_type (decl), context_die);
26326 gen_field_die (decl, ctx, context_die);
26327 }
26328 break;
26329
26330 case PARM_DECL:
26331 /* Avoid generating stray type DIEs during late dwarf dumping.
26332 All types have been dumped early. */
26333 if (early_dwarf
26334 /* ??? But in LTRANS we cannot annotate early created variably
26335 modified type DIEs without copying them and adjusting all
26336 references to them. Dump them again as happens for inlining
26337 which copies both the decl and the types. */
26338 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26339 in VLA bound information for example. */
26340 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26341 current_function_decl)))
26342 {
26343 if (DECL_BY_REFERENCE (decl_or_origin))
26344 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26345 else
26346 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26347 }
26348 return gen_formal_parameter_die (decl, origin,
26349 true /* Emit name attribute. */,
26350 context_die);
26351
26352 case NAMESPACE_DECL:
26353 if (dwarf_version >= 3 || !dwarf_strict)
26354 gen_namespace_die (decl, context_die);
26355 break;
26356
26357 case IMPORTED_DECL:
26358 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26359 DECL_CONTEXT (decl), context_die);
26360 break;
26361
26362 case NAMELIST_DECL:
26363 gen_namelist_decl (DECL_NAME (decl), context_die,
26364 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26365 break;
26366
26367 default:
26368 /* Probably some frontend-internal decl. Assume we don't care. */
26369 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26370 break;
26371 }
26372
26373 return NULL;
26374 }
26375 \f
26376 /* Output initial debug information for global DECL. Called at the
26377 end of the parsing process.
26378
26379 This is the initial debug generation process. As such, the DIEs
26380 generated may be incomplete. A later debug generation pass
26381 (dwarf2out_late_global_decl) will augment the information generated
26382 in this pass (e.g., with complete location info). */
26383
26384 static void
26385 dwarf2out_early_global_decl (tree decl)
26386 {
26387 set_early_dwarf s;
26388
26389 /* gen_decl_die() will set DECL_ABSTRACT because
26390 cgraph_function_possibly_inlined_p() returns true. This in
26391 turn will cause DW_AT_inline attributes to be set.
26392
26393 This happens because at early dwarf generation, there is no
26394 cgraph information, causing cgraph_function_possibly_inlined_p()
26395 to return true. Trick cgraph_function_possibly_inlined_p()
26396 while we generate dwarf early. */
26397 bool save = symtab->global_info_ready;
26398 symtab->global_info_ready = true;
26399
26400 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26401 other DECLs and they can point to template types or other things
26402 that dwarf2out can't handle when done via dwarf2out_decl. */
26403 if (TREE_CODE (decl) != TYPE_DECL
26404 && TREE_CODE (decl) != PARM_DECL)
26405 {
26406 if (TREE_CODE (decl) == FUNCTION_DECL)
26407 {
26408 tree save_fndecl = current_function_decl;
26409
26410 /* For nested functions, make sure we have DIEs for the parents first
26411 so that all nested DIEs are generated at the proper scope in the
26412 first shot. */
26413 tree context = decl_function_context (decl);
26414 if (context != NULL)
26415 {
26416 dw_die_ref context_die = lookup_decl_die (context);
26417 current_function_decl = context;
26418
26419 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26420 enough so that it lands in its own context. This avoids type
26421 pruning issues later on. */
26422 if (context_die == NULL || is_declaration_die (context_die))
26423 dwarf2out_decl (context);
26424 }
26425
26426 /* Emit an abstract origin of a function first. This happens
26427 with C++ constructor clones, for example, and makes
26428 dwarf2out_abstract_function happy, which requires the early
26429 DIE of the abstract instance to be present. */
26430 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26431 dw_die_ref origin_die;
26432 if (origin != NULL
26433 /* Do not emit the DIE multiple times but make sure to
26434 process it fully here in case we just saw a declaration. */
26435 && ((origin_die = lookup_decl_die (origin)) == NULL
26436 || is_declaration_die (origin_die)))
26437 {
26438 current_function_decl = origin;
26439 dwarf2out_decl (origin);
26440 }
26441
26442 /* Emit the DIE for decl but avoid doing that multiple times. */
26443 dw_die_ref old_die;
26444 if ((old_die = lookup_decl_die (decl)) == NULL
26445 || is_declaration_die (old_die))
26446 {
26447 current_function_decl = decl;
26448 dwarf2out_decl (decl);
26449 }
26450
26451 current_function_decl = save_fndecl;
26452 }
26453 else
26454 dwarf2out_decl (decl);
26455 }
26456 symtab->global_info_ready = save;
26457 }
26458
26459 /* Return whether EXPR is an expression with the following pattern:
26460 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26461
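/* Illustrative sketch only: this is the GENERIC shape of a dereference of a
   constant address, e.g.

       *(int *) 0x1000   ->   INDIRECT_REF (NOP_EXPR (INTEGER_CST 0x1000))

   which is the only DECL_VALUE_EXPR form the late-global-decl code below is
   willing to emit a location for when the variable itself was optimized
   out.  */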
26462 static bool
26463 is_trivial_indirect_ref (tree expr)
26464 {
26465 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26466 return false;
26467
26468 tree nop = TREE_OPERAND (expr, 0);
26469 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26470 return false;
26471
26472 tree int_cst = TREE_OPERAND (nop, 0);
26473 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26474 }
26475
26476 /* Output debug information for global decl DECL. Called from
26477 toplev.c after compilation proper has finished. */
26478
26479 static void
26480 dwarf2out_late_global_decl (tree decl)
26481 {
26482 /* Fill-in any location information we were unable to determine
26483 on the first pass. */
26484 if (VAR_P (decl))
26485 {
26486 dw_die_ref die = lookup_decl_die (decl);
26487
26488 /* We may have to generate early debug late for LTO in case debug
26489 was not enabled at compile-time or the target doesn't support
26490 the LTO early debug scheme. */
26491 if (! die && in_lto_p)
26492 {
26493 dwarf2out_decl (decl);
26494 die = lookup_decl_die (decl);
26495 }
26496
26497 if (die)
26498 {
26499 /* We get called via the symtab code invoking late_global_decl
26500 for symbols that are optimized out.
26501
26502 Do not add locations for those, except if they have a
26503 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26504 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26505 INDIRECT_REF expression, as this could generate relocations to
26506 text symbols in LTO object files, which is invalid. */
26507 varpool_node *node = varpool_node::get (decl);
26508 if ((! node || ! node->definition)
26509 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26510 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26511 tree_add_const_value_attribute_for_decl (die, decl);
26512 else
26513 add_location_or_const_value_attribute (die, decl, false);
26514 }
26515 }
26516 }
26517
26518 /* Output debug information for type decl DECL. Called from toplev.c
26519 and from language front ends (to record built-in types). */
26520 static void
26521 dwarf2out_type_decl (tree decl, int local)
26522 {
26523 if (!local)
26524 {
26525 set_early_dwarf s;
26526 dwarf2out_decl (decl);
26527 }
26528 }
26529
26530 /* Output debug information for imported module or decl DECL.
26531 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26532 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26533 that DECL belongs to.
26534 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
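/* Roughly, for C++ (the exact tags depend on the DWARF version checks
   below):

       using namespace N;   // DW_TAG_imported_module, DW_AT_import -> N's DIE
       using N::f;          // DW_TAG_imported_declaration, DW_AT_import -> f's DIE  */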
26535 static void
26536 dwarf2out_imported_module_or_decl_1 (tree decl,
26537 tree name,
26538 tree lexical_block,
26539 dw_die_ref lexical_block_die)
26540 {
26541 expanded_location xloc;
26542 dw_die_ref imported_die = NULL;
26543 dw_die_ref at_import_die;
26544
26545 if (TREE_CODE (decl) == IMPORTED_DECL)
26546 {
26547 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26548 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26549 gcc_assert (decl);
26550 }
26551 else
26552 xloc = expand_location (input_location);
26553
26554 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26555 {
26556 at_import_die = force_type_die (TREE_TYPE (decl));
26557 /* For namespace N { typedef void T; } using N::T; base_type_die
26558 returns NULL, but DW_TAG_imported_declaration requires
26559 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26560 if (!at_import_die)
26561 {
26562 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26563 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26564 at_import_die = lookup_type_die (TREE_TYPE (decl));
26565 gcc_assert (at_import_die);
26566 }
26567 }
26568 else
26569 {
26570 at_import_die = lookup_decl_die (decl);
26571 if (!at_import_die)
26572 {
26573 /* If we're trying to avoid duplicate debug info, we may not have
26574 emitted the member decl for this field. Emit it now. */
26575 if (TREE_CODE (decl) == FIELD_DECL)
26576 {
26577 tree type = DECL_CONTEXT (decl);
26578
26579 if (TYPE_CONTEXT (type)
26580 && TYPE_P (TYPE_CONTEXT (type))
26581 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26582 DINFO_USAGE_DIR_USE))
26583 return;
26584 gen_type_die_for_member (type, decl,
26585 get_context_die (TYPE_CONTEXT (type)));
26586 }
26587 if (TREE_CODE (decl) == NAMELIST_DECL)
26588 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26589 get_context_die (DECL_CONTEXT (decl)),
26590 NULL_TREE);
26591 else
26592 at_import_die = force_decl_die (decl);
26593 }
26594 }
26595
26596 if (TREE_CODE (decl) == NAMESPACE_DECL)
26597 {
26598 if (dwarf_version >= 3 || !dwarf_strict)
26599 imported_die = new_die (DW_TAG_imported_module,
26600 lexical_block_die,
26601 lexical_block);
26602 else
26603 return;
26604 }
26605 else
26606 imported_die = new_die (DW_TAG_imported_declaration,
26607 lexical_block_die,
26608 lexical_block);
26609
26610 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26611 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26612 if (debug_column_info && xloc.column)
26613 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26614 if (name)
26615 add_AT_string (imported_die, DW_AT_name,
26616 IDENTIFIER_POINTER (name));
26617 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26618 }
26619
26620 /* Output debug information for imported module or decl DECL.
26621 NAME is the non-NULL name in the context if the decl has been renamed.
26622 CHILD is true if decl is one of the renamed decls as part of
26623 importing a whole module.
26624 IMPLICIT is set if this hook is called for an implicit import
26625 such as inline namespace. */
26626
26627 static void
26628 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26629 bool child, bool implicit)
26630 {
26631 /* dw_die_ref at_import_die; */
26632 dw_die_ref scope_die;
26633
26634 if (debug_info_level <= DINFO_LEVEL_TERSE)
26635 return;
26636
26637 gcc_assert (decl);
26638
26639 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26640 should be enough. For DWARF4 and older, even if we emit
26641 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26642 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26643 if (implicit
26644 && dwarf_version >= 5
26645 && lang_hooks.decls.decl_dwarf_attribute (decl,
26646 DW_AT_export_symbols) == 1)
26647 return;
26648
26649 set_early_dwarf s;
26650
26651 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
26652 We need decl DIE for reference and scope die. First, get DIE for the decl
26653 itself. */
26654
26655 /* Get the scope die for decl context. Use comp_unit_die for global module
26656 or decl. If die is not found for non globals, force new die. */
26657 if (context
26658 && TYPE_P (context)
26659 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26660 return;
26661
26662 scope_die = get_context_die (context);
26663
26664 if (child)
26665 {
26666 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26667 there is nothing we can do here. */
26668 if (dwarf_version < 3 && dwarf_strict)
26669 return;
26670
26671 gcc_assert (scope_die->die_child);
26672 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26673 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26674 scope_die = scope_die->die_child;
26675 }
26676
26677 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26678 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26679 }
26680
26681 /* Output debug information for namelists. */
26682
26683 static dw_die_ref
26684 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26685 {
26686 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26687 tree value;
26688 unsigned i;
26689
26690 if (debug_info_level <= DINFO_LEVEL_TERSE)
26691 return NULL;
26692
26693 gcc_assert (scope_die != NULL);
26694 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26695 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26696
26697 /* If there are no item_decls, we have a nondefining namelist, e.g.
26698 with USE association; hence, set DW_AT_declaration. */
26699 if (item_decls == NULL_TREE)
26700 {
26701 add_AT_flag (nml_die, DW_AT_declaration, 1);
26702 return nml_die;
26703 }
26704
26705 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26706 {
26707 nml_item_ref_die = lookup_decl_die (value);
26708 if (!nml_item_ref_die)
26709 nml_item_ref_die = force_decl_die (value);
26710
26711 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26712 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26713 }
26714 return nml_die;
26715 }
26716
26717
26718 /* Write the debugging output for DECL, adding it to the appropriate context DIE. */
26719
26720 static void
26721 dwarf2out_decl (tree decl)
26722 {
26723 dw_die_ref context_die = comp_unit_die ();
26724
26725 switch (TREE_CODE (decl))
26726 {
26727 case ERROR_MARK:
26728 return;
26729
26730 case FUNCTION_DECL:
26731 /* If we're a nested function, initially use a parent of NULL; if we're
26732 a plain function, this will be fixed up in decls_for_scope. If
26733 we're a method, it will be ignored, since we already have a DIE.
26734 Avoid doing this late though since clones of class methods may
26735 otherwise end up in limbo and create type DIEs late. */
26736 if (early_dwarf
26737 && decl_function_context (decl)
26738 /* But if we're in terse mode, we don't care about scope. */
26739 && debug_info_level > DINFO_LEVEL_TERSE)
26740 context_die = NULL;
26741 break;
26742
26743 case VAR_DECL:
26744 /* For local statics look up the proper context DIE. */
26745 if (local_function_static (decl))
26746 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26747
26748 /* If we are in terse mode, don't generate any DIEs to represent any
26749 variable declarations or definitions. */
26750 if (debug_info_level <= DINFO_LEVEL_TERSE)
26751 return;
26752 break;
26753
26754 case CONST_DECL:
26755 if (debug_info_level <= DINFO_LEVEL_TERSE)
26756 return;
26757 if (!is_fortran () && !is_ada ())
26758 return;
26759 if (TREE_STATIC (decl) && decl_function_context (decl))
26760 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26761 break;
26762
26763 case NAMESPACE_DECL:
26764 case IMPORTED_DECL:
26765 if (debug_info_level <= DINFO_LEVEL_TERSE)
26766 return;
26767 if (lookup_decl_die (decl) != NULL)
26768 return;
26769 break;
26770
26771 case TYPE_DECL:
26772 /* Don't emit stubs for types unless they are needed by other DIEs. */
26773 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26774 return;
26775
26776 /* Don't bother trying to generate any DIEs to represent any of the
26777 normal built-in types for the language we are compiling. */
26778 if (DECL_IS_BUILTIN (decl))
26779 return;
26780
26781 /* If we are in terse mode, don't generate any DIEs for types. */
26782 if (debug_info_level <= DINFO_LEVEL_TERSE)
26783 return;
26784
26785 /* If we're a function-scope tag, initially use a parent of NULL;
26786 this will be fixed up in decls_for_scope. */
26787 if (decl_function_context (decl))
26788 context_die = NULL;
26789
26790 break;
26791
26792 case NAMELIST_DECL:
26793 break;
26794
26795 default:
26796 return;
26797 }
26798
26799 gen_decl_die (decl, NULL, NULL, context_die);
26800
26801 if (flag_checking)
26802 {
26803 dw_die_ref die = lookup_decl_die (decl);
26804 if (die)
26805 check_die (die);
26806 }
26807 }
26808
26809 /* Write the debugging output for DECL. */
26810
26811 static void
26812 dwarf2out_function_decl (tree decl)
26813 {
26814 dwarf2out_decl (decl);
26815 call_arg_locations = NULL;
26816 call_arg_loc_last = NULL;
26817 call_site_count = -1;
26818 tail_call_site_count = -1;
26819 decl_loc_table->empty ();
26820 cached_dw_loc_list_table->empty ();
26821 }
26822
26823 /* Output a marker (i.e. a label) for the beginning of the generated code for
26824 a lexical block. */
26825
26826 static void
26827 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26828 unsigned int blocknum)
26829 {
26830 switch_to_section (current_function_section ());
26831 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26832 }
26833
26834 /* Output a marker (i.e. a label) for the end of the generated code for a
26835 lexical block. */
26836
26837 static void
26838 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26839 {
26840 switch_to_section (current_function_section ());
26841 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26842 }
26843
26844 /* Returns nonzero if it is appropriate not to emit any debugging
26845 information for BLOCK, because it doesn't contain any instructions.
26846
26847 Don't allow this for blocks with nested functions or local classes
26848 as we would end up with orphans, and in the presence of scheduling
26849 we may end up calling them anyway. */
26850
26851 static bool
26852 dwarf2out_ignore_block (const_tree block)
26853 {
26854 tree decl;
26855 unsigned int i;
26856
26857 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26858 if (TREE_CODE (decl) == FUNCTION_DECL
26859 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26860 return 0;
26861 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26862 {
26863 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26864 if (TREE_CODE (decl) == FUNCTION_DECL
26865 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26866 return 0;
26867 }
26868
26869 return 1;
26870 }
26871
26872 /* Hash table routines for file_hash. */
26873
26874 bool
26875 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26876 {
26877 return filename_cmp (p1->filename, p2) == 0;
26878 }
26879
26880 hashval_t
26881 dwarf_file_hasher::hash (dwarf_file_data *p)
26882 {
26883 return htab_hash_string (p->filename);
26884 }
26885
26886 /* Look up FILE_NAME (in the list of filenames that we know about here in
26887 dwarf2out.c) and return its "index". The index of each (known) filename is
26888 just a unique number which is associated with only that one filename. We
26889 need such numbers for the sake of generating labels (in the .debug_sfnames
26890 section) and references to those file numbers (in the .debug_srcinfo
26891 and .debug_macinfo sections). If the filename given as an argument is not
26892 found in our current list, add it to the list and assign it the next
26893 available unique index number. */
26894
26895 static struct dwarf_file_data *
26896 lookup_filename (const char *file_name)
26897 {
26898 struct dwarf_file_data * created;
26899
26900 if (!file_name)
26901 return NULL;
26902
26903 dwarf_file_data **slot
26904 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26905 INSERT);
26906 if (*slot)
26907 return *slot;
26908
26909 created = ggc_alloc<dwarf_file_data> ();
26910 created->filename = file_name;
26911 created->emitted_number = 0;
26912 *slot = created;
26913 return created;
26914 }
26915
26916 /* If the assembler will construct the file table, then translate the compiler
26917 internal file table number into the assembler file table number, and emit
26918 a .file directive if we haven't already emitted one yet. The file table
26919 numbers are different because we prune debug info for unused variables and
26920 types, which may include filenames. */
26921
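/* A sketch of the directive this emits when the assembler builds the file
   table; the file number and (possibly remapped) name below are made up for
   the example:

       .file 2 "src/foo.c"  */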
26922 static int
26923 maybe_emit_file (struct dwarf_file_data * fd)
26924 {
26925 if (! fd->emitted_number)
26926 {
26927 if (last_emitted_file)
26928 fd->emitted_number = last_emitted_file->emitted_number + 1;
26929 else
26930 fd->emitted_number = 1;
26931 last_emitted_file = fd;
26932
26933 if (output_asm_line_debug_info ())
26934 {
26935 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26936 output_quoted_string (asm_out_file,
26937 remap_debug_filename (fd->filename));
26938 fputc ('\n', asm_out_file);
26939 }
26940 }
26941
26942 return fd->emitted_number;
26943 }
26944
26945 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26946 That generation should happen after function debug info has been
26947 generated. The value of the attribute is the constant value of ARG. */
26948
26949 static void
26950 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26951 {
26952 die_arg_entry entry;
26953
26954 if (!die || !arg)
26955 return;
26956
26957 gcc_assert (early_dwarf);
26958
26959 if (!tmpl_value_parm_die_table)
26960 vec_alloc (tmpl_value_parm_die_table, 32);
26961
26962 entry.die = die;
26963 entry.arg = arg;
26964 vec_safe_push (tmpl_value_parm_die_table, entry);
26965 }
26966
26967 /* Return TRUE if T is an instance of a generic type, FALSE
26968 otherwise. */
26969
26970 static bool
26971 generic_type_p (tree t)
26972 {
26973 if (t == NULL_TREE || !TYPE_P (t))
26974 return false;
26975 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26976 }
26977
26978 /* Schedule the generation of the generic parameter dies for the
26979 instance of generic type T. The proper generation itself is later
26980 done by gen_scheduled_generic_parms_dies. */
26981
26982 static void
26983 schedule_generic_params_dies_gen (tree t)
26984 {
26985 if (!generic_type_p (t))
26986 return;
26987
26988 gcc_assert (early_dwarf);
26989
26990 if (!generic_type_instances)
26991 vec_alloc (generic_type_instances, 256);
26992
26993 vec_safe_push (generic_type_instances, t);
26994 }
26995
26996 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26997 by append_entry_to_tmpl_value_parm_die_table. This function must
26998 be called after function DIEs have been generated. */
26999
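/* An illustrative case (hypothetical source, C++ assumed): for

       template <int N> struct A { };
       A<3> a;

   the DIE scheduled for A<3>'s value parameter would receive
   DW_AT_const_value 3 here, or a DW_AT_location at late-finish if the
   argument involves a symbolic constant.  */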
27000 static void
27001 gen_remaining_tmpl_value_param_die_attribute (void)
27002 {
27003 if (tmpl_value_parm_die_table)
27004 {
27005 unsigned i, j;
27006 die_arg_entry *e;
27007
27008 /* We do this in two phases - first get the cases we can
27009 handle during early-finish, preserving those we cannot
27010 (containing symbolic constants where we don't yet know
27011 whether we are going to output the referenced symbols).
27012 For those we try again at late-finish. */
27013 j = 0;
27014 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27015 {
27016 if (!e->die->removed
27017 && !tree_add_const_value_attribute (e->die, e->arg))
27018 {
27019 dw_loc_descr_ref loc = NULL;
27020 if (! early_dwarf
27021 && (dwarf_version >= 5 || !dwarf_strict))
27022 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27023 if (loc)
27024 add_AT_loc (e->die, DW_AT_location, loc);
27025 else
27026 (*tmpl_value_parm_die_table)[j++] = *e;
27027 }
27028 }
27029 tmpl_value_parm_die_table->truncate (j);
27030 }
27031 }
27032
27033 /* Generate generic parameters DIEs for instances of generic types
27034 that have been previously scheduled by
27035 schedule_generic_params_dies_gen. This function must be called
27036 after all the types of the CU have been laid out. */
27037
27038 static void
27039 gen_scheduled_generic_parms_dies (void)
27040 {
27041 unsigned i;
27042 tree t;
27043
27044 if (!generic_type_instances)
27045 return;
27046
27047 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27048 if (COMPLETE_TYPE_P (t))
27049 gen_generic_params_dies (t);
27050
27051 generic_type_instances = NULL;
27052 }
27053
27054
27055 /* Replace DW_AT_name for the decl with name. */
27056
27057 static void
27058 dwarf2out_set_name (tree decl, tree name)
27059 {
27060 dw_die_ref die;
27061 dw_attr_node *attr;
27062 const char *dname;
27063
27064 die = TYPE_SYMTAB_DIE (decl);
27065 if (!die)
27066 return;
27067
27068 dname = dwarf2_name (name, 0);
27069 if (!dname)
27070 return;
27071
27072 attr = get_AT (die, DW_AT_name);
27073 if (attr)
27074 {
27075 struct indirect_string_node *node;
27076
27077 node = find_AT_string (dname);
27078 /* Replace the string. */
27079 attr->dw_attr_val.v.val_str = node;
27080 }
27081
27082 else
27083 add_name_attribute (die, dname);
27084 }
27085
27086 /* True if before or during processing of the first function being emitted. */
27087 static bool in_first_function_p = true;
27088 /* True if loc_note during dwarf2out_var_location call might still be
27089 before first real instruction at address equal to .Ltext0. */
27090 static bool maybe_at_text_label_p = true;
27091 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27092 static unsigned int first_loclabel_num_not_at_text_label;
27093
27094 /* Look ahead for a real insn, or for a begin stmt marker. */
27095
27096 static rtx_insn *
27097 dwarf2out_next_real_insn (rtx_insn *loc_note)
27098 {
27099 rtx_insn *next_real = NEXT_INSN (loc_note);
27100
27101 while (next_real)
27102 if (INSN_P (next_real))
27103 break;
27104 else
27105 next_real = NEXT_INSN (next_real);
27106
27107 return next_real;
27108 }
27109
27110 /* Called by the final INSN scan whenever we see a var location. We
27111 use it to drop labels in the right places, and throw the location in
27112 our lookup table. */
27113
27114 static void
27115 dwarf2out_var_location (rtx_insn *loc_note)
27116 {
27117 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27118 struct var_loc_node *newloc;
27119 rtx_insn *next_real, *next_note;
27120 rtx_insn *call_insn = NULL;
27121 static const char *last_label;
27122 static const char *last_postcall_label;
27123 static bool last_in_cold_section_p;
27124 static rtx_insn *expected_next_loc_note;
27125 tree decl;
27126 bool var_loc_p;
27127 var_loc_view view = 0;
27128
27129 if (!NOTE_P (loc_note))
27130 {
27131 if (CALL_P (loc_note))
27132 {
27133 maybe_reset_location_view (loc_note, cur_line_info_table);
27134 call_site_count++;
27135 if (SIBLING_CALL_P (loc_note))
27136 tail_call_site_count++;
27137 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27138 {
27139 call_insn = loc_note;
27140 loc_note = NULL;
27141 var_loc_p = false;
27142
27143 next_real = dwarf2out_next_real_insn (call_insn);
27144 next_note = NULL;
27145 cached_next_real_insn = NULL;
27146 goto create_label;
27147 }
27148 if (optimize == 0 && !flag_var_tracking)
27149 {
27150 /* When the var-tracking pass is not running, there is no note
27151 for indirect calls whose target is compile-time known. In this
27152 case, process such calls specifically so that we generate call
27153 sites for them anyway. */
27154 rtx x = PATTERN (loc_note);
27155 if (GET_CODE (x) == PARALLEL)
27156 x = XVECEXP (x, 0, 0);
27157 if (GET_CODE (x) == SET)
27158 x = SET_SRC (x);
27159 if (GET_CODE (x) == CALL)
27160 x = XEXP (x, 0);
27161 if (!MEM_P (x)
27162 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27163 || !SYMBOL_REF_DECL (XEXP (x, 0))
27164 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27165 != FUNCTION_DECL))
27166 {
27167 call_insn = loc_note;
27168 loc_note = NULL;
27169 var_loc_p = false;
27170
27171 next_real = dwarf2out_next_real_insn (call_insn);
27172 next_note = NULL;
27173 cached_next_real_insn = NULL;
27174 goto create_label;
27175 }
27176 }
27177 }
27178 else if (!debug_variable_location_views)
27179 gcc_unreachable ();
27180 else
27181 maybe_reset_location_view (loc_note, cur_line_info_table);
27182
27183 return;
27184 }
27185
27186 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27187 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27188 return;
27189
27190 /* Optimize processing a large consecutive sequence of location
27191 notes so we don't spend too much time in next_real_insn. If the
27192 next insn is another location note, remember the next_real_insn
27193 calculation for next time. */
27194 next_real = cached_next_real_insn;
27195 if (next_real)
27196 {
27197 if (expected_next_loc_note != loc_note)
27198 next_real = NULL;
27199 }
27200
27201 next_note = NEXT_INSN (loc_note);
27202 if (! next_note
27203 || next_note->deleted ()
27204 || ! NOTE_P (next_note)
27205 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27206 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27207 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27208 next_note = NULL;
27209
27210 if (! next_real)
27211 next_real = dwarf2out_next_real_insn (loc_note);
27212
27213 if (next_note)
27214 {
27215 expected_next_loc_note = next_note;
27216 cached_next_real_insn = next_real;
27217 }
27218 else
27219 cached_next_real_insn = NULL;
27220
27221 /* If there are no instructions which would be affected by this note,
27222 don't do anything. */
27223 if (var_loc_p
27224 && next_real == NULL_RTX
27225 && !NOTE_DURING_CALL_P (loc_note))
27226 return;
27227
27228 create_label:
27229
27230 if (next_real == NULL_RTX)
27231 next_real = get_last_insn ();
27232
27233 /* If there were any real insns between the note we processed last time
27234 and this note (or if it is the first note), clear
27235 last_{,postcall_}label so that they are not reused this time. */
27236 if (last_var_location_insn == NULL_RTX
27237 || last_var_location_insn != next_real
27238 || last_in_cold_section_p != in_cold_section_p)
27239 {
27240 last_label = NULL;
27241 last_postcall_label = NULL;
27242 }
27243
27244 if (var_loc_p)
27245 {
27246 const char *label
27247 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27248 view = cur_line_info_table->view;
27249 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27250 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27251 if (newloc == NULL)
27252 return;
27253 }
27254 else
27255 {
27256 decl = NULL_TREE;
27257 newloc = NULL;
27258 }
27259
27260 /* If there were no real insns between the note we processed last time
27261 and this note, use the label we emitted last time. Otherwise
27262 create a new label and emit it. */
27263 if (last_label == NULL)
27264 {
27265 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27266 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27267 loclabel_num++;
27268 last_label = ggc_strdup (loclabel);
27269 /* See if loclabel might be equal to .Ltext0. If yes,
27270 bump first_loclabel_num_not_at_text_label. */
27271 if (!have_multiple_function_sections
27272 && in_first_function_p
27273 && maybe_at_text_label_p)
27274 {
27275 static rtx_insn *last_start;
27276 rtx_insn *insn;
27277 for (insn = loc_note; insn; insn = previous_insn (insn))
27278 if (insn == last_start)
27279 break;
27280 else if (!NONDEBUG_INSN_P (insn))
27281 continue;
27282 else
27283 {
27284 rtx body = PATTERN (insn);
27285 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27286 continue;
27287 /* Inline asm could occupy zero bytes. */
27288 else if (GET_CODE (body) == ASM_INPUT
27289 || asm_noperands (body) >= 0)
27290 continue;
27291 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27292 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27293 continue;
27294 #endif
27295 else
27296 {
27297 /* Assume insn has non-zero length. */
27298 maybe_at_text_label_p = false;
27299 break;
27300 }
27301 }
27302 if (maybe_at_text_label_p)
27303 {
27304 last_start = loc_note;
27305 first_loclabel_num_not_at_text_label = loclabel_num;
27306 }
27307 }
27308 }
27309
27310 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27311 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27312
27313 if (!var_loc_p)
27314 {
27315 struct call_arg_loc_node *ca_loc
27316 = ggc_cleared_alloc<call_arg_loc_node> ();
27317 rtx_insn *prev = call_insn;
27318
27319 ca_loc->call_arg_loc_note
27320 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27321 ca_loc->next = NULL;
27322 ca_loc->label = last_label;
27323 gcc_assert (prev
27324 && (CALL_P (prev)
27325 || (NONJUMP_INSN_P (prev)
27326 && GET_CODE (PATTERN (prev)) == SEQUENCE
27327 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27328 if (!CALL_P (prev))
27329 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27330 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27331
27332 /* Look for a SYMBOL_REF in the "prev" instruction. */
27333 rtx x = get_call_rtx_from (PATTERN (prev));
27334 if (x)
27335 {
27336 /* Try to get the call symbol, if any. */
27337 if (MEM_P (XEXP (x, 0)))
27338 x = XEXP (x, 0);
27339 /* First, look for a memory access to a symbol_ref. */
27340 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27341 && SYMBOL_REF_DECL (XEXP (x, 0))
27342 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27343 ca_loc->symbol_ref = XEXP (x, 0);
27344 /* Otherwise, look at a compile-time known user-level function
27345 declaration. */
27346 else if (MEM_P (x)
27347 && MEM_EXPR (x)
27348 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27349 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27350 }
27351
27352 ca_loc->block = insn_scope (prev);
27353 if (call_arg_locations)
27354 call_arg_loc_last->next = ca_loc;
27355 else
27356 call_arg_locations = ca_loc;
27357 call_arg_loc_last = ca_loc;
27358 }
27359 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27360 {
27361 newloc->label = last_label;
27362 newloc->view = view;
27363 }
27364 else
27365 {
27366 if (!last_postcall_label)
27367 {
27368 sprintf (loclabel, "%s-1", last_label);
27369 last_postcall_label = ggc_strdup (loclabel);
27370 }
27371 newloc->label = last_postcall_label;
27372 /* ??? This view is at last_label, not last_label-1, but we
27373 could only assume view at last_label-1 is zero if we could
27374 assume calls always have length greater than one. This is
27375 probably true in general, though there might be a rare
27376 exception to this rule, e.g. if a call insn is optimized out
27377 by target magic. Then, even the -1 in the label will be
27378 wrong, which might invalidate the range. Anyway, using view,
27379 though technically possibly incorrect, will work as far as
27380 ranges go: since L-1 is in the middle of the call insn,
27381 (L-1).0 and (L-1).V shouldn't make any difference, and having
27382 the loclist entry refer to the .loc entry might be useful, so
27383 leave it like this. */
27384 newloc->view = view;
27385 }
27386
27387 if (var_loc_p && flag_debug_asm)
27388 {
27389 const char *name, *sep, *patstr;
27390 if (decl && DECL_NAME (decl))
27391 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27392 else
27393 name = "";
27394 if (NOTE_VAR_LOCATION_LOC (loc_note))
27395 {
27396 sep = " => ";
27397 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27398 }
27399 else
27400 {
27401 sep = " ";
27402 patstr = "RESET";
27403 }
27404 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27405 name, sep, patstr);
27406 }
27407
27408 last_var_location_insn = next_real;
27409 last_in_cold_section_p = in_cold_section_p;
27410 }
27411
27412 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27413 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27414 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27415 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27416 BLOCK_FRAGMENT_ORIGIN links. */
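/* For example, given nested blocks { A { B } }, block_within_block_p (B, A, true)
   checks both that A appears on B's BLOCK_SUPERCONTEXT chain and that B can be
   reached again from A through BLOCK_SUBBLOCKS/BLOCK_CHAIN links, possibly
   hopping through a BLOCK_FRAGMENT_ORIGIN for a split block.  */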
27417 static bool
27418 block_within_block_p (tree block, tree outer, bool bothways)
27419 {
27420 if (block == outer)
27421 return true;
27422
27423 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27424 for (tree context = BLOCK_SUPERCONTEXT (block);
27425 context != outer;
27426 context = BLOCK_SUPERCONTEXT (context))
27427 if (!context || TREE_CODE (context) != BLOCK)
27428 return false;
27429
27430 if (!bothways)
27431 return true;
27432
27433 /* Now check that each block is actually referenced by its
27434 parent. */
27435 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27436 context = BLOCK_SUPERCONTEXT (context))
27437 {
27438 if (BLOCK_FRAGMENT_ORIGIN (context))
27439 {
27440 gcc_assert (!BLOCK_SUBBLOCKS (context));
27441 context = BLOCK_FRAGMENT_ORIGIN (context);
27442 }
27443 for (tree sub = BLOCK_SUBBLOCKS (context);
27444 sub != block;
27445 sub = BLOCK_CHAIN (sub))
27446 if (!sub)
27447 return false;
27448 if (context == outer)
27449 return true;
27450 else
27451 block = context;
27452 }
27453 }
27454
27455 /* Called during final while assembling the marker of the entry point
27456 for an inlined function. */
27457
27458 static void
27459 dwarf2out_inline_entry (tree block)
27460 {
27461 gcc_assert (debug_inline_points);
27462
27463 /* If we can't represent it, don't bother. */
27464 if (!(dwarf_version >= 3 || !dwarf_strict))
27465 return;
27466
27467 gcc_assert (DECL_P (block_ultimate_origin (block)));
27468
27469 /* Sanity check the block tree. This would catch a case in which
27470 BLOCK got removed from the tree reachable from the outermost
27471 lexical block, but got retained in markers. It would still link
27472 back to its parents, but some ancestor would be missing a link
27473 down the path to the sub BLOCK. If the block got removed, its
27474 BLOCK_NUMBER will not be a usable value. */
27475 if (flag_checking)
27476 gcc_assert (block_within_block_p (block,
27477 DECL_INITIAL (current_function_decl),
27478 true));
27479
27480 gcc_assert (inlined_function_outer_scope_p (block));
27481 gcc_assert (!BLOCK_DIE (block));
27482
27483 if (BLOCK_FRAGMENT_ORIGIN (block))
27484 block = BLOCK_FRAGMENT_ORIGIN (block);
27485 /* Can the entry point ever not be at the beginning of an
27486 unfragmented lexical block? */
27487 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27488 || (cur_line_info_table
27489 && !ZERO_VIEW_P (cur_line_info_table->view))))
27490 return;
27491
27492 if (!inline_entry_data_table)
27493 inline_entry_data_table
27494 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27495
27496
27497 inline_entry_data **iedp
27498 = inline_entry_data_table->find_slot_with_hash (block,
27499 htab_hash_pointer (block),
27500 INSERT);
27501 if (*iedp)
27502 /* ??? Ideally, we'd record all entry points for the same inlined
27503 function (some may have been duplicated by e.g. unrolling), but
27504 we have no way to represent that ATM. */
27505 return;
27506
27507 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27508 ied->block = block;
27509 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27510 ied->label_num = BLOCK_NUMBER (block);
27511 if (cur_line_info_table)
27512 ied->view = cur_line_info_table->view;
27513
27514 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27515
27516 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27517 BLOCK_NUMBER (block));
27518 ASM_OUTPUT_LABEL (asm_out_file, label);
27519 }
27520
27521 /* Called from finalize_size_functions for size functions so that their body
27522 can be encoded in the debug info to describe the layout of variable-length
27523 structures. */
27524
27525 static void
27526 dwarf2out_size_function (tree decl)
27527 {
27528 function_to_dwarf_procedure (decl);
27529 }
27530
27531 /* Note in one location list that the text section has changed. */
27532
27533 int
27534 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27535 {
27536 var_loc_list *list = *slot;
27537 if (list->first)
27538 list->last_before_switch
27539 = list->last->next ? list->last->next : list->last;
27540 return 1;
27541 }
27542
27543 /* Note in all location lists that the text section has changed. */
27544
27545 static void
27546 var_location_switch_text_section (void)
27547 {
27548 if (decl_loc_table == NULL)
27549 return;
27550
27551 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27552 }
27553
27554 /* Create a new line number table. */
27555
27556 static dw_line_info_table *
27557 new_line_info_table (void)
27558 {
27559 dw_line_info_table *table;
27560
27561 table = ggc_cleared_alloc<dw_line_info_table> ();
27562 table->file_num = 1;
27563 table->line_num = 1;
27564 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27565 FORCE_RESET_NEXT_VIEW (table->view);
27566 table->symviews_since_reset = 0;
27567
27568 return table;
27569 }
27570
27571 /* Look up the "current" table into which we emit line info, so
27572 that we don't have to do it for every source line. */
27573
27574 static void
27575 set_cur_line_info_table (section *sec)
27576 {
27577 dw_line_info_table *table;
27578
27579 if (sec == text_section)
27580 table = text_section_line_info;
27581 else if (sec == cold_text_section)
27582 {
27583 table = cold_text_section_line_info;
27584 if (!table)
27585 {
27586 cold_text_section_line_info = table = new_line_info_table ();
27587 table->end_label = cold_end_label;
27588 }
27589 }
27590 else
27591 {
27592 const char *end_label;
27593
27594 if (crtl->has_bb_partition)
27595 {
27596 if (in_cold_section_p)
27597 end_label = crtl->subsections.cold_section_end_label;
27598 else
27599 end_label = crtl->subsections.hot_section_end_label;
27600 }
27601 else
27602 {
27603 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27604 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27605 current_function_funcdef_no);
27606 end_label = ggc_strdup (label);
27607 }
27608
27609 table = new_line_info_table ();
27610 table->end_label = end_label;
27611
27612 vec_safe_push (separate_line_info, table);
27613 }
27614
27615 if (output_asm_line_debug_info ())
27616 table->is_stmt = (cur_line_info_table
27617 ? cur_line_info_table->is_stmt
27618 : DWARF_LINE_DEFAULT_IS_STMT_START);
27619 cur_line_info_table = table;
27620 }
27621
27622
27623 /* We need to reset the locations at the beginning of each
27624 function. We can't do this in the end_function hook, because the
27625 declarations that use the locations won't have been output when
27626 that hook is called. Also compute have_multiple_function_sections here. */
27627
27628 static void
27629 dwarf2out_begin_function (tree fun)
27630 {
27631 section *sec = function_section (fun);
27632
27633 if (sec != text_section)
27634 have_multiple_function_sections = true;
27635
27636 if (crtl->has_bb_partition && !cold_text_section)
27637 {
27638 gcc_assert (current_function_decl == fun);
27639 cold_text_section = unlikely_text_section ();
27640 switch_to_section (cold_text_section);
27641 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27642 switch_to_section (sec);
27643 }
27644
27645 dwarf2out_note_section_used ();
27646 call_site_count = 0;
27647 tail_call_site_count = 0;
27648
27649 set_cur_line_info_table (sec);
27650 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27651 }
27652
27653 /* Helper function of dwarf2out_end_function, called only after emitting
27654 the very first function into assembly. Check if some .debug_loc range
27655 might end with a .LVL* label that could be equal to .Ltext0.
27656 In that case we must force using absolute addresses in .debug_loc ranges,
27657 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27658 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27659 list terminator.
27660 Set have_multiple_function_sections to true in that case and
27661 terminate htab traversal. */
27662
27663 int
27664 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27665 {
27666 var_loc_list *entry = *slot;
27667 struct var_loc_node *node;
27668
27669 node = entry->first;
27670 if (node && node->next && node->next->label)
27671 {
27672 unsigned int i;
27673 const char *label = node->next->label;
27674 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27675
27676 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27677 {
27678 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27679 if (strcmp (label, loclabel) == 0)
27680 {
27681 have_multiple_function_sections = true;
27682 return 0;
27683 }
27684 }
27685 }
27686 return 1;
27687 }
27688
27689 /* Hook called after emitting a function into assembly.
27690 This does something only for the very first function emitted. */
27691
27692 static void
27693 dwarf2out_end_function (unsigned int)
27694 {
27695 if (in_first_function_p
27696 && !have_multiple_function_sections
27697 && first_loclabel_num_not_at_text_label
27698 && decl_loc_table)
27699 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27700 in_first_function_p = false;
27701 maybe_at_text_label_p = false;
27702 }
27703
27704 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27705 front-ends register a translation unit even before dwarf2out_init is
27706 called. */
27707 static tree main_translation_unit = NULL_TREE;
27708
27709 /* Hook called by front-ends after they have built their main translation
27710 unit. Associate comp_unit_die with UNIT. */
27711
27712 static void
27713 dwarf2out_register_main_translation_unit (tree unit)
27714 {
27715 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27716 && main_translation_unit == NULL_TREE);
27717 main_translation_unit = unit;
27718 /* If dwarf2out_init has not been called yet, it will perform the association
27719 itself looking at main_translation_unit. */
27720 if (decl_die_table != NULL)
27721 equate_decl_number_to_die (unit, comp_unit_die ());
27722 }
27723
27724 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27725
27726 static void
27727 push_dw_line_info_entry (dw_line_info_table *table,
27728 enum dw_line_info_opcode opcode, unsigned int val)
27729 {
27730 dw_line_info_entry e;
27731 e.opcode = opcode;
27732 e.val = val;
27733 vec_safe_push (table->entries, e);
27734 }
27735
27736 /* Output a label to mark the beginning of a source code line entry
27737 and record information relating to this source line, in
27738 'line_info_table' for later output of the .debug_line section. */
27739 /* ??? The discriminator parameter ought to be unsigned. */
27740
27741 static void
27742 dwarf2out_source_line (unsigned int line, unsigned int column,
27743 const char *filename,
27744 int discriminator, bool is_stmt)
27745 {
27746 unsigned int file_num;
27747 dw_line_info_table *table;
27748 static var_loc_view lvugid;
27749
27750 if (debug_info_level < DINFO_LEVEL_TERSE)
27751 return;
27752
27753 table = cur_line_info_table;
27754
27755 if (line == 0)
27756 {
27757 if (debug_variable_location_views
27758 && output_asm_line_debug_info ()
27759 && table && !RESETTING_VIEW_P (table->view))
27760 {
27761 /* If we're using the assembler to compute view numbers, we
27762 can't issue a .loc directive for line zero, so we can't
27763 get a view number at this point. We might attempt to
27764 compute it from the previous view, or equate it to a
27765 subsequent view (though it might not be there!), but
27766 since we're omitting the line number entry, we might as
27767 well omit the view number as well. That means pretending
27768 it's a view number zero, which might very well turn out
27769 to be correct. ??? Extend the assembler so that the
27770 compiler could emit e.g. ".locview .LVU#", to output a
27771 view without changing line number information. We'd then
27772 have to count it in symviews_since_reset; when it's omitted,
27773 it doesn't count. */
27774 if (!zero_view_p)
27775 zero_view_p = BITMAP_GGC_ALLOC ();
27776 bitmap_set_bit (zero_view_p, table->view);
27777 if (flag_debug_asm)
27778 {
27779 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27780 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27781 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27782 ASM_COMMENT_START);
27783 assemble_name (asm_out_file, label);
27784 putc ('\n', asm_out_file);
27785 }
27786 table->view = ++lvugid;
27787 }
27788 return;
27789 }
27790
27791 /* The discriminator column was added in DWARF 4. Simplify the code below
27792 by removing the discriminator if we're not supposed to output it. */
27793 if (dwarf_version < 4 && dwarf_strict)
27794 discriminator = 0;
27795
27796 if (!debug_column_info)
27797 column = 0;
27798
27799 file_num = maybe_emit_file (lookup_filename (filename));
27800
27801 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27802 the debugger has used the second (possibly duplicate) line number
27803 at the beginning of the function to mark the end of the prologue.
27804 We could eliminate any other duplicates within the function. For
27805 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27806 that second line number entry. */
27807 /* Recall that this end-of-prologue indication is *not* the same thing
27808 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27809 to which the hook corresponds, follows the last insn that was
27810 emitted by gen_prologue. What we need is to precede the first insn
27811 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27812 insn that corresponds to something the user wrote. These may be
27813 very different locations once scheduling is enabled. */
27814
27815 if (0 && file_num == table->file_num
27816 && line == table->line_num
27817 && column == table->column_num
27818 && discriminator == table->discrim_num
27819 && is_stmt == table->is_stmt)
27820 return;
27821
27822 switch_to_section (current_function_section ());
27823
27824 /* If requested, emit something human-readable. */
27825 if (flag_debug_asm)
27826 {
27827 if (debug_column_info)
27828 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27829 filename, line, column);
27830 else
27831 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27832 filename, line);
27833 }
27834
27835 if (output_asm_line_debug_info ())
27836 {
27837 /* Emit the .loc directive understood by GNU as. */
27838 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27839 file_num, line, is_stmt, discriminator */
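/* With purely illustrative numbers, the full directive emitted here could
thus read ".loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5". */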
27840 fputs ("\t.loc ", asm_out_file);
27841 fprint_ul (asm_out_file, file_num);
27842 putc (' ', asm_out_file);
27843 fprint_ul (asm_out_file, line);
27844 putc (' ', asm_out_file);
27845 fprint_ul (asm_out_file, column);
27846
27847 if (is_stmt != table->is_stmt)
27848 {
27849 fputs (" is_stmt ", asm_out_file);
27850 putc (is_stmt ? '1' : '0', asm_out_file);
27851 }
27852 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27853 {
27854 gcc_assert (discriminator > 0);
27855 fputs (" discriminator ", asm_out_file);
27856 fprint_ul (asm_out_file, (unsigned long) discriminator);
27857 }
27858 if (debug_variable_location_views)
27859 {
27860 if (!RESETTING_VIEW_P (table->view))
27861 {
27862 table->symviews_since_reset++;
27863 if (table->symviews_since_reset > symview_upper_bound)
27864 symview_upper_bound = table->symviews_since_reset;
27865 /* When we're using the assembler to compute view
27866 numbers, we output symbolic labels after "view" in
27867 .loc directives, and the assembler will set them for
27868 us, so that we can refer to the view numbers in
27869 location lists. The only exceptions are when we know
27870 a view will be zero: "-0" is a forced reset, used
27871 e.g. in the beginning of functions, whereas "0" tells
27872 the assembler to check that there was a PC change
27873 since the previous view, in a way that implicitly
27874 resets the next view. */
27875 fputs (" view ", asm_out_file);
27876 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27877 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27878 assemble_name (asm_out_file, label);
27879 table->view = ++lvugid;
27880 }
27881 else
27882 {
27883 table->symviews_since_reset = 0;
27884 if (FORCE_RESETTING_VIEW_P (table->view))
27885 fputs (" view -0", asm_out_file);
27886 else
27887 fputs (" view 0", asm_out_file);
27888 /* Mark the present view as a zero view. Earlier debug
27889 binds may have already added its id to loclists to be
27890 emitted later, so we can't reuse the id for something
27891 else. However, it's good to know whether a view is
27892 known to be zero, because then we may be able to
27893 optimize out locviews that are all zeros, so take
27894 note of it in zero_view_p. */
27895 if (!zero_view_p)
27896 zero_view_p = BITMAP_GGC_ALLOC ();
27897 bitmap_set_bit (zero_view_p, lvugid);
27898 table->view = ++lvugid;
27899 }
27900 }
27901 putc ('\n', asm_out_file);
27902 }
27903 else
27904 {
27905 unsigned int label_num = ++line_info_label_num;
27906
27907 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27908
27909 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27910 push_dw_line_info_entry (table, LI_adv_address, label_num);
27911 else
27912 push_dw_line_info_entry (table, LI_set_address, label_num);
27913 if (debug_variable_location_views)
27914 {
27915 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27916 if (resetting)
27917 table->view = 0;
27918
27919 if (flag_debug_asm)
27920 fprintf (asm_out_file, "\t%s view %s%d\n",
27921 ASM_COMMENT_START,
27922 resetting ? "-" : "",
27923 table->view);
27924
27925 table->view++;
27926 }
27927 if (file_num != table->file_num)
27928 push_dw_line_info_entry (table, LI_set_file, file_num);
27929 if (discriminator != table->discrim_num)
27930 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27931 if (is_stmt != table->is_stmt)
27932 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27933 push_dw_line_info_entry (table, LI_set_line, line);
27934 if (debug_column_info)
27935 push_dw_line_info_entry (table, LI_set_column, column);
27936 }
27937
27938 table->file_num = file_num;
27939 table->line_num = line;
27940 table->column_num = column;
27941 table->discrim_num = discriminator;
27942 table->is_stmt = is_stmt;
27943 table->in_use = true;
27944 }
27945
27946 /* Record the beginning of a new source file. */
27947
27948 static void
27949 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27950 {
27951 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27952 {
27953 macinfo_entry e;
27954 e.code = DW_MACINFO_start_file;
27955 e.lineno = lineno;
27956 e.info = ggc_strdup (filename);
27957 vec_safe_push (macinfo_table, e);
27958 }
27959 }
27960
27961 /* Record the end of a source file. */
27962
27963 static void
27964 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27965 {
27966 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27967 {
27968 macinfo_entry e;
27969 e.code = DW_MACINFO_end_file;
27970 e.lineno = lineno;
27971 e.info = NULL;
27972 vec_safe_push (macinfo_table, e);
27973 }
27974 }
27975
27976 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27977 the tail part of the directive line, i.e. the part past the initial
27978 whitespace, '#', whitespace, directive-name and whitespace. */
27979
27980 static void
27981 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27982 const char *buffer ATTRIBUTE_UNUSED)
27983 {
27984 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27985 {
27986 macinfo_entry e;
27987 /* Insert a dummy first entry to be able to optimize the whole
27988 predefined macro block using DW_MACRO_import. */
27989 if (macinfo_table->is_empty () && lineno <= 1)
27990 {
27991 e.code = 0;
27992 e.lineno = 0;
27993 e.info = NULL;
27994 vec_safe_push (macinfo_table, e);
27995 }
27996 e.code = DW_MACINFO_define;
27997 e.lineno = lineno;
27998 e.info = ggc_strdup (buffer);
27999 vec_safe_push (macinfo_table, e);
28000 }
28001 }
28002
28003 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28004 the tail part of the directive line, i.e. the part past the initial
28005 whitespace, '#', whitespace, directive-name and whitespace. */
28006
28007 static void
28008 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28009 const char *buffer ATTRIBUTE_UNUSED)
28010 {
28011 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28012 {
28013 macinfo_entry e;
28014 /* Insert a dummy first entry to be able to optimize the whole
28015 predefined macro block using DW_MACRO_import. */
28016 if (macinfo_table->is_empty () && lineno <= 1)
28017 {
28018 e.code = 0;
28019 e.lineno = 0;
28020 e.info = NULL;
28021 vec_safe_push (macinfo_table, e);
28022 }
28023 e.code = DW_MACINFO_undef;
28024 e.lineno = lineno;
28025 e.info = ggc_strdup (buffer);
28026 vec_safe_push (macinfo_table, e);
28027 }
28028 }
28029
28030 /* Helpers to manipulate the hash table of macinfo entries. */
28031
28032 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28033 {
28034 static inline hashval_t hash (const macinfo_entry *);
28035 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28036 };
28037
28038 inline hashval_t
28039 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28040 {
28041 return htab_hash_string (entry->info);
28042 }
28043
28044 inline bool
28045 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28046 const macinfo_entry *entry2)
28047 {
28048 return !strcmp (entry1->info, entry2->info);
28049 }
28050
28051 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28052
28053 /* Output a single .debug_macinfo entry. */
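/* For example, a DW_MACINFO_define op is emitted below as the opcode byte,
the uleb128 line number and the NUL-terminated "name definition" string,
whereas the DW_MACRO_define_strp/DW_MACRO_undef_strp variants replace the
inline string with a reference into the string table. */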
28054
28055 static void
28056 output_macinfo_op (macinfo_entry *ref)
28057 {
28058 int file_num;
28059 size_t len;
28060 struct indirect_string_node *node;
28061 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28062 struct dwarf_file_data *fd;
28063
28064 switch (ref->code)
28065 {
28066 case DW_MACINFO_start_file:
28067 fd = lookup_filename (ref->info);
28068 file_num = maybe_emit_file (fd);
28069 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28070 dw2_asm_output_data_uleb128 (ref->lineno,
28071 "Included from line number %lu",
28072 (unsigned long) ref->lineno);
28073 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28074 break;
28075 case DW_MACINFO_end_file:
28076 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28077 break;
28078 case DW_MACINFO_define:
28079 case DW_MACINFO_undef:
28080 len = strlen (ref->info) + 1;
28081 if (!dwarf_strict
28082 && len > DWARF_OFFSET_SIZE
28083 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28084 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28085 {
28086 ref->code = ref->code == DW_MACINFO_define
28087 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28088 output_macinfo_op (ref);
28089 return;
28090 }
28091 dw2_asm_output_data (1, ref->code,
28092 ref->code == DW_MACINFO_define
28093 ? "Define macro" : "Undefine macro");
28094 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28095 (unsigned long) ref->lineno);
28096 dw2_asm_output_nstring (ref->info, -1, "The macro");
28097 break;
28098 case DW_MACRO_define_strp:
28099 case DW_MACRO_undef_strp:
28100 node = find_AT_string (ref->info);
28101 gcc_assert (node
28102 && (node->form == DW_FORM_strp
28103 || node->form == dwarf_FORM (DW_FORM_strx)));
28104 dw2_asm_output_data (1, ref->code,
28105 ref->code == DW_MACRO_define_strp
28106 ? "Define macro strp"
28107 : "Undefine macro strp");
28108 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28109 (unsigned long) ref->lineno);
28110 if (node->form == DW_FORM_strp)
28111 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28112 debug_str_section, "The macro: \"%s\"",
28113 ref->info);
28114 else
28115 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28116 ref->info);
28117 break;
28118 case DW_MACRO_import:
28119 dw2_asm_output_data (1, ref->code, "Import");
28120 ASM_GENERATE_INTERNAL_LABEL (label,
28121 DEBUG_MACRO_SECTION_LABEL,
28122 ref->lineno + macinfo_label_base);
28123 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28124 break;
28125 default:
28126 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28127 ASM_COMMENT_START, (unsigned long) ref->code);
28128 break;
28129 }
28130 }
28131
28132 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28133 other compilation units' .debug_macinfo sections. IDX is the first
28134 index of a define/undef op; return the number of ops that should be
28135 emitted in a comdat .debug_macinfo section, and emit
28136 a DW_MACRO_import entry referencing it.
28137 If the define/undef entry should be emitted normally, return 0. */
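/* For instance, an identical run of define/undef ops coming from a header
included by several translation units hashes to the same md5 sum and hence
to the same comdat group name, so the linker keeps a single copy of that
fragment and each referencing unit carries only a one-byte DW_MACRO_import
op plus an offset. */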
28138
28139 static unsigned
28140 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28141 macinfo_hash_type **macinfo_htab)
28142 {
28143 macinfo_entry *first, *second, *cur, *inc;
28144 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28145 unsigned char checksum[16];
28146 struct md5_ctx ctx;
28147 char *grp_name, *tail;
28148 const char *base;
28149 unsigned int i, count, encoded_filename_len, linebuf_len;
28150 macinfo_entry **slot;
28151
28152 first = &(*macinfo_table)[idx];
28153 second = &(*macinfo_table)[idx + 1];
28154
28155 /* Optimize only if there are at least two consecutive define/undef ops,
28156 and either all of them are before the first DW_MACINFO_start_file
28157 with lineno {0,1} (i.e. the predefined macro block), or all of them are
28158 in some included header file. */
28159 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28160 return 0;
28161 if (vec_safe_is_empty (files))
28162 {
28163 if (first->lineno > 1 || second->lineno > 1)
28164 return 0;
28165 }
28166 else if (first->lineno == 0)
28167 return 0;
28168
28169 /* Find the last define/undef entry that can be grouped together
28170 with first, and at the same time compute an md5 checksum of their
28171 codes, line numbers and strings. */
28172 md5_init_ctx (&ctx);
28173 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28174 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28175 break;
28176 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28177 break;
28178 else
28179 {
28180 unsigned char code = cur->code;
28181 md5_process_bytes (&code, 1, &ctx);
28182 checksum_uleb128 (cur->lineno, &ctx);
28183 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28184 }
28185 md5_finish_ctx (&ctx, checksum);
28186 count = i - idx;
28187
28188 /* From the containing include filename (if any) pick up just
28189 usable characters from its basename. */
28190 if (vec_safe_is_empty (files))
28191 base = "";
28192 else
28193 base = lbasename (files->last ().info);
28194 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28195 if (ISIDNUM (base[i]) || base[i] == '.')
28196 encoded_filename_len++;
28197 /* Count . at the end. */
28198 if (encoded_filename_len)
28199 encoded_filename_len++;
28200
28201 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28202 linebuf_len = strlen (linebuf);
28203
28204 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
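/* E.g. (hypothetical header name, made-up checksum) a run of defines starting
at line 12 of foo.h could yield "wm4.foo.h.12.0123456789abcdef0123456789abcdef"
when DWARF_OFFSET_SIZE is 4. */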
28205 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28206 + 16 * 2 + 1);
28207 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28208 tail = grp_name + 4;
28209 if (encoded_filename_len)
28210 {
28211 for (i = 0; base[i]; i++)
28212 if (ISIDNUM (base[i]) || base[i] == '.')
28213 *tail++ = base[i];
28214 *tail++ = '.';
28215 }
28216 memcpy (tail, linebuf, linebuf_len);
28217 tail += linebuf_len;
28218 *tail++ = '.';
28219 for (i = 0; i < 16; i++)
28220 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28221
28222 /* Construct a macinfo_entry for DW_MACRO_import
28223 in the empty vector entry before the first define/undef. */
28224 inc = &(*macinfo_table)[idx - 1];
28225 inc->code = DW_MACRO_import;
28226 inc->lineno = 0;
28227 inc->info = ggc_strdup (grp_name);
28228 if (!*macinfo_htab)
28229 *macinfo_htab = new macinfo_hash_type (10);
28230 /* Avoid emitting duplicates. */
28231 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28232 if (*slot != NULL)
28233 {
28234 inc->code = 0;
28235 inc->info = NULL;
28236 /* If such an entry has been used before, just emit
28237 a DW_MACRO_import op. */
28238 inc = *slot;
28239 output_macinfo_op (inc);
28240 /* And clear all macinfo_entries in the range to avoid emitting them
28241 in the second pass. */
28242 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28243 {
28244 cur->code = 0;
28245 cur->info = NULL;
28246 }
28247 }
28248 else
28249 {
28250 *slot = inc;
28251 inc->lineno = (*macinfo_htab)->elements ();
28252 output_macinfo_op (inc);
28253 }
28254 return count;
28255 }
28256
28257 /* Save any strings needed by the macinfo table in the debug str
28258 table. All strings must be collected into the table by the time
28259 index_string is called. */
28260
28261 static void
28262 save_macinfo_strings (void)
28263 {
28264 unsigned len;
28265 unsigned i;
28266 macinfo_entry *ref;
28267
28268 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28269 {
28270 switch (ref->code)
28271 {
28272 /* Match the logic in output_macinfo_op to decide on
28273 indirect strings. */
28274 case DW_MACINFO_define:
28275 case DW_MACINFO_undef:
28276 len = strlen (ref->info) + 1;
28277 if (!dwarf_strict
28278 && len > DWARF_OFFSET_SIZE
28279 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28280 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28281 set_indirect_string (find_AT_string (ref->info));
28282 break;
28283 case DW_MACRO_define_strp:
28284 case DW_MACRO_undef_strp:
28285 set_indirect_string (find_AT_string (ref->info));
28286 break;
28287 default:
28288 break;
28289 }
28290 }
28291 }
28292
28293 /* Output macinfo section(s). */
28294
28295 static void
28296 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28297 {
28298 unsigned i;
28299 unsigned long length = vec_safe_length (macinfo_table);
28300 macinfo_entry *ref;
28301 vec<macinfo_entry, va_gc> *files = NULL;
28302 macinfo_hash_type *macinfo_htab = NULL;
28303 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28304
28305 if (! length)
28306 return;
28307
28308 /* output_macinfo* uses these interchangeably. */
28309 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28310 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28311 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28312 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28313
28314 /* AIX Assembler inserts the length, so adjust the reference to match the
28315 offset expected by debuggers. */
28316 strcpy (dl_section_ref, debug_line_label);
28317 if (XCOFF_DEBUGGING_INFO)
28318 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28319
28320 /* For .debug_macro emit the section header. */
28321 if (!dwarf_strict || dwarf_version >= 5)
28322 {
28323 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28324 "DWARF macro version number");
28325 if (DWARF_OFFSET_SIZE == 8)
28326 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28327 else
28328 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28329 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28330 debug_line_section, NULL);
28331 }
28332
28333 /* The first loop emits the primary .debug_macinfo section; after
28334 each emitted op the corresponding macinfo_entry is cleared.
28335 If a longer range of define/undef ops can be optimized using
28336 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28337 the vector entry just before the first define/undef of the range,
28338 and the range of define/undef ops itself is kept but not emitted here. */
28339 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28340 {
28341 switch (ref->code)
28342 {
28343 case DW_MACINFO_start_file:
28344 vec_safe_push (files, *ref);
28345 break;
28346 case DW_MACINFO_end_file:
28347 if (!vec_safe_is_empty (files))
28348 files->pop ();
28349 break;
28350 case DW_MACINFO_define:
28351 case DW_MACINFO_undef:
28352 if ((!dwarf_strict || dwarf_version >= 5)
28353 && HAVE_COMDAT_GROUP
28354 && vec_safe_length (files) != 1
28355 && i > 0
28356 && i + 1 < length
28357 && (*macinfo_table)[i - 1].code == 0)
28358 {
28359 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28360 if (count)
28361 {
28362 i += count - 1;
28363 continue;
28364 }
28365 }
28366 break;
28367 case 0:
28368 /* A dummy entry may be inserted at the beginning to be able
28369 to optimize the whole block of predefined macros. */
28370 if (i == 0)
28371 continue;
28372 default:
28373 break;
28374 }
28375 output_macinfo_op (ref);
28376 ref->info = NULL;
28377 ref->code = 0;
28378 }
28379
28380 if (!macinfo_htab)
28381 return;
28382
28383 /* Save the number of transparent includes so we can adjust the
28384 label number for the fat LTO object DWARF. */
28385 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28386
28387 delete macinfo_htab;
28388 macinfo_htab = NULL;
28389
28390 /* If any DW_MACRO_import ops were used, then at each such entry
28391 terminate the current chain, switch to a new comdat .debug_macinfo
28392 section and emit the corresponding define/undef entries within it. */
28393 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28394 switch (ref->code)
28395 {
28396 case 0:
28397 continue;
28398 case DW_MACRO_import:
28399 {
28400 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28401 tree comdat_key = get_identifier (ref->info);
28402 /* Terminate the previous .debug_macinfo section. */
28403 dw2_asm_output_data (1, 0, "End compilation unit");
28404 targetm.asm_out.named_section (debug_macinfo_section_name,
28405 SECTION_DEBUG
28406 | SECTION_LINKONCE
28407 | (early_lto_debug
28408 ? SECTION_EXCLUDE : 0),
28409 comdat_key);
28410 ASM_GENERATE_INTERNAL_LABEL (label,
28411 DEBUG_MACRO_SECTION_LABEL,
28412 ref->lineno + macinfo_label_base);
28413 ASM_OUTPUT_LABEL (asm_out_file, label);
28414 ref->code = 0;
28415 ref->info = NULL;
28416 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28417 "DWARF macro version number");
28418 if (DWARF_OFFSET_SIZE == 8)
28419 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28420 else
28421 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28422 }
28423 break;
28424 case DW_MACINFO_define:
28425 case DW_MACINFO_undef:
28426 output_macinfo_op (ref);
28427 ref->code = 0;
28428 ref->info = NULL;
28429 break;
28430 default:
28431 gcc_unreachable ();
28432 }
28433
28434 macinfo_label_base += macinfo_label_base_adj;
28435 }
28436
28437 /* Initialize the various sections and labels for dwarf output, either for
28438 early LTO debug info or for the normal case. Returns the generation (the
28439 zero-based number of times the function has been called). */
28440
28441 static unsigned
28442 init_sections_and_labels (bool early_lto_debug)
28443 {
28444 /* As we may get called multiple times have a generation count for
28445 labels. */
28446 static unsigned generation = 0;
28447
28448 if (early_lto_debug)
28449 {
28450 if (!dwarf_split_debug_info)
28451 {
28452 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28453 SECTION_DEBUG | SECTION_EXCLUDE,
28454 NULL);
28455 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28456 SECTION_DEBUG | SECTION_EXCLUDE,
28457 NULL);
28458 debug_macinfo_section_name
28459 = ((dwarf_strict && dwarf_version < 5)
28460 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28461 debug_macinfo_section = get_section (debug_macinfo_section_name,
28462 SECTION_DEBUG
28463 | SECTION_EXCLUDE, NULL);
28464 }
28465 else
28466 {
28467 /* ??? Which of the following do we need early? */
28468 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28469 SECTION_DEBUG | SECTION_EXCLUDE,
28470 NULL);
28471 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28472 SECTION_DEBUG | SECTION_EXCLUDE,
28473 NULL);
28474 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28475 SECTION_DEBUG
28476 | SECTION_EXCLUDE, NULL);
28477 debug_skeleton_abbrev_section
28478 = get_section (DEBUG_LTO_ABBREV_SECTION,
28479 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28480 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28481 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28482 generation);
28483
28484 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28485 stay in the main .o, but the skeleton_line goes into the split
28486 off dwo. */
28487 debug_skeleton_line_section
28488 = get_section (DEBUG_LTO_LINE_SECTION,
28489 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28490 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28491 DEBUG_SKELETON_LINE_SECTION_LABEL,
28492 generation);
28493 debug_str_offsets_section
28494 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28495 SECTION_DEBUG | SECTION_EXCLUDE,
28496 NULL);
28497 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28498 DEBUG_SKELETON_INFO_SECTION_LABEL,
28499 generation);
28500 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28501 DEBUG_STR_DWO_SECTION_FLAGS,
28502 NULL);
28503 debug_macinfo_section_name
28504 = ((dwarf_strict && dwarf_version < 5)
28505 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28506 debug_macinfo_section = get_section (debug_macinfo_section_name,
28507 SECTION_DEBUG | SECTION_EXCLUDE,
28508 NULL);
28509 }
28510 /* For macro info and the file table we have to refer to a
28511 debug_line section. */
28512 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28513 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28514 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28515 DEBUG_LINE_SECTION_LABEL, generation);
28516
28517 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28518 DEBUG_STR_SECTION_FLAGS
28519 | SECTION_EXCLUDE, NULL);
28520 if (!dwarf_split_debug_info)
28521 debug_line_str_section
28522 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28523 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28524 }
28525 else
28526 {
28527 if (!dwarf_split_debug_info)
28528 {
28529 debug_info_section = get_section (DEBUG_INFO_SECTION,
28530 SECTION_DEBUG, NULL);
28531 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28532 SECTION_DEBUG, NULL);
28533 debug_loc_section = get_section (dwarf_version >= 5
28534 ? DEBUG_LOCLISTS_SECTION
28535 : DEBUG_LOC_SECTION,
28536 SECTION_DEBUG, NULL);
28537 debug_macinfo_section_name
28538 = ((dwarf_strict && dwarf_version < 5)
28539 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28540 debug_macinfo_section = get_section (debug_macinfo_section_name,
28541 SECTION_DEBUG, NULL);
28542 }
28543 else
28544 {
28545 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28546 SECTION_DEBUG | SECTION_EXCLUDE,
28547 NULL);
28548 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28549 SECTION_DEBUG | SECTION_EXCLUDE,
28550 NULL);
28551 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28552 SECTION_DEBUG, NULL);
28553 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28554 SECTION_DEBUG, NULL);
28555 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28556 SECTION_DEBUG, NULL);
28557 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28558 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28559 generation);
28560
28561 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28562 stay in the main .o, but the skeleton_line goes into the
28563 split off dwo. */
28564 debug_skeleton_line_section
28565 = get_section (DEBUG_DWO_LINE_SECTION,
28566 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28567 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28568 DEBUG_SKELETON_LINE_SECTION_LABEL,
28569 generation);
28570 debug_str_offsets_section
28571 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28572 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28573 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28574 DEBUG_SKELETON_INFO_SECTION_LABEL,
28575 generation);
28576 debug_loc_section = get_section (dwarf_version >= 5
28577 ? DEBUG_DWO_LOCLISTS_SECTION
28578 : DEBUG_DWO_LOC_SECTION,
28579 SECTION_DEBUG | SECTION_EXCLUDE,
28580 NULL);
28581 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28582 DEBUG_STR_DWO_SECTION_FLAGS,
28583 NULL);
28584 debug_macinfo_section_name
28585 = ((dwarf_strict && dwarf_version < 5)
28586 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28587 debug_macinfo_section = get_section (debug_macinfo_section_name,
28588 SECTION_DEBUG | SECTION_EXCLUDE,
28589 NULL);
28590 }
28591 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28592 SECTION_DEBUG, NULL);
28593 debug_line_section = get_section (DEBUG_LINE_SECTION,
28594 SECTION_DEBUG, NULL);
28595 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28596 SECTION_DEBUG, NULL);
28597 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28598 SECTION_DEBUG, NULL);
28599 debug_str_section = get_section (DEBUG_STR_SECTION,
28600 DEBUG_STR_SECTION_FLAGS, NULL);
28601 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28602 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28603 DEBUG_STR_SECTION_FLAGS, NULL);
28604
28605 debug_ranges_section = get_section (dwarf_version >= 5
28606 ? DEBUG_RNGLISTS_SECTION
28607 : DEBUG_RANGES_SECTION,
28608 SECTION_DEBUG, NULL);
28609 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28610 SECTION_DEBUG, NULL);
28611 }
28612
28613 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28614 DEBUG_ABBREV_SECTION_LABEL, generation);
28615 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28616 DEBUG_INFO_SECTION_LABEL, generation);
28617 info_section_emitted = false;
28618 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28619 DEBUG_LINE_SECTION_LABEL, generation);
28620 /* There are up to 4 unique ranges labels per generation.
28621 See also output_rnglists. */
28622 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28623 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28624 if (dwarf_version >= 5 && dwarf_split_debug_info)
28625 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28626 DEBUG_RANGES_SECTION_LABEL,
28627 1 + generation * 4);
28628 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28629 DEBUG_ADDR_SECTION_LABEL, generation);
28630 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28631 (dwarf_strict && dwarf_version < 5)
28632 ? DEBUG_MACINFO_SECTION_LABEL
28633 : DEBUG_MACRO_SECTION_LABEL, generation);
28634 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28635 generation);
28636
28637 ++generation;
28638 return generation - 1;
28639 }
28640
28641 /* Set up for Dwarf output at the start of compilation. */
28642
28643 static void
28644 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28645 {
28646 /* Allocate the file_table. */
28647 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28648
28649 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28650 /* Allocate the decl_die_table. */
28651 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28652
28653 /* Allocate the decl_loc_table. */
28654 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28655
28656 /* Allocate the cached_dw_loc_list_table. */
28657 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28658
28659 /* Allocate the initial hunk of the abbrev_die_table. */
28660 vec_alloc (abbrev_die_table, 256);
28661 /* Zero-th entry is allocated, but unused. */
28662 abbrev_die_table->quick_push (NULL);
28663
28664 /* Allocate the dwarf_proc_stack_usage_map. */
28665 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28666
28667 /* Allocate the pubtypes and pubnames vectors. */
28668 vec_alloc (pubname_table, 32);
28669 vec_alloc (pubtype_table, 32);
28670
28671 vec_alloc (incomplete_types, 64);
28672
28673 vec_alloc (used_rtx_array, 32);
28674
28675 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28676 vec_alloc (macinfo_table, 64);
28677 #endif
28678
28679 /* If front-ends already registered a main translation unit but we were not
28680 ready to perform the association, do this now. */
28681 if (main_translation_unit != NULL_TREE)
28682 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28683 }
28684
28685 /* Called before compile () starts outputting functions, variables
28686 and toplevel asms into assembly. */
28687
28688 static void
28689 dwarf2out_assembly_start (void)
28690 {
28691 if (text_section_line_info)
28692 return;
28693
28694 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28695 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28696 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28697 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28698 COLD_TEXT_SECTION_LABEL, 0);
28699 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28700
28701 switch_to_section (text_section);
28702 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28703 #endif
28704
28705 /* Make sure the line number table for .text always exists. */
28706 text_section_line_info = new_line_info_table ();
28707 text_section_line_info->end_label = text_end_label;
28708
28709 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28710 cur_line_info_table = text_section_line_info;
28711 #endif
28712
28713 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28714 && dwarf2out_do_cfi_asm ()
28715 && !dwarf2out_do_eh_frame ())
28716 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28717 }
28718
28719 /* A helper function for dwarf2out_finish called through
28720 htab_traverse. Assign a string its index. All strings must be
28721 collected into the table by the time index_string is called,
28722 because the indexing code relies on htab_traverse to traverse nodes
28723 in the same order for each run. */
28724
28725 int
28726 index_string (indirect_string_node **h, unsigned int *index)
28727 {
28728 indirect_string_node *node = *h;
28729
28730 find_string_form (node);
28731 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28732 {
28733 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28734 node->index = *index;
28735 *index += 1;
28736 }
28737 return 1;
28738 }
28739
28740 /* A helper function for output_indirect_strings called through
28741 htab_traverse. Output the offset to a string and update the
28742 current offset. */
28743
28744 int
28745 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28746 {
28747 indirect_string_node *node = *h;
28748
28749 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28750 {
28751 /* Assert that this node has been assigned an index. */
28752 gcc_assert (node->index != NO_INDEX_ASSIGNED
28753 && node->index != NOT_INDEXED);
28754 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28755 "indexed string 0x%x: %s", node->index, node->str);
28756 *offset += strlen (node->str) + 1;
28757 }
28758 return 1;
28759 }
28760
28761 /* A helper function for dwarf2out_finish called through
28762 htab_traverse. Output the indexed string. */
28763
28764 int
28765 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28766 {
28767 struct indirect_string_node *node = *h;
28768
28769 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28770 {
28771 /* Assert that the strings are output in the same order as their
28772 indexes were assigned. */
28773 gcc_assert (*cur_idx == node->index);
28774 assemble_string (node->str, strlen (node->str) + 1);
28775 *cur_idx += 1;
28776 }
28777 return 1;
28778 }
28779
28780 /* A helper function for output_indirect_strings. Counts the number
28781 of indexed string offsets. Must match the logic of the functions
28782 output_index_string[_offsets] above. */
28783 int
28784 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28785 {
28786 struct indirect_string_node *node = *h;
28787
28788 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28789 *last_idx += 1;
28790 return 1;
28791 }
28792
28793 /* A helper function for dwarf2out_finish called through
28794 htab_traverse. Emit one queued .debug_str string. */
28795
28796 int
28797 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28798 {
28799 struct indirect_string_node *node = *h;
28800
28801 node->form = find_string_form (node);
28802 if (node->form == form && node->refcount > 0)
28803 {
28804 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28805 assemble_string (node->str, strlen (node->str) + 1);
28806 }
28807
28808 return 1;
28809 }
28810
28811 /* Output the indexed string table. */
28812
28813 static void
28814 output_indirect_strings (void)
28815 {
28816 switch_to_section (debug_str_section);
28817 if (!dwarf_split_debug_info)
28818 debug_str_hash->traverse<enum dwarf_form,
28819 output_indirect_string> (DW_FORM_strp);
28820 else
28821 {
28822 unsigned int offset = 0;
28823 unsigned int cur_idx = 0;
28824
28825 if (skeleton_debug_str_hash)
28826 skeleton_debug_str_hash->traverse<enum dwarf_form,
28827 output_indirect_string> (DW_FORM_strp);
28828
28829 switch_to_section (debug_str_offsets_section);
28830 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28831 header. Note that we don't need to generate a label to the
28832 actual index table following the header here, because this is
28833 for the split dwarf case only. In a .dwo file there is only
28834 one string offsets table (and one debug info section). But
28835 if we were to start using string offset tables for the main (or
28836 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28837 pointing to the actual index after the header. Split dwarf
28838 units will never have a string offsets base attribute. When
28839 a split unit is moved into a .dwp file the string offsets can
28840 be found through the .debug_cu_index section table. */
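/* The header emitted below is just the initial length (preceded by the
0xffffffff escape for 64-bit DWARF), a 2-byte version number of 5 and
2 bytes of padding; the indexed offsets themselves then follow via
output_index_string_offset. */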
28841 if (dwarf_version >= 5)
28842 {
28843 unsigned int last_idx = 0;
28844 unsigned long str_offsets_length;
28845
28846 debug_str_hash->traverse_noresize
28847 <unsigned int *, count_index_strings> (&last_idx);
28848 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28849 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28850 dw2_asm_output_data (4, 0xffffffff,
28851 "Escape value for 64-bit DWARF extension");
28852 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28853 "Length of string offsets unit");
28854 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28855 dw2_asm_output_data (2, 0, "Header zero padding");
28856 }
28857 debug_str_hash->traverse_noresize
28858 <unsigned int *, output_index_string_offset> (&offset);
28859 switch_to_section (debug_str_dwo_section);
28860 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28861 (&cur_idx);
28862 }
28863 }
28864
28865 /* Callback for htab_traverse to assign an index to an entry in the
28866 table, and to write that entry to the .debug_addr section. */
28867
28868 int
28869 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28870 {
28871 addr_table_entry *entry = *slot;
28872
28873 if (entry->refcount == 0)
28874 {
28875 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28876 || entry->index == NOT_INDEXED);
28877 return 1;
28878 }
28879
28880 gcc_assert (entry->index == *cur_index);
28881 (*cur_index)++;
28882
28883 switch (entry->kind)
28884 {
28885 case ate_kind_rtx:
28886 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28887 "0x%x", entry->index);
28888 break;
28889 case ate_kind_rtx_dtprel:
28890 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28891 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28892 DWARF2_ADDR_SIZE,
28893 entry->addr.rtl);
28894 fputc ('\n', asm_out_file);
28895 break;
28896 case ate_kind_label:
28897 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28898 "0x%x", entry->index);
28899 break;
28900 default:
28901 gcc_unreachable ();
28902 }
28903 return 1;
28904 }
28905
28906 /* A helper function for dwarf2out_finish. Counts the number
28907 of indexed addresses. Must match the logic of the function
28908 output_addr_table_entry above. */
28909 int
28910 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28911 {
28912 addr_table_entry *entry = *slot;
28913
28914 if (entry->refcount > 0)
28915 *last_idx += 1;
28916 return 1;
28917 }
28918
28919 /* Produce the .debug_addr section. */
28920
28921 static void
28922 output_addr_table (void)
28923 {
28924 unsigned int index = 0;
28925 if (addr_index_table == NULL || addr_index_table->size () == 0)
28926 return;
28927
28928 switch_to_section (debug_addr_section);
28929 addr_index_table
28930 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28931 }
28932
28933 #if ENABLE_ASSERT_CHECKING
28934 /* Verify that all marks are clear. */
28935
28936 static void
28937 verify_marks_clear (dw_die_ref die)
28938 {
28939 dw_die_ref c;
28940
28941 gcc_assert (! die->die_mark);
28942 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28943 }
28944 #endif /* ENABLE_ASSERT_CHECKING */
28945
28946 /* Clear the marks for a die and its children.
28947 Be cool if the mark isn't set. */
28948
28949 static void
28950 prune_unmark_dies (dw_die_ref die)
28951 {
28952 dw_die_ref c;
28953
28954 if (die->die_mark)
28955 die->die_mark = 0;
28956 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28957 }
28958
28959 /* Given LOC that is referenced by a DIE we're marking as used, find all
28960 the DWARF procedures it references and mark them as used. */
28961
28962 static void
28963 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28964 {
28965 for (; loc != NULL; loc = loc->dw_loc_next)
28966 switch (loc->dw_loc_opc)
28967 {
28968 case DW_OP_implicit_pointer:
28969 case DW_OP_convert:
28970 case DW_OP_reinterpret:
28971 case DW_OP_GNU_implicit_pointer:
28972 case DW_OP_GNU_convert:
28973 case DW_OP_GNU_reinterpret:
28974 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28975 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28976 break;
28977 case DW_OP_GNU_variable_value:
28978 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28979 {
28980 dw_die_ref ref
28981 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28982 if (ref == NULL)
28983 break;
28984 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28985 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28986 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28987 }
28988 /* FALLTHRU */
28989 case DW_OP_call2:
28990 case DW_OP_call4:
28991 case DW_OP_call_ref:
28992 case DW_OP_const_type:
28993 case DW_OP_GNU_const_type:
28994 case DW_OP_GNU_parameter_ref:
28995 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28996 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28997 break;
28998 case DW_OP_regval_type:
28999 case DW_OP_deref_type:
29000 case DW_OP_GNU_regval_type:
29001 case DW_OP_GNU_deref_type:
29002 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29003 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29004 break;
29005 case DW_OP_entry_value:
29006 case DW_OP_GNU_entry_value:
29007 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29008 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29009 break;
29010 default:
29011 break;
29012 }
29013 }
29014
29015 /* Given DIE that we're marking as used, find any other dies
29016 it references as attributes and mark them as used. */
29017
29018 static void
29019 prune_unused_types_walk_attribs (dw_die_ref die)
29020 {
29021 dw_attr_node *a;
29022 unsigned ix;
29023
29024 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29025 {
29026 switch (AT_class (a))
29027 {
29028 /* Make sure DWARF procedures referenced by location descriptions will
29029 get emitted. */
29030 case dw_val_class_loc:
29031 prune_unused_types_walk_loc_descr (AT_loc (a));
29032 break;
29033 case dw_val_class_loc_list:
29034 for (dw_loc_list_ref list = AT_loc_list (a);
29035 list != NULL;
29036 list = list->dw_loc_next)
29037 prune_unused_types_walk_loc_descr (list->expr);
29038 break;
29039
29040 case dw_val_class_view_list:
29041 /* This points to a loc_list in another attribute, so it's
29042 already covered. */
29043 break;
29044
29045 case dw_val_class_die_ref:
29046 /* A reference to another DIE.
29047 Make sure that it will get emitted.
29048 If it was broken out into a comdat group, don't follow it. */
29049 if (! AT_ref (a)->comdat_type_p
29050 || a->dw_attr == DW_AT_specification)
29051 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29052 break;
29053
29054 case dw_val_class_str:
29055 /* Set the string's refcount to 0 so that prune_unused_types_mark
29056 accounts properly for it. */
29057 a->dw_attr_val.v.val_str->refcount = 0;
29058 break;
29059
29060 default:
29061 break;
29062 }
29063 }
29064 }
29065
29066 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29067
29068 static void
29069 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29070 {
29071 dw_die_ref c;
29072
29073 if (die == NULL || die->die_child == NULL)
29074 return;
29075 c = die->die_child;
29076 do
29077 {
29078 if (is_template_parameter (c))
29079 prune_unused_types_mark (c, 1);
29080 c = c->die_sib;
29081 } while (c && c != die->die_child);
29082 }
29083
29084 /* Mark DIE as being used. If DOKIDS is true, then walk down
29085 to DIE's children. */
29086
29087 static void
29088 prune_unused_types_mark (dw_die_ref die, int dokids)
29089 {
29090 dw_die_ref c;
29091
29092 if (die->die_mark == 0)
29093 {
29094 /* We haven't done this node yet. Mark it as used. */
29095 die->die_mark = 1;
29096 /* If this is the DIE of a generic type instantiation,
29097 mark the children DIEs that describe its generic parms and
29098 args. */
29099 prune_unused_types_mark_generic_parms_dies (die);
29100
29101 /* We also have to mark its parents as used.
29102 (But we don't want to mark our parent's kids due to this,
29103 unless it is a class.) */
29104 if (die->die_parent)
29105 prune_unused_types_mark (die->die_parent,
29106 class_scope_p (die->die_parent));
29107
29108 /* Mark any referenced nodes. */
29109 prune_unused_types_walk_attribs (die);
29110
29111 /* If this node is a specification,
29112 also mark the definition, if it exists. */
29113 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29114 prune_unused_types_mark (die->die_definition, 1);
29115 }
29116
29117 if (dokids && die->die_mark != 2)
29118 {
29119 /* We need to walk the children, but haven't done so yet.
29120 Remember that we've walked the kids. */
29121 die->die_mark = 2;
29122
29123 /* If this is an array type, we need to make sure our
29124 kids get marked, even if they're types. If we're
29125 breaking out types into comdat sections, do this
29126 for all type definitions. */
29127 if (die->die_tag == DW_TAG_array_type
29128 || (use_debug_types
29129 && is_type_die (die) && ! is_declaration_die (die)))
29130 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29131 else
29132 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29133 }
29134 }
29135
29136 /* For local classes, check whether any static member functions were emitted
29137 and, if so, mark them. */
29138
29139 static void
29140 prune_unused_types_walk_local_classes (dw_die_ref die)
29141 {
29142 dw_die_ref c;
29143
29144 if (die->die_mark == 2)
29145 return;
29146
29147 switch (die->die_tag)
29148 {
29149 case DW_TAG_structure_type:
29150 case DW_TAG_union_type:
29151 case DW_TAG_class_type:
29152 break;
29153
29154 case DW_TAG_subprogram:
29155 if (!get_AT_flag (die, DW_AT_declaration)
29156 || die->die_definition != NULL)
29157 prune_unused_types_mark (die, 1);
29158 return;
29159
29160 default:
29161 return;
29162 }
29163
29164 /* Mark children. */
29165 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29166 }
29167
29168 /* Walk the tree DIE and mark types that we actually use. */
29169
29170 static void
29171 prune_unused_types_walk (dw_die_ref die)
29172 {
29173 dw_die_ref c;
29174
29175 /* Don't do anything if this node is already marked and
29176 children have been marked as well. */
29177 if (die->die_mark == 2)
29178 return;
29179
29180 switch (die->die_tag)
29181 {
29182 case DW_TAG_structure_type:
29183 case DW_TAG_union_type:
29184 case DW_TAG_class_type:
29185 if (die->die_perennial_p)
29186 break;
29187
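/* See whether this type is nested inside a subprogram, i.e. whether it is a local class. */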
29188 for (c = die->die_parent; c; c = c->die_parent)
29189 if (c->die_tag == DW_TAG_subprogram)
29190 break;
29191
29192 /* Finding used static member functions inside of classes
29193 is needed just for local classes, because for other classes
29194 static member function DIEs with DW_AT_specification
29195 are emitted outside of the DW_TAG_*_type. If we ever change
29196 it, we'd need to call this even for non-local classes. */
29197 if (c)
29198 prune_unused_types_walk_local_classes (die);
29199
29200 /* It's a type node --- don't mark it. */
29201 return;
29202
29203 case DW_TAG_const_type:
29204 case DW_TAG_packed_type:
29205 case DW_TAG_pointer_type:
29206 case DW_TAG_reference_type:
29207 case DW_TAG_rvalue_reference_type:
29208 case DW_TAG_volatile_type:
29209 case DW_TAG_typedef:
29210 case DW_TAG_array_type:
29211 case DW_TAG_interface_type:
29212 case DW_TAG_friend:
29213 case DW_TAG_enumeration_type:
29214 case DW_TAG_subroutine_type:
29215 case DW_TAG_string_type:
29216 case DW_TAG_set_type:
29217 case DW_TAG_subrange_type:
29218 case DW_TAG_ptr_to_member_type:
29219 case DW_TAG_file_type:
29220 /* Type nodes are useful only when other DIEs reference them --- don't
29221 mark them. */
29222 /* FALLTHROUGH */
29223
29224 case DW_TAG_dwarf_procedure:
29225 /* Likewise for DWARF procedures. */
29226
29227 if (die->die_perennial_p)
29228 break;
29229
29230 return;
29231
29232 default:
29233 /* Mark everything else. */
29234 break;
29235 }
29236
29237 if (die->die_mark == 0)
29238 {
29239 die->die_mark = 1;
29240
29241 /* Now, mark any dies referenced from here. */
29242 prune_unused_types_walk_attribs (die);
29243 }
29244
29245 die->die_mark = 2;
29246
29247 /* Mark children. */
29248 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29249 }
29250
29251 /* Increment the string counts on strings referred to from DIE's
29252 attributes. */
29253
29254 static void
29255 prune_unused_types_update_strings (dw_die_ref die)
29256 {
29257 dw_attr_node *a;
29258 unsigned ix;
29259
29260 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29261 if (AT_class (a) == dw_val_class_str)
29262 {
29263 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29264 s->refcount++;
29265 /* Avoid putting strings into the hash table unnecessarily: unless the
29266 string section supports merging, only strings used at least twice need to be there. */
29267 if (s->refcount
29268 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29269 {
29270 indirect_string_node **slot
29271 = debug_str_hash->find_slot_with_hash (s->str,
29272 htab_hash_string (s->str),
29273 INSERT);
29274 gcc_assert (*slot == NULL);
29275 *slot = s;
29276 }
29277 }
29278 }
29279
29280 /* Mark DIE and its children as removed. */
29281
29282 static void
29283 mark_removed (dw_die_ref die)
29284 {
29285 dw_die_ref c;
29286 die->removed = true;
29287 FOR_EACH_CHILD (die, c, mark_removed (c));
29288 }
29289
29290 /* Remove from the tree DIE any dies that aren't marked. */
29291
29292 static void
29293 prune_unused_types_prune (dw_die_ref die)
29294 {
29295 dw_die_ref c;
29296
29297 gcc_assert (die->die_mark);
29298 prune_unused_types_update_strings (die);
29299
29300 if (! die->die_child)
29301 return;
29302
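/* Children hang off DIE on a circular list linked through die_sib, with die_child pointing at the last child; walk that list and splice out any unmarked children. */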
29303 c = die->die_child;
29304 do {
29305 dw_die_ref prev = c, next;
29306 for (c = c->die_sib; ! c->die_mark; c = next)
29307 if (c == die->die_child)
29308 {
29309 /* No marked children between 'prev' and the end of the list. */
29310 if (prev == c)
29311 /* No marked children at all. */
29312 die->die_child = NULL;
29313 else
29314 {
29315 prev->die_sib = c->die_sib;
29316 die->die_child = prev;
29317 }
29318 c->die_sib = NULL;
29319 mark_removed (c);
29320 return;
29321 }
29322 else
29323 {
29324 next = c->die_sib;
29325 c->die_sib = NULL;
29326 mark_removed (c);
29327 }
29328
29329 if (c != prev->die_sib)
29330 prev->die_sib = c;
29331 prune_unused_types_prune (c);
29332 } while (c != die->die_child);
29333 }
29334
29335 /* Remove dies representing declarations that we never use. */
29336
29337 static void
29338 prune_unused_types (void)
29339 {
29340 unsigned int i;
29341 limbo_die_node *node;
29342 comdat_type_node *ctnode;
29343 pubname_entry *pub;
29344 dw_die_ref base_type;
29345
29346 #if ENABLE_ASSERT_CHECKING
29347 /* All the marks should already be clear. */
29348 verify_marks_clear (comp_unit_die ());
29349 for (node = limbo_die_list; node; node = node->next)
29350 verify_marks_clear (node->die);
29351 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29352 verify_marks_clear (ctnode->root_die);
29353 #endif /* ENABLE_ASSERT_CHECKING */
29354
29355 /* Mark types that are used in global variables. */
29356 premark_types_used_by_global_vars ();
29357
29358 /* Set the mark on nodes that are actually used. */
29359 prune_unused_types_walk (comp_unit_die ());
29360 for (node = limbo_die_list; node; node = node->next)
29361 prune_unused_types_walk (node->die);
29362 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29363 {
29364 prune_unused_types_walk (ctnode->root_die);
29365 prune_unused_types_mark (ctnode->type_die, 1);
29366 }
29367
29368 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29369 are unusual in that they are pubnames that are the children of pubtypes.
29370 They should only be marked via their parent DW_TAG_enumeration_type die,
29371 not as roots in themselves. */
29372 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29373 if (pub->die->die_tag != DW_TAG_enumerator)
29374 prune_unused_types_mark (pub->die, 1);
29375 for (i = 0; base_types.iterate (i, &base_type); i++)
29376 prune_unused_types_mark (base_type, 1);
29377
29378 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29379 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29380 callees). */
29381 cgraph_node *cnode;
29382 FOR_EACH_FUNCTION (cnode)
29383 if (cnode->referred_to_p (false))
29384 {
29385 dw_die_ref die = lookup_decl_die (cnode->decl);
29386 if (die == NULL || die->die_mark)
29387 continue;
29388 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29389 if (e->caller != cnode
29390 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29391 {
29392 prune_unused_types_mark (die, 1);
29393 break;
29394 }
29395 }
29396
29397 if (debug_str_hash)
29398 debug_str_hash->empty ();
29399 if (skeleton_debug_str_hash)
29400 skeleton_debug_str_hash->empty ();
29401 prune_unused_types_prune (comp_unit_die ());
29402 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29403 {
29404 node = *pnode;
29405 if (!node->die->die_mark)
29406 *pnode = node->next;
29407 else
29408 {
29409 prune_unused_types_prune (node->die);
29410 pnode = &node->next;
29411 }
29412 }
29413 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29414 prune_unused_types_prune (ctnode->root_die);
29415
29416 /* Leave the marks clear. */
29417 prune_unmark_dies (comp_unit_die ());
29418 for (node = limbo_die_list; node; node = node->next)
29419 prune_unmark_dies (node->die);
29420 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29421 prune_unmark_dies (ctnode->root_die);
29422 }
29423
29424 /* Helpers to manipulate hash table of comdat type units. */
29425
29426 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29427 {
29428 static inline hashval_t hash (const comdat_type_node *);
29429 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29430 };
29431
29432 inline hashval_t
29433 comdat_type_hasher::hash (const comdat_type_node *type_node)
29434 {
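/* The type signature is already a (truncated MD5) checksum, so its leading bytes can be reused directly as the hash value. */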
29435 hashval_t h;
29436 memcpy (&h, type_node->signature, sizeof (h));
29437 return h;
29438 }
29439
29440 inline bool
29441 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29442 const comdat_type_node *type_node_2)
29443 {
29444 return (! memcmp (type_node_1->signature, type_node_2->signature,
29445 DWARF_TYPE_SIGNATURE_SIZE));
29446 }
29447
29448 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a DIE
29449 to the location where it would have been added had we known its
29450 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29451 probably improve the compactness of the debug info by removing equivalent
29452 abbrevs, and will hide any differences caused by deferring the
29453 computation of the assembler name, as triggered by e.g. PCH. */
29454
29455 static inline void
29456 move_linkage_attr (dw_die_ref die)
29457 {
29458 unsigned ix = vec_safe_length (die->die_attr);
29459 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29460
29461 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29462 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29463
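/* Scan backwards for the attribute after which the linkage name would normally have been added (the name or the decl line/column). */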
29464 while (--ix > 0)
29465 {
29466 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29467
29468 if (prev->dw_attr == DW_AT_decl_line
29469 || prev->dw_attr == DW_AT_decl_column
29470 || prev->dw_attr == DW_AT_name)
29471 break;
29472 }
29473
29474 if (ix != vec_safe_length (die->die_attr) - 1)
29475 {
29476 die->die_attr->pop ();
29477 die->die_attr->quick_insert (ix, linkage);
29478 }
29479 }
29480
29481 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29482 referenced from typed stack ops and count how often they are used. */
29483
29484 static void
29485 mark_base_types (dw_loc_descr_ref loc)
29486 {
29487 dw_die_ref base_type = NULL;
29488
29489 for (; loc; loc = loc->dw_loc_next)
29490 {
29491 switch (loc->dw_loc_opc)
29492 {
29493 case DW_OP_regval_type:
29494 case DW_OP_deref_type:
29495 case DW_OP_GNU_regval_type:
29496 case DW_OP_GNU_deref_type:
29497 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29498 break;
29499 case DW_OP_convert:
29500 case DW_OP_reinterpret:
29501 case DW_OP_GNU_convert:
29502 case DW_OP_GNU_reinterpret:
29503 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29504 continue;
29505 /* FALLTHRU */
29506 case DW_OP_const_type:
29507 case DW_OP_GNU_const_type:
29508 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29509 break;
29510 case DW_OP_entry_value:
29511 case DW_OP_GNU_entry_value:
29512 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29513 continue;
29514 default:
29515 continue;
29516 }
29517 gcc_assert (base_type->die_parent == comp_unit_die ());
29518 if (base_type->die_mark)
29519 base_type->die_mark++;
29520 else
29521 {
29522 base_types.safe_push (base_type);
29523 base_type->die_mark = 1;
29524 }
29525 }
29526 }
29527
29528 /* Comparison function for sorting marked base types. */
29529
29530 static int
29531 base_type_cmp (const void *x, const void *y)
29532 {
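/* die_mark holds the usage count accumulated by mark_base_types; order by decreasing count first, then by size, encoding and alignment, presumably so that the resulting order is deterministic. */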
29533 dw_die_ref dx = *(const dw_die_ref *) x;
29534 dw_die_ref dy = *(const dw_die_ref *) y;
29535 unsigned int byte_size1, byte_size2;
29536 unsigned int encoding1, encoding2;
29537 unsigned int align1, align2;
29538 if (dx->die_mark > dy->die_mark)
29539 return -1;
29540 if (dx->die_mark < dy->die_mark)
29541 return 1;
29542 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29543 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29544 if (byte_size1 < byte_size2)
29545 return 1;
29546 if (byte_size1 > byte_size2)
29547 return -1;
29548 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29549 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29550 if (encoding1 < encoding2)
29551 return 1;
29552 if (encoding1 > encoding2)
29553 return -1;
29554 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29555 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29556 if (align1 < align2)
29557 return 1;
29558 if (align1 > align2)
29559 return -1;
29560 return 0;
29561 }
29562
29563 /* Move base types marked by mark_base_types as early as possible
29564 in the CU, sorted by decreasing usage count both to make the
29565 uleb128 references as small as possible and to make sure they
29566 will have die_offset already computed by calc_die_sizes when the
29567 sizes of typed stack loc ops are computed. */
29568
29569 static void
29570 move_marked_base_types (void)
29571 {
29572 unsigned int i;
29573 dw_die_ref base_type, die, c;
29574
29575 if (base_types.is_empty ())
29576 return;
29577
29578 /* Sort by decreasing usage count; they will be added again in that
29579 order later on. */
29580 base_types.qsort (base_type_cmp);
29581 die = comp_unit_die ();
29582 c = die->die_child;
29583 do
29584 {
29585 dw_die_ref prev = c;
29586 c = c->die_sib;
29587 while (c->die_mark)
29588 {
29589 remove_child_with_prev (c, prev);
29590 /* Since base types were marked, there must be at least
29591 one node other than DW_TAG_base_type. */
29592 gcc_assert (die->die_child != NULL);
29593 c = prev->die_sib;
29594 }
29595 }
29596 while (c != die->die_child);
29597 gcc_assert (die->die_child);
29598 c = die->die_child;
29599 for (i = 0; base_types.iterate (i, &base_type); i++)
29600 {
29601 base_type->die_mark = 0;
29602 base_type->die_sib = c->die_sib;
29603 c->die_sib = base_type;
29604 c = base_type;
29605 }
29606 }
29607
29608 /* Helper function for resolve_addr: attempt to resolve one CONST_STRING
29609 and return true if successful. Similarly, verify that
29610 SYMBOL_REFs refer to variables emitted in the current CU. */
29611
29612 static bool
29613 resolve_one_addr (rtx *addr)
29614 {
29615 rtx rtl = *addr;
29616
29617 if (GET_CODE (rtl) == CONST_STRING)
29618 {
29619 size_t len = strlen (XSTR (rtl, 0)) + 1;
29620 tree t = build_string (len, XSTR (rtl, 0));
29621 tree tlen = size_int (len - 1);
29622 TREE_TYPE (t)
29623 = build_array_type (char_type_node, build_index_type (tlen));
29624 rtl = lookup_constant_def (t);
29625 if (!rtl || !MEM_P (rtl))
29626 return false;
29627 rtl = XEXP (rtl, 0);
29628 if (GET_CODE (rtl) == SYMBOL_REF
29629 && SYMBOL_REF_DECL (rtl)
29630 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29631 return false;
29632 vec_safe_push (used_rtx_array, rtl);
29633 *addr = rtl;
29634 return true;
29635 }
29636
29637 if (GET_CODE (rtl) == SYMBOL_REF
29638 && SYMBOL_REF_DECL (rtl))
29639 {
29640 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29641 {
29642 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29643 return false;
29644 }
29645 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29646 return false;
29647 }
29648
29649 if (GET_CODE (rtl) == CONST)
29650 {
29651 subrtx_ptr_iterator::array_type array;
29652 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29653 if (!resolve_one_addr (*iter))
29654 return false;
29655 }
29656
29657 return true;
29658 }
29659
29660 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29661 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29662 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29663
29664 static rtx
29665 string_cst_pool_decl (tree t)
29666 {
29667 rtx rtl = output_constant_def (t, 1);
29668 unsigned char *array;
29669 dw_loc_descr_ref l;
29670 tree decl;
29671 size_t len;
29672 dw_die_ref ref;
29673
29674 if (!rtl || !MEM_P (rtl))
29675 return NULL_RTX;
29676 rtl = XEXP (rtl, 0);
29677 if (GET_CODE (rtl) != SYMBOL_REF
29678 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29679 return NULL_RTX;
29680
29681 decl = SYMBOL_REF_DECL (rtl);
29682 if (!lookup_decl_die (decl))
29683 {
29684 len = TREE_STRING_LENGTH (t);
29685 vec_safe_push (used_rtx_array, rtl);
29686 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29687 array = ggc_vec_alloc<unsigned char> (len);
29688 memcpy (array, TREE_STRING_POINTER (t), len);
29689 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29690 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29691 l->dw_loc_oprnd2.v.val_vec.length = len;
29692 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29693 l->dw_loc_oprnd2.v.val_vec.array = array;
29694 add_AT_loc (ref, DW_AT_location, l);
29695 equate_decl_number_to_die (decl, ref);
29696 }
29697 return rtl;
29698 }
29699
29700 /* Helper function of resolve_addr_in_expr. LOC is
29701 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29702 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29703 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29704 with DW_OP_implicit_pointer if possible
29705 and return true; if unsuccessful, return false. */
29706
29707 static bool
29708 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29709 {
29710 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29711 HOST_WIDE_INT offset = 0;
29712 dw_die_ref ref = NULL;
29713 tree decl;
29714
29715 if (GET_CODE (rtl) == CONST
29716 && GET_CODE (XEXP (rtl, 0)) == PLUS
29717 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29718 {
29719 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29720 rtl = XEXP (XEXP (rtl, 0), 0);
29721 }
29722 if (GET_CODE (rtl) == CONST_STRING)
29723 {
29724 size_t len = strlen (XSTR (rtl, 0)) + 1;
29725 tree t = build_string (len, XSTR (rtl, 0));
29726 tree tlen = size_int (len - 1);
29727
29728 TREE_TYPE (t)
29729 = build_array_type (char_type_node, build_index_type (tlen));
29730 rtl = string_cst_pool_decl (t);
29731 if (!rtl)
29732 return false;
29733 }
29734 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29735 {
29736 decl = SYMBOL_REF_DECL (rtl);
29737 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29738 {
29739 ref = lookup_decl_die (decl);
29740 if (ref && (get_AT (ref, DW_AT_location)
29741 || get_AT (ref, DW_AT_const_value)))
29742 {
29743 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29744 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29745 loc->dw_loc_oprnd1.val_entry = NULL;
29746 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29747 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
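/* Drop the following DW_OP_stack_value; DW_OP_implicit_pointer replaces the whole DW_OP_addr DW_OP_stack_value pair. */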
29748 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29749 loc->dw_loc_oprnd2.v.val_int = offset;
29750 return true;
29751 }
29752 }
29753 }
29754 return false;
29755 }
29756
29757 /* Helper function for resolve_addr: handle one location
29758 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29759 the location list couldn't be resolved. */
29760
29761 static bool
29762 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29763 {
29764 dw_loc_descr_ref keep = NULL;
29765 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29766 switch (loc->dw_loc_opc)
29767 {
29768 case DW_OP_addr:
29769 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29770 {
29771 if ((prev == NULL
29772 || prev->dw_loc_opc == DW_OP_piece
29773 || prev->dw_loc_opc == DW_OP_bit_piece)
29774 && loc->dw_loc_next
29775 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29776 && (!dwarf_strict || dwarf_version >= 5)
29777 && optimize_one_addr_into_implicit_ptr (loc))
29778 break;
29779 return false;
29780 }
29781 break;
29782 case DW_OP_GNU_addr_index:
29783 case DW_OP_addrx:
29784 case DW_OP_GNU_const_index:
29785 case DW_OP_constx:
29786 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29787 || loc->dw_loc_opc == DW_OP_addrx)
29788 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29789 || loc->dw_loc_opc == DW_OP_constx)
29790 && loc->dtprel))
29791 {
29792 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29793 if (!resolve_one_addr (&rtl))
29794 return false;
29795 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29796 loc->dw_loc_oprnd1.val_entry
29797 = add_addr_table_entry (rtl, ate_kind_rtx);
29798 }
29799 break;
29800 case DW_OP_const4u:
29801 case DW_OP_const8u:
29802 if (loc->dtprel
29803 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29804 return false;
29805 break;
29806 case DW_OP_plus_uconst:
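/* Replace DW_OP_plus_uconst with a literal push followed by DW_OP_plus when that encodes in fewer bytes. */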
29807 if (size_of_loc_descr (loc)
29808 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29809 + 1
29810 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29811 {
29812 dw_loc_descr_ref repl
29813 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29814 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29815 add_loc_descr (&repl, loc->dw_loc_next);
29816 *loc = *repl;
29817 }
29818 break;
29819 case DW_OP_implicit_value:
29820 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29821 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29822 return false;
29823 break;
29824 case DW_OP_implicit_pointer:
29825 case DW_OP_GNU_implicit_pointer:
29826 case DW_OP_GNU_parameter_ref:
29827 case DW_OP_GNU_variable_value:
29828 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29829 {
29830 dw_die_ref ref
29831 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29832 if (ref == NULL)
29833 return false;
29834 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29835 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29836 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29837 }
29838 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29839 {
29840 if (prev == NULL
29841 && loc->dw_loc_next == NULL
29842 && AT_class (a) == dw_val_class_loc)
29843 switch (a->dw_attr)
29844 {
29845 /* The following attributes allow both exprloc and reference,
29846 so if the whole expression is DW_OP_GNU_variable_value
29847 alone we can transform it into a reference. */
29848 case DW_AT_byte_size:
29849 case DW_AT_bit_size:
29850 case DW_AT_lower_bound:
29851 case DW_AT_upper_bound:
29852 case DW_AT_bit_stride:
29853 case DW_AT_count:
29854 case DW_AT_allocated:
29855 case DW_AT_associated:
29856 case DW_AT_byte_stride:
29857 a->dw_attr_val.val_class = dw_val_class_die_ref;
29858 a->dw_attr_val.val_entry = NULL;
29859 a->dw_attr_val.v.val_die_ref.die
29860 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29861 a->dw_attr_val.v.val_die_ref.external = 0;
29862 return true;
29863 default:
29864 break;
29865 }
29866 if (dwarf_strict)
29867 return false;
29868 }
29869 break;
29870 case DW_OP_const_type:
29871 case DW_OP_regval_type:
29872 case DW_OP_deref_type:
29873 case DW_OP_convert:
29874 case DW_OP_reinterpret:
29875 case DW_OP_GNU_const_type:
29876 case DW_OP_GNU_regval_type:
29877 case DW_OP_GNU_deref_type:
29878 case DW_OP_GNU_convert:
29879 case DW_OP_GNU_reinterpret:
29880 while (loc->dw_loc_next
29881 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29882 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29883 {
29884 dw_die_ref base1, base2;
29885 unsigned enc1, enc2, size1, size2;
29886 if (loc->dw_loc_opc == DW_OP_regval_type
29887 || loc->dw_loc_opc == DW_OP_deref_type
29888 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29889 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29890 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29891 else if (loc->dw_loc_oprnd1.val_class
29892 == dw_val_class_unsigned_const)
29893 break;
29894 else
29895 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29896 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29897 == dw_val_class_unsigned_const)
29898 break;
29899 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29900 gcc_assert (base1->die_tag == DW_TAG_base_type
29901 && base2->die_tag == DW_TAG_base_type);
29902 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29903 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29904 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29905 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29906 if (size1 == size2
29907 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29908 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29909 && loc != keep)
29910 || enc1 == enc2))
29911 {
29912 /* Optimize away next DW_OP_convert after
29913 adjusting LOC's base type die reference. */
29914 if (loc->dw_loc_opc == DW_OP_regval_type
29915 || loc->dw_loc_opc == DW_OP_deref_type
29916 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29917 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29918 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29919 else
29920 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29921 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29922 continue;
29923 }
29924 /* Don't change integer DW_OP_convert after e.g. floating
29925 point typed stack entry. */
29926 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29927 keep = loc->dw_loc_next;
29928 break;
29929 }
29930 break;
29931 default:
29932 break;
29933 }
29934 return true;
29935 }
29936
29937 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting of
29938 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29939 and the DW_OP_addr couldn't be resolved. resolve_addr has already
29940 removed the DW_AT_location attribute. This function attempts to
29941 add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
29942 or a DW_AT_const_value attribute, if possible. */
29943
29944 static void
29945 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29946 {
29947 if (!VAR_P (decl)
29948 || lookup_decl_die (decl) != die
29949 || DECL_EXTERNAL (decl)
29950 || !TREE_STATIC (decl)
29951 || DECL_INITIAL (decl) == NULL_TREE
29952 || DECL_P (DECL_INITIAL (decl))
29953 || get_AT (die, DW_AT_const_value))
29954 return;
29955
29956 tree init = DECL_INITIAL (decl);
29957 HOST_WIDE_INT offset = 0;
29958 /* For variables that have been optimized away and thus
29959 don't have a memory location, see if we can emit
29960 DW_AT_const_value instead. */
29961 if (tree_add_const_value_attribute (die, init))
29962 return;
29963 if (dwarf_strict && dwarf_version < 5)
29964 return;
29965 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29966 and ADDR_EXPR refers to a decl that has DW_AT_location or
29967 DW_AT_const_value (but isn't addressable, otherwise
29968 resolving the original DW_OP_addr wouldn't fail), see if
29969 we can add DW_OP_implicit_pointer. */
29970 STRIP_NOPS (init);
29971 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29972 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29973 {
29974 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29975 init = TREE_OPERAND (init, 0);
29976 STRIP_NOPS (init);
29977 }
29978 if (TREE_CODE (init) != ADDR_EXPR)
29979 return;
29980 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29981 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29982 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29983 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29984 && TREE_OPERAND (init, 0) != decl))
29985 {
29986 dw_die_ref ref;
29987 dw_loc_descr_ref l;
29988
29989 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29990 {
29991 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29992 if (!rtl)
29993 return;
29994 decl = SYMBOL_REF_DECL (rtl);
29995 }
29996 else
29997 decl = TREE_OPERAND (init, 0);
29998 ref = lookup_decl_die (decl);
29999 if (ref == NULL
30000 || (!get_AT (ref, DW_AT_location)
30001 && !get_AT (ref, DW_AT_const_value)))
30002 return;
30003 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30004 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30005 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30006 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30007 add_AT_loc (die, DW_AT_location, l);
30008 }
30009 }
30010
30011 /* Return NULL if L is a valid DWARF expression, otherwise return the
30012 first op that is not a valid DWARF expression. */
30013
30014 static dw_loc_descr_ref
30015 non_dwarf_expression (dw_loc_descr_ref l)
30016 {
30017 while (l)
30018 {
30019 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30020 return l;
30021 switch (l->dw_loc_opc)
30022 {
30023 case DW_OP_regx:
30024 case DW_OP_implicit_value:
30025 case DW_OP_stack_value:
30026 case DW_OP_implicit_pointer:
30027 case DW_OP_GNU_implicit_pointer:
30028 case DW_OP_GNU_parameter_ref:
30029 case DW_OP_piece:
30030 case DW_OP_bit_piece:
30031 return l;
30032 default:
30033 break;
30034 }
30035 l = l->dw_loc_next;
30036 }
30037 return NULL;
30038 }
30039
30040 /* Return an adjusted copy of EXPR:
30041 If it is an empty DWARF expression, return it.
30042 If it is a valid non-empty DWARF expression,
30043 return a copy of EXPR with DW_OP_deref appended to it.
30044 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30045 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30046 If it is a DWARF expression followed by DW_OP_stack_value, return a
30047 copy of the DWARF expression with nothing appended.
30048 Otherwise, return NULL. */
30049
30050 static dw_loc_descr_ref
30051 copy_deref_exprloc (dw_loc_descr_ref expr)
30052 {
30053 dw_loc_descr_ref tail = NULL;
30054
30055 if (expr == NULL)
30056 return NULL;
30057
30058 dw_loc_descr_ref l = non_dwarf_expression (expr);
30059 if (l && l->dw_loc_next)
30060 return NULL;
30061
30062 if (l)
30063 {
30064 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30065 tail = new_loc_descr ((enum dwarf_location_atom)
30066 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30067 0, 0);
30068 else
30069 switch (l->dw_loc_opc)
30070 {
30071 case DW_OP_regx:
30072 tail = new_loc_descr (DW_OP_bregx,
30073 l->dw_loc_oprnd1.v.val_unsigned, 0);
30074 break;
30075 case DW_OP_stack_value:
30076 break;
30077 default:
30078 return NULL;
30079 }
30080 }
30081 else
30082 tail = new_loc_descr (DW_OP_deref, 0, 0);
30083
30084 dw_loc_descr_ref ret = NULL, *p = &ret;
30085 while (expr != l)
30086 {
30087 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30088 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30089 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30090 p = &(*p)->dw_loc_next;
30091 expr = expr->dw_loc_next;
30092 }
30093 *p = tail;
30094 return ret;
30095 }
30096
30097 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30098 reference to a variable or argument, adjust it if needed and return:
30099 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
30100 attribute, if present, should be removed;
30101 0 if the attribute should be kept, perhaps with minor modifications, with no need to rescan;
30102 1 if the attribute has been successfully adjusted. */
30103
30104 static int
30105 optimize_string_length (dw_attr_node *a)
30106 {
30107 dw_loc_descr_ref l = AT_loc (a), lv;
30108 dw_die_ref die;
30109 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30110 {
30111 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30112 die = lookup_decl_die (decl);
30113 if (die)
30114 {
30115 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30116 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30117 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30118 }
30119 else
30120 return -1;
30121 }
30122 else
30123 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30124
30125 /* DWARF5 allows reference class, so we can then reference the DIE.
30126 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30127 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30128 {
30129 a->dw_attr_val.val_class = dw_val_class_die_ref;
30130 a->dw_attr_val.val_entry = NULL;
30131 a->dw_attr_val.v.val_die_ref.die = die;
30132 a->dw_attr_val.v.val_die_ref.external = 0;
30133 return 0;
30134 }
30135
30136 dw_attr_node *av = get_AT (die, DW_AT_location);
30137 dw_loc_list_ref d;
30138 bool non_dwarf_expr = false;
30139
30140 if (av == NULL)
30141 return dwarf_strict ? -1 : 0;
30142 switch (AT_class (av))
30143 {
30144 case dw_val_class_loc_list:
30145 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30146 if (d->expr && non_dwarf_expression (d->expr))
30147 non_dwarf_expr = true;
30148 break;
30149 case dw_val_class_view_list:
30150 gcc_unreachable ();
30151 case dw_val_class_loc:
30152 lv = AT_loc (av);
30153 if (lv == NULL)
30154 return dwarf_strict ? -1 : 0;
30155 if (non_dwarf_expression (lv))
30156 non_dwarf_expr = true;
30157 break;
30158 default:
30159 return dwarf_strict ? -1 : 0;
30160 }
30161
30162 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30163 into DW_OP_call4 or DW_OP_GNU_variable_value into
30164 DW_OP_call4 DW_OP_deref, do so. */
30165 if (!non_dwarf_expr
30166 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30167 {
30168 l->dw_loc_opc = DW_OP_call4;
30169 if (l->dw_loc_next)
30170 l->dw_loc_next = NULL;
30171 else
30172 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30173 return 0;
30174 }
30175
30176 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30177 copy over the DW_AT_location attribute from die to a. */
30178 if (l->dw_loc_next != NULL)
30179 {
30180 a->dw_attr_val = av->dw_attr_val;
30181 return 1;
30182 }
30183
30184 dw_loc_list_ref list, *p;
30185 switch (AT_class (av))
30186 {
30187 case dw_val_class_loc_list:
30188 p = &list;
30189 list = NULL;
30190 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30191 {
30192 lv = copy_deref_exprloc (d->expr);
30193 if (lv)
30194 {
30195 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30196 p = &(*p)->dw_loc_next;
30197 }
30198 else if (!dwarf_strict && d->expr)
30199 return 0;
30200 }
30201 if (list == NULL)
30202 return dwarf_strict ? -1 : 0;
30203 a->dw_attr_val.val_class = dw_val_class_loc_list;
30204 gen_llsym (list);
30205 *AT_loc_list_ptr (a) = list;
30206 return 1;
30207 case dw_val_class_loc:
30208 lv = copy_deref_exprloc (AT_loc (av));
30209 if (lv == NULL)
30210 return dwarf_strict ? -1 : 0;
30211 a->dw_attr_val.v.val_loc = lv;
30212 return 1;
30213 default:
30214 gcc_unreachable ();
30215 }
30216 }
30217
30218 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30219 an address in the .rodata section if the string literal is emitted there;
30220 if it isn't found in .rodata, remove the containing location list or
30221 replace DW_AT_const_value with DW_AT_location and an empty location
30222 expression. Similarly for SYMBOL_REFs: keep only those that refer
30223 to something that has been emitted in the current CU. */
30224
30225 static void
30226 resolve_addr (dw_die_ref die)
30227 {
30228 dw_die_ref c;
30229 dw_attr_node *a;
30230 dw_loc_list_ref *curr, *start, loc;
30231 unsigned ix;
30232 bool remove_AT_byte_size = false;
30233
30234 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30235 switch (AT_class (a))
30236 {
30237 case dw_val_class_loc_list:
30238 start = curr = AT_loc_list_ptr (a);
30239 loc = *curr;
30240 gcc_assert (loc);
30241 /* The same list can be referenced more than once. See if we have
30242 already recorded the result from a previous pass. */
30243 if (loc->replaced)
30244 *curr = loc->dw_loc_next;
30245 else if (!loc->resolved_addr)
30246 {
30247 /* As things stand, we do not expect or allow one die to
30248 reference a suffix of another die's location list chain.
30249 References must be identical or completely separate.
30250 There is therefore no need to cache the result of this
30251 pass on any list other than the first; doing so
30252 would lead to unnecessary writes. */
30253 while (*curr)
30254 {
30255 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30256 if (!resolve_addr_in_expr (a, (*curr)->expr))
30257 {
30258 dw_loc_list_ref next = (*curr)->dw_loc_next;
30259 dw_loc_descr_ref l = (*curr)->expr;
30260
30261 if (next && (*curr)->ll_symbol)
30262 {
30263 gcc_assert (!next->ll_symbol);
30264 next->ll_symbol = (*curr)->ll_symbol;
30265 next->vl_symbol = (*curr)->vl_symbol;
30266 }
30267 if (dwarf_split_debug_info)
30268 remove_loc_list_addr_table_entries (l);
30269 *curr = next;
30270 }
30271 else
30272 {
30273 mark_base_types ((*curr)->expr);
30274 curr = &(*curr)->dw_loc_next;
30275 }
30276 }
30277 if (loc == *start)
30278 loc->resolved_addr = 1;
30279 else
30280 {
30281 loc->replaced = 1;
30282 loc->dw_loc_next = *start;
30283 }
30284 }
30285 if (!*start)
30286 {
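/* remove_AT shrinks the attribute vector, so step IX back to avoid skipping the element that moves into this slot. */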
30287 remove_AT (die, a->dw_attr);
30288 ix--;
30289 }
30290 break;
30291 case dw_val_class_view_list:
30292 {
30293 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30294 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30295 dw_val_node *llnode
30296 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30297 /* If we no longer have a loclist, or it no longer needs
30298 views, drop this attribute. */
30299 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30300 {
30301 remove_AT (die, a->dw_attr);
30302 ix--;
30303 }
30304 break;
30305 }
30306 case dw_val_class_loc:
30307 {
30308 dw_loc_descr_ref l = AT_loc (a);
30309 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30310 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30311 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30312 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30313 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30314 with DW_FORM_ref referencing the same DIE as
30315 DW_OP_GNU_variable_value used to reference. */
30316 if (a->dw_attr == DW_AT_string_length
30317 && l
30318 && l->dw_loc_opc == DW_OP_GNU_variable_value
30319 && (l->dw_loc_next == NULL
30320 || (l->dw_loc_next->dw_loc_next == NULL
30321 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30322 {
30323 switch (optimize_string_length (a))
30324 {
30325 case -1:
30326 remove_AT (die, a->dw_attr);
30327 ix--;
30328 /* If we drop DW_AT_string_length, we also need to drop
30329 DW_AT_{string_length_,}byte_size. */
30330 remove_AT_byte_size = true;
30331 continue;
30332 default:
30333 break;
30334 case 1:
30335 /* Even if we keep the optimized DW_AT_string_length,
30336 it might have changed AT_class, so process it again. */
30337 ix--;
30338 continue;
30339 }
30340 }
30341 /* For -gdwarf-2 don't attempt to optimize
30342 DW_AT_data_member_location containing
30343 DW_OP_plus_uconst - older consumers might
30344 rely on it being that op instead of a more complex,
30345 but shorter, location description. */
30346 if ((dwarf_version > 2
30347 || a->dw_attr != DW_AT_data_member_location
30348 || l == NULL
30349 || l->dw_loc_opc != DW_OP_plus_uconst
30350 || l->dw_loc_next != NULL)
30351 && !resolve_addr_in_expr (a, l))
30352 {
30353 if (dwarf_split_debug_info)
30354 remove_loc_list_addr_table_entries (l);
30355 if (l != NULL
30356 && l->dw_loc_next == NULL
30357 && l->dw_loc_opc == DW_OP_addr
30358 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30359 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30360 && a->dw_attr == DW_AT_location)
30361 {
30362 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30363 remove_AT (die, a->dw_attr);
30364 ix--;
30365 optimize_location_into_implicit_ptr (die, decl);
30366 break;
30367 }
30368 if (a->dw_attr == DW_AT_string_length)
30369 /* If we drop DW_AT_string_length, we also need to drop
30370 DW_AT_{string_length_,}byte_size. */
30371 remove_AT_byte_size = true;
30372 remove_AT (die, a->dw_attr);
30373 ix--;
30374 }
30375 else
30376 mark_base_types (l);
30377 }
30378 break;
30379 case dw_val_class_addr:
30380 if (a->dw_attr == DW_AT_const_value
30381 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30382 {
30383 if (AT_index (a) != NOT_INDEXED)
30384 remove_addr_table_entry (a->dw_attr_val.val_entry);
30385 remove_AT (die, a->dw_attr);
30386 ix--;
30387 }
30388 if ((die->die_tag == DW_TAG_call_site
30389 && a->dw_attr == DW_AT_call_origin)
30390 || (die->die_tag == DW_TAG_GNU_call_site
30391 && a->dw_attr == DW_AT_abstract_origin))
30392 {
30393 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30394 dw_die_ref tdie = lookup_decl_die (tdecl);
30395 dw_die_ref cdie;
30396 if (tdie == NULL
30397 && DECL_EXTERNAL (tdecl)
30398 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30399 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30400 {
30401 dw_die_ref pdie = cdie;
30402 /* Make sure we don't add these DIEs into type units.
30403 We could emit skeleton DIEs for context (namespaces,
30404 outer structs/classes) and a skeleton DIE for the
30405 innermost context with DW_AT_signature pointing to the
30406 type unit. See PR78835. */
30407 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30408 pdie = pdie->die_parent;
30409 if (pdie == NULL)
30410 {
30411 /* Creating a full DIE for tdecl is overly expensive and,
30412 at this point, even wrong when in the LTO phase,
30413 as it can end up generating new type DIEs that we didn't
30414 output, and thus optimize_external_refs would crash. */
30415 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30416 add_AT_flag (tdie, DW_AT_external, 1);
30417 add_AT_flag (tdie, DW_AT_declaration, 1);
30418 add_linkage_attr (tdie, tdecl);
30419 add_name_and_src_coords_attributes (tdie, tdecl, true);
30420 equate_decl_number_to_die (tdecl, tdie);
30421 }
30422 }
30423 if (tdie)
30424 {
30425 a->dw_attr_val.val_class = dw_val_class_die_ref;
30426 a->dw_attr_val.v.val_die_ref.die = tdie;
30427 a->dw_attr_val.v.val_die_ref.external = 0;
30428 }
30429 else
30430 {
30431 if (AT_index (a) != NOT_INDEXED)
30432 remove_addr_table_entry (a->dw_attr_val.val_entry);
30433 remove_AT (die, a->dw_attr);
30434 ix--;
30435 }
30436 }
30437 break;
30438 default:
30439 break;
30440 }
30441
30442 if (remove_AT_byte_size)
30443 remove_AT (die, dwarf_version >= 5
30444 ? DW_AT_string_length_byte_size
30445 : DW_AT_byte_size);
30446
30447 FOR_EACH_CHILD (die, c, resolve_addr (c));
30448 }
30449 \f
30450 /* Helper routines for optimize_location_lists.
30451 This pass tries to share identical location lists in the .debug_loc
30452 section. */
30453
30454 /* Iteratively hash operands of LOC opcode into HSTATE. */
30455
30456 static void
30457 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30458 {
30459 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30460 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30461
30462 switch (loc->dw_loc_opc)
30463 {
30464 case DW_OP_const4u:
30465 case DW_OP_const8u:
30466 if (loc->dtprel)
30467 goto hash_addr;
30468 /* FALLTHRU */
30469 case DW_OP_const1u:
30470 case DW_OP_const1s:
30471 case DW_OP_const2u:
30472 case DW_OP_const2s:
30473 case DW_OP_const4s:
30474 case DW_OP_const8s:
30475 case DW_OP_constu:
30476 case DW_OP_consts:
30477 case DW_OP_pick:
30478 case DW_OP_plus_uconst:
30479 case DW_OP_breg0:
30480 case DW_OP_breg1:
30481 case DW_OP_breg2:
30482 case DW_OP_breg3:
30483 case DW_OP_breg4:
30484 case DW_OP_breg5:
30485 case DW_OP_breg6:
30486 case DW_OP_breg7:
30487 case DW_OP_breg8:
30488 case DW_OP_breg9:
30489 case DW_OP_breg10:
30490 case DW_OP_breg11:
30491 case DW_OP_breg12:
30492 case DW_OP_breg13:
30493 case DW_OP_breg14:
30494 case DW_OP_breg15:
30495 case DW_OP_breg16:
30496 case DW_OP_breg17:
30497 case DW_OP_breg18:
30498 case DW_OP_breg19:
30499 case DW_OP_breg20:
30500 case DW_OP_breg21:
30501 case DW_OP_breg22:
30502 case DW_OP_breg23:
30503 case DW_OP_breg24:
30504 case DW_OP_breg25:
30505 case DW_OP_breg26:
30506 case DW_OP_breg27:
30507 case DW_OP_breg28:
30508 case DW_OP_breg29:
30509 case DW_OP_breg30:
30510 case DW_OP_breg31:
30511 case DW_OP_regx:
30512 case DW_OP_fbreg:
30513 case DW_OP_piece:
30514 case DW_OP_deref_size:
30515 case DW_OP_xderef_size:
30516 hstate.add_object (val1->v.val_int);
30517 break;
30518 case DW_OP_skip:
30519 case DW_OP_bra:
30520 {
30521 int offset;
30522
30523 gcc_assert (val1->val_class == dw_val_class_loc);
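/* Hash the branch target as an offset from the end of this op; DW_OP_skip/DW_OP_bra encode as a 1-byte opcode plus a 2-byte operand, hence the + 3. */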
30524 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30525 hstate.add_object (offset);
30526 }
30527 break;
30528 case DW_OP_implicit_value:
30529 hstate.add_object (val1->v.val_unsigned);
30530 switch (val2->val_class)
30531 {
30532 case dw_val_class_const:
30533 hstate.add_object (val2->v.val_int);
30534 break;
30535 case dw_val_class_vec:
30536 {
30537 unsigned int elt_size = val2->v.val_vec.elt_size;
30538 unsigned int len = val2->v.val_vec.length;
30539
30540 hstate.add_int (elt_size);
30541 hstate.add_int (len);
30542 hstate.add (val2->v.val_vec.array, len * elt_size);
30543 }
30544 break;
30545 case dw_val_class_const_double:
30546 hstate.add_object (val2->v.val_double.low);
30547 hstate.add_object (val2->v.val_double.high);
30548 break;
30549 case dw_val_class_wide_int:
30550 hstate.add (val2->v.val_wide->get_val (),
30551 get_full_len (*val2->v.val_wide)
30552 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30553 break;
30554 case dw_val_class_addr:
30555 inchash::add_rtx (val2->v.val_addr, hstate);
30556 break;
30557 default:
30558 gcc_unreachable ();
30559 }
30560 break;
30561 case DW_OP_bregx:
30562 case DW_OP_bit_piece:
30563 hstate.add_object (val1->v.val_int);
30564 hstate.add_object (val2->v.val_int);
30565 break;
30566 case DW_OP_addr:
30567 hash_addr:
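/* Fold in an arbitrary marker byte first so that a dtprel (TLS-relative) address never hashes the same as a plain address. */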
30568 if (loc->dtprel)
30569 {
30570 unsigned char dtprel = 0xd1;
30571 hstate.add_object (dtprel);
30572 }
30573 inchash::add_rtx (val1->v.val_addr, hstate);
30574 break;
30575 case DW_OP_GNU_addr_index:
30576 case DW_OP_addrx:
30577 case DW_OP_GNU_const_index:
30578 case DW_OP_constx:
30579 {
30580 if (loc->dtprel)
30581 {
30582 unsigned char dtprel = 0xd1;
30583 hstate.add_object (dtprel);
30584 }
30585 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30586 }
30587 break;
30588 case DW_OP_implicit_pointer:
30589 case DW_OP_GNU_implicit_pointer:
30590 hstate.add_int (val2->v.val_int);
30591 break;
30592 case DW_OP_entry_value:
30593 case DW_OP_GNU_entry_value:
30594 hstate.add_object (val1->v.val_loc);
30595 break;
30596 case DW_OP_regval_type:
30597 case DW_OP_deref_type:
30598 case DW_OP_GNU_regval_type:
30599 case DW_OP_GNU_deref_type:
30600 {
30601 unsigned int byte_size
30602 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30603 unsigned int encoding
30604 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30605 hstate.add_object (val1->v.val_int);
30606 hstate.add_object (byte_size);
30607 hstate.add_object (encoding);
30608 }
30609 break;
30610 case DW_OP_convert:
30611 case DW_OP_reinterpret:
30612 case DW_OP_GNU_convert:
30613 case DW_OP_GNU_reinterpret:
30614 if (val1->val_class == dw_val_class_unsigned_const)
30615 {
30616 hstate.add_object (val1->v.val_unsigned);
30617 break;
30618 }
30619 /* FALLTHRU */
30620 case DW_OP_const_type:
30621 case DW_OP_GNU_const_type:
30622 {
30623 unsigned int byte_size
30624 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30625 unsigned int encoding
30626 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30627 hstate.add_object (byte_size);
30628 hstate.add_object (encoding);
30629 if (loc->dw_loc_opc != DW_OP_const_type
30630 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30631 break;
30632 hstate.add_object (val2->val_class);
30633 switch (val2->val_class)
30634 {
30635 case dw_val_class_const:
30636 hstate.add_object (val2->v.val_int);
30637 break;
30638 case dw_val_class_vec:
30639 {
30640 unsigned int elt_size = val2->v.val_vec.elt_size;
30641 unsigned int len = val2->v.val_vec.length;
30642
30643 hstate.add_object (elt_size);
30644 hstate.add_object (len);
30645 hstate.add (val2->v.val_vec.array, len * elt_size);
30646 }
30647 break;
30648 case dw_val_class_const_double:
30649 hstate.add_object (val2->v.val_double.low);
30650 hstate.add_object (val2->v.val_double.high);
30651 break;
30652 case dw_val_class_wide_int:
30653 hstate.add (val2->v.val_wide->get_val (),
30654 get_full_len (*val2->v.val_wide)
30655 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30656 break;
30657 default:
30658 gcc_unreachable ();
30659 }
30660 }
30661 break;
30662
30663 default:
30664 /* Other codes have no operands. */
30665 break;
30666 }
30667 }
30668
30669 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30670
30671 static inline void
30672 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30673 {
30674 dw_loc_descr_ref l;
30675 bool sizes_computed = false;
30676 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30677 size_of_locs (loc);
30678
30679 for (l = loc; l != NULL; l = l->dw_loc_next)
30680 {
30681 enum dwarf_location_atom opc = l->dw_loc_opc;
30682 hstate.add_object (opc);
30683 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30684 {
30685 size_of_locs (loc);
30686 sizes_computed = true;
30687 }
30688 hash_loc_operands (l, hstate);
30689 }
30690 }
30691
30692 /* Compute hash of the whole location list LIST_HEAD. */
30693
30694 static inline void
30695 hash_loc_list (dw_loc_list_ref list_head)
30696 {
30697 dw_loc_list_ref curr = list_head;
30698 inchash::hash hstate;
30699
30700 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30701 {
30702 hstate.add (curr->begin, strlen (curr->begin) + 1);
30703 hstate.add (curr->end, strlen (curr->end) + 1);
30704 hstate.add_object (curr->vbegin);
30705 hstate.add_object (curr->vend);
30706 if (curr->section)
30707 hstate.add (curr->section, strlen (curr->section) + 1);
30708 hash_locs (curr->expr, hstate);
30709 }
30710 list_head->hash = hstate.end ();
30711 }
30712
30713 /* Return true if X and Y opcodes have the same operands. */
30714
30715 static inline bool
30716 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30717 {
30718 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30719 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30720 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30721 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30722
30723 switch (x->dw_loc_opc)
30724 {
30725 case DW_OP_const4u:
30726 case DW_OP_const8u:
30727 if (x->dtprel)
30728 goto hash_addr;
30729 /* FALLTHRU */
30730 case DW_OP_const1u:
30731 case DW_OP_const1s:
30732 case DW_OP_const2u:
30733 case DW_OP_const2s:
30734 case DW_OP_const4s:
30735 case DW_OP_const8s:
30736 case DW_OP_constu:
30737 case DW_OP_consts:
30738 case DW_OP_pick:
30739 case DW_OP_plus_uconst:
30740 case DW_OP_breg0:
30741 case DW_OP_breg1:
30742 case DW_OP_breg2:
30743 case DW_OP_breg3:
30744 case DW_OP_breg4:
30745 case DW_OP_breg5:
30746 case DW_OP_breg6:
30747 case DW_OP_breg7:
30748 case DW_OP_breg8:
30749 case DW_OP_breg9:
30750 case DW_OP_breg10:
30751 case DW_OP_breg11:
30752 case DW_OP_breg12:
30753 case DW_OP_breg13:
30754 case DW_OP_breg14:
30755 case DW_OP_breg15:
30756 case DW_OP_breg16:
30757 case DW_OP_breg17:
30758 case DW_OP_breg18:
30759 case DW_OP_breg19:
30760 case DW_OP_breg20:
30761 case DW_OP_breg21:
30762 case DW_OP_breg22:
30763 case DW_OP_breg23:
30764 case DW_OP_breg24:
30765 case DW_OP_breg25:
30766 case DW_OP_breg26:
30767 case DW_OP_breg27:
30768 case DW_OP_breg28:
30769 case DW_OP_breg29:
30770 case DW_OP_breg30:
30771 case DW_OP_breg31:
30772 case DW_OP_regx:
30773 case DW_OP_fbreg:
30774 case DW_OP_piece:
30775 case DW_OP_deref_size:
30776 case DW_OP_xderef_size:
30777 return valx1->v.val_int == valy1->v.val_int;
30778 case DW_OP_skip:
30779 case DW_OP_bra:
30780 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30781 can cause irrelevant differences in dw_loc_addr. */
30782 gcc_assert (valx1->val_class == dw_val_class_loc
30783 && valy1->val_class == dw_val_class_loc
30784 && (dwarf_split_debug_info
30785 || x->dw_loc_addr == y->dw_loc_addr));
30786 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30787 case DW_OP_implicit_value:
30788 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30789 || valx2->val_class != valy2->val_class)
30790 return false;
30791 switch (valx2->val_class)
30792 {
30793 case dw_val_class_const:
30794 return valx2->v.val_int == valy2->v.val_int;
30795 case dw_val_class_vec:
30796 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30797 && valx2->v.val_vec.length == valy2->v.val_vec.length
30798 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30799 valx2->v.val_vec.elt_size
30800 * valx2->v.val_vec.length) == 0;
30801 case dw_val_class_const_double:
30802 return valx2->v.val_double.low == valy2->v.val_double.low
30803 && valx2->v.val_double.high == valy2->v.val_double.high;
30804 case dw_val_class_wide_int:
30805 return *valx2->v.val_wide == *valy2->v.val_wide;
30806 case dw_val_class_addr:
30807 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30808 default:
30809 gcc_unreachable ();
30810 }
30811 case DW_OP_bregx:
30812 case DW_OP_bit_piece:
30813 return valx1->v.val_int == valy1->v.val_int
30814 && valx2->v.val_int == valy2->v.val_int;
30815 case DW_OP_addr:
30816 hash_addr:
30817 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30818 case DW_OP_GNU_addr_index:
30819 case DW_OP_addrx:
30820 case DW_OP_GNU_const_index:
30821 case DW_OP_constx:
30822 {
30823 rtx ax1 = valx1->val_entry->addr.rtl;
30824 rtx ay1 = valy1->val_entry->addr.rtl;
30825 return rtx_equal_p (ax1, ay1);
30826 }
30827 case DW_OP_implicit_pointer:
30828 case DW_OP_GNU_implicit_pointer:
30829 return valx1->val_class == dw_val_class_die_ref
30830 && valx1->val_class == valy1->val_class
30831 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30832 && valx2->v.val_int == valy2->v.val_int;
30833 case DW_OP_entry_value:
30834 case DW_OP_GNU_entry_value:
30835 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30836 case DW_OP_const_type:
30837 case DW_OP_GNU_const_type:
30838 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30839 || valx2->val_class != valy2->val_class)
30840 return false;
30841 switch (valx2->val_class)
30842 {
30843 case dw_val_class_const:
30844 return valx2->v.val_int == valy2->v.val_int;
30845 case dw_val_class_vec:
30846 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30847 && valx2->v.val_vec.length == valy2->v.val_vec.length
30848 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30849 valx2->v.val_vec.elt_size
30850 * valx2->v.val_vec.length) == 0;
30851 case dw_val_class_const_double:
30852 return valx2->v.val_double.low == valy2->v.val_double.low
30853 && valx2->v.val_double.high == valy2->v.val_double.high;
30854 case dw_val_class_wide_int:
30855 return *valx2->v.val_wide == *valy2->v.val_wide;
30856 default:
30857 gcc_unreachable ();
30858 }
30859 case DW_OP_regval_type:
30860 case DW_OP_deref_type:
30861 case DW_OP_GNU_regval_type:
30862 case DW_OP_GNU_deref_type:
30863 return valx1->v.val_int == valy1->v.val_int
30864 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30865 case DW_OP_convert:
30866 case DW_OP_reinterpret:
30867 case DW_OP_GNU_convert:
30868 case DW_OP_GNU_reinterpret:
30869 if (valx1->val_class != valy1->val_class)
30870 return false;
30871 if (valx1->val_class == dw_val_class_unsigned_const)
30872 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30873 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30874 case DW_OP_GNU_parameter_ref:
30875 return valx1->val_class == dw_val_class_die_ref
30876 && valx1->val_class == valy1->val_class
30877 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30878 default:
30879 /* Other codes have no operands. */
30880 return true;
30881 }
30882 }
30883
30884 /* Return true if DWARF location expressions X and Y are the same. */
30885
30886 static inline bool
30887 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30888 {
30889 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30890 if (x->dw_loc_opc != y->dw_loc_opc
30891 || x->dtprel != y->dtprel
30892 || !compare_loc_operands (x, y))
30893 break;
30894 return x == NULL && y == NULL;
30895 }
30896
30897 /* Hashtable helpers. */
30898
30899 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30900 {
30901 static inline hashval_t hash (const dw_loc_list_struct *);
30902 static inline bool equal (const dw_loc_list_struct *,
30903 const dw_loc_list_struct *);
30904 };
30905
30906 /* Return precomputed hash of location list X. */
30907
30908 inline hashval_t
30909 loc_list_hasher::hash (const dw_loc_list_struct *x)
30910 {
30911 return x->hash;
30912 }
30913
30914 /* Return true if location lists A and B are the same. */
30915
30916 inline bool
30917 loc_list_hasher::equal (const dw_loc_list_struct *a,
30918 const dw_loc_list_struct *b)
30919 {
30920 if (a == b)
30921 return 1;
30922 if (a->hash != b->hash)
30923 return 0;
30924 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30925 if (strcmp (a->begin, b->begin) != 0
30926 || strcmp (a->end, b->end) != 0
30927 || (a->section == NULL) != (b->section == NULL)
30928 || (a->section && strcmp (a->section, b->section) != 0)
30929 || a->vbegin != b->vbegin || a->vend != b->vend
30930 || !compare_locs (a->expr, b->expr))
30931 break;
30932 return a == NULL && b == NULL;
30933 }
30934
30935 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30936
30937
30938 /* Recursively optimize location lists referenced from DIE
30939 children and share them whenever possible. */
30940
30941 static void
30942 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30943 {
30944 dw_die_ref c;
30945 dw_attr_node *a;
30946 unsigned ix;
30947 dw_loc_list_struct **slot;
30948 bool drop_locviews = false;
30949 bool has_locviews = false;
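/* DROP_LOCVIEWS is set when a location list on this DIE loses its
   location-view symbol, either because it has no views or because it
   is replaced by a shared copy without one; in that case the
   DW_AT_GNU_locviews attribute recorded via HAS_LOCVIEWS is removed
   below. */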
30950
30951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30952 if (AT_class (a) == dw_val_class_loc_list)
30953 {
30954 dw_loc_list_ref list = AT_loc_list (a);
30955 /* TODO: perform some optimizations here, before hashing
30956 it and storing it into the hash table. */
30957 hash_loc_list (list);
30958 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30959 if (*slot == NULL)
30960 {
30961 *slot = list;
30962 if (loc_list_has_views (list))
30963 gcc_assert (list->vl_symbol);
30964 else if (list->vl_symbol)
30965 {
30966 drop_locviews = true;
30967 list->vl_symbol = NULL;
30968 }
30969 }
30970 else
30971 {
30972 if (list->vl_symbol && !(*slot)->vl_symbol)
30973 drop_locviews = true;
30974 a->dw_attr_val.v.val_loc_list = *slot;
30975 }
30976 }
30977 else if (AT_class (a) == dw_val_class_view_list)
30978 {
30979 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30980 has_locviews = true;
30981 }
30982
30983
30984 if (drop_locviews && has_locviews)
30985 remove_AT (die, DW_AT_GNU_locviews);
30986
30987 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30988 }
30989
30990
30991 /* Recursively assign each location list a unique index into the debug_addr
30992 section. */
30993
30994 static void
30995 index_location_lists (dw_die_ref die)
30996 {
30997 dw_die_ref c;
30998 dw_attr_node *a;
30999 unsigned ix;
31000
31001 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31002 if (AT_class (a) == dw_val_class_loc_list)
31003 {
31004 dw_loc_list_ref list = AT_loc_list (a);
31005 dw_loc_list_ref curr;
31006 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31007 {
31008 /* Don't index an entry that has already been indexed
31009 or won't be output. Make sure skip_loc_list_entry doesn't
31010 call size_of_locs, because that might cause a circular dependency:
31011 index_location_lists would require address table indexes to be
31012 computed, yet it adds new indexes through add_addr_table_entry,
31013 while address table index computation requires no new additions
31014 to the hash table. In the rare case of a DWARF[234] location
31015 expression of 64KB or more, we'll just waste an unused address
31016 table entry for it. */
31017 if (curr->begin_entry != NULL
31018 || skip_loc_list_entry (curr))
31019 continue;
31020
31021 curr->begin_entry
31022 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31023 }
31024 }
31025
31026 FOR_EACH_CHILD (die, c, index_location_lists (c));
31027 }
31028
31029 /* Optimize location lists referenced from DIE
31030 children and share them whenever possible. */
31031
31032 static void
31033 optimize_location_lists (dw_die_ref die)
31034 {
31035 loc_list_hash_type htab (500);
31036 optimize_location_lists_1 (die, &htab);
31037 }
31038 \f
31039 /* Traverse the limbo die list, and add parent/child links. The only
31040 dies without parents that should be here are concrete instances of
31041 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31042 For concrete instances, we can get the parent die from the abstract
31043 instance. */
31044
31045 static void
31046 flush_limbo_die_list (void)
31047 {
31048 limbo_die_node *node;
31049
31050 /* get_context_die calls force_decl_die, which can put new DIEs on the
31051 limbo list in LTO mode when nested functions are put in a different
31052 partition than that of their parent function. */
31053 while ((node = limbo_die_list))
31054 {
31055 dw_die_ref die = node->die;
31056 limbo_die_list = node->next;
31057
31058 if (die->die_parent == NULL)
31059 {
31060 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31061
31062 if (origin && origin->die_parent)
31063 add_child_die (origin->die_parent, die);
31064 else if (is_cu_die (die))
31065 ;
31066 else if (seen_error ())
31067 /* It's OK to be confused by errors in the input. */
31068 add_child_die (comp_unit_die (), die);
31069 else
31070 {
31071 /* In certain situations, the lexical block containing a
31072 nested function can be optimized away, which results
31073 in the nested function die being orphaned. Likewise
31074 with the return type of that nested function. Force
31075 this to be a child of the containing function.
31076
31077 It may happen that even the containing function got fully
31078 inlined and optimized out. In that case we are lost and
31079 assign the empty child. This should not be a big issue as
31080 the function is likely unreachable too. */
31081 gcc_assert (node->created_for);
31082
31083 if (DECL_P (node->created_for))
31084 origin = get_context_die (DECL_CONTEXT (node->created_for));
31085 else if (TYPE_P (node->created_for))
31086 origin = scope_die_for (node->created_for, comp_unit_die ());
31087 else
31088 origin = comp_unit_die ();
31089
31090 add_child_die (origin, die);
31091 }
31092 }
31093 }
31094 }
31095
31096 /* Reset DIEs so we can output them again. */
31097
31098 static void
31099 reset_dies (dw_die_ref die)
31100 {
31101 dw_die_ref c;
31102
31103 /* Remove stuff we re-generate. */
31104 die->die_mark = 0;
31105 die->die_offset = 0;
31106 die->die_abbrev = 0;
31107 remove_AT (die, DW_AT_sibling);
31108
31109 FOR_EACH_CHILD (die, c, reset_dies (c));
31110 }
31111
31112 /* Output stuff that dwarf requires at the end of every file,
31113 and generate the DWARF-2 debugging info. */
31114
31115 static void
31116 dwarf2out_finish (const char *filename)
31117 {
31118 comdat_type_node *ctnode;
31119 dw_die_ref main_comp_unit_die;
31120 unsigned char checksum[16];
31121 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31122
31123 /* Flush out any latecomers to the limbo party. */
31124 flush_limbo_die_list ();
31125
31126 if (inline_entry_data_table)
31127 gcc_assert (inline_entry_data_table->elements () == 0);
31128
31129 if (flag_checking)
31130 {
31131 verify_die (comp_unit_die ());
31132 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31133 verify_die (node->die);
31134 }
31135
31136 /* We shouldn't have any symbols with delayed asm names for
31137 DIEs generated after early finish. */
31138 gcc_assert (deferred_asm_name == NULL);
31139
31140 gen_remaining_tmpl_value_param_die_attribute ();
31141
31142 if (flag_generate_lto || flag_generate_offload)
31143 {
31144 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31145
31146 /* Prune stuff so that dwarf2out_finish runs successfully
31147 for the fat part of the object. */
31148 reset_dies (comp_unit_die ());
31149 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31150 reset_dies (node->die);
31151
31152 hash_table<comdat_type_hasher> comdat_type_table (100);
31153 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31154 {
31155 comdat_type_node **slot
31156 = comdat_type_table.find_slot (ctnode, INSERT);
31157
31158 /* Don't reset types twice. */
31159 if (*slot != HTAB_EMPTY_ENTRY)
31160 continue;
31161
31162 /* Remove the pointer to the line table. */
31163 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31164
31165 if (debug_info_level >= DINFO_LEVEL_TERSE)
31166 reset_dies (ctnode->root_die);
31167
31168 *slot = ctnode;
31169 }
31170
31171 /* Reset the CU DIE symbol so we don't output it twice. */
31172 comp_unit_die ()->die_id.die_symbol = NULL;
31173
31174 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31175 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31176 if (have_macinfo)
31177 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31178
31179 /* Remove indirect string decisions. */
31180 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31181 if (debug_line_str_hash)
31182 {
31183 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31184 debug_line_str_hash = NULL;
31185 }
31186 }
31187
31188 #if ENABLE_ASSERT_CHECKING
31189 {
31190 dw_die_ref die = comp_unit_die (), c;
31191 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31192 }
31193 #endif
31194 resolve_addr (comp_unit_die ());
31195 move_marked_base_types ();
31196
31197 if (dump_file)
31198 {
31199 fprintf (dump_file, "DWARF for %s\n", filename);
31200 print_die (comp_unit_die (), dump_file);
31201 }
31202
31203 /* Initialize sections and labels used for actual assembler output. */
31204 unsigned generation = init_sections_and_labels (false);
31205
31206 /* Traverse the DIEs and add sibling attributes to those DIEs that
31207 have children. */
31208 add_sibling_attributes (comp_unit_die ());
31209 limbo_die_node *node;
31210 for (node = cu_die_list; node; node = node->next)
31211 add_sibling_attributes (node->die);
31212 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31213 add_sibling_attributes (ctnode->root_die);
31214
31215 /* When splitting DWARF info, we put some attributes in the
31216 skeleton compile_unit DIE that remains in the .o, while
31217 most attributes go in the DWO compile_unit_die. */
31218 if (dwarf_split_debug_info)
31219 {
31220 limbo_die_node *cu;
31221 main_comp_unit_die = gen_compile_unit_die (NULL);
31222 if (dwarf_version >= 5)
31223 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
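/* gen_compile_unit_die put the new skeleton DIE at the head of the
   limbo list; move it onto cu_die_list so it is output together
   with the other compilation units. */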
31224 cu = limbo_die_list;
31225 gcc_assert (cu->die == main_comp_unit_die);
31226 limbo_die_list = limbo_die_list->next;
31227 cu->next = cu_die_list;
31228 cu_die_list = cu;
31229 }
31230 else
31231 main_comp_unit_die = comp_unit_die ();
31232
31233 /* Output a terminator label for the .text section. */
31234 switch_to_section (text_section);
31235 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31236 if (cold_text_section)
31237 {
31238 switch_to_section (cold_text_section);
31239 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31240 }
31241
31242 /* We can only use the low/high_pc attributes if all of the code was
31243 in .text. */
31244 if (!have_multiple_function_sections
31245 || (dwarf_version < 3 && dwarf_strict))
31246 {
31247 /* Don't add if the CU has no associated code. */
31248 if (text_section_used)
31249 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31250 text_end_label, true);
31251 }
31252 else
31253 {
31254 unsigned fde_idx;
31255 dw_fde_ref fde;
31256 bool range_list_added = false;
31257
31258 if (text_section_used)
31259 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31260 text_end_label, &range_list_added, true);
31261 if (cold_text_section_used)
31262 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31263 cold_end_label, &range_list_added, true);
31264
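/* Add a range for each function that ended up outside the standard
   text sections (e.g. with -ffunction-sections or hot/cold
   partitioning), skipping functions whose debug output is
   suppressed. */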
31265 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31266 {
31267 if (DECL_IGNORED_P (fde->decl))
31268 continue;
31269 if (!fde->in_std_section)
31270 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31271 fde->dw_fde_end, &range_list_added,
31272 true);
31273 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31274 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31275 fde->dw_fde_second_end, &range_list_added,
31276 true);
31277 }
31278
31279 if (range_list_added)
31280 {
31281 /* We need to give .debug_loc and .debug_ranges an appropriate
31282 "base address". Use zero so that these addresses become
31283 absolute. Historically, we've emitted the unexpected
31284 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31285 Emit both to give time for other tools to adapt. */
31286 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31287 if (! dwarf_strict && dwarf_version < 4)
31288 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31289
31290 add_ranges (NULL);
31291 }
31292 }
31293
31294 /* AIX Assembler inserts the length, so adjust the reference to match the
31295 offset expected by debuggers. */
31296 strcpy (dl_section_ref, debug_line_section_label);
31297 if (XCOFF_DEBUGGING_INFO)
31298 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31299
31300 if (debug_info_level >= DINFO_LEVEL_TERSE)
31301 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31302 dl_section_ref);
31303
31304 if (have_macinfo)
31305 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31306 macinfo_section_label);
31307
31308 if (dwarf_split_debug_info)
31309 {
31310 if (have_location_lists)
31311 {
31312 /* Since we generate the loclists in the split DWARF .dwo
31313 file itself, we don't need to generate a loclists_base
31314 attribute for the split compile unit DIE. That attribute
31315 (and using relocatable sec_offset FORMs) isn't allowed
31316 for a split compile unit. Only if the .debug_loclists
31317 section was in the main file, would we need to generate a
31318 loclists_base attribute here (for the full or skeleton
31319 unit DIE). */
31320
31321 /* optimize_location_lists calculates the size of the lists,
31322 so index them first, and assign indices to the entries.
31323 Although optimize_location_lists will remove entries from
31324 the table, it only does so for duplicates, and therefore
31325 only reduces ref_counts to 1. */
31326 index_location_lists (comp_unit_die ());
31327 }
31328
31329 if (addr_index_table != NULL)
31330 {
31331 unsigned int index = 0;
31332 addr_index_table
31333 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31334 (&index);
31335 }
31336 }
31337
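/* loc_list_idx counts the location lists as indexes are assigned;
   with -gsplit-dwarf and DWARF 5 its final value is emitted below
   as the offset entry count of the .debug_loclists header. */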
31338 loc_list_idx = 0;
31339 if (have_location_lists)
31340 {
31341 optimize_location_lists (comp_unit_die ());
31342 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31343 if (dwarf_version >= 5 && dwarf_split_debug_info)
31344 assign_location_list_indexes (comp_unit_die ());
31345 }
31346
31347 save_macinfo_strings ();
31348
31349 if (dwarf_split_debug_info)
31350 {
31351 unsigned int index = 0;
31352
31353 /* Add attributes common to skeleton compile_units and
31354 type_units. Because these attributes include strings, it
31355 must be done before freezing the string table. Top-level
31356 skeleton die attrs are added when the skeleton type unit is
31357 created, so ensure it is created by this point. */
31358 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31359 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31360 }
31361
31362 /* Output all of the compilation units. We put the main one last so that
31363 the offsets are available to output_pubnames. */
31364 for (node = cu_die_list; node; node = node->next)
31365 output_comp_unit (node->die, 0, NULL);
31366
31367 hash_table<comdat_type_hasher> comdat_type_table (100);
31368 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31369 {
31370 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31371
31372 /* Don't output duplicate types. */
31373 if (*slot != HTAB_EMPTY_ENTRY)
31374 continue;
31375
31376 /* Add a pointer to the line table for the main compilation unit
31377 so that the debugger can make sense of DW_AT_decl_file
31378 attributes. */
31379 if (debug_info_level >= DINFO_LEVEL_TERSE)
31380 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31381 (!dwarf_split_debug_info
31382 ? dl_section_ref
31383 : debug_skeleton_line_section_label));
31384
31385 output_comdat_type_unit (ctnode);
31386 *slot = ctnode;
31387 }
31388
31389 if (dwarf_split_debug_info)
31390 {
31391 int mark;
31392 struct md5_ctx ctx;
31393
31394 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31395 index_rnglists ();
31396
31397 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31398 md5_init_ctx (&ctx);
31399 mark = 0;
31400 die_checksum (comp_unit_die (), &ctx, &mark);
31401 unmark_all_dies (comp_unit_die ());
31402 md5_finish_ctx (&ctx, checksum);
31403
31404 if (dwarf_version < 5)
31405 {
31406 /* Use the first 8 bytes of the checksum as the dwo_id,
31407 and add it to both comp-unit DIEs. */
31408 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31409 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31410 }
31411
31412 /* Add the base offset of the ranges table to the skeleton
31413 comp-unit DIE. */
31414 if (!vec_safe_is_empty (ranges_table))
31415 {
31416 if (dwarf_version >= 5)
31417 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31418 ranges_base_label);
31419 else
31420 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31421 ranges_section_label);
31422 }
31423
31424 switch_to_section (debug_addr_section);
31425 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31426 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31427 before DWARF5, didn't have a header for .debug_addr units.
31428 DWARF5 specifies a small header when address tables are used. */
31429 if (dwarf_version >= 5)
31430 {
31431 unsigned int last_idx = 0;
31432 unsigned long addrs_length;
31433
31434 addr_index_table->traverse_noresize
31435 <unsigned int *, count_index_addrs> (&last_idx);
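/* The unit length does not include the length field itself, but does
   cover the 2-byte version, 1-byte address size and 1-byte segment
   selector size fields (hence the + 4) plus the address entries. */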
31436 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31437
31438 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31439 dw2_asm_output_data (4, 0xffffffff,
31440 "Escape value for 64-bit DWARF extension");
31441 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31442 "Length of Address Unit");
31443 dw2_asm_output_data (2, 5, "DWARF addr version");
31444 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31445 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31446 }
31447 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31448 output_addr_table ();
31449 }
31450
31451 /* Output the main compilation unit if non-empty or if .debug_macinfo
31452 or .debug_macro will be emitted. */
31453 output_comp_unit (comp_unit_die (), have_macinfo,
31454 dwarf_split_debug_info ? checksum : NULL);
31455
31456 if (dwarf_split_debug_info && info_section_emitted)
31457 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31458
31459 /* Output the abbreviation table. */
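/* Slot zero of abbrev_die_table is allocated but never used, so a
   length of one means no abbreviations were assigned at all. */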
31460 if (vec_safe_length (abbrev_die_table) != 1)
31461 {
31462 switch_to_section (debug_abbrev_section);
31463 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31464 output_abbrev_section ();
31465 }
31466
31467 /* Output location list section if necessary. */
31468 if (have_location_lists)
31469 {
31470 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31471 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31472 /* Output the location lists info. */
31473 switch_to_section (debug_loc_section);
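/* For DWARF 5 the section starts with a header: a unit length
   (delimited by the L1/L2 labels below), version, address size,
   segment selector size and offset entry count. */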
31474 if (dwarf_version >= 5)
31475 {
31476 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31477 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31478 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31479 dw2_asm_output_data (4, 0xffffffff,
31480 "Initial length escape value indicating "
31481 "64-bit DWARF extension");
31482 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31483 "Length of Location Lists");
31484 ASM_OUTPUT_LABEL (asm_out_file, l1);
31485 output_dwarf_version ();
31486 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31487 dw2_asm_output_data (1, 0, "Segment Size");
31488 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31489 "Offset Entry Count");
31490 }
31491 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31492 if (dwarf_version >= 5 && dwarf_split_debug_info)
31493 {
31494 unsigned int save_loc_list_idx = loc_list_idx;
31495 loc_list_idx = 0;
31496 output_loclists_offsets (comp_unit_die ());
31497 gcc_assert (save_loc_list_idx == loc_list_idx);
31498 }
31499 output_location_lists (comp_unit_die ());
31500 if (dwarf_version >= 5)
31501 ASM_OUTPUT_LABEL (asm_out_file, l2);
31502 }
31503
31504 output_pubtables ();
31505
31506 /* Output the address range information if a CU (.debug_info section)
31507 was emitted. We output an empty table even if we had no functions
31508 to put in it. This is because the consumer has no way to tell the
31509 difference between an empty table that we omitted and failure to
31510 generate a table that would have contained data. */
31511 if (info_section_emitted)
31512 {
31513 switch_to_section (debug_aranges_section);
31514 output_aranges ();
31515 }
31516
31517 /* Output ranges section if necessary. */
31518 if (!vec_safe_is_empty (ranges_table))
31519 {
31520 if (dwarf_version >= 5)
31521 output_rnglists (generation);
31522 else
31523 output_ranges ();
31524 }
31525
31526 /* Have to end the macro section. */
31527 if (have_macinfo)
31528 {
31529 switch_to_section (debug_macinfo_section);
31530 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31531 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31532 : debug_skeleton_line_section_label, false);
31533 dw2_asm_output_data (1, 0, "End compilation unit");
31534 }
31535
31536 /* Output the source line correspondence table. We must do this
31537 even if there is no line information. Otherwise, on an empty
31538 translation unit, we will generate a present, but empty,
31539 .debug_info section. IRIX 6.5 `nm' will then complain when
31540 examining the file. This is done late so that any filenames
31541 used by the debug_info section are marked as 'used'. */
31542 switch_to_section (debug_line_section);
31543 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31544 if (! output_asm_line_debug_info ())
31545 output_line_info (false);
31546
31547 if (dwarf_split_debug_info && info_section_emitted)
31548 {
31549 switch_to_section (debug_skeleton_line_section);
31550 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31551 output_line_info (true);
31552 }
31553
31554 /* If we emitted any indirect strings, output the string table too. */
31555 if (debug_str_hash || skeleton_debug_str_hash)
31556 output_indirect_strings ();
31557 if (debug_line_str_hash)
31558 {
31559 switch_to_section (debug_line_str_section);
31560 const enum dwarf_form form = DW_FORM_line_strp;
31561 debug_line_str_hash->traverse<enum dwarf_form,
31562 output_indirect_string> (form);
31563 }
31564
31565 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31566 symview_upper_bound = 0;
31567 if (zero_view_p)
31568 bitmap_clear (zero_view_p);
31569 }
31570
31571 /* Returns a hash value for X (which really is a variable_value_struct). */
31572
31573 inline hashval_t
31574 variable_value_hasher::hash (variable_value_struct *x)
31575 {
31576 return (hashval_t) x->decl_id;
31577 }
31578
31579 /* Return nonzero if decl_id of variable_value_struct X is the same as
31580 UID of decl Y. */
31581
31582 inline bool
31583 variable_value_hasher::equal (variable_value_struct *x, tree y)
31584 {
31585 return x->decl_id == DECL_UID (y);
31586 }
31587
31588 /* Helper function for resolve_variable_value, handle
31589 DW_OP_GNU_variable_value in one location expression.
31590 Return true if exprloc has been changed into loclist. */
31591
31592 static bool
31593 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31594 {
31595 dw_loc_descr_ref next;
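/* Walk the expression and try to resolve each DW_OP_GNU_variable_value
   that refers to a decl of the current function: either turn it into
   a reference to the decl's DIE, or splice in the decl's own location
   expression, converting the exprloc into a location list when the
   location has multiple entries and the attribute permits it. */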
31596 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31597 {
31598 next = loc->dw_loc_next;
31599 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31600 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31601 continue;
31602
31603 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31604 if (DECL_CONTEXT (decl) != current_function_decl)
31605 continue;
31606
31607 dw_die_ref ref = lookup_decl_die (decl);
31608 if (ref)
31609 {
31610 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31611 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31612 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31613 continue;
31614 }
31615 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31616 if (l == NULL)
31617 continue;
31618 if (l->dw_loc_next)
31619 {
31620 if (AT_class (a) != dw_val_class_loc)
31621 continue;
31622 switch (a->dw_attr)
31623 {
31624 /* Following attributes allow both exprloc and loclist
31625 classes, so we can change them into a loclist. */
31626 case DW_AT_location:
31627 case DW_AT_string_length:
31628 case DW_AT_return_addr:
31629 case DW_AT_data_member_location:
31630 case DW_AT_frame_base:
31631 case DW_AT_segment:
31632 case DW_AT_static_link:
31633 case DW_AT_use_location:
31634 case DW_AT_vtable_elem_location:
31635 if (prev)
31636 {
31637 prev->dw_loc_next = NULL;
31638 prepend_loc_descr_to_each (l, AT_loc (a));
31639 }
31640 if (next)
31641 add_loc_descr_to_each (l, next);
31642 a->dw_attr_val.val_class = dw_val_class_loc_list;
31643 a->dw_attr_val.val_entry = NULL;
31644 a->dw_attr_val.v.val_loc_list = l;
31645 have_location_lists = true;
31646 return true;
31647 /* Following attributes allow both exprloc and reference,
31648 so if the whole expression is DW_OP_GNU_variable_value alone
31649 we could transform it into reference. */
31650 case DW_AT_byte_size:
31651 case DW_AT_bit_size:
31652 case DW_AT_lower_bound:
31653 case DW_AT_upper_bound:
31654 case DW_AT_bit_stride:
31655 case DW_AT_count:
31656 case DW_AT_allocated:
31657 case DW_AT_associated:
31658 case DW_AT_byte_stride:
31659 if (prev == NULL && next == NULL)
31660 break;
31661 /* FALLTHRU */
31662 default:
31663 if (dwarf_strict)
31664 continue;
31665 break;
31666 }
31667 /* Create DW_TAG_variable that we can refer to. */
31668 gen_decl_die (decl, NULL_TREE, NULL,
31669 lookup_decl_die (current_function_decl));
31670 ref = lookup_decl_die (decl);
31671 if (ref)
31672 {
31673 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31674 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31675 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31676 }
31677 continue;
31678 }
31679 if (prev)
31680 {
31681 prev->dw_loc_next = l->expr;
31682 add_loc_descr (&prev->dw_loc_next, next);
31683 free_loc_descr (loc, NULL);
31684 next = prev->dw_loc_next;
31685 }
31686 else
31687 {
31688 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31689 add_loc_descr (&loc, next);
31690 next = loc;
31691 }
31692 loc = prev;
31693 }
31694 return false;
31695 }
31696
31697 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31698
31699 static void
31700 resolve_variable_value (dw_die_ref die)
31701 {
31702 dw_attr_node *a;
31703 dw_loc_list_ref loc;
31704 unsigned ix;
31705
31706 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31707 switch (AT_class (a))
31708 {
31709 case dw_val_class_loc:
31710 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31711 break;
31712 /* FALLTHRU */
31713 case dw_val_class_loc_list:
31714 loc = AT_loc_list (a);
31715 gcc_assert (loc);
31716 for (; loc; loc = loc->dw_loc_next)
31717 resolve_variable_value_in_expr (a, loc->expr);
31718 break;
31719 default:
31720 break;
31721 }
31722 }
31723
31724 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31725 temporaries in the current function. */
31726
31727 static void
31728 resolve_variable_values (void)
31729 {
31730 if (!variable_value_hash || !current_function_decl)
31731 return;
31732
31733 struct variable_value_struct *node
31734 = variable_value_hash->find_with_hash (current_function_decl,
31735 DECL_UID (current_function_decl));
31736
31737 if (node == NULL)
31738 return;
31739
31740 unsigned int i;
31741 dw_die_ref die;
31742 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31743 resolve_variable_value (die);
31744 }
31745
31746 /* Helper function for note_variable_value, handle one location
31747 expression. */
31748
31749 static void
31750 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31751 {
31752 for (; loc; loc = loc->dw_loc_next)
31753 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31754 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31755 {
31756 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31757 dw_die_ref ref = lookup_decl_die (decl);
31758 if (! ref && (flag_generate_lto || flag_generate_offload))
31759 {
31760 /* ??? This is somewhat of a hack because we do not create DIEs
31761 for variables not in BLOCK trees early, but when generating
31762 early LTO output we need the dw_val_class_decl_ref to be
31763 fully resolved. For fat LTO objects we'd also like to
31764 undo this after LTO dwarf output. */
31765 gcc_assert (DECL_CONTEXT (decl));
31766 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31767 gcc_assert (ctx != NULL);
31768 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31769 ref = lookup_decl_die (decl);
31770 gcc_assert (ref != NULL);
31771 }
31772 if (ref)
31773 {
31774 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31775 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31776 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31777 continue;
31778 }
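/* There is no DIE for the decl yet. Remember the DIE keyed by the
   decl's containing function so that resolve_variable_values can
   retry the resolution when that function is being compiled. */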
31779 if (VAR_P (decl)
31780 && DECL_CONTEXT (decl)
31781 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31782 && lookup_decl_die (DECL_CONTEXT (decl)))
31783 {
31784 if (!variable_value_hash)
31785 variable_value_hash
31786 = hash_table<variable_value_hasher>::create_ggc (10);
31787
31788 tree fndecl = DECL_CONTEXT (decl);
31789 struct variable_value_struct *node;
31790 struct variable_value_struct **slot
31791 = variable_value_hash->find_slot_with_hash (fndecl,
31792 DECL_UID (fndecl),
31793 INSERT);
31794 if (*slot == NULL)
31795 {
31796 node = ggc_cleared_alloc<variable_value_struct> ();
31797 node->decl_id = DECL_UID (fndecl);
31798 *slot = node;
31799 }
31800 else
31801 node = *slot;
31802
31803 vec_safe_push (node->dies, die);
31804 }
31805 }
31806 }
31807
31808 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31809 carrying a dw_val_class_decl_ref operand. */
31810
31811 static void
31812 note_variable_value (dw_die_ref die)
31813 {
31814 dw_die_ref c;
31815 dw_attr_node *a;
31816 dw_loc_list_ref loc;
31817 unsigned ix;
31818
31819 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31820 switch (AT_class (a))
31821 {
31822 case dw_val_class_loc_list:
31823 loc = AT_loc_list (a);
31824 gcc_assert (loc);
31825 if (!loc->noted_variable_value)
31826 {
31827 loc->noted_variable_value = 1;
31828 for (; loc; loc = loc->dw_loc_next)
31829 note_variable_value_in_expr (die, loc->expr);
31830 }
31831 break;
31832 case dw_val_class_loc:
31833 note_variable_value_in_expr (die, AT_loc (a));
31834 break;
31835 default:
31836 break;
31837 }
31838
31839 /* Mark children. */
31840 FOR_EACH_CHILD (die, c, note_variable_value (c));
31841 }
31842
31843 /* Perform any cleanups needed after the early debug generation pass
31844 has run. */
31845
31846 static void
31847 dwarf2out_early_finish (const char *filename)
31848 {
31849 set_early_dwarf s;
31850 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31851
31852 /* PCH might result in DW_AT_producer string being restored from the
31853 header compilation, so always fill it with empty string initially
31854 and overwrite only here. */
31855 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31856 producer_string = gen_producer_string ();
31857 producer->dw_attr_val.v.val_str->refcount--;
31858 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31859
31860 /* Add the name for the main input file now. We delayed this from
31861 dwarf2out_init to avoid complications with PCH. */
31862 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31863 add_comp_dir_attribute (comp_unit_die ());
31864
31865 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31866 DW_AT_comp_dir into .debug_line_str section. */
31867 if (!output_asm_line_debug_info ()
31868 && dwarf_version >= 5
31869 && DWARF5_USE_DEBUG_LINE_STR)
31870 {
31871 for (int i = 0; i < 2; i++)
31872 {
31873 dw_attr_node *a = get_AT (comp_unit_die (),
31874 i ? DW_AT_comp_dir : DW_AT_name);
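/* Skip attributes that are missing, are not plain strings, or are so
   short that a reference through .debug_line_str would be no smaller
   than emitting the string inline. */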
31875 if (a == NULL
31876 || AT_class (a) != dw_val_class_str
31877 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31878 continue;
31879
31880 if (! debug_line_str_hash)
31881 debug_line_str_hash
31882 = hash_table<indirect_string_hasher>::create_ggc (10);
31883
31884 struct indirect_string_node *node
31885 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31886 set_indirect_string (node);
31887 node->form = DW_FORM_line_strp;
31888 a->dw_attr_val.v.val_str->refcount--;
31889 a->dw_attr_val.v.val_str = node;
31890 }
31891 }
31892
31893 /* With LTO early dwarf was really finished at compile-time, so make
31894 sure to adjust the phase after annotating the LTRANS CU DIE. */
31895 if (in_lto_p)
31896 {
31897 early_dwarf_finished = true;
31898 if (dump_file)
31899 {
31900 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31901 print_die (comp_unit_die (), dump_file);
31902 }
31903 return;
31904 }
31905
31906 /* Walk through the list of incomplete types again, trying once more to
31907 emit full debugging info for them. */
31908 retry_incomplete_types ();
31909
31910 /* The point here is to flush out the limbo list so that it is empty
31911 and we don't need to stream it for LTO. */
31912 flush_limbo_die_list ();
31913
31914 gen_scheduled_generic_parms_dies ();
31915 gen_remaining_tmpl_value_param_die_attribute ();
31916
31917 /* Add DW_AT_linkage_name for all deferred DIEs. */
31918 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31919 {
31920 tree decl = node->created_for;
31921 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31922 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31923 ended up in deferred_asm_name before we knew it was
31924 constant and never written to disk. */
31925 && DECL_ASSEMBLER_NAME (decl))
31926 {
31927 add_linkage_attr (node->die, decl);
31928 move_linkage_attr (node->die);
31929 }
31930 }
31931 deferred_asm_name = NULL;
31932
31933 if (flag_eliminate_unused_debug_types)
31934 prune_unused_types ();
31935
31936 /* Generate separate COMDAT sections for type DIEs. */
31937 if (use_debug_types)
31938 {
31939 break_out_comdat_types (comp_unit_die ());
31940
31941 /* Each new type_unit DIE was added to the limbo die list when created.
31942 Since these have all been added to comdat_type_list, clear the
31943 limbo die list. */
31944 limbo_die_list = NULL;
31945
31946 /* For each new comdat type unit, copy declarations for incomplete
31947 types to make the new unit self-contained (i.e., no direct
31948 references to the main compile unit). */
31949 for (comdat_type_node *ctnode = comdat_type_list;
31950 ctnode != NULL; ctnode = ctnode->next)
31951 copy_decls_for_unworthy_types (ctnode->root_die);
31952 copy_decls_for_unworthy_types (comp_unit_die ());
31953
31954 /* In the process of copying declarations from one unit to another,
31955 we may have left some declarations behind that are no longer
31956 referenced. Prune them. */
31957 prune_unused_types ();
31958 }
31959
31960 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
31961 carrying a dw_val_class_decl_ref operand. */
31962 note_variable_value (comp_unit_die ());
31963 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31964 note_variable_value (node->die);
31965 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31966 ctnode = ctnode->next)
31967 note_variable_value (ctnode->root_die);
31968 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31969 note_variable_value (node->die);
31970
31971 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31972 both the main_cu and all skeleton TUs. Making this call unconditional
31973 would end up either adding a second copy of the AT_pubnames attribute, or
31974 requiring a special case in add_top_level_skeleton_die_attrs. */
31975 if (!dwarf_split_debug_info)
31976 add_AT_pubnames (comp_unit_die ());
31977
31978 /* The early debug phase is now finished. */
31979 early_dwarf_finished = true;
31980 if (dump_file)
31981 {
31982 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
31983 print_die (comp_unit_die (), dump_file);
31984 }
31985
31986 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31987 if ((!flag_generate_lto && !flag_generate_offload)
31988 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31989 copy_lto_debug_sections operation of the simple object support in
31990 libiberty is not implemented for them yet. */
31991 || TARGET_PECOFF || TARGET_COFF)
31992 return;
31993
31994 /* Now that we are going to output for LTO, initialize sections and labels
31995 to the LTO variants. We don't need a random-seed postfix as for other
31996 LTO sections, since linking the LTO debug sections into one in a partial
31997 link is fine. */
31998 init_sections_and_labels (true);
31999
32000 /* The output below is modeled after dwarf2out_finish with all
32001 location related output removed and some LTO specific changes.
32002 Some refactoring might make both smaller and easier to match up. */
32003
32004 /* Traverse the DIEs and add sibling attributes to those DIEs
32005 that have children. */
32006 add_sibling_attributes (comp_unit_die ());
32007 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32008 add_sibling_attributes (node->die);
32009 for (comdat_type_node *ctnode = comdat_type_list;
32010 ctnode != NULL; ctnode = ctnode->next)
32011 add_sibling_attributes (ctnode->root_die);
32012
32013 /* AIX Assembler inserts the length, so adjust the reference to match the
32014 offset expected by debuggers. */
32015 strcpy (dl_section_ref, debug_line_section_label);
32016 if (XCOFF_DEBUGGING_INFO)
32017 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32018
32019 if (debug_info_level >= DINFO_LEVEL_TERSE)
32020 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32021
32022 if (have_macinfo)
32023 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32024 macinfo_section_label);
32025
32026 save_macinfo_strings ();
32027
32028 if (dwarf_split_debug_info)
32029 {
32030 unsigned int index = 0;
32031 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32032 }
32033
32034 /* Output all of the compilation units. We put the main one last so that
32035 the offsets are available to output_pubnames. */
32036 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32037 output_comp_unit (node->die, 0, NULL);
32038
32039 hash_table<comdat_type_hasher> comdat_type_table (100);
32040 for (comdat_type_node *ctnode = comdat_type_list;
32041 ctnode != NULL; ctnode = ctnode->next)
32042 {
32043 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32044
32045 /* Don't output duplicate types. */
32046 if (*slot != HTAB_EMPTY_ENTRY)
32047 continue;
32048
32049 /* Add a pointer to the line table for the main compilation unit
32050 so that the debugger can make sense of DW_AT_decl_file
32051 attributes. */
32052 if (debug_info_level >= DINFO_LEVEL_TERSE)
32053 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32054 (!dwarf_split_debug_info
32055 ? debug_line_section_label
32056 : debug_skeleton_line_section_label));
32057
32058 output_comdat_type_unit (ctnode);
32059 *slot = ctnode;
32060 }
32061
32062 /* Stick a unique symbol to the main debuginfo section. */
32063 compute_comp_unit_symbol (comp_unit_die ());
32064
32065 /* Output the main compilation unit. We always need it if only for
32066 the CU symbol. */
32067 output_comp_unit (comp_unit_die (), true, NULL);
32068
32069 /* Output the abbreviation table. */
32070 if (vec_safe_length (abbrev_die_table) != 1)
32071 {
32072 switch_to_section (debug_abbrev_section);
32073 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32074 output_abbrev_section ();
32075 }
32076
32077 /* Have to end the macro section. */
32078 if (have_macinfo)
32079 {
32080 /* We have to save macinfo state if we need to output it again
32081 for the FAT part of the object. */
32082 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32083 if (flag_fat_lto_objects)
32084 macinfo_table = macinfo_table->copy ();
32085
32086 switch_to_section (debug_macinfo_section);
32087 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32088 output_macinfo (debug_line_section_label, true);
32089 dw2_asm_output_data (1, 0, "End compilation unit");
32090
32091 if (flag_fat_lto_objects)
32092 {
32093 vec_free (macinfo_table);
32094 macinfo_table = saved_macinfo_table;
32095 }
32096 }
32097
32098 /* Emit a skeleton debug_line section. */
32099 switch_to_section (debug_line_section);
32100 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32101 output_line_info (true);
32102
32103 /* If we emitted any indirect strings, output the string table too. */
32104 if (debug_str_hash || skeleton_debug_str_hash)
32105 output_indirect_strings ();
32106 if (debug_line_str_hash)
32107 {
32108 switch_to_section (debug_line_str_section);
32109 const enum dwarf_form form = DW_FORM_line_strp;
32110 debug_line_str_hash->traverse<enum dwarf_form,
32111 output_indirect_string> (form);
32112 }
32113
32114 /* Switch back to the text section. */
32115 switch_to_section (text_section);
32116 }
32117
32118 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32119 within the same process. For use by toplev::finalize. */
32120
32121 void
32122 dwarf2out_c_finalize (void)
32123 {
32124 last_var_location_insn = NULL;
32125 cached_next_real_insn = NULL;
32126 used_rtx_array = NULL;
32127 incomplete_types = NULL;
32128 debug_info_section = NULL;
32129 debug_skeleton_info_section = NULL;
32130 debug_abbrev_section = NULL;
32131 debug_skeleton_abbrev_section = NULL;
32132 debug_aranges_section = NULL;
32133 debug_addr_section = NULL;
32134 debug_macinfo_section = NULL;
32135 debug_line_section = NULL;
32136 debug_skeleton_line_section = NULL;
32137 debug_loc_section = NULL;
32138 debug_pubnames_section = NULL;
32139 debug_pubtypes_section = NULL;
32140 debug_str_section = NULL;
32141 debug_line_str_section = NULL;
32142 debug_str_dwo_section = NULL;
32143 debug_str_offsets_section = NULL;
32144 debug_ranges_section = NULL;
32145 debug_frame_section = NULL;
32146 fde_vec = NULL;
32147 debug_str_hash = NULL;
32148 debug_line_str_hash = NULL;
32149 skeleton_debug_str_hash = NULL;
32150 dw2_string_counter = 0;
32151 have_multiple_function_sections = false;
32152 text_section_used = false;
32153 cold_text_section_used = false;
32154 cold_text_section = NULL;
32155 current_unit_personality = NULL;
32156
32157 early_dwarf = false;
32158 early_dwarf_finished = false;
32159
32160 next_die_offset = 0;
32161 single_comp_unit_die = NULL;
32162 comdat_type_list = NULL;
32163 limbo_die_list = NULL;
32164 file_table = NULL;
32165 decl_die_table = NULL;
32166 common_block_die_table = NULL;
32167 decl_loc_table = NULL;
32168 call_arg_locations = NULL;
32169 call_arg_loc_last = NULL;
32170 call_site_count = -1;
32171 tail_call_site_count = -1;
32172 cached_dw_loc_list_table = NULL;
32173 abbrev_die_table = NULL;
32174 delete dwarf_proc_stack_usage_map;
32175 dwarf_proc_stack_usage_map = NULL;
32176 line_info_label_num = 0;
32177 cur_line_info_table = NULL;
32178 text_section_line_info = NULL;
32179 cold_text_section_line_info = NULL;
32180 separate_line_info = NULL;
32181 info_section_emitted = false;
32182 pubname_table = NULL;
32183 pubtype_table = NULL;
32184 macinfo_table = NULL;
32185 ranges_table = NULL;
32186 ranges_by_label = NULL;
32187 rnglist_idx = 0;
32188 have_location_lists = false;
32189 loclabel_num = 0;
32190 poc_label_num = 0;
32191 last_emitted_file = NULL;
32192 label_num = 0;
32193 tmpl_value_parm_die_table = NULL;
32194 generic_type_instances = NULL;
32195 frame_pointer_fb_offset = 0;
32196 frame_pointer_fb_offset_valid = false;
32197 base_types.release ();
32198 XDELETEVEC (producer_string);
32199 producer_string = NULL;
32200 }
32201
32202 #include "gt-dwarf2out.h"