1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2016 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
 24       the file numbers are used by .debug_info.  Alternatively, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
 47           information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
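
/* A worked example of the terms above, for a hypothetical x86-64 function
   (not something this file emits verbatim): the CIE typically carries the
   initial rule DW_CFA_def_cfa rsp, 8 (the call just pushed the return
   address).  If the prologue is

       push %rbp
       mov  %rsp, %rbp

   the FDE follows each insn with CFIs such as DW_CFA_def_cfa_offset 16
   after the push and DW_CFA_def_cfa_register rbp after the move, so the
   CFA itself never moves while the register/offset pair used to compute
   it at runtime does.  */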
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "debug.h"
87 #include "common/common-target.h"
88 #include "langhooks.h"
89 #include "lra.h"
90 #include "dumpfile.h"
91 #include "opts.h"
92 #include "tree-dfa.h"
93 #include "gdb/gdb-index.h"
94 #include "rtl-iter.h"
95
96 static void dwarf2out_source_line (unsigned int, const char *, int, bool);
97 static rtx_insn *last_var_location_insn;
98 static rtx_insn *cached_next_real_insn;
99 static void dwarf2out_decl (tree);
100
101 #ifndef XCOFF_DEBUGGING_INFO
102 #define XCOFF_DEBUGGING_INFO 0
103 #endif
104
105 #ifndef HAVE_XCOFF_DWARF_EXTRAS
106 #define HAVE_XCOFF_DWARF_EXTRAS 0
107 #endif
108
109 #ifdef VMS_DEBUGGING_INFO
110 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
111
112 /* Define this macro to be a nonzero value if the directory specifications
113 which are output in the debug info should end with a separator. */
114 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
115 /* Define this macro to evaluate to a nonzero value if GCC should refrain
116 from generating indirect strings in DWARF2 debug information, for instance
117 if your target is stuck with an old version of GDB that is unable to
118 process them properly or uses VMS Debug. */
119 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
120 #else
121 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
122 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
123 #endif
124
125 /* ??? Poison these here until it can be done generically. They've been
126 totally replaced in this file; make sure it stays that way. */
127 #undef DWARF2_UNWIND_INFO
128 #undef DWARF2_FRAME_INFO
129 #if (GCC_VERSION >= 3000)
130 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
131 #endif
132
133 /* The size of the target's pointer type. */
134 #ifndef PTR_SIZE
135 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
136 #endif
137
138 /* Array of RTXes referenced by the debugging information, which therefore
139 must be kept around forever. */
140 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
141
142 /* A pointer to the base of a list of incomplete types which might be
143 completed at some later time. incomplete_types_list needs to be a
 144    vec<tree, va_gc> * because we want to tell the garbage collector about
145 it. */
146 static GTY(()) vec<tree, va_gc> *incomplete_types;
147
148 /* A pointer to the base of a table of references to declaration
149 scopes. This table is a display which tracks the nesting
150 of declaration scopes at the current scope and containing
151 scopes. This table is used to find the proper place to
152 define type declaration DIE's. */
153 static GTY(()) vec<tree, va_gc> *decl_scope_table;
154
155 /* Pointers to various DWARF2 sections. */
156 static GTY(()) section *debug_info_section;
157 static GTY(()) section *debug_skeleton_info_section;
158 static GTY(()) section *debug_abbrev_section;
159 static GTY(()) section *debug_skeleton_abbrev_section;
160 static GTY(()) section *debug_aranges_section;
161 static GTY(()) section *debug_addr_section;
162 static GTY(()) section *debug_macinfo_section;
163 static const char *debug_macinfo_section_name;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 30
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
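
/* A sketch of what this means on a little-endian target (byte values are
   hypothetical): a 32-bit DWARF section whose remaining length is 0x12a
   begins "2a 01 00 00", whereas the 64-bit DWARF form begins with the
   escape "ff ff ff ff" followed by the 8-byte length
   "2a 01 00 00 00 00 00 00", i.e. 4 vs. 4 + 8 = 12 bytes of initial
   length.  */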
189
190 /* Round SIZE up to the nearest BOUNDARY. */
191 #define DWARF_ROUND(SIZE,BOUNDARY) \
192 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
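
/* For example, DWARF_ROUND (9, 4) == 12 and DWARF_ROUND (8, 4) == 8: the
   truncating division of SIZE + BOUNDARY - 1 by BOUNDARY, multiplied back
   by BOUNDARY, yields the smallest multiple of BOUNDARY that is >= SIZE.  */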
193
194 /* CIE identifier. */
195 #if HOST_BITS_PER_WIDE_INT >= 64
196 #define DWARF_CIE_ID \
197 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
198 #else
199 #define DWARF_CIE_ID DW_CIE_ID
200 #endif
201
202
203 /* A vector for a table that contains frame description
204 information for each routine. */
205 #define NOT_INDEXED (-1U)
206 #define NO_INDEX_ASSIGNED (-2U)
207
208 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
209
210 struct GTY((for_user)) indirect_string_node {
211 const char *str;
212 unsigned int refcount;
213 enum dwarf_form form;
214 char *label;
215 unsigned int index;
216 };
217
218 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
219 {
220 typedef const char *compare_type;
221
222 static hashval_t hash (indirect_string_node *);
223 static bool equal (indirect_string_node *, const char *);
224 };
225
226 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
227
228 /* With split_debug_info, both the comp_dir and dwo_name go in the
229 main object file, rather than the dwo, similar to the force_direct
230 parameter elsewhere but with additional complications:
231
232 1) The string is needed in both the main object file and the dwo.
233 That is, the comp_dir and dwo_name will appear in both places.
234
235 2) Strings can use three forms: DW_FORM_string, DW_FORM_strp or
236 DW_FORM_GNU_str_index.
237
238 3) GCC chooses the form to use late, depending on the size and
239 reference count.
240
 241    Rather than forcing all the debug string handling functions and
242 callers to deal with these complications, simply use a separate,
243 special-cased string table for any attribute that should go in the
244 main object file. This limits the complexity to just the places
245 that need it. */
246
247 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
248
249 static GTY(()) int dw2_string_counter;
250
251 /* True if the compilation unit places functions in more than one section. */
252 static GTY(()) bool have_multiple_function_sections = false;
253
254 /* Whether the default text and cold text sections have been used at all. */
255
256 static GTY(()) bool text_section_used = false;
257 static GTY(()) bool cold_text_section_used = false;
258
259 /* The default cold text section. */
260 static GTY(()) section *cold_text_section;
261
262 /* The DIE for C++14 'auto' in a function return type. */
263 static GTY(()) dw_die_ref auto_die;
264
265 /* The DIE for C++14 'decltype(auto)' in a function return type. */
266 static GTY(()) dw_die_ref decltype_auto_die;
267
268 /* Forward declarations for functions defined in this file. */
269
270 static void output_call_frame_info (int);
271 static void dwarf2out_note_section_used (void);
272
273 /* Personality decl of current unit. Used only when assembler does not support
274 personality CFI. */
275 static GTY(()) rtx current_unit_personality;
276
277 /* Data and reference forms for relocatable data. */
278 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
279 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
280
281 #ifndef DEBUG_FRAME_SECTION
282 #define DEBUG_FRAME_SECTION ".debug_frame"
283 #endif
284
285 #ifndef FUNC_BEGIN_LABEL
286 #define FUNC_BEGIN_LABEL "LFB"
287 #endif
288
289 #ifndef FUNC_END_LABEL
290 #define FUNC_END_LABEL "LFE"
291 #endif
292
293 #ifndef PROLOGUE_END_LABEL
294 #define PROLOGUE_END_LABEL "LPE"
295 #endif
296
297 #ifndef EPILOGUE_BEGIN_LABEL
298 #define EPILOGUE_BEGIN_LABEL "LEB"
299 #endif
300
301 #ifndef FRAME_BEGIN_LABEL
302 #define FRAME_BEGIN_LABEL "Lframe"
303 #endif
304 #define CIE_AFTER_SIZE_LABEL "LSCIE"
305 #define CIE_END_LABEL "LECIE"
306 #define FDE_LABEL "LSFDE"
307 #define FDE_AFTER_SIZE_LABEL "LASFDE"
308 #define FDE_END_LABEL "LEFDE"
309 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
310 #define LINE_NUMBER_END_LABEL "LELT"
311 #define LN_PROLOG_AS_LABEL "LASLTP"
312 #define LN_PROLOG_END_LABEL "LELTP"
313 #define DIE_LABEL_PREFIX "DW"
314 \f
315 /* Match the base name of a file to the base name of a compilation unit. */
316
317 static int
318 matches_main_base (const char *path)
319 {
320 /* Cache the last query. */
321 static const char *last_path = NULL;
322 static int last_match = 0;
323 if (path != last_path)
324 {
325 const char *base;
326 int length = base_of_path (path, &base);
327 last_path = path;
328 last_match = (length == main_input_baselength
329 && memcmp (base, main_input_basename, length) == 0);
330 }
331 return last_match;
332 }
333
334 #ifdef DEBUG_DEBUG_STRUCT
335
336 static int
337 dump_struct_debug (tree type, enum debug_info_usage usage,
338 enum debug_struct_file criterion, int generic,
339 int matches, int result)
340 {
341 /* Find the type name. */
342 tree type_decl = TYPE_STUB_DECL (type);
343 tree t = type_decl;
344 const char *name = 0;
345 if (TREE_CODE (t) == TYPE_DECL)
346 t = DECL_NAME (t);
347 if (t)
348 name = IDENTIFIER_POINTER (t);
349
350 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
351 criterion,
352 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
353 matches ? "bas" : "hdr",
354 generic ? "gen" : "ord",
355 usage == DINFO_USAGE_DFN ? ";" :
356 usage == DINFO_USAGE_DIR_USE ? "." : "*",
357 result,
358 (void*) type_decl, name);
359 return result;
360 }
361 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
362 dump_struct_debug (type, usage, criterion, generic, matches, result)
363
364 #else
365
366 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
367 (result)
368
369 #endif
370
371 /* Get the number of HOST_WIDE_INTs needed to represent the precision
372 of the number. Some constants have a large uniform precision, so
373 we get the precision needed for the actual value of the number. */
374
375 static unsigned int
376 get_full_len (const wide_int &op)
377 {
378 int prec = wi::min_precision (op, UNSIGNED);
379 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
380 / HOST_BITS_PER_WIDE_INT);
381 }
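
/* E.g. with 64-bit HOST_WIDE_INTs, a value whose minimum precision is 65
   bits needs (65 + 63) / 64 == 2 host words, while anything representable
   in 64 bits needs just 1.  */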
382
383 static bool
384 should_emit_struct_debug (tree type, enum debug_info_usage usage)
385 {
386 enum debug_struct_file criterion;
387 tree type_decl;
388 bool generic = lang_hooks.types.generic_p (type);
389
390 if (generic)
391 criterion = debug_struct_generic[usage];
392 else
393 criterion = debug_struct_ordinary[usage];
394
395 if (criterion == DINFO_STRUCT_FILE_NONE)
396 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
397 if (criterion == DINFO_STRUCT_FILE_ANY)
398 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
399
400 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
401
402 if (type_decl != NULL)
403 {
404 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
405 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
406
407 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
408 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
409 }
410
411 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
412 }
413 \f
414 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
415 switch to the data section instead, and write out a synthetic start label
416 for collect2 the first time around. */
417
418 static void
419 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
420 {
421 if (eh_frame_section == 0)
422 {
423 int flags;
424
425 if (EH_TABLES_CAN_BE_READ_ONLY)
426 {
427 int fde_encoding;
428 int per_encoding;
429 int lsda_encoding;
430
431 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
432 /*global=*/0);
433 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
434 /*global=*/1);
435 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
436 /*global=*/0);
437 flags = ((! flag_pic
438 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
439 && (fde_encoding & 0x70) != DW_EH_PE_aligned
440 && (per_encoding & 0x70) != DW_EH_PE_absptr
441 && (per_encoding & 0x70) != DW_EH_PE_aligned
442 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
443 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
444 ? 0 : SECTION_WRITE);
445 }
446 else
447 flags = SECTION_WRITE;
448
449 #ifdef EH_FRAME_SECTION_NAME
450 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
451 #else
452 eh_frame_section = ((flags == SECTION_WRITE)
453 ? data_section : readonly_data_section);
454 #endif /* EH_FRAME_SECTION_NAME */
455 }
456
457 switch_to_section (eh_frame_section);
458
459 #ifdef EH_FRAME_THROUGH_COLLECT2
460 /* We have no special eh_frame section. Emit special labels to guide
461 collect2. */
462 if (!back)
463 {
464 tree label = get_file_function_name ("F");
465 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
466 targetm.asm_out.globalize_label (asm_out_file,
467 IDENTIFIER_POINTER (label));
468 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
469 }
470 #endif
471 }
472
473 /* Switch [BACK] to the eh or debug frame table section, depending on
474 FOR_EH. */
475
476 static void
477 switch_to_frame_table_section (int for_eh, bool back)
478 {
479 if (for_eh)
480 switch_to_eh_frame_section (back);
481 else
482 {
483 if (!debug_frame_section)
484 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
485 SECTION_DEBUG, NULL);
486 switch_to_section (debug_frame_section);
487 }
488 }
489
490 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
491
492 enum dw_cfi_oprnd_type
493 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
494 {
495 switch (cfi)
496 {
497 case DW_CFA_nop:
498 case DW_CFA_GNU_window_save:
499 case DW_CFA_remember_state:
500 case DW_CFA_restore_state:
501 return dw_cfi_oprnd_unused;
502
503 case DW_CFA_set_loc:
504 case DW_CFA_advance_loc1:
505 case DW_CFA_advance_loc2:
506 case DW_CFA_advance_loc4:
507 case DW_CFA_MIPS_advance_loc8:
508 return dw_cfi_oprnd_addr;
509
510 case DW_CFA_offset:
511 case DW_CFA_offset_extended:
512 case DW_CFA_def_cfa:
513 case DW_CFA_offset_extended_sf:
514 case DW_CFA_def_cfa_sf:
515 case DW_CFA_restore:
516 case DW_CFA_restore_extended:
517 case DW_CFA_undefined:
518 case DW_CFA_same_value:
519 case DW_CFA_def_cfa_register:
520 case DW_CFA_register:
521 case DW_CFA_expression:
522 return dw_cfi_oprnd_reg_num;
523
524 case DW_CFA_def_cfa_offset:
525 case DW_CFA_GNU_args_size:
526 case DW_CFA_def_cfa_offset_sf:
527 return dw_cfi_oprnd_offset;
528
529 case DW_CFA_def_cfa_expression:
530 return dw_cfi_oprnd_loc;
531
532 default:
533 gcc_unreachable ();
534 }
535 }
536
537 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
538
539 enum dw_cfi_oprnd_type
540 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
541 {
542 switch (cfi)
543 {
544 case DW_CFA_def_cfa:
545 case DW_CFA_def_cfa_sf:
546 case DW_CFA_offset:
547 case DW_CFA_offset_extended_sf:
548 case DW_CFA_offset_extended:
549 return dw_cfi_oprnd_offset;
550
551 case DW_CFA_register:
552 return dw_cfi_oprnd_reg_num;
553
554 case DW_CFA_expression:
555 return dw_cfi_oprnd_loc;
556
557 default:
558 return dw_cfi_oprnd_unused;
559 }
560 }
561
562 /* Output one FDE. */
563
564 static void
565 output_fde (dw_fde_ref fde, bool for_eh, bool second,
566 char *section_start_label, int fde_encoding, char *augmentation,
567 bool any_lsda_needed, int lsda_encoding)
568 {
569 const char *begin, *end;
570 static unsigned int j;
571 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
572
573 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
574 /* empty */ 0);
575 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
576 for_eh + j);
577 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
578 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
579 if (!XCOFF_DEBUGGING_INFO || for_eh)
580 {
581 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
582 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
583 " indicating 64-bit DWARF extension");
584 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
585 "FDE Length");
586 }
587 ASM_OUTPUT_LABEL (asm_out_file, l1);
588
589 if (for_eh)
590 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
591 else
592 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
593 debug_frame_section, "FDE CIE offset");
594
595 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
596 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
597
598 if (for_eh)
599 {
600 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
601 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
602 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
603 "FDE initial location");
604 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
605 end, begin, "FDE address range");
606 }
607 else
608 {
609 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
610 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
611 }
612
613 if (augmentation[0])
614 {
615 if (any_lsda_needed)
616 {
617 int size = size_of_encoded_value (lsda_encoding);
618
619 if (lsda_encoding == DW_EH_PE_aligned)
620 {
621 int offset = ( 4 /* Length */
622 + 4 /* CIE offset */
623 + 2 * size_of_encoded_value (fde_encoding)
624 + 1 /* Augmentation size */ );
625 int pad = -offset & (PTR_SIZE - 1);
626
627 size += pad;
628 gcc_assert (size_of_uleb128 (size) == 1);
629 }
630
631 dw2_asm_output_data_uleb128 (size, "Augmentation size");
632
633 if (fde->uses_eh_lsda)
634 {
635 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
636 fde->funcdef_number);
637 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
638 gen_rtx_SYMBOL_REF (Pmode, l1),
639 false,
640 "Language Specific Data Area");
641 }
642 else
643 {
644 if (lsda_encoding == DW_EH_PE_aligned)
645 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
646 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
647 "Language Specific Data Area (none)");
648 }
649 }
650 else
651 dw2_asm_output_data_uleb128 (0, "Augmentation size");
652 }
653
654 /* Loop through the Call Frame Instructions associated with this FDE. */
655 fde->dw_fde_current_label = begin;
656 {
657 size_t from, until, i;
658
659 from = 0;
660 until = vec_safe_length (fde->dw_fde_cfi);
661
662 if (fde->dw_fde_second_begin == NULL)
663 ;
664 else if (!second)
665 until = fde->dw_fde_switch_cfi_index;
666 else
667 from = fde->dw_fde_switch_cfi_index;
668
669 for (i = from; i < until; i++)
670 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
671 }
672
673 /* If we are to emit a ref/link from function bodies to their frame tables,
674 do it now. This is typically performed to make sure that tables
675 associated with functions are dragged with them and not discarded in
676 garbage collecting links. We need to do this on a per function basis to
677 cope with -ffunction-sections. */
678
679 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
680 /* Switch to the function section, emit the ref to the tables, and
681 switch *back* into the table section. */
682 switch_to_section (function_section (fde->decl));
683 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
684 switch_to_frame_table_section (for_eh, true);
685 #endif
686
687 /* Pad the FDE out to an address sized boundary. */
688 ASM_OUTPUT_ALIGN (asm_out_file,
689 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
690 ASM_OUTPUT_LABEL (asm_out_file, l2);
691
692 j += 2;
693 }
694
695 /* Return true if frame description entry FDE is needed for EH. */
696
697 static bool
698 fde_needed_for_eh_p (dw_fde_ref fde)
699 {
700 if (flag_asynchronous_unwind_tables)
701 return true;
702
703 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
704 return true;
705
706 if (fde->uses_eh_lsda)
707 return true;
708
709 /* If exceptions are enabled, we have collected nothrow info. */
710 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
711 return false;
712
713 return true;
714 }
715
716 /* Output the call frame information used to record information
717 that relates to calculating the frame pointer, and records the
718 location of saved registers. */
719
720 static void
721 output_call_frame_info (int for_eh)
722 {
723 unsigned int i;
724 dw_fde_ref fde;
725 dw_cfi_ref cfi;
726 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
727 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
728 bool any_lsda_needed = false;
729 char augmentation[6];
730 int augmentation_size;
731 int fde_encoding = DW_EH_PE_absptr;
732 int per_encoding = DW_EH_PE_absptr;
733 int lsda_encoding = DW_EH_PE_absptr;
734 int return_reg;
735 rtx personality = NULL;
736 int dw_cie_version;
737
738 /* Don't emit a CIE if there won't be any FDEs. */
739 if (!fde_vec)
740 return;
741
742 /* Nothing to do if the assembler's doing it all. */
743 if (dwarf2out_do_cfi_asm ())
744 return;
745
746 /* If we don't have any functions we'll want to unwind out of, don't emit
747 any EH unwind information. If we make FDEs linkonce, we may have to
748 emit an empty label for an FDE that wouldn't otherwise be emitted. We
749 want to avoid having an FDE kept around when the function it refers to
750 is discarded. Example where this matters: a primary function template
751 in C++ requires EH information, an explicit specialization doesn't. */
752 if (for_eh)
753 {
754 bool any_eh_needed = false;
755
756 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
757 {
758 if (fde->uses_eh_lsda)
759 any_eh_needed = any_lsda_needed = true;
760 else if (fde_needed_for_eh_p (fde))
761 any_eh_needed = true;
762 else if (TARGET_USES_WEAK_UNWIND_INFO)
763 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
764 }
765
766 if (!any_eh_needed)
767 return;
768 }
769
770 /* We're going to be generating comments, so turn on app. */
771 if (flag_debug_asm)
772 app_enable ();
773
774 /* Switch to the proper frame section, first time. */
775 switch_to_frame_table_section (for_eh, false);
776
777 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
778 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
779
780 /* Output the CIE. */
781 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
782 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
783 if (!XCOFF_DEBUGGING_INFO || for_eh)
784 {
785 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
786 dw2_asm_output_data (4, 0xffffffff,
787 "Initial length escape value indicating 64-bit DWARF extension");
788 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
789 "Length of Common Information Entry");
790 }
791 ASM_OUTPUT_LABEL (asm_out_file, l1);
792
793 /* Now that the CIE pointer is PC-relative for EH,
794 use 0 to identify the CIE. */
795 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
796 (for_eh ? 0 : DWARF_CIE_ID),
797 "CIE Identifier Tag");
798
799 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
800 use CIE version 1, unless that would produce incorrect results
801 due to overflowing the return register column. */
802 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
803 dw_cie_version = 1;
804 if (return_reg >= 256 || dwarf_version > 2)
805 dw_cie_version = 3;
806 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
807
808 augmentation[0] = 0;
809 augmentation_size = 0;
810
811 personality = current_unit_personality;
812 if (for_eh)
813 {
814 char *p;
815
816 /* Augmentation:
817 z Indicates that a uleb128 is present to size the
818 augmentation section.
819 L Indicates the encoding (and thus presence) of
820 an LSDA pointer in the FDE augmentation.
821 R Indicates a non-default pointer encoding for
822 FDE code pointers.
823 P Indicates the presence of an encoding + language
824 personality routine in the CIE augmentation. */
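
      /* As a hypothetical example: when a personality routine, LSDA
         pointers and a non-default FDE encoding are all needed, the code
         below builds the augmentation string "zPLR", and the CIE
         augmentation data then carries the personality encoding and
         pointer, the LSDA encoding and the FDE encoding, in that order.  */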
825
826 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
827 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
828 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
829
830 p = augmentation + 1;
831 if (personality)
832 {
833 *p++ = 'P';
834 augmentation_size += 1 + size_of_encoded_value (per_encoding);
835 assemble_external_libcall (personality);
836 }
837 if (any_lsda_needed)
838 {
839 *p++ = 'L';
840 augmentation_size += 1;
841 }
842 if (fde_encoding != DW_EH_PE_absptr)
843 {
844 *p++ = 'R';
845 augmentation_size += 1;
846 }
847 if (p > augmentation + 1)
848 {
849 augmentation[0] = 'z';
850 *p = '\0';
851 }
852
853 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
854 if (personality && per_encoding == DW_EH_PE_aligned)
855 {
856 int offset = ( 4 /* Length */
857 + 4 /* CIE Id */
858 + 1 /* CIE version */
859 + strlen (augmentation) + 1 /* Augmentation */
860 + size_of_uleb128 (1) /* Code alignment */
861 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
862 + 1 /* RA column */
863 + 1 /* Augmentation size */
864 + 1 /* Personality encoding */ );
865 int pad = -offset & (PTR_SIZE - 1);
866
867 augmentation_size += pad;
868
869 /* Augmentations should be small, so there's scarce need to
870 iterate for a solution. Die if we exceed one uleb128 byte. */
871 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
872 }
873 }
874
875 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
876 if (dw_cie_version >= 4)
877 {
878 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
879 dw2_asm_output_data (1, 0, "CIE Segment Size");
880 }
881 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
882 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
883 "CIE Data Alignment Factor");
884
885 if (dw_cie_version == 1)
886 dw2_asm_output_data (1, return_reg, "CIE RA Column");
887 else
888 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
889
890 if (augmentation[0])
891 {
892 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
893 if (personality)
894 {
895 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
896 eh_data_format_name (per_encoding));
897 dw2_asm_output_encoded_addr_rtx (per_encoding,
898 personality,
899 true, NULL);
900 }
901
902 if (any_lsda_needed)
903 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
904 eh_data_format_name (lsda_encoding));
905
906 if (fde_encoding != DW_EH_PE_absptr)
907 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
908 eh_data_format_name (fde_encoding));
909 }
910
911 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
912 output_cfi (cfi, NULL, for_eh);
913
914 /* Pad the CIE out to an address sized boundary. */
915 ASM_OUTPUT_ALIGN (asm_out_file,
916 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
917 ASM_OUTPUT_LABEL (asm_out_file, l2);
918
919 /* Loop through all of the FDE's. */
920 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
921 {
922 unsigned int k;
923
924 /* Don't emit EH unwind info for leaf functions that don't need it. */
925 if (for_eh && !fde_needed_for_eh_p (fde))
926 continue;
927
928 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
929 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
930 augmentation, any_lsda_needed, lsda_encoding);
931 }
932
933 if (for_eh && targetm.terminate_dw2_eh_frame_info)
934 dw2_asm_output_data (4, 0, "End of Table");
935
936 /* Turn off app to make assembly quicker. */
937 if (flag_debug_asm)
938 app_disable ();
939 }
940
941 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
942
943 static void
944 dwarf2out_do_cfi_startproc (bool second)
945 {
946 int enc;
947 rtx ref;
948 rtx personality = get_personality_function (current_function_decl);
949
950 fprintf (asm_out_file, "\t.cfi_startproc\n");
951
952 if (personality)
953 {
954 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
955 ref = personality;
956
957 /* ??? The GAS support isn't entirely consistent. We have to
958 handle indirect support ourselves, but PC-relative is done
959 in the assembler. Further, the assembler can't handle any
960 of the weirder relocation types. */
961 if (enc & DW_EH_PE_indirect)
962 ref = dw2_force_const_mem (ref, true);
963
964 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
965 output_addr_const (asm_out_file, ref);
966 fputc ('\n', asm_out_file);
967 }
968
969 if (crtl->uses_eh_lsda)
970 {
971 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
972
973 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
974 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
975 current_function_funcdef_no);
976 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
977 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
978
979 if (enc & DW_EH_PE_indirect)
980 ref = dw2_force_const_mem (ref, true);
981
982 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
983 output_addr_const (asm_out_file, ref);
984 fputc ('\n', asm_out_file);
985 }
986 }
987
988 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
989 this allocation may be done before pass_final. */
990
991 dw_fde_ref
992 dwarf2out_alloc_current_fde (void)
993 {
994 dw_fde_ref fde;
995
996 fde = ggc_cleared_alloc<dw_fde_node> ();
997 fde->decl = current_function_decl;
998 fde->funcdef_number = current_function_funcdef_no;
999 fde->fde_index = vec_safe_length (fde_vec);
1000 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1001 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1002 fde->nothrow = crtl->nothrow;
1003 fde->drap_reg = INVALID_REGNUM;
1004 fde->vdrap_reg = INVALID_REGNUM;
1005
1006 /* Record the FDE associated with this function. */
1007 cfun->fde = fde;
1008 vec_safe_push (fde_vec, fde);
1009
1010 return fde;
1011 }
1012
1013 /* Output a marker (i.e. a label) for the beginning of a function, before
1014 the prologue. */
1015
1016 void
1017 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1018 const char *file ATTRIBUTE_UNUSED)
1019 {
1020 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1021 char * dup_label;
1022 dw_fde_ref fde;
1023 section *fnsec;
1024 bool do_frame;
1025
1026 current_function_func_begin_label = NULL;
1027
1028 do_frame = dwarf2out_do_frame ();
1029
1030 /* ??? current_function_func_begin_label is also used by except.c for
1031 call-site information. We must emit this label if it might be used. */
1032 if (!do_frame
1033 && (!flag_exceptions
1034 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1035 return;
1036
1037 fnsec = function_section (current_function_decl);
1038 switch_to_section (fnsec);
1039 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1040 current_function_funcdef_no);
1041 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1042 current_function_funcdef_no);
1043 dup_label = xstrdup (label);
1044 current_function_func_begin_label = dup_label;
1045
1046 /* We can elide the fde allocation if we're not emitting debug info. */
1047 if (!do_frame)
1048 return;
1049
1050 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1051 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1052 would include pass_dwarf2_frame. If we've not created the FDE yet,
1053 do so now. */
1054 fde = cfun->fde;
1055 if (fde == NULL)
1056 fde = dwarf2out_alloc_current_fde ();
1057
1058 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1059 fde->dw_fde_begin = dup_label;
1060 fde->dw_fde_current_label = dup_label;
1061 fde->in_std_section = (fnsec == text_section
1062 || (cold_text_section && fnsec == cold_text_section));
1063
1064 /* We only want to output line number information for the genuine dwarf2
1065 prologue case, not the eh frame case. */
1066 #ifdef DWARF2_DEBUGGING_INFO
1067 if (file)
1068 dwarf2out_source_line (line, file, 0, true);
1069 #endif
1070
1071 if (dwarf2out_do_cfi_asm ())
1072 dwarf2out_do_cfi_startproc (false);
1073 else
1074 {
1075 rtx personality = get_personality_function (current_function_decl);
1076 if (!current_unit_personality)
1077 current_unit_personality = personality;
1078
 1079          /* We cannot keep a per-function personality: without CFI asm,
 1080             there is no current function left by the time the CFI data
 1081             is emitted.  */
1082 if (personality && current_unit_personality != personality)
1083 sorry ("multiple EH personalities are supported only with assemblers "
1084 "supporting .cfi_personality directive");
1085 }
1086 }
1087
1088 /* Output a marker (i.e. a label) for the end of the generated code
1089 for a function prologue. This gets called *after* the prologue code has
1090 been generated. */
1091
1092 void
1093 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1094 const char *file ATTRIBUTE_UNUSED)
1095 {
1096 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1097
 1098    /* Output a label to mark the end of the prologue code generated for
 1099       this function.  */
1100 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1101 current_function_funcdef_no);
1102 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1103 current_function_funcdef_no);
1104 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1105 }
1106
1107 /* Output a marker (i.e. a label) for the beginning of the generated code
 1108     for a function epilogue.  This gets called *before* the epilogue code has
1109 been generated. */
1110
1111 void
1112 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1113 const char *file ATTRIBUTE_UNUSED)
1114 {
1115 dw_fde_ref fde = cfun->fde;
1116 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1117
1118 if (fde->dw_fde_vms_begin_epilogue)
1119 return;
1120
 1121    /* Output a label to mark the beginning of the epilogue code generated
 1122       for this function.  */
1123 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1124 current_function_funcdef_no);
1125 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1126 current_function_funcdef_no);
1127 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1128 }
1129
1130 /* Output a marker (i.e. a label) for the absolute end of the generated code
1131 for a function definition. This gets called *after* the epilogue code has
1132 been generated. */
1133
1134 void
1135 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1136 const char *file ATTRIBUTE_UNUSED)
1137 {
1138 dw_fde_ref fde;
1139 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1140
1141 last_var_location_insn = NULL;
1142 cached_next_real_insn = NULL;
1143
1144 if (dwarf2out_do_cfi_asm ())
1145 fprintf (asm_out_file, "\t.cfi_endproc\n");
1146
1147 /* Output a label to mark the endpoint of the code generated for this
1148 function. */
1149 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1150 current_function_funcdef_no);
1151 ASM_OUTPUT_LABEL (asm_out_file, label);
1152 fde = cfun->fde;
1153 gcc_assert (fde != NULL);
1154 if (fde->dw_fde_second_begin == NULL)
1155 fde->dw_fde_end = xstrdup (label);
1156 }
1157
1158 void
1159 dwarf2out_frame_finish (void)
1160 {
1161 /* Output call frame information. */
1162 if (targetm.debug_unwind_info () == UI_DWARF2)
1163 output_call_frame_info (0);
1164
1165 /* Output another copy for the unwinder. */
1166 if ((flag_unwind_tables || flag_exceptions)
1167 && targetm_common.except_unwind_info (&global_options) == UI_DWARF2)
1168 output_call_frame_info (1);
1169 }
1170
1171 /* Note that the current function section is being used for code. */
1172
1173 static void
1174 dwarf2out_note_section_used (void)
1175 {
1176 section *sec = current_function_section ();
1177 if (sec == text_section)
1178 text_section_used = true;
1179 else if (sec == cold_text_section)
1180 cold_text_section_used = true;
1181 }
1182
1183 static void var_location_switch_text_section (void);
1184 static void set_cur_line_info_table (section *);
1185
1186 void
1187 dwarf2out_switch_text_section (void)
1188 {
1189 section *sect;
1190 dw_fde_ref fde = cfun->fde;
1191
1192 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1193
1194 if (!in_cold_section_p)
1195 {
1196 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1197 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1198 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1199 }
1200 else
1201 {
1202 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1203 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1204 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1205 }
1206 have_multiple_function_sections = true;
1207
1208 /* There is no need to mark used sections when not debugging. */
1209 if (cold_text_section != NULL)
1210 dwarf2out_note_section_used ();
1211
1212 if (dwarf2out_do_cfi_asm ())
1213 fprintf (asm_out_file, "\t.cfi_endproc\n");
1214
1215 /* Now do the real section switch. */
1216 sect = current_function_section ();
1217 switch_to_section (sect);
1218
1219 fde->second_in_std_section
1220 = (sect == text_section
1221 || (cold_text_section && sect == cold_text_section));
1222
1223 if (dwarf2out_do_cfi_asm ())
1224 dwarf2out_do_cfi_startproc (true);
1225
1226 var_location_switch_text_section ();
1227
1228 if (cold_text_section != NULL)
1229 set_cur_line_info_table (sect);
1230 }
1231 \f
1232 /* And now, the subset of the debugging information support code necessary
1233 for emitting location expressions. */
1234
1235 /* Data about a single source file. */
1236 struct GTY((for_user)) dwarf_file_data {
1237 const char * filename;
1238 int emitted_number;
1239 };
1240
1241 /* Describe an entry into the .debug_addr section. */
1242
1243 enum ate_kind {
1244 ate_kind_rtx,
1245 ate_kind_rtx_dtprel,
1246 ate_kind_label
1247 };
1248
1249 struct GTY((for_user)) addr_table_entry {
1250 enum ate_kind kind;
1251 unsigned int refcount;
1252 unsigned int index;
1253 union addr_table_entry_struct_union
1254 {
1255 rtx GTY ((tag ("0"))) rtl;
1256 char * GTY ((tag ("1"))) label;
1257 }
1258 GTY ((desc ("%1.kind"))) addr;
1259 };
1260
1261 /* Location lists are ranges + location descriptions for that range,
1262 so you can track variables that are in different places over
1263 their entire life. */
1264 typedef struct GTY(()) dw_loc_list_struct {
1265 dw_loc_list_ref dw_loc_next;
1266 const char *begin; /* Label and addr_entry for start of range */
1267 addr_table_entry *begin_entry;
1268 const char *end; /* Label for end of range */
1269 char *ll_symbol; /* Label for beginning of location list.
1270 Only on head of list */
1271 const char *section; /* Section this loclist is relative to */
1272 dw_loc_descr_ref expr;
1273 hashval_t hash;
1274 /* True if all addresses in this and subsequent lists are known to be
1275 resolved. */
1276 bool resolved_addr;
1277 /* True if this list has been replaced by dw_loc_next. */
1278 bool replaced;
1279 bool emitted;
1280 /* True if the range should be emitted even if begin and end
1281 are the same. */
1282 bool force;
1283 } dw_loc_list_node;
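
/* Illustrative example (labels are hypothetical): a variable that starts
   life in a register and is later spilled to the stack might be described
   by the two-node list

       [.LVL0, .LVL1)  DW_OP_reg3
       [.LVL1, .LVL4)  DW_OP_fbreg -24

   where ll_symbol on the head node names the whole list in .debug_loc.  */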
1284
1285 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1286 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1287
1288 /* Convert a DWARF stack opcode into its string name. */
1289
1290 static const char *
1291 dwarf_stack_op_name (unsigned int op)
1292 {
1293 const char *name = get_DW_OP_name (op);
1294
1295 if (name != NULL)
1296 return name;
1297
1298 return "OP_<unknown>";
1299 }
1300
1301 /* Return a pointer to a newly allocated location description. Location
1302 descriptions are simple expression terms that can be strung
1303 together to form more complicated location (address) descriptions. */
1304
1305 static inline dw_loc_descr_ref
1306 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1307 unsigned HOST_WIDE_INT oprnd2)
1308 {
1309 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1310
1311 descr->dw_loc_opc = op;
1312 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1313 descr->dw_loc_oprnd1.val_entry = NULL;
1314 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1315 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1316 descr->dw_loc_oprnd2.val_entry = NULL;
1317 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1318
1319 return descr;
1320 }
1321
1322 /* Return a pointer to a newly allocated location description for
1323 REG and OFFSET. */
1324
1325 static inline dw_loc_descr_ref
1326 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1327 {
1328 if (reg <= 31)
1329 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1330 offset, 0);
1331 else
1332 return new_loc_descr (DW_OP_bregx, reg, offset);
1333 }
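
/* For instance (illustrative only), new_reg_loc_descr (5, 8) yields the
   single operation DW_OP_breg5 8, while new_reg_loc_descr (40, 8) must use
   the extended form DW_OP_bregx 40, 8, since 40 is outside the
   DW_OP_breg0..DW_OP_breg31 range.  */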
1334
1335 /* Add a location description term to a location description expression. */
1336
1337 static inline void
1338 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1339 {
1340 dw_loc_descr_ref *d;
1341
1342 /* Find the end of the chain. */
1343 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1344 ;
1345
1346 *d = descr;
1347 }
1348
1349 /* Compare two location operands for exact equality. */
1350
1351 static bool
1352 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1353 {
1354 if (a->val_class != b->val_class)
1355 return false;
1356 switch (a->val_class)
1357 {
1358 case dw_val_class_none:
1359 return true;
1360 case dw_val_class_addr:
1361 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1362
1363 case dw_val_class_offset:
1364 case dw_val_class_unsigned_const:
1365 case dw_val_class_const:
1366 case dw_val_class_range_list:
1367 case dw_val_class_lineptr:
1368 case dw_val_class_macptr:
1369 /* These are all HOST_WIDE_INT, signed or unsigned. */
1370 return a->v.val_unsigned == b->v.val_unsigned;
1371
1372 case dw_val_class_loc:
1373 return a->v.val_loc == b->v.val_loc;
1374 case dw_val_class_loc_list:
1375 return a->v.val_loc_list == b->v.val_loc_list;
1376 case dw_val_class_die_ref:
1377 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1378 case dw_val_class_fde_ref:
1379 return a->v.val_fde_index == b->v.val_fde_index;
1380 case dw_val_class_lbl_id:
1381 case dw_val_class_high_pc:
1382 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1383 case dw_val_class_str:
1384 return a->v.val_str == b->v.val_str;
1385 case dw_val_class_flag:
1386 return a->v.val_flag == b->v.val_flag;
1387 case dw_val_class_file:
1388 return a->v.val_file == b->v.val_file;
1389 case dw_val_class_decl_ref:
1390 return a->v.val_decl_ref == b->v.val_decl_ref;
1391
1392 case dw_val_class_const_double:
1393 return (a->v.val_double.high == b->v.val_double.high
1394 && a->v.val_double.low == b->v.val_double.low);
1395
1396 case dw_val_class_wide_int:
1397 return *a->v.val_wide == *b->v.val_wide;
1398
1399 case dw_val_class_vec:
1400 {
1401 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1402 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1403
1404 return (a_len == b_len
1405 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1406 }
1407
1408 case dw_val_class_data8:
1409 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1410
1411 case dw_val_class_vms_delta:
1412 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
 1413            && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1414
1415 case dw_val_class_discr_value:
1416 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1417 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1418 case dw_val_class_discr_list:
1419 /* It makes no sense comparing two discriminant value lists. */
1420 return false;
1421 }
1422 gcc_unreachable ();
1423 }
1424
1425 /* Compare two location atoms for exact equality. */
1426
1427 static bool
1428 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1429 {
1430 if (a->dw_loc_opc != b->dw_loc_opc)
1431 return false;
1432
1433 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1434 address size, but since we always allocate cleared storage it
1435 should be zero for other types of locations. */
1436 if (a->dtprel != b->dtprel)
1437 return false;
1438
1439 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1440 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1441 }
1442
1443 /* Compare two complete location expressions for exact equality. */
1444
1445 bool
1446 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1447 {
1448 while (1)
1449 {
1450 if (a == b)
1451 return true;
1452 if (a == NULL || b == NULL)
1453 return false;
1454 if (!loc_descr_equal_p_1 (a, b))
1455 return false;
1456
1457 a = a->dw_loc_next;
1458 b = b->dw_loc_next;
1459 }
1460 }
1461
1462
1463 /* Add a constant OFFSET to a location expression. */
1464
1465 static void
1466 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1467 {
1468 dw_loc_descr_ref loc;
1469 HOST_WIDE_INT *p;
1470
1471 gcc_assert (*list_head != NULL);
1472
1473 if (!offset)
1474 return;
1475
1476 /* Find the end of the chain. */
1477 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1478 ;
1479
1480 p = NULL;
1481 if (loc->dw_loc_opc == DW_OP_fbreg
1482 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1483 p = &loc->dw_loc_oprnd1.v.val_int;
1484 else if (loc->dw_loc_opc == DW_OP_bregx)
1485 p = &loc->dw_loc_oprnd2.v.val_int;
1486
1487 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
 1488      offset.  Don't optimize if a signed integer overflow would happen.  */
1489 if (p != NULL
1490 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1491 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1492 *p += offset;
1493
1494 else if (offset > 0)
1495 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1496
1497 else
1498 {
1499 loc->dw_loc_next = int_loc_descriptor (-offset);
1500 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1501 }
1502 }
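
/* For instance (illustrative): adding 8 to an expression ending in
   DW_OP_fbreg -24 simply rewrites that operand to -16; adding 8 to one
   ending in DW_OP_dup appends DW_OP_plus_uconst 8; and adding -8 appends a
   constant 8 (via int_loc_descriptor) followed by DW_OP_minus.  */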
1503
1504 /* Add a constant OFFSET to a location list. */
1505
1506 static void
1507 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1508 {
1509 dw_loc_list_ref d;
1510 for (d = list_head; d != NULL; d = d->dw_loc_next)
1511 loc_descr_plus_const (&d->expr, offset);
1512 }
1513
1514 #define DWARF_REF_SIZE \
1515 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1516
1517 static unsigned long int get_base_type_offset (dw_die_ref);
1518
1519 /* Return the size of a location descriptor. */
1520
1521 static unsigned long
1522 size_of_loc_descr (dw_loc_descr_ref loc)
1523 {
1524 unsigned long size = 1;
1525
1526 switch (loc->dw_loc_opc)
1527 {
1528 case DW_OP_addr:
1529 size += DWARF2_ADDR_SIZE;
1530 break;
1531 case DW_OP_GNU_addr_index:
1532 case DW_OP_GNU_const_index:
1533 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1534 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1535 break;
1536 case DW_OP_const1u:
1537 case DW_OP_const1s:
1538 size += 1;
1539 break;
1540 case DW_OP_const2u:
1541 case DW_OP_const2s:
1542 size += 2;
1543 break;
1544 case DW_OP_const4u:
1545 case DW_OP_const4s:
1546 size += 4;
1547 break;
1548 case DW_OP_const8u:
1549 case DW_OP_const8s:
1550 size += 8;
1551 break;
1552 case DW_OP_constu:
1553 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1554 break;
1555 case DW_OP_consts:
1556 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1557 break;
1558 case DW_OP_pick:
1559 size += 1;
1560 break;
1561 case DW_OP_plus_uconst:
1562 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1563 break;
1564 case DW_OP_skip:
1565 case DW_OP_bra:
1566 size += 2;
1567 break;
1568 case DW_OP_breg0:
1569 case DW_OP_breg1:
1570 case DW_OP_breg2:
1571 case DW_OP_breg3:
1572 case DW_OP_breg4:
1573 case DW_OP_breg5:
1574 case DW_OP_breg6:
1575 case DW_OP_breg7:
1576 case DW_OP_breg8:
1577 case DW_OP_breg9:
1578 case DW_OP_breg10:
1579 case DW_OP_breg11:
1580 case DW_OP_breg12:
1581 case DW_OP_breg13:
1582 case DW_OP_breg14:
1583 case DW_OP_breg15:
1584 case DW_OP_breg16:
1585 case DW_OP_breg17:
1586 case DW_OP_breg18:
1587 case DW_OP_breg19:
1588 case DW_OP_breg20:
1589 case DW_OP_breg21:
1590 case DW_OP_breg22:
1591 case DW_OP_breg23:
1592 case DW_OP_breg24:
1593 case DW_OP_breg25:
1594 case DW_OP_breg26:
1595 case DW_OP_breg27:
1596 case DW_OP_breg28:
1597 case DW_OP_breg29:
1598 case DW_OP_breg30:
1599 case DW_OP_breg31:
1600 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1601 break;
1602 case DW_OP_regx:
1603 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1604 break;
1605 case DW_OP_fbreg:
1606 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1607 break;
1608 case DW_OP_bregx:
1609 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1610 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1611 break;
1612 case DW_OP_piece:
1613 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1614 break;
1615 case DW_OP_bit_piece:
1616 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1617 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1618 break;
1619 case DW_OP_deref_size:
1620 case DW_OP_xderef_size:
1621 size += 1;
1622 break;
1623 case DW_OP_call2:
1624 size += 2;
1625 break;
1626 case DW_OP_call4:
1627 size += 4;
1628 break;
1629 case DW_OP_call_ref:
1630 size += DWARF_REF_SIZE;
1631 break;
1632 case DW_OP_implicit_value:
1633 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1634 + loc->dw_loc_oprnd1.v.val_unsigned;
1635 break;
1636 case DW_OP_GNU_implicit_pointer:
1637 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1638 break;
1639 case DW_OP_GNU_entry_value:
1640 {
1641 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1642 size += size_of_uleb128 (op_size) + op_size;
1643 break;
1644 }
1645 case DW_OP_GNU_const_type:
1646 {
1647 unsigned long o
1648 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1649 size += size_of_uleb128 (o) + 1;
1650 switch (loc->dw_loc_oprnd2.val_class)
1651 {
1652 case dw_val_class_vec:
1653 size += loc->dw_loc_oprnd2.v.val_vec.length
1654 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1655 break;
1656 case dw_val_class_const:
1657 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1658 break;
1659 case dw_val_class_const_double:
1660 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1661 break;
1662 case dw_val_class_wide_int:
1663 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1664 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1665 break;
1666 default:
1667 gcc_unreachable ();
1668 }
1669 break;
1670 }
1671 case DW_OP_GNU_regval_type:
1672 {
1673 unsigned long o
1674 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1675 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1676 + size_of_uleb128 (o);
1677 }
1678 break;
1679 case DW_OP_GNU_deref_type:
1680 {
1681 unsigned long o
1682 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1683 size += 1 + size_of_uleb128 (o);
1684 }
1685 break;
1686 case DW_OP_GNU_convert:
1687 case DW_OP_GNU_reinterpret:
1688 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1689 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1690 else
1691 {
1692 unsigned long o
1693 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1694 size += size_of_uleb128 (o);
1695 }
1696 break;
1697 case DW_OP_GNU_parameter_ref:
1698 size += 4;
1699 break;
1700 default:
1701 break;
1702 }
1703
1704 return size;
1705 }
1706
1707 /* Return the size of a series of location descriptors. */
1708
1709 unsigned long
1710 size_of_locs (dw_loc_descr_ref loc)
1711 {
1712 dw_loc_descr_ref l;
1713 unsigned long size;
1714
1715 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1716 field, to avoid writing to a PCH file. */
1717 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1718 {
1719 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1720 break;
1721 size += size_of_loc_descr (l);
1722 }
1723 if (! l)
1724 return size;
1725
1726 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1727 {
1728 l->dw_loc_addr = size;
1729 size += size_of_loc_descr (l);
1730 }
1731
1732 return size;
1733 }
1734
1735 /* Return the size of the value in a DW_AT_discr_value attribute. */
1736
1737 static int
1738 size_of_discr_value (dw_discr_value *discr_value)
1739 {
1740 if (discr_value->pos)
1741 return size_of_uleb128 (discr_value->v.uval);
1742 else
1743 return size_of_sleb128 (discr_value->v.sval);
1744 }
1745
1746 /* Return the size of the value in a DW_discr_list attribute. */
1747
1748 static int
1749 size_of_discr_list (dw_discr_list_ref discr_list)
1750 {
1751 int size = 0;
1752
1753 for (dw_discr_list_ref list = discr_list;
1754 list != NULL;
1755 list = list->dw_discr_next)
1756 {
1757 /* One byte for the discriminant value descriptor, and then one or two
1758 LEB128 numbers, depending on whether it's a single case label or a
1759 range label. */
1760 size += 1;
1761 size += size_of_discr_value (&list->dw_discr_lower_bound);
1762 if (list->dw_discr_range != 0)
1763 size += size_of_discr_value (&list->dw_discr_upper_bound);
1764 }
1765 return size;
1766 }
1767
1768 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1769 static void get_ref_die_offset_label (char *, dw_die_ref);
1770 static unsigned long int get_ref_die_offset (dw_die_ref);
1771
1772 /* Output location description stack opcode's operands (if any).
1773 The for_eh_or_skip parameter controls whether register numbers are
1774 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1775 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1776 info). This should be suppressed for the cases that have not been converted
1777 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1778
1779 static void
1780 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1781 {
1782 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1783 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1784
1785 switch (loc->dw_loc_opc)
1786 {
1787 #ifdef DWARF2_DEBUGGING_INFO
1788 case DW_OP_const2u:
1789 case DW_OP_const2s:
1790 dw2_asm_output_data (2, val1->v.val_int, NULL);
1791 break;
1792 case DW_OP_const4u:
1793 if (loc->dtprel)
1794 {
1795 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1796 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1797 val1->v.val_addr);
1798 fputc ('\n', asm_out_file);
1799 break;
1800 }
1801 /* FALLTHRU */
1802 case DW_OP_const4s:
1803 dw2_asm_output_data (4, val1->v.val_int, NULL);
1804 break;
1805 case DW_OP_const8u:
1806 if (loc->dtprel)
1807 {
1808 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1809 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
1810 val1->v.val_addr);
1811 fputc ('\n', asm_out_file);
1812 break;
1813 }
1814 /* FALLTHRU */
1815 case DW_OP_const8s:
1816 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
1817 dw2_asm_output_data (8, val1->v.val_int, NULL);
1818 break;
1819 case DW_OP_skip:
1820 case DW_OP_bra:
1821 {
1822 int offset;
1823
1824 gcc_assert (val1->val_class == dw_val_class_loc);
1825 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
1826
1827 dw2_asm_output_data (2, offset, NULL);
1828 }
1829 break;
1830 case DW_OP_implicit_value:
1831 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1832 switch (val2->val_class)
1833 {
1834 case dw_val_class_const:
1835 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
1836 break;
1837 case dw_val_class_vec:
1838 {
1839 unsigned int elt_size = val2->v.val_vec.elt_size;
1840 unsigned int len = val2->v.val_vec.length;
1841 unsigned int i;
1842 unsigned char *p;
1843
1844 if (elt_size > sizeof (HOST_WIDE_INT))
1845 {
1846 elt_size /= 2;
1847 len *= 2;
1848 }
1849 for (i = 0, p = val2->v.val_vec.array;
1850 i < len;
1851 i++, p += elt_size)
1852 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
1853 "fp or vector constant word %u", i);
1854 }
1855 break;
1856 case dw_val_class_const_double:
1857 {
1858 unsigned HOST_WIDE_INT first, second;
1859
1860 if (WORDS_BIG_ENDIAN)
1861 {
1862 first = val2->v.val_double.high;
1863 second = val2->v.val_double.low;
1864 }
1865 else
1866 {
1867 first = val2->v.val_double.low;
1868 second = val2->v.val_double.high;
1869 }
1870 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1871 first, NULL);
1872 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1873 second, NULL);
1874 }
1875 break;
1876 case dw_val_class_wide_int:
1877 {
1878 int i;
1879 int len = get_full_len (*val2->v.val_wide);
1880 if (WORDS_BIG_ENDIAN)
1881 for (i = len - 1; i >= 0; --i)
1882 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1883 val2->v.val_wide->elt (i), NULL);
1884 else
1885 for (i = 0; i < len; ++i)
1886 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1887 val2->v.val_wide->elt (i), NULL);
1888 }
1889 break;
1890 case dw_val_class_addr:
1891 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
1892 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
1893 break;
1894 default:
1895 gcc_unreachable ();
1896 }
1897 break;
1898 #else
1899 case DW_OP_const2u:
1900 case DW_OP_const2s:
1901 case DW_OP_const4u:
1902 case DW_OP_const4s:
1903 case DW_OP_const8u:
1904 case DW_OP_const8s:
1905 case DW_OP_skip:
1906 case DW_OP_bra:
1907 case DW_OP_implicit_value:
1908	        /* We currently don't make any attempt to make sure these are
1909	           aligned properly like we do for the main unwind info, so
1910	           we don't support emitting anything larger than a byte if we're
1911	           only doing unwinding.  */
1912 gcc_unreachable ();
1913 #endif
1914 case DW_OP_const1u:
1915 case DW_OP_const1s:
1916 dw2_asm_output_data (1, val1->v.val_int, NULL);
1917 break;
1918 case DW_OP_constu:
1919 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1920 break;
1921 case DW_OP_consts:
1922 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1923 break;
1924 case DW_OP_pick:
1925 dw2_asm_output_data (1, val1->v.val_int, NULL);
1926 break;
1927 case DW_OP_plus_uconst:
1928 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1929 break;
1930 case DW_OP_breg0:
1931 case DW_OP_breg1:
1932 case DW_OP_breg2:
1933 case DW_OP_breg3:
1934 case DW_OP_breg4:
1935 case DW_OP_breg5:
1936 case DW_OP_breg6:
1937 case DW_OP_breg7:
1938 case DW_OP_breg8:
1939 case DW_OP_breg9:
1940 case DW_OP_breg10:
1941 case DW_OP_breg11:
1942 case DW_OP_breg12:
1943 case DW_OP_breg13:
1944 case DW_OP_breg14:
1945 case DW_OP_breg15:
1946 case DW_OP_breg16:
1947 case DW_OP_breg17:
1948 case DW_OP_breg18:
1949 case DW_OP_breg19:
1950 case DW_OP_breg20:
1951 case DW_OP_breg21:
1952 case DW_OP_breg22:
1953 case DW_OP_breg23:
1954 case DW_OP_breg24:
1955 case DW_OP_breg25:
1956 case DW_OP_breg26:
1957 case DW_OP_breg27:
1958 case DW_OP_breg28:
1959 case DW_OP_breg29:
1960 case DW_OP_breg30:
1961 case DW_OP_breg31:
1962 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1963 break;
1964 case DW_OP_regx:
1965 {
1966 unsigned r = val1->v.val_unsigned;
1967 if (for_eh_or_skip >= 0)
1968 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1969 gcc_assert (size_of_uleb128 (r)
1970 == size_of_uleb128 (val1->v.val_unsigned));
1971 dw2_asm_output_data_uleb128 (r, NULL);
1972 }
1973 break;
1974 case DW_OP_fbreg:
1975 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1976 break;
1977 case DW_OP_bregx:
1978 {
1979 unsigned r = val1->v.val_unsigned;
1980 if (for_eh_or_skip >= 0)
1981 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1982 gcc_assert (size_of_uleb128 (r)
1983 == size_of_uleb128 (val1->v.val_unsigned));
1984 dw2_asm_output_data_uleb128 (r, NULL);
1985 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
1986 }
1987 break;
1988 case DW_OP_piece:
1989 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1990 break;
1991 case DW_OP_bit_piece:
1992 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1993 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
1994 break;
1995 case DW_OP_deref_size:
1996 case DW_OP_xderef_size:
1997 dw2_asm_output_data (1, val1->v.val_int, NULL);
1998 break;
1999
2000 case DW_OP_addr:
2001 if (loc->dtprel)
2002 {
2003 if (targetm.asm_out.output_dwarf_dtprel)
2004 {
2005 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2006 DWARF2_ADDR_SIZE,
2007 val1->v.val_addr);
2008 fputc ('\n', asm_out_file);
2009 }
2010 else
2011 gcc_unreachable ();
2012 }
2013 else
2014 {
2015 #ifdef DWARF2_DEBUGGING_INFO
2016 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2017 #else
2018 gcc_unreachable ();
2019 #endif
2020 }
2021 break;
2022
2023 case DW_OP_GNU_addr_index:
2024 case DW_OP_GNU_const_index:
2025 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2026 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2027 "(index into .debug_addr)");
2028 break;
2029
2030 case DW_OP_call2:
2031 case DW_OP_call4:
2032 {
2033 unsigned long die_offset
2034 = get_ref_die_offset (val1->v.val_die_ref.die);
2035 /* Make sure the offset has been computed and that we can encode it as
2036 an operand. */
2037 gcc_assert (die_offset > 0
2038 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2039 ? 0xffff
2040 : 0xffffffff));
2041 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2042 die_offset, NULL);
2043 }
2044 break;
2045
2046 case DW_OP_GNU_implicit_pointer:
2047 {
2048 char label[MAX_ARTIFICIAL_LABEL_BYTES
2049 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2050 gcc_assert (val1->val_class == dw_val_class_die_ref);
2051 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2052 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2053 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2054 }
2055 break;
2056
2057 case DW_OP_GNU_entry_value:
2058 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2059 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2060 break;
2061
2062 case DW_OP_GNU_const_type:
2063 {
2064 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2065 gcc_assert (o);
2066 dw2_asm_output_data_uleb128 (o, NULL);
2067 switch (val2->val_class)
2068 {
2069 case dw_val_class_const:
2070 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2071 dw2_asm_output_data (1, l, NULL);
2072 dw2_asm_output_data (l, val2->v.val_int, NULL);
2073 break;
2074 case dw_val_class_vec:
2075 {
2076 unsigned int elt_size = val2->v.val_vec.elt_size;
2077 unsigned int len = val2->v.val_vec.length;
2078 unsigned int i;
2079 unsigned char *p;
2080
2081 l = len * elt_size;
2082 dw2_asm_output_data (1, l, NULL);
2083 if (elt_size > sizeof (HOST_WIDE_INT))
2084 {
2085 elt_size /= 2;
2086 len *= 2;
2087 }
2088 for (i = 0, p = val2->v.val_vec.array;
2089 i < len;
2090 i++, p += elt_size)
2091 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2092 "fp or vector constant word %u", i);
2093 }
2094 break;
2095 case dw_val_class_const_double:
2096 {
2097 unsigned HOST_WIDE_INT first, second;
2098 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2099
2100 dw2_asm_output_data (1, 2 * l, NULL);
2101 if (WORDS_BIG_ENDIAN)
2102 {
2103 first = val2->v.val_double.high;
2104 second = val2->v.val_double.low;
2105 }
2106 else
2107 {
2108 first = val2->v.val_double.low;
2109 second = val2->v.val_double.high;
2110 }
2111 dw2_asm_output_data (l, first, NULL);
2112 dw2_asm_output_data (l, second, NULL);
2113 }
2114 break;
2115 case dw_val_class_wide_int:
2116 {
2117 int i;
2118 int len = get_full_len (*val2->v.val_wide);
2119 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2120
2121 dw2_asm_output_data (1, len * l, NULL);
2122 if (WORDS_BIG_ENDIAN)
2123 for (i = len - 1; i >= 0; --i)
2124 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2125 else
2126 for (i = 0; i < len; ++i)
2127 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2128 }
2129 break;
2130 default:
2131 gcc_unreachable ();
2132 }
2133 }
2134 break;
2135 case DW_OP_GNU_regval_type:
2136 {
2137 unsigned r = val1->v.val_unsigned;
2138 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2139 gcc_assert (o);
2140 if (for_eh_or_skip >= 0)
2141 {
2142 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2143 gcc_assert (size_of_uleb128 (r)
2144 == size_of_uleb128 (val1->v.val_unsigned));
2145 }
2146 dw2_asm_output_data_uleb128 (r, NULL);
2147 dw2_asm_output_data_uleb128 (o, NULL);
2148 }
2149 break;
2150 case DW_OP_GNU_deref_type:
2151 {
2152 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2153 gcc_assert (o);
2154 dw2_asm_output_data (1, val1->v.val_int, NULL);
2155 dw2_asm_output_data_uleb128 (o, NULL);
2156 }
2157 break;
2158 case DW_OP_GNU_convert:
2159 case DW_OP_GNU_reinterpret:
2160 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2161 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2162 else
2163 {
2164 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2165 gcc_assert (o);
2166 dw2_asm_output_data_uleb128 (o, NULL);
2167 }
2168 break;
2169
2170 case DW_OP_GNU_parameter_ref:
2171 {
2172 unsigned long o;
2173 gcc_assert (val1->val_class == dw_val_class_die_ref);
2174 o = get_ref_die_offset (val1->v.val_die_ref.die);
2175 dw2_asm_output_data (4, o, NULL);
2176 }
2177 break;
2178
2179 default:
2180 /* Other codes have no operands. */
2181 break;
2182 }
2183 }
2184
2185 /* Output a sequence of location operations.
2186 The for_eh_or_skip parameter controls whether register numbers are
2187 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2188 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2189 info). This should be suppressed for the cases that have not been converted
2190 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2191
2192 void
2193 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2194 {
2195 for (; loc != NULL; loc = loc->dw_loc_next)
2196 {
2197 enum dwarf_location_atom opc = loc->dw_loc_opc;
2198 /* Output the opcode. */
2199 if (for_eh_or_skip >= 0
2200 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2201 {
2202 unsigned r = (opc - DW_OP_breg0);
2203 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2204 gcc_assert (r <= 31);
2205 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2206 }
2207 else if (for_eh_or_skip >= 0
2208 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2209 {
2210 unsigned r = (opc - DW_OP_reg0);
2211 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2212 gcc_assert (r <= 31);
2213 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2214 }
2215
2216 dw2_asm_output_data (1, opc,
2217 "%s", dwarf_stack_op_name (opc));
2218
2219 /* Output the operand(s) (if any). */
2220 output_loc_operands (loc, for_eh_or_skip);
2221 }
2222 }
2223
2224 /* Output location description stack opcode's operands (if any).
2225 The output is single bytes on a line, suitable for .cfi_escape. */
2226
2227 static void
2228 output_loc_operands_raw (dw_loc_descr_ref loc)
2229 {
2230 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2231 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2232
2233 switch (loc->dw_loc_opc)
2234 {
2235 case DW_OP_addr:
2236 case DW_OP_GNU_addr_index:
2237 case DW_OP_GNU_const_index:
2238 case DW_OP_implicit_value:
2239 /* We cannot output addresses in .cfi_escape, only bytes. */
2240 gcc_unreachable ();
2241
2242 case DW_OP_const1u:
2243 case DW_OP_const1s:
2244 case DW_OP_pick:
2245 case DW_OP_deref_size:
2246 case DW_OP_xderef_size:
2247 fputc (',', asm_out_file);
2248 dw2_asm_output_data_raw (1, val1->v.val_int);
2249 break;
2250
2251 case DW_OP_const2u:
2252 case DW_OP_const2s:
2253 fputc (',', asm_out_file);
2254 dw2_asm_output_data_raw (2, val1->v.val_int);
2255 break;
2256
2257 case DW_OP_const4u:
2258 case DW_OP_const4s:
2259 fputc (',', asm_out_file);
2260 dw2_asm_output_data_raw (4, val1->v.val_int);
2261 break;
2262
2263 case DW_OP_const8u:
2264 case DW_OP_const8s:
2265 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2266 fputc (',', asm_out_file);
2267 dw2_asm_output_data_raw (8, val1->v.val_int);
2268 break;
2269
2270 case DW_OP_skip:
2271 case DW_OP_bra:
2272 {
2273 int offset;
2274
2275 gcc_assert (val1->val_class == dw_val_class_loc);
2276 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2277
2278 fputc (',', asm_out_file);
2279 dw2_asm_output_data_raw (2, offset);
2280 }
2281 break;
2282
2283 case DW_OP_regx:
2284 {
2285 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2286 gcc_assert (size_of_uleb128 (r)
2287 == size_of_uleb128 (val1->v.val_unsigned));
2288 fputc (',', asm_out_file);
2289 dw2_asm_output_data_uleb128_raw (r);
2290 }
2291 break;
2292
2293 case DW_OP_constu:
2294 case DW_OP_plus_uconst:
2295 case DW_OP_piece:
2296 fputc (',', asm_out_file);
2297 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2298 break;
2299
2300 case DW_OP_bit_piece:
2301 fputc (',', asm_out_file);
2302 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2303 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2304 break;
2305
2306 case DW_OP_consts:
2307 case DW_OP_breg0:
2308 case DW_OP_breg1:
2309 case DW_OP_breg2:
2310 case DW_OP_breg3:
2311 case DW_OP_breg4:
2312 case DW_OP_breg5:
2313 case DW_OP_breg6:
2314 case DW_OP_breg7:
2315 case DW_OP_breg8:
2316 case DW_OP_breg9:
2317 case DW_OP_breg10:
2318 case DW_OP_breg11:
2319 case DW_OP_breg12:
2320 case DW_OP_breg13:
2321 case DW_OP_breg14:
2322 case DW_OP_breg15:
2323 case DW_OP_breg16:
2324 case DW_OP_breg17:
2325 case DW_OP_breg18:
2326 case DW_OP_breg19:
2327 case DW_OP_breg20:
2328 case DW_OP_breg21:
2329 case DW_OP_breg22:
2330 case DW_OP_breg23:
2331 case DW_OP_breg24:
2332 case DW_OP_breg25:
2333 case DW_OP_breg26:
2334 case DW_OP_breg27:
2335 case DW_OP_breg28:
2336 case DW_OP_breg29:
2337 case DW_OP_breg30:
2338 case DW_OP_breg31:
2339 case DW_OP_fbreg:
2340 fputc (',', asm_out_file);
2341 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2342 break;
2343
2344 case DW_OP_bregx:
2345 {
2346 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2347 gcc_assert (size_of_uleb128 (r)
2348 == size_of_uleb128 (val1->v.val_unsigned));
2349 fputc (',', asm_out_file);
2350 dw2_asm_output_data_uleb128_raw (r);
2351 fputc (',', asm_out_file);
2352 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2353 }
2354 break;
2355
2356 case DW_OP_GNU_implicit_pointer:
2357 case DW_OP_GNU_entry_value:
2358 case DW_OP_GNU_const_type:
2359 case DW_OP_GNU_regval_type:
2360 case DW_OP_GNU_deref_type:
2361 case DW_OP_GNU_convert:
2362 case DW_OP_GNU_reinterpret:
2363 case DW_OP_GNU_parameter_ref:
2364 gcc_unreachable ();
2365 break;
2366
2367 default:
2368 /* Other codes have no operands. */
2369 break;
2370 }
2371 }
2372
2373 void
2374 output_loc_sequence_raw (dw_loc_descr_ref loc)
2375 {
2376 while (1)
2377 {
2378 enum dwarf_location_atom opc = loc->dw_loc_opc;
2379 /* Output the opcode. */
2380 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2381 {
2382 unsigned r = (opc - DW_OP_breg0);
2383 r = DWARF2_FRAME_REG_OUT (r, 1);
2384 gcc_assert (r <= 31);
2385 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2386 }
2387 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2388 {
2389 unsigned r = (opc - DW_OP_reg0);
2390 r = DWARF2_FRAME_REG_OUT (r, 1);
2391 gcc_assert (r <= 31);
2392 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2393 }
2394 /* Output the opcode. */
2395 fprintf (asm_out_file, "%#x", opc);
2396 output_loc_operands_raw (loc);
2397
2398 if (!loc->dw_loc_next)
2399 break;
2400 loc = loc->dw_loc_next;
2401
2402 fputc (',', asm_out_file);
2403 }
2404 }
2405
2406 /* This function builds a dwarf location descriptor sequence from a
2407 dw_cfa_location, adding the given OFFSET to the result of the
2408 expression. */
2409
2410 struct dw_loc_descr_node *
2411 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2412 {
2413 struct dw_loc_descr_node *head, *tmp;
2414
2415 offset += cfa->offset;
2416
2417 if (cfa->indirect)
2418 {
2419 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2420 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2421 head->dw_loc_oprnd1.val_entry = NULL;
2422 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2423 add_loc_descr (&head, tmp);
2424 if (offset != 0)
2425 {
2426 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2427 add_loc_descr (&head, tmp);
2428 }
2429 }
2430 else
2431 head = new_reg_loc_descr (cfa->reg, offset);
2432
2433 return head;
2434 }
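
/* A minimal usage sketch of build_cfa_loc (illustrative only; the field
   values below are hypothetical, not taken from real callers):

     dw_cfa_location cfa;
     cfa.reg = 7;               // some DWARF register number
     cfa.offset = 16;
     cfa.indirect = 0;
     dw_loc_descr_ref loc = build_cfa_loc (&cfa, 8);

   For a non-indirect CFA this yields a single register-relative operation
   (roughly DW_OP_breg7 24); with cfa.indirect set, the sequence instead
   reads cfa.reg at cfa.base_offset, dereferences it, and then adds any
   remaining offset with DW_OP_plus_uconst.  */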
2435
2436 /* This function builds a dwarf location descriptor sequence for
2437 the address at OFFSET from the CFA when stack is aligned to
2438 ALIGNMENT byte. */
2439
2440 struct dw_loc_descr_node *
2441 build_cfa_aligned_loc (dw_cfa_location *cfa,
2442 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2443 {
2444 struct dw_loc_descr_node *head;
2445 unsigned int dwarf_fp
2446 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2447
2448 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2449 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2450 {
2451 head = new_reg_loc_descr (dwarf_fp, 0);
2452 add_loc_descr (&head, int_loc_descriptor (alignment));
2453 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2454 loc_descr_plus_const (&head, offset);
2455 }
2456 else
2457 head = new_reg_loc_descr (dwarf_fp, offset);
2458 return head;
2459 }
2460 \f
2461 /* And now, the support for symbolic debugging information. */
2462
2463 /* .debug_str support. */
2464
2465 static void dwarf2out_init (const char *);
2466 static void dwarf2out_finish (const char *);
2467 static void dwarf2out_early_finish (const char *);
2468 static void dwarf2out_assembly_start (void);
2469 static void dwarf2out_define (unsigned int, const char *);
2470 static void dwarf2out_undef (unsigned int, const char *);
2471 static void dwarf2out_start_source_file (unsigned, const char *);
2472 static void dwarf2out_end_source_file (unsigned);
2473 static void dwarf2out_function_decl (tree);
2474 static void dwarf2out_begin_block (unsigned, unsigned);
2475 static void dwarf2out_end_block (unsigned, unsigned);
2476 static bool dwarf2out_ignore_block (const_tree);
2477 static void dwarf2out_early_global_decl (tree);
2478 static void dwarf2out_late_global_decl (tree);
2479 static void dwarf2out_type_decl (tree, int);
2480 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool);
2481 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2482 dw_die_ref);
2483 static void dwarf2out_abstract_function (tree);
2484 static void dwarf2out_var_location (rtx_insn *);
2485 static void dwarf2out_size_function (tree);
2486 static void dwarf2out_begin_function (tree);
2487 static void dwarf2out_end_function (unsigned int);
2488 static void dwarf2out_register_main_translation_unit (tree unit);
2489 static void dwarf2out_set_name (tree, tree);
2490
2491 /* The debug hooks structure. */
2492
2493 const struct gcc_debug_hooks dwarf2_debug_hooks =
2494 {
2495 dwarf2out_init,
2496 dwarf2out_finish,
2497 dwarf2out_early_finish,
2498 dwarf2out_assembly_start,
2499 dwarf2out_define,
2500 dwarf2out_undef,
2501 dwarf2out_start_source_file,
2502 dwarf2out_end_source_file,
2503 dwarf2out_begin_block,
2504 dwarf2out_end_block,
2505 dwarf2out_ignore_block,
2506 dwarf2out_source_line,
2507 dwarf2out_begin_prologue,
2508 #if VMS_DEBUGGING_INFO
2509 dwarf2out_vms_end_prologue,
2510 dwarf2out_vms_begin_epilogue,
2511 #else
2512 debug_nothing_int_charstar,
2513 debug_nothing_int_charstar,
2514 #endif
2515 dwarf2out_end_epilogue,
2516 dwarf2out_begin_function,
2517 dwarf2out_end_function, /* end_function */
2518 dwarf2out_register_main_translation_unit,
2519 dwarf2out_function_decl, /* function_decl */
2520 dwarf2out_early_global_decl,
2521 dwarf2out_late_global_decl,
2522 dwarf2out_type_decl, /* type_decl */
2523 dwarf2out_imported_module_or_decl,
2524 debug_nothing_tree, /* deferred_inline_function */
2525 /* The DWARF 2 backend tries to reduce debugging bloat by not
2526 emitting the abstract description of inline functions until
2527 something tries to reference them. */
2528 dwarf2out_abstract_function, /* outlining_inline_function */
2529 debug_nothing_rtx_code_label, /* label */
2530 debug_nothing_int, /* handle_pch */
2531 dwarf2out_var_location,
2532 dwarf2out_size_function, /* size_function */
2533 dwarf2out_switch_text_section,
2534 dwarf2out_set_name,
2535 1, /* start_end_main_source_file */
2536 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2537 };
2538
2539 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2540 {
2541 dwarf2out_init,
2542 debug_nothing_charstar,
2543 debug_nothing_charstar,
2544 dwarf2out_assembly_start,
2545 debug_nothing_int_charstar,
2546 debug_nothing_int_charstar,
2547 debug_nothing_int_charstar,
2548 debug_nothing_int,
2549 debug_nothing_int_int, /* begin_block */
2550 debug_nothing_int_int, /* end_block */
2551 debug_true_const_tree, /* ignore_block */
2552 dwarf2out_source_line, /* source_line */
2553 debug_nothing_int_charstar, /* begin_prologue */
2554 debug_nothing_int_charstar, /* end_prologue */
2555 debug_nothing_int_charstar, /* begin_epilogue */
2556 debug_nothing_int_charstar, /* end_epilogue */
2557 debug_nothing_tree, /* begin_function */
2558 debug_nothing_int, /* end_function */
2559 debug_nothing_tree, /* register_main_translation_unit */
2560 debug_nothing_tree, /* function_decl */
2561 debug_nothing_tree, /* early_global_decl */
2562 debug_nothing_tree, /* late_global_decl */
2563 debug_nothing_tree_int, /* type_decl */
2564 debug_nothing_tree_tree_tree_bool, /* imported_module_or_decl */
2565 debug_nothing_tree, /* deferred_inline_function */
2566 debug_nothing_tree, /* outlining_inline_function */
2567 debug_nothing_rtx_code_label, /* label */
2568 debug_nothing_int, /* handle_pch */
2569 debug_nothing_rtx_insn, /* var_location */
2570 debug_nothing_tree, /* size_function */
2571 debug_nothing_void, /* switch_text_section */
2572 debug_nothing_tree_tree, /* set_name */
2573 0, /* start_end_main_source_file */
2574 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2575 };
2576 \f
2577 /* NOTE: In the comments in this file, many references are made to
2578 "Debugging Information Entries". This term is abbreviated as `DIE'
2579 throughout the remainder of this file. */
2580
2581 /* An internal representation of the DWARF output is built, and then
2582 walked to generate the DWARF debugging info. The walk of the internal
2583 representation is done after the entire program has been compiled.
2584 The types below are used to describe the internal representation. */
2585
2586 /* Whether to put type DIEs into their own section .debug_types instead
2587 of making them part of the .debug_info section. Only supported for
2588	   DWARF version 4 or higher, and only if the user didn't disable them
2589	   through -fno-debug-types-section.  It is more efficient to put them
2590	   in separate comdat sections since the linker will then be able to
2591 remove duplicates. But not all tools support .debug_types sections
2592 yet. */
2593
2594 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2595
2596 /* Various DIE's use offsets relative to the beginning of the
2597 .debug_info section to refer to each other. */
2598
2599 typedef long int dw_offset;
2600
2601 struct comdat_type_node;
2602
2603 /* The entries in the line_info table more-or-less mirror the opcodes
2604 that are used in the real dwarf line table. Arrays of these entries
2605 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2606 supported. */
2607
2608 enum dw_line_info_opcode {
2609 /* Emit DW_LNE_set_address; the operand is the label index. */
2610 LI_set_address,
2611
2612 /* Emit a row to the matrix with the given line. This may be done
2613 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2614 special opcodes. */
2615 LI_set_line,
2616
2617 /* Emit a DW_LNS_set_file. */
2618 LI_set_file,
2619
2620 /* Emit a DW_LNS_set_column. */
2621 LI_set_column,
2622
2623 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2624 LI_negate_stmt,
2625
2626 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2627 LI_set_prologue_end,
2628 LI_set_epilogue_begin,
2629
2630 /* Emit a DW_LNE_set_discriminator. */
2631 LI_set_discriminator
2632 };
2633
2634 typedef struct GTY(()) dw_line_info_struct {
2635 enum dw_line_info_opcode opcode;
2636 unsigned int val;
2637 } dw_line_info_entry;
2638
2639
2640 struct GTY(()) dw_line_info_table {
2641 /* The label that marks the end of this section. */
2642 const char *end_label;
2643
2644 /* The values for the last row of the matrix, as collected in the table.
2645 These are used to minimize the changes to the next row. */
2646 unsigned int file_num;
2647 unsigned int line_num;
2648 unsigned int column_num;
2649 int discrim_num;
2650 bool is_stmt;
2651 bool in_use;
2652
2653 vec<dw_line_info_entry, va_gc> *entries;
2654 };
2655
2656
2657 /* Each DIE attribute has a field specifying the attribute kind,
2658 a link to the next attribute in the chain, and an attribute value.
2659 Attributes are typically linked below the DIE they modify. */
2660
2661 typedef struct GTY(()) dw_attr_struct {
2662 enum dwarf_attribute dw_attr;
2663 dw_val_node dw_attr_val;
2664 }
2665 dw_attr_node;
2666
2667
2668 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2669 The children of each node form a circular list linked by
2670 die_sib. die_child points to the node *before* the "first" child node. */
2671
2672 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2673 union die_symbol_or_type_node
2674 {
2675 const char * GTY ((tag ("0"))) die_symbol;
2676 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2677 }
2678 GTY ((desc ("%0.comdat_type_p"))) die_id;
2679 vec<dw_attr_node, va_gc> *die_attr;
2680 dw_die_ref die_parent;
2681 dw_die_ref die_child;
2682 dw_die_ref die_sib;
2683 dw_die_ref die_definition; /* ref from a specification to its definition */
2684 dw_offset die_offset;
2685 unsigned long die_abbrev;
2686 int die_mark;
2687 unsigned int decl_id;
2688 enum dwarf_tag die_tag;
2689	  /* The DIE is used and must not be pruned as unused.  */
2690 BOOL_BITFIELD die_perennial_p : 1;
2691 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2692 /* Whether this DIE was removed from the DIE tree, for example via
2693 prune_unused_types. We don't consider those present from the
2694 DIE lookup routines. */
2695 BOOL_BITFIELD removed : 1;
2696 /* Lots of spare bits. */
2697 }
2698 die_node;
2699
2700 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2701 static bool early_dwarf;
2702 static bool early_dwarf_finished;
2703 struct set_early_dwarf {
2704 bool saved;
2705 set_early_dwarf () : saved(early_dwarf)
2706 {
2707 gcc_assert (! early_dwarf_finished);
2708 early_dwarf = true;
2709 }
2710 ~set_early_dwarf () { early_dwarf = saved; }
2711 };
2712
2713 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2714 #define FOR_EACH_CHILD(die, c, expr) do { \
2715 c = die->die_child; \
2716 if (c) do { \
2717 c = c->die_sib; \
2718 expr; \
2719 } while (c != die->die_child); \
2720 } while (0)
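
/* For illustration (a sketch, not used anywhere in this file): counting the
   children of a DIE with the macro above could be written as

     dw_die_ref c;
     unsigned num_children = 0;
     FOR_EACH_CHILD (die, c, num_children++);

   Note that C is stepped to the next sibling *before* EXPR is evaluated, so
   the first iteration already sees the first child, and the loop terminates
   after EXPR has run on die->die_child, which is the last child.  */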
2721
2722 /* The pubname structure */
2723
2724 typedef struct GTY(()) pubname_struct {
2725 dw_die_ref die;
2726 const char *name;
2727 }
2728 pubname_entry;
2729
2730
2731 struct GTY(()) dw_ranges {
2732 /* If this is positive, it's a block number, otherwise it's a
2733 bitwise-negated index into dw_ranges_by_label. */
2734 int num;
2735 };
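
/* For example (hypothetical values): num == 5 denotes BLOCK number 5, while
   num == -3, i.e. ~2, denotes entry 2 of the ranges_by_label table declared
   further below.  */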
2736
2737 /* A structure to hold a macinfo entry. */
2738
2739 typedef struct GTY(()) macinfo_struct {
2740 unsigned char code;
2741 unsigned HOST_WIDE_INT lineno;
2742 const char *info;
2743 }
2744 macinfo_entry;
2745
2746
2747 struct GTY(()) dw_ranges_by_label {
2748 const char *begin;
2749 const char *end;
2750 };
2751
2752 /* The comdat type node structure. */
2753 struct GTY(()) comdat_type_node
2754 {
2755 dw_die_ref root_die;
2756 dw_die_ref type_die;
2757 dw_die_ref skeleton_die;
2758 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2759 comdat_type_node *next;
2760 };
2761
2762 /* A list of DIEs for which we can't determine ancestry (parent_die
2763 field) just yet. Later in dwarf2out_finish we will fill in the
2764 missing bits. */
2765 typedef struct GTY(()) limbo_die_struct {
2766 dw_die_ref die;
2767 /* The tree for which this DIE was created. We use this to
2768 determine ancestry later. */
2769 tree created_for;
2770 struct limbo_die_struct *next;
2771 }
2772 limbo_die_node;
2773
2774 typedef struct skeleton_chain_struct
2775 {
2776 dw_die_ref old_die;
2777 dw_die_ref new_die;
2778 struct skeleton_chain_struct *parent;
2779 }
2780 skeleton_chain_node;
2781
2782 /* Define a macro which returns nonzero for a TYPE_DECL which was
2783 implicitly generated for a type.
2784
2785 Note that, unlike the C front-end (which generates a NULL named
2786 TYPE_DECL node for each complete tagged type, each array type,
2787 and each function type node created) the C++ front-end generates
2788 a _named_ TYPE_DECL node for each tagged type node created.
2789 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
2790 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
2791 front-end, but for each type, tagged or not. */
2792
2793 #define TYPE_DECL_IS_STUB(decl) \
2794 (DECL_NAME (decl) == NULL_TREE \
2795 || (DECL_ARTIFICIAL (decl) \
2796 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
2797 /* This is necessary for stub decls that \
2798 appear in nested inline functions. */ \
2799 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
2800 && (decl_ultimate_origin (decl) \
2801 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
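
/* As a sketch of the intent (not an exhaustive list): the unnamed TYPE_DECL
   the C front-end creates for "struct foo { int i; };" is a stub (NULL
   DECL_NAME); the artificial TYPE_DECL "foo" the C++ front-end creates for
   the same type is a stub (it is the TYPE_STUB_DECL of the type); but a user
   written "typedef struct foo foo_t;" is not a stub and does get a
   DW_TAG_typedef DIE.  */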
2802
2803 /* Information concerning the compilation unit's programming
2804 language, and compiler version. */
2805
2806 /* Fixed size portion of the DWARF compilation unit header. */
2807 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
2808 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 3)
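
/* As a worked example (assuming 32-bit DWARF, where both
   DWARF_INITIAL_LENGTH_SIZE and DWARF_OFFSET_SIZE are 4): the fixed header is
   4 (unit length) + 2 (version) + 4 (.debug_abbrev offset) + 1 (address size)
   = 11 bytes; the "+ 3" above accounts for the 2-byte version field and the
   1-byte address size.  */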
2809
2810 /* Fixed size portion of the DWARF comdat type unit header. */
2811 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
2812 (DWARF_COMPILE_UNIT_HEADER_SIZE + DWARF_TYPE_SIGNATURE_SIZE \
2813 + DWARF_OFFSET_SIZE)
2814
2815 /* Fixed size portion of public names info. */
2816 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
2817
2818 /* Fixed size portion of the address range info. */
2819 #define DWARF_ARANGES_HEADER_SIZE \
2820 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2821 DWARF2_ADDR_SIZE * 2) \
2822 - DWARF_INITIAL_LENGTH_SIZE)
2823
2824 /* Size of padding portion in the address range info. It must be
2825 aligned to twice the pointer size. */
2826 #define DWARF_ARANGES_PAD_SIZE \
2827 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2828 DWARF2_ADDR_SIZE * 2) \
2829 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
2830
2831 /* Use assembler line directives if available. */
2832 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
2833 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
2834 #define DWARF2_ASM_LINE_DEBUG_INFO 1
2835 #else
2836 #define DWARF2_ASM_LINE_DEBUG_INFO 0
2837 #endif
2838 #endif
2839
2840 /* Minimum line offset in a special line info. opcode.
2841 This value was chosen to give a reasonable range of values. */
2842 #define DWARF_LINE_BASE -10
2843
2844 /* First special line opcode - leave room for the standard opcodes. */
2845 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
2846
2847 /* Range of line offsets in a special line info. opcode. */
2848 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
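
/* A short illustration (following the special-opcode formula of the DWARF
   line number program, not code from this file): a special opcode encodes a
   line and address advance pair as

     opcode = (line_delta - DWARF_LINE_BASE)
              + (DWARF_LINE_RANGE * addr_advance)
              + DWARF_LINE_OPCODE_BASE

   and is usable only when the result fits in a single byte (<= 255).  With
   DW_LNS_set_isa == 12, DWARF_LINE_OPCODE_BASE is 13 and DWARF_LINE_RANGE is
   242, so e.g. line_delta == 1 with addr_advance == 0 gives opcode 24.  */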
2849
2850 /* Flag that indicates the initial value of the is_stmt_start flag.
2851 In the present implementation, we do not mark any lines as
2852 the beginning of a source statement, because that information
2853 is not made available by the GCC front-end. */
2854 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
2855
2856 /* Maximum number of operations per instruction bundle. */
2857 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
2858 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
2859 #endif
2860
2861	/* This location is used by calc_die_sizes() to keep track of
2862	   the offset of each DIE within the .debug_info section.  */
2863 static unsigned long next_die_offset;
2864
2865 /* Record the root of the DIE's built for the current compilation unit. */
2866 static GTY(()) dw_die_ref single_comp_unit_die;
2867
2868 /* A list of type DIEs that have been separated into comdat sections. */
2869 static GTY(()) comdat_type_node *comdat_type_list;
2870
2871 /* A list of CU DIEs that have been separated. */
2872 static GTY(()) limbo_die_node *cu_die_list;
2873
2874 /* A list of DIEs with a NULL parent waiting to be relocated. */
2875 static GTY(()) limbo_die_node *limbo_die_list;
2876
2877 /* A list of DIEs for which we may have to generate
2878 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
2879 static GTY(()) limbo_die_node *deferred_asm_name;
2880
2881 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
2882 {
2883 typedef const char *compare_type;
2884
2885 static hashval_t hash (dwarf_file_data *);
2886 static bool equal (dwarf_file_data *, const char *);
2887 };
2888
2889 /* Filenames referenced by this compilation unit. */
2890 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
2891
2892 struct decl_die_hasher : ggc_ptr_hash<die_node>
2893 {
2894 typedef tree compare_type;
2895
2896 static hashval_t hash (die_node *);
2897 static bool equal (die_node *, tree);
2898 };
2899 /* A hash table of references to DIE's that describe declarations.
2900 The key is a DECL_UID() which is a unique number identifying each decl. */
2901 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
2902
2903 struct block_die_hasher : ggc_ptr_hash<die_struct>
2904 {
2905 static hashval_t hash (die_struct *);
2906 static bool equal (die_struct *, die_struct *);
2907 };
2908
2909 /* A hash table of references to DIE's that describe COMMON blocks.
2910 The key is DECL_UID() ^ die_parent. */
2911 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
2912
2913 typedef struct GTY(()) die_arg_entry_struct {
2914 dw_die_ref die;
2915 tree arg;
2916 } die_arg_entry;
2917
2918
2919 /* Node of the variable location list. */
2920 struct GTY ((chain_next ("%h.next"))) var_loc_node {
2921	  /* Either a NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
2922	     an EXPR_LIST chain.  For small bitsizes, the bitsize is encoded
2923	     in the mode of the EXPR_LIST node, and the first EXPR_LIST operand
2924	     is either a NOTE_INSN_VAR_LOCATION for a piece with a known
2925	     location or NULL for padding.  For larger bitsizes, the
2926	     mode is 0 and the first operand is a CONCAT with the bitsize
2927	     as its first operand and a NOTE_INSN_VAR_LOCATION or NULL,
2928	     respectively, as its second operand.  */
2929 rtx GTY (()) loc;
2930 const char * GTY (()) label;
2931 struct var_loc_node * GTY (()) next;
2932 };
2933
2934 /* Variable location list. */
2935 struct GTY ((for_user)) var_loc_list_def {
2936 struct var_loc_node * GTY (()) first;
2937
2938	  /* Pointer to the last or last-but-one element of the
2939	     chained list.  If the list is empty, both first and
2940	     last are NULL.  If the list contains just one node,
2941	     or if the last node is certainly not redundant, this points
2942	     to the last node; otherwise it points to the last but one.
2943	     Do not mark it for GC because it is marked through the chain.  */
2944 struct var_loc_node * GTY ((skip ("%h"))) last;
2945
2946	  /* Pointer to the last element before a section switch;
2947	     if NULL, either sections weren't switched or first
2948	     is after the section switch.  */
2949 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
2950
2951 /* DECL_UID of the variable decl. */
2952 unsigned int decl_id;
2953 };
2954 typedef struct var_loc_list_def var_loc_list;
2955
2956 /* Call argument location list. */
2957 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
2958 rtx GTY (()) call_arg_loc_note;
2959 const char * GTY (()) label;
2960 tree GTY (()) block;
2961 bool tail_call_p;
2962 rtx GTY (()) symbol_ref;
2963 struct call_arg_loc_node * GTY (()) next;
2964 };
2965
2966
2967 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
2968 {
2969 typedef const_tree compare_type;
2970
2971 static hashval_t hash (var_loc_list *);
2972 static bool equal (var_loc_list *, const_tree);
2973 };
2974
2975 /* Table of decl location linked lists. */
2976 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
2977
2978 /* Head and tail of call_arg_loc chain. */
2979 static GTY (()) struct call_arg_loc_node *call_arg_locations;
2980 static struct call_arg_loc_node *call_arg_loc_last;
2981
2982 /* Number of call sites in the current function. */
2983 static int call_site_count = -1;
2984 /* Number of tail call sites in the current function. */
2985 static int tail_call_site_count = -1;
2986
2987 /* A cached location list. */
2988 struct GTY ((for_user)) cached_dw_loc_list_def {
2989 /* The DECL_UID of the decl that this entry describes. */
2990 unsigned int decl_id;
2991
2992 /* The cached location list. */
2993 dw_loc_list_ref loc_list;
2994 };
2995 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
2996
2997 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
2998 {
2999
3000 typedef const_tree compare_type;
3001
3002 static hashval_t hash (cached_dw_loc_list *);
3003 static bool equal (cached_dw_loc_list *, const_tree);
3004 };
3005
3006 /* Table of cached location lists. */
3007 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3008
3009 /* A pointer to the base of a list of references to DIE's that
3010 are uniquely identified by their tag, presence/absence of
3011 children DIE's, and list of attribute/value pairs. */
3012 static GTY((length ("abbrev_die_table_allocated")))
3013 dw_die_ref *abbrev_die_table;
3014
3015 /* Number of elements currently allocated for abbrev_die_table. */
3016 static GTY(()) unsigned abbrev_die_table_allocated;
3017
3018 /* Number of elements in abbrev_die_table currently in use. */
3019 static GTY(()) unsigned abbrev_die_table_in_use;
3020
3021 /* A hash map to remember the stack usage for DWARF procedures. The value
3022	   stored is the difference in stack size between just before the DWARF
3023	   procedure is invoked and just after it returns.  In other words, for a
3024	   DWARF procedure that consumes N stack slots and pushes M, this stores M - N.  */
3025 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
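
/* For instance (a hypothetical example): a DWARF procedure that pops its
   single argument and pushes two results would be recorded here with the
   value 2 - 1 = 1.  */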
3026
3027 /* Size (in elements) of increments by which we may expand the
3028 abbrev_die_table. */
3029 #define ABBREV_DIE_TABLE_INCREMENT 256
3030
3031 /* A global counter for generating labels for line number data. */
3032 static unsigned int line_info_label_num;
3033
3034 /* The current table to which we should emit line number information
3035 for the current function. This will be set up at the beginning of
3036 assembly for the function. */
3037 static GTY(()) dw_line_info_table *cur_line_info_table;
3038
3039 /* The two default tables of line number info. */
3040 static GTY(()) dw_line_info_table *text_section_line_info;
3041 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3042
3043 /* The set of all non-default tables of line number info. */
3044 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3045
3046	/* A flag telling the pubnames/pubtypes export code whether there is an
3047	   info section to refer to.  */
3048 static bool info_section_emitted;
3049
3050 /* A pointer to the base of a table that contains a list of publicly
3051 accessible names. */
3052 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3053
3054 /* A pointer to the base of a table that contains a list of publicly
3055 accessible types. */
3056 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3057
3058 /* A pointer to the base of a table that contains a list of macro
3059 defines/undefines (and file start/end markers). */
3060 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3061
3062 /* True if .debug_macinfo or .debug_macros section is going to be
3063 emitted. */
3064 #define have_macinfo \
3065 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3066 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3067 && !macinfo_table->is_empty ())
3068
3069 /* Array of dies for which we should generate .debug_ranges info. */
3070 static GTY ((length ("ranges_table_allocated"))) dw_ranges *ranges_table;
3071
3072 /* Number of elements currently allocated for ranges_table. */
3073 static GTY(()) unsigned ranges_table_allocated;
3074
3075 /* Number of elements in ranges_table currently in use. */
3076 static GTY(()) unsigned ranges_table_in_use;
3077
3078 /* Array of pairs of labels referenced in ranges_table. */
3079 static GTY ((length ("ranges_by_label_allocated")))
3080 dw_ranges_by_label *ranges_by_label;
3081
3082 /* Number of elements currently allocated for ranges_by_label. */
3083 static GTY(()) unsigned ranges_by_label_allocated;
3084
3085 /* Number of elements in ranges_by_label currently in use. */
3086 static GTY(()) unsigned ranges_by_label_in_use;
3087
3088 /* Size (in elements) of increments by which we may expand the
3089 ranges_table. */
3090 #define RANGES_TABLE_INCREMENT 64
3091
3092	/* Whether we have location lists that need outputting.  */
3093 static GTY(()) bool have_location_lists;
3094
3095 /* Unique label counter. */
3096 static GTY(()) unsigned int loclabel_num;
3097
3098 /* Unique label counter for point-of-call tables. */
3099 static GTY(()) unsigned int poc_label_num;
3100
3101 /* The last file entry emitted by maybe_emit_file(). */
3102 static GTY(()) struct dwarf_file_data * last_emitted_file;
3103
3104 /* Number of internal labels generated by gen_internal_sym(). */
3105 static GTY(()) int label_num;
3106
3107 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3108
3109 /* Instances of generic types for which we need to generate debug
3110	   info that describes their generic parameters and arguments.  That
3111	   generation needs to happen once all types are properly laid out, so
3112 we do it at the end of compilation. */
3113 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3114
3115 /* Offset from the "steady-state frame pointer" to the frame base,
3116 within the current function. */
3117 static HOST_WIDE_INT frame_pointer_fb_offset;
3118 static bool frame_pointer_fb_offset_valid;
3119
3120 static vec<dw_die_ref> base_types;
3121
3122 /* Pointer to vector of DW_TAG_string_type DIEs that need finalization
3123 once all arguments are parsed. */
3124 static vec<dw_die_ref> *string_types;
3125
3126 /* Flags to represent a set of attribute classes for attributes that represent
3127 a scalar value (bounds, pointers, ...). */
3128 enum dw_scalar_form
3129 {
3130 dw_scalar_form_constant = 0x01,
3131 dw_scalar_form_exprloc = 0x02,
3132 dw_scalar_form_reference = 0x04
3133 };
3134
3135 /* Forward declarations for functions defined in this file. */
3136
3137 static int is_pseudo_reg (const_rtx);
3138 static tree type_main_variant (tree);
3139 static int is_tagged_type (const_tree);
3140 static const char *dwarf_tag_name (unsigned);
3141 static const char *dwarf_attr_name (unsigned);
3142 static const char *dwarf_form_name (unsigned);
3143 static tree decl_ultimate_origin (const_tree);
3144 static tree decl_class_context (tree);
3145 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3146 static inline enum dw_val_class AT_class (dw_attr_node *);
3147 static inline unsigned int AT_index (dw_attr_node *);
3148 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3149 static inline unsigned AT_flag (dw_attr_node *);
3150 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3151 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3152 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3153 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3154 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3155 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3156 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3157 unsigned int, unsigned char *);
3158 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3159 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3160 static inline const char *AT_string (dw_attr_node *);
3161 static enum dwarf_form AT_string_form (dw_attr_node *);
3162 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3163 static void add_AT_specification (dw_die_ref, dw_die_ref);
3164 static inline dw_die_ref AT_ref (dw_attr_node *);
3165 static inline int AT_ref_external (dw_attr_node *);
3166 static inline void set_AT_ref_external (dw_attr_node *, int);
3167 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3168 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3169 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3170 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3171 dw_loc_list_ref);
3172 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3173 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3174 static void remove_addr_table_entry (addr_table_entry *);
3175 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3176 static inline rtx AT_addr (dw_attr_node *);
3177 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3178 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3179 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3180 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3181 unsigned HOST_WIDE_INT);
3182 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3183 unsigned long, bool);
3184 static inline const char *AT_lbl (dw_attr_node *);
3185 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3186 static const char *get_AT_low_pc (dw_die_ref);
3187 static const char *get_AT_hi_pc (dw_die_ref);
3188 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3189 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3190 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3191 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3192 static bool is_cxx (void);
3193 static bool is_fortran (void);
3194 static bool is_ada (void);
3195 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3196 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3197 static void add_child_die (dw_die_ref, dw_die_ref);
3198 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3199 static dw_die_ref lookup_type_die (tree);
3200 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3201 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3202 static void equate_type_number_to_die (tree, dw_die_ref);
3203 static dw_die_ref lookup_decl_die (tree);
3204 static var_loc_list *lookup_decl_loc (const_tree);
3205 static void equate_decl_number_to_die (tree, dw_die_ref);
3206 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3207 static void print_spaces (FILE *);
3208 static void print_die (dw_die_ref, FILE *);
3209 static dw_die_ref push_new_compile_unit (dw_die_ref, dw_die_ref);
3210 static dw_die_ref pop_compile_unit (dw_die_ref);
3211 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3212 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3213 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3214 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3215 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3216 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3217 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3218 struct md5_ctx *, int *);
3219 struct checksum_attributes;
3220 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3221 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3222 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3223 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3224 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3225 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3226 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3227 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3228 static int same_die_p_wrap (dw_die_ref, dw_die_ref);
3229 static void compute_section_prefix (dw_die_ref);
3230 static int is_type_die (dw_die_ref);
3231 static int is_comdat_die (dw_die_ref);
3232 static int is_symbol_die (dw_die_ref);
3233 static inline bool is_template_instantiation (dw_die_ref);
3234 static void assign_symbol_names (dw_die_ref);
3235 static void break_out_includes (dw_die_ref);
3236 static int is_declaration_die (dw_die_ref);
3237 static int should_move_die_to_comdat (dw_die_ref);
3238 static dw_die_ref clone_as_declaration (dw_die_ref);
3239 static dw_die_ref clone_die (dw_die_ref);
3240 static dw_die_ref clone_tree (dw_die_ref);
3241 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3242 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3243 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3244 static dw_die_ref generate_skeleton (dw_die_ref);
3245 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3246 dw_die_ref,
3247 dw_die_ref);
3248 static void break_out_comdat_types (dw_die_ref);
3249 static void copy_decls_for_unworthy_types (dw_die_ref);
3250
3251 static void add_sibling_attributes (dw_die_ref);
3252 static void output_location_lists (dw_die_ref);
3253 static int constant_size (unsigned HOST_WIDE_INT);
3254 static unsigned long size_of_die (dw_die_ref);
3255 static void calc_die_sizes (dw_die_ref);
3256 static void calc_base_type_die_sizes (void);
3257 static void mark_dies (dw_die_ref);
3258 static void unmark_dies (dw_die_ref);
3259 static void unmark_all_dies (dw_die_ref);
3260 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3261 static unsigned long size_of_aranges (void);
3262 static enum dwarf_form value_format (dw_attr_node *);
3263 static void output_value_format (dw_attr_node *);
3264 static void output_abbrev_section (void);
3265 static void output_die_abbrevs (unsigned long, dw_die_ref);
3266 static void output_die_symbol (dw_die_ref);
3267 static void output_die (dw_die_ref);
3268 static void output_compilation_unit_header (void);
3269 static void output_comp_unit (dw_die_ref, int);
3270 static void output_comdat_type_unit (comdat_type_node *);
3271 static const char *dwarf2_name (tree, int);
3272 static void add_pubname (tree, dw_die_ref);
3273 static void add_enumerator_pubname (const char *, dw_die_ref);
3274 static void add_pubname_string (const char *, dw_die_ref);
3275 static void add_pubtype (tree, dw_die_ref);
3276 static void output_pubnames (vec<pubname_entry, va_gc> *);
3277 static void output_aranges (void);
3278 static unsigned int add_ranges_num (int);
3279 static unsigned int add_ranges (const_tree);
3280 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3281 bool *, bool);
3282 static void output_ranges (void);
3283 static dw_line_info_table *new_line_info_table (void);
3284 static void output_line_info (bool);
3285 static void output_file_names (void);
3286 static dw_die_ref base_type_die (tree, bool);
3287 static int is_base_type (tree);
3288 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3289 static int decl_quals (const_tree);
3290 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3291 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3292 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3293 static int type_is_enum (const_tree);
3294 static unsigned int dbx_reg_number (const_rtx);
3295 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3296 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3297 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3298 enum var_init_status);
3299 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3300 enum var_init_status);
3301 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3302 enum var_init_status);
3303 static int is_based_loc (const_rtx);
3304 static bool resolve_one_addr (rtx *);
3305 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3306 enum var_init_status);
3307 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3308 enum var_init_status);
3309 struct loc_descr_context;
3310 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3311 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3312 static dw_loc_list_ref loc_list_from_tree (tree, int,
3313 const struct loc_descr_context *);
3314 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3315 const struct loc_descr_context *);
3316 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3317 static tree field_type (const_tree);
3318 static unsigned int simple_type_align_in_bits (const_tree);
3319 static unsigned int simple_decl_align_in_bits (const_tree);
3320 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3321 struct vlr_context;
3322 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3323 HOST_WIDE_INT *);
3324 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3325 dw_loc_list_ref);
3326 static void add_data_member_location_attribute (dw_die_ref, tree,
3327 struct vlr_context *);
3328 static bool add_const_value_attribute (dw_die_ref, rtx);
3329 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3330 static void insert_wide_int (const wide_int &, unsigned char *, int);
3331 static void insert_float (const_rtx, unsigned char *);
3332 static rtx rtl_for_decl_location (tree);
3333 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3334 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3335 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3336 static void add_name_attribute (dw_die_ref, const char *);
3337 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3338 static void add_comp_dir_attribute (dw_die_ref);
3339 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3340 const struct loc_descr_context *);
3341 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3342 const struct loc_descr_context *);
3343 static void add_subscript_info (dw_die_ref, tree, bool);
3344 static void add_byte_size_attribute (dw_die_ref, tree);
3345 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3346 struct vlr_context *);
3347 static void add_bit_size_attribute (dw_die_ref, tree);
3348 static void add_prototyped_attribute (dw_die_ref, tree);
3349 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3350 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3351 static void add_src_coords_attributes (dw_die_ref, tree);
3352 static void add_name_and_src_coords_attributes (dw_die_ref, tree);
3353 static void add_discr_value (dw_die_ref, dw_discr_value *);
3354 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3355 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3356 static void push_decl_scope (tree);
3357 static void pop_decl_scope (void);
3358 static dw_die_ref scope_die_for (tree, dw_die_ref);
3359 static inline int local_scope_p (dw_die_ref);
3360 static inline int class_scope_p (dw_die_ref);
3361 static inline int class_or_namespace_scope_p (dw_die_ref);
3362 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3363 static void add_calling_convention_attribute (dw_die_ref, tree);
3364 static const char *type_tag (const_tree);
3365 static tree member_declared_type (const_tree);
3366 #if 0
3367 static const char *decl_start_label (tree);
3368 #endif
3369 static void gen_array_type_die (tree, dw_die_ref);
3370 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3371 #if 0
3372 static void gen_entry_point_die (tree, dw_die_ref);
3373 #endif
3374 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3375 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3376 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3377 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3378 static void gen_formal_types_die (tree, dw_die_ref);
3379 static void gen_subprogram_die (tree, dw_die_ref);
3380 static void gen_variable_die (tree, tree, dw_die_ref);
3381 static void gen_const_die (tree, dw_die_ref);
3382 static void gen_label_die (tree, dw_die_ref);
3383 static void gen_lexical_block_die (tree, dw_die_ref);
3384 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3385 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3386 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3387 static dw_die_ref gen_compile_unit_die (const char *);
3388 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3389 static void gen_member_die (tree, dw_die_ref);
3390 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3391 enum debug_info_usage);
3392 static void gen_subroutine_type_die (tree, dw_die_ref);
3393 static void gen_typedef_die (tree, dw_die_ref);
3394 static void gen_type_die (tree, dw_die_ref);
3395 static void gen_block_die (tree, dw_die_ref);
3396 static void decls_for_scope (tree, dw_die_ref);
3397 static bool is_naming_typedef_decl (const_tree);
3398 static inline dw_die_ref get_context_die (tree);
3399 static void gen_namespace_die (tree, dw_die_ref);
3400 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3401 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3402 static dw_die_ref force_decl_die (tree);
3403 static dw_die_ref force_type_die (tree);
3404 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3405 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3406 static struct dwarf_file_data * lookup_filename (const char *);
3407 static void retry_incomplete_types (void);
3408 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3409 static void gen_generic_params_dies (tree);
3410 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3411 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3412 static void splice_child_die (dw_die_ref, dw_die_ref);
3413 static int file_info_cmp (const void *, const void *);
3414 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3415 const char *, const char *);
3416 static void output_loc_list (dw_loc_list_ref);
3417 static char *gen_internal_sym (const char *);
3418 static bool want_pubnames (void);
3419
3420 static void prune_unmark_dies (dw_die_ref);
3421 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3422 static void prune_unused_types_mark (dw_die_ref, int);
3423 static void prune_unused_types_walk (dw_die_ref);
3424 static void prune_unused_types_walk_attribs (dw_die_ref);
3425 static void prune_unused_types_prune (dw_die_ref);
3426 static void prune_unused_types (void);
3427 static int maybe_emit_file (struct dwarf_file_data *fd);
3428 static inline const char *AT_vms_delta1 (dw_attr_node *);
3429 static inline const char *AT_vms_delta2 (dw_attr_node *);
3430 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3431 const char *, const char *);
3432 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3433 static void gen_remaining_tmpl_value_param_die_attribute (void);
3434 static bool generic_type_p (tree);
3435 static void schedule_generic_params_dies_gen (tree t);
3436 static void gen_scheduled_generic_parms_dies (void);
3437
3438 static const char *comp_dir_string (void);
3439
3440 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3441
3442 /* enum for tracking thread-local variables whose address is really an offset
3443 relative to the TLS pointer, which will need link-time relocation, but will
3444 not need relocation by the DWARF consumer. */
3445
3446 enum dtprel_bool
3447 {
3448 dtprel_false = 0,
3449 dtprel_true = 1
3450 };
3451
3452 /* Return the operator to use for an address of a variable. For dtprel_true, we
3453 use DW_OP_const*. For regular variables, which need both link-time
3454 relocation and consumer-level relocation (e.g., to account for shared objects
3455 loaded at a random address), we use DW_OP_addr*. */
3456
3457 static inline enum dwarf_location_atom
3458 dw_addr_op (enum dtprel_bool dtprel)
3459 {
3460 if (dtprel == dtprel_true)
3461 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3462 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3463 else
3464 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3465 }
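/* For reference, the selection above works out to the following cases
   (a summary sketch of dw_addr_op, not additional behavior):

     dtprel_true,  -gsplit-dwarf            -> DW_OP_GNU_const_index
     dtprel_true,  DWARF2_ADDR_SIZE == 4    -> DW_OP_const4u
     dtprel_true,  otherwise                -> DW_OP_const8u
     dtprel_false, -gsplit-dwarf            -> DW_OP_GNU_addr_index
     dtprel_false, otherwise                -> DW_OP_addr  */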
3466
3467 /* Return a pointer to a newly allocated address location description. If
3468 dwarf_split_debug_info is true, then record the address with the appropriate
3469 relocation. */
3470 static inline dw_loc_descr_ref
3471 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3472 {
3473 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3474
3475 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3476 ref->dw_loc_oprnd1.v.val_addr = addr;
3477 ref->dtprel = dtprel;
3478 if (dwarf_split_debug_info)
3479 ref->dw_loc_oprnd1.val_entry
3480 = add_addr_table_entry (addr,
3481 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3482 else
3483 ref->dw_loc_oprnd1.val_entry = NULL;
3484
3485 return ref;
3486 }
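/* A minimal usage sketch, kept under #if 0 following this file's convention
   for illustrative-only code.  EXAMPLE_RTL is a hypothetical SYMBOL_REF;
   the point is only that dtprel selects the operator via dw_addr_op and
   that, under -gsplit-dwarf, new_addr_loc_descr also records the address
   in the address table.  */
#if 0
static dw_loc_descr_ref
example_new_addr_loc_descr (rtx example_rtl)
{
  /* An ordinary (non-TLS) variable: DW_OP_addr, or DW_OP_GNU_addr_index
     plus an address table entry when dwarf_split_debug_info is set.  */
  return new_addr_loc_descr (example_rtl, dtprel_false);
}
#endif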
3487
3488 /* Section names used to hold DWARF debugging information. */
3489
3490 #ifndef DEBUG_INFO_SECTION
3491 #define DEBUG_INFO_SECTION ".debug_info"
3492 #endif
3493 #ifndef DEBUG_DWO_INFO_SECTION
3494 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3495 #endif
3496 #ifndef DEBUG_ABBREV_SECTION
3497 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3498 #endif
3499 #ifndef DEBUG_DWO_ABBREV_SECTION
3500 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3501 #endif
3502 #ifndef DEBUG_ARANGES_SECTION
3503 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3504 #endif
3505 #ifndef DEBUG_ADDR_SECTION
3506 #define DEBUG_ADDR_SECTION ".debug_addr"
3507 #endif
3508 #ifndef DEBUG_MACINFO_SECTION
3509 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3510 #endif
3511 #ifndef DEBUG_DWO_MACINFO_SECTION
3512 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3513 #endif
3514 #ifndef DEBUG_DWO_MACRO_SECTION
3515 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3516 #endif
3517 #ifndef DEBUG_MACRO_SECTION
3518 #define DEBUG_MACRO_SECTION ".debug_macro"
3519 #endif
3520 #ifndef DEBUG_LINE_SECTION
3521 #define DEBUG_LINE_SECTION ".debug_line"
3522 #endif
3523 #ifndef DEBUG_DWO_LINE_SECTION
3524 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3525 #endif
3526 #ifndef DEBUG_LOC_SECTION
3527 #define DEBUG_LOC_SECTION ".debug_loc"
3528 #endif
3529 #ifndef DEBUG_DWO_LOC_SECTION
3530 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3531 #endif
3532 #ifndef DEBUG_PUBNAMES_SECTION
3533 #define DEBUG_PUBNAMES_SECTION \
3534 ((debug_generate_pub_sections == 2) \
3535 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3536 #endif
3537 #ifndef DEBUG_PUBTYPES_SECTION
3538 #define DEBUG_PUBTYPES_SECTION \
3539 ((debug_generate_pub_sections == 2) \
3540 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3541 #endif
3542 #ifndef DEBUG_STR_OFFSETS_SECTION
3543 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3544 #endif
3545 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3546 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3547 #endif
3548 #ifndef DEBUG_STR_DWO_SECTION
3549 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3550 #endif
3551 #ifndef DEBUG_STR_SECTION
3552 #define DEBUG_STR_SECTION ".debug_str"
3553 #endif
3554 #ifndef DEBUG_RANGES_SECTION
3555 #define DEBUG_RANGES_SECTION ".debug_ranges"
3556 #endif
3557
3558 /* Standard ELF section names for compiled code and data. */
3559 #ifndef TEXT_SECTION_NAME
3560 #define TEXT_SECTION_NAME ".text"
3561 #endif
3562
3563 /* Section flags for .debug_str section. */
3564 #define DEBUG_STR_SECTION_FLAGS \
3565 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3566 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3567 : SECTION_DEBUG)
3568
3569 /* Section flags for .debug_str.dwo section. */
3570 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3571
3572 /* Labels we insert at the beginning of sections so that we can reference
3573    them instead of the section names themselves.  */
3574
3575 #ifndef TEXT_SECTION_LABEL
3576 #define TEXT_SECTION_LABEL "Ltext"
3577 #endif
3578 #ifndef COLD_TEXT_SECTION_LABEL
3579 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3580 #endif
3581 #ifndef DEBUG_LINE_SECTION_LABEL
3582 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3583 #endif
3584 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3585 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3586 #endif
3587 #ifndef DEBUG_INFO_SECTION_LABEL
3588 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3589 #endif
3590 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3591 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3592 #endif
3593 #ifndef DEBUG_ABBREV_SECTION_LABEL
3594 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3595 #endif
3596 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3597 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3598 #endif
3599 #ifndef DEBUG_ADDR_SECTION_LABEL
3600 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3601 #endif
3602 #ifndef DEBUG_LOC_SECTION_LABEL
3603 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3604 #endif
3605 #ifndef DEBUG_RANGES_SECTION_LABEL
3606 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3607 #endif
3608 #ifndef DEBUG_MACINFO_SECTION_LABEL
3609 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3610 #endif
3611 #ifndef DEBUG_MACRO_SECTION_LABEL
3612 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3613 #endif
3614 #define SKELETON_COMP_DIE_ABBREV 1
3615 #define SKELETON_TYPE_DIE_ABBREV 2
3616
3617 /* Definitions of defaults for formats and names of various special
3618    (artificial) labels which may be generated within this file (when the -g
3619    option is used and DWARF2_DEBUGGING_INFO is in effect).
3620    If necessary, these may be overridden from within the tm.h file, but
3621    typically, overriding these defaults is unnecessary.  */
3622
3623 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3624 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3625 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3626 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3627 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3628 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3629 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3630 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3631 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3632 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3633 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3634 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3635 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3636 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3637
3638 #ifndef TEXT_END_LABEL
3639 #define TEXT_END_LABEL "Letext"
3640 #endif
3641 #ifndef COLD_END_LABEL
3642 #define COLD_END_LABEL "Letext_cold"
3643 #endif
3644 #ifndef BLOCK_BEGIN_LABEL
3645 #define BLOCK_BEGIN_LABEL "LBB"
3646 #endif
3647 #ifndef BLOCK_END_LABEL
3648 #define BLOCK_END_LABEL "LBE"
3649 #endif
3650 #ifndef LINE_CODE_LABEL
3651 #define LINE_CODE_LABEL "LM"
3652 #endif
3653
3654 \f
3655 /* Return the root of the DIEs built for the current compilation unit.  */
3656 static dw_die_ref
3657 comp_unit_die (void)
3658 {
3659 if (!single_comp_unit_die)
3660 single_comp_unit_die = gen_compile_unit_die (NULL);
3661 return single_comp_unit_die;
3662 }
3663
3664 /* We allow a language front-end to designate a function that is to be
3665 called to "demangle" any name before it is put into a DIE. */
3666
3667 static const char *(*demangle_name_func) (const char *);
3668
3669 void
3670 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3671 {
3672 demangle_name_func = func;
3673 }
3674
3675 /* Test if rtl node points to a pseudo register. */
3676
3677 static inline int
3678 is_pseudo_reg (const_rtx rtl)
3679 {
3680 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3681 || (GET_CODE (rtl) == SUBREG
3682 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3683 }
3684
3685 /* Return a reference to a type, with its const and volatile qualifiers
3686 removed. */
3687
3688 static inline tree
3689 type_main_variant (tree type)
3690 {
3691 type = TYPE_MAIN_VARIANT (type);
3692
3693 /* ??? There really should be only one main variant among any group of
3694 variants of a given type (and all of the MAIN_VARIANT values for all
3695 members of the group should point to that one type) but sometimes the C
3696 front-end messes this up for array types, so we work around that bug
3697 here. */
3698 if (TREE_CODE (type) == ARRAY_TYPE)
3699 while (type != TYPE_MAIN_VARIANT (type))
3700 type = TYPE_MAIN_VARIANT (type);
3701
3702 return type;
3703 }
3704
3705 /* Return nonzero if the given type node represents a tagged type. */
3706
3707 static inline int
3708 is_tagged_type (const_tree type)
3709 {
3710 enum tree_code code = TREE_CODE (type);
3711
3712 return (code == RECORD_TYPE || code == UNION_TYPE
3713 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3714 }
3715
3716 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
3717
3718 static void
3719 get_ref_die_offset_label (char *label, dw_die_ref ref)
3720 {
3721 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
3722 }
3723
3724 /* Return die_offset of a DIE reference to a base type. */
3725
3726 static unsigned long int
3727 get_base_type_offset (dw_die_ref ref)
3728 {
3729 if (ref->die_offset)
3730 return ref->die_offset;
3731 if (comp_unit_die ()->die_abbrev)
3732 {
3733 calc_base_type_die_sizes ();
3734 gcc_assert (ref->die_offset);
3735 }
3736 return ref->die_offset;
3737 }
3738
3739 /* Return die_offset of a DIE reference other than base type. */
3740
3741 static unsigned long int
3742 get_ref_die_offset (dw_die_ref ref)
3743 {
3744 gcc_assert (ref->die_offset);
3745 return ref->die_offset;
3746 }
3747
3748 /* Convert a DIE tag into its string name. */
3749
3750 static const char *
3751 dwarf_tag_name (unsigned int tag)
3752 {
3753 const char *name = get_DW_TAG_name (tag);
3754
3755 if (name != NULL)
3756 return name;
3757
3758 return "DW_TAG_<unknown>";
3759 }
3760
3761 /* Convert a DWARF attribute code into its string name. */
3762
3763 static const char *
3764 dwarf_attr_name (unsigned int attr)
3765 {
3766 const char *name;
3767
3768 switch (attr)
3769 {
3770 #if VMS_DEBUGGING_INFO
3771 case DW_AT_HP_prologue:
3772 return "DW_AT_HP_prologue";
3773 #else
3774 case DW_AT_MIPS_loop_unroll_factor:
3775 return "DW_AT_MIPS_loop_unroll_factor";
3776 #endif
3777
3778 #if VMS_DEBUGGING_INFO
3779 case DW_AT_HP_epilogue:
3780 return "DW_AT_HP_epilogue";
3781 #else
3782 case DW_AT_MIPS_stride:
3783 return "DW_AT_MIPS_stride";
3784 #endif
3785 }
3786
3787 name = get_DW_AT_name (attr);
3788
3789 if (name != NULL)
3790 return name;
3791
3792 return "DW_AT_<unknown>";
3793 }
3794
3795 /* Convert a DWARF value form code into its string name. */
3796
3797 static const char *
3798 dwarf_form_name (unsigned int form)
3799 {
3800 const char *name = get_DW_FORM_name (form);
3801
3802 if (name != NULL)
3803 return name;
3804
3805 return "DW_FORM_<unknown>";
3806 }
3807 \f
3808 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
3809 instance of an inlined instance of a decl which is local to an inline
3810 function, so we have to trace all of the way back through the origin chain
3811 to find out what sort of node actually served as the original seed for the
3812 given block. */
3813
3814 static tree
3815 decl_ultimate_origin (const_tree decl)
3816 {
3817 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
3818 return NULL_TREE;
3819
3820 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
3821 we're trying to output the abstract instance of this function. */
3822 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
3823 return NULL_TREE;
3824
3825 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
3826 most distant ancestor, this should never happen. */
3827 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
3828
3829 return DECL_ABSTRACT_ORIGIN (decl);
3830 }
3831
3832 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
3833 of a virtual function may refer to a base class, so we check the 'this'
3834 parameter. */
3835
3836 static tree
3837 decl_class_context (tree decl)
3838 {
3839 tree context = NULL_TREE;
3840
3841 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
3842 context = DECL_CONTEXT (decl);
3843 else
3844 context = TYPE_MAIN_VARIANT
3845 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
3846
3847 if (context && !TYPE_P (context))
3848 context = NULL_TREE;
3849
3850 return context;
3851 }
3852 \f
3853 /* Add an attribute/value pair to a DIE. */
3854
3855 static inline void
3856 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
3857 {
3858 /* Maybe this should be an assert? */
3859 if (die == NULL)
3860 return;
3861
3862 vec_safe_reserve (die->die_attr, 1);
3863 vec_safe_push (die->die_attr, *attr);
3864 }
3865
3866 static inline enum dw_val_class
3867 AT_class (dw_attr_node *a)
3868 {
3869 return a->dw_attr_val.val_class;
3870 }
3871
3872 /* Return the index for any attribute that will be referenced with a
3873 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
3874 are stored in dw_attr_val.v.val_str for reference counting
3875 pruning. */
3876
3877 static inline unsigned int
3878 AT_index (dw_attr_node *a)
3879 {
3880 if (AT_class (a) == dw_val_class_str)
3881 return a->dw_attr_val.v.val_str->index;
3882 else if (a->dw_attr_val.val_entry != NULL)
3883 return a->dw_attr_val.val_entry->index;
3884 return NOT_INDEXED;
3885 }
3886
3887 /* Add a flag value attribute to a DIE. */
3888
3889 static inline void
3890 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
3891 {
3892 dw_attr_node attr;
3893
3894 attr.dw_attr = attr_kind;
3895 attr.dw_attr_val.val_class = dw_val_class_flag;
3896 attr.dw_attr_val.val_entry = NULL;
3897 attr.dw_attr_val.v.val_flag = flag;
3898 add_dwarf_attr (die, &attr);
3899 }
3900
3901 static inline unsigned
3902 AT_flag (dw_attr_node *a)
3903 {
3904 gcc_assert (a && AT_class (a) == dw_val_class_flag);
3905 return a->dw_attr_val.v.val_flag;
3906 }
3907
3908 /* Add a signed integer attribute value to a DIE. */
3909
3910 static inline void
3911 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
3912 {
3913 dw_attr_node attr;
3914
3915 attr.dw_attr = attr_kind;
3916 attr.dw_attr_val.val_class = dw_val_class_const;
3917 attr.dw_attr_val.val_entry = NULL;
3918 attr.dw_attr_val.v.val_int = int_val;
3919 add_dwarf_attr (die, &attr);
3920 }
3921
3922 static inline HOST_WIDE_INT
3923 AT_int (dw_attr_node *a)
3924 {
3925 gcc_assert (a && AT_class (a) == dw_val_class_const);
3926 return a->dw_attr_val.v.val_int;
3927 }
3928
3929 /* Add an unsigned integer attribute value to a DIE. */
3930
3931 static inline void
3932 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
3933 unsigned HOST_WIDE_INT unsigned_val)
3934 {
3935 dw_attr_node attr;
3936
3937 attr.dw_attr = attr_kind;
3938 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
3939 attr.dw_attr_val.val_entry = NULL;
3940 attr.dw_attr_val.v.val_unsigned = unsigned_val;
3941 add_dwarf_attr (die, &attr);
3942 }
3943
3944 static inline unsigned HOST_WIDE_INT
3945 AT_unsigned (dw_attr_node *a)
3946 {
3947 gcc_assert (a && AT_class (a) == dw_val_class_unsigned_const);
3948 return a->dw_attr_val.v.val_unsigned;
3949 }
3950
3951 /* Add an unsigned wide integer attribute value to a DIE. */
3952
3953 static inline void
3954 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
3955 const wide_int& w)
3956 {
3957 dw_attr_node attr;
3958
3959 attr.dw_attr = attr_kind;
3960 attr.dw_attr_val.val_class = dw_val_class_wide_int;
3961 attr.dw_attr_val.val_entry = NULL;
3962 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
3963 *attr.dw_attr_val.v.val_wide = w;
3964 add_dwarf_attr (die, &attr);
3965 }
3966
3967 /* Add an unsigned double integer attribute value to a DIE. */
3968
3969 static inline void
3970 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
3971 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
3972 {
3973 dw_attr_node attr;
3974
3975 attr.dw_attr = attr_kind;
3976 attr.dw_attr_val.val_class = dw_val_class_const_double;
3977 attr.dw_attr_val.val_entry = NULL;
3978 attr.dw_attr_val.v.val_double.high = high;
3979 attr.dw_attr_val.v.val_double.low = low;
3980 add_dwarf_attr (die, &attr);
3981 }
3982
3983 /* Add a floating point attribute value to a DIE.  */
3984
3985 static inline void
3986 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
3987 unsigned int length, unsigned int elt_size, unsigned char *array)
3988 {
3989 dw_attr_node attr;
3990
3991 attr.dw_attr = attr_kind;
3992 attr.dw_attr_val.val_class = dw_val_class_vec;
3993 attr.dw_attr_val.val_entry = NULL;
3994 attr.dw_attr_val.v.val_vec.length = length;
3995 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
3996 attr.dw_attr_val.v.val_vec.array = array;
3997 add_dwarf_attr (die, &attr);
3998 }
3999
4000 /* Add an 8-byte data attribute value to a DIE. */
4001
4002 static inline void
4003 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4004 unsigned char data8[8])
4005 {
4006 dw_attr_node attr;
4007
4008 attr.dw_attr = attr_kind;
4009 attr.dw_attr_val.val_class = dw_val_class_data8;
4010 attr.dw_attr_val.val_entry = NULL;
4011 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4012 add_dwarf_attr (die, &attr);
4013 }
4014
4015 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4016 dwarf_split_debug_info, address attributes in dies destined for the
4017 final executable have force_direct set to avoid using indexed
4018 references. */
4019
4020 static inline void
4021 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4022 bool force_direct)
4023 {
4024 dw_attr_node attr;
4025 char * lbl_id;
4026
4027 lbl_id = xstrdup (lbl_low);
4028 attr.dw_attr = DW_AT_low_pc;
4029 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4030 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4031 if (dwarf_split_debug_info && !force_direct)
4032 attr.dw_attr_val.val_entry
4033 = add_addr_table_entry (lbl_id, ate_kind_label);
4034 else
4035 attr.dw_attr_val.val_entry = NULL;
4036 add_dwarf_attr (die, &attr);
4037
4038 attr.dw_attr = DW_AT_high_pc;
4039 if (dwarf_version < 4)
4040 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4041 else
4042 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4043 lbl_id = xstrdup (lbl_high);
4044 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4045 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4046 && dwarf_split_debug_info && !force_direct)
4047 attr.dw_attr_val.val_entry
4048 = add_addr_table_entry (lbl_id, ate_kind_label);
4049 else
4050 attr.dw_attr_val.val_entry = NULL;
4051 add_dwarf_attr (die, &attr);
4052 }
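/* A hedged usage sketch (under #if 0).  The label names are hypothetical;
   real callers pass the begin/end labels emitted around the code range.
   With dwarf_version >= 4 the high-pc attribute is later written as an
   offset from low pc (dw_val_class_high_pc); otherwise it stays a label.  */
#if 0
static void
example_add_AT_low_high_pc (dw_die_ref die)
{
  add_AT_low_high_pc (die, "LFB42", "LFE42", /*force_direct=*/false);
}
#endif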
4053
4054 /* Hash and equality functions for debug_str_hash. */
4055
4056 hashval_t
4057 indirect_string_hasher::hash (indirect_string_node *x)
4058 {
4059 return htab_hash_string (x->str);
4060 }
4061
4062 bool
4063 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4064 {
4065 return strcmp (x1->str, x2) == 0;
4066 }
4067
4068 /* Add STR to the given string hash table. */
4069
4070 static struct indirect_string_node *
4071 find_AT_string_in_table (const char *str,
4072 hash_table<indirect_string_hasher> *table)
4073 {
4074 struct indirect_string_node *node;
4075
4076 indirect_string_node **slot
4077 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4078 if (*slot == NULL)
4079 {
4080 node = ggc_cleared_alloc<indirect_string_node> ();
4081 node->str = ggc_strdup (str);
4082 *slot = node;
4083 }
4084 else
4085 node = *slot;
4086
4087 node->refcount++;
4088 return node;
4089 }
4090
4091 /* Add STR to the indirect string hash table. */
4092
4093 static struct indirect_string_node *
4094 find_AT_string (const char *str)
4095 {
4096 if (! debug_str_hash)
4097 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4098
4099 return find_AT_string_in_table (str, debug_str_hash);
4100 }
4101
4102 /* Add a string attribute value to a DIE. */
4103
4104 static inline void
4105 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4106 {
4107 dw_attr_node attr;
4108 struct indirect_string_node *node;
4109
4110 node = find_AT_string (str);
4111
4112 attr.dw_attr = attr_kind;
4113 attr.dw_attr_val.val_class = dw_val_class_str;
4114 attr.dw_attr_val.val_entry = NULL;
4115 attr.dw_attr_val.v.val_str = node;
4116 add_dwarf_attr (die, &attr);
4117 }
4118
4119 static inline const char *
4120 AT_string (dw_attr_node *a)
4121 {
4122 gcc_assert (a && AT_class (a) == dw_val_class_str);
4123 return a->dw_attr_val.v.val_str->str;
4124 }
4125
4126 /* Call this function directly to bypass AT_string_form's logic to put
4127    the string inline in the DIE.  */
4128
4129 static void
4130 set_indirect_string (struct indirect_string_node *node)
4131 {
4132 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 /* Already indirect is a no op. */
4134 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4135 {
4136 gcc_assert (node->label);
4137 return;
4138 }
4139 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4140 ++dw2_string_counter;
4141 node->label = xstrdup (label);
4142
4143 if (!dwarf_split_debug_info)
4144 {
4145 node->form = DW_FORM_strp;
4146 node->index = NOT_INDEXED;
4147 }
4148 else
4149 {
4150 node->form = DW_FORM_GNU_str_index;
4151 node->index = NO_INDEX_ASSIGNED;
4152 }
4153 }
4154
4155 /* Find out whether a string should be output inline in DIE
4156 or out-of-line in .debug_str section. */
4157
4158 static enum dwarf_form
4159 find_string_form (struct indirect_string_node *node)
4160 {
4161 unsigned int len;
4162
4163 if (node->form)
4164 return node->form;
4165
4166 len = strlen (node->str) + 1;
4167
4168 /* If the string is shorter than or equal to the size of the reference, it
4169    is always better to put it inline.  */
4170 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4171 return node->form = DW_FORM_string;
4172
4173 /* If we cannot expect the linker to merge strings in the .debug_str
4174    section, only put the string into .debug_str if doing so pays off even
4175    within this single module.  */
4176 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4177 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4178 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4179 return node->form = DW_FORM_string;
4180
4181 set_indirect_string (node);
4182
4183 return node->form;
4184 }
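/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no SECTION_MERGE support: the string "abc" (len 4) satisfies
   len <= DWARF_OFFSET_SIZE and is always emitted inline as DW_FORM_string.
   An 18-character name (len 19) referenced once gives
   (19 - 4) * 1 = 15 <= 19, so inline still wins; referenced twice,
   (19 - 4) * 2 = 30 > 19, and the string is routed through
   set_indirect_string into .debug_str (or .debug_str.dwo).  */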
4185
4186 /* Find out whether the string referenced from the attribute should be
4187 output inline in DIE or out-of-line in .debug_str section. */
4188
4189 static enum dwarf_form
4190 AT_string_form (dw_attr_node *a)
4191 {
4192 gcc_assert (a && AT_class (a) == dw_val_class_str);
4193 return find_string_form (a->dw_attr_val.v.val_str);
4194 }
4195
4196 /* Add a DIE reference attribute value to a DIE. */
4197
4198 static inline void
4199 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4200 {
4201 dw_attr_node attr;
4202 gcc_checking_assert (targ_die != NULL);
4203
4204 /* With LTO we can end up trying to reference something we didn't create
4205 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4206 if (targ_die == NULL)
4207 return;
4208
4209 attr.dw_attr = attr_kind;
4210 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4211 attr.dw_attr_val.val_entry = NULL;
4212 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4213 attr.dw_attr_val.v.val_die_ref.external = 0;
4214 add_dwarf_attr (die, &attr);
4215 }
4216
4217 /* Change DIE reference REF to point to NEW_DIE instead. */
4218
4219 static inline void
4220 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4221 {
4222 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4223 ref->dw_attr_val.v.val_die_ref.die = new_die;
4224 ref->dw_attr_val.v.val_die_ref.external = 0;
4225 }
4226
4227 /* Add an AT_specification attribute to a DIE, and also make the back
4228 pointer from the specification to the definition. */
4229
4230 static inline void
4231 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4232 {
4233 add_AT_die_ref (die, DW_AT_specification, targ_die);
4234 gcc_assert (!targ_die->die_definition);
4235 targ_die->die_definition = die;
4236 }
4237
4238 static inline dw_die_ref
4239 AT_ref (dw_attr_node *a)
4240 {
4241 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4242 return a->dw_attr_val.v.val_die_ref.die;
4243 }
4244
4245 static inline int
4246 AT_ref_external (dw_attr_node *a)
4247 {
4248 if (a && AT_class (a) == dw_val_class_die_ref)
4249 return a->dw_attr_val.v.val_die_ref.external;
4250
4251 return 0;
4252 }
4253
4254 static inline void
4255 set_AT_ref_external (dw_attr_node *a, int i)
4256 {
4257 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4258 a->dw_attr_val.v.val_die_ref.external = i;
4259 }
4260
4261 /* Add an FDE reference attribute value to a DIE. */
4262
4263 static inline void
4264 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4265 {
4266 dw_attr_node attr;
4267
4268 attr.dw_attr = attr_kind;
4269 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4270 attr.dw_attr_val.val_entry = NULL;
4271 attr.dw_attr_val.v.val_fde_index = targ_fde;
4272 add_dwarf_attr (die, &attr);
4273 }
4274
4275 /* Add a location description attribute value to a DIE. */
4276
4277 static inline void
4278 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4279 {
4280 dw_attr_node attr;
4281
4282 attr.dw_attr = attr_kind;
4283 attr.dw_attr_val.val_class = dw_val_class_loc;
4284 attr.dw_attr_val.val_entry = NULL;
4285 attr.dw_attr_val.v.val_loc = loc;
4286 add_dwarf_attr (die, &attr);
4287 }
4288
4289 static inline dw_loc_descr_ref
4290 AT_loc (dw_attr_node *a)
4291 {
4292 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4293 return a->dw_attr_val.v.val_loc;
4294 }
4295
4296 static inline void
4297 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4298 {
4299 dw_attr_node attr;
4300
4301 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4302 return;
4303
4304 attr.dw_attr = attr_kind;
4305 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4306 attr.dw_attr_val.val_entry = NULL;
4307 attr.dw_attr_val.v.val_loc_list = loc_list;
4308 add_dwarf_attr (die, &attr);
4309 have_location_lists = true;
4310 }
4311
4312 static inline dw_loc_list_ref
4313 AT_loc_list (dw_attr_node *a)
4314 {
4315 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4316 return a->dw_attr_val.v.val_loc_list;
4317 }
4318
4319 static inline dw_loc_list_ref *
4320 AT_loc_list_ptr (dw_attr_node *a)
4321 {
4322 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4323 return &a->dw_attr_val.v.val_loc_list;
4324 }
4325
4326 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4327 {
4328 static hashval_t hash (addr_table_entry *);
4329 static bool equal (addr_table_entry *, addr_table_entry *);
4330 };
4331
4332 /* Table of entries into the .debug_addr section. */
4333
4334 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4335
4336 /* Hash an addr_table_entry.  */
4337
4338 hashval_t
4339 addr_hasher::hash (addr_table_entry *a)
4340 {
4341 inchash::hash hstate;
4342 switch (a->kind)
4343 {
4344 case ate_kind_rtx:
4345 hstate.add_int (0);
4346 break;
4347 case ate_kind_rtx_dtprel:
4348 hstate.add_int (1);
4349 break;
4350 case ate_kind_label:
4351 return htab_hash_string (a->addr.label);
4352 default:
4353 gcc_unreachable ();
4354 }
4355 inchash::add_rtx (a->addr.rtl, hstate);
4356 return hstate.end ();
4357 }
4358
4359 /* Determine equality for two addr_table_entries.  */
4360
4361 bool
4362 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4363 {
4364 if (a1->kind != a2->kind)
4365 return 0;
4366 switch (a1->kind)
4367 {
4368 case ate_kind_rtx:
4369 case ate_kind_rtx_dtprel:
4370 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4371 case ate_kind_label:
4372 return strcmp (a1->addr.label, a2->addr.label) == 0;
4373 default:
4374 gcc_unreachable ();
4375 }
4376 }
4377
4378 /* Initialize an addr_table_entry. */
4379
4380 void
4381 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4382 {
4383 e->kind = kind;
4384 switch (kind)
4385 {
4386 case ate_kind_rtx:
4387 case ate_kind_rtx_dtprel:
4388 e->addr.rtl = (rtx) addr;
4389 break;
4390 case ate_kind_label:
4391 e->addr.label = (char *) addr;
4392 break;
4393 }
4394 e->refcount = 0;
4395 e->index = NO_INDEX_ASSIGNED;
4396 }
4397
4398 /* Add an address table entry for ADDR of kind KIND to the table, creating
4399    it if necessary.  Defer setting an index until output time.  */
4400
4401 static addr_table_entry *
4402 add_addr_table_entry (void *addr, enum ate_kind kind)
4403 {
4404 addr_table_entry *node;
4405 addr_table_entry finder;
4406
4407 gcc_assert (dwarf_split_debug_info);
4408 if (! addr_index_table)
4409 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4410 init_addr_table_entry (&finder, kind, addr);
4411 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4412
4413 if (*slot == HTAB_EMPTY_ENTRY)
4414 {
4415 node = ggc_cleared_alloc<addr_table_entry> ();
4416 init_addr_table_entry (node, kind, addr);
4417 *slot = node;
4418 }
4419 else
4420 node = *slot;
4421
4422 node->refcount++;
4423 return node;
4424 }
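/* A minimal sketch (under #if 0) of how a label gets into the address
   table.  The label string is hypothetical; identical addresses share one
   entry (refcount counts the uses) and the index is only assigned later,
   by index_addr_table_entry at output time.  */
#if 0
static void
example_add_addr_table_entry (void)
{
  if (dwarf_split_debug_info)
    {
      addr_table_entry *e
        = add_addr_table_entry (xstrdup ("Lexample_label"), ate_kind_label);
      /* e->index is still NO_INDEX_ASSIGNED here.  */
      (void) e;
    }
}
#endif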
4425
4426 /* Remove an entry from the addr table by decrementing its refcount.
4427 Strictly, decrementing the refcount would be enough, but the
4428 assertion that the entry is actually in the table has found
4429 bugs. */
4430
4431 static void
4432 remove_addr_table_entry (addr_table_entry *entry)
4433 {
4434 gcc_assert (dwarf_split_debug_info && addr_index_table);
4435 /* After an index is assigned, the table is frozen. */
4436 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4437 entry->refcount--;
4438 }
4439
4440 /* Given a location list, remove all addresses it refers to from the
4441 address_table. */
4442
4443 static void
4444 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4445 {
4446 for (; descr; descr = descr->dw_loc_next)
4447 if (descr->dw_loc_oprnd1.val_entry != NULL)
4448 {
4449 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4450 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4451 }
4452 }
4453
4454 /* A helper function for dwarf2out_finish called through
4455 htab_traverse. Assign an addr_table_entry its index. All entries
4456 must be collected into the table when this function is called,
4457 because the indexing code relies on htab_traverse to traverse nodes
4458 in the same order for each run. */
4459
4460 int
4461 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4462 {
4463 addr_table_entry *node = *h;
4464
4465 /* Don't index unreferenced nodes. */
4466 if (node->refcount == 0)
4467 return 1;
4468
4469 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4470 node->index = *index;
4471 *index += 1;
4472
4473 return 1;
4474 }
4475
4476 /* Add an address constant attribute value to a DIE. When using
4477 dwarf_split_debug_info, address attributes in dies destined for the
4478 final executable should be direct references--setting the parameter
4479 force_direct ensures this behavior. */
4480
4481 static inline void
4482 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4483 bool force_direct)
4484 {
4485 dw_attr_node attr;
4486
4487 attr.dw_attr = attr_kind;
4488 attr.dw_attr_val.val_class = dw_val_class_addr;
4489 attr.dw_attr_val.v.val_addr = addr;
4490 if (dwarf_split_debug_info && !force_direct)
4491 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4492 else
4493 attr.dw_attr_val.val_entry = NULL;
4494 add_dwarf_attr (die, &attr);
4495 }
4496
4497 /* Get the RTX from an address DIE attribute.  */
4498
4499 static inline rtx
4500 AT_addr (dw_attr_node *a)
4501 {
4502 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4503 return a->dw_attr_val.v.val_addr;
4504 }
4505
4506 /* Add a file attribute value to a DIE. */
4507
4508 static inline void
4509 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4510 struct dwarf_file_data *fd)
4511 {
4512 dw_attr_node attr;
4513
4514 attr.dw_attr = attr_kind;
4515 attr.dw_attr_val.val_class = dw_val_class_file;
4516 attr.dw_attr_val.val_entry = NULL;
4517 attr.dw_attr_val.v.val_file = fd;
4518 add_dwarf_attr (die, &attr);
4519 }
4520
4521 /* Get the dwarf_file_data from a file DIE attribute. */
4522
4523 static inline struct dwarf_file_data *
4524 AT_file (dw_attr_node *a)
4525 {
4526 gcc_assert (a && AT_class (a) == dw_val_class_file);
4527 return a->dw_attr_val.v.val_file;
4528 }
4529
4530 /* Add a vms delta attribute value to a DIE. */
4531
4532 static inline void
4533 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4534 const char *lbl1, const char *lbl2)
4535 {
4536 dw_attr_node attr;
4537
4538 attr.dw_attr = attr_kind;
4539 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4540 attr.dw_attr_val.val_entry = NULL;
4541 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4542 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4543 add_dwarf_attr (die, &attr);
4544 }
4545
4546 /* Add a label identifier attribute value to a DIE. */
4547
4548 static inline void
4549 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4550 const char *lbl_id)
4551 {
4552 dw_attr_node attr;
4553
4554 attr.dw_attr = attr_kind;
4555 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4556 attr.dw_attr_val.val_entry = NULL;
4557 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4558 if (dwarf_split_debug_info)
4559 attr.dw_attr_val.val_entry
4560 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4561 ate_kind_label);
4562 add_dwarf_attr (die, &attr);
4563 }
4564
4565 /* Add a section offset attribute value to a DIE, an offset into the
4566 debug_line section. */
4567
4568 static inline void
4569 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4570 const char *label)
4571 {
4572 dw_attr_node attr;
4573
4574 attr.dw_attr = attr_kind;
4575 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4576 attr.dw_attr_val.val_entry = NULL;
4577 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4578 add_dwarf_attr (die, &attr);
4579 }
4580
4581 /* Add a section offset attribute value to a DIE, an offset into the
4582 debug_macinfo section. */
4583
4584 static inline void
4585 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4586 const char *label)
4587 {
4588 dw_attr_node attr;
4589
4590 attr.dw_attr = attr_kind;
4591 attr.dw_attr_val.val_class = dw_val_class_macptr;
4592 attr.dw_attr_val.val_entry = NULL;
4593 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4594 add_dwarf_attr (die, &attr);
4595 }
4596
4597 /* Add an offset attribute value to a DIE. */
4598
4599 static inline void
4600 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4601 unsigned HOST_WIDE_INT offset)
4602 {
4603 dw_attr_node attr;
4604
4605 attr.dw_attr = attr_kind;
4606 attr.dw_attr_val.val_class = dw_val_class_offset;
4607 attr.dw_attr_val.val_entry = NULL;
4608 attr.dw_attr_val.v.val_offset = offset;
4609 add_dwarf_attr (die, &attr);
4610 }
4611
4612 /* Add a range_list attribute value to a DIE. When using
4613 dwarf_split_debug_info, address attributes in dies destined for the
4614 final executable should be direct references--setting the parameter
4615 force_direct ensures this behavior. */
4616
4617 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4618 #define RELOCATED_OFFSET (NULL)
4619
4620 static void
4621 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4622 long unsigned int offset, bool force_direct)
4623 {
4624 dw_attr_node attr;
4625
4626 attr.dw_attr = attr_kind;
4627 attr.dw_attr_val.val_class = dw_val_class_range_list;
4628 /* For the range_list attribute, use val_entry to store whether the
4629 offset should follow split-debug-info or normal semantics. This
4630 value is read in output_range_list_offset. */
4631 if (dwarf_split_debug_info && !force_direct)
4632 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4633 else
4634 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4635 attr.dw_attr_val.v.val_offset = offset;
4636 add_dwarf_attr (die, &attr);
4637 }
4638
4639 /* Return the start label of a delta attribute. */
4640
4641 static inline const char *
4642 AT_vms_delta1 (dw_attr_node *a)
4643 {
4644 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4645 return a->dw_attr_val.v.val_vms_delta.lbl1;
4646 }
4647
4648 /* Return the end label of a delta attribute. */
4649
4650 static inline const char *
4651 AT_vms_delta2 (dw_attr_node *a)
4652 {
4653 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4654 return a->dw_attr_val.v.val_vms_delta.lbl2;
4655 }
4656
4657 static inline const char *
4658 AT_lbl (dw_attr_node *a)
4659 {
4660 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4661 || AT_class (a) == dw_val_class_lineptr
4662 || AT_class (a) == dw_val_class_macptr
4663 || AT_class (a) == dw_val_class_high_pc));
4664 return a->dw_attr_val.v.val_lbl_id;
4665 }
4666
4667 /* Get the attribute of type attr_kind. */
4668
4669 static dw_attr_node *
4670 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4671 {
4672 dw_attr_node *a;
4673 unsigned ix;
4674 dw_die_ref spec = NULL;
4675
4676 if (! die)
4677 return NULL;
4678
4679 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4680 if (a->dw_attr == attr_kind)
4681 return a;
4682 else if (a->dw_attr == DW_AT_specification
4683 || a->dw_attr == DW_AT_abstract_origin)
4684 spec = AT_ref (a);
4685
4686 if (spec)
4687 return get_AT (spec, attr_kind);
4688
4689 return NULL;
4690 }
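/* An illustrative sketch (under #if 0).  FUNC_DIE is hypothetical; it shows
   that get_AT transparently follows DW_AT_specification and
   DW_AT_abstract_origin, so e.g. a member-function definition DIE without
   its own DW_AT_name still yields the name from the in-class declaration.  */
#if 0
static const char *
example_lookup_name (dw_die_ref func_die)
{
  dw_attr_node *a = get_AT (func_die, DW_AT_name);
  return a ? AT_string (a) : NULL;
}
#endif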
4691
4692 /* Returns the parent of the declaration of DIE. */
4693
4694 static dw_die_ref
4695 get_die_parent (dw_die_ref die)
4696 {
4697 dw_die_ref t;
4698
4699 if (!die)
4700 return NULL;
4701
4702 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
4703 || (t = get_AT_ref (die, DW_AT_specification)))
4704 die = t;
4705
4706 return die->die_parent;
4707 }
4708
4709 /* Return the "low pc" attribute value, typically associated with a subprogram
4710 DIE. Return null if the "low pc" attribute is either not present, or if it
4711 cannot be represented as an assembler label identifier. */
4712
4713 static inline const char *
4714 get_AT_low_pc (dw_die_ref die)
4715 {
4716 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
4717
4718 return a ? AT_lbl (a) : NULL;
4719 }
4720
4721 /* Return the "high pc" attribute value, typically associated with a subprogram
4722 DIE. Return null if the "high pc" attribute is either not present, or if it
4723 cannot be represented as an assembler label identifier. */
4724
4725 static inline const char *
4726 get_AT_hi_pc (dw_die_ref die)
4727 {
4728 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
4729
4730 return a ? AT_lbl (a) : NULL;
4731 }
4732
4733 /* Return the value of the string attribute designated by ATTR_KIND, or
4734 NULL if it is not present. */
4735
4736 static inline const char *
4737 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
4738 {
4739 dw_attr_node *a = get_AT (die, attr_kind);
4740
4741 return a ? AT_string (a) : NULL;
4742 }
4743
4744 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
4745    if it is not present.  */
4746
4747 static inline int
4748 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
4749 {
4750 dw_attr_node *a = get_AT (die, attr_kind);
4751
4752 return a ? AT_flag (a) : 0;
4753 }
4754
4755 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
4756 if it is not present. */
4757
4758 static inline unsigned
4759 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
4760 {
4761 dw_attr_node *a = get_AT (die, attr_kind);
4762
4763 return a ? AT_unsigned (a) : 0;
4764 }
4765
4766 static inline dw_die_ref
4767 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
4768 {
4769 dw_attr_node *a = get_AT (die, attr_kind);
4770
4771 return a ? AT_ref (a) : NULL;
4772 }
4773
4774 static inline struct dwarf_file_data *
4775 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
4776 {
4777 dw_attr_node *a = get_AT (die, attr_kind);
4778
4779 return a ? AT_file (a) : NULL;
4780 }
4781
4782 /* Return TRUE if the language is C++. */
4783
4784 static inline bool
4785 is_cxx (void)
4786 {
4787 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4788
4789 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
4790 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
4791 }
4792
4793 /* Return TRUE if the language is Java. */
4794
4795 static inline bool
4796 is_java (void)
4797 {
4798 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4799
4800 return lang == DW_LANG_Java;
4801 }
4802
4803 /* Return TRUE if the language is Fortran. */
4804
4805 static inline bool
4806 is_fortran (void)
4807 {
4808 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4809
4810 return (lang == DW_LANG_Fortran77
4811 || lang == DW_LANG_Fortran90
4812 || lang == DW_LANG_Fortran95
4813 || lang == DW_LANG_Fortran03
4814 || lang == DW_LANG_Fortran08);
4815 }
4816
4817 /* Return TRUE if the language is Ada. */
4818
4819 static inline bool
4820 is_ada (void)
4821 {
4822 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4823
4824 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
4825 }
4826
4827 /* Remove the specified attribute if present. Return TRUE if removal
4828 was successful. */
4829
4830 static bool
4831 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4832 {
4833 dw_attr_node *a;
4834 unsigned ix;
4835
4836 if (! die)
4837 return false;
4838
4839 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4840 if (a->dw_attr == attr_kind)
4841 {
4842 if (AT_class (a) == dw_val_class_str)
4843 if (a->dw_attr_val.v.val_str->refcount)
4844 a->dw_attr_val.v.val_str->refcount--;
4845
4846 /* vec::ordered_remove should help reduce the number of abbrevs
4847 that are needed. */
4848 die->die_attr->ordered_remove (ix);
4849 return true;
4850 }
4851 return false;
4852 }
4853
4854 /* Remove CHILD from its parent. PREV must have the property that
4855 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
4856
4857 static void
4858 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
4859 {
4860 gcc_assert (child->die_parent == prev->die_parent);
4861 gcc_assert (prev->die_sib == child);
4862 if (prev == child)
4863 {
4864 gcc_assert (child->die_parent->die_child == child);
4865 prev = NULL;
4866 }
4867 else
4868 prev->die_sib = child->die_sib;
4869 if (child->die_parent->die_child == child)
4870 child->die_parent->die_child = prev;
4871 child->die_sib = NULL;
4872 }
4873
4874 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
4875 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
4876
4877 static void
4878 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
4879 {
4880 dw_die_ref parent = old_child->die_parent;
4881
4882 gcc_assert (parent == prev->die_parent);
4883 gcc_assert (prev->die_sib == old_child);
4884
4885 new_child->die_parent = parent;
4886 if (prev == old_child)
4887 {
4888 gcc_assert (parent->die_child == old_child);
4889 new_child->die_sib = new_child;
4890 }
4891 else
4892 {
4893 prev->die_sib = new_child;
4894 new_child->die_sib = old_child->die_sib;
4895 }
4896 if (old_child->die_parent->die_child == old_child)
4897 old_child->die_parent->die_child = new_child;
4898 old_child->die_sib = NULL;
4899 }
4900
4901 /* Move all children from OLD_PARENT to NEW_PARENT. */
4902
4903 static void
4904 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
4905 {
4906 dw_die_ref c;
4907 new_parent->die_child = old_parent->die_child;
4908 old_parent->die_child = NULL;
4909 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
4910 }
4911
4912 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
4913 matches TAG. */
4914
4915 static void
4916 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
4917 {
4918 dw_die_ref c;
4919
4920 c = die->die_child;
4921 if (c) do {
4922 dw_die_ref prev = c;
4923 c = c->die_sib;
4924 while (c->die_tag == tag)
4925 {
4926 remove_child_with_prev (c, prev);
4927 c->die_parent = NULL;
4928 /* Might have removed every child. */
4929 if (die->die_child == NULL)
4930 return;
4931 c = prev->die_sib;
4932 }
4933 } while (c != die->die_child);
4934 }
4935
4936 /* Add a CHILD_DIE as the last child of DIE. */
4937
4938 static void
4939 add_child_die (dw_die_ref die, dw_die_ref child_die)
4940 {
4941 /* FIXME this should probably be an assert. */
4942 if (! die || ! child_die)
4943 return;
4944 gcc_assert (die != child_die);
4945
4946 child_die->die_parent = die;
4947 if (die->die_child)
4948 {
4949 child_die->die_sib = die->die_child->die_sib;
4950 die->die_child->die_sib = child_die;
4951 }
4952 else
4953 child_die->die_sib = child_die;
4954 die->die_child = child_die;
4955 }
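/* To make the layout above concrete (a descriptive sketch, assuming
   children named A and B added to parent P in that order): the children
   form a circular singly linked list in which P->die_child points at the
   *last* child, and that child's die_sib points back at the first, so

     P->die_child == B,  B->die_sib == A,  A->die_sib == B.

   Appending therefore stays O(1) while iteration still visits children in
   insertion order.  */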
4956
4957 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
4958
4959 static void
4960 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
4961 dw_die_ref after_die)
4962 {
4963 gcc_assert (die
4964 && child_die
4965 && after_die
4966 && die->die_child
4967 && die != child_die);
4968
4969 child_die->die_parent = die;
4970 child_die->die_sib = after_die->die_sib;
4971 after_die->die_sib = child_die;
4972 if (die->die_child == after_die)
4973 die->die_child = child_die;
4974 }
4975
4976 /* Unassociate CHILD from its parent, and make its parent be
4977 NEW_PARENT. */
4978
4979 static void
4980 reparent_child (dw_die_ref child, dw_die_ref new_parent)
4981 {
4982 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
4983 if (p->die_sib == child)
4984 {
4985 remove_child_with_prev (child, p);
4986 break;
4987 }
4988 add_child_die (new_parent, child);
4989 }
4990
4991 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
4992 is the specification, to the end of PARENT's list of children.
4993 This is done by removing and re-adding it. */
4994
4995 static void
4996 splice_child_die (dw_die_ref parent, dw_die_ref child)
4997 {
4998 /* We want the declaration DIE from inside the class, not the
4999 specification DIE at toplevel. */
5000 if (child->die_parent != parent)
5001 {
5002 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5003
5004 if (tmp)
5005 child = tmp;
5006 }
5007
5008 gcc_assert (child->die_parent == parent
5009 || (child->die_parent
5010 == get_AT_ref (parent, DW_AT_specification)));
5011
5012 reparent_child (child, parent);
5013 }
5014
5015 /* Create and return a new die with a parent of PARENT_DIE. If
5016 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5017 associated tree T must be supplied to determine parenthood
5018 later. */
5019
5020 static inline dw_die_ref
5021 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5022 {
5023 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5024
5025 die->die_tag = tag_value;
5026
5027 if (parent_die != NULL)
5028 add_child_die (parent_die, die);
5029 else
5030 {
5031 limbo_die_node *limbo_node;
5032
5033 /* No DIEs created after early dwarf should end up in limbo,
5034 because the limbo list should not persist past LTO
5035 streaming. */
5036 if (tag_value != DW_TAG_compile_unit
5037 /* These are allowed because they're generated while
5038 breaking out COMDAT units late. */
5039 && tag_value != DW_TAG_type_unit
5040 && !early_dwarf
5041 /* Allow nested functions to live in limbo because they will
5042 only temporarily live there, as decls_for_scope will fix
5043 them up. */
5044 && (TREE_CODE (t) != FUNCTION_DECL
5045 || !decl_function_context (t))
5046 /* Same as nested functions above but for types. Types that
5047 are local to a function will be fixed in
5048 decls_for_scope. */
5049 && (!RECORD_OR_UNION_TYPE_P (t)
5050 || !TYPE_CONTEXT (t)
5051 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5052 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5053 especially in the ltrans stage, but once we implement LTO
5054 dwarf streaming, we should remove this exception. */
5055 && !in_lto_p)
5056 {
5057 fprintf (stderr, "symbol ended up in limbo too late:");
5058 debug_generic_stmt (t);
5059 gcc_unreachable ();
5060 }
5061
5062 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5063 limbo_node->die = die;
5064 limbo_node->created_for = t;
5065 limbo_node->next = limbo_die_list;
5066 limbo_die_list = limbo_node;
5067 }
5068
5069 return die;
5070 }
5071
5072 /* Return the DIE associated with the given type specifier. */
5073
5074 static inline dw_die_ref
5075 lookup_type_die (tree type)
5076 {
5077 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5078 if (die && die->removed)
5079 {
5080 TYPE_SYMTAB_DIE (type) = NULL;
5081 return NULL;
5082 }
5083 return die;
5084 }
5085
5086 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an anonymous
5087    type named by the typedef whose DIE is TYPE_DIE, return the DIE of the
5088    anonymous type instead of the one of the naming typedef.  */
5089
5090 static inline dw_die_ref
5091 strip_naming_typedef (tree type, dw_die_ref type_die)
5092 {
5093 if (type
5094 && TREE_CODE (type) == RECORD_TYPE
5095 && type_die
5096 && type_die->die_tag == DW_TAG_typedef
5097 && is_naming_typedef_decl (TYPE_NAME (type)))
5098 type_die = get_AT_ref (type_die, DW_AT_type);
5099 return type_die;
5100 }
5101
5102 /* Like lookup_type_die, but if type is an anonymous type named by a
5103    typedef[1], return the DIE of the anonymous type instead of the one of
5104    the naming typedef.  This is because in gen_typedef_die, we equated
5105    the anonymous struct named by the typedef with the DIE of
5106 the naming typedef. So by default, lookup_type_die on an anonymous
5107 struct yields the DIE of the naming typedef.
5108
5109 [1]: Read the comment of is_naming_typedef_decl to learn about what
5110 a naming typedef is. */
5111
5112 static inline dw_die_ref
5113 lookup_type_die_strip_naming_typedef (tree type)
5114 {
5115 dw_die_ref die = lookup_type_die (type);
5116 return strip_naming_typedef (type, die);
5117 }
5118
5119 /* Equate a DIE to a given type specifier. */
5120
5121 static inline void
5122 equate_type_number_to_die (tree type, dw_die_ref type_die)
5123 {
5124 TYPE_SYMTAB_DIE (type) = type_die;
5125 }
5126
5127 /* Returns a hash value for X (which really is a die_struct). */
5128
5129 inline hashval_t
5130 decl_die_hasher::hash (die_node *x)
5131 {
5132 return (hashval_t) x->decl_id;
5133 }
5134
5135 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5136
5137 inline bool
5138 decl_die_hasher::equal (die_node *x, tree y)
5139 {
5140 return (x->decl_id == DECL_UID (y));
5141 }
5142
5143 /* Return the DIE associated with a given declaration. */
5144
5145 static inline dw_die_ref
5146 lookup_decl_die (tree decl)
5147 {
5148 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5149 NO_INSERT);
5150 if (!die)
5151 return NULL;
5152 if ((*die)->removed)
5153 {
5154 decl_die_table->clear_slot (die);
5155 return NULL;
5156 }
5157 return *die;
5158 }
5159
5160 /* Returns a hash value for X (which really is a var_loc_list). */
5161
5162 inline hashval_t
5163 decl_loc_hasher::hash (var_loc_list *x)
5164 {
5165 return (hashval_t) x->decl_id;
5166 }
5167
5168 /* Return nonzero if decl_id of var_loc_list X is the same as
5169 UID of decl *Y. */
5170
5171 inline bool
5172 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5173 {
5174 return (x->decl_id == DECL_UID (y));
5175 }
5176
5177 /* Return the var_loc list associated with a given declaration. */
5178
5179 static inline var_loc_list *
5180 lookup_decl_loc (const_tree decl)
5181 {
5182 if (!decl_loc_table)
5183 return NULL;
5184 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5185 }
5186
5187 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5188
5189 inline hashval_t
5190 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5191 {
5192 return (hashval_t) x->decl_id;
5193 }
5194
5195 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5196 UID of decl *Y. */
5197
5198 inline bool
5199 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5200 {
5201 return (x->decl_id == DECL_UID (y));
5202 }
5203
5204 /* Equate a DIE to a particular declaration. */
5205
5206 static void
5207 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5208 {
5209 unsigned int decl_id = DECL_UID (decl);
5210
5211 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5212 decl_die->decl_id = decl_id;
5213 }
5214
5215 /* Return how many bits the PIECE EXPR_LIST covers. */
5216
5217 static HOST_WIDE_INT
5218 decl_piece_bitsize (rtx piece)
5219 {
5220 int ret = (int) GET_MODE (piece);
5221 if (ret)
5222 return ret;
5223 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5224 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5225 return INTVAL (XEXP (XEXP (piece, 0), 0));
5226 }
5227
5228 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
5229
5230 static rtx *
5231 decl_piece_varloc_ptr (rtx piece)
5232 {
5233 if ((int) GET_MODE (piece))
5234 return &XEXP (piece, 0);
5235 else
5236 return &XEXP (XEXP (piece, 0), 1);
5237 }
5238
5239 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5240 NEXT is the chain of following piece nodes. */
5241
5242 static rtx_expr_list *
5243 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5244 {
5245 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5246 return alloc_EXPR_LIST (bitsize, loc_note, next);
5247 else
5248 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5249 GEN_INT (bitsize),
5250 loc_note), next);
5251 }
5252
5253 /* Return rtx that should be stored into loc field for
5254 LOC_NOTE and BITPOS/BITSIZE. */
5255
5256 static rtx
5257 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5258 HOST_WIDE_INT bitsize)
5259 {
5260 if (bitsize != -1)
5261 {
5262 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5263 if (bitpos != 0)
5264 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5265 }
5266 return loc_note;
5267 }
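/* For illustration: with bitpos == 32 and bitsize == 16 the code above
   builds a two-node EXPR_LIST: a 32-bit padding piece whose location
   is NULL_RTX, followed by a 16-bit piece holding LOC_NOTE.  As
   decl_piece_node and decl_piece_bitsize show, a piece's bit count is
   stashed in the EXPR_LIST node's machine-mode field when it fits
   (0 < bitsize <= MAX_MACHINE_MODE); otherwise it is kept as a
   CONST_INT inside a CONCAT together with the location.  */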
5268
5269 /* This function either modifies the location piece list *DEST in
5270 place (if SRC and INNER are NULL), or copies the location piece
5271 list *SRC to *DEST while modifying it. The piece at BITPOS is
5272 changed to contain LOC_NOTE; any pieces overlapping it are removed
5273 (or, when copying, not copied) and some padding is added around it
5274 if needed. When modifying in place, DEST should point to the
5275 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
5276 copying, SRC points to the start of the whole list and INNER points
5277 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
5278
5279 static void
5280 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5281 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5282 HOST_WIDE_INT bitsize, rtx loc_note)
5283 {
5284 HOST_WIDE_INT diff;
5285 bool copy = inner != NULL;
5286
5287 if (copy)
5288 {
5289 /* First copy all nodes preceding the current bitpos. */
5290 while (src != inner)
5291 {
5292 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5293 decl_piece_bitsize (*src), NULL_RTX);
5294 dest = &XEXP (*dest, 1);
5295 src = &XEXP (*src, 1);
5296 }
5297 }
5298 /* Add padding if needed. */
5299 if (bitpos != piece_bitpos)
5300 {
5301 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5302 copy ? NULL_RTX : *dest);
5303 dest = &XEXP (*dest, 1);
5304 }
5305 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5306 {
5307 gcc_assert (!copy);
5308 /* A piece with the correct bitpos and bitsize already exists;
5309 just update the location for it and return. */
5310 *decl_piece_varloc_ptr (*dest) = loc_note;
5311 return;
5312 }
5313 /* Add the piece that changed. */
5314 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5315 dest = &XEXP (*dest, 1);
5316 /* Skip over pieces that overlap it. */
5317 diff = bitpos - piece_bitpos + bitsize;
5318 if (!copy)
5319 src = dest;
5320 while (diff > 0 && *src)
5321 {
5322 rtx piece = *src;
5323 diff -= decl_piece_bitsize (piece);
5324 if (copy)
5325 src = &XEXP (piece, 1);
5326 else
5327 {
5328 *src = XEXP (piece, 1);
5329 free_EXPR_LIST_node (piece);
5330 }
5331 }
5332 /* Add padding if needed. */
5333 if (diff < 0 && *src)
5334 {
5335 if (!copy)
5336 dest = src;
5337 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5338 dest = &XEXP (*dest, 1);
5339 }
5340 if (!copy)
5341 return;
5342 /* Finally copy all nodes following it. */
5343 while (*src)
5344 {
5345 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5346 decl_piece_bitsize (*src), NULL_RTX);
5347 dest = &XEXP (*dest, 1);
5348 src = &XEXP (*src, 1);
5349 }
5350 }
5351
5352 /* Add a variable location node to the linked list for DECL. */
5353
5354 static struct var_loc_node *
5355 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5356 {
5357 unsigned int decl_id;
5358 var_loc_list *temp;
5359 struct var_loc_node *loc = NULL;
5360 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5361
5362 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5363 {
5364 tree realdecl = DECL_DEBUG_EXPR (decl);
5365 if (handled_component_p (realdecl)
5366 || (TREE_CODE (realdecl) == MEM_REF
5367 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5368 {
5369 HOST_WIDE_INT maxsize;
5370 bool reverse;
5371 tree innerdecl
5372 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5373 &reverse);
5374 if (!DECL_P (innerdecl)
5375 || DECL_IGNORED_P (innerdecl)
5376 || TREE_STATIC (innerdecl)
5377 || bitsize <= 0
5378 || bitpos + bitsize > 256
5379 || bitsize != maxsize)
5380 return NULL;
5381 decl = innerdecl;
5382 }
5383 }
5384
5385 decl_id = DECL_UID (decl);
5386 var_loc_list **slot
5387 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5388 if (*slot == NULL)
5389 {
5390 temp = ggc_cleared_alloc<var_loc_list> ();
5391 temp->decl_id = decl_id;
5392 *slot = temp;
5393 }
5394 else
5395 temp = *slot;
5396
5397 /* For PARM_DECLs try to keep around the original incoming value,
5398 even if that means we'll emit a zero-range .debug_loc entry. */
5399 if (temp->last
5400 && temp->first == temp->last
5401 && TREE_CODE (decl) == PARM_DECL
5402 && NOTE_P (temp->first->loc)
5403 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5404 && DECL_INCOMING_RTL (decl)
5405 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5406 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5407 == GET_CODE (DECL_INCOMING_RTL (decl))
5408 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5409 && (bitsize != -1
5410 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5411 NOTE_VAR_LOCATION_LOC (loc_note))
5412 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5413 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5414 {
5415 loc = ggc_cleared_alloc<var_loc_node> ();
5416 temp->first->next = loc;
5417 temp->last = loc;
5418 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5419 }
5420 else if (temp->last)
5421 {
5422 struct var_loc_node *last = temp->last, *unused = NULL;
5423 rtx *piece_loc = NULL, last_loc_note;
5424 HOST_WIDE_INT piece_bitpos = 0;
5425 if (last->next)
5426 {
5427 last = last->next;
5428 gcc_assert (last->next == NULL);
5429 }
5430 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5431 {
5432 piece_loc = &last->loc;
5433 do
5434 {
5435 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5436 if (piece_bitpos + cur_bitsize > bitpos)
5437 break;
5438 piece_bitpos += cur_bitsize;
5439 piece_loc = &XEXP (*piece_loc, 1);
5440 }
5441 while (*piece_loc);
5442 }
5443 /* TEMP->LAST here points either to the last-but-one or to the
5444 last element of the chained list; LAST points to the last
5445 element. */
5446 if (label && strcmp (last->label, label) == 0)
5447 {
5448 /* For SRA-optimized variables, if there weren't any real
5449 insns since the last note, just modify the last node. */
5450 if (piece_loc != NULL)
5451 {
5452 adjust_piece_list (piece_loc, NULL, NULL,
5453 bitpos, piece_bitpos, bitsize, loc_note);
5454 return NULL;
5455 }
5456 /* If the last note doesn't cover any instructions, remove it. */
5457 if (temp->last != last)
5458 {
5459 temp->last->next = NULL;
5460 unused = last;
5461 last = temp->last;
5462 gcc_assert (strcmp (last->label, label) != 0);
5463 }
5464 else
5465 {
5466 gcc_assert (temp->first == temp->last
5467 || (temp->first->next == temp->last
5468 && TREE_CODE (decl) == PARM_DECL));
5469 memset (temp->last, '\0', sizeof (*temp->last));
5470 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
5471 return temp->last;
5472 }
5473 }
5474 if (bitsize == -1 && NOTE_P (last->loc))
5475 last_loc_note = last->loc;
5476 else if (piece_loc != NULL
5477 && *piece_loc != NULL_RTX
5478 && piece_bitpos == bitpos
5479 && decl_piece_bitsize (*piece_loc) == bitsize)
5480 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
5481 else
5482 last_loc_note = NULL_RTX;
5483 /* If the current location is the same as the end of the list,
5484 and either both or neither of the locations is uninitialized,
5485 we have nothing to do. */
5486 if (last_loc_note == NULL_RTX
5487 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
5488 NOTE_VAR_LOCATION_LOC (loc_note)))
5489 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5490 != NOTE_VAR_LOCATION_STATUS (loc_note))
5491 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5492 == VAR_INIT_STATUS_UNINITIALIZED)
5493 || (NOTE_VAR_LOCATION_STATUS (loc_note)
5494 == VAR_INIT_STATUS_UNINITIALIZED))))
5495 {
5496 /* Add LOC to the end of list and update LAST. If the last
5497 element of the list has been removed above, reuse its
5498 memory for the new node, otherwise allocate a new one. */
5499 if (unused)
5500 {
5501 loc = unused;
5502 memset (loc, '\0', sizeof (*loc));
5503 }
5504 else
5505 loc = ggc_cleared_alloc<var_loc_node> ();
5506 if (bitsize == -1 || piece_loc == NULL)
5507 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5508 else
5509 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
5510 bitpos, piece_bitpos, bitsize, loc_note);
5511 last->next = loc;
5512 /* Ensure TEMP->LAST will point either to the new last but one
5513 element of the chain, or to the last element in it. */
5514 if (last != temp->last)
5515 temp->last = last;
5516 }
5517 else if (unused)
5518 ggc_free (unused);
5519 }
5520 else
5521 {
5522 loc = ggc_cleared_alloc<var_loc_node> ();
5523 temp->first = loc;
5524 temp->last = loc;
5525 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5526 }
5527 return loc;
5528 }
5529 \f
5530 /* Keep track of the number of spaces used to indent the
5531 output of the debugging routines that print the structure of
5532 the DIE internal representation. */
5533 static int print_indent;
5534
5535 /* Indent the line the number of spaces given by print_indent. */
5536
5537 static inline void
5538 print_spaces (FILE *outfile)
5539 {
5540 fprintf (outfile, "%*s", print_indent, "");
5541 }
5542
5543 /* Print a type signature in hex. */
5544
5545 static inline void
5546 print_signature (FILE *outfile, char *sig)
5547 {
5548 int i;
5549
5550 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
5551 fprintf (outfile, "%02x", sig[i] & 0xff);
5552 }
5553
5554 static inline void
5555 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
5556 {
5557 if (discr_value->pos)
5558 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
5559 else
5560 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
5561 }
5562
5563 static void print_loc_descr (dw_loc_descr_ref, FILE *);
5564
5565 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
5566 RECURSE, output location descriptor operations. */
5567
5568 static void
5569 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
5570 {
5571 switch (val->val_class)
5572 {
5573 case dw_val_class_addr:
5574 fprintf (outfile, "address");
5575 break;
5576 case dw_val_class_offset:
5577 fprintf (outfile, "offset");
5578 break;
5579 case dw_val_class_loc:
5580 fprintf (outfile, "location descriptor");
5581 if (val->v.val_loc == NULL)
5582 fprintf (outfile, " -> <null>\n");
5583 else if (recurse)
5584 {
5585 fprintf (outfile, ":\n");
5586 print_indent += 4;
5587 print_loc_descr (val->v.val_loc, outfile);
5588 print_indent -= 4;
5589 }
5590 else
5591 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
5592 break;
5593 case dw_val_class_loc_list:
5594 fprintf (outfile, "location list -> label:%s",
5595 val->v.val_loc_list->ll_symbol);
5596 break;
5597 case dw_val_class_range_list:
5598 fprintf (outfile, "range list");
5599 break;
5600 case dw_val_class_const:
5601 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
5602 break;
5603 case dw_val_class_unsigned_const:
5604 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
5605 break;
5606 case dw_val_class_const_double:
5607 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
5608 HOST_WIDE_INT_PRINT_UNSIGNED")",
5609 val->v.val_double.high,
5610 val->v.val_double.low);
5611 break;
5612 case dw_val_class_wide_int:
5613 {
5614 int i = val->v.val_wide->get_len ();
5615 fprintf (outfile, "constant (");
5616 gcc_assert (i > 0);
5617 if (val->v.val_wide->elt (i - 1) == 0)
5618 fprintf (outfile, "0x");
5619 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
5620 val->v.val_wide->elt (--i));
5621 while (--i >= 0)
5622 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
5623 val->v.val_wide->elt (i));
5624 fprintf (outfile, ")");
5625 break;
5626 }
5627 case dw_val_class_vec:
5628 fprintf (outfile, "floating-point or vector constant");
5629 break;
5630 case dw_val_class_flag:
5631 fprintf (outfile, "%u", val->v.val_flag);
5632 break;
5633 case dw_val_class_die_ref:
5634 if (val->v.val_die_ref.die != NULL)
5635 {
5636 dw_die_ref die = val->v.val_die_ref.die;
5637
5638 if (die->comdat_type_p)
5639 {
5640 fprintf (outfile, "die -> signature: ");
5641 print_signature (outfile,
5642 die->die_id.die_type_node->signature);
5643 }
5644 else if (die->die_id.die_symbol)
5645 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
5646 else
5647 fprintf (outfile, "die -> %ld", die->die_offset);
5648 fprintf (outfile, " (%p)", (void *) die);
5649 }
5650 else
5651 fprintf (outfile, "die -> <null>");
5652 break;
5653 case dw_val_class_vms_delta:
5654 fprintf (outfile, "delta: @slotcount(%s-%s)",
5655 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
5656 break;
5657 case dw_val_class_lbl_id:
5658 case dw_val_class_lineptr:
5659 case dw_val_class_macptr:
5660 case dw_val_class_high_pc:
5661 fprintf (outfile, "label: %s", val->v.val_lbl_id);
5662 break;
5663 case dw_val_class_str:
5664 if (val->v.val_str->str != NULL)
5665 fprintf (outfile, "\"%s\"", val->v.val_str->str);
5666 else
5667 fprintf (outfile, "<null>");
5668 break;
5669 case dw_val_class_file:
5670 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
5671 val->v.val_file->emitted_number);
5672 break;
5673 case dw_val_class_data8:
5674 {
5675 int i;
5676
5677 for (i = 0; i < 8; i++)
5678 fprintf (outfile, "%02x", val->v.val_data8[i]);
5679 break;
5680 }
5681 case dw_val_class_discr_value:
5682 print_discr_value (outfile, &val->v.val_discr_value);
5683 break;
5684 case dw_val_class_discr_list:
5685 for (dw_discr_list_ref node = val->v.val_discr_list;
5686 node != NULL;
5687 node = node->dw_discr_next)
5688 {
5689 if (node->dw_discr_range)
5690 {
5691 fprintf (outfile, " .. ");
5692 print_discr_value (outfile, &node->dw_discr_lower_bound);
5693 print_discr_value (outfile, &node->dw_discr_upper_bound);
5694 }
5695 else
5696 print_discr_value (outfile, &node->dw_discr_lower_bound);
5697
5698 if (node->dw_discr_next != NULL)
5699 fprintf (outfile, " | ");
5700 }
5701 default:
5702 break;
5703 }
5704 }
5705
5706 /* Likewise, for a DIE attribute. */
5707
5708 static void
5709 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
5710 {
5711 print_dw_val (&a->dw_attr_val, recurse, outfile);
5712 }
5713
5714
5715 /* Print the list of operands in the LOC location description to OUTFILE. This
5716 routine is a debugging aid only. */
5717
5718 static void
5719 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
5720 {
5721 dw_loc_descr_ref l = loc;
5722
5723 if (loc == NULL)
5724 {
5725 print_spaces (outfile);
5726 fprintf (outfile, "<null>\n");
5727 return;
5728 }
5729
5730 for (l = loc; l != NULL; l = l->dw_loc_next)
5731 {
5732 print_spaces (outfile);
5733 fprintf (outfile, "(%p) %s",
5734 (void *) l,
5735 dwarf_stack_op_name (l->dw_loc_opc));
5736 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
5737 {
5738 fprintf (outfile, " ");
5739 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
5740 }
5741 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
5742 {
5743 fprintf (outfile, ", ");
5744 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
5745 }
5746 fprintf (outfile, "\n");
5747 }
5748 }
5749
5750 /* Print the information associated with a given DIE, and its children.
5751 This routine is a debugging aid only. */
5752
5753 static void
5754 print_die (dw_die_ref die, FILE *outfile)
5755 {
5756 dw_attr_node *a;
5757 dw_die_ref c;
5758 unsigned ix;
5759
5760 print_spaces (outfile);
5761 fprintf (outfile, "DIE %4ld: %s (%p)\n",
5762 die->die_offset, dwarf_tag_name (die->die_tag),
5763 (void*) die);
5764 print_spaces (outfile);
5765 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
5766 fprintf (outfile, " offset: %ld", die->die_offset);
5767 fprintf (outfile, " mark: %d\n", die->die_mark);
5768
5769 if (die->comdat_type_p)
5770 {
5771 print_spaces (outfile);
5772 fprintf (outfile, " signature: ");
5773 print_signature (outfile, die->die_id.die_type_node->signature);
5774 fprintf (outfile, "\n");
5775 }
5776
5777 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5778 {
5779 print_spaces (outfile);
5780 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
5781
5782 print_attribute (a, true, outfile);
5783 fprintf (outfile, "\n");
5784 }
5785
5786 if (die->die_child != NULL)
5787 {
5788 print_indent += 4;
5789 FOR_EACH_CHILD (die, c, print_die (c, outfile));
5790 print_indent -= 4;
5791 }
5792 if (print_indent == 0)
5793 fprintf (outfile, "\n");
5794 }
5795
5796 /* Print the list of operations in the LOC location description. */
5797
5798 DEBUG_FUNCTION void
5799 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
5800 {
5801 print_loc_descr (loc, stderr);
5802 }
5803
5804 /* Print the information collected for a given DIE. */
5805
5806 DEBUG_FUNCTION void
5807 debug_dwarf_die (dw_die_ref die)
5808 {
5809 print_die (die, stderr);
5810 }
5811
5812 DEBUG_FUNCTION void
5813 debug (die_struct &ref)
5814 {
5815 print_die (&ref, stderr);
5816 }
5817
5818 DEBUG_FUNCTION void
5819 debug (die_struct *ptr)
5820 {
5821 if (ptr)
5822 debug (*ptr);
5823 else
5824 fprintf (stderr, "<nil>\n");
5825 }
5826
5827
5828 /* Print all DWARF information collected for the compilation unit.
5829 This routine is a debugging aid only. */
5830
5831 DEBUG_FUNCTION void
5832 debug_dwarf (void)
5833 {
5834 print_indent = 0;
5835 print_die (comp_unit_die (), stderr);
5836 }
5837
5838 /* Verify the DIE tree structure. */
5839
5840 DEBUG_FUNCTION void
5841 verify_die (dw_die_ref die)
5842 {
5843 gcc_assert (!die->die_mark);
5844 if (die->die_parent == NULL
5845 && die->die_sib == NULL)
5846 return;
5847 /* Verify the die_sib list is cyclic. */
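/* (Siblings are chained circularly through die_sib, and a parent's
   die_child points at its last child, so a well-formed walk below
   must come back around to DIE itself.) */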
5848 dw_die_ref x = die;
5849 do
5850 {
5851 x->die_mark = 1;
5852 x = x->die_sib;
5853 }
5854 while (x && !x->die_mark);
5855 gcc_assert (x == die);
5856 x = die;
5857 do
5858 {
5859 /* Verify all dies have the same parent. */
5860 gcc_assert (x->die_parent == die->die_parent);
5861 if (x->die_child)
5862 {
5863 /* Verify the child has the proper parent and recurse. */
5864 gcc_assert (x->die_child->die_parent == x);
5865 verify_die (x->die_child);
5866 }
5867 x->die_mark = 0;
5868 x = x->die_sib;
5869 }
5870 while (x && x->die_mark);
5871 }
5872
5873 /* Sanity checks on DIEs. */
5874
5875 static void
5876 check_die (dw_die_ref die)
5877 {
5878 unsigned ix;
5879 dw_attr_node *a;
5880 bool inline_found = false;
5881 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
5882 int n_decl_line = 0, n_decl_file = 0;
5883 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5884 {
5885 switch (a->dw_attr)
5886 {
5887 case DW_AT_inline:
5888 if (a->dw_attr_val.v.val_unsigned)
5889 inline_found = true;
5890 break;
5891 case DW_AT_location:
5892 ++n_location;
5893 break;
5894 case DW_AT_low_pc:
5895 ++n_low_pc;
5896 break;
5897 case DW_AT_high_pc:
5898 ++n_high_pc;
5899 break;
5900 case DW_AT_artificial:
5901 ++n_artificial;
5902 break;
5903 case DW_AT_decl_line:
5904 ++n_decl_line;
5905 break;
5906 case DW_AT_decl_file:
5907 ++n_decl_file;
5908 break;
5909 default:
5910 break;
5911 }
5912 }
5913 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
5914 || n_decl_line > 1 || n_decl_file > 1)
5915 {
5916 fprintf (stderr, "Duplicate attributes in DIE:\n");
5917 debug_dwarf_die (die);
5918 gcc_unreachable ();
5919 }
5920 if (inline_found)
5921 {
5922 /* A debugging information entry that is a member of an abstract
5923 instance tree [that has DW_AT_inline] should not contain any
5924 attributes which describe aspects of the subroutine which vary
5925 between distinct inlined expansions or distinct out-of-line
5926 expansions. */
5927 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5928 gcc_assert (a->dw_attr != DW_AT_low_pc
5929 && a->dw_attr != DW_AT_high_pc
5930 && a->dw_attr != DW_AT_location
5931 && a->dw_attr != DW_AT_frame_base
5932 && a->dw_attr != DW_AT_GNU_all_call_sites);
5933 }
5934 }
5935 \f
5936 /* Start a new compilation unit DIE for an include file. OLD_UNIT is the CU
5937 for the enclosing include file, if any. BINCL_DIE is the DW_TAG_GNU_BINCL
5938 DIE that marks the start of the DIEs for this include file. */
5939
5940 static dw_die_ref
5941 push_new_compile_unit (dw_die_ref old_unit, dw_die_ref bincl_die)
5942 {
5943 const char *filename = get_AT_string (bincl_die, DW_AT_name);
5944 dw_die_ref new_unit = gen_compile_unit_die (filename);
5945
5946 new_unit->die_sib = old_unit;
5947 return new_unit;
5948 }
5949
5950 /* Close an include-file CU and reopen the enclosing one. */
5951
5952 static dw_die_ref
5953 pop_compile_unit (dw_die_ref old_unit)
5954 {
5955 dw_die_ref new_unit = old_unit->die_sib;
5956
5957 old_unit->die_sib = NULL;
5958 return new_unit;
5959 }
5960
5961 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
5962 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
5963 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
5964
5965 /* Calculate the checksum of a location expression. */
5966
5967 static inline void
5968 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
5969 {
5970 int tem;
5971 inchash::hash hstate;
5972 hashval_t hash;
5973
5974 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
5975 CHECKSUM (tem);
5976 hash_loc_operands (loc, hstate);
5977 hash = hstate.end();
5978 CHECKSUM (hash);
5979 }
5980
5981 /* Calculate the checksum of an attribute. */
5982
5983 static void
5984 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
5985 {
5986 dw_loc_descr_ref loc;
5987 rtx r;
5988
5989 CHECKSUM (at->dw_attr);
5990
5991 /* We don't care that this was compiled with a different compiler
5992 snapshot; if the output is the same, that's what matters. */
5993 if (at->dw_attr == DW_AT_producer)
5994 return;
5995
5996 switch (AT_class (at))
5997 {
5998 case dw_val_class_const:
5999 CHECKSUM (at->dw_attr_val.v.val_int);
6000 break;
6001 case dw_val_class_unsigned_const:
6002 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6003 break;
6004 case dw_val_class_const_double:
6005 CHECKSUM (at->dw_attr_val.v.val_double);
6006 break;
6007 case dw_val_class_wide_int:
6008 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6009 get_full_len (*at->dw_attr_val.v.val_wide)
6010 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6011 break;
6012 case dw_val_class_vec:
6013 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6014 (at->dw_attr_val.v.val_vec.length
6015 * at->dw_attr_val.v.val_vec.elt_size));
6016 break;
6017 case dw_val_class_flag:
6018 CHECKSUM (at->dw_attr_val.v.val_flag);
6019 break;
6020 case dw_val_class_str:
6021 CHECKSUM_STRING (AT_string (at));
6022 break;
6023
6024 case dw_val_class_addr:
6025 r = AT_addr (at);
6026 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6027 CHECKSUM_STRING (XSTR (r, 0));
6028 break;
6029
6030 case dw_val_class_offset:
6031 CHECKSUM (at->dw_attr_val.v.val_offset);
6032 break;
6033
6034 case dw_val_class_loc:
6035 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6036 loc_checksum (loc, ctx);
6037 break;
6038
6039 case dw_val_class_die_ref:
6040 die_checksum (AT_ref (at), ctx, mark);
6041 break;
6042
6043 case dw_val_class_fde_ref:
6044 case dw_val_class_vms_delta:
6045 case dw_val_class_lbl_id:
6046 case dw_val_class_lineptr:
6047 case dw_val_class_macptr:
6048 case dw_val_class_high_pc:
6049 break;
6050
6051 case dw_val_class_file:
6052 CHECKSUM_STRING (AT_file (at)->filename);
6053 break;
6054
6055 case dw_val_class_data8:
6056 CHECKSUM (at->dw_attr_val.v.val_data8);
6057 break;
6058
6059 default:
6060 break;
6061 }
6062 }
6063
6064 /* Calculate the checksum of a DIE. */
6065
6066 static void
6067 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6068 {
6069 dw_die_ref c;
6070 dw_attr_node *a;
6071 unsigned ix;
6072
6073 /* To avoid infinite recursion. */
6074 if (die->die_mark)
6075 {
6076 CHECKSUM (die->die_mark);
6077 return;
6078 }
6079 die->die_mark = ++(*mark);
6080
6081 CHECKSUM (die->die_tag);
6082
6083 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6084 attr_checksum (a, ctx, mark);
6085
6086 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6087 }
6088
6089 #undef CHECKSUM
6090 #undef CHECKSUM_BLOCK
6091 #undef CHECKSUM_STRING
6092
6093 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6094 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6095 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6096 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6097 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6098 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6099 #define CHECKSUM_ATTR(FOO) \
6100 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6101
6102 /* Calculate the checksum of a number in signed LEB128 format. */
6103
6104 static void
6105 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6106 {
6107 unsigned char byte;
6108 bool more;
6109
6110 while (1)
6111 {
6112 byte = (value & 0x7f);
6113 value >>= 7;
6114 more = !((value == 0 && (byte & 0x40) == 0)
6115 || (value == -1 && (byte & 0x40) != 0));
6116 if (more)
6117 byte |= 0x80;
6118 CHECKSUM (byte);
6119 if (!more)
6120 break;
6121 }
6122 }
6123
6124 /* Calculate the checksum of a number in unsigned LEB128 format. */
6125
6126 static void
6127 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6128 {
6129 while (1)
6130 {
6131 unsigned char byte = (value & 0x7f);
6132 value >>= 7;
6133 if (value != 0)
6134 /* More bytes to follow. */
6135 byte |= 0x80;
6136 CHECKSUM (byte);
6137 if (value == 0)
6138 break;
6139 }
6140 }
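/* A minimal standalone sketch (not part of GCC) of the LEB128
   encodings that checksum_sleb128 and checksum_uleb128 stream byte by
   byte into the MD5 context above; here the bytes go into a buffer so
   they can be inspected.  The helper names and the main driver are
   illustrative only.  */
#include <stdio.h>
#include <stddef.h>

static size_t
encode_uleb128 (unsigned long value, unsigned char *buf)
{
  size_t n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
	byte |= 0x80;		/* More bytes follow.  */
      buf[n++] = byte;
    }
  while (value != 0);
  return n;
}

static size_t
encode_sleb128 (long value, unsigned char *buf)
{
  size_t n = 0;
  int more = 1;
  while (more)
    {
      unsigned char byte = value & 0x7f;
      /* Relies on arithmetic right shift of negative values, just as
	 the checksum helpers above do.  */
      value >>= 7;
      if ((value == 0 && (byte & 0x40) == 0)
	  || (value == -1 && (byte & 0x40) != 0))
	more = 0;
      else
	byte |= 0x80;
      buf[n++] = byte;
    }
  return n;
}

int
main (void)
{
  unsigned char buf[16];
  size_t i, n;

  n = encode_uleb128 (624485, buf);	/* DWARF spec example: e5 8e 26.  */
  for (i = 0; i < n; i++)
    printf ("%02x ", buf[i]);
  printf ("\n");

  n = encode_sleb128 (-123456, buf);	/* DWARF spec example: c0 bb 78.  */
  for (i = 0; i < n; i++)
    printf ("%02x ", buf[i]);
  printf ("\n");
  return 0;
}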
6141
6142 /* Checksum the context of the DIE. This adds the names of any
6143 surrounding namespaces or structures to the checksum. */
6144
6145 static void
6146 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6147 {
6148 const char *name;
6149 dw_die_ref spec;
6150 int tag = die->die_tag;
6151
6152 if (tag != DW_TAG_namespace
6153 && tag != DW_TAG_structure_type
6154 && tag != DW_TAG_class_type)
6155 return;
6156
6157 name = get_AT_string (die, DW_AT_name);
6158
6159 spec = get_AT_ref (die, DW_AT_specification);
6160 if (spec != NULL)
6161 die = spec;
6162
6163 if (die->die_parent != NULL)
6164 checksum_die_context (die->die_parent, ctx);
6165
6166 CHECKSUM_ULEB128 ('C');
6167 CHECKSUM_ULEB128 (tag);
6168 if (name != NULL)
6169 CHECKSUM_STRING (name);
6170 }
6171
6172 /* Calculate the checksum of a location expression. */
6173
6174 static inline void
6175 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6176 {
6177 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6178 were emitted as a DW_FORM_sdata instead of a location expression. */
6179 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6180 {
6181 CHECKSUM_ULEB128 (DW_FORM_sdata);
6182 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6183 return;
6184 }
6185
6186 /* Otherwise, just checksum the raw location expression. */
6187 while (loc != NULL)
6188 {
6189 inchash::hash hstate;
6190 hashval_t hash;
6191
6192 CHECKSUM_ULEB128 (loc->dtprel);
6193 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6194 hash_loc_operands (loc, hstate);
6195 hash = hstate.end ();
6196 CHECKSUM (hash);
6197 loc = loc->dw_loc_next;
6198 }
6199 }
6200
6201 /* Calculate the checksum of an attribute. */
6202
6203 static void
6204 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6205 struct md5_ctx *ctx, int *mark)
6206 {
6207 dw_loc_descr_ref loc;
6208 rtx r;
6209
6210 if (AT_class (at) == dw_val_class_die_ref)
6211 {
6212 dw_die_ref target_die = AT_ref (at);
6213
6214 /* For pointer and reference types, we checksum only the (qualified)
6215 name of the target type (if there is a name). For friend entries,
6216 we checksum only the (qualified) name of the target type or function.
6217 This allows the checksum to remain the same whether the target type
6218 is complete or not. */
6219 if ((at->dw_attr == DW_AT_type
6220 && (tag == DW_TAG_pointer_type
6221 || tag == DW_TAG_reference_type
6222 || tag == DW_TAG_rvalue_reference_type
6223 || tag == DW_TAG_ptr_to_member_type))
6224 || (at->dw_attr == DW_AT_friend
6225 && tag == DW_TAG_friend))
6226 {
6227 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6228
6229 if (name_attr != NULL)
6230 {
6231 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6232
6233 if (decl == NULL)
6234 decl = target_die;
6235 CHECKSUM_ULEB128 ('N');
6236 CHECKSUM_ULEB128 (at->dw_attr);
6237 if (decl->die_parent != NULL)
6238 checksum_die_context (decl->die_parent, ctx);
6239 CHECKSUM_ULEB128 ('E');
6240 CHECKSUM_STRING (AT_string (name_attr));
6241 return;
6242 }
6243 }
6244
6245 /* For all other references to another DIE, we check to see if the
6246 target DIE has already been visited. If it has, we emit a
6247 backward reference; if not, we descend recursively. */
6248 if (target_die->die_mark > 0)
6249 {
6250 CHECKSUM_ULEB128 ('R');
6251 CHECKSUM_ULEB128 (at->dw_attr);
6252 CHECKSUM_ULEB128 (target_die->die_mark);
6253 }
6254 else
6255 {
6256 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6257
6258 if (decl == NULL)
6259 decl = target_die;
6260 target_die->die_mark = ++(*mark);
6261 CHECKSUM_ULEB128 ('T');
6262 CHECKSUM_ULEB128 (at->dw_attr);
6263 if (decl->die_parent != NULL)
6264 checksum_die_context (decl->die_parent, ctx);
6265 die_checksum_ordered (target_die, ctx, mark);
6266 }
6267 return;
6268 }
6269
6270 CHECKSUM_ULEB128 ('A');
6271 CHECKSUM_ULEB128 (at->dw_attr);
6272
6273 switch (AT_class (at))
6274 {
6275 case dw_val_class_const:
6276 CHECKSUM_ULEB128 (DW_FORM_sdata);
6277 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6278 break;
6279
6280 case dw_val_class_unsigned_const:
6281 CHECKSUM_ULEB128 (DW_FORM_sdata);
6282 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6283 break;
6284
6285 case dw_val_class_const_double:
6286 CHECKSUM_ULEB128 (DW_FORM_block);
6287 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6288 CHECKSUM (at->dw_attr_val.v.val_double);
6289 break;
6290
6291 case dw_val_class_wide_int:
6292 CHECKSUM_ULEB128 (DW_FORM_block);
6293 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6294 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6295 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6296 get_full_len (*at->dw_attr_val.v.val_wide)
6297 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6298 break;
6299
6300 case dw_val_class_vec:
6301 CHECKSUM_ULEB128 (DW_FORM_block);
6302 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6303 * at->dw_attr_val.v.val_vec.elt_size);
6304 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6305 (at->dw_attr_val.v.val_vec.length
6306 * at->dw_attr_val.v.val_vec.elt_size));
6307 break;
6308
6309 case dw_val_class_flag:
6310 CHECKSUM_ULEB128 (DW_FORM_flag);
6311 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6312 break;
6313
6314 case dw_val_class_str:
6315 CHECKSUM_ULEB128 (DW_FORM_string);
6316 CHECKSUM_STRING (AT_string (at));
6317 break;
6318
6319 case dw_val_class_addr:
6320 r = AT_addr (at);
6321 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6322 CHECKSUM_ULEB128 (DW_FORM_string);
6323 CHECKSUM_STRING (XSTR (r, 0));
6324 break;
6325
6326 case dw_val_class_offset:
6327 CHECKSUM_ULEB128 (DW_FORM_sdata);
6328 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6329 break;
6330
6331 case dw_val_class_loc:
6332 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6333 loc_checksum_ordered (loc, ctx);
6334 break;
6335
6336 case dw_val_class_fde_ref:
6337 case dw_val_class_lbl_id:
6338 case dw_val_class_lineptr:
6339 case dw_val_class_macptr:
6340 case dw_val_class_high_pc:
6341 break;
6342
6343 case dw_val_class_file:
6344 CHECKSUM_ULEB128 (DW_FORM_string);
6345 CHECKSUM_STRING (AT_file (at)->filename);
6346 break;
6347
6348 case dw_val_class_data8:
6349 CHECKSUM (at->dw_attr_val.v.val_data8);
6350 break;
6351
6352 default:
6353 break;
6354 }
6355 }
6356
6357 struct checksum_attributes
6358 {
6359 dw_attr_node *at_name;
6360 dw_attr_node *at_type;
6361 dw_attr_node *at_friend;
6362 dw_attr_node *at_accessibility;
6363 dw_attr_node *at_address_class;
6364 dw_attr_node *at_allocated;
6365 dw_attr_node *at_artificial;
6366 dw_attr_node *at_associated;
6367 dw_attr_node *at_binary_scale;
6368 dw_attr_node *at_bit_offset;
6369 dw_attr_node *at_bit_size;
6370 dw_attr_node *at_bit_stride;
6371 dw_attr_node *at_byte_size;
6372 dw_attr_node *at_byte_stride;
6373 dw_attr_node *at_const_value;
6374 dw_attr_node *at_containing_type;
6375 dw_attr_node *at_count;
6376 dw_attr_node *at_data_location;
6377 dw_attr_node *at_data_member_location;
6378 dw_attr_node *at_decimal_scale;
6379 dw_attr_node *at_decimal_sign;
6380 dw_attr_node *at_default_value;
6381 dw_attr_node *at_digit_count;
6382 dw_attr_node *at_discr;
6383 dw_attr_node *at_discr_list;
6384 dw_attr_node *at_discr_value;
6385 dw_attr_node *at_encoding;
6386 dw_attr_node *at_endianity;
6387 dw_attr_node *at_explicit;
6388 dw_attr_node *at_is_optional;
6389 dw_attr_node *at_location;
6390 dw_attr_node *at_lower_bound;
6391 dw_attr_node *at_mutable;
6392 dw_attr_node *at_ordering;
6393 dw_attr_node *at_picture_string;
6394 dw_attr_node *at_prototyped;
6395 dw_attr_node *at_small;
6396 dw_attr_node *at_segment;
6397 dw_attr_node *at_string_length;
6398 dw_attr_node *at_string_length_bit_size;
6399 dw_attr_node *at_string_length_byte_size;
6400 dw_attr_node *at_threads_scaled;
6401 dw_attr_node *at_upper_bound;
6402 dw_attr_node *at_use_location;
6403 dw_attr_node *at_use_UTF8;
6404 dw_attr_node *at_variable_parameter;
6405 dw_attr_node *at_virtuality;
6406 dw_attr_node *at_visibility;
6407 dw_attr_node *at_vtable_elem_location;
6408 };
6409
6410 /* Collect the attributes that we will want to use for the checksum. */
6411
6412 static void
6413 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6414 {
6415 dw_attr_node *a;
6416 unsigned ix;
6417
6418 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6419 {
6420 switch (a->dw_attr)
6421 {
6422 case DW_AT_name:
6423 attrs->at_name = a;
6424 break;
6425 case DW_AT_type:
6426 attrs->at_type = a;
6427 break;
6428 case DW_AT_friend:
6429 attrs->at_friend = a;
6430 break;
6431 case DW_AT_accessibility:
6432 attrs->at_accessibility = a;
6433 break;
6434 case DW_AT_address_class:
6435 attrs->at_address_class = a;
6436 break;
6437 case DW_AT_allocated:
6438 attrs->at_allocated = a;
6439 break;
6440 case DW_AT_artificial:
6441 attrs->at_artificial = a;
6442 break;
6443 case DW_AT_associated:
6444 attrs->at_associated = a;
6445 break;
6446 case DW_AT_binary_scale:
6447 attrs->at_binary_scale = a;
6448 break;
6449 case DW_AT_bit_offset:
6450 attrs->at_bit_offset = a;
6451 break;
6452 case DW_AT_bit_size:
6453 attrs->at_bit_size = a;
6454 break;
6455 case DW_AT_bit_stride:
6456 attrs->at_bit_stride = a;
6457 break;
6458 case DW_AT_byte_size:
6459 attrs->at_byte_size = a;
6460 break;
6461 case DW_AT_byte_stride:
6462 attrs->at_byte_stride = a;
6463 break;
6464 case DW_AT_const_value:
6465 attrs->at_const_value = a;
6466 break;
6467 case DW_AT_containing_type:
6468 attrs->at_containing_type = a;
6469 break;
6470 case DW_AT_count:
6471 attrs->at_count = a;
6472 break;
6473 case DW_AT_data_location:
6474 attrs->at_data_location = a;
6475 break;
6476 case DW_AT_data_member_location:
6477 attrs->at_data_member_location = a;
6478 break;
6479 case DW_AT_decimal_scale:
6480 attrs->at_decimal_scale = a;
6481 break;
6482 case DW_AT_decimal_sign:
6483 attrs->at_decimal_sign = a;
6484 break;
6485 case DW_AT_default_value:
6486 attrs->at_default_value = a;
6487 break;
6488 case DW_AT_digit_count:
6489 attrs->at_digit_count = a;
6490 break;
6491 case DW_AT_discr:
6492 attrs->at_discr = a;
6493 break;
6494 case DW_AT_discr_list:
6495 attrs->at_discr_list = a;
6496 break;
6497 case DW_AT_discr_value:
6498 attrs->at_discr_value = a;
6499 break;
6500 case DW_AT_encoding:
6501 attrs->at_encoding = a;
6502 break;
6503 case DW_AT_endianity:
6504 attrs->at_endianity = a;
6505 break;
6506 case DW_AT_explicit:
6507 attrs->at_explicit = a;
6508 break;
6509 case DW_AT_is_optional:
6510 attrs->at_is_optional = a;
6511 break;
6512 case DW_AT_location:
6513 attrs->at_location = a;
6514 break;
6515 case DW_AT_lower_bound:
6516 attrs->at_lower_bound = a;
6517 break;
6518 case DW_AT_mutable:
6519 attrs->at_mutable = a;
6520 break;
6521 case DW_AT_ordering:
6522 attrs->at_ordering = a;
6523 break;
6524 case DW_AT_picture_string:
6525 attrs->at_picture_string = a;
6526 break;
6527 case DW_AT_prototyped:
6528 attrs->at_prototyped = a;
6529 break;
6530 case DW_AT_small:
6531 attrs->at_small = a;
6532 break;
6533 case DW_AT_segment:
6534 attrs->at_segment = a;
6535 break;
6536 case DW_AT_string_length:
6537 attrs->at_string_length = a;
6538 break;
6539 case DW_AT_string_length_bit_size:
6540 attrs->at_string_length_bit_size = a;
6541 break;
6542 case DW_AT_string_length_byte_size:
6543 attrs->at_string_length_byte_size = a;
6544 break;
6545 case DW_AT_threads_scaled:
6546 attrs->at_threads_scaled = a;
6547 break;
6548 case DW_AT_upper_bound:
6549 attrs->at_upper_bound = a;
6550 break;
6551 case DW_AT_use_location:
6552 attrs->at_use_location = a;
6553 break;
6554 case DW_AT_use_UTF8:
6555 attrs->at_use_UTF8 = a;
6556 break;
6557 case DW_AT_variable_parameter:
6558 attrs->at_variable_parameter = a;
6559 break;
6560 case DW_AT_virtuality:
6561 attrs->at_virtuality = a;
6562 break;
6563 case DW_AT_visibility:
6564 attrs->at_visibility = a;
6565 break;
6566 case DW_AT_vtable_elem_location:
6567 attrs->at_vtable_elem_location = a;
6568 break;
6569 default:
6570 break;
6571 }
6572 }
6573 }
6574
6575 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
6576
6577 static void
6578 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6579 {
6580 dw_die_ref c;
6581 dw_die_ref decl;
6582 struct checksum_attributes attrs;
6583
6584 CHECKSUM_ULEB128 ('D');
6585 CHECKSUM_ULEB128 (die->die_tag);
6586
6587 memset (&attrs, 0, sizeof (attrs));
6588
6589 decl = get_AT_ref (die, DW_AT_specification);
6590 if (decl != NULL)
6591 collect_checksum_attributes (&attrs, decl);
6592 collect_checksum_attributes (&attrs, die);
6593
6594 CHECKSUM_ATTR (attrs.at_name);
6595 CHECKSUM_ATTR (attrs.at_accessibility);
6596 CHECKSUM_ATTR (attrs.at_address_class);
6597 CHECKSUM_ATTR (attrs.at_allocated);
6598 CHECKSUM_ATTR (attrs.at_artificial);
6599 CHECKSUM_ATTR (attrs.at_associated);
6600 CHECKSUM_ATTR (attrs.at_binary_scale);
6601 CHECKSUM_ATTR (attrs.at_bit_offset);
6602 CHECKSUM_ATTR (attrs.at_bit_size);
6603 CHECKSUM_ATTR (attrs.at_bit_stride);
6604 CHECKSUM_ATTR (attrs.at_byte_size);
6605 CHECKSUM_ATTR (attrs.at_byte_stride);
6606 CHECKSUM_ATTR (attrs.at_const_value);
6607 CHECKSUM_ATTR (attrs.at_containing_type);
6608 CHECKSUM_ATTR (attrs.at_count);
6609 CHECKSUM_ATTR (attrs.at_data_location);
6610 CHECKSUM_ATTR (attrs.at_data_member_location);
6611 CHECKSUM_ATTR (attrs.at_decimal_scale);
6612 CHECKSUM_ATTR (attrs.at_decimal_sign);
6613 CHECKSUM_ATTR (attrs.at_default_value);
6614 CHECKSUM_ATTR (attrs.at_digit_count);
6615 CHECKSUM_ATTR (attrs.at_discr);
6616 CHECKSUM_ATTR (attrs.at_discr_list);
6617 CHECKSUM_ATTR (attrs.at_discr_value);
6618 CHECKSUM_ATTR (attrs.at_encoding);
6619 CHECKSUM_ATTR (attrs.at_endianity);
6620 CHECKSUM_ATTR (attrs.at_explicit);
6621 CHECKSUM_ATTR (attrs.at_is_optional);
6622 CHECKSUM_ATTR (attrs.at_location);
6623 CHECKSUM_ATTR (attrs.at_lower_bound);
6624 CHECKSUM_ATTR (attrs.at_mutable);
6625 CHECKSUM_ATTR (attrs.at_ordering);
6626 CHECKSUM_ATTR (attrs.at_picture_string);
6627 CHECKSUM_ATTR (attrs.at_prototyped);
6628 CHECKSUM_ATTR (attrs.at_small);
6629 CHECKSUM_ATTR (attrs.at_segment);
6630 CHECKSUM_ATTR (attrs.at_string_length);
6631 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
6632 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
6633 CHECKSUM_ATTR (attrs.at_threads_scaled);
6634 CHECKSUM_ATTR (attrs.at_upper_bound);
6635 CHECKSUM_ATTR (attrs.at_use_location);
6636 CHECKSUM_ATTR (attrs.at_use_UTF8);
6637 CHECKSUM_ATTR (attrs.at_variable_parameter);
6638 CHECKSUM_ATTR (attrs.at_virtuality);
6639 CHECKSUM_ATTR (attrs.at_visibility);
6640 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
6641 CHECKSUM_ATTR (attrs.at_type);
6642 CHECKSUM_ATTR (attrs.at_friend);
6643
6644 /* Checksum the child DIEs. */
6645 c = die->die_child;
6646 if (c) do {
6647 dw_attr_node *name_attr;
6648
6649 c = c->die_sib;
6650 name_attr = get_AT (c, DW_AT_name);
6651 if (is_template_instantiation (c))
6652 {
6653 /* Ignore instantiations of member type and function templates. */
6654 }
6655 else if (name_attr != NULL
6656 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
6657 {
6658 /* Use a shallow checksum for named nested types and member
6659 functions. */
6660 CHECKSUM_ULEB128 ('S');
6661 CHECKSUM_ULEB128 (c->die_tag);
6662 CHECKSUM_STRING (AT_string (name_attr));
6663 }
6664 else
6665 {
6666 /* Use a deep checksum for other children. */
6667 /* Mark this DIE so it gets processed when unmarking. */
6668 if (c->die_mark == 0)
6669 c->die_mark = -1;
6670 die_checksum_ordered (c, ctx, mark);
6671 }
6672 } while (c != die->die_child);
6673
6674 CHECKSUM_ULEB128 (0);
6675 }
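/* A minimal standalone sketch (not GCC code) of the child-iteration
   pattern used by the loop above: the parent records its LAST child,
   children are chained circularly through their sibling pointer, so
   starting from last->sib visits the first child and the walk stops
   once it returns to the recorded last child.  The toy_* names are
   illustrative only.  */
#include <stdio.h>

struct toy_die
{
  const char *name;
  struct toy_die *sib;		/* Next sibling (circular).  */
  struct toy_die *child;	/* Last child, or NULL.  */
};

static void
toy_walk_children (struct toy_die *die)
{
  struct toy_die *c = die->child;
  if (c)
    do
      {
	c = c->sib;
	printf ("child: %s\n", c->name);
      }
    while (c != die->child);
}

/* Usage: with children A and B of PARENT linked as a.sib = &b,
   b.sib = &a and parent.child = &b (B being the last child),
   toy_walk_children (&parent) prints "a" then "b".  */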
6676
6677 /* Add a type name and tag to a hash. */
6678 static void
6679 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
6680 {
6681 CHECKSUM_ULEB128 (tag);
6682 CHECKSUM_STRING (name);
6683 }
6684
6685 #undef CHECKSUM
6686 #undef CHECKSUM_STRING
6687 #undef CHECKSUM_ATTR
6688 #undef CHECKSUM_SLEB128
6689 #undef CHECKSUM_ULEB128
6690
6691 /* Generate the type signature for DIE. This is computed by generating an
6692 MD5 checksum over the DIE's tag, its relevant attributes, and its
6693 children. Attributes that are references to other DIEs are processed
6694 by recursion, using the MARK field to prevent infinite recursion.
6695 If the DIE is nested inside a namespace or another type, we also
6696 need to include that context in the signature. The lower 64 bits
6697 of the resulting MD5 checksum comprise the signature. */
6698
6699 static void
6700 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
6701 {
6702 int mark;
6703 const char *name;
6704 unsigned char checksum[16];
6705 struct md5_ctx ctx;
6706 dw_die_ref decl;
6707 dw_die_ref parent;
6708
6709 name = get_AT_string (die, DW_AT_name);
6710 decl = get_AT_ref (die, DW_AT_specification);
6711 parent = get_die_parent (die);
6712
6713 /* First, compute a signature for just the type name (and its surrounding
6714 context, if any). This is stored in the type unit DIE for link-time
6715 ODR (one-definition rule) checking. */
6716
6717 if (is_cxx () && name != NULL)
6718 {
6719 md5_init_ctx (&ctx);
6720
6721 /* Checksum the names of surrounding namespaces and structures. */
6722 if (parent != NULL)
6723 checksum_die_context (parent, &ctx);
6724
6725 /* Checksum the current DIE. */
6726 die_odr_checksum (die->die_tag, name, &ctx);
6727 md5_finish_ctx (&ctx, checksum);
6728
6729 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
6730 }
6731
6732 /* Next, compute the complete type signature. */
6733
6734 md5_init_ctx (&ctx);
6735 mark = 1;
6736 die->die_mark = mark;
6737
6738 /* Checksum the names of surrounding namespaces and structures. */
6739 if (parent != NULL)
6740 checksum_die_context (parent, &ctx);
6741
6742 /* Checksum the DIE and its children. */
6743 die_checksum_ordered (die, &ctx, &mark);
6744 unmark_all_dies (die);
6745 md5_finish_ctx (&ctx, checksum);
6746
6747 /* Store the signature in the type node and link the type DIE and the
6748 type node together. */
6749 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
6750 DWARF_TYPE_SIGNATURE_SIZE);
6751 die->comdat_type_p = true;
6752 die->die_id.die_type_node = type_node;
6753 type_node->type_die = die;
6754
6755 /* If the DIE is a specification, link its declaration to the type node
6756 as well. */
6757 if (decl != NULL)
6758 {
6759 decl->comdat_type_p = true;
6760 decl->die_id.die_type_node = type_node;
6761 }
6762 }
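/* A minimal standalone sketch (not GCC code) of the signature
   derivation described above, assuming libiberty's md5.h
   (md5_init_ctx / md5_process_bytes / md5_finish_ctx) is available:
   hash the identifying bytes and keep the trailing eight bytes of the
   16-byte MD5 digest, mirroring the memcpy above with
   DWARF_TYPE_SIGNATURE_SIZE == 8.  The toy_* name is illustrative.  */
#include <string.h>
#include "md5.h"

static void
toy_type_signature (const char *ident, size_t len, unsigned char sig[8])
{
  struct md5_ctx ctx;
  unsigned char checksum[16];

  md5_init_ctx (&ctx);
  md5_process_bytes (ident, len, &ctx);
  md5_finish_ctx (&ctx, checksum);
  memcpy (sig, &checksum[16 - 8], 8);
}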
6763
6764 /* Do the location expressions look the same? */
6765 static inline int
6766 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
6767 {
6768 return loc1->dw_loc_opc == loc2->dw_loc_opc
6769 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
6770 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
6771 }
6772
6773 /* Do the values look the same? */
6774 static int
6775 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
6776 {
6777 dw_loc_descr_ref loc1, loc2;
6778 rtx r1, r2;
6779
6780 if (v1->val_class != v2->val_class)
6781 return 0;
6782
6783 switch (v1->val_class)
6784 {
6785 case dw_val_class_const:
6786 return v1->v.val_int == v2->v.val_int;
6787 case dw_val_class_unsigned_const:
6788 return v1->v.val_unsigned == v2->v.val_unsigned;
6789 case dw_val_class_const_double:
6790 return v1->v.val_double.high == v2->v.val_double.high
6791 && v1->v.val_double.low == v2->v.val_double.low;
6792 case dw_val_class_wide_int:
6793 return *v1->v.val_wide == *v2->v.val_wide;
6794 case dw_val_class_vec:
6795 if (v1->v.val_vec.length != v2->v.val_vec.length
6796 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
6797 return 0;
6798 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
6799 v1->v.val_vec.length * v1->v.val_vec.elt_size))
6800 return 0;
6801 return 1;
6802 case dw_val_class_flag:
6803 return v1->v.val_flag == v2->v.val_flag;
6804 case dw_val_class_str:
6805 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
6806
6807 case dw_val_class_addr:
6808 r1 = v1->v.val_addr;
6809 r2 = v2->v.val_addr;
6810 if (GET_CODE (r1) != GET_CODE (r2))
6811 return 0;
6812 return !rtx_equal_p (r1, r2);
6813
6814 case dw_val_class_offset:
6815 return v1->v.val_offset == v2->v.val_offset;
6816
6817 case dw_val_class_loc:
6818 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
6819 loc1 && loc2;
6820 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
6821 if (!same_loc_p (loc1, loc2, mark))
6822 return 0;
6823 return !loc1 && !loc2;
6824
6825 case dw_val_class_die_ref:
6826 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
6827
6828 case dw_val_class_fde_ref:
6829 case dw_val_class_vms_delta:
6830 case dw_val_class_lbl_id:
6831 case dw_val_class_lineptr:
6832 case dw_val_class_macptr:
6833 case dw_val_class_high_pc:
6834 return 1;
6835
6836 case dw_val_class_file:
6837 return v1->v.val_file == v2->v.val_file;
6838
6839 case dw_val_class_data8:
6840 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
6841
6842 default:
6843 return 1;
6844 }
6845 }
6846
6847 /* Do the attributes look the same? */
6848
6849 static int
6850 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
6851 {
6852 if (at1->dw_attr != at2->dw_attr)
6853 return 0;
6854
6855 /* We don't care that this was compiled with a different compiler
6856 snapshot; if the output is the same, that's what matters. */
6857 if (at1->dw_attr == DW_AT_producer)
6858 return 1;
6859
6860 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
6861 }
6862
6863 /* Do the dies look the same? */
6864
6865 static int
6866 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
6867 {
6868 dw_die_ref c1, c2;
6869 dw_attr_node *a1;
6870 unsigned ix;
6871
6872 /* To avoid infinite recursion. */
6873 if (die1->die_mark)
6874 return die1->die_mark == die2->die_mark;
6875 die1->die_mark = die2->die_mark = ++(*mark);
6876
6877 if (die1->die_tag != die2->die_tag)
6878 return 0;
6879
6880 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
6881 return 0;
6882
6883 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
6884 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
6885 return 0;
6886
6887 c1 = die1->die_child;
6888 c2 = die2->die_child;
6889 if (! c1)
6890 {
6891 if (c2)
6892 return 0;
6893 }
6894 else
6895 for (;;)
6896 {
6897 if (!same_die_p (c1, c2, mark))
6898 return 0;
6899 c1 = c1->die_sib;
6900 c2 = c2->die_sib;
6901 if (c1 == die1->die_child)
6902 {
6903 if (c2 == die2->die_child)
6904 break;
6905 else
6906 return 0;
6907 }
6908 }
6909
6910 return 1;
6911 }
6912
6913 /* Do the dies look the same? Wrapper around same_die_p. */
6914
6915 static int
6916 same_die_p_wrap (dw_die_ref die1, dw_die_ref die2)
6917 {
6918 int mark = 0;
6919 int ret = same_die_p (die1, die2, &mark);
6920
6921 unmark_all_dies (die1);
6922 unmark_all_dies (die2);
6923
6924 return ret;
6925 }
6926
6927 /* The prefix to attach to symbols on DIEs in the current comdat debug
6928 info section. */
6929 static const char *comdat_symbol_id;
6930
6931 /* The index of the current symbol within the current comdat CU. */
6932 static unsigned int comdat_symbol_number;
6933
6934 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
6935 children, and set comdat_symbol_id accordingly. */
6936
6937 static void
6938 compute_section_prefix (dw_die_ref unit_die)
6939 {
6940 const char *die_name = get_AT_string (unit_die, DW_AT_name);
6941 const char *base = die_name ? lbasename (die_name) : "anonymous";
6942 char *name = XALLOCAVEC (char, strlen (base) + 64);
6943 char *p;
6944 int i, mark;
6945 unsigned char checksum[16];
6946 struct md5_ctx ctx;
6947
6948 /* Compute the checksum of the DIE, then append part of it as hex digits to
6949 the base file name of the unit. */
6950
6951 md5_init_ctx (&ctx);
6952 mark = 0;
6953 die_checksum (unit_die, &ctx, &mark);
6954 unmark_all_dies (unit_die);
6955 md5_finish_ctx (&ctx, checksum);
6956
6957 sprintf (name, "%s.", base);
6958 clean_symbol_name (name);
6959
6960 p = name + strlen (name);
6961 for (i = 0; i < 4; i++)
6962 {
6963 sprintf (p, "%.2x", checksum[i]);
6964 p += 2;
6965 }
6966
6967 comdat_symbol_id = unit_die->die_id.die_symbol = xstrdup (name);
6968 comdat_symbol_number = 0;
6969 }
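/* For illustration: a unit whose DW_AT_name ends in "foo.c" and whose
   checksum begins with the bytes 0x12 0x34 0x56 0x78 gets the prefix
   "foo.c.12345678" (subject to any rewriting done by
   clean_symbol_name).  */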
6970
6971 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
6972
6973 static int
6974 is_type_die (dw_die_ref die)
6975 {
6976 switch (die->die_tag)
6977 {
6978 case DW_TAG_array_type:
6979 case DW_TAG_class_type:
6980 case DW_TAG_interface_type:
6981 case DW_TAG_enumeration_type:
6982 case DW_TAG_pointer_type:
6983 case DW_TAG_reference_type:
6984 case DW_TAG_rvalue_reference_type:
6985 case DW_TAG_string_type:
6986 case DW_TAG_structure_type:
6987 case DW_TAG_subroutine_type:
6988 case DW_TAG_union_type:
6989 case DW_TAG_ptr_to_member_type:
6990 case DW_TAG_set_type:
6991 case DW_TAG_subrange_type:
6992 case DW_TAG_base_type:
6993 case DW_TAG_const_type:
6994 case DW_TAG_file_type:
6995 case DW_TAG_packed_type:
6996 case DW_TAG_volatile_type:
6997 case DW_TAG_typedef:
6998 return 1;
6999 default:
7000 return 0;
7001 }
7002 }
7003
7004 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7005 Basically, we want to choose the bits that are likely to be shared between
7006 compilations (types) and leave out the bits that are specific to individual
7007 compilations (functions). */
7008
7009 static int
7010 is_comdat_die (dw_die_ref c)
7011 {
7012 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7013 we do for stabs. The advantage is a greater likelihood of sharing between
7014 objects that don't include headers in the same order (and therefore would
7015 put the base types in a different comdat). jason 8/28/00 */
7016
7017 if (c->die_tag == DW_TAG_base_type)
7018 return 0;
7019
7020 if (c->die_tag == DW_TAG_pointer_type
7021 || c->die_tag == DW_TAG_reference_type
7022 || c->die_tag == DW_TAG_rvalue_reference_type
7023 || c->die_tag == DW_TAG_const_type
7024 || c->die_tag == DW_TAG_volatile_type)
7025 {
7026 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7027
7028 return t ? is_comdat_die (t) : 0;
7029 }
7030
7031 return is_type_die (c);
7032 }
7033
7034 /* Returns 1 iff C is the sort of DIE that might be referred to from another
7035 compilation unit. */
7036
7037 static int
7038 is_symbol_die (dw_die_ref c)
7039 {
7040 return (is_type_die (c)
7041 || is_declaration_die (c)
7042 || c->die_tag == DW_TAG_namespace
7043 || c->die_tag == DW_TAG_module);
7044 }
7045
7046 /* Returns true iff C is a compile-unit DIE. */
7047
7048 static inline bool
7049 is_cu_die (dw_die_ref c)
7050 {
7051 return c && c->die_tag == DW_TAG_compile_unit;
7052 }
7053
7054 /* Returns true iff C is a unit DIE of some sort. */
7055
7056 static inline bool
7057 is_unit_die (dw_die_ref c)
7058 {
7059 return c && (c->die_tag == DW_TAG_compile_unit
7060 || c->die_tag == DW_TAG_partial_unit
7061 || c->die_tag == DW_TAG_type_unit);
7062 }
7063
7064 /* Returns true iff C is a namespace DIE. */
7065
7066 static inline bool
7067 is_namespace_die (dw_die_ref c)
7068 {
7069 return c && c->die_tag == DW_TAG_namespace;
7070 }
7071
7072 /* Returns true iff C is a class or structure DIE. */
7073
7074 static inline bool
7075 is_class_die (dw_die_ref c)
7076 {
7077 return c && (c->die_tag == DW_TAG_class_type
7078 || c->die_tag == DW_TAG_structure_type);
7079 }
7080
7081 /* Return non-zero if this DIE is a template parameter. */
7082
7083 static inline bool
7084 is_template_parameter (dw_die_ref die)
7085 {
7086 switch (die->die_tag)
7087 {
7088 case DW_TAG_template_type_param:
7089 case DW_TAG_template_value_param:
7090 case DW_TAG_GNU_template_template_param:
7091 case DW_TAG_GNU_template_parameter_pack:
7092 return true;
7093 default:
7094 return false;
7095 }
7096 }
7097
7098 /* Return non-zero if this DIE represents a template instantiation. */
7099
7100 static inline bool
7101 is_template_instantiation (dw_die_ref die)
7102 {
7103 dw_die_ref c;
7104
7105 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7106 return false;
7107 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7108 return false;
7109 }
7110
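/* Generate a fresh internal label name using PREFIX and return it in
   newly allocated memory. */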
7111 static char *
7112 gen_internal_sym (const char *prefix)
7113 {
7114 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7115
7116 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7117 return xstrdup (buf);
7118 }
7119
7120 /* Assign symbols to all worthy DIEs under DIE. */
7121
7122 static void
7123 assign_symbol_names (dw_die_ref die)
7124 {
7125 dw_die_ref c;
7126
7127 if (is_symbol_die (die) && !die->comdat_type_p)
7128 {
7129 if (comdat_symbol_id)
7130 {
7131 char *p = XALLOCAVEC (char, strlen (comdat_symbol_id) + 64);
7132
7133 sprintf (p, "%s.%s.%x", DIE_LABEL_PREFIX,
7134 comdat_symbol_id, comdat_symbol_number++);
7135 die->die_id.die_symbol = xstrdup (p);
7136 }
7137 else
7138 die->die_id.die_symbol = gen_internal_sym ("LDIE");
7139 }
7140
7141 FOR_EACH_CHILD (die, c, assign_symbol_names (c));
7142 }
7143
7144 struct cu_hash_table_entry
7145 {
7146 dw_die_ref cu;
7147 unsigned min_comdat_num, max_comdat_num;
7148 struct cu_hash_table_entry *next;
7149 };
7150
7151 /* Helpers to manipulate hash table of CUs. */
7152
7153 struct cu_hash_table_entry_hasher : pointer_hash <cu_hash_table_entry>
7154 {
7155 typedef die_struct *compare_type;
7156 static inline hashval_t hash (const cu_hash_table_entry *);
7157 static inline bool equal (const cu_hash_table_entry *, const die_struct *);
7158 static inline void remove (cu_hash_table_entry *);
7159 };
7160
7161 inline hashval_t
7162 cu_hash_table_entry_hasher::hash (const cu_hash_table_entry *entry)
7163 {
7164 return htab_hash_string (entry->cu->die_id.die_symbol);
7165 }
7166
7167 inline bool
7168 cu_hash_table_entry_hasher::equal (const cu_hash_table_entry *entry1,
7169 const die_struct *entry2)
7170 {
7171 return !strcmp (entry1->cu->die_id.die_symbol, entry2->die_id.die_symbol);
7172 }
7173
7174 inline void
7175 cu_hash_table_entry_hasher::remove (cu_hash_table_entry *entry)
7176 {
7177 struct cu_hash_table_entry *next;
7178
7179 while (entry)
7180 {
7181 next = entry->next;
7182 free (entry);
7183 entry = next;
7184 }
7185 }
7186
7187 typedef hash_table<cu_hash_table_entry_hasher> cu_hash_type;
7188
7189 /* Check whether we have already seen this CU and set up SYM_NUM
7190 accordingly. */
7191 static int
7192 check_duplicate_cu (dw_die_ref cu, cu_hash_type *htable, unsigned int *sym_num)
7193 {
7194 struct cu_hash_table_entry dummy;
7195 struct cu_hash_table_entry **slot, *entry, *last = &dummy;
7196
7197 dummy.max_comdat_num = 0;
7198
7199 slot = htable->find_slot_with_hash (cu,
7200 htab_hash_string (cu->die_id.die_symbol),
7201 INSERT);
7202 entry = *slot;
7203
7204 for (; entry; last = entry, entry = entry->next)
7205 {
7206 if (same_die_p_wrap (cu, entry->cu))
7207 break;
7208 }
7209
7210 if (entry)
7211 {
7212 *sym_num = entry->min_comdat_num;
7213 return 1;
7214 }
7215
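/* Not seen before: chain a new entry and start its comdat numbering where
   the previous entry (or the zero-initialized dummy) left off. */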
7216 entry = XCNEW (struct cu_hash_table_entry);
7217 entry->cu = cu;
7218 entry->min_comdat_num = *sym_num = last->max_comdat_num;
7219 entry->next = *slot;
7220 *slot = entry;
7221
7222 return 0;
7223 }
7224
7225 /* Record SYM_NUM as the maximum comdat number seen for CU in HTABLE. */
7226 static void
7227 record_comdat_symbol_number (dw_die_ref cu, cu_hash_type *htable,
7228 unsigned int sym_num)
7229 {
7230 struct cu_hash_table_entry **slot, *entry;
7231
7232 slot = htable->find_slot_with_hash (cu,
7233 htab_hash_string (cu->die_id.die_symbol),
7234 NO_INSERT);
7235 entry = *slot;
7236
7237 entry->max_comdat_num = sym_num;
7238 }
7239
7240 /* Traverse the DIE (which is always comp_unit_die), and set up
7241 additional compilation units for each of the include files we see
7242 bracketed by BINCL/EINCL. */
7243
7244 static void
7245 break_out_includes (dw_die_ref die)
7246 {
7247 dw_die_ref c;
7248 dw_die_ref unit = NULL;
7249 limbo_die_node *node, **pnode;
7250
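/* Walk the children of DIE: they are kept on a circular list through
   die_sib, with die_child pointing at the last child, so the walk starts
   at die_child->die_sib and stops once it wraps back to die_child. */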
7251 c = die->die_child;
7252 if (c) do {
7253 dw_die_ref prev = c;
7254 c = c->die_sib;
7255 while (c->die_tag == DW_TAG_GNU_BINCL || c->die_tag == DW_TAG_GNU_EINCL
7256 || (unit && is_comdat_die (c)))
7257 {
7258 dw_die_ref next = c->die_sib;
7259
7260 /* This DIE is for a secondary CU; remove it from the main one. */
7261 remove_child_with_prev (c, prev);
7262
7263 if (c->die_tag == DW_TAG_GNU_BINCL)
7264 unit = push_new_compile_unit (unit, c);
7265 else if (c->die_tag == DW_TAG_GNU_EINCL)
7266 unit = pop_compile_unit (unit);
7267 else
7268 add_child_die (unit, c);
7269 c = next;
7270 if (c == die->die_child)
7271 break;
7272 }
7273 } while (c != die->die_child);
7274
7275 #if 0
7276 /* We can only use this in debugging, since the frontend doesn't check
7277 to make sure that we leave every include file we enter. */
7278 gcc_assert (!unit);
7279 #endif
7280
7281 assign_symbol_names (die);
7282 cu_hash_type cu_hash_table (10);
7283 for (node = limbo_die_list, pnode = &limbo_die_list;
7284 node;
7285 node = node->next)
7286 {
7287 int is_dupl;
7288
7289 compute_section_prefix (node->die);
7290 is_dupl = check_duplicate_cu (node->die, &cu_hash_table,
7291 &comdat_symbol_number);
7292 assign_symbol_names (node->die);
7293 if (is_dupl)
7294 *pnode = node->next;
7295 else
7296 {
7297 pnode = &node->next;
7298 record_comdat_symbol_number (node->die, &cu_hash_table,
7299 comdat_symbol_number);
7300 }
7301 }
7302 }
7303
7304 /* Return non-zero if this DIE is a declaration. */
7305
7306 static int
7307 is_declaration_die (dw_die_ref die)
7308 {
7309 dw_attr_node *a;
7310 unsigned ix;
7311
7312 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7313 if (a->dw_attr == DW_AT_declaration)
7314 return 1;
7315
7316 return 0;
7317 }
7318
7319 /* Return non-zero if this DIE is nested inside a subprogram. */
7320
7321 static int
7322 is_nested_in_subprogram (dw_die_ref die)
7323 {
7324 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7325
7326 if (decl == NULL)
7327 decl = die;
7328 return local_scope_p (decl);
7329 }
7330
7331 /* Return non-zero if this DIE contains a defining declaration of a
7332 subprogram. */
7333
7334 static int
7335 contains_subprogram_definition (dw_die_ref die)
7336 {
7337 dw_die_ref c;
7338
7339 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7340 return 1;
7341 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7342 return 0;
7343 }
7344
7345 /* Return non-zero if this is a type DIE that should be moved to a
7346 COMDAT .debug_types section. */
7347
7348 static int
7349 should_move_die_to_comdat (dw_die_ref die)
7350 {
7351 switch (die->die_tag)
7352 {
7353 case DW_TAG_class_type:
7354 case DW_TAG_structure_type:
7355 case DW_TAG_enumeration_type:
7356 case DW_TAG_union_type:
7357 /* Don't move declarations, inlined instances, types nested in a
7358 subprogram, or types that contain subprogram definitions. */
7359 if (is_declaration_die (die)
7360 || get_AT (die, DW_AT_abstract_origin)
7361 || is_nested_in_subprogram (die)
7362 || contains_subprogram_definition (die))
7363 return 0;
7364 return 1;
7365 case DW_TAG_array_type:
7366 case DW_TAG_interface_type:
7367 case DW_TAG_pointer_type:
7368 case DW_TAG_reference_type:
7369 case DW_TAG_rvalue_reference_type:
7370 case DW_TAG_string_type:
7371 case DW_TAG_subroutine_type:
7372 case DW_TAG_ptr_to_member_type:
7373 case DW_TAG_set_type:
7374 case DW_TAG_subrange_type:
7375 case DW_TAG_base_type:
7376 case DW_TAG_const_type:
7377 case DW_TAG_file_type:
7378 case DW_TAG_packed_type:
7379 case DW_TAG_volatile_type:
7380 case DW_TAG_typedef:
7381 default:
7382 return 0;
7383 }
7384 }
7385
7386 /* Make a clone of DIE. */
7387
7388 static dw_die_ref
7389 clone_die (dw_die_ref die)
7390 {
7391 dw_die_ref clone;
7392 dw_attr_node *a;
7393 unsigned ix;
7394
7395 clone = ggc_cleared_alloc<die_node> ();
7396 clone->die_tag = die->die_tag;
7397
7398 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7399 add_dwarf_attr (clone, a);
7400
7401 return clone;
7402 }
7403
7404 /* Make a clone of the tree rooted at DIE. */
7405
7406 static dw_die_ref
7407 clone_tree (dw_die_ref die)
7408 {
7409 dw_die_ref c;
7410 dw_die_ref clone = clone_die (die);
7411
7412 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7413
7414 return clone;
7415 }
7416
7417 /* Make a clone of DIE as a declaration. */
7418
7419 static dw_die_ref
7420 clone_as_declaration (dw_die_ref die)
7421 {
7422 dw_die_ref clone;
7423 dw_die_ref decl;
7424 dw_attr_node *a;
7425 unsigned ix;
7426
7427 /* If the DIE is already a declaration, just clone it. */
7428 if (is_declaration_die (die))
7429 return clone_die (die);
7430
7431 /* If the DIE is a specification, just clone its declaration DIE. */
7432 decl = get_AT_ref (die, DW_AT_specification);
7433 if (decl != NULL)
7434 {
7435 clone = clone_die (decl);
7436 if (die->comdat_type_p)
7437 add_AT_die_ref (clone, DW_AT_signature, die);
7438 return clone;
7439 }
7440
7441 clone = ggc_cleared_alloc<die_node> ();
7442 clone->die_tag = die->die_tag;
7443
7444 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7445 {
7446 /* We don't want to copy over all attributes.
7447 For example, we don't want DW_AT_byte_size because otherwise we will no
7448 longer have a declaration and GDB will treat it as a definition. */
7449
7450 switch (a->dw_attr)
7451 {
7452 case DW_AT_abstract_origin:
7453 case DW_AT_artificial:
7454 case DW_AT_containing_type:
7455 case DW_AT_external:
7456 case DW_AT_name:
7457 case DW_AT_type:
7458 case DW_AT_virtuality:
7459 case DW_AT_linkage_name:
7460 case DW_AT_MIPS_linkage_name:
7461 add_dwarf_attr (clone, a);
7462 break;
7463 case DW_AT_byte_size:
7464 default:
7465 break;
7466 }
7467 }
7468
7469 if (die->comdat_type_p)
7470 add_AT_die_ref (clone, DW_AT_signature, die);
7471
7472 add_AT_flag (clone, DW_AT_declaration, 1);
7473 return clone;
7474 }
7475
7476
7477 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7478
7479 struct decl_table_entry
7480 {
7481 dw_die_ref orig;
7482 dw_die_ref copy;
7483 };
7484
7485 /* Helpers to manipulate hash table of copied declarations. */
7486
7487 /* Hashtable helpers. */
7488
7489 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7490 {
7491 typedef die_struct *compare_type;
7492 static inline hashval_t hash (const decl_table_entry *);
7493 static inline bool equal (const decl_table_entry *, const die_struct *);
7494 };
7495
7496 inline hashval_t
7497 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7498 {
7499 return htab_hash_pointer (entry->orig);
7500 }
7501
7502 inline bool
7503 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7504 const die_struct *entry2)
7505 {
7506 return entry1->orig == entry2;
7507 }
7508
7509 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7510
7511 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7512 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7513 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7514 to check if the ancestor has already been copied into UNIT. */
7515
7516 static dw_die_ref
7517 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7518 decl_hash_type *decl_table)
7519 {
7520 dw_die_ref parent = die->die_parent;
7521 dw_die_ref new_parent = unit;
7522 dw_die_ref copy;
7523 decl_table_entry **slot = NULL;
7524 struct decl_table_entry *entry = NULL;
7525
7526 if (decl_table)
7527 {
7528 /* Check if the entry has already been copied to UNIT. */
7529 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7530 INSERT);
7531 if (*slot != HTAB_EMPTY_ENTRY)
7532 {
7533 entry = *slot;
7534 return entry->copy;
7535 }
7536
7537 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7538 entry = XCNEW (struct decl_table_entry);
7539 entry->orig = die;
7540 entry->copy = NULL;
7541 *slot = entry;
7542 }
7543
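/* Recurse upward: if the parent has a DW_AT_specification, copy the
   context of the declaration it points to instead, and stop once a
   unit DIE is reached. */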
7544 if (parent != NULL)
7545 {
7546 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7547 if (spec != NULL)
7548 parent = spec;
7549 if (!is_unit_die (parent))
7550 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7551 }
7552
7553 copy = clone_as_declaration (die);
7554 add_child_die (new_parent, copy);
7555
7556 if (decl_table)
7557 {
7558 /* Record the pointer to the copy. */
7559 entry->copy = copy;
7560 }
7561
7562 return copy;
7563 }
7564 /* Copy the declaration context to the new type unit DIE. This includes
7565 any surrounding namespace or type declarations. If the DIE has an
7566 AT_specification attribute, it also includes attributes and children
7567 attached to the specification, and returns a pointer to the original
7568 parent of the declaration DIE. Returns NULL otherwise. */
7569
7570 static dw_die_ref
7571 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7572 {
7573 dw_die_ref decl;
7574 dw_die_ref new_decl;
7575 dw_die_ref orig_parent = NULL;
7576
7577 decl = get_AT_ref (die, DW_AT_specification);
7578 if (decl == NULL)
7579 decl = die;
7580 else
7581 {
7582 unsigned ix;
7583 dw_die_ref c;
7584 dw_attr_node *a;
7585
7586 /* The original DIE will be changed to a declaration, and must
7587 be moved to be a child of the original declaration DIE. */
7588 orig_parent = decl->die_parent;
7589
7590 /* Copy the type node pointer from the new DIE to the original
7591 declaration DIE so we can forward references later. */
7592 decl->comdat_type_p = true;
7593 decl->die_id.die_type_node = die->die_id.die_type_node;
7594
7595 remove_AT (die, DW_AT_specification);
7596
7597 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7598 {
7599 if (a->dw_attr != DW_AT_name
7600 && a->dw_attr != DW_AT_declaration
7601 && a->dw_attr != DW_AT_external)
7602 add_dwarf_attr (die, a);
7603 }
7604
7605 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7606 }
7607
7608 if (decl->die_parent != NULL
7609 && !is_unit_die (decl->die_parent))
7610 {
7611 new_decl = copy_ancestor_tree (unit, decl, NULL);
7612 if (new_decl != NULL)
7613 {
7614 remove_AT (new_decl, DW_AT_signature);
7615 add_AT_specification (die, new_decl);
7616 }
7617 }
7618
7619 return orig_parent;
7620 }
7621
7622 /* Generate the skeleton ancestor tree for the given NODE, then clone
7623 the DIE and add the clone into the tree. */
7624
7625 static void
7626 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7627 {
7628 if (node->new_die != NULL)
7629 return;
7630
7631 node->new_die = clone_as_declaration (node->old_die);
7632
7633 if (node->parent != NULL)
7634 {
7635 generate_skeleton_ancestor_tree (node->parent);
7636 add_child_die (node->parent->new_die, node->new_die);
7637 }
7638 }
7639
7640 /* Generate a skeleton tree of DIEs containing any declarations that are
7641 found in the original tree. We traverse the tree looking for declaration
7642 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7643
7644 static void
7645 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7646 {
7647 skeleton_chain_node node;
7648 dw_die_ref c;
7649 dw_die_ref first;
7650 dw_die_ref prev = NULL;
7651 dw_die_ref next = NULL;
7652
7653 node.parent = parent;
7654
7655 first = c = parent->old_die->die_child;
7656 if (c)
7657 next = c->die_sib;
7658 if (c) do {
7659 if (prev == NULL || prev->die_sib == c)
7660 prev = c;
7661 c = next;
7662 next = (c == first ? NULL : c->die_sib);
7663 node.old_die = c;
7664 node.new_die = NULL;
7665 if (is_declaration_die (c))
7666 {
7667 if (is_template_instantiation (c))
7668 {
7669 /* Instantiated templates do not need to be cloned into the
7670 type unit. Just move the DIE and its children back to
7671 the skeleton tree (in the main CU). */
7672 remove_child_with_prev (c, prev);
7673 add_child_die (parent->new_die, c);
7674 c = prev;
7675 }
7676 else
7677 {
7678 /* Clone the existing DIE, move the original to the skeleton
7679 tree (which is in the main CU), and put the clone, with
7680 all the original's children, where the original came from
7681 (which is about to be moved to the type unit). */
7682 dw_die_ref clone = clone_die (c);
7683 move_all_children (c, clone);
7684
7685 /* If the original has a DW_AT_object_pointer attribute,
7686 it would now point to a child DIE just moved to the
7687 cloned tree, so we need to remove that attribute from
7688 the original. */
7689 remove_AT (c, DW_AT_object_pointer);
7690
7691 replace_child (c, clone, prev);
7692 generate_skeleton_ancestor_tree (parent);
7693 add_child_die (parent->new_die, c);
7694 node.new_die = c;
7695 c = clone;
7696 }
7697 }
7698 generate_skeleton_bottom_up (&node);
7699 } while (next != NULL);
7700 }
7701
7702 /* Wrapper function for generate_skeleton_bottom_up. */
7703
7704 static dw_die_ref
7705 generate_skeleton (dw_die_ref die)
7706 {
7707 skeleton_chain_node node;
7708
7709 node.old_die = die;
7710 node.new_die = NULL;
7711 node.parent = NULL;
7712
7713 /* If this type definition is nested inside another type,
7714 and is not an instantiation of a template, always leave
7715 at least a declaration in its place. */
7716 if (die->die_parent != NULL
7717 && is_type_die (die->die_parent)
7718 && !is_template_instantiation (die))
7719 node.new_die = clone_as_declaration (die);
7720
7721 generate_skeleton_bottom_up (&node);
7722 return node.new_die;
7723 }
7724
7725 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
7726 declaration. The original DIE is moved to a new compile unit so that
7727 existing references to it follow it to the new location. If any of the
7728 original DIE's descendants is a declaration, we need to replace the
7729 original DIE with a skeleton tree and move the declarations back into the
7730 skeleton tree. */
7731
7732 static dw_die_ref
7733 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
7734 dw_die_ref prev)
7735 {
7736 dw_die_ref skeleton, orig_parent;
7737
7738 /* Copy the declaration context to the type unit DIE. If the returned
7739 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
7740 that DIE. */
7741 orig_parent = copy_declaration_context (unit, child);
7742
7743 skeleton = generate_skeleton (child);
7744 if (skeleton == NULL)
7745 remove_child_with_prev (child, prev);
7746 else
7747 {
7748 skeleton->comdat_type_p = true;
7749 skeleton->die_id.die_type_node = child->die_id.die_type_node;
7750
7751 /* If the original DIE was a specification, we need to put
7752 the skeleton under the parent DIE of the declaration.
7753 This leaves the original declaration in the tree, but
7754 it will be pruned later since there are no longer any
7755 references to it. */
7756 if (orig_parent != NULL)
7757 {
7758 remove_child_with_prev (child, prev);
7759 add_child_die (orig_parent, skeleton);
7760 }
7761 else
7762 replace_child (child, skeleton, prev);
7763 }
7764
7765 return skeleton;
7766 }
7767
7768 static void
7769 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7770 comdat_type_node *type_node,
7771 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
7772
7773 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
7774 procedure, put it under TYPE_NODE and return the copy. Continue looking for
7775 DWARF procedure references in the DW_AT_location attribute. */
7776
7777 static dw_die_ref
7778 copy_dwarf_procedure (dw_die_ref die,
7779 comdat_type_node *type_node,
7780 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7781 {
7782 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
7783
7784 /* DWARF procedures are not supposed to have children... */
7785 gcc_assert (die->die_child == NULL);
7786
7787 /* ... and they are supposed to have only one attribute: DW_AT_location. */
7788 gcc_assert (vec_safe_length (die->die_attr) == 1
7789 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
7790
7791 /* Do not copy DWARF procedures more than once. */
7792 bool existed;
7793 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
7794 if (existed)
7795 return die_copy;
7796
7797 die_copy = clone_die (die);
7798 add_child_die (type_node->root_die, die_copy);
7799 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
7800 return die_copy;
7801 }
7802
7803 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
7804 procedures in DIE's attributes. */
7805
7806 static void
7807 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7808 comdat_type_node *type_node,
7809 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7810 {
7811 dw_attr_node *a;
7812 unsigned i;
7813
7814 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
7815 {
7816 dw_loc_descr_ref loc;
7817
7818 if (a->dw_attr_val.val_class != dw_val_class_loc)
7819 continue;
7820
7821 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
7822 {
7823 switch (loc->dw_loc_opc)
7824 {
7825 case DW_OP_call2:
7826 case DW_OP_call4:
7827 case DW_OP_call_ref:
7828 gcc_assert (loc->dw_loc_oprnd1.val_class
7829 == dw_val_class_die_ref);
7830 loc->dw_loc_oprnd1.v.val_die_ref.die
7831 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
7832 type_node,
7833 copied_dwarf_procs);
7834
7835 default:
7836 break;
7837 }
7838 }
7839 }
7840 }
7841
7842 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
7843 rewrite references to point to the copies.
7844
7845 References are looked for in DIE's attributes and, recursively, in the
7846 attributes of all its children that are location descriptions.
7847 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
7848 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
7849
7850 static void
7851 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
7852 comdat_type_node *type_node,
7853 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7854 {
7855 dw_die_ref c;
7856
7857 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
7858 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
7859 type_node,
7860 copied_dwarf_procs));
7861 }
7862
7863 /* Traverse the DIE and set up additional .debug_types sections for each
7864 type worthy of being placed in a COMDAT section. */
7865
7866 static void
7867 break_out_comdat_types (dw_die_ref die)
7868 {
7869 dw_die_ref c;
7870 dw_die_ref first;
7871 dw_die_ref prev = NULL;
7872 dw_die_ref next = NULL;
7873 dw_die_ref unit = NULL;
7874
7875 first = c = die->die_child;
7876 if (c)
7877 next = c->die_sib;
7878 if (c) do {
7879 if (prev == NULL || prev->die_sib == c)
7880 prev = c;
7881 c = next;
7882 next = (c == first ? NULL : c->die_sib);
7883 if (should_move_die_to_comdat (c))
7884 {
7885 dw_die_ref replacement;
7886 comdat_type_node *type_node;
7887
7888 /* Break out nested types into their own type units. */
7889 break_out_comdat_types (c);
7890
7891 /* Create a new type unit DIE as the root for the new tree, and
7892 add it to the list of comdat types. */
7893 unit = new_die (DW_TAG_type_unit, NULL, NULL);
7894 add_AT_unsigned (unit, DW_AT_language,
7895 get_AT_unsigned (comp_unit_die (), DW_AT_language));
7896 type_node = ggc_cleared_alloc<comdat_type_node> ();
7897 type_node->root_die = unit;
7898 type_node->next = comdat_type_list;
7899 comdat_type_list = type_node;
7900
7901 /* Generate the type signature. */
7902 generate_type_signature (c, type_node);
7903
7904 /* Copy the declaration context, attributes, and children of the
7905 declaration into the new type unit DIE, then remove this DIE
7906 from the main CU (or replace it with a skeleton if necessary). */
7907 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
7908 type_node->skeleton_die = replacement;
7909
7910 /* Add the DIE to the new compunit. */
7911 add_child_die (unit, c);
7912
7913 /* Types can reference DWARF procedures for type size or data location
7914 expressions. Calls in DWARF expressions cannot target procedures
7915 that are not in the same section. So we must copy DWARF procedures
7916 along with this type and then rewrite references to them. */
7917 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
7918 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
7919
7920 if (replacement != NULL)
7921 c = replacement;
7922 }
7923 else if (c->die_tag == DW_TAG_namespace
7924 || c->die_tag == DW_TAG_class_type
7925 || c->die_tag == DW_TAG_structure_type
7926 || c->die_tag == DW_TAG_union_type)
7927 {
7928 /* Look for nested types that can be broken out. */
7929 break_out_comdat_types (c);
7930 }
7931 } while (next != NULL);
7932 }
7933
7934 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
7935 Enter all the cloned children into the hash table decl_table. */
7936
7937 static dw_die_ref
7938 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
7939 {
7940 dw_die_ref c;
7941 dw_die_ref clone;
7942 struct decl_table_entry *entry;
7943 decl_table_entry **slot;
7944
7945 if (die->die_tag == DW_TAG_subprogram)
7946 clone = clone_as_declaration (die);
7947 else
7948 clone = clone_die (die);
7949
7950 slot = decl_table->find_slot_with_hash (die,
7951 htab_hash_pointer (die), INSERT);
7952
7953 /* Assert that DIE isn't in the hash table yet. If it were already
7954 there, its ancestors would necessarily be there as well, and therefore
7955 clone_tree_partial wouldn't have been called. */
7956 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
7957
7958 entry = XCNEW (struct decl_table_entry);
7959 entry->orig = die;
7960 entry->copy = clone;
7961 *slot = entry;
7962
7963 if (die->die_tag != DW_TAG_subprogram)
7964 FOR_EACH_CHILD (die, c,
7965 add_child_die (clone, clone_tree_partial (c, decl_table)));
7966
7967 return clone;
7968 }
7969
7970 /* Walk the DIE and its children, looking for references to incomplete
7971 or trivial types that are unmarked (i.e., that are not in the current
7972 type_unit). */
7973
7974 static void
7975 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
7976 {
7977 dw_die_ref c;
7978 dw_attr_node *a;
7979 unsigned ix;
7980
7981 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7982 {
7983 if (AT_class (a) == dw_val_class_die_ref)
7984 {
7985 dw_die_ref targ = AT_ref (a);
7986 decl_table_entry **slot;
7987 struct decl_table_entry *entry;
7988
7989 if (targ->die_mark != 0 || targ->comdat_type_p)
7990 continue;
7991
7992 slot = decl_table->find_slot_with_hash (targ,
7993 htab_hash_pointer (targ),
7994 INSERT);
7995
7996 if (*slot != HTAB_EMPTY_ENTRY)
7997 {
7998 /* TARG has already been copied, so we just need to
7999 modify the reference to point to the copy. */
8000 entry = *slot;
8001 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8002 }
8003 else
8004 {
8005 dw_die_ref parent = unit;
8006 dw_die_ref copy = clone_die (targ);
8007
8008 /* Record in DECL_TABLE that TARG has been copied.
8009 Need to do this now, before the recursive call,
8010 because DECL_TABLE may be expanded and SLOT
8011 would no longer be a valid pointer. */
8012 entry = XCNEW (struct decl_table_entry);
8013 entry->orig = targ;
8014 entry->copy = copy;
8015 *slot = entry;
8016
8017 /* If TARG is not a declaration DIE, we need to copy its
8018 children. */
8019 if (!is_declaration_die (targ))
8020 {
8021 FOR_EACH_CHILD (
8022 targ, c,
8023 add_child_die (copy,
8024 clone_tree_partial (c, decl_table)));
8025 }
8026
8027 /* Make sure the cloned tree is marked as part of the
8028 type unit. */
8029 mark_dies (copy);
8030
8031 /* If TARG has surrounding context, copy its ancestor tree
8032 into the new type unit. */
8033 if (targ->die_parent != NULL
8034 && !is_unit_die (targ->die_parent))
8035 parent = copy_ancestor_tree (unit, targ->die_parent,
8036 decl_table);
8037
8038 add_child_die (parent, copy);
8039 a->dw_attr_val.v.val_die_ref.die = copy;
8040
8041 /* Make sure the newly-copied DIE is walked. If it was
8042 installed in a previously-added context, it won't
8043 get visited otherwise. */
8044 if (parent != unit)
8045 {
8046 /* Find the highest point of the newly-added tree,
8047 mark each node along the way, and walk from there. */
8048 parent->die_mark = 1;
8049 while (parent->die_parent
8050 && parent->die_parent->die_mark == 0)
8051 {
8052 parent = parent->die_parent;
8053 parent->die_mark = 1;
8054 }
8055 copy_decls_walk (unit, parent, decl_table);
8056 }
8057 }
8058 }
8059 }
8060
8061 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8062 }
8063
8064 /* Copy declarations for "unworthy" types into the new comdat section.
8065 Incomplete types, modified types, and certain other types aren't broken
8066 out into comdat sections of their own, so they don't have a signature,
8067 and we need to copy the declaration into the same section so that we
8068 don't have an external reference. */
8069
8070 static void
8071 copy_decls_for_unworthy_types (dw_die_ref unit)
8072 {
8073 mark_dies (unit);
8074 decl_hash_type decl_table (10);
8075 copy_decls_walk (unit, unit, &decl_table);
8076 unmark_dies (unit);
8077 }
8078
8079 /* Traverse the DIE and add a sibling attribute if it may have the
8080 effect of speeding up access to siblings. To save some space,
8081 avoid generating sibling attributes for DIEs without children. */
8082
8083 static void
8084 add_sibling_attributes (dw_die_ref die)
8085 {
8086 dw_die_ref c;
8087
8088 if (! die->die_child)
8089 return;
8090
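/* The parent's die_child field points at its last child, so the last
   sibling (which needs no DW_AT_sibling) is the one equal to
   die_parent->die_child. */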
8091 if (die->die_parent && die != die->die_parent->die_child)
8092 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8093
8094 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8095 }
8096
8097 /* Output all location lists for the DIE and its children. */
8098
8099 static void
8100 output_location_lists (dw_die_ref die)
8101 {
8102 dw_die_ref c;
8103 dw_attr_node *a;
8104 unsigned ix;
8105
8106 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8107 if (AT_class (a) == dw_val_class_loc_list)
8108 output_loc_list (AT_loc_list (a));
8109
8110 FOR_EACH_CHILD (die, c, output_location_lists (c));
8111 }
8112
8113 /* We want to limit the number of external references, because they are
8114 larger than local references: a relocation takes multiple words, and
8115 even a sig8 reference is always eight bytes, whereas a local reference
8116 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8117 So if we encounter multiple external references to the same type DIE, we
8118 make a local typedef stub for it and redirect all references there.
8119
8120 This is the element of the hash table for keeping track of these
8121 references. */
8122
8123 struct external_ref
8124 {
8125 dw_die_ref type;
8126 dw_die_ref stub;
8127 unsigned n_refs;
8128 };
8129
8130 /* Hashtable helpers. */
8131
8132 struct external_ref_hasher : free_ptr_hash <external_ref>
8133 {
8134 static inline hashval_t hash (const external_ref *);
8135 static inline bool equal (const external_ref *, const external_ref *);
8136 };
8137
8138 inline hashval_t
8139 external_ref_hasher::hash (const external_ref *r)
8140 {
8141 dw_die_ref die = r->type;
8142 hashval_t h = 0;
8143
8144 /* We can't use the address of the DIE for hashing, because
8145 that will make the order of the stub DIEs non-deterministic. */
8146 if (! die->comdat_type_p)
8147 /* We have a symbol; use it to compute a hash. */
8148 h = htab_hash_string (die->die_id.die_symbol);
8149 else
8150 {
8151 /* We have a type signature; use a subset of the bits as the hash.
8152 The 8-byte signature is at least as large as hashval_t. */
8153 comdat_type_node *type_node = die->die_id.die_type_node;
8154 memcpy (&h, type_node->signature, sizeof (h));
8155 }
8156 return h;
8157 }
8158
8159 inline bool
8160 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8161 {
8162 return r1->type == r2->type;
8163 }
8164
8165 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8166
8167 /* Return a pointer to the external_ref for references to DIE. */
8168
8169 static struct external_ref *
8170 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8171 {
8172 struct external_ref ref, *ref_p;
8173 external_ref **slot;
8174
8175 ref.type = die;
8176 slot = map->find_slot (&ref, INSERT);
8177 if (*slot != HTAB_EMPTY_ENTRY)
8178 return *slot;
8179
8180 ref_p = XCNEW (struct external_ref);
8181 ref_p->type = die;
8182 *slot = ref_p;
8183 return ref_p;
8184 }
8185
8186 /* Subroutine of optimize_external_refs, below.
8187
8188 If we see a type skeleton, record it as our stub. If we see external
8189 references, remember how many we've seen. */
8190
8191 static void
8192 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8193 {
8194 dw_die_ref c;
8195 dw_attr_node *a;
8196 unsigned ix;
8197 struct external_ref *ref_p;
8198
8199 if (is_type_die (die)
8200 && (c = get_AT_ref (die, DW_AT_signature)))
8201 {
8202 /* This is a local skeleton; use it for local references. */
8203 ref_p = lookup_external_ref (map, c);
8204 ref_p->stub = die;
8205 }
8206
8207 /* Scan the DIE references, and remember any that refer to DIEs from
8208 other CUs (i.e. those which are not marked). */
8209 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8210 if (AT_class (a) == dw_val_class_die_ref
8211 && (c = AT_ref (a))->die_mark == 0
8212 && is_type_die (c))
8213 {
8214 ref_p = lookup_external_ref (map, c);
8215 ref_p->n_refs++;
8216 }
8217
8218 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8219 }
8220
8221 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8222 points to an external_ref, DATA is the CU we're processing. If we don't
8223 already have a local stub, and we have multiple refs, build a stub. */
8224
8225 int
8226 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8227 {
8228 struct external_ref *ref_p = *slot;
8229
8230 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8231 {
8232 /* We have multiple references to this type, so build a small stub.
8233 Both of these forms are a bit dodgy from the perspective of the
8234 DWARF standard, since technically they should have names. */
8235 dw_die_ref cu = data;
8236 dw_die_ref type = ref_p->type;
8237 dw_die_ref stub = NULL;
8238
8239 if (type->comdat_type_p)
8240 {
8241 /* If we refer to this type via sig8, use AT_signature. */
8242 stub = new_die (type->die_tag, cu, NULL_TREE);
8243 add_AT_die_ref (stub, DW_AT_signature, type);
8244 }
8245 else
8246 {
8247 /* Otherwise, use a typedef with no name. */
8248 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8249 add_AT_die_ref (stub, DW_AT_type, type);
8250 }
8251
8252 stub->die_mark++;
8253 ref_p->stub = stub;
8254 }
8255 return 1;
8256 }
8257
8258 /* DIE is a unit; look through all the DIE references to see if there are
8259 any external references to types, and if so, create local stubs for
8260 them which will be applied in build_abbrev_table. This is useful because
8261 references to local DIEs are smaller. */
8262
8263 static external_ref_hash_type *
8264 optimize_external_refs (dw_die_ref die)
8265 {
8266 external_ref_hash_type *map = new external_ref_hash_type (10);
8267 optimize_external_refs_1 (die, map);
8268 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8269 return map;
8270 }
8271
8272 /* The format of each DIE (and its attribute value pairs) is encoded in an
8273 abbreviation table. This routine builds the abbreviation table and assigns
8274 a unique abbreviation id for each abbreviation entry. The children of each
8275 die are visited recursively. */
8276
8277 static void
8278 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8279 {
8280 unsigned long abbrev_id;
8281 unsigned int n_alloc;
8282 dw_die_ref c;
8283 dw_attr_node *a;
8284 unsigned ix;
8285
8286 /* Scan the DIE references, and replace any that refer to
8287 DIEs from other CUs (i.e. those which are not marked) with
8288 the local stubs we built in optimize_external_refs. */
8289 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8290 if (AT_class (a) == dw_val_class_die_ref
8291 && (c = AT_ref (a))->die_mark == 0)
8292 {
8293 struct external_ref *ref_p;
8294 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8295
8296 ref_p = lookup_external_ref (extern_map, c);
8297 if (ref_p->stub && ref_p->stub != die)
8298 change_AT_die_ref (a, ref_p->stub);
8299 else
8300 /* We aren't changing this reference, so mark it external. */
8301 set_AT_ref_external (a, 1);
8302 }
8303
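/* Look for an existing abbreviation entry with the same tag, the same
   has-children flag, and an identical attribute/form list. */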
8304 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8305 {
8306 dw_die_ref abbrev = abbrev_die_table[abbrev_id];
8307 dw_attr_node *die_a, *abbrev_a;
8308 unsigned ix;
8309 bool ok = true;
8310
8311 if (abbrev->die_tag != die->die_tag)
8312 continue;
8313 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8314 continue;
8315
8316 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8317 continue;
8318
8319 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8320 {
8321 abbrev_a = &(*abbrev->die_attr)[ix];
8322 if ((abbrev_a->dw_attr != die_a->dw_attr)
8323 || (value_format (abbrev_a) != value_format (die_a)))
8324 {
8325 ok = false;
8326 break;
8327 }
8328 }
8329 if (ok)
8330 break;
8331 }
8332
8333 if (abbrev_id >= abbrev_die_table_in_use)
8334 {
8335 if (abbrev_die_table_in_use >= abbrev_die_table_allocated)
8336 {
8337 n_alloc = abbrev_die_table_allocated + ABBREV_DIE_TABLE_INCREMENT;
8338 abbrev_die_table = GGC_RESIZEVEC (dw_die_ref, abbrev_die_table,
8339 n_alloc);
8340
8341 memset (&abbrev_die_table[abbrev_die_table_allocated], 0,
8342 (n_alloc - abbrev_die_table_allocated) * sizeof (dw_die_ref));
8343 abbrev_die_table_allocated = n_alloc;
8344 }
8345
8346 ++abbrev_die_table_in_use;
8347 abbrev_die_table[abbrev_id] = die;
8348 }
8349
8350 die->die_abbrev = abbrev_id;
8351 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8352 }
8353 \f
8354 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8355
8356 static int
8357 constant_size (unsigned HOST_WIDE_INT value)
8358 {
8359 int log;
8360
8361 if (value == 0)
8362 log = 0;
8363 else
8364 log = floor_log2 (value);
8365
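/* Convert the bit index into a byte count, then round up to the next
   power of two: values up to 0xff need 1 byte, up to 0xffff need 2,
   up to 0xffffffff need 4, and so on. floor_log2 (0) is -1, so a byte
   count of zero still yields 1. */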
8366 log = log / 8;
8367 log = 1 << (floor_log2 (log) + 1);
8368
8369 return log;
8370 }
8371
8372 /* Return the size of a DIE as it is represented in the
8373 .debug_info section. */
8374
8375 static unsigned long
8376 size_of_die (dw_die_ref die)
8377 {
8378 unsigned long size = 0;
8379 dw_attr_node *a;
8380 unsigned ix;
8381 enum dwarf_form form;
8382
8383 size += size_of_uleb128 (die->die_abbrev);
8384 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8385 {
8386 switch (AT_class (a))
8387 {
8388 case dw_val_class_addr:
8389 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8390 {
8391 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8392 size += size_of_uleb128 (AT_index (a));
8393 }
8394 else
8395 size += DWARF2_ADDR_SIZE;
8396 break;
8397 case dw_val_class_offset:
8398 size += DWARF_OFFSET_SIZE;
8399 break;
8400 case dw_val_class_loc:
8401 {
8402 unsigned long lsize = size_of_locs (AT_loc (a));
8403
8404 /* Block length. */
8405 if (dwarf_version >= 4)
8406 size += size_of_uleb128 (lsize);
8407 else
8408 size += constant_size (lsize);
8409 size += lsize;
8410 }
8411 break;
8412 case dw_val_class_loc_list:
8413 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8414 {
8415 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8416 size += size_of_uleb128 (AT_index (a));
8417 }
8418 else
8419 size += DWARF_OFFSET_SIZE;
8420 break;
8421 case dw_val_class_range_list:
8422 size += DWARF_OFFSET_SIZE;
8423 break;
8424 case dw_val_class_const:
8425 size += size_of_sleb128 (AT_int (a));
8426 break;
8427 case dw_val_class_unsigned_const:
8428 {
8429 int csize = constant_size (AT_unsigned (a));
8430 if (dwarf_version == 3
8431 && a->dw_attr == DW_AT_data_member_location
8432 && csize >= 4)
8433 size += size_of_uleb128 (AT_unsigned (a));
8434 else
8435 size += csize;
8436 }
8437 break;
8438 case dw_val_class_const_double:
8439 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
8440 if (HOST_BITS_PER_WIDE_INT >= 64)
8441 size++; /* block */
8442 break;
8443 case dw_val_class_wide_int:
8444 size += (get_full_len (*a->dw_attr_val.v.val_wide)
8445 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
8446 if (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT
8447 > 64)
8448 size++; /* block */
8449 break;
8450 case dw_val_class_vec:
8451 size += constant_size (a->dw_attr_val.v.val_vec.length
8452 * a->dw_attr_val.v.val_vec.elt_size)
8453 + a->dw_attr_val.v.val_vec.length
8454 * a->dw_attr_val.v.val_vec.elt_size; /* block */
8455 break;
8456 case dw_val_class_flag:
8457 if (dwarf_version >= 4)
8458 /* Currently all add_AT_flag calls pass in 1 as last argument,
8459 so DW_FORM_flag_present can be used. If that ever changes,
8460 we'll need to use DW_FORM_flag and have some optimization
8461 in build_abbrev_table that will change those to
8462 DW_FORM_flag_present if it is set to 1 in all DIEs using
8463 the same abbrev entry. */
8464 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8465 else
8466 size += 1;
8467 break;
8468 case dw_val_class_die_ref:
8469 if (AT_ref_external (a))
8470 {
8471 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
8472 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
8473 is sized by target address length, whereas in DWARF3
8474 it's always sized as an offset. */
8475 if (use_debug_types)
8476 size += DWARF_TYPE_SIGNATURE_SIZE;
8477 else if (dwarf_version == 2)
8478 size += DWARF2_ADDR_SIZE;
8479 else
8480 size += DWARF_OFFSET_SIZE;
8481 }
8482 else
8483 size += DWARF_OFFSET_SIZE;
8484 break;
8485 case dw_val_class_fde_ref:
8486 size += DWARF_OFFSET_SIZE;
8487 break;
8488 case dw_val_class_lbl_id:
8489 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8490 {
8491 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8492 size += size_of_uleb128 (AT_index (a));
8493 }
8494 else
8495 size += DWARF2_ADDR_SIZE;
8496 break;
8497 case dw_val_class_lineptr:
8498 case dw_val_class_macptr:
8499 size += DWARF_OFFSET_SIZE;
8500 break;
8501 case dw_val_class_str:
8502 form = AT_string_form (a);
8503 if (form == DW_FORM_strp)
8504 size += DWARF_OFFSET_SIZE;
8505 else if (form == DW_FORM_GNU_str_index)
8506 size += size_of_uleb128 (AT_index (a));
8507 else
8508 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
8509 break;
8510 case dw_val_class_file:
8511 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
8512 break;
8513 case dw_val_class_data8:
8514 size += 8;
8515 break;
8516 case dw_val_class_vms_delta:
8517 size += DWARF_OFFSET_SIZE;
8518 break;
8519 case dw_val_class_high_pc:
8520 size += DWARF2_ADDR_SIZE;
8521 break;
8522 case dw_val_class_discr_value:
8523 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
8524 break;
8525 case dw_val_class_discr_list:
8526 {
8527 unsigned block_size = size_of_discr_list (AT_discr_list (a));
8528
8529 /* This is a block, so we have the block length and then its
8530 data. */
8531 size += constant_size (block_size) + block_size;
8532 }
8533 break;
8534 default:
8535 gcc_unreachable ();
8536 }
8537 }
8538
8539 return size;
8540 }
8541
8542 /* Size the debugging information associated with a given DIE. Visits the
8543 DIE's children recursively. Updates the global variable next_die_offset
8544 each time through. Uses the current value of next_die_offset to update the
8545 die_offset field in each DIE. */
8546
8547 static void
8548 calc_die_sizes (dw_die_ref die)
8549 {
8550 dw_die_ref c;
8551
8552 gcc_assert (die->die_offset == 0
8553 || (unsigned long int) die->die_offset == next_die_offset);
8554 die->die_offset = next_die_offset;
8555 next_die_offset += size_of_die (die);
8556
8557 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
8558
8559 if (die->die_child != NULL)
8560 /* Count the null byte used to terminate sibling lists. */
8561 next_die_offset += 1;
8562 }
8563
8564 /* Size just the base type children at the start of the CU.
8565 This is needed because build_abbrev needs to size locs,
8566 and sizing of type-based stack ops needs to know die_offset
8567 values for the base types. */
8568
8569 static void
8570 calc_base_type_die_sizes (void)
8571 {
8572 unsigned long die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
8573 unsigned int i;
8574 dw_die_ref base_type;
8575 #if ENABLE_ASSERT_CHECKING
8576 dw_die_ref prev = comp_unit_die ()->die_child;
8577 #endif
8578
8579 die_offset += size_of_die (comp_unit_die ());
8580 for (i = 0; base_types.iterate (i, &base_type); i++)
8581 {
8582 #if ENABLE_ASSERT_CHECKING
8583 gcc_assert (base_type->die_offset == 0
8584 && prev->die_sib == base_type
8585 && base_type->die_child == NULL
8586 && base_type->die_abbrev);
8587 prev = base_type;
8588 #endif
8589 base_type->die_offset = die_offset;
8590 die_offset += size_of_die (base_type);
8591 }
8592 }
8593
8594 /* Set the marks for a die and its children. We do this so
8595 that we know whether or not a reference needs to use FORM_ref_addr; only
8596 DIEs in the same CU will be marked. We used to clear out the offset
8597 and use that as the flag, but ran into ordering problems. */
8598
8599 static void
8600 mark_dies (dw_die_ref die)
8601 {
8602 dw_die_ref c;
8603
8604 gcc_assert (!die->die_mark);
8605
8606 die->die_mark = 1;
8607 FOR_EACH_CHILD (die, c, mark_dies (c));
8608 }
8609
8610 /* Clear the marks for a die and its children. */
8611
8612 static void
8613 unmark_dies (dw_die_ref die)
8614 {
8615 dw_die_ref c;
8616
8617 if (! use_debug_types)
8618 gcc_assert (die->die_mark);
8619
8620 die->die_mark = 0;
8621 FOR_EACH_CHILD (die, c, unmark_dies (c));
8622 }
8623
8624 /* Clear the marks for a die, its children and referred dies. */
8625
8626 static void
8627 unmark_all_dies (dw_die_ref die)
8628 {
8629 dw_die_ref c;
8630 dw_attr_node *a;
8631 unsigned ix;
8632
8633 if (!die->die_mark)
8634 return;
8635 die->die_mark = 0;
8636
8637 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
8638
8639 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8640 if (AT_class (a) == dw_val_class_die_ref)
8641 unmark_all_dies (AT_ref (a));
8642 }
8643
8644 /* Calculate if the entry should appear in the final output file. It may be
8645 from a pruned type. */
8646
8647 static bool
8648 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
8649 {
8650 /* By limiting gnu pubnames to definitions only, gold can generate a
8651 gdb index without entries for declarations, which don't include
8652 enough information to be useful. */
8653 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
8654 return false;
8655
8656 if (table == pubname_table)
8657 {
8658 /* Enumerator names are part of the pubname table, but the
8659 parent DW_TAG_enumeration_type die may have been pruned.
8660 Don't output them if that is the case. */
8661 if (p->die->die_tag == DW_TAG_enumerator &&
8662 (p->die->die_parent == NULL
8663 || !p->die->die_parent->die_perennial_p))
8664 return false;
8665
8666 /* Everything else in the pubname table is included. */
8667 return true;
8668 }
8669
8670 /* The pubtypes table shouldn't include types that have been
8671 pruned. */
8672 return (p->die->die_offset != 0
8673 || !flag_eliminate_unused_debug_types);
8674 }
8675
8676 /* Return the size of the .debug_pubnames or .debug_pubtypes table
8677 generated for the compilation unit. */
8678
8679 static unsigned long
8680 size_of_pubnames (vec<pubname_entry, va_gc> *names)
8681 {
8682 unsigned long size;
8683 unsigned i;
8684 pubname_entry *p;
8685 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
8686
8687 size = DWARF_PUBNAMES_HEADER_SIZE;
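/* Each included entry contributes a DIE offset, the name with its
   terminating NUL, and, for GNU pubnames, one extra flags byte. */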
8688 FOR_EACH_VEC_ELT (*names, i, p)
8689 if (include_pubname_in_output (names, p))
8690 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
8691
8692 size += DWARF_OFFSET_SIZE;
8693 return size;
8694 }
8695
8696 /* Return the size of the information in the .debug_aranges section. */
8697
8698 static unsigned long
8699 size_of_aranges (void)
8700 {
8701 unsigned long size;
8702
8703 size = DWARF_ARANGES_HEADER_SIZE;
8704
8705 /* Count the address/length pair for this compilation unit. */
8706 if (text_section_used)
8707 size += 2 * DWARF2_ADDR_SIZE;
8708 if (cold_text_section_used)
8709 size += 2 * DWARF2_ADDR_SIZE;
8710 if (have_multiple_function_sections)
8711 {
8712 unsigned fde_idx;
8713 dw_fde_ref fde;
8714
8715 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
8716 {
8717 if (DECL_IGNORED_P (fde->decl))
8718 continue;
8719 if (!fde->in_std_section)
8720 size += 2 * DWARF2_ADDR_SIZE;
8721 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
8722 size += 2 * DWARF2_ADDR_SIZE;
8723 }
8724 }
8725
8726 /* Count the two zero words used to terminate the address range table. */
8727 size += 2 * DWARF2_ADDR_SIZE;
8728 return size;
8729 }
8730 \f
8731 /* Select the encoding of an attribute value. */
8732
8733 static enum dwarf_form
8734 value_format (dw_attr_node *a)
8735 {
8736 switch (AT_class (a))
8737 {
8738 case dw_val_class_addr:
8739 /* Only very few attributes allow DW_FORM_addr. */
8740 switch (a->dw_attr)
8741 {
8742 case DW_AT_low_pc:
8743 case DW_AT_high_pc:
8744 case DW_AT_entry_pc:
8745 case DW_AT_trampoline:
8746 return (AT_index (a) == NOT_INDEXED
8747 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8748 default:
8749 break;
8750 }
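/* Other attributes encode the address value as plain data of the
   target address width. */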
8751 switch (DWARF2_ADDR_SIZE)
8752 {
8753 case 1:
8754 return DW_FORM_data1;
8755 case 2:
8756 return DW_FORM_data2;
8757 case 4:
8758 return DW_FORM_data4;
8759 case 8:
8760 return DW_FORM_data8;
8761 default:
8762 gcc_unreachable ();
8763 }
8764 case dw_val_class_range_list:
8765 case dw_val_class_loc_list:
8766 if (dwarf_version >= 4)
8767 return DW_FORM_sec_offset;
8768 /* FALLTHRU */
8769 case dw_val_class_vms_delta:
8770 case dw_val_class_offset:
8771 switch (DWARF_OFFSET_SIZE)
8772 {
8773 case 4:
8774 return DW_FORM_data4;
8775 case 8:
8776 return DW_FORM_data8;
8777 default:
8778 gcc_unreachable ();
8779 }
8780 case dw_val_class_loc:
8781 if (dwarf_version >= 4)
8782 return DW_FORM_exprloc;
8783 switch (constant_size (size_of_locs (AT_loc (a))))
8784 {
8785 case 1:
8786 return DW_FORM_block1;
8787 case 2:
8788 return DW_FORM_block2;
8789 case 4:
8790 return DW_FORM_block4;
8791 default:
8792 gcc_unreachable ();
8793 }
8794 case dw_val_class_const:
8795 return DW_FORM_sdata;
8796 case dw_val_class_unsigned_const:
8797 switch (constant_size (AT_unsigned (a)))
8798 {
8799 case 1:
8800 return DW_FORM_data1;
8801 case 2:
8802 return DW_FORM_data2;
8803 case 4:
8804 /* In DWARF3 DW_AT_data_member_location with
8805 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
8806 constant, so we need to use DW_FORM_udata if we need
8807 a large constant. */
8808 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8809 return DW_FORM_udata;
8810 return DW_FORM_data4;
8811 case 8:
8812 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8813 return DW_FORM_udata;
8814 return DW_FORM_data8;
8815 default:
8816 gcc_unreachable ();
8817 }
8818 case dw_val_class_const_double:
8819 switch (HOST_BITS_PER_WIDE_INT)
8820 {
8821 case 8:
8822 return DW_FORM_data2;
8823 case 16:
8824 return DW_FORM_data4;
8825 case 32:
8826 return DW_FORM_data8;
8827 case 64:
8828 default:
8829 return DW_FORM_block1;
8830 }
8831 case dw_val_class_wide_int:
8832 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
8833 {
8834 case 8:
8835 return DW_FORM_data1;
8836 case 16:
8837 return DW_FORM_data2;
8838 case 32:
8839 return DW_FORM_data4;
8840 case 64:
8841 return DW_FORM_data8;
8842 default:
8843 return DW_FORM_block1;
8844 }
8845 case dw_val_class_vec:
8846 switch (constant_size (a->dw_attr_val.v.val_vec.length
8847 * a->dw_attr_val.v.val_vec.elt_size))
8848 {
8849 case 1:
8850 return DW_FORM_block1;
8851 case 2:
8852 return DW_FORM_block2;
8853 case 4:
8854 return DW_FORM_block4;
8855 default:
8856 gcc_unreachable ();
8857 }
8858 case dw_val_class_flag:
8859 if (dwarf_version >= 4)
8860 {
8861 /* Currently all add_AT_flag calls pass in 1 as last argument,
8862 so DW_FORM_flag_present can be used. If that ever changes,
8863 we'll need to use DW_FORM_flag and have some optimization
8864 in build_abbrev_table that will change those to
8865 DW_FORM_flag_present if it is set to 1 in all DIEs using
8866 the same abbrev entry. */
8867 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8868 return DW_FORM_flag_present;
8869 }
8870 return DW_FORM_flag;
8871 case dw_val_class_die_ref:
8872 if (AT_ref_external (a))
8873 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
8874 else
8875 return DW_FORM_ref;
8876 case dw_val_class_fde_ref:
8877 return DW_FORM_data;
8878 case dw_val_class_lbl_id:
8879 return (AT_index (a) == NOT_INDEXED
8880 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8881 case dw_val_class_lineptr:
8882 case dw_val_class_macptr:
8883 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
8884 case dw_val_class_str:
8885 return AT_string_form (a);
8886 case dw_val_class_file:
8887 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
8888 {
8889 case 1:
8890 return DW_FORM_data1;
8891 case 2:
8892 return DW_FORM_data2;
8893 case 4:
8894 return DW_FORM_data4;
8895 default:
8896 gcc_unreachable ();
8897 }
8898
8899 case dw_val_class_data8:
8900 return DW_FORM_data8;
8901
8902 case dw_val_class_high_pc:
8903 switch (DWARF2_ADDR_SIZE)
8904 {
8905 case 1:
8906 return DW_FORM_data1;
8907 case 2:
8908 return DW_FORM_data2;
8909 case 4:
8910 return DW_FORM_data4;
8911 case 8:
8912 return DW_FORM_data8;
8913 default:
8914 gcc_unreachable ();
8915 }
8916
8917 case dw_val_class_discr_value:
8918 return (a->dw_attr_val.v.val_discr_value.pos
8919 ? DW_FORM_udata
8920 : DW_FORM_sdata);
8921 case dw_val_class_discr_list:
8922 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
8923 {
8924 case 1:
8925 return DW_FORM_block1;
8926 case 2:
8927 return DW_FORM_block2;
8928 case 4:
8929 return DW_FORM_block4;
8930 default:
8931 gcc_unreachable ();
8932 }
8933
8934 default:
8935 gcc_unreachable ();
8936 }
8937 }
8938
8939 /* Output the encoding of an attribute value. */
8940
8941 static void
8942 output_value_format (dw_attr_node *a)
8943 {
8944 enum dwarf_form form = value_format (a);
8945
8946 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
8947 }
8948
8949 /* Given a die and id, produce the appropriate abbreviations. */
8950
8951 static void
8952 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
8953 {
8954 unsigned ix;
8955 dw_attr_node *a_attr;
8956
8957 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
8958 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
8959 dwarf_tag_name (abbrev->die_tag));
8960
8961 if (abbrev->die_child != NULL)
8962 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
8963 else
8964 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
8965
8966 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
8967 {
8968 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
8969 dwarf_attr_name (a_attr->dw_attr));
8970 output_value_format (a_attr);
8971 }
8972
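/* Terminate the attribute specifications with a zero attribute name and
   a zero form. */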
8973 dw2_asm_output_data (1, 0, NULL);
8974 dw2_asm_output_data (1, 0, NULL);
8975 }
8976
8977
8978 /* Output the .debug_abbrev section which defines the DIE abbreviation
8979 table. */
8980
8981 static void
8982 output_abbrev_section (void)
8983 {
8984 unsigned long abbrev_id;
8985
8986 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8987 output_die_abbrevs (abbrev_id, abbrev_die_table[abbrev_id]);
8988
8989 /* Terminate the table. */
8990 dw2_asm_output_data (1, 0, NULL);
8991 }
8992
8993 /* Output a symbol we can use to refer to this DIE from another CU. */
8994
8995 static inline void
8996 output_die_symbol (dw_die_ref die)
8997 {
8998 const char *sym = die->die_id.die_symbol;
8999
9000 gcc_assert (!die->comdat_type_p);
9001
9002 if (sym == 0)
9003 return;
9004
9005 if (strncmp (sym, DIE_LABEL_PREFIX, sizeof (DIE_LABEL_PREFIX) - 1) == 0)
9006 /* We make these global, not weak; if the target doesn't support
9007 .linkonce, it doesn't support combining the sections, so debugging
9008 will break. */
9009 targetm.asm_out.globalize_label (asm_out_file, sym);
9010
9011 ASM_OUTPUT_LABEL (asm_out_file, sym);
9012 }
9013
9014 /* Return a new location list, given the begin and end range, and the
9015 expression. */
9016
9017 static inline dw_loc_list_ref
9018 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9019 const char *section)
9020 {
9021 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9022
9023 retlist->begin = begin;
9024 retlist->begin_entry = NULL;
9025 retlist->end = end;
9026 retlist->expr = expr;
9027 retlist->section = section;
9028
9029 return retlist;
9030 }
9031
9032 /* Generate a new internal symbol for this location list node, if it
9033 hasn't got one yet. */
9034
9035 static inline void
9036 gen_llsym (dw_loc_list_ref list)
9037 {
9038 gcc_assert (!list->ll_symbol);
9039 list->ll_symbol = gen_internal_sym ("LLST");
9040 }
9041
9042 /* Output the location list given to us. */
9043
9044 static void
9045 output_loc_list (dw_loc_list_ref list_head)
9046 {
9047 dw_loc_list_ref curr = list_head;
9048
9049 if (list_head->emitted)
9050 return;
9051 list_head->emitted = true;
9052
9053 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9054
9055 /* Walk the location list, and output each range + expression. */
9056 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9057 {
9058 unsigned long size;
9059 /* Don't output an entry that starts and ends at the same address. */
9060 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9061 continue;
9062 size = size_of_locs (curr->expr);
9063 /* If the expression is too large, drop it on the floor. We could
9064 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9065 in the expression, but >= 64KB expressions for a single value
9066 in a single range are unlikely to be very useful. */
9067 if (size > 0xffff)
9068 continue;
9069 if (dwarf_split_debug_info)
9070 {
9071 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9072 "Location list start/length entry (%s)",
9073 list_head->ll_symbol);
9074 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9075 "Location list range start index (%s)",
9076 curr->begin);
9077 /* The length field is 4 bytes. If we ever need to support
9078 an 8-byte length, we can add a new DW_LLE code or fall back
9079 to DW_LLE_GNU_start_end_entry. */
9080 dw2_asm_output_delta (4, curr->end, curr->begin,
9081 "Location list range length (%s)",
9082 list_head->ll_symbol);
9083 }
9084 else if (!have_multiple_function_sections)
9085 {
9086 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9087 "Location list begin address (%s)",
9088 list_head->ll_symbol);
9089 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9090 "Location list end address (%s)",
9091 list_head->ll_symbol);
9092 }
9093 else
9094 {
9095 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9096 "Location list begin address (%s)",
9097 list_head->ll_symbol);
9098 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9099 "Location list end address (%s)",
9100 list_head->ll_symbol);
9101 }
9102
9103 /* Output the block length for this list of location operations. */
9104 gcc_assert (size <= 0xffff);
9105 dw2_asm_output_data (2, size, "%s", "Location expression size");
9106
9107 output_loc_sequence (curr->expr, -1);
9108 }
9109
9110 if (dwarf_split_debug_info)
9111 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9112 "Location list terminator (%s)",
9113 list_head->ll_symbol);
9114 else
9115 {
9116 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9117 "Location list terminator begin (%s)",
9118 list_head->ll_symbol);
9119 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9120 "Location list terminator end (%s)",
9121 list_head->ll_symbol);
9122 }
9123 }
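
/* A hedged example of what the loop above emits for one range in the
   common case (no split debug info, a single text section, 8-byte
   addresses); the labels .LVL0, .LVL1, .Ltext0 and the one-byte
   DW_OP_reg0 expression are hypothetical:

	.quad	.LVL0-.Ltext0	Location list begin address
	.quad	.LVL1-.Ltext0	Location list end address
	.value	0x1		Location expression size
	.byte	0x50		DW_OP_reg0

   The list is closed by the two zero address-sized words emitted after
   the loop.  */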
9124
9125 /* Output a range_list offset into the .debug_ranges section. Emit a
9126 relocated reference if val_entry is NULL; otherwise, emit an
9127 indirect reference. */
9128
9129 static void
9130 output_range_list_offset (dw_attr_node *a)
9131 {
9132 const char *name = dwarf_attr_name (a->dw_attr);
9133
9134 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9135 {
9136 char *p = strchr (ranges_section_label, '\0');
9137 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX, a->dw_attr_val.v.val_offset);
9138 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9139 debug_ranges_section, "%s", name);
9140 *p = '\0';
9141 }
9142 else
9143 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9144 "%s (offset from %s)", name, ranges_section_label);
9145 }
9146
9147 /* Output the offset into the debug_loc section. */
9148
9149 static void
9150 output_loc_list_offset (dw_attr_node *a)
9151 {
9152 char *sym = AT_loc_list (a)->ll_symbol;
9153
9154 gcc_assert (sym);
9155 if (dwarf_split_debug_info)
9156 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9157 "%s", dwarf_attr_name (a->dw_attr));
9158 else
9159 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9160 "%s", dwarf_attr_name (a->dw_attr));
9161 }
9162
9163 /* Output an attribute's index or value appropriately. */
9164
9165 static void
9166 output_attr_index_or_value (dw_attr_node *a)
9167 {
9168 const char *name = dwarf_attr_name (a->dw_attr);
9169
9170 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9171 {
9172 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9173 return;
9174 }
9175 switch (AT_class (a))
9176 {
9177 case dw_val_class_addr:
9178 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9179 break;
9180 case dw_val_class_high_pc:
9181 case dw_val_class_lbl_id:
9182 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9183 break;
9184 case dw_val_class_loc_list:
9185 output_loc_list_offset (a);
9186 break;
9187 default:
9188 gcc_unreachable ();
9189 }
9190 }
9191
9192 /* Output a type signature. */
9193
9194 static inline void
9195 output_signature (const char *sig, const char *name)
9196 {
9197 int i;
9198
9199 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9200 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9201 }
9202
9203 /* Output a discriminant value. */
9204
9205 static inline void
9206 output_discr_value (dw_discr_value *discr_value, const char *name)
9207 {
9208 if (discr_value->pos)
9209 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9210 else
9211 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9212 }
9213
9214 /* Output the DIE and its attributes. Called recursively to generate
9215 the definitions of each child DIE. */
9216
9217 static void
9218 output_die (dw_die_ref die)
9219 {
9220 dw_attr_node *a;
9221 dw_die_ref c;
9222 unsigned long size;
9223 unsigned ix;
9224
9225 /* If someone in another CU might refer to us, set up a symbol for
9226 them to point to. */
9227 if (! die->comdat_type_p && die->die_id.die_symbol)
9228 output_die_symbol (die);
9229
9230 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9231 (unsigned long)die->die_offset,
9232 dwarf_tag_name (die->die_tag));
9233
9234 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9235 {
9236 const char *name = dwarf_attr_name (a->dw_attr);
9237
9238 switch (AT_class (a))
9239 {
9240 case dw_val_class_addr:
9241 output_attr_index_or_value (a);
9242 break;
9243
9244 case dw_val_class_offset:
9245 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9246 "%s", name);
9247 break;
9248
9249 case dw_val_class_range_list:
9250 output_range_list_offset (a);
9251 break;
9252
9253 case dw_val_class_loc:
9254 size = size_of_locs (AT_loc (a));
9255
9256 /* Output the block length for this list of location operations. */
9257 if (dwarf_version >= 4)
9258 dw2_asm_output_data_uleb128 (size, "%s", name);
9259 else
9260 dw2_asm_output_data (constant_size (size), size, "%s", name);
9261
9262 output_loc_sequence (AT_loc (a), -1);
9263 break;
9264
9265 case dw_val_class_const:
9266 /* ??? It would be slightly more efficient to use a scheme like the one
9267 used for unsigned constants below, but gdb 4.x does not sign
9268 extend. Gdb 5.x does sign extend. */
9269 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
9270 break;
9271
9272 case dw_val_class_unsigned_const:
9273 {
9274 int csize = constant_size (AT_unsigned (a));
9275 if (dwarf_version == 3
9276 && a->dw_attr == DW_AT_data_member_location
9277 && csize >= 4)
9278 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
9279 else
9280 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
9281 }
9282 break;
9283
9284 case dw_val_class_const_double:
9285 {
9286 unsigned HOST_WIDE_INT first, second;
9287
9288 if (HOST_BITS_PER_WIDE_INT >= 64)
9289 dw2_asm_output_data (1,
9290 HOST_BITS_PER_DOUBLE_INT
9291 / HOST_BITS_PER_CHAR,
9292 NULL);
9293
9294 if (WORDS_BIG_ENDIAN)
9295 {
9296 first = a->dw_attr_val.v.val_double.high;
9297 second = a->dw_attr_val.v.val_double.low;
9298 }
9299 else
9300 {
9301 first = a->dw_attr_val.v.val_double.low;
9302 second = a->dw_attr_val.v.val_double.high;
9303 }
9304
9305 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9306 first, "%s", name);
9307 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9308 second, NULL);
9309 }
9310 break;
9311
9312 case dw_val_class_wide_int:
9313 {
9314 int i;
9315 int len = get_full_len (*a->dw_attr_val.v.val_wide);
9316 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
9317 if (len * HOST_BITS_PER_WIDE_INT > 64)
9318 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide) * l,
9319 NULL);
9320
9321 if (WORDS_BIG_ENDIAN)
9322 for (i = len - 1; i >= 0; --i)
9323 {
9324 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9325 "%s", name);
9326 name = "";
9327 }
9328 else
9329 for (i = 0; i < len; ++i)
9330 {
9331 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9332 "%s", name);
9333 name = "";
9334 }
9335 }
9336 break;
9337
9338 case dw_val_class_vec:
9339 {
9340 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
9341 unsigned int len = a->dw_attr_val.v.val_vec.length;
9342 unsigned int i;
9343 unsigned char *p;
9344
9345 dw2_asm_output_data (constant_size (len * elt_size),
9346 len * elt_size, "%s", name);
9347 if (elt_size > sizeof (HOST_WIDE_INT))
9348 {
9349 elt_size /= 2;
9350 len *= 2;
9351 }
9352 for (i = 0, p = a->dw_attr_val.v.val_vec.array;
9353 i < len;
9354 i++, p += elt_size)
9355 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
9356 "fp or vector constant word %u", i);
9357 break;
9358 }
9359
9360 case dw_val_class_flag:
9361 if (dwarf_version >= 4)
9362 {
9363 /* Currently all add_AT_flag calls pass in 1 as last argument,
9364 so DW_FORM_flag_present can be used. If that ever changes,
9365 we'll need to use DW_FORM_flag and have some optimization
9366 in build_abbrev_table that will change those to
9367 DW_FORM_flag_present if it is set to 1 in all DIEs using
9368 the same abbrev entry. */
9369 gcc_assert (AT_flag (a) == 1);
9370 if (flag_debug_asm)
9371 fprintf (asm_out_file, "\t\t\t%s %s\n",
9372 ASM_COMMENT_START, name);
9373 break;
9374 }
9375 dw2_asm_output_data (1, AT_flag (a), "%s", name);
9376 break;
9377
9378 case dw_val_class_loc_list:
9379 output_attr_index_or_value (a);
9380 break;
9381
9382 case dw_val_class_die_ref:
9383 if (AT_ref_external (a))
9384 {
9385 if (AT_ref (a)->comdat_type_p)
9386 {
9387 comdat_type_node *type_node =
9388 AT_ref (a)->die_id.die_type_node;
9389
9390 gcc_assert (type_node);
9391 output_signature (type_node->signature, name);
9392 }
9393 else
9394 {
9395 const char *sym = AT_ref (a)->die_id.die_symbol;
9396 int size;
9397
9398 gcc_assert (sym);
9399 /* In DWARF2, DW_FORM_ref_addr is sized by target address
9400 length, whereas in DWARF3 it's always sized as an
9401 offset. */
9402 if (dwarf_version == 2)
9403 size = DWARF2_ADDR_SIZE;
9404 else
9405 size = DWARF_OFFSET_SIZE;
9406 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
9407 name);
9408 }
9409 }
9410 else
9411 {
9412 gcc_assert (AT_ref (a)->die_offset);
9413 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
9414 "%s", name);
9415 }
9416 break;
9417
9418 case dw_val_class_fde_ref:
9419 {
9420 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
9421
9422 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
9423 a->dw_attr_val.v.val_fde_index * 2);
9424 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
9425 "%s", name);
9426 }
9427 break;
9428
9429 case dw_val_class_vms_delta:
9430 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
9431 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
9432 AT_vms_delta2 (a), AT_vms_delta1 (a),
9433 "%s", name);
9434 #else
9435 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
9436 AT_vms_delta2 (a), AT_vms_delta1 (a),
9437 "%s", name);
9438 #endif
9439 break;
9440
9441 case dw_val_class_lbl_id:
9442 output_attr_index_or_value (a);
9443 break;
9444
9445 case dw_val_class_lineptr:
9446 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9447 debug_line_section, "%s", name);
9448 break;
9449
9450 case dw_val_class_macptr:
9451 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9452 debug_macinfo_section, "%s", name);
9453 break;
9454
9455 case dw_val_class_str:
9456 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
9457 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
9458 a->dw_attr_val.v.val_str->label,
9459 debug_str_section,
9460 "%s: \"%s\"", name, AT_string (a));
9461 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
9462 dw2_asm_output_data_uleb128 (AT_index (a),
9463 "%s: \"%s\"", name, AT_string (a));
9464 else
9465 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
9466 break;
9467
9468 case dw_val_class_file:
9469 {
9470 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
9471
9472 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
9473 a->dw_attr_val.v.val_file->filename);
9474 break;
9475 }
9476
9477 case dw_val_class_data8:
9478 {
9479 int i;
9480
9481 for (i = 0; i < 8; i++)
9482 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
9483 i == 0 ? "%s" : NULL, name);
9484 break;
9485 }
9486
9487 case dw_val_class_high_pc:
9488 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
9489 get_AT_low_pc (die), "DW_AT_high_pc");
9490 break;
9491
9492 case dw_val_class_discr_value:
9493 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
9494 break;
9495
9496 case dw_val_class_discr_list:
9497 {
9498 dw_discr_list_ref list = AT_discr_list (a);
9499 const int size = size_of_discr_list (list);
9500
9501 /* This is a block, so output its length first. */
9502 dw2_asm_output_data (constant_size (size), size,
9503 "%s: block size", name);
9504
9505 for (; list != NULL; list = list->dw_discr_next)
9506 {
9507 /* One byte for the discriminant value descriptor, and then as
9508 many LEB128 numbers as required. */
9509 if (list->dw_discr_range)
9510 dw2_asm_output_data (1, DW_DSC_range,
9511 "%s: DW_DSC_range", name);
9512 else
9513 dw2_asm_output_data (1, DW_DSC_label,
9514 "%s: DW_DSC_label", name);
9515
9516 output_discr_value (&list->dw_discr_lower_bound, name);
9517 if (list->dw_discr_range)
9518 output_discr_value (&list->dw_discr_upper_bound, name);
9519 }
9520 break;
9521 }
9522
9523 default:
9524 gcc_unreachable ();
9525 }
9526 }
9527
9528 FOR_EACH_CHILD (die, c, output_die (c));
9529
9530 /* Add null byte to terminate sibling list. */
9531 if (die->die_child != NULL)
9532 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
9533 (unsigned long) die->die_offset);
9534 }
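
/* Illustration only: assuming a hypothetical DW_TAG_variable abbreviation
   with DW_AT_name (DW_FORM_string) and DW_AT_type (DW_FORM_ref4) and
   32-bit DWARF offsets, the DIE body produced by output_die looks
   roughly like

	.uleb128 0x2		(DIE (0x9c) DW_TAG_variable)
	.string	"counter"	DW_AT_name
	.long	0xc5		DW_AT_type (CU-relative DIE offset)

   Children, if present, follow immediately, and the sibling list is
   closed by the single zero byte emitted above.  */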
9535
9536 /* Output the compilation unit that appears at the beginning of the
9537 .debug_info section, and precedes the DIE descriptions. */
9538
9539 static void
9540 output_compilation_unit_header (void)
9541 {
9542 /* We don't support actual DWARFv5 units yet; we just use some
9543 DWARFv5 draft DIE tags in DWARFv4 format. */
9544 int ver = dwarf_version < 5 ? dwarf_version : 4;
9545
9546 if (!XCOFF_DEBUGGING_INFO)
9547 {
9548 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9549 dw2_asm_output_data (4, 0xffffffff,
9550 "Initial length escape value indicating 64-bit DWARF extension");
9551 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9552 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
9553 "Length of Compilation Unit Info");
9554 }
9555
9556 dw2_asm_output_data (2, ver, "DWARF version number");
9557 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
9558 debug_abbrev_section,
9559 "Offset Into Abbrev. Section");
9560 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9561 }
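
/* For illustration, with 32-bit DWARF offsets, 8-byte addresses and
   -gdwarf-4 the header emitted above occupies 11 bytes; the label names
   depend on the target assembler conventions:

	.long	<next_die_offset - 4>	Length of Compilation Unit Info
	.value	0x4			DWARF version number
	.long	.Ldebug_abbrev0		Offset Into Abbrev. Section
	.byte	0x8			Pointer Size (in bytes)  */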
9562
9563 /* Output the compilation unit DIE and its children. */
9564
9565 static void
9566 output_comp_unit (dw_die_ref die, int output_if_empty)
9567 {
9568 const char *secname, *oldsym;
9569 char *tmp;
9570
9571 /* Unless we are outputting the main CU, we may throw away empty ones. */
9572 if (!output_if_empty && die->die_child == NULL)
9573 return;
9574
9575 /* Even if there are no children of this DIE, we must output the information
9576 about the compilation unit. Otherwise, on an empty translation unit, we
9577 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
9578 will then complain when examining the file. First mark all the DIEs in
9579 this CU so we know which get local refs. */
9580 mark_dies (die);
9581
9582 external_ref_hash_type *extern_map = optimize_external_refs (die);
9583
9584 build_abbrev_table (die, extern_map);
9585
9586 delete extern_map;
9587
9588 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9589 next_die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
9590 calc_die_sizes (die);
9591
9592 oldsym = die->die_id.die_symbol;
9593 if (oldsym)
9594 {
9595 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
9596
9597 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
9598 secname = tmp;
9599 die->die_id.die_symbol = NULL;
9600 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9601 }
9602 else
9603 {
9604 switch_to_section (debug_info_section);
9605 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
9606 info_section_emitted = true;
9607 }
9608
9609 /* Output debugging information. */
9610 output_compilation_unit_header ();
9611 output_die (die);
9612
9613 /* Leave the marks on the main CU, so we can check them in
9614 output_pubnames. */
9615 if (oldsym)
9616 {
9617 unmark_dies (die);
9618 die->die_id.die_symbol = oldsym;
9619 }
9620 }
9621
9622 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
9623 and .debug_pubtypes. This is configured per-target, but can be
9624 overridden by the -gpubnames or -gno-pubnames options. */
9625
9626 static inline bool
9627 want_pubnames (void)
9628 {
9629 if (debug_info_level <= DINFO_LEVEL_TERSE)
9630 return false;
9631 if (debug_generate_pub_sections != -1)
9632 return debug_generate_pub_sections;
9633 return targetm.want_debug_pub_sections;
9634 }
9635
9636 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
9637
9638 static void
9639 add_AT_pubnames (dw_die_ref die)
9640 {
9641 if (want_pubnames ())
9642 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
9643 }
9644
9645 /* Add a string attribute value to a skeleton DIE. */
9646
9647 static inline void
9648 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
9649 const char *str)
9650 {
9651 dw_attr_node attr;
9652 struct indirect_string_node *node;
9653
9654 if (! skeleton_debug_str_hash)
9655 skeleton_debug_str_hash
9656 = hash_table<indirect_string_hasher>::create_ggc (10);
9657
9658 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
9659 find_string_form (node);
9660 if (node->form == DW_FORM_GNU_str_index)
9661 node->form = DW_FORM_strp;
9662
9663 attr.dw_attr = attr_kind;
9664 attr.dw_attr_val.val_class = dw_val_class_str;
9665 attr.dw_attr_val.val_entry = NULL;
9666 attr.dw_attr_val.v.val_str = node;
9667 add_dwarf_attr (die, &attr);
9668 }
9669
9670 /* Helper function to generate top-level dies for skeleton debug_info and
9671 debug_types. */
9672
9673 static void
9674 add_top_level_skeleton_die_attrs (dw_die_ref die)
9675 {
9676 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
9677 const char *comp_dir = comp_dir_string ();
9678
9679 add_skeleton_AT_string (die, DW_AT_GNU_dwo_name, dwo_file_name);
9680 if (comp_dir != NULL)
9681 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
9682 add_AT_pubnames (die);
9683 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
9684 }
9685
9686 /* Output skeleton debug sections that point to the dwo file. */
9687
9688 static void
9689 output_skeleton_debug_sections (dw_die_ref comp_unit)
9690 {
9691 /* We don't support actual DWARFv5 units yet; we just use some
9692 DWARFv5 draft DIE tags in DWARFv4 format. */
9693 int ver = dwarf_version < 5 ? dwarf_version : 4;
9694
9695 /* These attributes will be found in the full debug_info section. */
9696 remove_AT (comp_unit, DW_AT_producer);
9697 remove_AT (comp_unit, DW_AT_language);
9698
9699 switch_to_section (debug_skeleton_info_section);
9700 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
9701
9702 /* Produce the skeleton compilation-unit header. This one differs enough from
9703 a normal CU header that it's better not to call
9704 output_compilation_unit_header. */
9705 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9706 dw2_asm_output_data (4, 0xffffffff,
9707 "Initial length escape value indicating 64-bit DWARF extension");
9708
9709 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9710 DWARF_COMPILE_UNIT_HEADER_SIZE
9711 - DWARF_INITIAL_LENGTH_SIZE
9712 + size_of_die (comp_unit),
9713 "Length of Compilation Unit Info");
9714 dw2_asm_output_data (2, ver, "DWARF version number");
9715 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
9716 debug_abbrev_section,
9717 "Offset Into Abbrev. Section");
9718 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9719
9720 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
9721 output_die (comp_unit);
9722
9723 /* Build the skeleton debug_abbrev section. */
9724 switch_to_section (debug_skeleton_abbrev_section);
9725 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
9726
9727 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
9728
9729 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
9730 }
9731
9732 /* Output a comdat type unit DIE and its children. */
9733
9734 static void
9735 output_comdat_type_unit (comdat_type_node *node)
9736 {
9737 const char *secname;
9738 char *tmp;
9739 int i;
9740 #if defined (OBJECT_FORMAT_ELF)
9741 tree comdat_key;
9742 #endif
9743
9744 /* First mark all the DIEs in this CU so we know which get local refs. */
9745 mark_dies (node->root_die);
9746
9747 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
9748
9749 build_abbrev_table (node->root_die, extern_map);
9750
9751 delete extern_map;
9752 extern_map = NULL;
9753
9754 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9755 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
9756 calc_die_sizes (node->root_die);
9757
9758 #if defined (OBJECT_FORMAT_ELF)
9759 if (!dwarf_split_debug_info)
9760 secname = ".debug_types";
9761 else
9762 secname = ".debug_types.dwo";
9763
9764 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9765 sprintf (tmp, "wt.");
9766 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9767 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
9768 comdat_key = get_identifier (tmp);
9769 targetm.asm_out.named_section (secname,
9770 SECTION_DEBUG | SECTION_LINKONCE,
9771 comdat_key);
9772 #else
9773 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9774 sprintf (tmp, ".gnu.linkonce.wt.");
9775 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9776 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
9777 secname = tmp;
9778 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9779 #endif
9780
9781 /* Output debugging information. */
9782 output_compilation_unit_header ();
9783 output_signature (node->signature, "Type Signature");
9784 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
9785 "Offset to Type DIE");
9786 output_die (node->root_die);
9787
9788 unmark_dies (node->root_die);
9789 }
9790
9791 /* Return the DWARF2/3 pubname associated with a decl. */
9792
9793 static const char *
9794 dwarf2_name (tree decl, int scope)
9795 {
9796 if (DECL_NAMELESS (decl))
9797 return NULL;
9798 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
9799 }
9800
9801 /* Add a new entry to .debug_pubnames if appropriate. */
9802
9803 static void
9804 add_pubname_string (const char *str, dw_die_ref die)
9805 {
9806 pubname_entry e;
9807
9808 e.die = die;
9809 e.name = xstrdup (str);
9810 vec_safe_push (pubname_table, e);
9811 }
9812
9813 static void
9814 add_pubname (tree decl, dw_die_ref die)
9815 {
9816 if (!want_pubnames ())
9817 return;
9818
9819 /* Don't add items to the table when we expect that the consumer will have
9820 just read the enclosing die. For example, if the consumer is looking at a
9821 class_member, it will either be inside the class already, or will have just
9822 looked up the class to find the member. Either way, searching the class is
9823 faster than searching the index. */
9824 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
9825 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9826 {
9827 const char *name = dwarf2_name (decl, 1);
9828
9829 if (name)
9830 add_pubname_string (name, die);
9831 }
9832 }
9833
9834 /* Add an enumerator to the pubnames section. */
9835
9836 static void
9837 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
9838 {
9839 pubname_entry e;
9840
9841 gcc_assert (scope_name);
9842 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
9843 e.die = die;
9844 vec_safe_push (pubname_table, e);
9845 }
9846
9847 /* Add a new entry to .debug_pubtypes if appropriate. */
9848
9849 static void
9850 add_pubtype (tree decl, dw_die_ref die)
9851 {
9852 pubname_entry e;
9853
9854 if (!want_pubnames ())
9855 return;
9856
9857 if ((TREE_PUBLIC (decl)
9858 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9859 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
9860 {
9861 tree scope = NULL;
9862 const char *scope_name = "";
9863 const char *sep = is_cxx () ? "::" : ".";
9864 const char *name;
9865
9866 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
9867 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
9868 {
9869 scope_name = lang_hooks.dwarf_name (scope, 1);
9870 if (scope_name != NULL && scope_name[0] != '\0')
9871 scope_name = concat (scope_name, sep, NULL);
9872 else
9873 scope_name = "";
9874 }
9875
9876 if (TYPE_P (decl))
9877 name = type_tag (decl);
9878 else
9879 name = lang_hooks.dwarf_name (decl, 1);
9880
9881 /* If we don't have a name for the type, there's no point in adding
9882 it to the table. */
9883 if (name != NULL && name[0] != '\0')
9884 {
9885 e.die = die;
9886 e.name = concat (scope_name, name, NULL);
9887 vec_safe_push (pubtype_table, e);
9888 }
9889
9890 /* Although it might be more consistent to add the pubinfo for the
9891 enumerators as their dies are created, they should only be added if the
9892 enum type meets the criteria above. So rather than re-check the parent
9893 enum type whenever an enumerator die is created, just output them all
9894 here. This isn't protected by the name conditional because anonymous
9895 enums don't have names. */
9896 if (die->die_tag == DW_TAG_enumeration_type)
9897 {
9898 dw_die_ref c;
9899
9900 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
9901 }
9902 }
9903 }
9904
9905 /* Output a single entry in the pubnames table. */
9906
9907 static void
9908 output_pubname (dw_offset die_offset, pubname_entry *entry)
9909 {
9910 dw_die_ref die = entry->die;
9911 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
9912
9913 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
9914
9915 if (debug_generate_pub_sections == 2)
9916 {
9917 /* This logic follows gdb's method for determining the value of the flag
9918 byte. */
9919 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
9920 switch (die->die_tag)
9921 {
9922 case DW_TAG_typedef:
9923 case DW_TAG_base_type:
9924 case DW_TAG_subrange_type:
9925 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9926 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9927 break;
9928 case DW_TAG_enumerator:
9929 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9930 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9931 if (!is_cxx () && !is_java ())
9932 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9933 break;
9934 case DW_TAG_subprogram:
9935 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9936 GDB_INDEX_SYMBOL_KIND_FUNCTION);
9937 if (!is_ada ())
9938 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9939 break;
9940 case DW_TAG_constant:
9941 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9942 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9943 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9944 break;
9945 case DW_TAG_variable:
9946 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9947 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9948 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9949 break;
9950 case DW_TAG_namespace:
9951 case DW_TAG_imported_declaration:
9952 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9953 break;
9954 case DW_TAG_class_type:
9955 case DW_TAG_interface_type:
9956 case DW_TAG_structure_type:
9957 case DW_TAG_union_type:
9958 case DW_TAG_enumeration_type:
9959 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9960 if (!is_cxx () && !is_java ())
9961 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9962 break;
9963 default:
9964 /* An unusual tag. Leave the flag-byte empty. */
9965 break;
9966 }
9967 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
9968 "GDB-index flags");
9969 }
9970
9971 dw2_asm_output_nstring (entry->name, -1, "external name");
9972 }
9973
9974
9975 /* Output the public names table used to speed up access to externally
9976 visible names; or the public types table used to find type definitions. */
9977
9978 static void
9979 output_pubnames (vec<pubname_entry, va_gc> *names)
9980 {
9981 unsigned i;
9982 unsigned long pubnames_length = size_of_pubnames (names);
9983 pubname_entry *pub;
9984
9985 if (!XCOFF_DEBUGGING_INFO)
9986 {
9987 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9988 dw2_asm_output_data (4, 0xffffffff,
9989 "Initial length escape value indicating 64-bit DWARF extension");
9990 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
9991 "Pub Info Length");
9992 }
9993
9994 /* Version number for pubnames/pubtypes is independent of dwarf version. */
9995 dw2_asm_output_data (2, 2, "DWARF Version");
9996
9997 if (dwarf_split_debug_info)
9998 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
9999 debug_skeleton_info_section,
10000 "Offset of Compilation Unit Info");
10001 else
10002 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10003 debug_info_section,
10004 "Offset of Compilation Unit Info");
10005 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10006 "Compilation Unit Length");
10007
10008 FOR_EACH_VEC_ELT (*names, i, pub)
10009 {
10010 if (include_pubname_in_output (names, pub))
10011 {
10012 dw_offset die_offset = pub->die->die_offset;
10013
10014 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10015 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10016 gcc_assert (pub->die->die_mark);
10017
10018 /* If we're putting types in their own .debug_types sections,
10019 the .debug_pubtypes table will still point to the compile
10020 unit (not the type unit), so we want to use the offset of
10021 the skeleton DIE (if there is one). */
10022 if (pub->die->comdat_type_p && names == pubtype_table)
10023 {
10024 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10025
10026 if (type_node != NULL)
10027 die_offset = (type_node->skeleton_die != NULL
10028 ? type_node->skeleton_die->die_offset
10029 : comp_unit_die ()->die_offset);
10030 }
10031
10032 output_pubname (die_offset, pub);
10033 }
10034 }
10035
10036 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10037 }
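
/* Sketch of the resulting layout for plain DWARF pubnames (i.e. when
   debug_generate_pub_sections != 2); the offset and name are made up:

	header:	unit length, version 2, CU offset, CU length
	entry:	.long	0x9c		DIE offset
		.string	"my_function"	external name
	...
	.long	0			terminating DIE offset

   With -ggnu-pubnames (debug_generate_pub_sections == 2), a one-byte
   GDB-index flag is written between each DIE offset and its name.  */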
10038
10039 /* Output public names and types tables if necessary. */
10040
10041 static void
10042 output_pubtables (void)
10043 {
10044 if (!want_pubnames () || !info_section_emitted)
10045 return;
10046
10047 switch_to_section (debug_pubnames_section);
10048 output_pubnames (pubname_table);
10049 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10050 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10051 simply won't look for the section. */
10052 switch_to_section (debug_pubtypes_section);
10053 output_pubnames (pubtype_table);
10054 }
10055
10056
10057 /* Output the information that goes into the .debug_aranges table.
10058 Namely, define the beginning and ending address range of the
10059 text section generated for this compilation unit. */
10060
10061 static void
10062 output_aranges (void)
10063 {
10064 unsigned i;
10065 unsigned long aranges_length = size_of_aranges ();
10066
10067 if (!XCOFF_DEBUGGING_INFO)
10068 {
10069 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10070 dw2_asm_output_data (4, 0xffffffff,
10071 "Initial length escape value indicating 64-bit DWARF extension");
10072 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10073 "Length of Address Ranges Info");
10074 }
10075
10076 /* Version number for aranges is still 2, even up to DWARF5. */
10077 dw2_asm_output_data (2, 2, "DWARF Version");
10078 if (dwarf_split_debug_info)
10079 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10080 debug_skeleton_info_section,
10081 "Offset of Compilation Unit Info");
10082 else
10083 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10084 debug_info_section,
10085 "Offset of Compilation Unit Info");
10086 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10087 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10088
10089 /* We need to align to twice the pointer size here. */
10090 if (DWARF_ARANGES_PAD_SIZE)
10091 {
10092 /* Pad using 2-byte words so that the padding is correct for any
10093 pointer size. */
10094 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10095 2 * DWARF2_ADDR_SIZE);
10096 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10097 dw2_asm_output_data (2, 0, NULL);
10098 }
10099
10100 /* It is necessary not to output these entries if the sections were
10101 not used; if the sections were not used, the length will be 0 and
10102 the address may end up as 0 if the section is discarded by ld
10103 --gc-sections, leaving an invalid (0, 0) entry that can be
10104 confused with the terminator. */
10105 if (text_section_used)
10106 {
10107 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10108 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10109 text_section_label, "Length");
10110 }
10111 if (cold_text_section_used)
10112 {
10113 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10114 "Address");
10115 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10116 cold_text_section_label, "Length");
10117 }
10118
10119 if (have_multiple_function_sections)
10120 {
10121 unsigned fde_idx;
10122 dw_fde_ref fde;
10123
10124 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
10125 {
10126 if (DECL_IGNORED_P (fde->decl))
10127 continue;
10128 if (!fde->in_std_section)
10129 {
10130 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
10131 "Address");
10132 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
10133 fde->dw_fde_begin, "Length");
10134 }
10135 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
10136 {
10137 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
10138 "Address");
10139 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
10140 fde->dw_fde_second_begin, "Length");
10141 }
10142 }
10143 }
10144
10145 /* Output the terminator words. */
10146 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10147 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10148 }
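
/* Rough picture of the emitted table on a target with 32-bit DWARF
   offsets and 8-byte addresses; the labels are illustrative:

	.long	<aranges_length>	Length of Address Ranges Info
	.value	0x2			DWARF Version
	.long	.Ldebug_info0		Offset of Compilation Unit Info
	.byte	0x8			Size of Address
	.byte	0			Size of Segment Descriptor
	.value	0			Pad to 16 byte boundary
	.value	0
	.quad	.Ltext0			Address
	.quad	.Letext0-.Ltext0	Length
	.quad	0
	.quad	0			terminator  */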
10149
10150 /* Add a new entry to .debug_ranges. Return the offset at which it
10151 was placed. */
10152
10153 static unsigned int
10154 add_ranges_num (int num)
10155 {
10156 unsigned int in_use = ranges_table_in_use;
10157
10158 if (in_use == ranges_table_allocated)
10159 {
10160 ranges_table_allocated += RANGES_TABLE_INCREMENT;
10161 ranges_table = GGC_RESIZEVEC (dw_ranges, ranges_table,
10162 ranges_table_allocated);
10163 memset (ranges_table + ranges_table_in_use, 0,
10164 RANGES_TABLE_INCREMENT * sizeof (dw_ranges));
10165 }
10166
10167 ranges_table[in_use].num = num;
10168 ranges_table_in_use = in_use + 1;
10169
10170 return in_use * 2 * DWARF2_ADDR_SIZE;
10171 }
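
/* Worked example of the offset computation above: on a target with
   8-byte addresses, the fourth entry added (in_use == 3) is placed at
   byte offset 3 * 2 * 8 == 48 within .debug_ranges, i.e. right after
   three begin/end address pairs.  */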
10172
10173 /* Add a new entry to .debug_ranges corresponding to a block, or a
10174 range terminator if BLOCK is NULL. */
10175
10176 static unsigned int
10177 add_ranges (const_tree block)
10178 {
10179 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0);
10180 }
10181
10182 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
10183 When using dwarf_split_debug_info, address attributes in dies destined
10184 for the final executable should be direct references--setting the
10185 parameter force_direct ensures this behavior. */
10186
10187 static void
10188 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
10189 bool *added, bool force_direct)
10190 {
10191 unsigned int in_use = ranges_by_label_in_use;
10192 unsigned int offset;
10193
10194 if (in_use == ranges_by_label_allocated)
10195 {
10196 ranges_by_label_allocated += RANGES_TABLE_INCREMENT;
10197 ranges_by_label = GGC_RESIZEVEC (dw_ranges_by_label, ranges_by_label,
10198 ranges_by_label_allocated);
10199 memset (ranges_by_label + ranges_by_label_in_use, 0,
10200 RANGES_TABLE_INCREMENT * sizeof (dw_ranges_by_label));
10201 }
10202
10203 ranges_by_label[in_use].begin = begin;
10204 ranges_by_label[in_use].end = end;
10205 ranges_by_label_in_use = in_use + 1;
10206
10207 offset = add_ranges_num (-(int)in_use - 1);
10208 if (!*added)
10209 {
10210 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
10211 *added = true;
10212 }
10213 }
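
/* Example of the encoding used above: the first by-label range added
   (in_use == 0) is recorded as num == -1, and output_ranges below
   recovers the index as lab_idx = -block_num - 1 == 0.  */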
10214
10215 static void
10216 output_ranges (void)
10217 {
10218 unsigned i;
10219 static const char *const start_fmt = "Offset %#x";
10220 const char *fmt = start_fmt;
10221
10222 for (i = 0; i < ranges_table_in_use; i++)
10223 {
10224 int block_num = ranges_table[i].num;
10225
10226 if (block_num > 0)
10227 {
10228 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
10229 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
10230
10231 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
10232 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
10233
10234 /* If all code is in the text section, then the compilation
10235 unit base address defaults to DW_AT_low_pc, which is the
10236 base of the text section. */
10237 if (!have_multiple_function_sections)
10238 {
10239 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
10240 text_section_label,
10241 fmt, i * 2 * DWARF2_ADDR_SIZE);
10242 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
10243 text_section_label, NULL);
10244 }
10245
10246 /* Otherwise, the compilation unit base address is zero,
10247 which allows us to use absolute addresses, and not worry
10248 about whether the target supports cross-section
10249 arithmetic. */
10250 else
10251 {
10252 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
10253 fmt, i * 2 * DWARF2_ADDR_SIZE);
10254 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
10255 }
10256
10257 fmt = NULL;
10258 }
10259
10260 /* Negative block_num stands for an index into ranges_by_label. */
10261 else if (block_num < 0)
10262 {
10263 int lab_idx = - block_num - 1;
10264
10265 if (!have_multiple_function_sections)
10266 {
10267 gcc_unreachable ();
10268 #if 0
10269 /* If we ever use add_ranges_by_labels () for a single
10270 function section, all we have to do is to take out
10271 the #if 0 above. */
10272 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10273 ranges_by_label[lab_idx].begin,
10274 text_section_label,
10275 fmt, i * 2 * DWARF2_ADDR_SIZE);
10276 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10277 ranges_by_label[lab_idx].end,
10278 text_section_label, NULL);
10279 #endif
10280 }
10281 else
10282 {
10283 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10284 ranges_by_label[lab_idx].begin,
10285 fmt, i * 2 * DWARF2_ADDR_SIZE);
10286 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10287 ranges_by_label[lab_idx].end,
10288 NULL);
10289 }
10290 }
10291 else
10292 {
10293 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10294 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10295 fmt = start_fmt;
10296 }
10297 }
10298 }
10299
10300 /* Data structure containing information about input files. */
10301 struct file_info
10302 {
10303 const char *path; /* Complete file name. */
10304 const char *fname; /* File name part. */
10305 int length; /* Length of entire string. */
10306 struct dwarf_file_data * file_idx; /* Index in input file table. */
10307 int dir_idx; /* Index in directory table. */
10308 };
10309
10310 /* Data structure containing information about directories with source
10311 files. */
10312 struct dir_info
10313 {
10314 const char *path; /* Path including directory name. */
10315 int length; /* Path length. */
10316 int prefix; /* Index of directory entry which is a prefix. */
10317 int count; /* Number of files in this directory. */
10318 int dir_idx; /* Index of directory used as base. */
10319 };
10320
10321 /* Callback function for file_info comparison. We sort by looking at
10322 the directories in the path. */
10323
10324 static int
10325 file_info_cmp (const void *p1, const void *p2)
10326 {
10327 const struct file_info *const s1 = (const struct file_info *) p1;
10328 const struct file_info *const s2 = (const struct file_info *) p2;
10329 const unsigned char *cp1;
10330 const unsigned char *cp2;
10331
10332 /* Take care of file names without directories. We need to make sure that
10333 we return consistent values to qsort since some implementations will get confused if
10334 we return the same value when identical operands are passed in opposite
10335 orders. So if neither has a directory, return 0 and otherwise return
10336 1 or -1 depending on which one has the directory. */
10337 if ((s1->path == s1->fname || s2->path == s2->fname))
10338 return (s2->path == s2->fname) - (s1->path == s1->fname);
10339
10340 cp1 = (const unsigned char *) s1->path;
10341 cp2 = (const unsigned char *) s2->path;
10342
10343 while (1)
10344 {
10345 ++cp1;
10346 ++cp2;
10347 /* Reached the end of the first path? If so, handle like above. */
10348 if ((cp1 == (const unsigned char *) s1->fname)
10349 || (cp2 == (const unsigned char *) s2->fname))
10350 return ((cp2 == (const unsigned char *) s2->fname)
10351 - (cp1 == (const unsigned char *) s1->fname));
10352
10353 /* Character of current path component the same? */
10354 else if (*cp1 != *cp2)
10355 return *cp1 - *cp2;
10356 }
10357 }
10358
10359 struct file_name_acquire_data
10360 {
10361 struct file_info *files;
10362 int used_files;
10363 int max_files;
10364 };
10365
10366 /* Traversal function for the hash table. */
10367
10368 int
10369 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
10370 {
10371 struct dwarf_file_data *d = *slot;
10372 struct file_info *fi;
10373 const char *f;
10374
10375 gcc_assert (fnad->max_files >= d->emitted_number);
10376
10377 if (! d->emitted_number)
10378 return 1;
10379
10380 gcc_assert (fnad->max_files != fnad->used_files);
10381
10382 fi = fnad->files + fnad->used_files++;
10383
10384 /* Skip all leading "./". */
10385 f = d->filename;
10386 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
10387 f += 2;
10388
10389 /* Create a new array entry. */
10390 fi->path = f;
10391 fi->length = strlen (f);
10392 fi->file_idx = d;
10393
10394 /* Search for the file name part. */
10395 f = strrchr (f, DIR_SEPARATOR);
10396 #if defined (DIR_SEPARATOR_2)
10397 {
10398 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
10399
10400 if (g != NULL)
10401 {
10402 if (f == NULL || f < g)
10403 f = g;
10404 }
10405 }
10406 #endif
10407
10408 fi->fname = f == NULL ? fi->path : f + 1;
10409 return 1;
10410 }
10411
10412 /* Output the directory table and the file name table. We try to minimize
10413 the total amount of memory needed. A heuristic is used to avoid large
10414 slowdowns with many input files. */
10415
10416 static void
10417 output_file_names (void)
10418 {
10419 struct file_name_acquire_data fnad;
10420 int numfiles;
10421 struct file_info *files;
10422 struct dir_info *dirs;
10423 int *saved;
10424 int *savehere;
10425 int *backmap;
10426 int ndirs;
10427 int idx_offset;
10428 int i;
10429
10430 if (!last_emitted_file)
10431 {
10432 dw2_asm_output_data (1, 0, "End directory table");
10433 dw2_asm_output_data (1, 0, "End file name table");
10434 return;
10435 }
10436
10437 numfiles = last_emitted_file->emitted_number;
10438
10439 /* Allocate the various arrays we need. */
10440 files = XALLOCAVEC (struct file_info, numfiles);
10441 dirs = XALLOCAVEC (struct dir_info, numfiles);
10442
10443 fnad.files = files;
10444 fnad.used_files = 0;
10445 fnad.max_files = numfiles;
10446 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
10447 gcc_assert (fnad.used_files == fnad.max_files);
10448
10449 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
10450
10451 /* Find all the different directories used. */
10452 dirs[0].path = files[0].path;
10453 dirs[0].length = files[0].fname - files[0].path;
10454 dirs[0].prefix = -1;
10455 dirs[0].count = 1;
10456 dirs[0].dir_idx = 0;
10457 files[0].dir_idx = 0;
10458 ndirs = 1;
10459
10460 for (i = 1; i < numfiles; i++)
10461 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
10462 && memcmp (dirs[ndirs - 1].path, files[i].path,
10463 dirs[ndirs - 1].length) == 0)
10464 {
10465 /* Same directory as last entry. */
10466 files[i].dir_idx = ndirs - 1;
10467 ++dirs[ndirs - 1].count;
10468 }
10469 else
10470 {
10471 int j;
10472
10473 /* This is a new directory. */
10474 dirs[ndirs].path = files[i].path;
10475 dirs[ndirs].length = files[i].fname - files[i].path;
10476 dirs[ndirs].count = 1;
10477 dirs[ndirs].dir_idx = ndirs;
10478 files[i].dir_idx = ndirs;
10479
10480 /* Search for a prefix. */
10481 dirs[ndirs].prefix = -1;
10482 for (j = 0; j < ndirs; j++)
10483 if (dirs[j].length < dirs[ndirs].length
10484 && dirs[j].length > 1
10485 && (dirs[ndirs].prefix == -1
10486 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
10487 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
10488 dirs[ndirs].prefix = j;
10489
10490 ++ndirs;
10491 }
10492
10493 /* Now to the actual work. We have to find a subset of the directories that
10494 allows expressing each file name using references to the directory table
10495 with the fewest characters. We do not do an exhaustive search
10496 where we would have to check every combination of every single
10497 possible prefix. Instead we use a heuristic which provides nearly optimal
10498 results in most cases and is never far off. */
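
/* Purely illustrative example of the bookkeeping below: given the files
   /usr/src/a.c, /usr/src/b.c and /usr/src/sub/c.c, choosing /usr/src/ as
   a base saves strlen ("/usr/src/") characters for each of the three
   file entries, whereas making /usr/src/sub/ a base of its own would
   only shorten the single file underneath it by the extra "sub/", which
   does not pay for itself; so all three files end up using /usr/src/ as
   their base directory and the last one is emitted as "sub/c.c".  */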
10499 saved = XALLOCAVEC (int, ndirs);
10500 savehere = XALLOCAVEC (int, ndirs);
10501
10502 memset (saved, '\0', ndirs * sizeof (saved[0]));
10503 for (i = 0; i < ndirs; i++)
10504 {
10505 int j;
10506 int total;
10507
10508 /* We can always save some space for the current directory. But this
10509 does not mean it will be enough to justify adding the directory. */
10510 savehere[i] = dirs[i].length;
10511 total = (savehere[i] - saved[i]) * dirs[i].count;
10512
10513 for (j = i + 1; j < ndirs; j++)
10514 {
10515 savehere[j] = 0;
10516 if (saved[j] < dirs[i].length)
10517 {
10518 /* Determine whether the dirs[i] path is a prefix of the
10519 dirs[j] path. */
10520 int k;
10521
10522 k = dirs[j].prefix;
10523 while (k != -1 && k != (int) i)
10524 k = dirs[k].prefix;
10525
10526 if (k == (int) i)
10527 {
10528 /* Yes it is. We can possibly save some memory by
10529 writing the filenames in dirs[j] relative to
10530 dirs[i]. */
10531 savehere[j] = dirs[i].length;
10532 total += (savehere[j] - saved[j]) * dirs[j].count;
10533 }
10534 }
10535 }
10536
10537 /* Check whether we can save enough to justify adding the dirs[i]
10538 directory. */
10539 if (total > dirs[i].length + 1)
10540 {
10541 /* It's worthwhile adding. */
10542 for (j = i; j < ndirs; j++)
10543 if (savehere[j] > 0)
10544 {
10545 /* Remember how much we saved for this directory so far. */
10546 saved[j] = savehere[j];
10547
10548 /* Remember the prefix directory. */
10549 dirs[j].dir_idx = i;
10550 }
10551 }
10552 }
10553
10554 /* Emit the directory name table. */
10555 idx_offset = dirs[0].length > 0 ? 1 : 0;
10556 for (i = 1 - idx_offset; i < ndirs; i++)
10557 dw2_asm_output_nstring (dirs[i].path,
10558 dirs[i].length
10559 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
10560 "Directory Entry: %#x", i + idx_offset);
10561
10562 dw2_asm_output_data (1, 0, "End directory table");
10563
10564 /* We have to emit them in the order of emitted_number since that's
10565 used in the debug info generation. To do this efficiently we
10566 generate a back-mapping of the indices first. */
10567 backmap = XALLOCAVEC (int, numfiles);
10568 for (i = 0; i < numfiles; i++)
10569 backmap[files[i].file_idx->emitted_number - 1] = i;
10570
10571 /* Now write all the file names. */
10572 for (i = 0; i < numfiles; i++)
10573 {
10574 int file_idx = backmap[i];
10575 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
10576
10577 #ifdef VMS_DEBUGGING_INFO
10578 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
10579
10580 /* Setting these fields can lead to debugger miscomparisons,
10581 but VMS Debug requires them to be set correctly. */
10582
10583 int ver;
10584 long long cdt;
10585 long siz;
10586 int maxfilelen = strlen (files[file_idx].path)
10587 + dirs[dir_idx].length
10588 + MAX_VMS_VERSION_LEN + 1;
10589 char *filebuf = XALLOCAVEC (char, maxfilelen);
10590
10591 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
10592 snprintf (filebuf, maxfilelen, "%s;%d",
10593 files[file_idx].path + dirs[dir_idx].length, ver);
10594
10595 dw2_asm_output_nstring
10596 (filebuf, -1, "File Entry: %#x", (unsigned) i + 1);
10597
10598 /* Include directory index. */
10599 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10600
10601 /* Modification time. */
10602 dw2_asm_output_data_uleb128
10603 ((vms_file_stats_name (files[file_idx].path, &cdt, 0, 0, 0) == 0)
10604 ? cdt : 0,
10605 NULL);
10606
10607 /* File length in bytes. */
10608 dw2_asm_output_data_uleb128
10609 ((vms_file_stats_name (files[file_idx].path, 0, &siz, 0, 0) == 0)
10610 ? siz : 0,
10611 NULL);
10612 #else
10613 dw2_asm_output_nstring (files[file_idx].path + dirs[dir_idx].length, -1,
10614 "File Entry: %#x", (unsigned) i + 1);
10615
10616 /* Include directory index. */
10617 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10618
10619 /* Modification time. */
10620 dw2_asm_output_data_uleb128 (0, NULL);
10621
10622 /* File length in bytes. */
10623 dw2_asm_output_data_uleb128 (0, NULL);
10624 #endif /* VMS_DEBUGGING_INFO */
10625 }
10626
10627 dw2_asm_output_data (1, 0, "End file name table");
10628 }
10629
10630
10631 /* Output one line number table into the .debug_line section. */
10632
10633 static void
10634 output_one_line_info_table (dw_line_info_table *table)
10635 {
10636 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
10637 unsigned int current_line = 1;
10638 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
10639 dw_line_info_entry *ent;
10640 size_t i;
10641
10642 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
10643 {
10644 switch (ent->opcode)
10645 {
10646 case LI_set_address:
10647 /* ??? Unfortunately, we have little choice here currently, and
10648 must always use the most general form. GCC does not know the
10649 address delta itself, so we can't use DW_LNS_advance_pc. Many
10650 ports do have length attributes which will give an upper bound
10651 on the address range. We could perhaps use length attributes
10652 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
10653 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
10654
10655 /* This can handle any delta. This takes
10656 3+DWARF2_ADDR_SIZE bytes. */
10657 dw2_asm_output_data (1, 0, "set address %s", line_label);
10658 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10659 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10660 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
10661 break;
10662
10663 case LI_set_line:
10664 if (ent->val == current_line)
10665 {
10666 /* We still need to start a new row, so output a copy insn. */
10667 dw2_asm_output_data (1, DW_LNS_copy,
10668 "copy line %u", current_line);
10669 }
10670 else
10671 {
10672 int line_offset = ent->val - current_line;
10673 int line_delta = line_offset - DWARF_LINE_BASE;
10674
10675 current_line = ent->val;
10676 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
10677 {
10678 /* This can handle deltas from -10 to 234, using the current
10679 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
10680 This takes 1 byte. */
10681 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
10682 "line %u", current_line);
10683 }
10684 else
10685 {
10686 /* This can handle any delta. This takes at least 4 bytes,
10687 depending on the value being encoded. */
10688 dw2_asm_output_data (1, DW_LNS_advance_line,
10689 "advance to line %u", current_line);
10690 dw2_asm_output_data_sleb128 (line_offset, NULL);
10691 dw2_asm_output_data (1, DW_LNS_copy, NULL);
10692 }
10693 }
10694 break;
10695
10696 case LI_set_file:
10697 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
10698 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10699 break;
10700
10701 case LI_set_column:
10702 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
10703 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10704 break;
10705
10706 case LI_negate_stmt:
10707 current_is_stmt = !current_is_stmt;
10708 dw2_asm_output_data (1, DW_LNS_negate_stmt,
10709 "is_stmt %d", current_is_stmt);
10710 break;
10711
10712 case LI_set_prologue_end:
10713 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
10714 "set prologue end");
10715 break;
10716
10717 case LI_set_epilogue_begin:
10718 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
10719 "set epilogue begin");
10720 break;
10721
10722 case LI_set_discriminator:
10723 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
10724 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
10725 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
10726 dw2_asm_output_data_uleb128 (ent->val, NULL);
10727 break;
10728 }
10729 }
10730
10731 /* Emit debug info for the address of the end of the table. */
10732 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
10733 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10734 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10735 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
10736
10737 dw2_asm_output_data (1, 0, "end sequence");
10738 dw2_asm_output_data_uleb128 (1, NULL);
10739 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
10740 }
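
/* Byte-level illustration of the extended opcode sequences used above,
   assuming 8-byte addresses: a LI_set_address entry becomes

	0x00			extended opcode marker
	0x09			ULEB128 length (1 + address size)
	0x02			DW_LNE_set_address
	<8-byte address of the line-code label>

   and the closing end-of-sequence is the three bytes 0x00 0x01 0x01
   (DW_LNE_end_sequence).  */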
10741
10742 /* Output the source line number correspondence information. This
10743 information goes into the .debug_line section. */
10744
10745 static void
10746 output_line_info (bool prologue_only)
10747 {
10748 static unsigned int generation;
10749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
10750 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
10751 /* We don't support DWARFv5 line tables yet. */
10752 int ver = dwarf_version < 5 ? dwarf_version : 4;
10753 bool saw_one = false;
10754 int opc;
10755
10756 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
10757 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
10758 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
10759 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
10760
10761 if (!XCOFF_DEBUGGING_INFO)
10762 {
10763 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10764 dw2_asm_output_data (4, 0xffffffff,
10765 "Initial length escape value indicating 64-bit DWARF extension");
10766 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
10767 "Length of Source Line Info");
10768 }
10769
10770 ASM_OUTPUT_LABEL (asm_out_file, l1);
10771
10772 dw2_asm_output_data (2, ver, "DWARF Version");
10773 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
10774 ASM_OUTPUT_LABEL (asm_out_file, p1);
10775
10776 /* Define the architecture-dependent minimum instruction length (in bytes).
10777 In this implementation of DWARF, this field is used for information
10778 purposes only. Since GCC generates assembly language, we have no
10779 a priori knowledge of how many instruction bytes are generated for each
10780 source line, and therefore can use only the DW_LNE_set_address and
10781 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
10782 this as '1', which is "correct enough" for all architectures,
10783 and don't let the target override. */
10784 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
10785
10786 if (ver >= 4)
10787 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
10788 "Maximum Operations Per Instruction");
10789 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
10790 "Default is_stmt_start flag");
10791 dw2_asm_output_data (1, DWARF_LINE_BASE,
10792 "Line Base Value (Special Opcodes)");
10793 dw2_asm_output_data (1, DWARF_LINE_RANGE,
10794 "Line Range Value (Special Opcodes)");
10795 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
10796 "Special Opcode Base");
10797
10798 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
10799 {
10800 int n_op_args;
10801 switch (opc)
10802 {
10803 case DW_LNS_advance_pc:
10804 case DW_LNS_advance_line:
10805 case DW_LNS_set_file:
10806 case DW_LNS_set_column:
10807 case DW_LNS_fixed_advance_pc:
10808 case DW_LNS_set_isa:
10809 n_op_args = 1;
10810 break;
10811 default:
10812 n_op_args = 0;
10813 break;
10814 }
10815
10816 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
10817 opc, n_op_args);
10818 }
10819
10820 /* Write out the information about the files we use. */
10821 output_file_names ();
10822 ASM_OUTPUT_LABEL (asm_out_file, p2);
10823 if (prologue_only)
10824 {
10825 /* Output the marker for the end of the line number info. */
10826 ASM_OUTPUT_LABEL (asm_out_file, l2);
10827 return;
10828 }
10829
10830 if (separate_line_info)
10831 {
10832 dw_line_info_table *table;
10833 size_t i;
10834
10835 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
10836 if (table->in_use)
10837 {
10838 output_one_line_info_table (table);
10839 saw_one = true;
10840 }
10841 }
10842 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
10843 {
10844 output_one_line_info_table (cold_text_section_line_info);
10845 saw_one = true;
10846 }
10847
10848 /* ??? Some Darwin linkers crash on a .debug_line section with no
10849 sequences. Further, merely a DW_LNE_end_sequence entry is not
10850 sufficient -- the address column must also be initialized.
10851 Make sure to output at least one set_address/end_sequence pair,
10852 choosing .text since that section is always present. */
10853 if (text_section_line_info->in_use || !saw_one)
10854 output_one_line_info_table (text_section_line_info);
10855
10856 /* Output the marker for the end of the line number info. */
10857 ASM_OUTPUT_LABEL (asm_out_file, l2);
10858 }
10859 \f
10860 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
10861
10862 static inline bool
10863 need_endianity_attribute_p (bool reverse)
10864 {
10865 return reverse && (dwarf_version >= 3 || !dwarf_strict);
10866 }
10867
10868 /* Given a pointer to a tree node for some base type, return a pointer to
10869 a DIE that describes the given type. REVERSE is true if the type is
10870 to be interpreted in the reverse storage order wrt the target order.
10871
10872 This routine must only be called for GCC type nodes that correspond to
10873 Dwarf base (fundamental) types. */
10874
10875 static dw_die_ref
10876 base_type_die (tree type, bool reverse)
10877 {
10878 dw_die_ref base_type_result;
10879 enum dwarf_type encoding;
10880 bool fpt_used = false;
10881 struct fixed_point_type_info fpt_info;
10882 tree type_bias = NULL_TREE;
10883
10884 if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
10885 return 0;
10886
10887 /* If this is a subtype that should not be emitted as a subrange type,
10888 use the base type. See subrange_type_for_debug_p. */
10889 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
10890 type = TREE_TYPE (type);
10891
10892 switch (TREE_CODE (type))
10893 {
10894 case INTEGER_TYPE:
10895 if ((dwarf_version >= 4 || !dwarf_strict)
10896 && TYPE_NAME (type)
10897 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
10898 && DECL_IS_BUILTIN (TYPE_NAME (type))
10899 && DECL_NAME (TYPE_NAME (type)))
10900 {
10901 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
10902 if (strcmp (name, "char16_t") == 0
10903 || strcmp (name, "char32_t") == 0)
10904 {
10905 encoding = DW_ATE_UTF;
10906 break;
10907 }
10908 }
10909 if ((dwarf_version >= 3 || !dwarf_strict)
10910 && lang_hooks.types.get_fixed_point_type_info)
10911 {
10912 memset (&fpt_info, 0, sizeof (fpt_info));
10913 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
10914 {
10915 fpt_used = true;
10916 encoding = ((TYPE_UNSIGNED (type))
10917 ? DW_ATE_unsigned_fixed
10918 : DW_ATE_signed_fixed);
10919 break;
10920 }
10921 }
10922 if (TYPE_STRING_FLAG (type))
10923 {
10924 if (TYPE_UNSIGNED (type))
10925 encoding = DW_ATE_unsigned_char;
10926 else
10927 encoding = DW_ATE_signed_char;
10928 }
10929 else if (TYPE_UNSIGNED (type))
10930 encoding = DW_ATE_unsigned;
10931 else
10932 encoding = DW_ATE_signed;
10933
10934 if (!dwarf_strict
10935 && lang_hooks.types.get_type_bias)
10936 type_bias = lang_hooks.types.get_type_bias (type);
10937 break;
10938
10939 case REAL_TYPE:
10940 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
10941 {
10942 if (dwarf_version >= 3 || !dwarf_strict)
10943 encoding = DW_ATE_decimal_float;
10944 else
10945 encoding = DW_ATE_lo_user;
10946 }
10947 else
10948 encoding = DW_ATE_float;
10949 break;
10950
10951 case FIXED_POINT_TYPE:
10952 if (!(dwarf_version >= 3 || !dwarf_strict))
10953 encoding = DW_ATE_lo_user;
10954 else if (TYPE_UNSIGNED (type))
10955 encoding = DW_ATE_unsigned_fixed;
10956 else
10957 encoding = DW_ATE_signed_fixed;
10958 break;
10959
10960 /* Dwarf2 doesn't know anything about complex ints, so use
10961 a user-defined type for them. */
10962 case COMPLEX_TYPE:
10963 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
10964 encoding = DW_ATE_complex_float;
10965 else
10966 encoding = DW_ATE_lo_user;
10967 break;
10968
10969 case BOOLEAN_TYPE:
10970 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
10971 encoding = DW_ATE_boolean;
10972 break;
10973
10974 default:
10975 /* No other TREE_CODEs are Dwarf fundamental types. */
10976 gcc_unreachable ();
10977 }
10978
10979 base_type_result = new_die (DW_TAG_base_type, comp_unit_die (), type);
10980
10981 add_AT_unsigned (base_type_result, DW_AT_byte_size,
10982 int_size_in_bytes (type));
10983 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
10984
10985 if (need_endianity_attribute_p (reverse))
10986 add_AT_unsigned (base_type_result, DW_AT_endianity,
10987 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
10988
10989 if (fpt_used)
10990 {
10991 switch (fpt_info.scale_factor_kind)
10992 {
10993 case fixed_point_scale_factor_binary:
10994 add_AT_int (base_type_result, DW_AT_binary_scale,
10995 fpt_info.scale_factor.binary);
10996 break;
10997
10998 case fixed_point_scale_factor_decimal:
10999 add_AT_int (base_type_result, DW_AT_decimal_scale,
11000 fpt_info.scale_factor.decimal);
11001 break;
11002
11003 case fixed_point_scale_factor_arbitrary:
11004 /* Arbitrary scale factors cannot be described in standard DWARF,
11005 yet. */
11006 if (!dwarf_strict)
11007 {
11008 /* Describe the scale factor as a rational constant. */
11009 const dw_die_ref scale_factor
11010 = new_die (DW_TAG_constant, comp_unit_die (), type);
11011
11012 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
11013 fpt_info.scale_factor.arbitrary.numerator);
11014 add_AT_int (scale_factor, DW_AT_GNU_denominator,
11015 fpt_info.scale_factor.arbitrary.denominator);
11016
11017 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
11018 }
11019 break;
11020
11021 default:
11022 gcc_unreachable ();
11023 }
11024 }
11025
11026 if (type_bias)
11027 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
11028 dw_scalar_form_constant
11029 | dw_scalar_form_exprloc
11030 | dw_scalar_form_reference,
11031 NULL);
11032
11033 add_pubtype (type, base_type_result);
11034
11035 return base_type_result;
11036 }
11037
11038 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
11039 named 'auto' in its type: return true for it, false otherwise. */
11040
11041 static inline bool
11042 is_cxx_auto (tree type)
11043 {
11044 if (is_cxx ())
11045 {
11046 tree name = TYPE_IDENTIFIER (type);
11047 if (name == get_identifier ("auto")
11048 || name == get_identifier ("decltype(auto)"))
11049 return true;
11050 }
11051 return false;
11052 }
11053
11054 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
11055 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
11056
11057 static inline int
11058 is_base_type (tree type)
11059 {
11060 switch (TREE_CODE (type))
11061 {
11062 case ERROR_MARK:
11063 case VOID_TYPE:
11064 case INTEGER_TYPE:
11065 case REAL_TYPE:
11066 case FIXED_POINT_TYPE:
11067 case COMPLEX_TYPE:
11068 case BOOLEAN_TYPE:
11069 case POINTER_BOUNDS_TYPE:
11070 return 1;
11071
11072 case ARRAY_TYPE:
11073 case RECORD_TYPE:
11074 case UNION_TYPE:
11075 case QUAL_UNION_TYPE:
11076 case ENUMERAL_TYPE:
11077 case FUNCTION_TYPE:
11078 case METHOD_TYPE:
11079 case POINTER_TYPE:
11080 case REFERENCE_TYPE:
11081 case NULLPTR_TYPE:
11082 case OFFSET_TYPE:
11083 case LANG_TYPE:
11084 case VECTOR_TYPE:
11085 return 0;
11086
11087 default:
11088 if (is_cxx_auto (type))
11089 return 0;
11090 gcc_unreachable ();
11091 }
11092
11093 return 0;
11094 }
11095
11096 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
11097 node, return the size in bits for the type if it is a constant, or else
11098 return the alignment for the type if the type's size is not constant, or
11099 else return BITS_PER_WORD if the type actually turns out to be an
11100 ERROR_MARK node. */
11101
11102 static inline unsigned HOST_WIDE_INT
11103 simple_type_size_in_bits (const_tree type)
11104 {
11105 if (TREE_CODE (type) == ERROR_MARK)
11106 return BITS_PER_WORD;
11107 else if (TYPE_SIZE (type) == NULL_TREE)
11108 return 0;
11109 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
11110 return tree_to_uhwi (TYPE_SIZE (type));
11111 else
11112 return TYPE_ALIGN (type);
11113 }
11114
11115 /* Similarly, but return an offset_int instead of UHWI. */
11116
11117 static inline offset_int
11118 offset_int_type_size_in_bits (const_tree type)
11119 {
11120 if (TREE_CODE (type) == ERROR_MARK)
11121 return BITS_PER_WORD;
11122 else if (TYPE_SIZE (type) == NULL_TREE)
11123 return 0;
11124 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
11125 return wi::to_offset (TYPE_SIZE (type));
11126 else
11127 return TYPE_ALIGN (type);
11128 }
11129
11130 /* Given a pointer to a tree node for a subrange type, return a pointer
11131 to a DIE that describes the given type. */
11132
11133 static dw_die_ref
11134 subrange_type_die (tree type, tree low, tree high, tree bias,
11135 dw_die_ref context_die)
11136 {
11137 dw_die_ref subrange_die;
11138 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
11139
11140 if (context_die == NULL)
11141 context_die = comp_unit_die ();
11142
11143 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
11144
11145 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
11146 {
11147 /* The size of the subrange type and its base type do not match,
11148 so we need to generate a size attribute for the subrange type. */
11149 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
11150 }
11151
11152 if (low)
11153 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
11154 if (high)
11155 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
11156 if (bias && !dwarf_strict)
11157 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
11158 dw_scalar_form_constant
11159 | dw_scalar_form_exprloc
11160 | dw_scalar_form_reference,
11161 NULL);
11162
11163 return subrange_die;
11164 }
11165
11166 /* Returns the (const and/or volatile) cv_qualifiers associated with
11167 the decl node. This will normally be augmented with the
11168 cv_qualifiers of the underlying type in add_type_attribute. */
11169
11170 static int
11171 decl_quals (const_tree decl)
11172 {
11173 return ((TREE_READONLY (decl)
11174 /* The C++ front-end correctly marks reference-typed
11175 variables as readonly, but from a language (and debug
11176 info) standpoint they are not const-qualified. */
11177 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
11178 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
11179 | (TREE_THIS_VOLATILE (decl)
11180 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
11181 }
11182
11183 /* Determine the TYPE whose qualifiers match the largest strict subset
11184 of the given TYPE_QUALS, and return its qualifiers. Ignore all
11185 qualifiers outside QUAL_MASK. */
11186
11187 static int
11188 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
11189 {
11190 tree t;
11191 int best_rank = 0, best_qual = 0, max_rank;
11192
11193 type_quals &= qual_mask;
11194 max_rank = popcount_hwi (type_quals) - 1;
11195
11196 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
11197 t = TYPE_NEXT_VARIANT (t))
11198 {
11199 int q = TYPE_QUALS (t) & qual_mask;
11200
11201 if ((q & type_quals) == q && q != type_quals
11202 && check_base_type (t, type))
11203 {
11204 int rank = popcount_hwi (q);
11205
11206 if (rank > best_rank)
11207 {
11208 best_rank = rank;
11209 best_qual = q;
11210 }
11211 }
11212 }
11213
11214 return best_qual;
11215 }
11216
11217 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
11218 static const dwarf_qual_info_t dwarf_qual_info[] =
11219 {
11220 { TYPE_QUAL_CONST, DW_TAG_const_type },
11221 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
11222 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
11223 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
11224 };
11225 static const unsigned int dwarf_qual_info_size
11226 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
11227
11228 /* If DIE is a qualified DIE of some base DIE with the same parent,
11229 return the base DIE, otherwise return NULL. Set MASK to the
11230 qualifiers added compared to the returned DIE. */
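/* For instance (an illustration, not part of the original comment): for a
   DW_TAG_const_type DIE whose only attribute is a DW_AT_type reference to a
   DW_TAG_volatile_type sibling wrapping a base DIE in the same parent, this
   returns that base DIE (given a nonzero DEPTH) and ORs
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE into *MASK. */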
11231
11232 static dw_die_ref
11233 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
11234 {
11235 unsigned int i;
11236 for (i = 0; i < dwarf_qual_info_size; i++)
11237 if (die->die_tag == dwarf_qual_info[i].t)
11238 break;
11239 if (i == dwarf_qual_info_size)
11240 return NULL;
11241 if (vec_safe_length (die->die_attr) != 1)
11242 return NULL;
11243 dw_die_ref type = get_AT_ref (die, DW_AT_type);
11244 if (type == NULL || type->die_parent != die->die_parent)
11245 return NULL;
11246 *mask |= dwarf_qual_info[i].q;
11247 if (depth)
11248 {
11249 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
11250 if (ret)
11251 return ret;
11252 }
11253 return type;
11254 }
11255
11256 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
11257 entry that chains the modifiers specified by CV_QUALS in front of the
11258 given type. REVERSE is true if the type is to be interpreted in the
11259 reverse storage order wrt the target order. */
11260
11261 static dw_die_ref
11262 modified_type_die (tree type, int cv_quals, bool reverse,
11263 dw_die_ref context_die)
11264 {
11265 enum tree_code code = TREE_CODE (type);
11266 dw_die_ref mod_type_die;
11267 dw_die_ref sub_die = NULL;
11268 tree item_type = NULL;
11269 tree qualified_type;
11270 tree name, low, high;
11271 dw_die_ref mod_scope;
11272 /* Only these cv-qualifiers are currently handled. */
11273 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
11274 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC);
11275
11276 if (code == ERROR_MARK)
11277 return NULL;
11278
11279 if (lang_hooks.types.get_debug_type)
11280 {
11281 tree debug_type = lang_hooks.types.get_debug_type (type);
11282
11283 if (debug_type != NULL_TREE && debug_type != type)
11284 return modified_type_die (debug_type, cv_quals, reverse, context_die);
11285 }
11286
11287 cv_quals &= cv_qual_mask;
11288
11289 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
11290 tag modifier (and not an attribute) that old consumers won't be
11291 able to handle. */
11292 if (dwarf_version < 3)
11293 cv_quals &= ~TYPE_QUAL_RESTRICT;
11294
11295 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
11296 if (dwarf_version < 5)
11297 cv_quals &= ~TYPE_QUAL_ATOMIC;
11298
11299 /* See if we already have the appropriately qualified variant of
11300 this type. */
11301 qualified_type = get_qualified_type (type, cv_quals);
11302
11303 if (qualified_type == sizetype
11304 && TYPE_NAME (qualified_type)
11305 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
11306 {
11307 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
11308
11309 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
11310 && TYPE_PRECISION (t)
11311 == TYPE_PRECISION (qualified_type)
11312 && TYPE_UNSIGNED (t)
11313 == TYPE_UNSIGNED (qualified_type));
11314 qualified_type = t;
11315 }
11316
11317 /* If we do, then we can just use its DIE, if it exists. */
11318 if (qualified_type)
11319 {
11320 mod_type_die = lookup_type_die (qualified_type);
11321
11322 /* DW_AT_endianity doesn't come from a qualifier on the type. */
11323 if (mod_type_die
11324 && (!need_endianity_attribute_p (reverse)
11325 || !is_base_type (type)
11326 || get_AT_unsigned (mod_type_die, DW_AT_endianity)))
11327 return mod_type_die;
11328 }
11329
11330 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
11331
11332 /* Handle C typedef types. */
11333 if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name)
11334 && !DECL_ARTIFICIAL (name))
11335 {
11336 tree dtype = TREE_TYPE (name);
11337
11338 if (qualified_type == dtype)
11339 {
11340 /* For a named type, use the typedef. */
11341 gen_type_die (qualified_type, context_die);
11342 return lookup_type_die (qualified_type);
11343 }
11344 else
11345 {
11346 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
11347 dquals &= cv_qual_mask;
11348 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
11349 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
11350 /* cv-unqualified version of named type. Just use
11351 the unnamed type to which it refers. */
11352 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
11353 reverse, context_die);
11354 /* Else cv-qualified version of named type; fall through. */
11355 }
11356 }
11357
11358 mod_scope = scope_die_for (type, context_die);
11359
11360 if (cv_quals)
11361 {
11362 int sub_quals = 0, first_quals = 0;
11363 unsigned i;
11364 dw_die_ref first = NULL, last = NULL;
11365
11366 /* Determine a lesser qualified type that most closely matches
11367 this one. Then generate DW_TAG_* entries for the remaining
11368 qualifiers. */
11369 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
11370 cv_qual_mask);
11371 if (sub_quals && use_debug_types)
11372 {
11373 bool needed = false;
11374 /* If emitting type units, make sure the order of qualifiers
11375 is canonical. Thus, start from unqualified type if
11376 an earlier qualifier is missing in sub_quals, but some later
11377 one is present there. */
11378 for (i = 0; i < dwarf_qual_info_size; i++)
11379 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11380 needed = true;
11381 else if (needed && (dwarf_qual_info[i].q & cv_quals))
11382 {
11383 sub_quals = 0;
11384 break;
11385 }
11386 }
11387 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
11388 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
11389 {
11390 /* As not all intermediate qualified DIEs have corresponding
11391 tree types, ensure that qualified DIEs in the same scope
11392 as their DW_AT_type are emitted after their DW_AT_type,
11393 only with other qualified DIEs for the same type possibly
11394 in between them. Determine the range of such qualified
11395 DIEs now (first being the base type, last being the corresponding
11396 last qualified DIE for it). */
11397 unsigned int count = 0;
11398 first = qualified_die_p (mod_type_die, &first_quals,
11399 dwarf_qual_info_size);
11400 if (first == NULL)
11401 first = mod_type_die;
11402 gcc_assert ((first_quals & ~sub_quals) == 0);
11403 for (count = 0, last = first;
11404 count < (1U << dwarf_qual_info_size);
11405 count++, last = last->die_sib)
11406 {
11407 int quals = 0;
11408 if (last == mod_scope->die_child)
11409 break;
11410 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
11411 != first)
11412 break;
11413 }
11414 }
11415
11416 for (i = 0; i < dwarf_qual_info_size; i++)
11417 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11418 {
11419 dw_die_ref d;
11420 if (first && first != last)
11421 {
11422 for (d = first->die_sib; ; d = d->die_sib)
11423 {
11424 int quals = 0;
11425 qualified_die_p (d, &quals, dwarf_qual_info_size);
11426 if (quals == (first_quals | dwarf_qual_info[i].q))
11427 break;
11428 if (d == last)
11429 {
11430 d = NULL;
11431 break;
11432 }
11433 }
11434 if (d)
11435 {
11436 mod_type_die = d;
11437 continue;
11438 }
11439 }
11440 if (first)
11441 {
11442 d = ggc_cleared_alloc<die_node> ();
11443 d->die_tag = dwarf_qual_info[i].t;
11444 add_child_die_after (mod_scope, d, last);
11445 last = d;
11446 }
11447 else
11448 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
11449 if (mod_type_die)
11450 add_AT_die_ref (d, DW_AT_type, mod_type_die);
11451 mod_type_die = d;
11452 first_quals |= dwarf_qual_info[i].q;
11453 }
11454 }
11455 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
11456 {
11457 dwarf_tag tag = DW_TAG_pointer_type;
11458 if (code == REFERENCE_TYPE)
11459 {
11460 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
11461 tag = DW_TAG_rvalue_reference_type;
11462 else
11463 tag = DW_TAG_reference_type;
11464 }
11465 mod_type_die = new_die (tag, mod_scope, type);
11466
11467 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
11468 simple_type_size_in_bits (type) / BITS_PER_UNIT);
11469 item_type = TREE_TYPE (type);
11470
11471 addr_space_t as = TYPE_ADDR_SPACE (item_type);
11472 if (!ADDR_SPACE_GENERIC_P (as))
11473 {
11474 int action = targetm.addr_space.debug (as);
11475 if (action >= 0)
11476 {
11477 /* Positive values indicate an address_class. */
11478 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
11479 }
11480 else
11481 {
11482 /* Negative values indicate an (inverted) segment base reg. */
11483 dw_loc_descr_ref d
11484 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
11485 add_AT_loc (mod_type_die, DW_AT_segment, d);
11486 }
11487 }
11488 }
11489 else if (code == INTEGER_TYPE
11490 && TREE_TYPE (type) != NULL_TREE
11491 && subrange_type_for_debug_p (type, &low, &high))
11492 {
11493 tree bias = NULL_TREE;
11494 if (lang_hooks.types.get_type_bias)
11495 bias = lang_hooks.types.get_type_bias (type);
11496 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
11497 item_type = TREE_TYPE (type);
11498 }
11499 else if (is_base_type (type))
11500 mod_type_die = base_type_die (type, reverse);
11501 else
11502 {
11503 gen_type_die (type, context_die);
11504
11505 /* We have to get the type_main_variant here (and pass that to the
11506 `lookup_type_die' routine) because the ..._TYPE node we have
11507 might simply be a *copy* of some original type node (where the
11508 copy was created to help us keep track of typedef names) and
11509 that copy might have a different TYPE_UID from the original
11510 ..._TYPE node. */
11511 if (TREE_CODE (type) != VECTOR_TYPE
11512 && TREE_CODE (type) != ARRAY_TYPE)
11513 return lookup_type_die (type_main_variant (type));
11514 else
11515 /* Vectors have the debugging information in the type,
11516 not the main variant. */
11517 return lookup_type_die (type);
11518 }
11519
11520 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
11521 don't output a DW_TAG_typedef, since there isn't one in the
11522 user's program; just attach a DW_AT_name to the type.
11523 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
11524 if the base type already has the same name. */
11525 if (name
11526 && ((TREE_CODE (name) != TYPE_DECL
11527 && (qualified_type == TYPE_MAIN_VARIANT (type)
11528 || (cv_quals == TYPE_UNQUALIFIED)))
11529 || (TREE_CODE (name) == TYPE_DECL
11530 && TREE_TYPE (name) == qualified_type
11531 && DECL_NAME (name))))
11532 {
11533 if (TREE_CODE (name) == TYPE_DECL)
11534 /* Could just call add_name_and_src_coords_attributes here,
11535 but since this is a builtin type it doesn't have any
11536 useful source coordinates anyway. */
11537 name = DECL_NAME (name);
11538 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
11539 }
11540 /* This probably indicates a bug. */
11541 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
11542 {
11543 name = TYPE_IDENTIFIER (type);
11544 add_name_attribute (mod_type_die,
11545 name ? IDENTIFIER_POINTER (name) : "__unknown__");
11546 }
11547
11548 if (qualified_type)
11549 equate_type_number_to_die (qualified_type, mod_type_die);
11550
11551 if (item_type)
11552 /* We must do this after the equate_type_number_to_die call, in case
11553 this is a recursive type. This ensures that the modified_type_die
11554 recursion will terminate even if the type is recursive. Recursive
11555 types are possible in Ada. */
11556 sub_die = modified_type_die (item_type,
11557 TYPE_QUALS_NO_ADDR_SPACE (item_type),
11558 reverse,
11559 context_die);
11560
11561 if (sub_die != NULL)
11562 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
11563
11564 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
11565 if (TYPE_ARTIFICIAL (type))
11566 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
11567
11568 return mod_type_die;
11569 }
11570
11571 /* Generate DIEs for the generic parameters of T.
11572 T must be either a generic type or a generic function.
11573 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
11574
11575 static void
11576 gen_generic_params_dies (tree t)
11577 {
11578 tree parms, args;
11579 int parms_num, i;
11580 dw_die_ref die = NULL;
11581 int non_default;
11582
11583 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
11584 return;
11585
11586 if (TYPE_P (t))
11587 die = lookup_type_die (t);
11588 else if (DECL_P (t))
11589 die = lookup_decl_die (t);
11590
11591 gcc_assert (die);
11592
11593 parms = lang_hooks.get_innermost_generic_parms (t);
11594 if (!parms)
11595 /* T has no generic parameters. It means T is neither a generic type
11596 nor a generic function. End of story. */
11597 return;
11598
11599 parms_num = TREE_VEC_LENGTH (parms);
11600 args = lang_hooks.get_innermost_generic_args (t);
11601 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
11602 non_default = int_cst_value (TREE_CHAIN (args));
11603 else
11604 non_default = TREE_VEC_LENGTH (args);
11605 for (i = 0; i < parms_num; i++)
11606 {
11607 tree parm, arg, arg_pack_elems;
11608 dw_die_ref parm_die;
11609
11610 parm = TREE_VEC_ELT (parms, i);
11611 arg = TREE_VEC_ELT (args, i);
11612 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
11613 gcc_assert (parm && TREE_VALUE (parm) && arg);
11614
11615 if (parm && TREE_VALUE (parm) && arg)
11616 {
11617 /* If PARM represents a template parameter pack,
11618 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
11619 by DW_TAG_template_*_parameter DIEs for the argument
11620 pack elements of ARG. Note that ARG would then be
11621 an argument pack. */
11622 if (arg_pack_elems)
11623 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
11624 arg_pack_elems,
11625 die);
11626 else
11627 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
11628 true /* emit name */, die);
11629 if (i >= non_default)
11630 add_AT_flag (parm_die, DW_AT_default_value, 1);
11631 }
11632 }
11633 }
11634
11635 /* Create and return a DIE for PARM which should be
11636 the representation of a generic type parameter.
11637 For instance, in the C++ front end, PARM would be a template parameter.
11638 ARG is the argument to PARM.
11639 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
11640 name of PARM.
11641 PARENT_DIE is the parent DIE to which the newly created DIE should be
11642 added, as a child node. */
11643
11644 static dw_die_ref
11645 generic_parameter_die (tree parm, tree arg,
11646 bool emit_name_p,
11647 dw_die_ref parent_die)
11648 {
11649 dw_die_ref tmpl_die = NULL;
11650 const char *name = NULL;
11651
11652 if (!parm || !DECL_NAME (parm) || !arg)
11653 return NULL;
11654
11655 /* We support non-type generic parameters and arguments,
11656 type generic parameters and arguments, as well as
11657 generic generic parameters (a.k.a. template template parameters in C++)
11658 and arguments. */
11659 if (TREE_CODE (parm) == PARM_DECL)
11660 /* PARM is a non-type generic parameter. */
11661 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
11662 else if (TREE_CODE (parm) == TYPE_DECL)
11663 /* PARM is a type generic parameter. */
11664 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
11665 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11666 /* PARM is a generic generic parameter.
11667 Its DIE is a GNU extension. It shall have a
11668 DW_AT_name attribute to represent the name of the template template
11669 parameter, and a DW_AT_GNU_template_name attribute to represent the
11670 name of the template template argument. */
11671 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
11672 parent_die, parm);
11673 else
11674 gcc_unreachable ();
11675
11676 if (tmpl_die)
11677 {
11678 tree tmpl_type;
11679
11680 /* If PARM is a generic parameter pack, it means we are
11681 emitting debug info for a template argument pack element.
11682 In other words, ARG is a template argument pack element.
11683 In that case, we don't emit any DW_AT_name attribute for
11684 the die. */
11685 if (emit_name_p)
11686 {
11687 name = IDENTIFIER_POINTER (DECL_NAME (parm));
11688 gcc_assert (name);
11689 add_AT_string (tmpl_die, DW_AT_name, name);
11690 }
11691
11692 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11693 {
11694 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
11695 TMPL_DIE should have a child DW_AT_type attribute that is set
11696 to the type of the argument to PARM, which is ARG.
11697 If PARM is a type generic parameter, TMPL_DIE should have a
11698 child DW_AT_type that is set to ARG. */
11699 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
11700 add_type_attribute (tmpl_die, tmpl_type,
11701 (TREE_THIS_VOLATILE (tmpl_type)
11702 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
11703 false, parent_die);
11704 }
11705 else
11706 {
11707 /* So TMPL_DIE is a DIE representing a generic generic template
11708 parameter, a.k.a. a template template parameter in C++,
11709 and ARG is a template. */
11710
11711 /* The DW_AT_GNU_template_name attribute of the DIE must be set
11712 to the name of the argument. */
11713 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
11714 if (name)
11715 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
11716 }
11717
11718 if (TREE_CODE (parm) == PARM_DECL)
11719 /* So PARM is a non-type generic parameter.
11720 DWARF3 5.6.8 says we must set a DW_AT_const_value child
11721 attribute of TMPL_DIE whose value represents the value
11722 of ARG.
11723 We must be careful here:
11724 the value of ARG might reference some function decls.
11725 We might currently be emitting debug info for a generic
11726 type and types are emitted before function decls, so we don't
11727 know whether the function decls referenced by ARG will actually
11728 be emitted after cgraph computations.
11729 So we must defer the generation of the DW_AT_const_value to
11730 after cgraph is ready. */
11731 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
11732 }
11733
11734 return tmpl_die;
11735 }
11736
11737 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
11738 PARM_PACK. PARM_PACK must be a template parameter pack. The returned DIE
11739 will be a child DIE of PARENT_DIE. */
11740
11741 static dw_die_ref
11742 template_parameter_pack_die (tree parm_pack,
11743 tree parm_pack_args,
11744 dw_die_ref parent_die)
11745 {
11746 dw_die_ref die;
11747 int j;
11748
11749 gcc_assert (parent_die && parm_pack);
11750
11751 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
11752 add_name_and_src_coords_attributes (die, parm_pack);
11753 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
11754 generic_parameter_die (parm_pack,
11755 TREE_VEC_ELT (parm_pack_args, j),
11756 false /* Don't emit DW_AT_name */,
11757 die);
11758 return die;
11759 }
11760
11761 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
11762 an enumerated type. */
11763
11764 static inline int
11765 type_is_enum (const_tree type)
11766 {
11767 return TREE_CODE (type) == ENUMERAL_TYPE;
11768 }
11769
11770 /* Return the DBX register number described by a given RTL node. */
11771
11772 static unsigned int
11773 dbx_reg_number (const_rtx rtl)
11774 {
11775 unsigned regno = REGNO (rtl);
11776
11777 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
11778
11779 #ifdef LEAF_REG_REMAP
11780 if (crtl->uses_only_leaf_regs)
11781 {
11782 int leaf_reg = LEAF_REG_REMAP (regno);
11783 if (leaf_reg != -1)
11784 regno = (unsigned) leaf_reg;
11785 }
11786 #endif
11787
11788 regno = DBX_REGISTER_NUMBER (regno);
11789 gcc_assert (regno != INVALID_REGNUM);
11790 return regno;
11791 }
11792
11793 /* Optionally add a DW_OP_piece term to a location description expression.
11794 DW_OP_piece is only added if the location description expression does not
11795 already end with DW_OP_piece. */
11796
11797 static void
11798 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
11799 {
11800 dw_loc_descr_ref loc;
11801
11802 if (*list_head != NULL)
11803 {
11804 /* Find the end of the chain. */
11805 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
11806 ;
11807
11808 if (loc->dw_loc_opc != DW_OP_piece)
11809 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
11810 }
11811 }
11812
11813 /* Return a location descriptor that designates a machine register or
11814 zero if there is none. */
11815
11816 static dw_loc_descr_ref
11817 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
11818 {
11819 rtx regs;
11820
11821 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
11822 return 0;
11823
11824 /* We only use "frame base" when we're sure we're talking about the
11825 post-prologue local stack frame. We do this by *not* running
11826 register elimination until this point, and recognizing the special
11827 argument pointer and soft frame pointer rtx's.
11828 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
11829 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
11830 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
11831 {
11832 dw_loc_descr_ref result = NULL;
11833
11834 if (dwarf_version >= 4 || !dwarf_strict)
11835 {
11836 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
11837 initialized);
11838 if (result)
11839 add_loc_descr (&result,
11840 new_loc_descr (DW_OP_stack_value, 0, 0));
11841 }
11842 return result;
11843 }
11844
11845 regs = targetm.dwarf_register_span (rtl);
11846
11847 if (REG_NREGS (rtl) > 1 || regs)
11848 return multiple_reg_loc_descriptor (rtl, regs, initialized);
11849 else
11850 {
11851 unsigned int dbx_regnum = dbx_reg_number (rtl);
11852 if (dbx_regnum == IGNORED_DWARF_REGNUM)
11853 return 0;
11854 return one_reg_loc_descriptor (dbx_regnum, initialized);
11855 }
11856 }
11857
11858 /* Return a location descriptor that designates a machine register for
11859 a given hard register number. */
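/* For example, regno 3 yields the one-byte DW_OP_reg3, while regno 40 yields
   DW_OP_regx with a uleb128 operand of 40. */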
11860
11861 static dw_loc_descr_ref
11862 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
11863 {
11864 dw_loc_descr_ref reg_loc_descr;
11865
11866 if (regno <= 31)
11867 reg_loc_descr
11868 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
11869 else
11870 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
11871
11872 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
11873 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11874
11875 return reg_loc_descr;
11876 }
11877
11878 /* Given an RTL of a register, return a location descriptor that
11879 designates a value that spans more than one register. */
11880
11881 static dw_loc_descr_ref
11882 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
11883 enum var_init_status initialized)
11884 {
11885 int size, i;
11886 dw_loc_descr_ref loc_result = NULL;
11887
11888 /* Simple, contiguous registers. */
11889 if (regs == NULL_RTX)
11890 {
11891 unsigned reg = REGNO (rtl);
11892 int nregs;
11893
11894 #ifdef LEAF_REG_REMAP
11895 if (crtl->uses_only_leaf_regs)
11896 {
11897 int leaf_reg = LEAF_REG_REMAP (reg);
11898 if (leaf_reg != -1)
11899 reg = (unsigned) leaf_reg;
11900 }
11901 #endif
11902
11903 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
11904 nregs = REG_NREGS (rtl);
11905
11906 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
11907
11908 loc_result = NULL;
11909 while (nregs--)
11910 {
11911 dw_loc_descr_ref t;
11912
11913 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
11914 VAR_INIT_STATUS_INITIALIZED);
11915 add_loc_descr (&loc_result, t);
11916 add_loc_descr_op_piece (&loc_result, size);
11917 ++reg;
11918 }
11919 return loc_result;
11920 }
11921
11922 /* Now onto stupid register sets in non-contiguous locations. */
11923
11924 gcc_assert (GET_CODE (regs) == PARALLEL);
11925
11926 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
11927 loc_result = NULL;
11928
11929 for (i = 0; i < XVECLEN (regs, 0); ++i)
11930 {
11931 dw_loc_descr_ref t;
11932
11933 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
11934 VAR_INIT_STATUS_INITIALIZED);
11935 add_loc_descr (&loc_result, t);
11936 add_loc_descr_op_piece (&loc_result, size);
11937 }
11938
11939 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
11940 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11941 return loc_result;
11942 }
11943
11944 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
11945
11946 /* Return a location descriptor that designates a constant i,
11947 as a compound operation from constant (i >> shift), constant shift
11948 and DW_OP_shl. */
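/* Illustrative example (values chosen here, not from the original comment):
   for i == ((HOST_WIDE_INT) 0x12 << 40) and shift == 40 this emits
   DW_OP_lit18 DW_OP_const1u 40 DW_OP_shl, i.e. 4 bytes, whereas the uleb128
   operand of a plain DW_OP_constu would alone need 7 bytes. */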
11949
11950 static dw_loc_descr_ref
11951 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
11952 {
11953 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
11954 add_loc_descr (&ret, int_loc_descriptor (shift));
11955 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
11956 return ret;
11957 }
11958
11959 /* Return a location descriptor that designates a constant. */
11960
11961 static dw_loc_descr_ref
11962 int_loc_descriptor (HOST_WIDE_INT i)
11963 {
11964 enum dwarf_location_atom op;
11965
11966 /* Pick the smallest representation of a constant, rather than just
11967 defaulting to the LEB encoding. */
11968 if (i >= 0)
11969 {
11970 int clz = clz_hwi (i);
11971 int ctz = ctz_hwi (i);
11972 if (i <= 31)
11973 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
11974 else if (i <= 0xff)
11975 op = DW_OP_const1u;
11976 else if (i <= 0xffff)
11977 op = DW_OP_const2u;
11978 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
11979 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
11980 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
11981 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
11982 while DW_OP_const4u is 5 bytes. */
11983 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
11984 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
11985 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
11986 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
11987 while DW_OP_const4u is 5 bytes. */
11988 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
11989
11990 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
11991 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
11992 <= 4)
11993 {
11994 /* As i >= 2**31, the double cast above will yield a negative number.
11995 Since wrapping is defined in DWARF expressions we can output big
11996 positive integers as small negative ones, regardless of the size
11997 of host wide ints.
11998
11999 Here, since the evaluator will handle 32-bit values and since i >=
12000 2**31, we know it's going to be interpreted as a negative literal:
12001 store it this way if we can do better than 5 bytes this way. */
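/* Illustrative example (assuming DWARF2_ADDR_SIZE == 4): i == 0xffffffff
   becomes int_loc_descriptor (-1), i.e. DW_OP_const1s -1 (2 bytes),
   instead of DW_OP_const4u 0xffffffff (5 bytes). */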
12002 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
12003 }
12004 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12005 op = DW_OP_const4u;
12006
12007 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
12008 least 6 bytes: see if we can do better before falling back to it. */
12009 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12010 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
12011 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
12012 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
12013 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
12014 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
12015 >= HOST_BITS_PER_WIDE_INT)
12016 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
12017 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
12018 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
12019 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
12020 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
12021 && size_of_uleb128 (i) > 6)
12022 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
12023 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
12024 else
12025 op = DW_OP_constu;
12026 }
12027 else
12028 {
12029 if (i >= -0x80)
12030 op = DW_OP_const1s;
12031 else if (i >= -0x8000)
12032 op = DW_OP_const2s;
12033 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
12034 {
12035 if (size_of_int_loc_descriptor (i) < 5)
12036 {
12037 dw_loc_descr_ref ret = int_loc_descriptor (-i);
12038 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
12039 return ret;
12040 }
12041 op = DW_OP_const4s;
12042 }
12043 else
12044 {
12045 if (size_of_int_loc_descriptor (i)
12046 < (unsigned long) 1 + size_of_sleb128 (i))
12047 {
12048 dw_loc_descr_ref ret = int_loc_descriptor (-i);
12049 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
12050 return ret;
12051 }
12052 op = DW_OP_consts;
12053 }
12054 }
12055
12056 return new_loc_descr (op, i, 0);
12057 }
12058
12059 /* Likewise, for unsigned constants. */
12060
12061 static dw_loc_descr_ref
12062 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
12063 {
12064 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
12065 const unsigned HOST_WIDE_INT max_uint
12066 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
12067
12068 /* If possible, use the clever signed constants handling. */
12069 if (i <= max_int)
12070 return int_loc_descriptor ((HOST_WIDE_INT) i);
12071
12072 /* Here, we are left with positive numbers that cannot be represented as
12073 HOST_WIDE_INT, i.e.:
12074 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
12075
12076 Using DW_OP_const4u/8u operations to encode them consumes a lot of bytes,
12077 whereas it may be better to output a negative integer: thanks to integer
12078 wrapping, we know that:
12079 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
12080 = x - 2 * (max (HOST_WIDE_INT) + 1)
12081 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
12082 small negative integers. Let's try that in cases where it will clearly
12083 improve the encoding: there is no gain turning DW_OP_const4u into
12084 DW_OP_const4s. */
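/* Worked example (assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT): i == 0xffffffffffffffff gives second_shift == -1 below,
   emitted as DW_OP_const1s -1 (2 bytes) instead of DW_OP_const8u
   (9 bytes). */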
12085 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
12086 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
12087 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
12088 {
12089 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
12090
12091 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
12092 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
12093 const HOST_WIDE_INT second_shift
12094 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
12095
12096 /* So we finally have:
12097 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
12098 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
12099 return int_loc_descriptor (second_shift);
12100 }
12101
12102 /* Last chance: fallback to a simple constant operation. */
12103 return new_loc_descr
12104 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12105 ? DW_OP_const4u
12106 : DW_OP_const8u,
12107 i, 0);
12108 }
12109
12110 /* Generate and return a location description that computes the unsigned
12111 comparison of the two stack top entries (a OP b where b is the top-most
12112 entry and a is the second one). The KIND of comparison can be LT_EXPR,
12113 LE_EXPR, GT_EXPR or GE_EXPR. */
12114
12115 static dw_loc_descr_ref
12116 uint_comparison_loc_list (enum tree_code kind)
12117 {
12118 enum dwarf_location_atom op, flip_op;
12119 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
12120
12121 switch (kind)
12122 {
12123 case LT_EXPR:
12124 op = DW_OP_lt;
12125 break;
12126 case LE_EXPR:
12127 op = DW_OP_le;
12128 break;
12129 case GT_EXPR:
12130 op = DW_OP_gt;
12131 break;
12132 case GE_EXPR:
12133 op = DW_OP_ge;
12134 break;
12135 default:
12136 gcc_unreachable ();
12137 }
12138
12139 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12140 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
12141
12142 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
12143 possible to perform unsigned comparisons: we just have to distinguish
12144 the following cases:
12145
12146 1. when a and b have the same sign (as signed integers); then we should
12147 return: a OP(signed) b;
12148
12149 2. when a is a negative signed integer while b is a positive one, then a
12150 is a greater unsigned integer than b; likewise when a and b's roles
12151 are flipped.
12152
12153 So first, compare the sign of the two operands. */
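/* Schematically, the expression built below is:
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <flip>
     <signed op> DW_OP_skip <join> <flip>: <flipped signed op>
     <join>: DW_OP_nop */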
12154 ret = new_loc_descr (DW_OP_over, 0, 0);
12155 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
12156 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
12157 /* If they have different signs (i.e. they have different sign bits), then
12158 the stack top value has now the sign bit set and thus it's smaller than
12159 zero. */
12160 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
12161 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
12162 add_loc_descr (&ret, bra_node);
12163
12164 /* We are in case 1. At this point, we know both operands have the same
12165 sign, so it's safe to use the built-in signed comparison. */
12166 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12167 add_loc_descr (&ret, jmp_node);
12168
12169 /* We are in case 2. Here, we know both operands do not have the same sign,
12170 so we have to flip the signed comparison. */
12171 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
12172 tmp = new_loc_descr (flip_op, 0, 0);
12173 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12174 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
12175 add_loc_descr (&ret, tmp);
12176
12177 /* This dummy operation is necessary to make the two branches join. */
12178 tmp = new_loc_descr (DW_OP_nop, 0, 0);
12179 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12180 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
12181 add_loc_descr (&ret, tmp);
12182
12183 return ret;
12184 }
12185
12186 /* Likewise, but takes the location description lists (might be destructive on
12187 them). Return NULL if either is NULL or if concatenation fails. */
12188
12189 static dw_loc_list_ref
12190 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
12191 enum tree_code kind)
12192 {
12193 if (left == NULL || right == NULL)
12194 return NULL;
12195
12196 add_loc_list (&left, right);
12197 if (left == NULL)
12198 return NULL;
12199
12200 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
12201 return left;
12202 }
12203
12204 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
12205 without actually allocating it. */
12206
12207 static unsigned long
12208 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
12209 {
12210 return size_of_int_loc_descriptor (i >> shift)
12211 + size_of_int_loc_descriptor (shift)
12212 + 1;
12213 }
12214
12215 /* Return size_of_locs (int_loc_descriptor (i)) without
12216 actually allocating it. */
12217
12218 static unsigned long
12219 size_of_int_loc_descriptor (HOST_WIDE_INT i)
12220 {
12221 unsigned long s;
12222
12223 if (i >= 0)
12224 {
12225 int clz, ctz;
12226 if (i <= 31)
12227 return 1;
12228 else if (i <= 0xff)
12229 return 2;
12230 else if (i <= 0xffff)
12231 return 3;
12232 clz = clz_hwi (i);
12233 ctz = ctz_hwi (i);
12234 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
12235 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
12236 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12237 - clz - 5);
12238 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12239 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
12240 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12241 - clz - 8);
12242 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
12243 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
12244 <= 4)
12245 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
12246 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12247 return 5;
12248 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
12249 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12250 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
12251 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12252 - clz - 8);
12253 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
12254 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
12255 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12256 - clz - 16);
12257 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
12258 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
12259 && s > 6)
12260 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12261 - clz - 32);
12262 else
12263 return 1 + s;
12264 }
12265 else
12266 {
12267 if (i >= -0x80)
12268 return 2;
12269 else if (i >= -0x8000)
12270 return 3;
12271 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
12272 {
12273 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12274 {
12275 s = size_of_int_loc_descriptor (-i) + 1;
12276 if (s < 5)
12277 return s;
12278 }
12279 return 5;
12280 }
12281 else
12282 {
12283 unsigned long r = 1 + size_of_sleb128 (i);
12284 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12285 {
12286 s = size_of_int_loc_descriptor (-i) + 1;
12287 if (s < r)
12288 return s;
12289 }
12290 return r;
12291 }
12292 }
12293 }
12294
12295 /* Return a location description representing the "address" of an integer
12296 value. This can appear only as a top-level expression. */
12297
12298 static dw_loc_descr_ref
12299 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
12300 {
12301 int litsize;
12302 dw_loc_descr_ref loc_result = NULL;
12303
12304 if (!(dwarf_version >= 4 || !dwarf_strict))
12305 return NULL;
12306
12307 litsize = size_of_int_loc_descriptor (i);
12308 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
12309 is more compact. For DW_OP_stack_value we need:
12310 litsize + 1 (DW_OP_stack_value)
12311 and for DW_OP_implicit_value:
12312 1 (DW_OP_implicit_value) + 1 (length) + size. */
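/* Illustrative example (assuming DWARF2_ADDR_SIZE >= 4): for size == 4 and
   i == 5, litsize is 1, so DW_OP_lit5 DW_OP_stack_value (2 bytes) is chosen
   over DW_OP_implicit_value 4 <4 data bytes> (6 bytes). */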
12313 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
12314 {
12315 loc_result = int_loc_descriptor (i);
12316 add_loc_descr (&loc_result,
12317 new_loc_descr (DW_OP_stack_value, 0, 0));
12318 return loc_result;
12319 }
12320
12321 loc_result = new_loc_descr (DW_OP_implicit_value,
12322 size, 0);
12323 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
12324 loc_result->dw_loc_oprnd2.v.val_int = i;
12325 return loc_result;
12326 }
12327
12328 /* Return a location descriptor that designates a base+offset location. */
12329
12330 static dw_loc_descr_ref
12331 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
12332 enum var_init_status initialized)
12333 {
12334 unsigned int regno;
12335 dw_loc_descr_ref result;
12336 dw_fde_ref fde = cfun->fde;
12337
12338 /* We only use "frame base" when we're sure we're talking about the
12339 post-prologue local stack frame. We do this by *not* running
12340 register elimination until this point, and recognizing the special
12341 argument pointer and soft frame pointer rtx's. */
12342 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
12343 {
12344 rtx elim = (ira_use_lra_p
12345 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
12346 : eliminate_regs (reg, VOIDmode, NULL_RTX));
12347
12348 if (elim != reg)
12349 {
12350 if (GET_CODE (elim) == PLUS)
12351 {
12352 offset += INTVAL (XEXP (elim, 1));
12353 elim = XEXP (elim, 0);
12354 }
12355 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
12356 && (elim == hard_frame_pointer_rtx
12357 || elim == stack_pointer_rtx))
12358 || elim == (frame_pointer_needed
12359 ? hard_frame_pointer_rtx
12360 : stack_pointer_rtx));
12361
12362 /* If drap register is used to align stack, use frame
12363 pointer + offset to access stack variables. If stack
12364 is aligned without drap, use stack pointer + offset to
12365 access stack variables. */
12366 if (crtl->stack_realign_tried
12367 && reg == frame_pointer_rtx)
12368 {
12369 int base_reg
12370 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
12371 ? HARD_FRAME_POINTER_REGNUM
12372 : REGNO (elim));
12373 return new_reg_loc_descr (base_reg, offset);
12374 }
12375
12376 gcc_assert (frame_pointer_fb_offset_valid);
12377 offset += frame_pointer_fb_offset;
12378 return new_loc_descr (DW_OP_fbreg, offset, 0);
12379 }
12380 }
12381
12382 regno = REGNO (reg);
12383 #ifdef LEAF_REG_REMAP
12384 if (crtl->uses_only_leaf_regs)
12385 {
12386 int leaf_reg = LEAF_REG_REMAP (regno);
12387 if (leaf_reg != -1)
12388 regno = (unsigned) leaf_reg;
12389 }
12390 #endif
12391 regno = DWARF_FRAME_REGNUM (regno);
12392
12393 if (!optimize && fde
12394 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
12395 {
12396 /* Use cfa+offset to represent the location of arguments passed
12397 on the stack when drap is used to align the stack.
12398 Only do this when not optimizing; for optimized code, var-tracking
12399 is supposed to track where the arguments live, and the register
12400 used as vdrap or drap in some spot might be used for something
12401 else in another part of the routine. */
12402 return new_loc_descr (DW_OP_fbreg, offset, 0);
12403 }
12404
12405 if (regno <= 31)
12406 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
12407 offset, 0);
12408 else
12409 result = new_loc_descr (DW_OP_bregx, regno, offset);
12410
12411 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
12412 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
12413
12414 return result;
12415 }
12416
12417 /* Return true if this RTL expression describes a base+offset calculation. */
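/* E.g. (plus (reg 6) (const_int -16)) on a target where register 6 is a hard
   register: a base register plus a constant offset (illustrative). */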
12418
12419 static inline int
12420 is_based_loc (const_rtx rtl)
12421 {
12422 return (GET_CODE (rtl) == PLUS
12423 && ((REG_P (XEXP (rtl, 0))
12424 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
12425 && CONST_INT_P (XEXP (rtl, 1)))));
12426 }
12427
12428 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
12429 failed. */
12430
12431 static dw_loc_descr_ref
12432 tls_mem_loc_descriptor (rtx mem)
12433 {
12434 tree base;
12435 dw_loc_descr_ref loc_result;
12436
12437 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
12438 return NULL;
12439
12440 base = get_base_address (MEM_EXPR (mem));
12441 if (base == NULL
12442 || !VAR_P (base)
12443 || !DECL_THREAD_LOCAL_P (base))
12444 return NULL;
12445
12446 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
12447 if (loc_result == NULL)
12448 return NULL;
12449
12450 if (MEM_OFFSET (mem))
12451 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
12452
12453 return loc_result;
12454 }
12455
12456 /* Output debug info about the reason why we failed to expand an expression
12457 as a DWARF expression. */
12458
12459 static void
12460 expansion_failed (tree expr, rtx rtl, char const *reason)
12461 {
12462 if (dump_file && (dump_flags & TDF_DETAILS))
12463 {
12464 fprintf (dump_file, "Failed to expand as dwarf: ");
12465 if (expr)
12466 print_generic_expr (dump_file, expr, dump_flags);
12467 if (rtl)
12468 {
12469 fprintf (dump_file, "\n");
12470 print_rtl (dump_file, rtl);
12471 }
12472 fprintf (dump_file, "\nReason: %s\n", reason);
12473 }
12474 }
12475
12476 /* Helper function for const_ok_for_output. */
12477
12478 static bool
12479 const_ok_for_output_1 (rtx rtl)
12480 {
12481 if (GET_CODE (rtl) == UNSPEC)
12482 {
12483 /* If delegitimize_address couldn't do anything with the UNSPEC, assume
12484 we can't express it in the debug info. */
12485 /* Don't complain about TLS UNSPECs, those are just too hard to
12486 delegitimize. Note this could be a non-decl SYMBOL_REF such as
12487 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
12488 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
12489 if (flag_checking
12490 && (XVECLEN (rtl, 0) == 0
12491 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
12492 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
12493 inform (current_function_decl
12494 ? DECL_SOURCE_LOCATION (current_function_decl)
12495 : UNKNOWN_LOCATION,
12496 #if NUM_UNSPEC_VALUES > 0
12497 "non-delegitimized UNSPEC %s (%d) found in variable location",
12498 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
12499 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
12500 XINT (rtl, 1));
12501 #else
12502 "non-delegitimized UNSPEC %d found in variable location",
12503 XINT (rtl, 1));
12504 #endif
12505 expansion_failed (NULL_TREE, rtl,
12506 "UNSPEC hasn't been delegitimized.\n");
12507 return false;
12508 }
12509
12510 if (targetm.const_not_ok_for_debug_p (rtl))
12511 {
12512 expansion_failed (NULL_TREE, rtl,
12513 "Expression rejected for debug by the backend.\n");
12514 return false;
12515 }
12516
12517 /* FIXME: Refer to PR60655. It is possible for simplification
12518 of rtl expressions in var tracking to produce such expressions.
12519 We should really identify / validate expressions
12520 enclosed in CONST that can be handled by assemblers on various
12521 targets and only handle legitimate cases here. */
12522 if (GET_CODE (rtl) != SYMBOL_REF)
12523 {
12524 if (GET_CODE (rtl) == NOT)
12525 return false;
12526 return true;
12527 }
12528
12529 if (CONSTANT_POOL_ADDRESS_P (rtl))
12530 {
12531 bool marked;
12532 get_pool_constant_mark (rtl, &marked);
12533 /* If all references to this pool constant were optimized away,
12534 it was not output and thus we can't represent it. */
12535 if (!marked)
12536 {
12537 expansion_failed (NULL_TREE, rtl,
12538 "Constant was removed from constant pool.\n");
12539 return false;
12540 }
12541 }
12542
12543 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
12544 return false;
12545
12546 /* Avoid references to external symbols in debug info; on several targets
12547 the linker might even refuse to link when linking a shared library,
12548 and in many other cases the relocations for .debug_info/.debug_loc are
12549 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
12550 to be defined within the same shared library or executable, are fine. */
12551 if (SYMBOL_REF_EXTERNAL_P (rtl))
12552 {
12553 tree decl = SYMBOL_REF_DECL (rtl);
12554
12555 if (decl == NULL || !targetm.binds_local_p (decl))
12556 {
12557 expansion_failed (NULL_TREE, rtl,
12558 "Symbol not defined in current TU.\n");
12559 return false;
12560 }
12561 }
12562
12563 return true;
12564 }
12565
12566 /* Return true if constant RTL can be emitted in DW_OP_addr or
12567 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
12568 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
12569
12570 static bool
12571 const_ok_for_output (rtx rtl)
12572 {
12573 if (GET_CODE (rtl) == SYMBOL_REF)
12574 return const_ok_for_output_1 (rtl);
12575
12576 if (GET_CODE (rtl) == CONST)
12577 {
12578 subrtx_var_iterator::array_type array;
12579 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
12580 if (!const_ok_for_output_1 (*iter))
12581 return false;
12582 return true;
12583 }
12584
12585 return true;
12586 }
12587
12588 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
12589 if possible, NULL otherwise. */
12590
12591 static dw_die_ref
12592 base_type_for_mode (machine_mode mode, bool unsignedp)
12593 {
12594 dw_die_ref type_die;
12595 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
12596
12597 if (type == NULL)
12598 return NULL;
12599 switch (TREE_CODE (type))
12600 {
12601 case INTEGER_TYPE:
12602 case REAL_TYPE:
12603 break;
12604 default:
12605 return NULL;
12606 }
12607 type_die = lookup_type_die (type);
12608 if (!type_die)
12609 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
12610 comp_unit_die ());
12611 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
12612 return NULL;
12613 return type_die;
12614 }
12615
12616 /* For the descriptor OP, assumed to be computed in unsigned MODE, convert it
12617 to an unsigned type matching MODE, or, if MODE is narrower than or as wide
12618 as DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
12619 not possible. */
12620
12621 static dw_loc_descr_ref
12622 convert_descriptor_to_mode (machine_mode mode, dw_loc_descr_ref op)
12623 {
12624 machine_mode outer_mode = mode;
12625 dw_die_ref type_die;
12626 dw_loc_descr_ref cvt;
12627
12628 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
12629 {
12630 add_loc_descr (&op, new_loc_descr (DW_OP_GNU_convert, 0, 0));
12631 return op;
12632 }
12633 type_die = base_type_for_mode (outer_mode, 1);
12634 if (type_die == NULL)
12635 return NULL;
12636 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12637 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12638 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12639 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12640 add_loc_descr (&op, cvt);
12641 return op;
12642 }
12643
12644 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
12645
12646 static dw_loc_descr_ref
12647 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
12648 dw_loc_descr_ref op1)
12649 {
12650 dw_loc_descr_ref ret = op0;
12651 add_loc_descr (&ret, op1);
12652 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
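/* The DWARF comparison operators push 1 for true and 0 for false.  If the
   target's comparison insns evaluate to some other STORE_FLAG_VALUE when
   true, scale the result so the location expression yields the same value
   the comparison RTX would.  */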
12653 if (STORE_FLAG_VALUE != 1)
12654 {
12655 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
12656 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
12657 }
12658 return ret;
12659 }
12660
12661 /* Return location descriptor for signed comparison OP RTL. */
12662
12663 static dw_loc_descr_ref
12664 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12665 machine_mode mem_mode)
12666 {
12667 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12668 dw_loc_descr_ref op0, op1;
12669 int shift;
12670
12671 if (op_mode == VOIDmode)
12672 op_mode = GET_MODE (XEXP (rtl, 1));
12673 if (op_mode == VOIDmode)
12674 return NULL;
12675
12676 if (dwarf_strict
12677 && (!SCALAR_INT_MODE_P (op_mode)
12678 || GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE))
12679 return NULL;
12680
12681 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12682 VAR_INIT_STATUS_INITIALIZED);
12683 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12684 VAR_INIT_STATUS_INITIALIZED);
12685
12686 if (op0 == NULL || op1 == NULL)
12687 return NULL;
12688
12689 if (!SCALAR_INT_MODE_P (op_mode)
12690 || GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12691 return compare_loc_descriptor (op, op0, op1);
12692
12693 if (GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12694 {
12695 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
12696 dw_loc_descr_ref cvt;
12697
12698 if (type_die == NULL)
12699 return NULL;
12700 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12701 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12702 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12703 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12704 add_loc_descr (&op0, cvt);
12705 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12706 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12707 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12708 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12709 add_loc_descr (&op1, cvt);
12710 return compare_loc_descriptor (op, op0, op1);
12711 }
12712
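/* Operands narrower than DWARF2_ADDR_SIZE sit zero extended in the DWARF
   stack slots, while DW_OP_lt and friends compare the full slot as a signed
   value.  Shifting both operands left so the mode's sign bit lands in the
   slot's sign bit makes the slot-wide signed comparison order them as
   MODE-signed values; a constant second operand gets the shift folded in
   below.  */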
12713 shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
12714 /* For eq/ne, if the operands are known to be zero-extended,
12715 there is no need to do the fancy shifting up. */
12716 if (op == DW_OP_eq || op == DW_OP_ne)
12717 {
12718 dw_loc_descr_ref last0, last1;
12719 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12720 ;
12721 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12722 ;
12723 /* deref_size zero extends, and for constants we can check
12724 whether they are zero extended or not. */
12725 if (((last0->dw_loc_opc == DW_OP_deref_size
12726 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12727 || (CONST_INT_P (XEXP (rtl, 0))
12728 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
12729 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
12730 && ((last1->dw_loc_opc == DW_OP_deref_size
12731 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12732 || (CONST_INT_P (XEXP (rtl, 1))
12733 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
12734 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
12735 return compare_loc_descriptor (op, op0, op1);
12736
12737 /* EQ/NE comparison against constant in narrower type than
12738 DWARF2_ADDR_SIZE can be performed either as
12739 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
12740 DW_OP_{eq,ne}
12741 or
12742 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
12743 DW_OP_{eq,ne}. Pick whatever is shorter. */
12744 if (CONST_INT_P (XEXP (rtl, 1))
12745 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
12746 && (size_of_int_loc_descriptor (shift) + 1
12747 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift)
12748 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
12749 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12750 & GET_MODE_MASK (op_mode))))
12751 {
12752 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
12753 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12754 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12755 & GET_MODE_MASK (op_mode));
12756 return compare_loc_descriptor (op, op0, op1);
12757 }
12758 }
12759 add_loc_descr (&op0, int_loc_descriptor (shift));
12760 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12761 if (CONST_INT_P (XEXP (rtl, 1)))
12762 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
12763 else
12764 {
12765 add_loc_descr (&op1, int_loc_descriptor (shift));
12766 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12767 }
12768 return compare_loc_descriptor (op, op0, op1);
12769 }
12770
12771 /* Return location descriptor for unsigned comparison OP RTL. */
12772
12773 static dw_loc_descr_ref
12774 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12775 machine_mode mem_mode)
12776 {
12777 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12778 dw_loc_descr_ref op0, op1;
12779
12780 if (op_mode == VOIDmode)
12781 op_mode = GET_MODE (XEXP (rtl, 1));
12782 if (op_mode == VOIDmode)
12783 return NULL;
12784 if (!SCALAR_INT_MODE_P (op_mode))
12785 return NULL;
12786
12787 if (dwarf_strict && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12788 return NULL;
12789
12790 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12791 VAR_INIT_STATUS_INITIALIZED);
12792 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12793 VAR_INIT_STATUS_INITIALIZED);
12794
12795 if (op0 == NULL || op1 == NULL)
12796 return NULL;
12797
12798 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
12799 {
12800 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
12801 dw_loc_descr_ref last0, last1;
12802 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12803 ;
12804 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12805 ;
12806 if (CONST_INT_P (XEXP (rtl, 0)))
12807 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
12808 /* deref_size zero extends, so no need to mask it again. */
12809 else if (last0->dw_loc_opc != DW_OP_deref_size
12810 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12811 {
12812 add_loc_descr (&op0, int_loc_descriptor (mask));
12813 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12814 }
12815 if (CONST_INT_P (XEXP (rtl, 1)))
12816 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
12817 /* deref_size zero extends, so no need to mask it again. */
12818 else if (last1->dw_loc_opc != DW_OP_deref_size
12819 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12820 {
12821 add_loc_descr (&op1, int_loc_descriptor (mask));
12822 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12823 }
12824 }
12825 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12826 {
12827 HOST_WIDE_INT bias = 1;
12828 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12829 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12830 if (CONST_INT_P (XEXP (rtl, 1)))
12831 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
12832 + INTVAL (XEXP (rtl, 1)));
12833 else
12834 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
12835 bias, 0));
12836 }
12837 return compare_loc_descriptor (op, op0, op1);
12838 }
12839
12840 /* Return location descriptor for {U,S}{MIN,MAX}. */
12841
12842 static dw_loc_descr_ref
12843 minmax_loc_descriptor (rtx rtl, machine_mode mode,
12844 machine_mode mem_mode)
12845 {
12846 enum dwarf_location_atom op;
12847 dw_loc_descr_ref op0, op1, ret;
12848 dw_loc_descr_ref bra_node, drop_node;
12849
12850 if (dwarf_strict
12851 && (!SCALAR_INT_MODE_P (mode)
12852 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE))
12853 return NULL;
12854
12855 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12856 VAR_INIT_STATUS_INITIALIZED);
12857 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12858 VAR_INIT_STATUS_INITIALIZED);
12859
12860 if (op0 == NULL || op1 == NULL)
12861 return NULL;
12862
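/* Lay out the stack as a, b, a', b', where a' and b' are copies that may be
   masked, biased or converted below.  The comparison consumes the two
   copies; DW_OP_bra then either jumps straight to the DW_OP_drop (keeping a)
   or falls through the DW_OP_swap first (keeping b), so the selected
   original value is left on the stack.  */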
12863 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
12864 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
12865 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
12866 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
12867 {
12868 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12869 {
12870 HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12871 add_loc_descr (&op0, int_loc_descriptor (mask));
12872 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12873 add_loc_descr (&op1, int_loc_descriptor (mask));
12874 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12875 }
12876 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
12877 {
12878 HOST_WIDE_INT bias = 1;
12879 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12880 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12881 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12882 }
12883 }
12884 else if (!SCALAR_INT_MODE_P (mode)
12885 && GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12886 {
12887 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode)) * BITS_PER_UNIT;
12888 add_loc_descr (&op0, int_loc_descriptor (shift));
12889 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12890 add_loc_descr (&op1, int_loc_descriptor (shift));
12891 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12892 }
12893 else if (SCALAR_INT_MODE_P (mode)
12894 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12895 {
12896 dw_die_ref type_die = base_type_for_mode (mode, 0);
12897 dw_loc_descr_ref cvt;
12898 if (type_die == NULL)
12899 return NULL;
12900 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12901 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12902 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12903 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12904 add_loc_descr (&op0, cvt);
12905 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12906 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12907 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12908 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12909 add_loc_descr (&op1, cvt);
12910 }
12911
12912 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
12913 op = DW_OP_lt;
12914 else
12915 op = DW_OP_gt;
12916 ret = op0;
12917 add_loc_descr (&ret, op1);
12918 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12919 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12920 add_loc_descr (&ret, bra_node);
12921 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
12922 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
12923 add_loc_descr (&ret, drop_node);
12924 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12925 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
12926 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
12927 && SCALAR_INT_MODE_P (mode)
12928 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12929 ret = convert_descriptor_to_mode (mode, ret);
12930 return ret;
12931 }
12932
12933 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
12934 after converting both arguments to TYPE_DIE, then convert the result
12935 back to an unsigned (or untyped) value matching MODE. */
12936
12937 static dw_loc_descr_ref
12938 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
12939 machine_mode mode, machine_mode mem_mode)
12940 {
12941 dw_loc_descr_ref cvt, op0, op1;
12942
12943 if (type_die == NULL)
12944 return NULL;
12945 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12946 VAR_INIT_STATUS_INITIALIZED);
12947 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12948 VAR_INIT_STATUS_INITIALIZED);
12949 if (op0 == NULL || op1 == NULL)
12950 return NULL;
12951 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12952 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12953 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12954 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12955 add_loc_descr (&op0, cvt);
12956 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12957 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12958 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12959 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12960 add_loc_descr (&op1, cvt);
12961 add_loc_descr (&op0, op1);
12962 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
12963 return convert_descriptor_to_mode (mode, op0);
12964 }
12965
12966 /* CLZ (where constV is the CLZ_DEFINED_VALUE_AT_ZERO computed value,
12967 const0 is DW_OP_lit0 or the corresponding typed constant,
12968 const1 is DW_OP_lit1 or the corresponding typed constant,
12969 and constMSB is a constant with just the MSB bit set
12970 for the mode):
12971 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12972 L1: const0 DW_OP_swap
12973 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
12974 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12975 L3: DW_OP_drop
12976 L4: DW_OP_nop
12977
12978 CTZ is similar:
12979 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12980 L1: const0 DW_OP_swap
12981 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12982 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12983 L3: DW_OP_drop
12984 L4: DW_OP_nop
12985
12986 FFS is similar:
12987 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
12988 L1: const1 DW_OP_swap
12989 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12990 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12991 L3: DW_OP_drop
12992 L4: DW_OP_nop */
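/* In all three sequences the initial DW_OP_dup/DW_OP_bra pair special-cases
   a zero operand: the value is dropped and the precomputed constV (const0
   for FFS) is pushed instead.  Otherwise the loop keeps the count below the
   value on the stack, testing the MSB (CLZ) or the low bit (CTZ, FFS) each
   pass; while the tested bit is clear the value is shifted by one and
   DW_OP_plus_uconst bumps the count.  The final DW_OP_drop discards the
   shifted value, leaving the count (FFS starts the count at one, so the
   result is 1-based).  */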
12993
12994 static dw_loc_descr_ref
12995 clz_loc_descriptor (rtx rtl, machine_mode mode,
12996 machine_mode mem_mode)
12997 {
12998 dw_loc_descr_ref op0, ret, tmp;
12999 HOST_WIDE_INT valv;
13000 dw_loc_descr_ref l1jump, l1label;
13001 dw_loc_descr_ref l2jump, l2label;
13002 dw_loc_descr_ref l3jump, l3label;
13003 dw_loc_descr_ref l4jump, l4label;
13004 rtx msb;
13005
13006 if (!SCALAR_INT_MODE_P (mode)
13007 || GET_MODE (XEXP (rtl, 0)) != mode)
13008 return NULL;
13009
13010 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13011 VAR_INIT_STATUS_INITIALIZED);
13012 if (op0 == NULL)
13013 return NULL;
13014 ret = op0;
13015 if (GET_CODE (rtl) == CLZ)
13016 {
13017 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
13018 valv = GET_MODE_BITSIZE (mode);
13019 }
13020 else if (GET_CODE (rtl) == FFS)
13021 valv = 0;
13022 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
13023 valv = GET_MODE_BITSIZE (mode);
13024 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13025 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
13026 add_loc_descr (&ret, l1jump);
13027 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
13028 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
13029 VAR_INIT_STATUS_INITIALIZED);
13030 if (tmp == NULL)
13031 return NULL;
13032 add_loc_descr (&ret, tmp);
13033 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
13034 add_loc_descr (&ret, l4jump);
13035 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
13036 ? const1_rtx : const0_rtx,
13037 mode, mem_mode,
13038 VAR_INIT_STATUS_INITIALIZED);
13039 if (l1label == NULL)
13040 return NULL;
13041 add_loc_descr (&ret, l1label);
13042 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13043 l2label = new_loc_descr (DW_OP_dup, 0, 0);
13044 add_loc_descr (&ret, l2label);
13045 if (GET_CODE (rtl) != CLZ)
13046 msb = const1_rtx;
13047 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
13048 msb = GEN_INT (HOST_WIDE_INT_1U
13049 << (GET_MODE_BITSIZE (mode) - 1));
13050 else
13051 msb = immed_wide_int_const
13052 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
13053 GET_MODE_PRECISION (mode)), mode);
13054 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
13055 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
13056 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
13057 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
13058 else
13059 tmp = mem_loc_descriptor (msb, mode, mem_mode,
13060 VAR_INIT_STATUS_INITIALIZED);
13061 if (tmp == NULL)
13062 return NULL;
13063 add_loc_descr (&ret, tmp);
13064 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13065 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
13066 add_loc_descr (&ret, l3jump);
13067 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13068 VAR_INIT_STATUS_INITIALIZED);
13069 if (tmp == NULL)
13070 return NULL;
13071 add_loc_descr (&ret, tmp);
13072 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
13073 ? DW_OP_shl : DW_OP_shr, 0, 0));
13074 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13075 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
13076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13077 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
13078 add_loc_descr (&ret, l2jump);
13079 l3label = new_loc_descr (DW_OP_drop, 0, 0);
13080 add_loc_descr (&ret, l3label);
13081 l4label = new_loc_descr (DW_OP_nop, 0, 0);
13082 add_loc_descr (&ret, l4label);
13083 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13084 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13085 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13086 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13087 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13088 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
13089 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13090 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
13091 return ret;
13092 }
13093
13094 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
13095 const1 is DW_OP_lit1 or corresponding typed constant):
13096 const0 DW_OP_swap
13097 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13098 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13099 L2: DW_OP_drop
13100
13101 PARITY is similar:
13102 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13103 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13104 L2: DW_OP_drop */
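/* Both loops keep the running result below the remaining value on the
   stack.  Each pass adds the value's low bit (const1 DW_OP_and) to the
   result (DW_OP_xor for PARITY) and shifts the value right by one; the
   DW_OP_dup and DW_OP_rot just shuffle the stack so both the result and the
   value survive the arithmetic.  The DW_OP_drop at L2 discards the
   remaining value and leaves the result.  */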
13105
13106 static dw_loc_descr_ref
13107 popcount_loc_descriptor (rtx rtl, machine_mode mode,
13108 machine_mode mem_mode)
13109 {
13110 dw_loc_descr_ref op0, ret, tmp;
13111 dw_loc_descr_ref l1jump, l1label;
13112 dw_loc_descr_ref l2jump, l2label;
13113
13114 if (!SCALAR_INT_MODE_P (mode)
13115 || GET_MODE (XEXP (rtl, 0)) != mode)
13116 return NULL;
13117
13118 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13119 VAR_INIT_STATUS_INITIALIZED);
13120 if (op0 == NULL)
13121 return NULL;
13122 ret = op0;
13123 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13124 VAR_INIT_STATUS_INITIALIZED);
13125 if (tmp == NULL)
13126 return NULL;
13127 add_loc_descr (&ret, tmp);
13128 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13129 l1label = new_loc_descr (DW_OP_dup, 0, 0);
13130 add_loc_descr (&ret, l1label);
13131 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13132 add_loc_descr (&ret, l2jump);
13133 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13134 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13135 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13136 VAR_INIT_STATUS_INITIALIZED);
13137 if (tmp == NULL)
13138 return NULL;
13139 add_loc_descr (&ret, tmp);
13140 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13141 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
13142 ? DW_OP_plus : DW_OP_xor, 0, 0));
13143 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13144 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13145 VAR_INIT_STATUS_INITIALIZED);
13146 add_loc_descr (&ret, tmp);
13147 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13148 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13149 add_loc_descr (&ret, l1jump);
13150 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13151 add_loc_descr (&ret, l2label);
13152 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13153 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13154 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13155 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13156 return ret;
13157 }
13158
13159 /* BSWAP (constS is initial shift count, either 56 or 24):
13160 constS const0
13161 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
13162 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
13163 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
13164 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
13165 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
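/* The loop keeps the original value, the shift counter S and the partial
   result on the stack.  Each pass extracts the byte of the value at bit
   position constS - S, ors it into the result at bit position S, and then
   decreases S by eight; S runs from constS (bitsize - 8) down to zero, so
   the byte order is reversed.  The final DW_OP_drop/DW_OP_swap/DW_OP_drop
   discards the counter and the original value, leaving the result.  */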
13166
13167 static dw_loc_descr_ref
13168 bswap_loc_descriptor (rtx rtl, machine_mode mode,
13169 machine_mode mem_mode)
13170 {
13171 dw_loc_descr_ref op0, ret, tmp;
13172 dw_loc_descr_ref l1jump, l1label;
13173 dw_loc_descr_ref l2jump, l2label;
13174
13175 if (!SCALAR_INT_MODE_P (mode)
13176 || BITS_PER_UNIT != 8
13177 || (GET_MODE_BITSIZE (mode) != 32
13178 && GET_MODE_BITSIZE (mode) != 64))
13179 return NULL;
13180
13181 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13182 VAR_INIT_STATUS_INITIALIZED);
13183 if (op0 == NULL)
13184 return NULL;
13185
13186 ret = op0;
13187 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13188 mode, mem_mode,
13189 VAR_INIT_STATUS_INITIALIZED);
13190 if (tmp == NULL)
13191 return NULL;
13192 add_loc_descr (&ret, tmp);
13193 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13194 VAR_INIT_STATUS_INITIALIZED);
13195 if (tmp == NULL)
13196 return NULL;
13197 add_loc_descr (&ret, tmp);
13198 l1label = new_loc_descr (DW_OP_pick, 2, 0);
13199 add_loc_descr (&ret, l1label);
13200 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13201 mode, mem_mode,
13202 VAR_INIT_STATUS_INITIALIZED);
13203 add_loc_descr (&ret, tmp);
13204 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
13205 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13206 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13207 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
13208 VAR_INIT_STATUS_INITIALIZED);
13209 if (tmp == NULL)
13210 return NULL;
13211 add_loc_descr (&ret, tmp);
13212 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13213 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
13214 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13215 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13216 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13217 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13218 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13219 VAR_INIT_STATUS_INITIALIZED);
13220 add_loc_descr (&ret, tmp);
13221 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
13222 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13223 add_loc_descr (&ret, l2jump);
13224 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
13225 VAR_INIT_STATUS_INITIALIZED);
13226 add_loc_descr (&ret, tmp);
13227 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13228 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13229 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13230 add_loc_descr (&ret, l1jump);
13231 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13232 add_loc_descr (&ret, l2label);
13233 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13234 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
13235 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13236 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13237 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13238 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13239 return ret;
13240 }
13241
13242 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
13243 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13244 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
13245 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
13246
13247 ROTATERT is similar:
13248 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
13249 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13250 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
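/* For ROTATE the two DW_OP_over copies of value and count feed a left shift
   (value << count); DW_OP_rot then exposes the originals so the other half
   can be computed as value >> (BITSIZE - count), and DW_OP_or combines the
   two halves.  ROTATERT swaps which half gets the negated count.  For modes
   narrower than the DWARF address size the optional constMASK DW_OP_and
   sequences truncate the intermediate values to the mode.  */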
13251
13252 static dw_loc_descr_ref
13253 rotate_loc_descriptor (rtx rtl, machine_mode mode,
13254 machine_mode mem_mode)
13255 {
13256 rtx rtlop1 = XEXP (rtl, 1);
13257 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
13258 int i;
13259
13260 if (!SCALAR_INT_MODE_P (mode))
13261 return NULL;
13262
13263 if (GET_MODE (rtlop1) != VOIDmode
13264 && GET_MODE_BITSIZE (GET_MODE (rtlop1)) < GET_MODE_BITSIZE (mode))
13265 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13266 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13267 VAR_INIT_STATUS_INITIALIZED);
13268 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13269 VAR_INIT_STATUS_INITIALIZED);
13270 if (op0 == NULL || op1 == NULL)
13271 return NULL;
13272 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
13273 for (i = 0; i < 2; i++)
13274 {
13275 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
13276 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
13277 mode, mem_mode,
13278 VAR_INIT_STATUS_INITIALIZED);
13279 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13280 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
13281 ? DW_OP_const4u
13282 : HOST_BITS_PER_WIDE_INT == 64
13283 ? DW_OP_const8u : DW_OP_constu,
13284 GET_MODE_MASK (mode), 0);
13285 else
13286 mask[i] = NULL;
13287 if (mask[i] == NULL)
13288 return NULL;
13289 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
13290 }
13291 ret = op0;
13292 add_loc_descr (&ret, op1);
13293 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13294 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13295 if (GET_CODE (rtl) == ROTATERT)
13296 {
13297 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13298 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13299 GET_MODE_BITSIZE (mode), 0));
13300 }
13301 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13302 if (mask[0] != NULL)
13303 add_loc_descr (&ret, mask[0]);
13304 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13305 if (mask[1] != NULL)
13306 {
13307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13308 add_loc_descr (&ret, mask[1]);
13309 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13310 }
13311 if (GET_CODE (rtl) == ROTATE)
13312 {
13313 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13314 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13315 GET_MODE_BITSIZE (mode), 0));
13316 }
13317 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13318 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13319 return ret;
13320 }
13321
13322 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
13323 for DEBUG_PARAMETER_REF RTL. */
13324
13325 static dw_loc_descr_ref
13326 parameter_ref_descriptor (rtx rtl)
13327 {
13328 dw_loc_descr_ref ret;
13329 dw_die_ref ref;
13330
13331 if (dwarf_strict)
13332 return NULL;
13333 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
13334 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
13335 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
13336 if (ref)
13337 {
13338 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13339 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
13340 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
13341 }
13342 else
13343 {
13344 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
13345 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
13346 }
13347 return ret;
13348 }
13349
13350 /* The following routine converts the RTL for a variable or parameter
13351 (resident in memory) into an equivalent Dwarf representation of a
13352 mechanism for getting the address of that same variable onto the top of a
13353 hypothetical "address evaluation" stack.
13354
13355 When creating memory location descriptors, we are effectively transforming
13356 the RTL for a memory-resident object into its Dwarf postfix expression
13357 equivalent. This routine recursively descends an RTL tree, turning
13358 it into Dwarf postfix code as it goes.
13359
13360 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
13361
13362 MEM_MODE is the mode of the memory reference, needed to handle some
13363 autoincrement addressing modes.
13364
13365 Return 0 if we can't represent the location. */
13366
13367 dw_loc_descr_ref
13368 mem_loc_descriptor (rtx rtl, machine_mode mode,
13369 machine_mode mem_mode,
13370 enum var_init_status initialized)
13371 {
13372 dw_loc_descr_ref mem_loc_result = NULL;
13373 enum dwarf_location_atom op;
13374 dw_loc_descr_ref op0, op1;
13375 rtx inner = NULL_RTX;
13376
13377 if (mode == VOIDmode)
13378 mode = GET_MODE (rtl);
13379
13380 /* Note that for a dynamically sized array, the location we will generate a
13381 description of here will be the lowest numbered location which is
13382 actually within the array. That's *not* necessarily the same as the
13383 zeroth element of the array. */
13384
13385 rtl = targetm.delegitimize_address (rtl);
13386
13387 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
13388 return NULL;
13389
13390 switch (GET_CODE (rtl))
13391 {
13392 case POST_INC:
13393 case POST_DEC:
13394 case POST_MODIFY:
13395 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
13396
13397 case SUBREG:
13398 /* The case of a subreg may arise when we have a local (register)
13399 variable or a formal (register) parameter which doesn't quite fill
13400 up an entire register. For now, just assume that it is
13401 legitimate to make the Dwarf info refer to the whole register which
13402 contains the given subreg. */
13403 if (!subreg_lowpart_p (rtl))
13404 break;
13405 inner = SUBREG_REG (rtl);
13406 /* FALLTHRU */
13407 case TRUNCATE:
13408 if (inner == NULL_RTX)
13409 inner = XEXP (rtl, 0);
13410 if (SCALAR_INT_MODE_P (mode)
13411 && SCALAR_INT_MODE_P (GET_MODE (inner))
13412 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13413 #ifdef POINTERS_EXTEND_UNSIGNED
13414 || (mode == Pmode && mem_mode != VOIDmode)
13415 #endif
13416 )
13417 && GET_MODE_SIZE (GET_MODE (inner)) <= DWARF2_ADDR_SIZE)
13418 {
13419 mem_loc_result = mem_loc_descriptor (inner,
13420 GET_MODE (inner),
13421 mem_mode, initialized);
13422 break;
13423 }
13424 if (dwarf_strict)
13425 break;
13426 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (inner)))
13427 break;
13428 if (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (inner))
13429 && (!SCALAR_INT_MODE_P (mode)
13430 || !SCALAR_INT_MODE_P (GET_MODE (inner))))
13431 break;
13432 else
13433 {
13434 dw_die_ref type_die;
13435 dw_loc_descr_ref cvt;
13436
13437 mem_loc_result = mem_loc_descriptor (inner,
13438 GET_MODE (inner),
13439 mem_mode, initialized);
13440 if (mem_loc_result == NULL)
13441 break;
13442 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13443 if (type_die == NULL)
13444 {
13445 mem_loc_result = NULL;
13446 break;
13447 }
13448 if (GET_MODE_SIZE (mode)
13449 != GET_MODE_SIZE (GET_MODE (inner)))
13450 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13451 else
13452 cvt = new_loc_descr (DW_OP_GNU_reinterpret, 0, 0);
13453 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13454 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13455 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13456 add_loc_descr (&mem_loc_result, cvt);
13457 if (SCALAR_INT_MODE_P (mode)
13458 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13459 {
13460 /* Convert it to untyped afterwards. */
13461 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13462 add_loc_descr (&mem_loc_result, cvt);
13463 }
13464 }
13465 break;
13466
13467 case REG:
13468 if (! SCALAR_INT_MODE_P (mode)
13469 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13470 && rtl != arg_pointer_rtx
13471 && rtl != frame_pointer_rtx
13472 #ifdef POINTERS_EXTEND_UNSIGNED
13473 && (mode != Pmode || mem_mode == VOIDmode)
13474 #endif
13475 ))
13476 {
13477 dw_die_ref type_die;
13478 unsigned int dbx_regnum;
13479
13480 if (dwarf_strict)
13481 break;
13482 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13483 break;
13484 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13485 if (type_die == NULL)
13486 break;
13487
13488 dbx_regnum = dbx_reg_number (rtl);
13489 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13490 break;
13491 mem_loc_result = new_loc_descr (DW_OP_GNU_regval_type,
13492 dbx_regnum, 0);
13493 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13494 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13495 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
13496 break;
13497 }
13498 /* Whenever a register number forms a part of the description of the
13499 method for calculating the (dynamic) address of a memory resident
13500 object, DWARF rules require the register number be referred to as
13501 a "base register". This distinction is not based in any way upon
13502 what category of register the hardware believes the given register
13503 belongs to. This is strictly DWARF terminology we're dealing with
13504 here. Note that in cases where the location of a memory-resident
13505 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
13506 OP_CONST (0)) the actual DWARF location descriptor that we generate
13507 may just be OP_BASEREG (basereg). This may look deceptively like
13508 the object in question was allocated to a register (rather than in
13509 memory) so DWARF consumers need to be aware of the subtle
13510 distinction between OP_REG and OP_BASEREG. */
13511 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
13512 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
13513 else if (stack_realign_drap
13514 && crtl->drap_reg
13515 && crtl->args.internal_arg_pointer == rtl
13516 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
13517 {
13518 /* If RTL is internal_arg_pointer, which has been optimized
13519 out, use DRAP instead. */
13520 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
13521 VAR_INIT_STATUS_INITIALIZED);
13522 }
13523 break;
13524
13525 case SIGN_EXTEND:
13526 case ZERO_EXTEND:
13527 if (!SCALAR_INT_MODE_P (mode))
13528 break;
13529 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
13530 mem_mode, VAR_INIT_STATUS_INITIALIZED);
13531 if (op0 == 0)
13532 break;
13533 else if (GET_CODE (rtl) == ZERO_EXTEND
13534 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13535 && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
13536 < HOST_BITS_PER_WIDE_INT
13537 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
13538 to expand zero extend as two shifts instead of
13539 masking. */
13540 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
13541 {
13542 machine_mode imode = GET_MODE (XEXP (rtl, 0));
13543 mem_loc_result = op0;
13544 add_loc_descr (&mem_loc_result,
13545 int_loc_descriptor (GET_MODE_MASK (imode)));
13546 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
13547 }
13548 else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13549 {
13550 int shift = DWARF2_ADDR_SIZE
13551 - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
13552 shift *= BITS_PER_UNIT;
13553 if (GET_CODE (rtl) == SIGN_EXTEND)
13554 op = DW_OP_shra;
13555 else
13556 op = DW_OP_shr;
13557 mem_loc_result = op0;
13558 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13559 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
13560 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13561 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13562 }
13563 else if (!dwarf_strict)
13564 {
13565 dw_die_ref type_die1, type_die2;
13566 dw_loc_descr_ref cvt;
13567
13568 type_die1 = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
13569 GET_CODE (rtl) == ZERO_EXTEND);
13570 if (type_die1 == NULL)
13571 break;
13572 type_die2 = base_type_for_mode (mode, 1);
13573 if (type_die2 == NULL)
13574 break;
13575 mem_loc_result = op0;
13576 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13577 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13578 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
13579 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13580 add_loc_descr (&mem_loc_result, cvt);
13581 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13582 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13583 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
13584 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13585 add_loc_descr (&mem_loc_result, cvt);
13586 }
13587 break;
13588
13589 case MEM:
13590 {
13591 rtx new_rtl = avoid_constant_pool_reference (rtl);
13592 if (new_rtl != rtl)
13593 {
13594 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
13595 initialized);
13596 if (mem_loc_result != NULL)
13597 return mem_loc_result;
13598 }
13599 }
13600 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
13601 get_address_mode (rtl), mode,
13602 VAR_INIT_STATUS_INITIALIZED);
13603 if (mem_loc_result == NULL)
13604 mem_loc_result = tls_mem_loc_descriptor (rtl);
13605 if (mem_loc_result != NULL)
13606 {
13607 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13608 || !SCALAR_INT_MODE_P(mode))
13609 {
13610 dw_die_ref type_die;
13611 dw_loc_descr_ref deref;
13612
13613 if (dwarf_strict)
13614 return NULL;
13615 type_die
13616 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13617 if (type_die == NULL)
13618 return NULL;
13619 deref = new_loc_descr (DW_OP_GNU_deref_type,
13620 GET_MODE_SIZE (mode), 0);
13621 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13622 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13623 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
13624 add_loc_descr (&mem_loc_result, deref);
13625 }
13626 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
13627 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
13628 else
13629 add_loc_descr (&mem_loc_result,
13630 new_loc_descr (DW_OP_deref_size,
13631 GET_MODE_SIZE (mode), 0));
13632 }
13633 break;
13634
13635 case LO_SUM:
13636 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
13637
13638 case LABEL_REF:
13639 /* Some ports can transform a symbol ref into a label ref, because
13640 the symbol ref is too far away and has to be dumped into a constant
13641 pool. */
13642 case CONST:
13643 case SYMBOL_REF:
13644 if (!SCALAR_INT_MODE_P (mode)
13645 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13646 #ifdef POINTERS_EXTEND_UNSIGNED
13647 && (mode != Pmode || mem_mode == VOIDmode)
13648 #endif
13649 ))
13650 break;
13651 if (GET_CODE (rtl) == SYMBOL_REF
13652 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13653 {
13654 dw_loc_descr_ref temp;
13655
13656 /* If this is not defined, we have no way to emit the data. */
13657 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
13658 break;
13659
13660 temp = new_addr_loc_descr (rtl, dtprel_true);
13661
13662 mem_loc_result = new_loc_descr (DW_OP_GNU_push_tls_address, 0, 0);
13663 add_loc_descr (&mem_loc_result, temp);
13664
13665 break;
13666 }
13667
13668 if (!const_ok_for_output (rtl))
13669 {
13670 if (GET_CODE (rtl) == CONST)
13671 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13672 initialized);
13673 break;
13674 }
13675
13676 symref:
13677 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
13678 vec_safe_push (used_rtx_array, rtl);
13679 break;
13680
13681 case CONCAT:
13682 case CONCATN:
13683 case VAR_LOCATION:
13684 case DEBUG_IMPLICIT_PTR:
13685 expansion_failed (NULL_TREE, rtl,
13686 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
13687 return 0;
13688
13689 case ENTRY_VALUE:
13690 if (dwarf_strict)
13691 return NULL;
13692 if (REG_P (ENTRY_VALUE_EXP (rtl)))
13693 {
13694 if (!SCALAR_INT_MODE_P (mode)
13695 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13696 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13697 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13698 else
13699 {
13700 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
13701 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13702 return NULL;
13703 op0 = one_reg_loc_descriptor (dbx_regnum,
13704 VAR_INIT_STATUS_INITIALIZED);
13705 }
13706 }
13707 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
13708 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
13709 {
13710 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13711 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13712 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
13713 return NULL;
13714 }
13715 else
13716 gcc_unreachable ();
13717 if (op0 == NULL)
13718 return NULL;
13719 mem_loc_result = new_loc_descr (DW_OP_GNU_entry_value, 0, 0);
13720 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
13721 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
13722 break;
13723
13724 case DEBUG_PARAMETER_REF:
13725 mem_loc_result = parameter_ref_descriptor (rtl);
13726 break;
13727
13728 case PRE_MODIFY:
13729 /* Extract the PLUS expression nested inside and fall into
13730 PLUS code below. */
13731 rtl = XEXP (rtl, 1);
13732 goto plus;
13733
13734 case PRE_INC:
13735 case PRE_DEC:
13736 /* Turn these into a PLUS expression and fall into the PLUS code
13737 below. */
13738 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
13739 gen_int_mode (GET_CODE (rtl) == PRE_INC
13740 ? GET_MODE_UNIT_SIZE (mem_mode)
13741 : -GET_MODE_UNIT_SIZE (mem_mode),
13742 mode));
13743
13744 /* fall through */
13745
13746 case PLUS:
13747 plus:
13748 if (is_based_loc (rtl)
13749 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13750 || XEXP (rtl, 0) == arg_pointer_rtx
13751 || XEXP (rtl, 0) == frame_pointer_rtx)
13752 && SCALAR_INT_MODE_P (mode))
13753 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
13754 INTVAL (XEXP (rtl, 1)),
13755 VAR_INIT_STATUS_INITIALIZED);
13756 else
13757 {
13758 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13759 VAR_INIT_STATUS_INITIALIZED);
13760 if (mem_loc_result == 0)
13761 break;
13762
13763 if (CONST_INT_P (XEXP (rtl, 1))
13764 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13765 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
13766 else
13767 {
13768 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13769 VAR_INIT_STATUS_INITIALIZED);
13770 if (op1 == 0)
13771 return NULL;
13772 add_loc_descr (&mem_loc_result, op1);
13773 add_loc_descr (&mem_loc_result,
13774 new_loc_descr (DW_OP_plus, 0, 0));
13775 }
13776 }
13777 break;
13778
13779 /* If a pseudo-reg is optimized away, it is possible for it to
13780 be replaced with a MEM containing a multiply or shift. */
13781 case MINUS:
13782 op = DW_OP_minus;
13783 goto do_binop;
13784
13785 case MULT:
13786 op = DW_OP_mul;
13787 goto do_binop;
13788
13789 case DIV:
13790 if (!dwarf_strict
13791 && SCALAR_INT_MODE_P (mode)
13792 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13793 {
13794 mem_loc_result = typed_binop (DW_OP_div, rtl,
13795 base_type_for_mode (mode, 0),
13796 mode, mem_mode);
13797 break;
13798 }
13799 op = DW_OP_div;
13800 goto do_binop;
13801
13802 case UMOD:
13803 op = DW_OP_mod;
13804 goto do_binop;
13805
13806 case ASHIFT:
13807 op = DW_OP_shl;
13808 goto do_shift;
13809
13810 case ASHIFTRT:
13811 op = DW_OP_shra;
13812 goto do_shift;
13813
13814 case LSHIFTRT:
13815 op = DW_OP_shr;
13816 goto do_shift;
13817
13818 do_shift:
13819 if (!SCALAR_INT_MODE_P (mode))
13820 break;
13821 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13822 VAR_INIT_STATUS_INITIALIZED);
13823 {
13824 rtx rtlop1 = XEXP (rtl, 1);
13825 if (GET_MODE (rtlop1) != VOIDmode
13826 && GET_MODE_BITSIZE (GET_MODE (rtlop1))
13827 < GET_MODE_BITSIZE (mode))
13828 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13829 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13830 VAR_INIT_STATUS_INITIALIZED);
13831 }
13832
13833 if (op0 == 0 || op1 == 0)
13834 break;
13835
13836 mem_loc_result = op0;
13837 add_loc_descr (&mem_loc_result, op1);
13838 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13839 break;
13840
13841 case AND:
13842 op = DW_OP_and;
13843 goto do_binop;
13844
13845 case IOR:
13846 op = DW_OP_or;
13847 goto do_binop;
13848
13849 case XOR:
13850 op = DW_OP_xor;
13851 goto do_binop;
13852
13853 do_binop:
13854 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13855 VAR_INIT_STATUS_INITIALIZED);
13856 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13857 VAR_INIT_STATUS_INITIALIZED);
13858
13859 if (op0 == 0 || op1 == 0)
13860 break;
13861
13862 mem_loc_result = op0;
13863 add_loc_descr (&mem_loc_result, op1);
13864 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13865 break;
13866
13867 case MOD:
13868 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE && !dwarf_strict)
13869 {
13870 mem_loc_result = typed_binop (DW_OP_mod, rtl,
13871 base_type_for_mode (mode, 0),
13872 mode, mem_mode);
13873 break;
13874 }
13875
13876 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13877 VAR_INIT_STATUS_INITIALIZED);
13878 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13879 VAR_INIT_STATUS_INITIALIZED);
13880
13881 if (op0 == 0 || op1 == 0)
13882 break;
13883
13884 mem_loc_result = op0;
13885 add_loc_descr (&mem_loc_result, op1);
13886 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13887 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13888 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
13889 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
13890 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
13891 break;
13892
13893 case UDIV:
13894 if (!dwarf_strict && SCALAR_INT_MODE_P (mode))
13895 {
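/* Operands narrower than the DWARF stack slot are zero extended, so their
   sign bit is clear and the signed DW_OP_div computes the unsigned
   quotient; otherwise a typed division with an unsigned base type is
   needed.  */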
13896 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
13897 {
13898 op = DW_OP_div;
13899 goto do_binop;
13900 }
13901 mem_loc_result = typed_binop (DW_OP_div, rtl,
13902 base_type_for_mode (mode, 1),
13903 mode, mem_mode);
13904 }
13905 break;
13906
13907 case NOT:
13908 op = DW_OP_not;
13909 goto do_unop;
13910
13911 case ABS:
13912 op = DW_OP_abs;
13913 goto do_unop;
13914
13915 case NEG:
13916 op = DW_OP_neg;
13917 goto do_unop;
13918
13919 do_unop:
13920 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13921 VAR_INIT_STATUS_INITIALIZED);
13922
13923 if (op0 == 0)
13924 break;
13925
13926 mem_loc_result = op0;
13927 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13928 break;
13929
13930 case CONST_INT:
13931 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13932 #ifdef POINTERS_EXTEND_UNSIGNED
13933 || (mode == Pmode
13934 && mem_mode != VOIDmode
13935 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
13936 #endif
13937 )
13938 {
13939 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13940 break;
13941 }
13942 if (!dwarf_strict
13943 && (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT
13944 || GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT))
13945 {
13946 dw_die_ref type_die = base_type_for_mode (mode, 1);
13947 machine_mode amode;
13948 if (type_die == NULL)
13949 return NULL;
13950 amode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
13951 MODE_INT, 0);
13952 if (INTVAL (rtl) >= 0
13953 && amode != BLKmode
13954 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
13955 /* const DW_OP_GNU_convert <XXX> vs.
13956 DW_OP_GNU_const_type <XXX, 1, const>. */
13957 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
13958 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode))
13959 {
13960 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13961 op0 = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13962 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13963 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13964 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
13965 add_loc_descr (&mem_loc_result, op0);
13966 return mem_loc_result;
13967 }
13968 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0,
13969 INTVAL (rtl));
13970 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13971 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13972 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13973 if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13974 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13975 else
13976 {
13977 mem_loc_result->dw_loc_oprnd2.val_class
13978 = dw_val_class_const_double;
13979 mem_loc_result->dw_loc_oprnd2.v.val_double
13980 = double_int::from_shwi (INTVAL (rtl));
13981 }
13982 }
13983 break;
13984
13985 case CONST_DOUBLE:
13986 if (!dwarf_strict)
13987 {
13988 dw_die_ref type_die;
13989
13990 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
13991 CONST_DOUBLE rtx could represent either a large integer
13992 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
13993 the value is always a floating point constant.
13994
13995 When it is an integer, a CONST_DOUBLE is used whenever
13996 the constant requires 2 HWIs to be adequately represented.
13997 We output CONST_DOUBLEs as blocks. */
13998 if (mode == VOIDmode
13999 || (GET_MODE (rtl) == VOIDmode
14000 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
14001 break;
14002 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14003 if (type_die == NULL)
14004 return NULL;
14005 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
14006 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14007 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14008 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
14009 #if TARGET_SUPPORTS_WIDE_INT == 0
14010 if (!SCALAR_FLOAT_MODE_P (mode))
14011 {
14012 mem_loc_result->dw_loc_oprnd2.val_class
14013 = dw_val_class_const_double;
14014 mem_loc_result->dw_loc_oprnd2.v.val_double
14015 = rtx_to_double_int (rtl);
14016 }
14017 else
14018 #endif
14019 {
14020 unsigned int length = GET_MODE_SIZE (mode);
14021 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
14022
14023 insert_float (rtl, array);
14024 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14025 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
14026 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
14027 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14028 }
14029 }
14030 break;
14031
14032 case CONST_WIDE_INT:
14033 if (!dwarf_strict)
14034 {
14035 dw_die_ref type_die;
14036
14037 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14038 if (type_die == NULL)
14039 return NULL;
14040 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
14041 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14042 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14043 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
14044 mem_loc_result->dw_loc_oprnd2.val_class
14045 = dw_val_class_wide_int;
14046 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
14047 *mem_loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
14048 }
14049 break;
14050
14051 case EQ:
14052 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
14053 break;
14054
14055 case GE:
14056 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
14057 break;
14058
14059 case GT:
14060 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
14061 break;
14062
14063 case LE:
14064 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
14065 break;
14066
14067 case LT:
14068 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14069 break;
14070
14071 case NE:
14072 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
14073 break;
14074
14075 case GEU:
14076 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
14077 break;
14078
14079 case GTU:
14080 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
14081 break;
14082
14083 case LEU:
14084 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
14085 break;
14086
14087 case LTU:
14088 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14089 break;
14090
14091 case UMIN:
14092 case UMAX:
14093 if (!SCALAR_INT_MODE_P (mode))
14094 break;
14095 /* FALLTHRU */
14096 case SMIN:
14097 case SMAX:
14098 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
14099 break;
14100
14101 case ZERO_EXTRACT:
14102 case SIGN_EXTRACT:
14103 if (CONST_INT_P (XEXP (rtl, 1))
14104 && CONST_INT_P (XEXP (rtl, 2))
14105 && ((unsigned) INTVAL (XEXP (rtl, 1))
14106 + (unsigned) INTVAL (XEXP (rtl, 2))
14107 <= GET_MODE_BITSIZE (mode))
14108 && SCALAR_INT_MODE_P (mode)
14109 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14110 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
14111 {
14112 int shift, size;
14113 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14114 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14115 if (op0 == 0)
14116 break;
14117 if (GET_CODE (rtl) == SIGN_EXTRACT)
14118 op = DW_OP_shra;
14119 else
14120 op = DW_OP_shr;
14121 mem_loc_result = op0;
14122 size = INTVAL (XEXP (rtl, 1));
14123 shift = INTVAL (XEXP (rtl, 2));
14124 if (BITS_BIG_ENDIAN)
14125 shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
14126 - shift - size;
14127 if (shift + size != (int) DWARF2_ADDR_SIZE)
14128 {
14129 add_loc_descr (&mem_loc_result,
14130 int_loc_descriptor (DWARF2_ADDR_SIZE
14131 - shift - size));
14132 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14133 }
14134 if (size != (int) DWARF2_ADDR_SIZE)
14135 {
14136 add_loc_descr (&mem_loc_result,
14137 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
14138 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14139 }
14140 }
14141 break;
14142
14143 case IF_THEN_ELSE:
14144 {
14145 dw_loc_descr_ref op2, bra_node, drop_node;
14146 op0 = mem_loc_descriptor (XEXP (rtl, 0),
14147 GET_MODE (XEXP (rtl, 0)) == VOIDmode
14148 ? word_mode : GET_MODE (XEXP (rtl, 0)),
14149 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14150 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14151 VAR_INIT_STATUS_INITIALIZED);
14152 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
14153 VAR_INIT_STATUS_INITIALIZED);
14154 if (op0 == NULL || op1 == NULL || op2 == NULL)
14155 break;
14156
14157 mem_loc_result = op1;
14158 add_loc_descr (&mem_loc_result, op2);
14159 add_loc_descr (&mem_loc_result, op0);
14160 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14161 add_loc_descr (&mem_loc_result, bra_node);
14162 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
14163 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14164 add_loc_descr (&mem_loc_result, drop_node);
14165 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14166 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
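/* Stack walk-through of the sequence built above: op1 (the THEN value),
op2 (the ELSE value) and op0 (the condition) are pushed in that order.
DW_OP_bra pops the condition; if it is non-zero, execution jumps to the
DW_OP_drop, which discards op2 and leaves op1. If it is zero,
DW_OP_swap followed by DW_OP_drop discards op1 and leaves op2, matching
the IF_THEN_ELSE semantics. */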
14167 }
14168 break;
14169
14170 case FLOAT_EXTEND:
14171 case FLOAT_TRUNCATE:
14172 case FLOAT:
14173 case UNSIGNED_FLOAT:
14174 case FIX:
14175 case UNSIGNED_FIX:
14176 if (!dwarf_strict)
14177 {
14178 dw_die_ref type_die;
14179 dw_loc_descr_ref cvt;
14180
14181 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14182 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14183 if (op0 == NULL)
14184 break;
14185 if (SCALAR_INT_MODE_P (GET_MODE (XEXP (rtl, 0)))
14186 && (GET_CODE (rtl) == FLOAT
14187 || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)))
14188 <= DWARF2_ADDR_SIZE))
14189 {
14190 type_die = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
14191 GET_CODE (rtl) == UNSIGNED_FLOAT);
14192 if (type_die == NULL)
14193 break;
14194 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14195 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14196 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14197 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14198 add_loc_descr (&op0, cvt);
14199 }
14200 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
14201 if (type_die == NULL)
14202 break;
14203 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14204 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14205 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14206 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14207 add_loc_descr (&op0, cvt);
14208 if (SCALAR_INT_MODE_P (mode)
14209 && (GET_CODE (rtl) == FIX
14210 || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
14211 {
14212 op0 = convert_descriptor_to_mode (mode, op0);
14213 if (op0 == NULL)
14214 break;
14215 }
14216 mem_loc_result = op0;
14217 }
14218 break;
14219
14220 case CLZ:
14221 case CTZ:
14222 case FFS:
14223 mem_loc_result = clz_loc_descriptor (rtl, mode, mem_mode);
14224 break;
14225
14226 case POPCOUNT:
14227 case PARITY:
14228 mem_loc_result = popcount_loc_descriptor (rtl, mode, mem_mode);
14229 break;
14230
14231 case BSWAP:
14232 mem_loc_result = bswap_loc_descriptor (rtl, mode, mem_mode);
14233 break;
14234
14235 case ROTATE:
14236 case ROTATERT:
14237 mem_loc_result = rotate_loc_descriptor (rtl, mode, mem_mode);
14238 break;
14239
14240 case COMPARE:
14241 /* In theory, we could implement the above. */
14242 /* DWARF cannot represent the unsigned compare operations
14243 natively. */
14244 case SS_MULT:
14245 case US_MULT:
14246 case SS_DIV:
14247 case US_DIV:
14248 case SS_PLUS:
14249 case US_PLUS:
14250 case SS_MINUS:
14251 case US_MINUS:
14252 case SS_NEG:
14253 case US_NEG:
14254 case SS_ABS:
14255 case SS_ASHIFT:
14256 case US_ASHIFT:
14257 case SS_TRUNCATE:
14258 case US_TRUNCATE:
14259 case UNORDERED:
14260 case ORDERED:
14261 case UNEQ:
14262 case UNGE:
14263 case UNGT:
14264 case UNLE:
14265 case UNLT:
14266 case LTGT:
14267 case FRACT_CONVERT:
14268 case UNSIGNED_FRACT_CONVERT:
14269 case SAT_FRACT:
14270 case UNSIGNED_SAT_FRACT:
14271 case SQRT:
14272 case ASM_OPERANDS:
14273 case VEC_MERGE:
14274 case VEC_SELECT:
14275 case VEC_CONCAT:
14276 case VEC_DUPLICATE:
14277 case UNSPEC:
14278 case HIGH:
14279 case FMA:
14280 case STRICT_LOW_PART:
14281 case CONST_VECTOR:
14282 case CONST_FIXED:
14283 case CLRSB:
14284 case CLOBBER:
14285 /* If delegitimize_address couldn't do anything with the UNSPEC, we
14286 can't express it in the debug info. This can happen e.g. with some
14287 TLS UNSPECs. */
14288 break;
14289
14290 case CONST_STRING:
14291 resolve_one_addr (&rtl);
14292 goto symref;
14293
14294 default:
14295 if (flag_checking)
14296 {
14297 print_rtl (stderr, rtl);
14298 gcc_unreachable ();
14299 }
14300 break;
14301 }
14302
14303 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14304 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14305
14306 return mem_loc_result;
14307 }
14308
14309 /* Return a descriptor that describes the concatenation of two locations.
14310 This is typically a complex variable. */
14311
14312 static dw_loc_descr_ref
14313 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
14314 {
14315 dw_loc_descr_ref cc_loc_result = NULL;
14316 dw_loc_descr_ref x0_ref
14317 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14318 dw_loc_descr_ref x1_ref
14319 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14320
14321 if (x0_ref == 0 || x1_ref == 0)
14322 return 0;
14323
14324 cc_loc_result = x0_ref;
14325 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
14326
14327 add_loc_descr (&cc_loc_result, x1_ref);
14328 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
14329
14330 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14331 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14332
14333 return cc_loc_result;
14334 }
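/* For example (an illustrative sketch, not from a real compilation): on a
target where double is 8 bytes, a __complex__ double whose real part
lives in register 0 and whose imaginary part lives in register 1 would
typically be described as
DW_OP_reg0 DW_OP_piece 8 DW_OP_reg1 DW_OP_piece 8
i.e. each half is emitted by loc_descriptor and followed by a DW_OP_piece
covering GET_MODE_SIZE of that half. */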
14335
14336 /* Return a descriptor that describes the concatenation of N
14337 locations. */
14338
14339 static dw_loc_descr_ref
14340 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
14341 {
14342 unsigned int i;
14343 dw_loc_descr_ref cc_loc_result = NULL;
14344 unsigned int n = XVECLEN (concatn, 0);
14345
14346 for (i = 0; i < n; ++i)
14347 {
14348 dw_loc_descr_ref ref;
14349 rtx x = XVECEXP (concatn, 0, i);
14350
14351 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14352 if (ref == NULL)
14353 return NULL;
14354
14355 add_loc_descr (&cc_loc_result, ref);
14356 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
14357 }
14358
14359 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14360 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14361
14362 return cc_loc_result;
14363 }
14364
14365 /* Helper function for loc_descriptor. Return DW_OP_GNU_implicit_pointer
14366 for DEBUG_IMPLICIT_PTR RTL. */
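/* DW_OP_GNU_implicit_pointer takes a reference to the DIE describing the
pointed-to object and a byte offset into it: the pointer value itself
has been optimized away, but a debugger can still "dereference" it by
evaluating the location of the referenced object and applying the
offset. */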
14367
14368 static dw_loc_descr_ref
14369 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
14370 {
14371 dw_loc_descr_ref ret;
14372 dw_die_ref ref;
14373
14374 if (dwarf_strict)
14375 return NULL;
14376 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
14377 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
14378 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
14379 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
14380 ret = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
14381 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
14382 if (ref)
14383 {
14384 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14385 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14386 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14387 }
14388 else
14389 {
14390 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14391 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
14392 }
14393 return ret;
14394 }
14395
14396 /* Output a proper Dwarf location descriptor for a variable or parameter
14397 which is either allocated in a register or in a memory location. For a
14398 register, we just generate an OP_REG and the register number. For a
14399 memory location we provide a Dwarf postfix expression describing how to
14400 generate the (dynamic) address of the object onto the address stack.
14401
14402 MODE is mode of the decl if this loc_descriptor is going to be used in
14403 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
14404 allowed, VOIDmode otherwise.
14405
14406 If we don't know how to describe it, return 0. */
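/* For example (illustrative): a variable living entirely in hard register 3
yields the single operation DW_OP_reg3, while a variable kept in memory at
a constant offset from the frame base yields an address computation such
as DW_OP_fbreg <offset> (or DW_OP_breg<N> <offset>), which leaves the
object's address on the DWARF evaluation stack. */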
14407
14408 static dw_loc_descr_ref
14409 loc_descriptor (rtx rtl, machine_mode mode,
14410 enum var_init_status initialized)
14411 {
14412 dw_loc_descr_ref loc_result = NULL;
14413
14414 switch (GET_CODE (rtl))
14415 {
14416 case SUBREG:
14417 /* The case of a subreg may arise when we have a local (register)
14418 variable or a formal (register) parameter which doesn't quite fill
14419 up an entire register. For now, just assume that it is
14420 legitimate to make the Dwarf info refer to the whole register which
14421 contains the given subreg. */
14422 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
14423 loc_result = loc_descriptor (SUBREG_REG (rtl),
14424 GET_MODE (SUBREG_REG (rtl)), initialized);
14425 else
14426 goto do_default;
14427 break;
14428
14429 case REG:
14430 loc_result = reg_loc_descriptor (rtl, initialized);
14431 break;
14432
14433 case MEM:
14434 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
14435 GET_MODE (rtl), initialized);
14436 if (loc_result == NULL)
14437 loc_result = tls_mem_loc_descriptor (rtl);
14438 if (loc_result == NULL)
14439 {
14440 rtx new_rtl = avoid_constant_pool_reference (rtl);
14441 if (new_rtl != rtl)
14442 loc_result = loc_descriptor (new_rtl, mode, initialized);
14443 }
14444 break;
14445
14446 case CONCAT:
14447 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
14448 initialized);
14449 break;
14450
14451 case CONCATN:
14452 loc_result = concatn_loc_descriptor (rtl, initialized);
14453 break;
14454
14455 case VAR_LOCATION:
14456 /* Single part. */
14457 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
14458 {
14459 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
14460 if (GET_CODE (loc) == EXPR_LIST)
14461 loc = XEXP (loc, 0);
14462 loc_result = loc_descriptor (loc, mode, initialized);
14463 break;
14464 }
14465
14466 rtl = XEXP (rtl, 1);
14467 /* FALLTHRU */
14468
14469 case PARALLEL:
14470 {
14471 rtvec par_elems = XVEC (rtl, 0);
14472 int num_elem = GET_NUM_ELEM (par_elems);
14473 machine_mode mode;
14474 int i;
14475
14476 /* Create the first one, so we have something to add to. */
14477 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
14478 VOIDmode, initialized);
14479 if (loc_result == NULL)
14480 return NULL;
14481 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
14482 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14483 for (i = 1; i < num_elem; i++)
14484 {
14485 dw_loc_descr_ref temp;
14486
14487 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
14488 VOIDmode, initialized);
14489 if (temp == NULL)
14490 return NULL;
14491 add_loc_descr (&loc_result, temp);
14492 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
14493 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14494 }
14495 }
14496 break;
14497
14498 case CONST_INT:
14499 if (mode != VOIDmode && mode != BLKmode)
14500 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (mode),
14501 INTVAL (rtl));
14502 break;
14503
14504 case CONST_DOUBLE:
14505 if (mode == VOIDmode)
14506 mode = GET_MODE (rtl);
14507
14508 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14509 {
14510 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14511
14512 /* Note that a CONST_DOUBLE rtx could represent either an integer
14513 or a floating-point constant. A CONST_DOUBLE is used whenever
14514 the constant requires more than one word in order to be
14515 adequately represented. We output CONST_DOUBLEs as blocks. */
14516 loc_result = new_loc_descr (DW_OP_implicit_value,
14517 GET_MODE_SIZE (mode), 0);
14518 #if TARGET_SUPPORTS_WIDE_INT == 0
14519 if (!SCALAR_FLOAT_MODE_P (mode))
14520 {
14521 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
14522 loc_result->dw_loc_oprnd2.v.val_double
14523 = rtx_to_double_int (rtl);
14524 }
14525 else
14526 #endif
14527 {
14528 unsigned int length = GET_MODE_SIZE (mode);
14529 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
14530
14531 insert_float (rtl, array);
14532 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14533 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
14534 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
14535 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14536 }
14537 }
14538 break;
14539
14540 case CONST_WIDE_INT:
14541 if (mode == VOIDmode)
14542 mode = GET_MODE (rtl);
14543
14544 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14545 {
14546 loc_result = new_loc_descr (DW_OP_implicit_value,
14547 GET_MODE_SIZE (mode), 0);
14548 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
14549 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
14550 *loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
14551 }
14552 break;
14553
14554 case CONST_VECTOR:
14555 if (mode == VOIDmode)
14556 mode = GET_MODE (rtl);
14557
14558 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14559 {
14560 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
14561 unsigned int length = CONST_VECTOR_NUNITS (rtl);
14562 unsigned char *array
14563 = ggc_vec_alloc<unsigned char> (length * elt_size);
14564 unsigned int i;
14565 unsigned char *p;
14566 machine_mode imode = GET_MODE_INNER (mode);
14567
14568 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14569 switch (GET_MODE_CLASS (mode))
14570 {
14571 case MODE_VECTOR_INT:
14572 for (i = 0, p = array; i < length; i++, p += elt_size)
14573 {
14574 rtx elt = CONST_VECTOR_ELT (rtl, i);
14575 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
14576 }
14577 break;
14578
14579 case MODE_VECTOR_FLOAT:
14580 for (i = 0, p = array; i < length; i++, p += elt_size)
14581 {
14582 rtx elt = CONST_VECTOR_ELT (rtl, i);
14583 insert_float (elt, p);
14584 }
14585 break;
14586
14587 default:
14588 gcc_unreachable ();
14589 }
14590
14591 loc_result = new_loc_descr (DW_OP_implicit_value,
14592 length * elt_size, 0);
14593 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14594 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
14595 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
14596 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14597 }
14598 break;
14599
14600 case CONST:
14601 if (mode == VOIDmode
14602 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
14603 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
14604 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
14605 {
14606 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
14607 break;
14608 }
14609 /* FALLTHROUGH */
14610 case SYMBOL_REF:
14611 if (!const_ok_for_output (rtl))
14612 break;
14613 /* FALLTHROUGH */
14614 case LABEL_REF:
14615 if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
14616 && (dwarf_version >= 4 || !dwarf_strict))
14617 {
14618 loc_result = new_addr_loc_descr (rtl, dtprel_false);
14619 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
14620 vec_safe_push (used_rtx_array, rtl);
14621 }
14622 break;
14623
14624 case DEBUG_IMPLICIT_PTR:
14625 loc_result = implicit_ptr_descriptor (rtl, 0);
14626 break;
14627
14628 case PLUS:
14629 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
14630 && CONST_INT_P (XEXP (rtl, 1)))
14631 {
14632 loc_result
14633 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
14634 break;
14635 }
14636 /* FALLTHRU */
14637 do_default:
14638 default:
14639 if ((SCALAR_INT_MODE_P (mode)
14640 && GET_MODE (rtl) == mode
14641 && GET_MODE_SIZE (GET_MODE (rtl)) <= DWARF2_ADDR_SIZE
14642 && dwarf_version >= 4)
14643 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
14644 {
14645 /* Value expression. */
14646 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
14647 if (loc_result)
14648 add_loc_descr (&loc_result,
14649 new_loc_descr (DW_OP_stack_value, 0, 0));
14650 }
14651 break;
14652 }
14653
14654 return loc_result;
14655 }
14656
14657 /* We need to figure out what section we should use as the base for the
14658 address ranges where a given location is valid.
14659 1. If this particular DECL has a section associated with it, use that.
14660 2. If this function has a section associated with it, use that.
14661 3. Otherwise, use the text section.
14662 XXX: If you split a variable across multiple sections, we won't notice. */
14663
14664 static const char *
14665 secname_for_decl (const_tree decl)
14666 {
14667 const char *secname;
14668
14669 if (VAR_OR_FUNCTION_DECL_P (decl)
14670 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
14671 && DECL_SECTION_NAME (decl))
14672 secname = DECL_SECTION_NAME (decl);
14673 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
14674 secname = DECL_SECTION_NAME (current_function_decl);
14675 else if (cfun && in_cold_section_p)
14676 secname = crtl->subsections.cold_section_label;
14677 else
14678 secname = text_section_label;
14679
14680 return secname;
14681 }
14682
14683 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
14684
14685 static bool
14686 decl_by_reference_p (tree decl)
14687 {
14688 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
14689 || VAR_P (decl))
14690 && DECL_BY_REFERENCE (decl));
14691 }
14692
14693 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14694 for VARLOC. */
14695
14696 static dw_loc_descr_ref
14697 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
14698 enum var_init_status initialized)
14699 {
14700 int have_address = 0;
14701 dw_loc_descr_ref descr;
14702 machine_mode mode;
14703
14704 if (want_address != 2)
14705 {
14706 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
14707 /* Single part. */
14708 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14709 {
14710 varloc = PAT_VAR_LOCATION_LOC (varloc);
14711 if (GET_CODE (varloc) == EXPR_LIST)
14712 varloc = XEXP (varloc, 0);
14713 mode = GET_MODE (varloc);
14714 if (MEM_P (varloc))
14715 {
14716 rtx addr = XEXP (varloc, 0);
14717 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
14718 mode, initialized);
14719 if (descr)
14720 have_address = 1;
14721 else
14722 {
14723 rtx x = avoid_constant_pool_reference (varloc);
14724 if (x != varloc)
14725 descr = mem_loc_descriptor (x, mode, VOIDmode,
14726 initialized);
14727 }
14728 }
14729 else
14730 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
14731 }
14732 else
14733 return 0;
14734 }
14735 else
14736 {
14737 if (GET_CODE (varloc) == VAR_LOCATION)
14738 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
14739 else
14740 mode = DECL_MODE (loc);
14741 descr = loc_descriptor (varloc, mode, initialized);
14742 have_address = 1;
14743 }
14744
14745 if (!descr)
14746 return 0;
14747
14748 if (want_address == 2 && !have_address
14749 && (dwarf_version >= 4 || !dwarf_strict))
14750 {
14751 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
14752 {
14753 expansion_failed (loc, NULL_RTX,
14754 "DWARF address size mismatch");
14755 return 0;
14756 }
14757 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
14758 have_address = 1;
14759 }
14760 /* Report failure if we can't fulfill the request for an address. */
14761 if (want_address && !have_address)
14762 {
14763 expansion_failed (loc, NULL_RTX,
14764 "Want address and only have value");
14765 return 0;
14766 }
14767
14768 /* If we've got an address and don't want one, dereference. */
14769 if (!want_address && have_address)
14770 {
14771 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
14772 enum dwarf_location_atom op;
14773
14774 if (size > DWARF2_ADDR_SIZE || size == -1)
14775 {
14776 expansion_failed (loc, NULL_RTX,
14777 "DWARF address size mismatch");
14778 return 0;
14779 }
14780 else if (size == DWARF2_ADDR_SIZE)
14781 op = DW_OP_deref;
14782 else
14783 op = DW_OP_deref_size;
14784
14785 add_loc_descr (&descr, new_loc_descr (op, size, 0));
14786 }
14787
14788 return descr;
14789 }
14790
14791 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
14792 if it is not possible. */
14793
14794 static dw_loc_descr_ref
14795 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
14796 {
14797 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
14798 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
14799 else if (dwarf_version >= 3 || !dwarf_strict)
14800 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
14801 else
14802 return NULL;
14803 }
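/* For example: a 4-byte piece starting at a byte boundary is emitted as
DW_OP_piece 4, whereas a 3-bit piece at bit offset 2 needs
DW_OP_bit_piece 3, 2, which is only available from DWARF 3 on (or when
not emitting strict DWARF). */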
14804
14805 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14806 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
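/* For instance (illustrative): if SRA split an 8-byte struct and only the
field occupying its last four bytes has a known location, say register 1,
the resulting descriptor is
DW_OP_piece 4 (empty piece: the first four bytes are optimized out)
DW_OP_reg1 DW_OP_piece 4
where a DW_OP_*piece preceded by no location operations marks bits that
have no location. */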
14807
14808 static dw_loc_descr_ref
14809 dw_sra_loc_expr (tree decl, rtx loc)
14810 {
14811 rtx p;
14812 unsigned HOST_WIDE_INT padsize = 0;
14813 dw_loc_descr_ref descr, *descr_tail;
14814 unsigned HOST_WIDE_INT decl_size;
14815 rtx varloc;
14816 enum var_init_status initialized;
14817
14818 if (DECL_SIZE (decl) == NULL
14819 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
14820 return NULL;
14821
14822 decl_size = tree_to_uhwi (DECL_SIZE (decl));
14823 descr = NULL;
14824 descr_tail = &descr;
14825
14826 for (p = loc; p; p = XEXP (p, 1))
14827 {
14828 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
14829 rtx loc_note = *decl_piece_varloc_ptr (p);
14830 dw_loc_descr_ref cur_descr;
14831 dw_loc_descr_ref *tail, last = NULL;
14832 unsigned HOST_WIDE_INT opsize = 0;
14833
14834 if (loc_note == NULL_RTX
14835 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
14836 {
14837 padsize += bitsize;
14838 continue;
14839 }
14840 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
14841 varloc = NOTE_VAR_LOCATION (loc_note);
14842 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
14843 if (cur_descr == NULL)
14844 {
14845 padsize += bitsize;
14846 continue;
14847 }
14848
14849 /* Check that cur_descr either doesn't use
14850 DW_OP_*piece operations, or their sum is equal
14851 to bitsize. Otherwise we can't embed it. */
14852 for (tail = &cur_descr; *tail != NULL;
14853 tail = &(*tail)->dw_loc_next)
14854 if ((*tail)->dw_loc_opc == DW_OP_piece)
14855 {
14856 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
14857 * BITS_PER_UNIT;
14858 last = *tail;
14859 }
14860 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
14861 {
14862 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
14863 last = *tail;
14864 }
14865
14866 if (last != NULL && opsize != bitsize)
14867 {
14868 padsize += bitsize;
14869 /* Discard the current piece of the descriptor and release any
14870 addr_table entries it uses. */
14871 remove_loc_list_addr_table_entries (cur_descr);
14872 continue;
14873 }
14874
14875 /* If there is a hole, add DW_OP_*piece after empty DWARF
14876 expression, which means that those bits are optimized out. */
14877 if (padsize)
14878 {
14879 if (padsize > decl_size)
14880 {
14881 remove_loc_list_addr_table_entries (cur_descr);
14882 goto discard_descr;
14883 }
14884 decl_size -= padsize;
14885 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
14886 if (*descr_tail == NULL)
14887 {
14888 remove_loc_list_addr_table_entries (cur_descr);
14889 goto discard_descr;
14890 }
14891 descr_tail = &(*descr_tail)->dw_loc_next;
14892 padsize = 0;
14893 }
14894 *descr_tail = cur_descr;
14895 descr_tail = tail;
14896 if (bitsize > decl_size)
14897 goto discard_descr;
14898 decl_size -= bitsize;
14899 if (last == NULL)
14900 {
14901 HOST_WIDE_INT offset = 0;
14902 if (GET_CODE (varloc) == VAR_LOCATION
14903 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14904 {
14905 varloc = PAT_VAR_LOCATION_LOC (varloc);
14906 if (GET_CODE (varloc) == EXPR_LIST)
14907 varloc = XEXP (varloc, 0);
14908 }
14909 do
14910 {
14911 if (GET_CODE (varloc) == CONST
14912 || GET_CODE (varloc) == SIGN_EXTEND
14913 || GET_CODE (varloc) == ZERO_EXTEND)
14914 varloc = XEXP (varloc, 0);
14915 else if (GET_CODE (varloc) == SUBREG)
14916 varloc = SUBREG_REG (varloc);
14917 else
14918 break;
14919 }
14920 while (1);
14921 /* The DW_OP_bit_piece offset should be zero for register
14922 or implicit location descriptions and empty location
14923 descriptions, but for memory addresses it needs big-endian
14924 adjustment. */
14925 if (MEM_P (varloc))
14926 {
14927 unsigned HOST_WIDE_INT memsize
14928 = MEM_SIZE (varloc) * BITS_PER_UNIT;
14929 if (memsize != bitsize)
14930 {
14931 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
14932 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
14933 goto discard_descr;
14934 if (memsize < bitsize)
14935 goto discard_descr;
14936 if (BITS_BIG_ENDIAN)
14937 offset = memsize - bitsize;
14938 }
14939 }
14940
14941 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
14942 if (*descr_tail == NULL)
14943 goto discard_descr;
14944 descr_tail = &(*descr_tail)->dw_loc_next;
14945 }
14946 }
14947
14948 /* If there were any non-empty expressions, add padding till the end of
14949 the decl. */
14950 if (descr != NULL && decl_size != 0)
14951 {
14952 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
14953 if (*descr_tail == NULL)
14954 goto discard_descr;
14955 }
14956 return descr;
14957
14958 discard_descr:
14959 /* Discard the descriptor and release any addr_table entries it uses. */
14960 remove_loc_list_addr_table_entries (descr);
14961 return NULL;
14962 }
14963
14964 /* Return the dwarf representation of the location list LOC_LIST of
14965 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
14966 function. */
14967
14968 static dw_loc_list_ref
14969 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
14970 {
14971 const char *endname, *secname;
14972 rtx varloc;
14973 enum var_init_status initialized;
14974 struct var_loc_node *node;
14975 dw_loc_descr_ref descr;
14976 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
14977 dw_loc_list_ref list = NULL;
14978 dw_loc_list_ref *listp = &list;
14979
14980 /* Now that we know what section we are using for a base,
14981 actually construct the list of locations.
14982 The first location information is what is passed to the
14983 function that creates the location list, and the remaining
14984 locations just get added on to that list.
14985 Note that we only know the start address for a location
14986 (i.e. where the location changes), so to build the range, we use
14987 the range [current location start, next location start].
14988 This means we have to special case the last node, and generate
14989 a range of [last location start, end of function label]. */
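/* For example (illustrative): if the value lives in register 2 from a label,
say .LVL1, up to .LVL2, and in memory from .LVL2 until the end of the
function, two entries are generated:
[.LVL1, .LVL2) DW_OP_reg2
[.LVL2, <function end label>) <memory location expression>
each range being relative to the base section chosen by
secname_for_decl. */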
14990
14991 secname = secname_for_decl (decl);
14992
14993 for (node = loc_list->first; node; node = node->next)
14994 if (GET_CODE (node->loc) == EXPR_LIST
14995 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
14996 {
14997 if (GET_CODE (node->loc) == EXPR_LIST)
14998 {
14999 /* This requires DW_OP_{,bit_}piece, which is not usable
15000 inside DWARF expressions. */
15001 if (want_address != 2)
15002 continue;
15003 descr = dw_sra_loc_expr (decl, node->loc);
15004 if (descr == NULL)
15005 continue;
15006 }
15007 else
15008 {
15009 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
15010 varloc = NOTE_VAR_LOCATION (node->loc);
15011 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
15012 }
15013 if (descr)
15014 {
15015 bool range_across_switch = false;
15016 /* If section switch happens in between node->label
15017 and node->next->label (or end of function) and
15018 we can't emit it as a single entry list,
15019 emit two ranges, first one ending at the end
15020 of first partition and second one starting at the
15021 beginning of second partition. */
15022 if (node == loc_list->last_before_switch
15023 && (node != loc_list->first || loc_list->first->next)
15024 && current_function_decl)
15025 {
15026 endname = cfun->fde->dw_fde_end;
15027 range_across_switch = true;
15028 }
15029 /* The variable has a location between NODE->LABEL and
15030 NODE->NEXT->LABEL. */
15031 else if (node->next)
15032 endname = node->next->label;
15033 /* If the variable has a location at the last label
15034 it keeps its location until the end of function. */
15035 else if (!current_function_decl)
15036 endname = text_end_label;
15037 else
15038 {
15039 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
15040 current_function_funcdef_no);
15041 endname = ggc_strdup (label_id);
15042 }
15043
15044 *listp = new_loc_list (descr, node->label, endname, secname);
15045 if (TREE_CODE (decl) == PARM_DECL
15046 && node == loc_list->first
15047 && NOTE_P (node->loc)
15048 && strcmp (node->label, endname) == 0)
15049 (*listp)->force = true;
15050 listp = &(*listp)->dw_loc_next;
15051
15052 if (range_across_switch)
15053 {
15054 if (GET_CODE (node->loc) == EXPR_LIST)
15055 descr = dw_sra_loc_expr (decl, node->loc);
15056 else
15057 {
15058 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
15059 varloc = NOTE_VAR_LOCATION (node->loc);
15060 descr = dw_loc_list_1 (decl, varloc, want_address,
15061 initialized);
15062 }
15063 gcc_assert (descr);
15064 /* The variable has a location between NODE->LABEL and
15065 NODE->NEXT->LABEL. */
15066 if (node->next)
15067 endname = node->next->label;
15068 else
15069 endname = cfun->fde->dw_fde_second_end;
15070 *listp = new_loc_list (descr,
15071 cfun->fde->dw_fde_second_begin,
15072 endname, secname);
15073 listp = &(*listp)->dw_loc_next;
15074 }
15075 }
15076 }
15077
15078 /* Try to avoid the overhead of a location list by emitting a location
15079 expression instead, but only if we didn't have more than one
15080 location entry in the first place. If some entries were not
15081 representable, we don't want to pretend that a single entry that was
15082 representable applies to the entire scope in which the variable is
15083 available. */
15084 if (list && loc_list->first->next)
15085 gen_llsym (list);
15086
15087 return list;
15088 }
15089
15090 /* Return whether the loc_list has only a single element and thus can be
15091 represented as a location description. */
15092
15093 static bool
15094 single_element_loc_list_p (dw_loc_list_ref list)
15095 {
15096 gcc_assert (!list->dw_loc_next || list->ll_symbol);
15097 return !list->ll_symbol;
15098 }
15099
15100 /* To each location in list LIST add loc descr REF. */
15101
15102 static void
15103 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
15104 {
15105 dw_loc_descr_ref copy;
15106 add_loc_descr (&list->expr, ref);
15107 list = list->dw_loc_next;
15108 while (list)
15109 {
15110 copy = ggc_alloc<dw_loc_descr_node> ();
15111 memcpy (copy, ref, sizeof (dw_loc_descr_node));
15112 add_loc_descr (&list->expr, copy);
15113 while (copy->dw_loc_next)
15114 {
15115 dw_loc_descr_ref new_copy = ggc_alloc<dw_loc_descr_node> ();
15116 memcpy (new_copy, copy->dw_loc_next, sizeof (dw_loc_descr_node));
15117 copy->dw_loc_next = new_copy;
15118 copy = new_copy;
15119 }
15120 list = list->dw_loc_next;
15121 }
15122 }
15123
15124 /* Given two lists RET and LIST,
15125 produce a location list that is the result of adding the expression in LIST
15126 to the expression in RET at each position in the program.
15127 Might be destructive on both RET and LIST.
15128
15129 TODO: We handle only simple cases of RET or LIST having at most one
15130 element. The general case would involve sorting the lists in program order
15131 and merging them, which will need some additional work.
15132 Adding that will improve the quality of debug info, especially for SRA-ed
15133 structures. */
15134
15135 static void
15136 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
15137 {
15138 if (!list)
15139 return;
15140 if (!*ret)
15141 {
15142 *ret = list;
15143 return;
15144 }
15145 if (!list->dw_loc_next)
15146 {
15147 add_loc_descr_to_each (*ret, list->expr);
15148 return;
15149 }
15150 if (!(*ret)->dw_loc_next)
15151 {
15152 add_loc_descr_to_each (list, (*ret)->expr);
15153 *ret = list;
15154 return;
15155 }
15156 expansion_failed (NULL_TREE, NULL_RTX,
15157 "Don't know how to merge two non-trivial"
15158 " location lists.\n");
15159 *ret = NULL;
15160 return;
15161 }
15162
15163 /* LOC is a constant expression. Try our luck: look it up in the constant
15164 pool and return the loc_descr of its address. */
15165
15166 static dw_loc_descr_ref
15167 cst_pool_loc_descr (tree loc)
15168 {
15169 /* Get an RTL for this, if something has been emitted. */
15170 rtx rtl = lookup_constant_def (loc);
15171
15172 if (!rtl || !MEM_P (rtl))
15173 {
15174 gcc_assert (!rtl);
15175 return 0;
15176 }
15177 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
15178
15179 /* TODO: We might get more coverage if we were actually delaying expansion
15180 of all expressions until the end of compilation, when constant pools are
15181 fully populated. */
15182 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
15183 {
15184 expansion_failed (loc, NULL_RTX,
15185 "CST value in contant pool but not marked.");
15186 return 0;
15187 }
15188 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15189 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
15190 }
15191
15192 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
15193 by looking for an inner INDIRECT_REF expression and turning
15194 it into simple arithmetic.
15195
15196 See loc_list_from_tree for the meaning of CONTEXT. */
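/* For example (illustrative): for &ptr->field where FIELD sits at byte
offset 8 and PTR itself has been optimized into a value expression, the
result computes the value of PTR, applies DW_OP_plus_uconst 8 and ends
with DW_OP_stack_value, so the "address" is described without ever
materializing it in memory. */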
15197
15198 static dw_loc_list_ref
15199 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
15200 const loc_descr_context *context)
15201 {
15202 tree obj, offset;
15203 HOST_WIDE_INT bitsize, bitpos, bytepos;
15204 machine_mode mode;
15205 int unsignedp, reversep, volatilep = 0;
15206 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15207
15208 obj = get_inner_reference (TREE_OPERAND (loc, 0),
15209 &bitsize, &bitpos, &offset, &mode,
15210 &unsignedp, &reversep, &volatilep);
15211 STRIP_NOPS (obj);
15212 if (bitpos % BITS_PER_UNIT)
15213 {
15214 expansion_failed (loc, NULL_RTX, "bitfield access");
15215 return 0;
15216 }
15217 if (!INDIRECT_REF_P (obj))
15218 {
15219 expansion_failed (obj,
15220 NULL_RTX, "no indirect ref in inner reference");
15221 return 0;
15222 }
15223 if (!offset && !bitpos)
15224 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
15225 context);
15226 else if (toplev
15227 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
15228 && (dwarf_version >= 4 || !dwarf_strict))
15229 {
15230 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
15231 if (!list_ret)
15232 return 0;
15233 if (offset)
15234 {
15235 /* Variable offset. */
15236 list_ret1 = loc_list_from_tree (offset, 0, context);
15237 if (list_ret1 == 0)
15238 return 0;
15239 add_loc_list (&list_ret, list_ret1);
15240 if (!list_ret)
15241 return 0;
15242 add_loc_descr_to_each (list_ret,
15243 new_loc_descr (DW_OP_plus, 0, 0));
15244 }
15245 bytepos = bitpos / BITS_PER_UNIT;
15246 if (bytepos > 0)
15247 add_loc_descr_to_each (list_ret,
15248 new_loc_descr (DW_OP_plus_uconst,
15249 bytepos, 0));
15250 else if (bytepos < 0)
15251 loc_list_plus_const (list_ret, bytepos);
15252 add_loc_descr_to_each (list_ret,
15253 new_loc_descr (DW_OP_stack_value, 0, 0));
15254 }
15255 return list_ret;
15256 }
15257
15258 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
15259 operations from LOC are nops, move to the last one. Insert in NOPS all
15260 operations that are skipped. */
15261
15262 static void
15263 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
15264 hash_set<dw_loc_descr_ref> &nops)
15265 {
15266 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
15267 {
15268 nops.add (loc);
15269 loc = loc->dw_loc_next;
15270 }
15271 }
15272
15273 /* Helper for loc_descr_without_nops: free the location description operation
15274 P. */
15275
15276 bool
15277 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
15278 {
15279 ggc_free (loc);
15280 return true;
15281 }
15282
15283 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
15284 finishes LOC. */
15285
15286 static void
15287 loc_descr_without_nops (dw_loc_descr_ref &loc)
15288 {
15289 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
15290 return;
15291
15292 /* Set of all DW_OP_nop operations we remove. */
15293 hash_set<dw_loc_descr_ref> nops;
15294
15295 /* First, strip all prefix NOP operations in order to keep the head of the
15296 operations list. */
15297 loc_descr_to_next_no_nop (loc, nops);
15298
15299 for (dw_loc_descr_ref cur = loc; cur != NULL;)
15300 {
15301 /* For control flow operations: strip "prefix" nops in destination
15302 labels. */
15303 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
15304 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
15305 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
15306 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
15307
15308 /* Do the same for the operations that follow, then move to the next
15309 iteration. */
15310 if (cur->dw_loc_next != NULL)
15311 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
15312 cur = cur->dw_loc_next;
15313 }
15314
15315 nops.traverse<void *, free_loc_descr> (NULL);
15316 }
15317
15318
15319 struct dwarf_procedure_info;
15320
15321 /* Helper structure for location descriptions generation. */
15322 struct loc_descr_context
15323 {
15324 /* The type that is implicitly referenced by DW_OP_push_object_address, or
15325 NULL_TREE if DW_OP_push_object_address is invalid for this location
15326 description. This is used when processing PLACEHOLDER_EXPR nodes. */
15327 tree context_type;
15328 /* The ..._DECL node that should be translated as a
15329 DW_OP_push_object_address operation. */
15330 tree base_decl;
15331 /* Information about the DWARF procedure we are currently generating. NULL if
15332 we are not generating a DWARF procedure. */
15333 struct dwarf_procedure_info *dpi;
15334 };
15335
15336 /* DWARF procedures generation
15337
15338 DWARF expressions (a.k.a. location descriptions) are used to encode variable
15339 quantities such as sizes or offsets. Such computations can have redundant parts
15340 that can be factorized in order to reduce the size of the output debug
15341 information. This is the whole point of DWARF procedures.
15342
15343 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
15344 already factorized into functions ("size functions") in order to handle very
15345 big and complex types. Such functions are quite simple: they have integral
15346 arguments, they return an integral result and their body contains only a
15347 return statement with arithmetic expressions. This is the only kind of
15348 function we are interested in translating into DWARF procedures, here.
15349
15350 DWARF expressions and DWARF procedures are executed using a stack, so we have
15351 to define some calling convention for them to interact. Let's say that:
15352
15353 - Before calling a DWARF procedure, DWARF expressions must push on the stack
15354 all arguments in reverse order (right-to-left) so that when the DWARF
15355 procedure execution starts, the first argument is the top of the stack.
15356
15357 - Then, when returning, the DWARF procedure must have consumed all arguments
15358 on the stack, must have pushed the result and touched nothing else.
15359
15360 - Each integral argument and the result are of integral types that can be
15361 held in a single stack slot.
15362
15363 - We call "frame offset" the number of stack slots that are "under DWARF
15364 procedure control": it includes the arguments slots, the temporaries and
15365 the result slot. Thus, it is equal to the number of arguments when the
15366 procedure execution starts and must be equal to one (the result) when it
15367 returns. */
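/* For example (illustrative): a size function SZ (A, B) is called from a
DWARF expression as
<push B> <push A> DW_OP_call4 <DIE of SZ>
so that A is on top of the stack when the procedure starts. The procedure
consumes both argument slots and pushes its result, hence its net stack
usage is 1 - 2 = -1, which is what gets recorded in
dwarf_proc_stack_usage_map. */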
15368
15369 /* Helper structure used when generating operations for a DWARF procedure. */
15370 struct dwarf_procedure_info
15371 {
15372 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
15373 currently translated. */
15374 tree fndecl;
15375 /* The number of arguments FNDECL takes. */
15376 unsigned args_count;
15377 };
15378
15379 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
15380 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
15381 equate it to this DIE. */
15382
15383 static dw_die_ref
15384 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
15385 dw_die_ref parent_die)
15386 {
15387 dw_die_ref dwarf_proc_die;
15388
15389 if ((dwarf_version < 3 && dwarf_strict)
15390 || location == NULL)
15391 return NULL;
15392
15393 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
15394 if (fndecl)
15395 equate_decl_number_to_die (fndecl, dwarf_proc_die);
15396 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
15397 return dwarf_proc_die;
15398 }
15399
15400 /* Return whether TYPE is a supported type as a DWARF procedure argument
15401 type or return type (we handle only scalar types and pointer types that
15402 aren't wider than the DWARF expression evaluation stack). */
15403
15404 static bool
15405 is_handled_procedure_type (tree type)
15406 {
15407 return ((INTEGRAL_TYPE_P (type)
15408 || TREE_CODE (type) == OFFSET_TYPE
15409 || TREE_CODE (type) == POINTER_TYPE)
15410 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
15411 }
15412
15413 /* Helper for resolve_args_picking: do the same but stop when coming across
15414 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
15415 offset *before* evaluating the corresponding operation. */
15416
15417 static bool
15418 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15419 struct dwarf_procedure_info *dpi,
15420 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
15421 {
15422 /* The "frame_offset" identifier is already used to name a macro... */
15423 unsigned frame_offset_ = initial_frame_offset;
15424 dw_loc_descr_ref l;
15425
15426 for (l = loc; l != NULL;)
15427 {
15428 bool existed;
15429 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
15430
15431 /* If we already met this node, there is nothing to compute anymore. */
15432 if (existed)
15433 {
15434 /* Make sure that the stack size is consistent wherever the execution
15435 flow comes from. */
15436 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
15437 break;
15438 }
15439 l_frame_offset = frame_offset_;
15440
15441 /* If needed, relocate the picking offset with respect to the frame
15442 offset. */
15443 if (l->dw_loc_opc == DW_OP_pick && l->frame_offset_rel)
15444 {
15445 /* frame_offset_ is the size of the current stack frame, including
15446 incoming arguments. Besides, the arguments are pushed
15447 right-to-left. Thus, in order to access the Nth argument from
15448 this operation node, the picking has to skip temporaries *plus*
15449 one stack slot per argument (0 for the first one, 1 for the second
15450 one, etc.).
15451
15452 The targeted argument number (N) is already set as the operand,
15453 and the number of temporaries can be computed with:
15454 frame_offset_ - dpi->args_count */
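/* For example (illustrative): in a procedure with two arguments, when one
temporary is live frame_offset_ is 3, so a DW_OP_pick 0 that targets the
first argument is rewritten as DW_OP_pick 1 (0 + 3 - 2) in order to skip
the temporary sitting above it on the stack. */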
15455 l->dw_loc_oprnd1.v.val_unsigned += frame_offset_ - dpi->args_count;
15456
15457 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
15458 if (l->dw_loc_oprnd1.v.val_unsigned > 255)
15459 return false;
15460 }
15461
15462 /* Update frame_offset according to the effect the current operation has
15463 on the stack. */
15464 switch (l->dw_loc_opc)
15465 {
15466 case DW_OP_deref:
15467 case DW_OP_swap:
15468 case DW_OP_rot:
15469 case DW_OP_abs:
15470 case DW_OP_neg:
15471 case DW_OP_not:
15472 case DW_OP_plus_uconst:
15473 case DW_OP_skip:
15474 case DW_OP_reg0:
15475 case DW_OP_reg1:
15476 case DW_OP_reg2:
15477 case DW_OP_reg3:
15478 case DW_OP_reg4:
15479 case DW_OP_reg5:
15480 case DW_OP_reg6:
15481 case DW_OP_reg7:
15482 case DW_OP_reg8:
15483 case DW_OP_reg9:
15484 case DW_OP_reg10:
15485 case DW_OP_reg11:
15486 case DW_OP_reg12:
15487 case DW_OP_reg13:
15488 case DW_OP_reg14:
15489 case DW_OP_reg15:
15490 case DW_OP_reg16:
15491 case DW_OP_reg17:
15492 case DW_OP_reg18:
15493 case DW_OP_reg19:
15494 case DW_OP_reg20:
15495 case DW_OP_reg21:
15496 case DW_OP_reg22:
15497 case DW_OP_reg23:
15498 case DW_OP_reg24:
15499 case DW_OP_reg25:
15500 case DW_OP_reg26:
15501 case DW_OP_reg27:
15502 case DW_OP_reg28:
15503 case DW_OP_reg29:
15504 case DW_OP_reg30:
15505 case DW_OP_reg31:
15506 case DW_OP_bregx:
15507 case DW_OP_piece:
15508 case DW_OP_deref_size:
15509 case DW_OP_nop:
15510 case DW_OP_form_tls_address:
15511 case DW_OP_bit_piece:
15512 case DW_OP_implicit_value:
15513 case DW_OP_stack_value:
15514 break;
15515
15516 case DW_OP_addr:
15517 case DW_OP_const1u:
15518 case DW_OP_const1s:
15519 case DW_OP_const2u:
15520 case DW_OP_const2s:
15521 case DW_OP_const4u:
15522 case DW_OP_const4s:
15523 case DW_OP_const8u:
15524 case DW_OP_const8s:
15525 case DW_OP_constu:
15526 case DW_OP_consts:
15527 case DW_OP_dup:
15528 case DW_OP_over:
15529 case DW_OP_pick:
15530 case DW_OP_lit0:
15531 case DW_OP_lit1:
15532 case DW_OP_lit2:
15533 case DW_OP_lit3:
15534 case DW_OP_lit4:
15535 case DW_OP_lit5:
15536 case DW_OP_lit6:
15537 case DW_OP_lit7:
15538 case DW_OP_lit8:
15539 case DW_OP_lit9:
15540 case DW_OP_lit10:
15541 case DW_OP_lit11:
15542 case DW_OP_lit12:
15543 case DW_OP_lit13:
15544 case DW_OP_lit14:
15545 case DW_OP_lit15:
15546 case DW_OP_lit16:
15547 case DW_OP_lit17:
15548 case DW_OP_lit18:
15549 case DW_OP_lit19:
15550 case DW_OP_lit20:
15551 case DW_OP_lit21:
15552 case DW_OP_lit22:
15553 case DW_OP_lit23:
15554 case DW_OP_lit24:
15555 case DW_OP_lit25:
15556 case DW_OP_lit26:
15557 case DW_OP_lit27:
15558 case DW_OP_lit28:
15559 case DW_OP_lit29:
15560 case DW_OP_lit30:
15561 case DW_OP_lit31:
15562 case DW_OP_breg0:
15563 case DW_OP_breg1:
15564 case DW_OP_breg2:
15565 case DW_OP_breg3:
15566 case DW_OP_breg4:
15567 case DW_OP_breg5:
15568 case DW_OP_breg6:
15569 case DW_OP_breg7:
15570 case DW_OP_breg8:
15571 case DW_OP_breg9:
15572 case DW_OP_breg10:
15573 case DW_OP_breg11:
15574 case DW_OP_breg12:
15575 case DW_OP_breg13:
15576 case DW_OP_breg14:
15577 case DW_OP_breg15:
15578 case DW_OP_breg16:
15579 case DW_OP_breg17:
15580 case DW_OP_breg18:
15581 case DW_OP_breg19:
15582 case DW_OP_breg20:
15583 case DW_OP_breg21:
15584 case DW_OP_breg22:
15585 case DW_OP_breg23:
15586 case DW_OP_breg24:
15587 case DW_OP_breg25:
15588 case DW_OP_breg26:
15589 case DW_OP_breg27:
15590 case DW_OP_breg28:
15591 case DW_OP_breg29:
15592 case DW_OP_breg30:
15593 case DW_OP_breg31:
15594 case DW_OP_fbreg:
15595 case DW_OP_push_object_address:
15596 case DW_OP_call_frame_cfa:
15597 ++frame_offset_;
15598 break;
15599
15600 case DW_OP_drop:
15601 case DW_OP_xderef:
15602 case DW_OP_and:
15603 case DW_OP_div:
15604 case DW_OP_minus:
15605 case DW_OP_mod:
15606 case DW_OP_mul:
15607 case DW_OP_or:
15608 case DW_OP_plus:
15609 case DW_OP_shl:
15610 case DW_OP_shr:
15611 case DW_OP_shra:
15612 case DW_OP_xor:
15613 case DW_OP_bra:
15614 case DW_OP_eq:
15615 case DW_OP_ge:
15616 case DW_OP_gt:
15617 case DW_OP_le:
15618 case DW_OP_lt:
15619 case DW_OP_ne:
15620 case DW_OP_regx:
15621 case DW_OP_xderef_size:
15622 --frame_offset_;
15623 break;
15624
15625 case DW_OP_call2:
15626 case DW_OP_call4:
15627 case DW_OP_call_ref:
15628 {
15629 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
15630 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
15631
15632 if (stack_usage == NULL)
15633 return false;
15634 frame_offset_ += *stack_usage;
15635 break;
15636 }
15637
15638 case DW_OP_GNU_push_tls_address:
15639 case DW_OP_GNU_uninit:
15640 case DW_OP_GNU_encoded_addr:
15641 case DW_OP_GNU_implicit_pointer:
15642 case DW_OP_GNU_entry_value:
15643 case DW_OP_GNU_const_type:
15644 case DW_OP_GNU_regval_type:
15645 case DW_OP_GNU_deref_type:
15646 case DW_OP_GNU_convert:
15647 case DW_OP_GNU_reinterpret:
15648 case DW_OP_GNU_parameter_ref:
15649 /* loc_list_from_tree will probably not output these operations for
15650 size functions, so assume they will not appear here. */
15651 /* Fall through... */
15652
15653 default:
15654 gcc_unreachable ();
15655 }
15656
15657 /* Now, follow the control flow (except subroutine calls). */
15658 switch (l->dw_loc_opc)
15659 {
15660 case DW_OP_bra:
15661 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
15662 frame_offsets))
15663 return false;
15664 /* Fall through. */
15665
15666 case DW_OP_skip:
15667 l = l->dw_loc_oprnd1.v.val_loc;
15668 break;
15669
15670 case DW_OP_stack_value:
15671 return true;
15672
15673 default:
15674 l = l->dw_loc_next;
15675 break;
15676 }
15677 }
15678
15679 return true;
15680 }
15681
15682 /* Make a DFS over operations reachable through LOC (i.e. follow branch
15683 operations) in order to resolve the operand of DW_OP_pick operations that
15684 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
15685 offset *before* LOC is executed. Return whether all relocations were
15686 successful. */
15687
15688 static bool
15689 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15690 struct dwarf_procedure_info *dpi)
15691 {
15692 /* Associate to all visited operations the frame offset *before* evaluating
15693 this operation. */
15694 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
15695
15696 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
15697 frame_offsets);
15698 }
15699
15700 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
15701 Return NULL if it is not possible. */
15702
15703 static dw_die_ref
15704 function_to_dwarf_procedure (tree fndecl)
15705 {
15706 struct loc_descr_context ctx;
15707 struct dwarf_procedure_info dpi;
15708 dw_die_ref dwarf_proc_die;
15709 tree tree_body = DECL_SAVED_TREE (fndecl);
15710 dw_loc_descr_ref loc_body, epilogue;
15711
15712 tree cursor;
15713 unsigned i;
15714
15715 /* Do not generate multiple DWARF procedures for the same function
15716 declaration. */
15717 dwarf_proc_die = lookup_decl_die (fndecl);
15718 if (dwarf_proc_die != NULL)
15719 return dwarf_proc_die;
15720
15721 /* DWARF procedures are available starting with the DWARFv3 standard. */
15722 if (dwarf_version < 3 && dwarf_strict)
15723 return NULL;
15724
15725 /* We handle only functions for which we still have a body, that return a
15726 supported type and that take arguments with supported types. Note that
15727 there is no point translating functions that return nothing. */
15728 if (tree_body == NULL_TREE
15729 || DECL_RESULT (fndecl) == NULL_TREE
15730 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
15731 return NULL;
15732
15733 for (cursor = DECL_ARGUMENTS (fndecl);
15734 cursor != NULL_TREE;
15735 cursor = TREE_CHAIN (cursor))
15736 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
15737 return NULL;
15738
15739 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
15740 if (TREE_CODE (tree_body) != RETURN_EXPR)
15741 return NULL;
15742 tree_body = TREE_OPERAND (tree_body, 0);
15743 if (TREE_CODE (tree_body) != MODIFY_EXPR
15744 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
15745 return NULL;
15746 tree_body = TREE_OPERAND (tree_body, 1);
15747
15748 /* Try to translate the body expression itself. Note that this will probably
15749 cause an infinite recursion if its call graph has a cycle. This is very
15750 unlikely for size functions, however, so don't bother with such things at
15751 the moment. */
15752 ctx.context_type = NULL_TREE;
15753 ctx.base_decl = NULL_TREE;
15754 ctx.dpi = &dpi;
15755 dpi.fndecl = fndecl;
15756 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
15757 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
15758 if (!loc_body)
15759 return NULL;
15760
15761 /* After evaluating all operands in "loc_body", we should still have on the
15762 stack all arguments plus the desired function result (top of the stack).
15763 Generate code in order to keep only the result in our stack frame. */
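/* For example (illustrative): with two arguments the stack is, from the top,
[result, arg1, arg2]; each DW_OP_swap/DW_OP_drop pair emitted below
removes the slot just under the result, so after two pairs only the
result remains. */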
15764 epilogue = NULL;
15765 for (i = 0; i < dpi.args_count; ++i)
15766 {
15767 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
15768 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
15769 op_couple->dw_loc_next->dw_loc_next = epilogue;
15770 epilogue = op_couple;
15771 }
15772 add_loc_descr (&loc_body, epilogue);
15773 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
15774 return NULL;
15775
15776 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
15777 because they are considered useful. Now that there is an epilogue, they
15778 no longer are, so give it another try. */
15779 loc_descr_without_nops (loc_body);
15780
15781 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
15782 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
15783 though, given that size functions do not come from source, so they should
15784 not have a dedicated DW_TAG_subprogram DIE. */
15785 dwarf_proc_die
15786 = new_dwarf_proc_die (loc_body, fndecl,
15787 get_context_die (DECL_CONTEXT (fndecl)));
15788
15789 /* The called DWARF procedure consumes one stack slot per argument and
15790 returns one stack slot. */
15791 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
15792
15793 return dwarf_proc_die;
15794 }
15795
15796
15797 /* Generate a Dwarf location list representing LOC.
15798 If WANT_ADDRESS is false, an expression computing LOC will be returned.
15799 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
15800 If WANT_ADDRESS is 2, an expression computing an address usable in a location
15801 will be returned (i.e. DW_OP_reg can be used
15802 to refer to register values).
15803
15804 CONTEXT provides information to customize the location descriptions
15805 generation. Its context_type field specifies what type is implicitly
15806 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
15807 will not be generated.
15808
15809 Its DPI field determines whether we are generating a DWARF expression for a
15810 DWARF procedure, so PARM_DECL references are processed specifically.
15811
15812 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
15813 and dpi fields were null. */
15814
15815 static dw_loc_list_ref
15816 loc_list_from_tree_1 (tree loc, int want_address,
15817 const struct loc_descr_context *context)
15818 {
15819 dw_loc_descr_ref ret = NULL, ret1 = NULL;
15820 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15821 int have_address = 0;
15822 enum dwarf_location_atom op;
15823
15824 /* ??? Most of the time we do not take proper care to sign/zero
15825 extend the values. Hopefully this won't be a real
15826 problem... */
15827
15828 if (context != NULL
15829 && context->base_decl == loc
15830 && want_address == 0)
15831 {
15832 if (dwarf_version >= 3 || !dwarf_strict)
15833 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
15834 NULL, NULL, NULL);
15835 else
15836 return NULL;
15837 }
15838
15839 switch (TREE_CODE (loc))
15840 {
15841 case ERROR_MARK:
15842 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
15843 return 0;
15844
15845 case PLACEHOLDER_EXPR:
15846 /* This case involves extracting fields from an object to determine the
15847 position of other fields. It is supposed to appear only as the first
15848 operand of COMPONENT_REF nodes and to reference precisely the type
15849 that the context allows. */
15850 if (context != NULL
15851 && TREE_TYPE (loc) == context->context_type
15852 && want_address >= 1)
15853 {
15854 if (dwarf_version >= 3 || !dwarf_strict)
15855 {
15856 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
15857 have_address = 1;
15858 break;
15859 }
15860 else
15861 return NULL;
15862 }
15863 else
15864 expansion_failed (loc, NULL_RTX,
15865 "PLACEHOLDER_EXPR for an unexpected type");
15866 break;
15867
15868 case CALL_EXPR:
15869 {
15870 const int nargs = call_expr_nargs (loc);
15871 tree callee = get_callee_fndecl (loc);
15872 int i;
15873 dw_die_ref dwarf_proc;
15874
15875 if (callee == NULL_TREE)
15876 goto call_expansion_failed;
15877
15878 /* We handle only functions that return an integer. */
15879 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
15880 goto call_expansion_failed;
15881
15882 dwarf_proc = function_to_dwarf_procedure (callee);
15883 if (dwarf_proc == NULL)
15884 goto call_expansion_failed;
15885
15886 /* Evaluate arguments right-to-left so that the first argument will
15887 be the top-most one on the stack. */
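/* For a call such as f (a, b, c), this pushes c first, then b, then a,
   so that a ends up on top of the stack and is the argument designated by
   the callee's DW_OP_pick 0 (before frame-offset adjustment).  */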
15888 for (i = nargs - 1; i >= 0; --i)
15889 {
15890 dw_loc_descr_ref loc_descr
15891 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
15892 context);
15893
15894 if (loc_descr == NULL)
15895 goto call_expansion_failed;
15896
15897 add_loc_descr (&ret, loc_descr);
15898 }
15899
15900 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
15901 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15902 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
15903 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
15904 add_loc_descr (&ret, ret1);
15905 break;
15906
15907 call_expansion_failed:
15908 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
15909 /* We could not describe this call as a DWARF expression. */
15910 return 0;
15911 }
15912
15913 case PREINCREMENT_EXPR:
15914 case PREDECREMENT_EXPR:
15915 case POSTINCREMENT_EXPR:
15916 case POSTDECREMENT_EXPR:
15917 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
15918 /* There are no opcodes for these operations. */
15919 return 0;
15920
15921 case ADDR_EXPR:
15922 /* If we already want an address, see if there is an INDIRECT_REF inside,
15923 e.g. for &this->field. */
15924 if (want_address)
15925 {
15926 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
15927 (loc, want_address == 2, context);
15928 if (list_ret)
15929 have_address = 1;
15930 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
15931 && (ret = cst_pool_loc_descr (loc)))
15932 have_address = 1;
15933 }
15934 /* Otherwise, process the argument and look for the address. */
15935 if (!list_ret && !ret)
15936 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
15937 else
15938 {
15939 if (want_address)
15940 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
15941 return NULL;
15942 }
15943 break;
15944
15945 case VAR_DECL:
15946 if (DECL_THREAD_LOCAL_P (loc))
15947 {
15948 rtx rtl;
15949 enum dwarf_location_atom tls_op;
15950 enum dtprel_bool dtprel = dtprel_false;
15951
15952 if (targetm.have_tls)
15953 {
15954 /* If this is not defined, we have no way to emit the
15955 data. */
15956 if (!targetm.asm_out.output_dwarf_dtprel)
15957 return 0;
15958
15959 /* The way DW_OP_GNU_push_tls_address is specified, we
15960 can only look up addresses of objects in the current
15961 module. We used DW_OP_addr as first op, but that's
15962 wrong, because DW_OP_addr is relocated by the debug
15963 info consumer, while DW_OP_GNU_push_tls_address
15964 operand shouldn't be. */
15965 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
15966 return 0;
15967 dtprel = dtprel_true;
15968 tls_op = DW_OP_GNU_push_tls_address;
15969 }
15970 else
15971 {
15972 if (!targetm.emutls.debug_form_tls_address
15973 || !(dwarf_version >= 3 || !dwarf_strict))
15974 return 0;
15975 /* We stuffed the control variable into the DECL_VALUE_EXPR
15976 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
15977 no longer appear in gimple code. We used the control
15978 variable specifically so that we could pick it up here. */
15979 loc = DECL_VALUE_EXPR (loc);
15980 tls_op = DW_OP_form_tls_address;
15981 }
15982
15983 rtl = rtl_for_decl_location (loc);
15984 if (rtl == NULL_RTX)
15985 return 0;
15986
15987 if (!MEM_P (rtl))
15988 return 0;
15989 rtl = XEXP (rtl, 0);
15990 if (! CONSTANT_P (rtl))
15991 return 0;
15992
15993 ret = new_addr_loc_descr (rtl, dtprel);
15994 ret1 = new_loc_descr (tls_op, 0, 0);
15995 add_loc_descr (&ret, ret1);
15996
15997 have_address = 1;
15998 break;
15999 }
16000 /* FALLTHRU */
16001
16002 case PARM_DECL:
16003 if (context != NULL && context->dpi != NULL
16004 && DECL_CONTEXT (loc) == context->dpi->fndecl)
16005 {
16006 /* We are generating code for a DWARF procedure and we want to access
16007 one of its arguments: find the appropriate argument offset and let
16008 the resolve_args_picking pass compute the offset that complies
16009 with the stack frame size. */
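/* For instance, with DECL_ARGUMENTS being (a, b, c), a reference to b
   yields DW_OP_pick 1 at this point; resolve_args_picking later rebases
   that index on the actual stack depth at the point of use.  */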
16010 unsigned i = 0;
16011 tree cursor;
16012
16013 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
16014 cursor != NULL_TREE && cursor != loc;
16015 cursor = TREE_CHAIN (cursor), ++i)
16016 ;
16017 /* If we are translating a DWARF procedure, all referenced parameters
16018 must belong to the current function. */
16019 gcc_assert (cursor != NULL_TREE);
16020
16021 ret = new_loc_descr (DW_OP_pick, i, 0);
16022 ret->frame_offset_rel = 1;
16023 break;
16024 }
16025 /* FALLTHRU */
16026
16027 case RESULT_DECL:
16028 if (DECL_HAS_VALUE_EXPR_P (loc))
16029 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
16030 want_address, context);
16031 /* FALLTHRU */
16032
16033 case FUNCTION_DECL:
16034 {
16035 rtx rtl;
16036 var_loc_list *loc_list = lookup_decl_loc (loc);
16037
16038 if (loc_list && loc_list->first)
16039 {
16040 list_ret = dw_loc_list (loc_list, loc, want_address);
16041 have_address = want_address != 0;
16042 break;
16043 }
16044 rtl = rtl_for_decl_location (loc);
16045 if (rtl == NULL_RTX)
16046 {
16047 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
16048 return 0;
16049 }
16050 else if (CONST_INT_P (rtl))
16051 {
16052 HOST_WIDE_INT val = INTVAL (rtl);
16053 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16054 val &= GET_MODE_MASK (DECL_MODE (loc));
16055 ret = int_loc_descriptor (val);
16056 }
16057 else if (GET_CODE (rtl) == CONST_STRING)
16058 {
16059 expansion_failed (loc, NULL_RTX, "CONST_STRING");
16060 return 0;
16061 }
16062 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
16063 ret = new_addr_loc_descr (rtl, dtprel_false);
16064 else
16065 {
16066 machine_mode mode, mem_mode;
16067
16068 /* Certain constructs can only be represented at top-level. */
16069 if (want_address == 2)
16070 {
16071 ret = loc_descriptor (rtl, VOIDmode,
16072 VAR_INIT_STATUS_INITIALIZED);
16073 have_address = 1;
16074 }
16075 else
16076 {
16077 mode = GET_MODE (rtl);
16078 mem_mode = VOIDmode;
16079 if (MEM_P (rtl))
16080 {
16081 mem_mode = mode;
16082 mode = get_address_mode (rtl);
16083 rtl = XEXP (rtl, 0);
16084 have_address = 1;
16085 }
16086 ret = mem_loc_descriptor (rtl, mode, mem_mode,
16087 VAR_INIT_STATUS_INITIALIZED);
16088 }
16089 if (!ret)
16090 expansion_failed (loc, rtl,
16091 "failed to produce loc descriptor for rtl");
16092 }
16093 }
16094 break;
16095
16096 case MEM_REF:
16097 if (!integer_zerop (TREE_OPERAND (loc, 1)))
16098 {
16099 have_address = 1;
16100 goto do_plus;
16101 }
16102 /* Fallthru. */
16103 case INDIRECT_REF:
16104 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16105 have_address = 1;
16106 break;
16107
16108 case TARGET_MEM_REF:
16109 case SSA_NAME:
16110 case DEBUG_EXPR_DECL:
16111 return NULL;
16112
16113 case COMPOUND_EXPR:
16114 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
16115 context);
16116
16117 CASE_CONVERT:
16118 case VIEW_CONVERT_EXPR:
16119 case SAVE_EXPR:
16120 case MODIFY_EXPR:
16121 case NON_LVALUE_EXPR:
16122 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
16123 context);
16124
16125 case COMPONENT_REF:
16126 case BIT_FIELD_REF:
16127 case ARRAY_REF:
16128 case ARRAY_RANGE_REF:
16129 case REALPART_EXPR:
16130 case IMAGPART_EXPR:
16131 {
16132 tree obj, offset;
16133 HOST_WIDE_INT bitsize, bitpos, bytepos;
16134 machine_mode mode;
16135 int unsignedp, reversep, volatilep = 0;
16136
16137 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
16138 &unsignedp, &reversep, &volatilep);
16139
16140 gcc_assert (obj != loc);
16141
16142 list_ret = loc_list_from_tree_1 (obj,
16143 want_address == 2
16144 && !bitpos && !offset ? 2 : 1,
16145 context);
16146 /* TODO: We can extract the value of a small expression via shifting even
16147 for a nonzero bitpos. */
16148 if (list_ret == 0)
16149 return 0;
16150 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
16151 {
16152 expansion_failed (loc, NULL_RTX,
16153 "bitfield access");
16154 return 0;
16155 }
16156
16157 if (offset != NULL_TREE)
16158 {
16159 /* Variable offset. */
16160 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
16161 if (list_ret1 == 0)
16162 return 0;
16163 add_loc_list (&list_ret, list_ret1);
16164 if (!list_ret)
16165 return 0;
16166 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
16167 }
16168
16169 bytepos = bitpos / BITS_PER_UNIT;
16170 if (bytepos > 0)
16171 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
16172 else if (bytepos < 0)
16173 loc_list_plus_const (list_ret, bytepos);
16174
16175 have_address = 1;
16176 break;
16177 }
16178
16179 case INTEGER_CST:
16180 if ((want_address || !tree_fits_shwi_p (loc))
16181 && (ret = cst_pool_loc_descr (loc)))
16182 have_address = 1;
16183 else if (want_address == 2
16184 && tree_fits_shwi_p (loc)
16185 && (ret = address_of_int_loc_descriptor
16186 (int_size_in_bytes (TREE_TYPE (loc)),
16187 tree_to_shwi (loc))))
16188 have_address = 1;
16189 else if (tree_fits_shwi_p (loc))
16190 ret = int_loc_descriptor (tree_to_shwi (loc));
16191 else if (tree_fits_uhwi_p (loc))
16192 ret = uint_loc_descriptor (tree_to_uhwi (loc));
16193 else
16194 {
16195 expansion_failed (loc, NULL_RTX,
16196 "Integer operand is not host integer");
16197 return 0;
16198 }
16199 break;
16200
16201 case CONSTRUCTOR:
16202 case REAL_CST:
16203 case STRING_CST:
16204 case COMPLEX_CST:
16205 if ((ret = cst_pool_loc_descr (loc)))
16206 have_address = 1;
16207 else if (TREE_CODE (loc) == CONSTRUCTOR)
16208 {
16209 tree type = TREE_TYPE (loc);
16210 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
16211 unsigned HOST_WIDE_INT offset = 0;
16212 unsigned HOST_WIDE_INT cnt;
16213 constructor_elt *ce;
16214
16215 if (TREE_CODE (type) == RECORD_TYPE)
16216 {
16217 /* This is very limited, but it's enough to output
16218 pointers to member functions, as long as the
16219 referenced function is defined in the current
16220 translation unit. */
16221 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
16222 {
16223 tree val = ce->value;
16224
16225 tree field = ce->index;
16226
16227 if (val)
16228 STRIP_NOPS (val);
16229
16230 if (!field || DECL_BIT_FIELD (field))
16231 {
16232 expansion_failed (loc, NULL_RTX,
16233 "bitfield in record type constructor");
16234 size = offset = (unsigned HOST_WIDE_INT)-1;
16235 ret = NULL;
16236 break;
16237 }
16238
16239 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
16240 unsigned HOST_WIDE_INT pos = int_byte_position (field);
16241 gcc_assert (pos + fieldsize <= size);
16242 if (pos < offset)
16243 {
16244 expansion_failed (loc, NULL_RTX,
16245 "out-of-order fields in record constructor");
16246 size = offset = (unsigned HOST_WIDE_INT)-1;
16247 ret = NULL;
16248 break;
16249 }
16250 if (pos > offset)
16251 {
16252 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
16253 add_loc_descr (&ret, ret1);
16254 offset = pos;
16255 }
16256 if (val && fieldsize != 0)
16257 {
16258 ret1 = loc_descriptor_from_tree (val, want_address, context);
16259 if (!ret1)
16260 {
16261 expansion_failed (loc, NULL_RTX,
16262 "unsupported expression in field");
16263 size = offset = (unsigned HOST_WIDE_INT)-1;
16264 ret = NULL;
16265 break;
16266 }
16267 add_loc_descr (&ret, ret1);
16268 }
16269 if (fieldsize)
16270 {
16271 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
16272 add_loc_descr (&ret, ret1);
16273 offset = pos + fieldsize;
16274 }
16275 }
16276
16277 if (offset != size)
16278 {
16279 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
16280 add_loc_descr (&ret, ret1);
16281 offset = size;
16282 }
16283
16284 have_address = !!want_address;
16285 }
16286 else
16287 expansion_failed (loc, NULL_RTX,
16288 "constructor of non-record type");
16289 }
16290 else
16291 /* TODO: We could construct small constants here using int_loc_descriptor. */
16292 expansion_failed (loc, NULL_RTX,
16293 "constructor or constant not in constant pool");
16294 break;
16295
16296 case TRUTH_AND_EXPR:
16297 case TRUTH_ANDIF_EXPR:
16298 case BIT_AND_EXPR:
16299 op = DW_OP_and;
16300 goto do_binop;
16301
16302 case TRUTH_XOR_EXPR:
16303 case BIT_XOR_EXPR:
16304 op = DW_OP_xor;
16305 goto do_binop;
16306
16307 case TRUTH_OR_EXPR:
16308 case TRUTH_ORIF_EXPR:
16309 case BIT_IOR_EXPR:
16310 op = DW_OP_or;
16311 goto do_binop;
16312
16313 case FLOOR_DIV_EXPR:
16314 case CEIL_DIV_EXPR:
16315 case ROUND_DIV_EXPR:
16316 case TRUNC_DIV_EXPR:
16317 case EXACT_DIV_EXPR:
16318 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16319 return 0;
16320 op = DW_OP_div;
16321 goto do_binop;
16322
16323 case MINUS_EXPR:
16324 op = DW_OP_minus;
16325 goto do_binop;
16326
16327 case FLOOR_MOD_EXPR:
16328 case CEIL_MOD_EXPR:
16329 case ROUND_MOD_EXPR:
16330 case TRUNC_MOD_EXPR:
16331 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16332 {
16333 op = DW_OP_mod;
16334 goto do_binop;
16335 }
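/* For signed operands, DW_OP_mod is avoided; instead the remainder is
   computed as op0 - (op0 / op1) * op1: DW_OP_over DW_OP_over duplicates
   both operands, DW_OP_div and DW_OP_mul rebuild (op0 / op1) * op1, and
   DW_OP_minus subtracts that from the original op0.  */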
16336 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16337 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16338 if (list_ret == 0 || list_ret1 == 0)
16339 return 0;
16340
16341 add_loc_list (&list_ret, list_ret1);
16342 if (list_ret == 0)
16343 return 0;
16344 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16345 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16346 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
16347 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
16348 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
16349 break;
16350
16351 case MULT_EXPR:
16352 op = DW_OP_mul;
16353 goto do_binop;
16354
16355 case LSHIFT_EXPR:
16356 op = DW_OP_shl;
16357 goto do_binop;
16358
16359 case RSHIFT_EXPR:
16360 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
16361 goto do_binop;
16362
16363 case POINTER_PLUS_EXPR:
16364 case PLUS_EXPR:
16365 do_plus:
16366 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
16367 {
16368 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
16369 smarter to encode their opposite. The DW_OP_plus_uconst operation
16370 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
16371 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
16372 bytes, Y being the size of the operation that pushes the opposite
16373 of the addend. So let's choose the smallest representation. */
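/* Illustrative comparison: with a 64-bit address size, an addend of -1
   encoded via DW_OP_plus_uconst needs a 10-byte ULEB128 operand (the
   unsigned value 0xffffffffffffffff), i.e. 11 bytes in total, whereas
   pushing the opposite with DW_OP_lit1 followed by DW_OP_minus takes
   only 2 bytes.  */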
16374 const tree tree_addend = TREE_OPERAND (loc, 1);
16375 offset_int wi_addend;
16376 HOST_WIDE_INT shwi_addend;
16377 dw_loc_descr_ref loc_naddend;
16378
16379 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16380 if (list_ret == 0)
16381 return 0;
16382
16383 /* Try to get the literal to push. It is the opposite of the addend,
16384 so as we rely on wrapping during DWARF evaluation, first decode
16385 the literal as a "DWARF-sized" signed number. */
16386 wi_addend = wi::to_offset (tree_addend);
16387 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
16388 shwi_addend = wi_addend.to_shwi ();
16389 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
16390 ? int_loc_descriptor (-shwi_addend)
16391 : NULL;
16392
16393 if (loc_naddend != NULL
16394 && ((unsigned) size_of_uleb128 (shwi_addend)
16395 > size_of_loc_descr (loc_naddend)))
16396 {
16397 add_loc_descr_to_each (list_ret, loc_naddend);
16398 add_loc_descr_to_each (list_ret,
16399 new_loc_descr (DW_OP_minus, 0, 0));
16400 }
16401 else
16402 {
16403 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
16404 {
16405 loc_naddend = loc_cur;
16406 loc_cur = loc_cur->dw_loc_next;
16407 ggc_free (loc_naddend);
16408 }
16409 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
16410 }
16411 break;
16412 }
16413
16414 op = DW_OP_plus;
16415 goto do_binop;
16416
16417 case LE_EXPR:
16418 op = DW_OP_le;
16419 goto do_comp_binop;
16420
16421 case GE_EXPR:
16422 op = DW_OP_ge;
16423 goto do_comp_binop;
16424
16425 case LT_EXPR:
16426 op = DW_OP_lt;
16427 goto do_comp_binop;
16428
16429 case GT_EXPR:
16430 op = DW_OP_gt;
16431 goto do_comp_binop;
16432
16433 do_comp_binop:
16434 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
16435 {
16436 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
16437 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
16438 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
16439 TREE_CODE (loc));
16440 break;
16441 }
16442 else
16443 goto do_binop;
16444
16445 case EQ_EXPR:
16446 op = DW_OP_eq;
16447 goto do_binop;
16448
16449 case NE_EXPR:
16450 op = DW_OP_ne;
16451 goto do_binop;
16452
16453 do_binop:
16454 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16455 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16456 if (list_ret == 0 || list_ret1 == 0)
16457 return 0;
16458
16459 add_loc_list (&list_ret, list_ret1);
16460 if (list_ret == 0)
16461 return 0;
16462 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16463 break;
16464
16465 case TRUTH_NOT_EXPR:
16466 case BIT_NOT_EXPR:
16467 op = DW_OP_not;
16468 goto do_unop;
16469
16470 case ABS_EXPR:
16471 op = DW_OP_abs;
16472 goto do_unop;
16473
16474 case NEGATE_EXPR:
16475 op = DW_OP_neg;
16476 goto do_unop;
16477
16478 do_unop:
16479 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16480 if (list_ret == 0)
16481 return 0;
16482
16483 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16484 break;
16485
16486 case MIN_EXPR:
16487 case MAX_EXPR:
16488 {
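/* Rewrite MIN_EXPR/MAX_EXPR as a conditional: MIN (a, b) becomes
   a > b ? b : a and MAX (a, b) becomes a < b ? b : a, then let the
   COND_EXPR handling below emit the corresponding branch sequence.  */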
16489 const enum tree_code code =
16490 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
16491
16492 loc = build3 (COND_EXPR, TREE_TYPE (loc),
16493 build2 (code, integer_type_node,
16494 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
16495 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
16496 }
16497
16498 /* fall through */
16499
16500 case COND_EXPR:
16501 {
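/* The generated expression is laid out as:
   <condition> DW_OP_bra L1  <else-value> DW_OP_skip L2  L1: <then-value>
   L2: DW_OP_nop
   where DW_OP_bra transfers control when the condition is nonzero.  */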
16502 dw_loc_descr_ref lhs
16503 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
16504 dw_loc_list_ref rhs
16505 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
16506 dw_loc_descr_ref bra_node, jump_node, tmp;
16507
16508 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16509 if (list_ret == 0 || lhs == 0 || rhs == 0)
16510 return 0;
16511
16512 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16513 add_loc_descr_to_each (list_ret, bra_node);
16514
16515 add_loc_list (&list_ret, rhs);
16516 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
16517 add_loc_descr_to_each (list_ret, jump_node);
16518
16519 add_loc_descr_to_each (list_ret, lhs);
16520 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16521 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
16522
16523 /* ??? Need a node to point the skip at. Use a nop. */
16524 tmp = new_loc_descr (DW_OP_nop, 0, 0);
16525 add_loc_descr_to_each (list_ret, tmp);
16526 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16527 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
16528 }
16529 break;
16530
16531 case FIX_TRUNC_EXPR:
16532 return 0;
16533
16534 default:
16535 /* Leave front-end specific codes as simply unknown. This comes
16536 up, for instance, with the C STMT_EXPR. */
16537 if ((unsigned int) TREE_CODE (loc)
16538 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
16539 {
16540 expansion_failed (loc, NULL_RTX,
16541 "language specific tree node");
16542 return 0;
16543 }
16544
16545 /* Otherwise this is a generic code; we should just list all of
16546 these explicitly. We forgot one. */
16547 if (flag_checking)
16548 gcc_unreachable ();
16549
16550 /* In a release build, we want to degrade gracefully: better to
16551 generate incomplete debugging information than to crash. */
16552 return NULL;
16553 }
16554
16555 if (!ret && !list_ret)
16556 return 0;
16557
16558 if (want_address == 2 && !have_address
16559 && (dwarf_version >= 4 || !dwarf_strict))
16560 {
16561 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16562 {
16563 expansion_failed (loc, NULL_RTX,
16564 "DWARF address size mismatch");
16565 return 0;
16566 }
16567 if (ret)
16568 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
16569 else
16570 add_loc_descr_to_each (list_ret,
16571 new_loc_descr (DW_OP_stack_value, 0, 0));
16572 have_address = 1;
16573 }
16574 /* Show if we can't fill the request for an address. */
16575 if (want_address && !have_address)
16576 {
16577 expansion_failed (loc, NULL_RTX,
16578 "Want address and only have value");
16579 return 0;
16580 }
16581
16582 gcc_assert (!ret || !list_ret);
16583
16584 /* If we've got an address and don't want one, dereference. */
16585 if (!want_address && have_address)
16586 {
16587 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16588
16589 if (size > DWARF2_ADDR_SIZE || size == -1)
16590 {
16591 expansion_failed (loc, NULL_RTX,
16592 "DWARF address size mismatch");
16593 return 0;
16594 }
16595 else if (size == DWARF2_ADDR_SIZE)
16596 op = DW_OP_deref;
16597 else
16598 op = DW_OP_deref_size;
16599
16600 if (ret)
16601 add_loc_descr (&ret, new_loc_descr (op, size, 0));
16602 else
16603 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
16604 }
16605 if (ret)
16606 list_ret = new_loc_list (ret, NULL, NULL, NULL);
16607
16608 return list_ret;
16609 }
16610
16611 /* Likewise, but strip useless DW_OP_nop operations in the resulting
16612 expressions. */
16613
16614 static dw_loc_list_ref
16615 loc_list_from_tree (tree loc, int want_address,
16616 const struct loc_descr_context *context)
16617 {
16618 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
16619
16620 for (dw_loc_list_ref loc_cur = result;
16621 loc_cur != NULL;
16622 loc_cur = loc_cur->dw_loc_next)
16623 loc_descr_without_nops (loc_cur->expr);
16624 return result;
16625 }
16626
16627 /* Same as above but return only single location expression. */
16628 static dw_loc_descr_ref
16629 loc_descriptor_from_tree (tree loc, int want_address,
16630 const struct loc_descr_context *context)
16631 {
16632 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
16633 if (!ret)
16634 return NULL;
16635 if (ret->dw_loc_next)
16636 {
16637 expansion_failed (loc, NULL_RTX,
16638 "Location list where only loc descriptor needed");
16639 return NULL;
16640 }
16641 return ret->expr;
16642 }
16643
16644 /* Given a value, round it up to the lowest multiple of `boundary'
16645 which is not less than the value itself. */
16646
16647 static inline HOST_WIDE_INT
16648 ceiling (HOST_WIDE_INT value, unsigned int boundary)
16649 {
16650 return (((value + boundary - 1) / boundary) * boundary);
16651 }
16652
16653 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
16654 pointer to the declared type for the relevant field variable, or return
16655 `integer_type_node' if the given node turns out to be an
16656 ERROR_MARK node. */
16657
16658 static inline tree
16659 field_type (const_tree decl)
16660 {
16661 tree type;
16662
16663 if (TREE_CODE (decl) == ERROR_MARK)
16664 return integer_type_node;
16665
16666 type = DECL_BIT_FIELD_TYPE (decl);
16667 if (type == NULL_TREE)
16668 type = TREE_TYPE (decl);
16669
16670 return type;
16671 }
16672
16673 /* Given a pointer to a tree node, return the alignment in bits for
16674 it, or else return BITS_PER_WORD if the node actually turns out to
16675 be an ERROR_MARK node. */
16676
16677 static inline unsigned
16678 simple_type_align_in_bits (const_tree type)
16679 {
16680 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
16681 }
16682
16683 static inline unsigned
16684 simple_decl_align_in_bits (const_tree decl)
16685 {
16686 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
16687 }
16688
16689 /* Return the result of rounding T up to ALIGN. */
16690
16691 static inline offset_int
16692 round_up_to_align (const offset_int &t, unsigned int align)
16693 {
16694 return wi::udiv_trunc (t + align - 1, align) * align;
16695 }
16696
16697 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
16698 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
16699 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
16700 if we fail to return the size in one of these two forms. */
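/* For example, a type whose size is a compile-time constant comes back as
   NULL with *CST_SIZE holding that size in bytes, while a dynamically
   sized type (say an Ada array whose bounds depend on a discriminant)
   comes back as a DWARF expression with *CST_SIZE left at -1.  */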
16701
16702 static dw_loc_descr_ref
16703 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
16704 {
16705 tree tree_size;
16706 struct loc_descr_context ctx;
16707
16708 /* Return a constant integer in priority, if possible. */
16709 *cst_size = int_size_in_bytes (type);
16710 if (*cst_size != -1)
16711 return NULL;
16712
16713 ctx.context_type = const_cast<tree> (type);
16714 ctx.base_decl = NULL_TREE;
16715 ctx.dpi = NULL;
16716
16717 type = TYPE_MAIN_VARIANT (type);
16718 tree_size = TYPE_SIZE_UNIT (type);
16719 return ((tree_size != NULL_TREE)
16720 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
16721 : NULL);
16722 }
16723
16724 /* Helper structure for RECORD_TYPE processing. */
16725 struct vlr_context
16726 {
16727 /* Root RECORD_TYPE. It is needed to generate data member location
16728 descriptions in variable-length records (VLR), but also to cope with
16729 variants, which are composed of nested structures multiplexed with
16730 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
16731 function processing a FIELD_DECL, it is required to be non-null. */
16732 tree struct_type;
16733 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
16734 QUAL_UNION_TYPE), this holds an expression that computes the offset for
16735 this variant part as part of the root record (in storage units). For
16736 regular records, it must be NULL_TREE. */
16737 tree variant_part_offset;
16738 };
16739
16740 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
16741 addressed byte of the "containing object" for the given FIELD_DECL. If
16742 possible, return a native constant through CST_OFFSET (in which case NULL is
16743 returned); otherwise return a DWARF expression that computes the offset.
16744
16745 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
16746 that offset is, either because the argument turns out to be a pointer to an
16747 ERROR_MARK node, or because the offset expression is too complex for us.
16748
16749 CTX is required: see the comment for VLR_CONTEXT. */
16750
16751 static dw_loc_descr_ref
16752 field_byte_offset (const_tree decl, struct vlr_context *ctx,
16753 HOST_WIDE_INT *cst_offset)
16754 {
16755 offset_int object_offset_in_bits;
16756 offset_int object_offset_in_bytes;
16757 offset_int bitpos_int;
16758 bool is_byte_offset_cst, is_bit_offset_cst;
16759 tree tree_result;
16760 dw_loc_list_ref loc_result;
16761
16762 *cst_offset = 0;
16763
16764 if (TREE_CODE (decl) == ERROR_MARK)
16765 return NULL;
16766 else
16767 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
16768
16769 is_bit_offset_cst = TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) == INTEGER_CST;
16770 is_byte_offset_cst = TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST;
16771
16772 /* We cannot handle variable bit offsets at the moment, so abort if that's
16773 the case. */
16774 if (!is_bit_offset_cst)
16775 return NULL;
16776
16777 #ifdef PCC_BITFIELD_TYPE_MATTERS
16778 /* We used to handle only constant offsets in all cases. Now, we properly
16779 handle dynamic byte offsets only when PCC bitfield type doesn't
16780 matter. */
16781 if (PCC_BITFIELD_TYPE_MATTERS && is_byte_offset_cst && is_bit_offset_cst)
16782 {
16783 tree type;
16784 tree field_size_tree;
16785 offset_int deepest_bitpos;
16786 offset_int field_size_in_bits;
16787 unsigned int type_align_in_bits;
16788 unsigned int decl_align_in_bits;
16789 offset_int type_size_in_bits;
16790
16791 bitpos_int = wi::to_offset (bit_position (decl));
16792 type = field_type (decl);
16793 type_size_in_bits = offset_int_type_size_in_bits (type);
16794 type_align_in_bits = simple_type_align_in_bits (type);
16795
16796 field_size_tree = DECL_SIZE (decl);
16797
16798 /* The size could be unspecified if there was an error, or for
16799 a flexible array member. */
16800 if (!field_size_tree)
16801 field_size_tree = bitsize_zero_node;
16802
16803 /* If the size of the field is not constant, use the type size. */
16804 if (TREE_CODE (field_size_tree) == INTEGER_CST)
16805 field_size_in_bits = wi::to_offset (field_size_tree);
16806 else
16807 field_size_in_bits = type_size_in_bits;
16808
16809 decl_align_in_bits = simple_decl_align_in_bits (decl);
16810
16811 /* The GCC front-end doesn't make any attempt to keep track of the
16812 starting bit offset (relative to the start of the containing
16813 structure type) of the hypothetical "containing object" for a
16814 bit-field. Thus, when computing the byte offset value for the
16815 start of the "containing object" of a bit-field, we must deduce
16816 this information on our own. This can be rather tricky to do in
16817 some cases. For example, handling the following structure type
16818 definition when compiling for an i386/i486 target (which only
16819 aligns long long's to 32-bit boundaries) can be very tricky:
16820
16821 struct S { int field1; long long field2:31; };
16822
16823 Fortunately, there is a simple rule-of-thumb which can be used
16824 in such cases. When compiling for an i386/i486, GCC will
16825 allocate 8 bytes for the structure shown above. It decides to
16826 do this based upon one simple rule for bit-field allocation.
16827 GCC allocates each "containing object" for each bit-field at
16828 the first (i.e. lowest addressed) legitimate alignment boundary
16829 (based upon the required minimum alignment for the declared
16830 type of the field) which it can possibly use, subject to the
16831 condition that there is still enough available space remaining
16832 in the containing object (when allocated at the selected point)
16833 to fully accommodate all of the bits of the bit-field itself.
16834
16835 This simple rule makes it obvious why GCC allocates 8 bytes for
16836 each object of the structure type shown above. When looking
16837 for a place to allocate the "containing object" for `field2',
16838 the compiler simply tries to allocate a 64-bit "containing
16839 object" at each successive 32-bit boundary (starting at zero)
16840 until it finds a place to allocate that 64-bit field such that
16841 at least 31 contiguous (and previously unallocated) bits remain
16842 within that selected 64 bit field. (As it turns out, for the
16843 example above, the compiler finds it is OK to allocate the
16844 "containing object" 64-bit field at bit-offset zero within the
16845 structure type.)
16846
16847 Here we attempt to work backwards from the limited set of facts
16848 we're given, and we try to deduce from those facts, where GCC
16849 must have believed that the containing object started (within
16850 the structure type). The value we deduce is then used (by the
16851 callers of this routine) to generate DW_AT_location and
16852 DW_AT_bit_offset attributes for fields (both bit-fields and, in
16853 the case of DW_AT_location, regular fields as well). */
16854
16855 /* Figure out the bit-distance from the start of the structure to
16856 the "deepest" bit of the bit-field. */
16857 deepest_bitpos = bitpos_int + field_size_in_bits;
16858
16859 /* This is the tricky part. Use some fancy footwork to deduce
16860 where the lowest addressed bit of the containing object must
16861 be. */
16862 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16863
16864 /* Round up to type_align by default. This works best for
16865 bitfields. */
16866 object_offset_in_bits
16867 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
16868
16869 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
16870 {
16871 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16872
16873 /* Round up to decl_align instead. */
16874 object_offset_in_bits
16875 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
16876 }
16877 }
16878 #endif /* PCC_BITFIELD_TYPE_MATTERS */
16879
16880 tree_result = byte_position (decl);
16881 if (ctx->variant_part_offset != NULL_TREE)
16882 tree_result = fold (build2 (PLUS_EXPR, TREE_TYPE (tree_result),
16883 ctx->variant_part_offset, tree_result));
16884
16885 /* If the byte offset is a constant, it's simpler to handle a native
16886 constant rather than a DWARF expression. */
16887 if (TREE_CODE (tree_result) == INTEGER_CST)
16888 {
16889 *cst_offset = wi::to_offset (tree_result).to_shwi ();
16890 return NULL;
16891 }
16892 struct loc_descr_context loc_ctx = {
16893 ctx->struct_type, /* context_type */
16894 NULL_TREE, /* base_decl */
16895 NULL /* dpi */
16896 };
16897 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
16898
16899 /* We want a DWARF expression: abort if we only have a location list with
16900 multiple elements. */
16901 if (!loc_result || !single_element_loc_list_p (loc_result))
16902 return NULL;
16903 else
16904 return loc_result->expr;
16905 }
16906 \f
16907 /* The following routines define various Dwarf attributes and any data
16908 associated with them. */
16909
16910 /* Add a location description attribute value to a DIE.
16911
16912 This emits location attributes suitable for whole variables and
16913 whole parameters. Note that the location attributes for struct fields are
16914 generated by the routine `data_member_location_attribute' below. */
16915
16916 static inline void
16917 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
16918 dw_loc_list_ref descr)
16919 {
16920 if (descr == 0)
16921 return;
16922 if (single_element_loc_list_p (descr))
16923 add_AT_loc (die, attr_kind, descr->expr);
16924 else
16925 add_AT_loc_list (die, attr_kind, descr);
16926 }
16927
16928 /* Add DW_AT_accessibility attribute to DIE if needed. */
16929
16930 static void
16931 add_accessibility_attribute (dw_die_ref die, tree decl)
16932 {
16933 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
16934 children, otherwise the default is DW_ACCESS_public. In DWARF2
16935 the default has always been DW_ACCESS_public. */
16936 if (TREE_PROTECTED (decl))
16937 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
16938 else if (TREE_PRIVATE (decl))
16939 {
16940 if (dwarf_version == 2
16941 || die->die_parent == NULL
16942 || die->die_parent->die_tag != DW_TAG_class_type)
16943 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
16944 }
16945 else if (dwarf_version > 2
16946 && die->die_parent
16947 && die->die_parent->die_tag == DW_TAG_class_type)
16948 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
16949 }
16950
16951 /* Attach the specialized form of location attribute used for data members of
16952 struct and union types. In the special case of a FIELD_DECL node which
16953 represents a bit-field, the "offset" part of this special location
16954 descriptor must indicate the distance in bytes from the lowest-addressed
16955 byte of the containing struct or union type to the lowest-addressed byte of
16956 the "containing object" for the bit-field. (See the `field_byte_offset'
16957 function above).
16958
16959 For any given bit-field, the "containing object" is a hypothetical object
16960 (of some integral or enum type) within which the given bit-field lives. The
16961 type of this hypothetical "containing object" is always the same as the
16962 declared type of the individual bit-field itself (for GCC anyway... the
16963 DWARF spec doesn't actually mandate this). Note that it is the size (in
16964 bytes) of the hypothetical "containing object" which will be given in the
16965 DW_AT_byte_size attribute for this bit-field. (See the
16966 `byte_size_attribute' function below.) It is also used when calculating the
16967 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
16968 function below.)
16969
16970 CTX is required: see the comment for VLR_CONTEXT. */
16971
16972 static void
16973 add_data_member_location_attribute (dw_die_ref die,
16974 tree decl,
16975 struct vlr_context *ctx)
16976 {
16977 HOST_WIDE_INT offset;
16978 dw_loc_descr_ref loc_descr = 0;
16979
16980 if (TREE_CODE (decl) == TREE_BINFO)
16981 {
16982 /* We're working on the TAG_inheritance for a base class. */
16983 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
16984 {
16985 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
16986 aren't at a fixed offset from all (sub)objects of the same
16987 type. We need to extract the appropriate offset from our
16988 vtable. The following dwarf expression means
16989
16990 BaseAddr = ObAddr + *((*ObAddr) - Offset)
16991
16992 This is specific to the V3 ABI, of course. */
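/* Concretely, the expression built below is:
   DW_OP_dup, DW_OP_deref, <push -Offset>, DW_OP_minus, DW_OP_deref,
   DW_OP_plus.  */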
16993
16994 dw_loc_descr_ref tmp;
16995
16996 /* Make a copy of the object address. */
16997 tmp = new_loc_descr (DW_OP_dup, 0, 0);
16998 add_loc_descr (&loc_descr, tmp);
16999
17000 /* Extract the vtable address. */
17001 tmp = new_loc_descr (DW_OP_deref, 0, 0);
17002 add_loc_descr (&loc_descr, tmp);
17003
17004 /* Calculate the address of the offset. */
17005 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
17006 gcc_assert (offset < 0);
17007
17008 tmp = int_loc_descriptor (-offset);
17009 add_loc_descr (&loc_descr, tmp);
17010 tmp = new_loc_descr (DW_OP_minus, 0, 0);
17011 add_loc_descr (&loc_descr, tmp);
17012
17013 /* Extract the offset. */
17014 tmp = new_loc_descr (DW_OP_deref, 0, 0);
17015 add_loc_descr (&loc_descr, tmp);
17016
17017 /* Add it to the object address. */
17018 tmp = new_loc_descr (DW_OP_plus, 0, 0);
17019 add_loc_descr (&loc_descr, tmp);
17020 }
17021 else
17022 offset = tree_to_shwi (BINFO_OFFSET (decl));
17023 }
17024 else
17025 {
17026 loc_descr = field_byte_offset (decl, ctx, &offset);
17027
17028 /* If loc_descr is available then we know the field offset is dynamic.
17029 However, GDB does not handle dynamic field offsets very well at the
17030 moment. */
17031 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
17032 {
17033 loc_descr = NULL;
17034 offset = 0;
17035 }
17036
17037 /* Data member location evaluation starts with the base address on the
17038 stack. Compute the field offset and add it to this base address. */
17039 else if (loc_descr != NULL)
17040 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
17041 }
17042
17043 if (! loc_descr)
17044 {
17045 if (dwarf_version > 2)
17046 {
17047 /* Don't need to output a location expression, just the constant. */
17048 if (offset < 0)
17049 add_AT_int (die, DW_AT_data_member_location, offset);
17050 else
17051 add_AT_unsigned (die, DW_AT_data_member_location, offset);
17052 return;
17053 }
17054 else
17055 {
17056 enum dwarf_location_atom op;
17057
17058 /* The DWARF2 standard says that we should assume that the structure
17059 address is already on the stack, so we can specify a structure
17060 field address by using DW_OP_plus_uconst. */
17061 op = DW_OP_plus_uconst;
17062 loc_descr = new_loc_descr (op, offset, 0);
17063 }
17064 }
17065
17066 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
17067 }
17068
17069 /* Writes integer values to dw_vec_const array. */
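/* For example, insert_int (0x1234, 2, dest) stores dest[0] == 0x34 and
   dest[1] == 0x12: bytes are always written least-significant first,
   independently of the host's endianness.  */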
17070
17071 static void
17072 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
17073 {
17074 while (size != 0)
17075 {
17076 *dest++ = val & 0xff;
17077 val >>= 8;
17078 --size;
17079 }
17080 }
17081
17082 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
17083
17084 static HOST_WIDE_INT
17085 extract_int (const unsigned char *src, unsigned int size)
17086 {
17087 HOST_WIDE_INT val = 0;
17088
17089 src += size;
17090 while (size != 0)
17091 {
17092 val <<= 8;
17093 val |= *--src & 0xff;
17094 --size;
17095 }
17096 return val;
17097 }
17098
17099 /* Writes wide_int values to dw_vec_const array. */
17100
17101 static void
17102 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
17103 {
17104 int i;
17105
17106 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
17107 {
17108 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
17109 return;
17110 }
17111
17112 /* We'd have to extend this code to support odd sizes. */
17113 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
17114
17115 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
17116
17117 if (WORDS_BIG_ENDIAN)
17118 for (i = n - 1; i >= 0; i--)
17119 {
17120 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
17121 dest += sizeof (HOST_WIDE_INT);
17122 }
17123 else
17124 for (i = 0; i < n; i++)
17125 {
17126 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
17127 dest += sizeof (HOST_WIDE_INT);
17128 }
17129 }
17130
17131 /* Writes floating point values to dw_vec_const array. */
17132
17133 static void
17134 insert_float (const_rtx rtl, unsigned char *array)
17135 {
17136 long val[4];
17137 int i;
17138
17139 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), GET_MODE (rtl));
17140
17141 /* real_to_target puts 32-bit pieces in each long. Pack them. */
17142 for (i = 0; i < GET_MODE_SIZE (GET_MODE (rtl)) / 4; i++)
17143 {
17144 insert_int (val[i], 4, array);
17145 array += 4;
17146 }
17147 }
17148
17149 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
17150 does not have a "location" either in memory or in a register. These
17151 things can arise in GNU C when a constant is passed as an actual parameter
17152 to an inlined function. They can also arise in C++ where declared
17153 constants do not necessarily get memory "homes". */
17154
17155 static bool
17156 add_const_value_attribute (dw_die_ref die, rtx rtl)
17157 {
17158 switch (GET_CODE (rtl))
17159 {
17160 case CONST_INT:
17161 {
17162 HOST_WIDE_INT val = INTVAL (rtl);
17163
17164 if (val < 0)
17165 add_AT_int (die, DW_AT_const_value, val);
17166 else
17167 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
17168 }
17169 return true;
17170
17171 case CONST_WIDE_INT:
17172 {
17173 wide_int w1 = std::make_pair (rtl, MAX_MODE_INT);
17174 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
17175 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
17176 wide_int w = wi::zext (w1, prec);
17177 add_AT_wide (die, DW_AT_const_value, w);
17178 }
17179 return true;
17180
17181 case CONST_DOUBLE:
17182 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
17183 floating-point constant. A CONST_DOUBLE is used whenever the
17184 constant requires more than one word in order to be adequately
17185 represented. */
17186 {
17187 machine_mode mode = GET_MODE (rtl);
17188
17189 if (TARGET_SUPPORTS_WIDE_INT == 0 && !SCALAR_FLOAT_MODE_P (mode))
17190 add_AT_double (die, DW_AT_const_value,
17191 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
17192 else
17193 {
17194 unsigned int length = GET_MODE_SIZE (mode);
17195 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
17196
17197 insert_float (rtl, array);
17198 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
17199 }
17200 }
17201 return true;
17202
17203 case CONST_VECTOR:
17204 {
17205 machine_mode mode = GET_MODE (rtl);
17206 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
17207 unsigned int length = CONST_VECTOR_NUNITS (rtl);
17208 unsigned char *array
17209 = ggc_vec_alloc<unsigned char> (length * elt_size);
17210 unsigned int i;
17211 unsigned char *p;
17212 machine_mode imode = GET_MODE_INNER (mode);
17213
17214 switch (GET_MODE_CLASS (mode))
17215 {
17216 case MODE_VECTOR_INT:
17217 for (i = 0, p = array; i < length; i++, p += elt_size)
17218 {
17219 rtx elt = CONST_VECTOR_ELT (rtl, i);
17220 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
17221 }
17222 break;
17223
17224 case MODE_VECTOR_FLOAT:
17225 for (i = 0, p = array; i < length; i++, p += elt_size)
17226 {
17227 rtx elt = CONST_VECTOR_ELT (rtl, i);
17228 insert_float (elt, p);
17229 }
17230 break;
17231
17232 default:
17233 gcc_unreachable ();
17234 }
17235
17236 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
17237 }
17238 return true;
17239
17240 case CONST_STRING:
17241 if (dwarf_version >= 4 || !dwarf_strict)
17242 {
17243 dw_loc_descr_ref loc_result;
17244 resolve_one_addr (&rtl);
17245 rtl_addr:
17246 loc_result = new_addr_loc_descr (rtl, dtprel_false);
17247 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
17248 add_AT_loc (die, DW_AT_location, loc_result);
17249 vec_safe_push (used_rtx_array, rtl);
17250 return true;
17251 }
17252 return false;
17253
17254 case CONST:
17255 if (CONSTANT_P (XEXP (rtl, 0)))
17256 return add_const_value_attribute (die, XEXP (rtl, 0));
17257 /* FALLTHROUGH */
17258 case SYMBOL_REF:
17259 if (!const_ok_for_output (rtl))
17260 return false;
17261 /* FALLTHROUGH */
17262 case LABEL_REF:
17263 if (dwarf_version >= 4 || !dwarf_strict)
17264 goto rtl_addr;
17265 return false;
17266
17267 case PLUS:
17268 /* In cases where an inlined instance of an inline function is passed
17269 the address of an `auto' variable (which is local to the caller) we
17270 can get a situation where the DECL_RTL of the artificial local
17271 variable (for the inlining) which acts as a stand-in for the
17272 corresponding formal parameter (of the inline function) will look
17273 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
17274 exactly a compile-time constant expression, but it isn't the address
17275 of the (artificial) local variable either. Rather, it represents the
17276 *value* which the artificial local variable always has during its
17277 lifetime. We currently have no way to represent such quasi-constant
17278 values in Dwarf, so for now we just punt and generate nothing. */
17279 return false;
17280
17281 case HIGH:
17282 case CONST_FIXED:
17283 return false;
17284
17285 case MEM:
17286 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
17287 && MEM_READONLY_P (rtl)
17288 && GET_MODE (rtl) == BLKmode)
17289 {
17290 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
17291 return true;
17292 }
17293 return false;
17294
17295 default:
17296 /* No other kinds of rtx should be possible here. */
17297 gcc_unreachable ();
17298 }
17299 return false;
17300 }
17301
17302 /* Determine whether the evaluation of EXPR references any variables
17303 or functions which aren't otherwise used (and therefore may not be
17304 output). */
17305 static tree
17306 reference_to_unused (tree * tp, int * walk_subtrees,
17307 void * data ATTRIBUTE_UNUSED)
17308 {
17309 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
17310 *walk_subtrees = 0;
17311
17312 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
17313 && ! TREE_ASM_WRITTEN (*tp))
17314 return *tp;
17315 /* ??? The C++ FE emits debug information for using decls, so
17316 putting gcc_unreachable here falls over. See PR31899. For now
17317 be conservative. */
17318 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
17319 return *tp;
17320 else if (VAR_P (*tp))
17321 {
17322 varpool_node *node = varpool_node::get (*tp);
17323 if (!node || !node->definition)
17324 return *tp;
17325 }
17326 else if (TREE_CODE (*tp) == FUNCTION_DECL
17327 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
17328 {
17329 /* The call graph machinery must have finished analyzing,
17330 optimizing and gimplifying the CU by now.
17331 So if *TP has no call graph node associated
17332 to it, it means *TP will not be emitted. */
17333 if (!cgraph_node::get (*tp))
17334 return *tp;
17335 }
17336 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
17337 return *tp;
17338
17339 return NULL_TREE;
17340 }
17341
17342 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
17343 for use in a later add_const_value_attribute call. */
17344
17345 static rtx
17346 rtl_for_decl_init (tree init, tree type)
17347 {
17348 rtx rtl = NULL_RTX;
17349
17350 STRIP_NOPS (init);
17351
17352 /* If a variable is initialized with a string constant without embedded
17353 zeros, build CONST_STRING. */
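/* For instance, for 'const char msg[] = "abc";' this builds a read-only
   BLKmode MEM wrapping CONST_STRING "abc", whereas an initializer with an
   embedded zero fails the TREE_STRING_LENGTH == strlen + 1 check below,
   so no CONST_STRING is built.  */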
17354 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
17355 {
17356 tree enttype = TREE_TYPE (type);
17357 tree domain = TYPE_DOMAIN (type);
17358 machine_mode mode = TYPE_MODE (enttype);
17359
17360 if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
17361 && domain
17362 && integer_zerop (TYPE_MIN_VALUE (domain))
17363 && compare_tree_int (TYPE_MAX_VALUE (domain),
17364 TREE_STRING_LENGTH (init) - 1) == 0
17365 && ((size_t) TREE_STRING_LENGTH (init)
17366 == strlen (TREE_STRING_POINTER (init)) + 1))
17367 {
17368 rtl = gen_rtx_CONST_STRING (VOIDmode,
17369 ggc_strdup (TREE_STRING_POINTER (init)));
17370 rtl = gen_rtx_MEM (BLKmode, rtl);
17371 MEM_READONLY_P (rtl) = 1;
17372 }
17373 }
17374 /* Other aggregates, and complex values, could be represented using
17375 CONCAT: FIXME! */
17376 else if (AGGREGATE_TYPE_P (type)
17377 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
17378 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
17379 || TREE_CODE (type) == COMPLEX_TYPE)
17380 ;
17381 /* Vectors only work if their mode is supported by the target.
17382 FIXME: generic vectors ought to work too. */
17383 else if (TREE_CODE (type) == VECTOR_TYPE
17384 && !VECTOR_MODE_P (TYPE_MODE (type)))
17385 ;
17386 /* If the initializer is something that we know will expand into an
17387 immediate RTL constant, expand it now. We must be careful not to
17388 reference variables which won't be output. */
17389 else if (initializer_constant_valid_p (init, type)
17390 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
17391 {
17392 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
17393 possible. */
17394 if (TREE_CODE (type) == VECTOR_TYPE)
17395 switch (TREE_CODE (init))
17396 {
17397 case VECTOR_CST:
17398 break;
17399 case CONSTRUCTOR:
17400 if (TREE_CONSTANT (init))
17401 {
17402 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
17403 bool constant_p = true;
17404 tree value;
17405 unsigned HOST_WIDE_INT ix;
17406
17407 /* Even when ctor is constant, it might contain non-*_CST
17408 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
17409 belong in VECTOR_CST nodes. */
17410 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
17411 if (!CONSTANT_CLASS_P (value))
17412 {
17413 constant_p = false;
17414 break;
17415 }
17416
17417 if (constant_p)
17418 {
17419 init = build_vector_from_ctor (type, elts);
17420 break;
17421 }
17422 }
17423 /* FALLTHRU */
17424
17425 default:
17426 return NULL;
17427 }
17428
17429 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
17430
17431 /* If expand_expr returns a MEM, it wasn't immediate. */
17432 gcc_assert (!rtl || !MEM_P (rtl));
17433 }
17434
17435 return rtl;
17436 }
17437
17438 /* Generate RTL for the variable DECL to represent its location. */
17439
17440 static rtx
17441 rtl_for_decl_location (tree decl)
17442 {
17443 rtx rtl;
17444
17445 /* Here we have to decide where we are going to say the parameter "lives"
17446 (as far as the debugger is concerned). We only have a couple of
17447 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
17448
17449 DECL_RTL normally indicates where the parameter lives during most of the
17450 activation of the function. If optimization is enabled however, this
17451 could be either NULL or else a pseudo-reg. Both of those cases indicate
17452 that the parameter doesn't really live anywhere (as far as the code
17453 generation parts of GCC are concerned) during most of the function's
17454 activation. That will happen (for example) if the parameter is never
17455 referenced within the function.
17456
17457 We could just generate a location descriptor here for all non-NULL
17458 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
17459 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
17460 where DECL_RTL is NULL or is a pseudo-reg.
17461
17462 Note however that we can only get away with using DECL_INCOMING_RTL as
17463 a backup substitute for DECL_RTL in certain limited cases. In cases
17464 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
17465 we can be sure that the parameter was passed using the same type as it is
17466 declared to have within the function, and that its DECL_INCOMING_RTL
17467 points us to a place where a value of that type is passed.
17468
17469 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
17470 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
17471 because in these cases DECL_INCOMING_RTL points us to a value of some
17472 type which is *different* from the type of the parameter itself. Thus,
17473 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
17474 such cases, the debugger would end up (for example) trying to fetch a
17475 `float' from a place which actually contains the first part of a
17476 `double'. That would lead to really incorrect and confusing
17477 output at debug-time.
17478
17479 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
17480 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
17481 are a couple of exceptions however. On little-endian machines we can
17482 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
17483 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
17484 an integral type that is smaller than TREE_TYPE (decl). These cases arise
17485 when (on a little-endian machine) a non-prototyped function has a
17486 parameter declared to be of type `short' or `char'. In such cases,
17487 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
17488 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
17489 passed `int' value. If the debugger then uses that address to fetch
17490 a `short' or a `char' (on a little-endian machine) the result will be
17491 the correct data, so we allow for such exceptional cases below.
17492
17493 Note that our goal here is to describe the place where the given formal
17494 parameter lives during most of the function's activation (i.e. between the
17495 end of the prologue and the start of the epilogue). We'll do that as best
17496 as we can. Note however that if the given formal parameter is modified
17497 sometime during the execution of the function, then a stack backtrace (at
17498 debug-time) will show the function as having been called with the *new*
17499 value rather than the value which was originally passed in. This happens
17500 rarely enough that it is not a major problem, but it *is* a problem, and
17501 I'd like to fix it.
17502
17503 A future version of dwarf2out.c may generate two additional attributes for
17504 any given DW_TAG_formal_parameter DIE which will describe the "passed
17505 type" and the "passed location" for the given formal parameter in addition
17506 to the attributes we now generate to indicate the "declared type" and the
17507 "active location" for each parameter. This additional set of attributes
17508 could be used by debuggers for stack backtraces. Separately, note that
17509 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
17510 This happens (for example) for inlined-instances of inline function formal
17511 parameters which are never referenced. This really shouldn't be
17512 happening. All PARM_DECL nodes should get valid non-NULL
17513 DECL_INCOMING_RTL values. FIXME. */
17514
17515 /* Use DECL_RTL as the "location" unless we find something better. */
17516 rtl = DECL_RTL_IF_SET (decl);
17517
17518 /* When generating abstract instances, ignore everything except
17519 constants, symbols living in memory, and symbols living in
17520 fixed registers. */
17521 if (! reload_completed)
17522 {
17523 if (rtl
17524 && (CONSTANT_P (rtl)
17525 || (MEM_P (rtl)
17526 && CONSTANT_P (XEXP (rtl, 0)))
17527 || (REG_P (rtl)
17528 && VAR_P (decl)
17529 && TREE_STATIC (decl))))
17530 {
17531 rtl = targetm.delegitimize_address (rtl);
17532 return rtl;
17533 }
17534 rtl = NULL_RTX;
17535 }
17536 else if (TREE_CODE (decl) == PARM_DECL)
17537 {
17538 if (rtl == NULL_RTX
17539 || is_pseudo_reg (rtl)
17540 || (MEM_P (rtl)
17541 && is_pseudo_reg (XEXP (rtl, 0))
17542 && DECL_INCOMING_RTL (decl)
17543 && MEM_P (DECL_INCOMING_RTL (decl))
17544 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
17545 {
17546 tree declared_type = TREE_TYPE (decl);
17547 tree passed_type = DECL_ARG_TYPE (decl);
17548 machine_mode dmode = TYPE_MODE (declared_type);
17549 machine_mode pmode = TYPE_MODE (passed_type);
17550
17551 /* This decl represents a formal parameter which was optimized out.
17552 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
17553 all cases where (rtl == NULL_RTX) just below. */
17554 if (dmode == pmode)
17555 rtl = DECL_INCOMING_RTL (decl);
17556 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
17557 && SCALAR_INT_MODE_P (dmode)
17558 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
17559 && DECL_INCOMING_RTL (decl))
17560 {
17561 rtx inc = DECL_INCOMING_RTL (decl);
17562 if (REG_P (inc))
17563 rtl = inc;
17564 else if (MEM_P (inc))
17565 {
17566 if (BYTES_BIG_ENDIAN)
17567 rtl = adjust_address_nv (inc, dmode,
17568 GET_MODE_SIZE (pmode)
17569 - GET_MODE_SIZE (dmode));
17570 else
17571 rtl = inc;
17572 }
17573 }
17574 }
17575
17576 /* If the parm was passed in registers, but lives on the stack, then
17577 make a big endian correction if the mode of the type of the
17578 parameter is not the same as the mode of the rtl. */
17579 /* ??? This is the same series of checks that are made in dbxout.c before
17580 we reach the big endian correction code there. It isn't clear if all
17581 of these checks are necessary here, but keeping them all is the safe
17582 thing to do. */
17583 else if (MEM_P (rtl)
17584 && XEXP (rtl, 0) != const0_rtx
17585 && ! CONSTANT_P (XEXP (rtl, 0))
17586 /* Not passed in memory. */
17587 && !MEM_P (DECL_INCOMING_RTL (decl))
17588 /* Not passed by invisible reference. */
17589 && (!REG_P (XEXP (rtl, 0))
17590 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
17591 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
17592 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
17593 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
17594 #endif
17595 )
17596 /* Big endian correction check. */
17597 && BYTES_BIG_ENDIAN
17598 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
17599 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
17600 < UNITS_PER_WORD))
17601 {
17602 machine_mode addr_mode = get_address_mode (rtl);
17603 int offset = (UNITS_PER_WORD
17604 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
17605
17606 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17607 plus_constant (addr_mode, XEXP (rtl, 0), offset));
17608 }
17609 }
17610 else if (VAR_P (decl)
17611 && rtl
17612 && MEM_P (rtl)
17613 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl))
17614 && BYTES_BIG_ENDIAN)
17615 {
17616 machine_mode addr_mode = get_address_mode (rtl);
17617 int rsize = GET_MODE_SIZE (GET_MODE (rtl));
17618 int dsize = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)));
17619
17620 /* If a variable is declared "register" yet is smaller than
17621 a register, then if we store the variable to memory, it
17622 looks like we're storing a register-sized value, when in
17623 fact we are not. We need to adjust the offset of the
17624 storage location to reflect the actual value's bytes,
17625 else gdb will not be able to display it. */
17626 if (rsize > dsize)
17627 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17628 plus_constant (addr_mode, XEXP (rtl, 0),
17629 rsize - dsize));
17630 }
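  /* Worked example (an assumption about a typical 32-bit big-endian target,
     not taken from GCC itself): for `register char x;' spilled to a
     word-sized stack slot, GET_MODE_SIZE (GET_MODE (rtl)) is 4 while the
     declared type occupies 1 byte, so the MEM is rebuilt at
     XEXP (rtl, 0) + 3, pointing at the byte that actually holds the value.
     The PARM_DECL correction above is analogous: a 2-byte `short' passed in
     a 4-byte slot gets offset 4 - 2 = 2.  */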
17631
17632 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
17633 and will have been substituted directly into all expressions that use it.
17634 C does not have such a concept, but C++ and other languages do. */
17635 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
17636 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
17637
17638 if (rtl)
17639 rtl = targetm.delegitimize_address (rtl);
17640
17641 /* If we don't look past the constant pool, we risk emitting a
17642 reference to a constant pool entry that isn't referenced from
17643 code, and thus is not emitted. */
17644 if (rtl)
17645 rtl = avoid_constant_pool_reference (rtl);
17646
17647 /* Try harder to get a rtl. If this symbol ends up not being emitted
17648 in the current CU, resolve_addr will remove the expression referencing
17649 it. */
17650 if (rtl == NULL_RTX
17651 && VAR_P (decl)
17652 && !DECL_EXTERNAL (decl)
17653 && TREE_STATIC (decl)
17654 && DECL_NAME (decl)
17655 && !DECL_HARD_REGISTER (decl)
17656 && DECL_MODE (decl) != VOIDmode)
17657 {
17658 rtl = make_decl_rtl_for_debug (decl);
17659 if (!MEM_P (rtl)
17660 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
17661 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
17662 rtl = NULL_RTX;
17663 }
17664
17665 return rtl;
17666 }
17667
17668 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
17669 returned. If so, the decl for the COMMON block is returned, and the
17670 value is the offset into the common block for the symbol. */
17671
17672 static tree
17673 fortran_common (tree decl, HOST_WIDE_INT *value)
17674 {
17675 tree val_expr, cvar;
17676 machine_mode mode;
17677 HOST_WIDE_INT bitsize, bitpos;
17678 tree offset;
17679 int unsignedp, reversep, volatilep = 0;
17680
17681 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
17682 it does not have a value (the offset into the common area), or if it
17683 is thread local (as opposed to global) then it isn't common, and shouldn't
17684 be handled as such. */
17685 if (!VAR_P (decl)
17686 || !TREE_STATIC (decl)
17687 || !DECL_HAS_VALUE_EXPR_P (decl)
17688 || !is_fortran ())
17689 return NULL_TREE;
17690
17691 val_expr = DECL_VALUE_EXPR (decl);
17692 if (TREE_CODE (val_expr) != COMPONENT_REF)
17693 return NULL_TREE;
17694
17695 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
17696 &unsignedp, &reversep, &volatilep);
17697
17698 if (cvar == NULL_TREE
17699 || !VAR_P (cvar)
17700 || DECL_ARTIFICIAL (cvar)
17701 || !TREE_PUBLIC (cvar))
17702 return NULL_TREE;
17703
17704 *value = 0;
17705 if (offset != NULL)
17706 {
17707 if (!tree_fits_shwi_p (offset))
17708 return NULL_TREE;
17709 *value = tree_to_shwi (offset);
17710 }
17711 if (bitpos != 0)
17712 *value += bitpos / BITS_PER_UNIT;
17713
17714 return cvar;
17715 }
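/* Hedged illustration (hypothetical Fortran source, not part of GCC): for

       COMMON /blk/ I, X
       INTEGER I
       REAL X

   the decl for X typically has a DECL_VALUE_EXPR that is a COMPONENT_REF
   into the artificial variable representing /blk/.  For that decl this
   function would return the COMMON block's decl and set *VALUE to X's byte
   offset within the block (4 on a target with a 4-byte INTEGER).  */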
17716
17717 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
17718 data attribute for a variable or a parameter. We generate the
17719 DW_AT_const_value attribute only in those cases where the given variable
17720 or parameter does not have a true "location" either in memory or in a
17721 register. This can happen (for example) when a constant is passed as an
17722 actual argument in a call to an inline function. (It's possible that
17723 these things can crop up in other ways also.) Note that one type of
17724 constant value which can be passed into an inlined function is a constant
17725 pointer. This can happen for example if an actual argument in an inlined
17726 function call evaluates to a compile-time constant address.
17727
17728 CACHE_P is true if it is worth caching the location list for DECL,
17729 so that future calls can reuse it rather than regenerate it from scratch.
17730 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
17731 since we will need to refer to them each time the function is inlined. */
17732
17733 static bool
17734 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
17735 {
17736 rtx rtl;
17737 dw_loc_list_ref list;
17738 var_loc_list *loc_list;
17739 cached_dw_loc_list *cache;
17740
17741 if (early_dwarf)
17742 return false;
17743
17744 if (TREE_CODE (decl) == ERROR_MARK)
17745 return false;
17746
17747 if (get_AT (die, DW_AT_location)
17748 || get_AT (die, DW_AT_const_value))
17749 return true;
17750
17751 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
17752 || TREE_CODE (decl) == RESULT_DECL);
17753
17754 /* Try to get some constant RTL for this decl, and use that as the value of
17755 the location. */
17756
17757 rtl = rtl_for_decl_location (decl);
17758 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17759 && add_const_value_attribute (die, rtl))
17760 return true;
17761
17762   /* See if we have a single-element location list that is equivalent to
17763      a constant value.  In that case it is better to use add_const_value_attribute
17764      rather than expanding the constant value equivalent.  */
17765 loc_list = lookup_decl_loc (decl);
17766 if (loc_list
17767 && loc_list->first
17768 && loc_list->first->next == NULL
17769 && NOTE_P (loc_list->first->loc)
17770 && NOTE_VAR_LOCATION (loc_list->first->loc)
17771 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
17772 {
17773 struct var_loc_node *node;
17774
17775 node = loc_list->first;
17776 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
17777 if (GET_CODE (rtl) == EXPR_LIST)
17778 rtl = XEXP (rtl, 0);
17779 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17780 && add_const_value_attribute (die, rtl))
17781 return true;
17782 }
17783 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
17784 list several times. See if we've already cached the contents. */
17785 list = NULL;
17786 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
17787 cache_p = false;
17788 if (cache_p)
17789 {
17790 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
17791 if (cache)
17792 list = cache->loc_list;
17793 }
17794 if (list == NULL)
17795 {
17796 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
17797 NULL);
17798 /* It is usually worth caching this result if the decl is from
17799 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
17800 if (cache_p && list && list->dw_loc_next)
17801 {
17802 cached_dw_loc_list **slot
17803 = cached_dw_loc_list_table->find_slot_with_hash (decl,
17804 DECL_UID (decl),
17805 INSERT);
17806 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
17807 cache->decl_id = DECL_UID (decl);
17808 cache->loc_list = list;
17809 *slot = cache;
17810 }
17811 }
17812 if (list)
17813 {
17814 add_AT_location_description (die, DW_AT_location, list);
17815 return true;
17816 }
17817 /* None of that worked, so it must not really have a location;
17818 try adding a constant value attribute from the DECL_INITIAL. */
17819 return tree_add_const_value_attribute_for_decl (die, decl);
17820 }
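/* Hedged illustration (hypothetical source, not part of GCC) of the
   DW_AT_const_value case described above:

       static inline int twice (int n) { return n + n; }
       int caller (void) { return twice (21); }

   After inlining, the formal parameter `n' may have no register or memory
   home at all; its single location-list entry is the constant 21, so the
   DW_TAG_formal_parameter DIE in the inlined instance gets
   DW_AT_const_value 21 instead of a DW_AT_location.  */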
17821
17822 /* Helper function for tree_add_const_value_attribute. Natively encode
17823 initializer INIT into an array. Return true if successful. */
17824
17825 static bool
17826 native_encode_initializer (tree init, unsigned char *array, int size)
17827 {
17828 tree type;
17829
17830 if (init == NULL_TREE)
17831 return false;
17832
17833 STRIP_NOPS (init);
17834 switch (TREE_CODE (init))
17835 {
17836 case STRING_CST:
17837 type = TREE_TYPE (init);
17838 if (TREE_CODE (type) == ARRAY_TYPE)
17839 {
17840 tree enttype = TREE_TYPE (type);
17841 machine_mode mode = TYPE_MODE (enttype);
17842
17843 if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
17844 return false;
17845 if (int_size_in_bytes (type) != size)
17846 return false;
17847 if (size > TREE_STRING_LENGTH (init))
17848 {
17849 memcpy (array, TREE_STRING_POINTER (init),
17850 TREE_STRING_LENGTH (init));
17851 memset (array + TREE_STRING_LENGTH (init),
17852 '\0', size - TREE_STRING_LENGTH (init));
17853 }
17854 else
17855 memcpy (array, TREE_STRING_POINTER (init), size);
17856 return true;
17857 }
17858 return false;
17859 case CONSTRUCTOR:
17860 type = TREE_TYPE (init);
17861 if (int_size_in_bytes (type) != size)
17862 return false;
17863 if (TREE_CODE (type) == ARRAY_TYPE)
17864 {
17865 HOST_WIDE_INT min_index;
17866 unsigned HOST_WIDE_INT cnt;
17867 int curpos = 0, fieldsize;
17868 constructor_elt *ce;
17869
17870 if (TYPE_DOMAIN (type) == NULL_TREE
17871 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
17872 return false;
17873
17874 fieldsize = int_size_in_bytes (TREE_TYPE (type));
17875 if (fieldsize <= 0)
17876 return false;
17877
17878 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
17879 memset (array, '\0', size);
17880 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17881 {
17882 tree val = ce->value;
17883 tree index = ce->index;
17884 int pos = curpos;
17885 if (index && TREE_CODE (index) == RANGE_EXPR)
17886 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
17887 * fieldsize;
17888 else if (index)
17889 pos = (tree_to_shwi (index) - min_index) * fieldsize;
17890
17891 if (val)
17892 {
17893 STRIP_NOPS (val);
17894 if (!native_encode_initializer (val, array + pos, fieldsize))
17895 return false;
17896 }
17897 curpos = pos + fieldsize;
17898 if (index && TREE_CODE (index) == RANGE_EXPR)
17899 {
17900 int count = tree_to_shwi (TREE_OPERAND (index, 1))
17901 - tree_to_shwi (TREE_OPERAND (index, 0));
17902 while (count-- > 0)
17903 {
17904 if (val)
17905 memcpy (array + curpos, array + pos, fieldsize);
17906 curpos += fieldsize;
17907 }
17908 }
17909 gcc_assert (curpos <= size);
17910 }
17911 return true;
17912 }
17913 else if (TREE_CODE (type) == RECORD_TYPE
17914 || TREE_CODE (type) == UNION_TYPE)
17915 {
17916 tree field = NULL_TREE;
17917 unsigned HOST_WIDE_INT cnt;
17918 constructor_elt *ce;
17919
17920 if (int_size_in_bytes (type) != size)
17921 return false;
17922
17923 if (TREE_CODE (type) == RECORD_TYPE)
17924 field = TYPE_FIELDS (type);
17925
17926 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17927 {
17928 tree val = ce->value;
17929 int pos, fieldsize;
17930
17931 if (ce->index != 0)
17932 field = ce->index;
17933
17934 if (val)
17935 STRIP_NOPS (val);
17936
17937 if (field == NULL_TREE || DECL_BIT_FIELD (field))
17938 return false;
17939
17940 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
17941 && TYPE_DOMAIN (TREE_TYPE (field))
17942 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
17943 return false;
17944 else if (DECL_SIZE_UNIT (field) == NULL_TREE
17945 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
17946 return false;
17947 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17948 pos = int_byte_position (field);
17949 gcc_assert (pos + fieldsize <= size);
17950 if (val && fieldsize != 0
17951 && !native_encode_initializer (val, array + pos, fieldsize))
17952 return false;
17953 }
17954 return true;
17955 }
17956 return false;
17957 case VIEW_CONVERT_EXPR:
17958 case NON_LVALUE_EXPR:
17959 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
17960 default:
17961 return native_encode_expr (init, array, size) == size;
17962 }
17963 }
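/* Worked example (assuming a little-endian target with 4-byte int; not a
   statement about any particular configuration): for

       static const int a[3] = { 1, 2, 3 };

   the CONSTRUCTOR case above lays out three 4-byte fields back to back,
   producing the 12-byte buffer
       01 00 00 00  02 00 00 00  03 00 00 00
   which tree_add_const_value_attribute then attaches as a DW_AT_const_value
   block.  */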
17964
17965 /* Attach a DW_AT_const_value attribute to DIE. The value of the
17966 attribute is the const value T. */
17967
17968 static bool
17969 tree_add_const_value_attribute (dw_die_ref die, tree t)
17970 {
17971 tree init;
17972 tree type = TREE_TYPE (t);
17973 rtx rtl;
17974
17975 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
17976 return false;
17977
17978 init = t;
17979 gcc_assert (!DECL_P (init));
17980
17981 if (! early_dwarf)
17982 {
17983 rtl = rtl_for_decl_init (init, type);
17984 if (rtl)
17985 return add_const_value_attribute (die, rtl);
17986 }
17987 /* If the host and target are sane, try harder. */
17988 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
17989 && initializer_constant_valid_p (init, type))
17990 {
17991 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
17992 if (size > 0 && (int) size == size)
17993 {
17994 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
17995
17996 if (native_encode_initializer (init, array, size))
17997 {
17998 add_AT_vec (die, DW_AT_const_value, size, 1, array);
17999 return true;
18000 }
18001 ggc_free (array);
18002 }
18003 }
18004 return false;
18005 }
18006
18007 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
18008 attribute is the const value of T, where T is an integral constant
18009 variable with static storage duration
18010 (so it can't be a PARM_DECL or a RESULT_DECL). */
18011
18012 static bool
18013 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
18014 {
18015
18016 if (!decl
18017 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
18018 || (VAR_P (decl) && !TREE_STATIC (decl)))
18019 return false;
18020
18021 if (TREE_READONLY (decl)
18022 && ! TREE_THIS_VOLATILE (decl)
18023 && DECL_INITIAL (decl))
18024 /* OK */;
18025 else
18026 return false;
18027
18028 /* Don't add DW_AT_const_value if abstract origin already has one. */
18029 if (get_AT (var_die, DW_AT_const_value))
18030 return false;
18031
18032 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
18033 }
18034
18035 /* Convert the CFI instructions for the current function into a
18036    location list.  This is used for DW_AT_frame_base when we are targeting
18037 a dwarf2 consumer that does not support the dwarf3
18038 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
18039 expressions. */
18040
18041 static dw_loc_list_ref
18042 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
18043 {
18044 int ix;
18045 dw_fde_ref fde;
18046 dw_loc_list_ref list, *list_tail;
18047 dw_cfi_ref cfi;
18048 dw_cfa_location last_cfa, next_cfa;
18049 const char *start_label, *last_label, *section;
18050 dw_cfa_location remember;
18051
18052 fde = cfun->fde;
18053 gcc_assert (fde != NULL);
18054
18055 section = secname_for_decl (current_function_decl);
18056 list_tail = &list;
18057 list = NULL;
18058
18059 memset (&next_cfa, 0, sizeof (next_cfa));
18060 next_cfa.reg = INVALID_REGNUM;
18061 remember = next_cfa;
18062
18063 start_label = fde->dw_fde_begin;
18064
18065 /* ??? Bald assumption that the CIE opcode list does not contain
18066 advance opcodes. */
18067 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
18068 lookup_cfa_1 (cfi, &next_cfa, &remember);
18069
18070 last_cfa = next_cfa;
18071 last_label = start_label;
18072
18073 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
18074 {
18075 /* If the first partition contained no CFI adjustments, the
18076 CIE opcodes apply to the whole first partition. */
18077 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18078 fde->dw_fde_begin, fde->dw_fde_end, section);
18079       list_tail = &(*list_tail)->dw_loc_next;
18080 start_label = last_label = fde->dw_fde_second_begin;
18081 }
18082
18083 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
18084 {
18085 switch (cfi->dw_cfi_opc)
18086 {
18087 case DW_CFA_set_loc:
18088 case DW_CFA_advance_loc1:
18089 case DW_CFA_advance_loc2:
18090 case DW_CFA_advance_loc4:
18091 if (!cfa_equal_p (&last_cfa, &next_cfa))
18092 {
18093 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18094 start_label, last_label, section);
18095
18096 list_tail = &(*list_tail)->dw_loc_next;
18097 last_cfa = next_cfa;
18098 start_label = last_label;
18099 }
18100 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
18101 break;
18102
18103 case DW_CFA_advance_loc:
18104 /* The encoding is complex enough that we should never emit this. */
18105 gcc_unreachable ();
18106
18107 default:
18108 lookup_cfa_1 (cfi, &next_cfa, &remember);
18109 break;
18110 }
18111 if (ix + 1 == fde->dw_fde_switch_cfi_index)
18112 {
18113 if (!cfa_equal_p (&last_cfa, &next_cfa))
18114 {
18115 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18116 start_label, last_label, section);
18117
18118 list_tail = &(*list_tail)->dw_loc_next;
18119 last_cfa = next_cfa;
18120 start_label = last_label;
18121 }
18122 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18123 start_label, fde->dw_fde_end, section);
18124 list_tail = &(*list_tail)->dw_loc_next;
18125 start_label = last_label = fde->dw_fde_second_begin;
18126 }
18127 }
18128
18129 if (!cfa_equal_p (&last_cfa, &next_cfa))
18130 {
18131 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18132 start_label, last_label, section);
18133 list_tail = &(*list_tail)->dw_loc_next;
18134 start_label = last_label;
18135 }
18136
18137 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
18138 start_label,
18139 fde->dw_fde_second_begin
18140 ? fde->dw_fde_second_end : fde->dw_fde_end,
18141 section);
18142
18143 if (list && list->dw_loc_next)
18144 gen_llsym (list);
18145
18146 return list;
18147 }
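/* Hedged sketch of the resulting list (assuming a typical x86-64 prologue
   `push %rbp; mov %rsp,%rbp' and OFFSET == 0; the exact ranges and register
   numbers depend on the target and on the CFI actually emitted):

       [begin, after push)       CFA = rsp + 8    ->  DW_OP_breg7 8
       [after push, after mov)   CFA = rsp + 16   ->  DW_OP_breg7 16
       [after mov, end)          CFA = rbp + 16   ->  DW_OP_breg6 16

   Each range becomes one entry of the DW_AT_frame_base location list built
   above.  */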
18148
18149 /* Compute a displacement from the "steady-state frame pointer" to the
18150 frame base (often the same as the CFA), and store it in
18151 frame_pointer_fb_offset. OFFSET is added to the displacement
18152 before the latter is negated. */
18153
18154 static void
18155 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
18156 {
18157 rtx reg, elim;
18158
18159 #ifdef FRAME_POINTER_CFA_OFFSET
18160 reg = frame_pointer_rtx;
18161 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
18162 #else
18163 reg = arg_pointer_rtx;
18164 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
18165 #endif
18166
18167 elim = (ira_use_lra_p
18168 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
18169 : eliminate_regs (reg, VOIDmode, NULL_RTX));
18170 if (GET_CODE (elim) == PLUS)
18171 {
18172 offset += INTVAL (XEXP (elim, 1));
18173 elim = XEXP (elim, 0);
18174 }
18175
18176 frame_pointer_fb_offset = -offset;
18177
18178 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
18179    in which to eliminate.  This is because its stack pointer isn't
18180 directly accessible as a register within the ISA. To work around
18181 this, assume that while we cannot provide a proper value for
18182 frame_pointer_fb_offset, we won't need one either. */
18183 frame_pointer_fb_offset_valid
18184 = ((SUPPORTS_STACK_ALIGNMENT
18185 && (elim == hard_frame_pointer_rtx
18186 || elim == stack_pointer_rtx))
18187 || elim == (frame_pointer_needed
18188 ? hard_frame_pointer_rtx
18189 : stack_pointer_rtx));
18190 }
18191
18192 /* Generate a DW_AT_name attribute given some string value to be included as
18193 the value of the attribute. */
18194
18195 static void
18196 add_name_attribute (dw_die_ref die, const char *name_string)
18197 {
18198 if (name_string != NULL && *name_string != 0)
18199 {
18200 if (demangle_name_func)
18201 name_string = (*demangle_name_func) (name_string);
18202
18203 add_AT_string (die, DW_AT_name, name_string);
18204 }
18205 }
18206
18207 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
18208 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
18209 of TYPE accordingly.
18210
18211 ??? This is a temporary measure until after we're able to generate
18212 regular DWARF for the complex Ada type system. */
18213
18214 static void
18215 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
18216 dw_die_ref context_die)
18217 {
18218 tree dtype;
18219 dw_die_ref dtype_die;
18220
18221 if (!lang_hooks.types.descriptive_type)
18222 return;
18223
18224 dtype = lang_hooks.types.descriptive_type (type);
18225 if (!dtype)
18226 return;
18227
18228 dtype_die = lookup_type_die (dtype);
18229 if (!dtype_die)
18230 {
18231 gen_type_die (dtype, context_die);
18232 dtype_die = lookup_type_die (dtype);
18233 gcc_assert (dtype_die);
18234 }
18235
18236 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
18237 }
18238
18239 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
18240
18241 static const char *
18242 comp_dir_string (void)
18243 {
18244 const char *wd;
18245 char *wd1;
18246 static const char *cached_wd = NULL;
18247
18248 if (cached_wd != NULL)
18249 return cached_wd;
18250
18251 wd = get_src_pwd ();
18252 if (wd == NULL)
18253 return NULL;
18254
18255 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
18256 {
18257 int wdlen;
18258
18259 wdlen = strlen (wd);
18260 wd1 = ggc_vec_alloc<char> (wdlen + 2);
18261 strcpy (wd1, wd);
18262 wd1 [wdlen] = DIR_SEPARATOR;
18263 wd1 [wdlen + 1] = 0;
18264 wd = wd1;
18265 }
18266
18267 cached_wd = remap_debug_filename (wd);
18268 return cached_wd;
18269 }
18270
18271 /* Generate a DW_AT_comp_dir attribute for DIE. */
18272
18273 static void
18274 add_comp_dir_attribute (dw_die_ref die)
18275 {
18276 const char * wd = comp_dir_string ();
18277 if (wd != NULL)
18278 add_AT_string (die, DW_AT_comp_dir, wd);
18279 }
18280
18281 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
18282    pointer computation, ...), output a representation for that value according
18283 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
18284 loc_list_from_tree for the meaning of CONTEXT. */
18285
18286 static void
18287 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
18288 int forms, const struct loc_descr_context *context)
18289 {
18290 dw_die_ref context_die, decl_die;
18291 dw_loc_list_ref list;
18292
18293 bool strip_conversions = true;
18294
18295 while (strip_conversions)
18296 switch (TREE_CODE (value))
18297 {
18298 case ERROR_MARK:
18299 case SAVE_EXPR:
18300 return;
18301
18302 CASE_CONVERT:
18303 case VIEW_CONVERT_EXPR:
18304 value = TREE_OPERAND (value, 0);
18305 break;
18306
18307 default:
18308 strip_conversions = false;
18309 break;
18310 }
18311
18312 /* If possible and permitted, output the attribute as a constant. */
18313 if ((forms & dw_scalar_form_constant) != 0
18314 && TREE_CODE (value) == INTEGER_CST)
18315 {
18316 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
18317
18318 /* If HOST_WIDE_INT is big enough then represent the bound as
18319 a constant value. We need to choose a form based on
18320 whether the type is signed or unsigned. We cannot just
18321 call add_AT_unsigned if the value itself is positive
18322 (add_AT_unsigned might add the unsigned value encoded as
18323         DW_FORM_data[1248]).  Some DWARF consumers will look up the
18324         bounds type and then sign-extend any unsigned values found
18325 for signed types. This is needed only for
18326 DW_AT_{lower,upper}_bound, since for most other attributes,
18327 consumers will treat DW_FORM_data[1248] as unsigned values,
18328 regardless of the underlying type. */
18329 if (prec <= HOST_BITS_PER_WIDE_INT
18330 || tree_fits_uhwi_p (value))
18331 {
18332 if (TYPE_UNSIGNED (TREE_TYPE (value)))
18333 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
18334 else
18335 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
18336 }
18337 else
18338 /* Otherwise represent the bound as an unsigned value with
18339 the precision of its type. The precision and signedness
18340 of the type will be necessary to re-interpret it
18341 unambiguously. */
18342 add_AT_wide (die, attr, value);
18343 return;
18344 }
18345
18346 /* Otherwise, if it's possible and permitted too, output a reference to
18347 another DIE. */
18348 if ((forms & dw_scalar_form_reference) != 0)
18349 {
18350 tree decl = NULL_TREE;
18351
18352 /* Some type attributes reference an outer type. For instance, the upper
18353 bound of an array may reference an embedding record (this happens in
18354 Ada). */
18355 if (TREE_CODE (value) == COMPONENT_REF
18356 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
18357 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
18358 decl = TREE_OPERAND (value, 1);
18359
18360 else if (VAR_P (value)
18361 || TREE_CODE (value) == PARM_DECL
18362 || TREE_CODE (value) == RESULT_DECL)
18363 decl = value;
18364
18365 if (decl != NULL_TREE)
18366 {
18367 dw_die_ref decl_die = lookup_decl_die (decl);
18368
18369 /* ??? Can this happen, or should the variable have been bound
18370 first? Probably it can, since I imagine that we try to create
18371 the types of parameters in the order in which they exist in
18372 the list, and won't have created a forward reference to a
18373 later parameter. */
18374 if (decl_die != NULL)
18375 {
18376 add_AT_die_ref (die, attr, decl_die);
18377 return;
18378 }
18379 }
18380 }
18381
18382 /* Last chance: try to create a stack operation procedure to evaluate the
18383 value. Do nothing if even that is not possible or permitted. */
18384 if ((forms & dw_scalar_form_exprloc) == 0)
18385 return;
18386
18387 list = loc_list_from_tree (value, 2, context);
18388 if (list == NULL || single_element_loc_list_p (list))
18389 {
18390      /* If this attribute is neither a reference nor a constant, it is
18391         a DWARF expression rather than a location description.  For that
18392 loc_list_from_tree (value, 0, &context) is needed. */
18393 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
18394 if (list2 && single_element_loc_list_p (list2))
18395 {
18396 add_AT_loc (die, attr, list2->expr);
18397 return;
18398 }
18399 }
18400
18401 /* If that failed to give a single element location list, fall back to
18402 outputting this as a reference... still if permitted. */
18403 if (list == NULL || (forms & dw_scalar_form_reference) == 0)
18404 return;
18405
18406 if (current_function_decl == 0)
18407 context_die = comp_unit_die ();
18408 else
18409 context_die = lookup_decl_die (current_function_decl);
18410
18411 decl_die = new_die (DW_TAG_variable, context_die, value);
18412 add_AT_flag (decl_die, DW_AT_artificial, 1);
18413 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
18414 context_die);
18415 add_AT_location_description (decl_die, DW_AT_location, list);
18416 add_AT_die_ref (die, attr, decl_die);
18417 }
18418
18419 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
18420 default. */
18421
18422 static int
18423 lower_bound_default (void)
18424 {
18425 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18426 {
18427 case DW_LANG_C:
18428 case DW_LANG_C89:
18429 case DW_LANG_C99:
18430 case DW_LANG_C11:
18431 case DW_LANG_C_plus_plus:
18432 case DW_LANG_C_plus_plus_11:
18433 case DW_LANG_C_plus_plus_14:
18434 case DW_LANG_ObjC:
18435 case DW_LANG_ObjC_plus_plus:
18436 case DW_LANG_Java:
18437 return 0;
18438 case DW_LANG_Fortran77:
18439 case DW_LANG_Fortran90:
18440 case DW_LANG_Fortran95:
18441 case DW_LANG_Fortran03:
18442 case DW_LANG_Fortran08:
18443 return 1;
18444 case DW_LANG_UPC:
18445 case DW_LANG_D:
18446 case DW_LANG_Python:
18447 return dwarf_version >= 4 ? 0 : -1;
18448 case DW_LANG_Ada95:
18449 case DW_LANG_Ada83:
18450 case DW_LANG_Cobol74:
18451 case DW_LANG_Cobol85:
18452 case DW_LANG_Pascal83:
18453 case DW_LANG_Modula2:
18454 case DW_LANG_PLI:
18455 return dwarf_version >= 4 ? 1 : -1;
18456 default:
18457 return -1;
18458 }
18459 }
18460
18461 /* Given a tree node describing an array bound (either lower or upper) output
18462 a representation for that bound. */
18463
18464 static void
18465 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
18466 tree bound, const struct loc_descr_context *context)
18467 {
18468 int dflt;
18469
18470 while (1)
18471 switch (TREE_CODE (bound))
18472 {
18473 /* Strip all conversions. */
18474 CASE_CONVERT:
18475 case VIEW_CONVERT_EXPR:
18476 bound = TREE_OPERAND (bound, 0);
18477 break;
18478
18479 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
18480 are even omitted when they are the default. */
18481 case INTEGER_CST:
18482 /* If the value for this bound is the default one, we can even omit the
18483 attribute. */
18484 if (bound_attr == DW_AT_lower_bound
18485 && tree_fits_shwi_p (bound)
18486 && (dflt = lower_bound_default ()) != -1
18487 && tree_to_shwi (bound) == dflt)
18488 return;
18489
18490 /* FALLTHRU */
18491
18492 default:
18493       /* Because of the complex interactions there can be with other GNAT
18494          encodings, GDB isn't ready yet to handle a proper DWARF description
18495          for self-referential subrange bounds: let GNAT encodings do the
18496 magic in such a case. */
18497 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
18498 && contains_placeholder_p (bound))
18499 return;
18500
18501 add_scalar_info (subrange_die, bound_attr, bound,
18502 dw_scalar_form_constant
18503 | dw_scalar_form_exprloc
18504 | dw_scalar_form_reference,
18505 context);
18506 return;
18507 }
18508 }
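/* Hedged illustration (hypothetical sources, not part of GCC): for a C
   compilation unit, `int a[10];' has the default lower bound 0, so only
   DW_AT_upper_bound 9 is emitted for the subrange; for a Fortran unit,
   `INTEGER A(10)' has the default lower bound 1, so only
   DW_AT_upper_bound 10 is emitted.  Non-default bounds go through
   add_scalar_info as a constant, a DIE reference, or an exprloc.  */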
18509
18510 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
18511 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
18512 Note that the block of subscript information for an array type also
18513 includes information about the element type of the given array type.
18514
18515 This function reuses previously set type and bound information if
18516 available. */
18517
18518 static void
18519 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
18520 {
18521 unsigned dimension_number;
18522 tree lower, upper;
18523 dw_die_ref child = type_die->die_child;
18524
18525 for (dimension_number = 0;
18526 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
18527 type = TREE_TYPE (type), dimension_number++)
18528 {
18529 tree domain = TYPE_DOMAIN (type);
18530
18531 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
18532 break;
18533
18534 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
18535 and (in GNU C only) variable bounds. Handle all three forms
18536 here. */
18537
18538 /* Find and reuse a previously generated DW_TAG_subrange_type if
18539 available.
18540
18541 For multi-dimensional arrays, as we iterate through the
18542 various dimensions in the enclosing for loop above, we also
18543 iterate through the DIE children and pick at each
18544 DW_TAG_subrange_type previously generated (if available).
18545 Each child DW_TAG_subrange_type DIE describes the range of
18546 the current dimension. At this point we should have as many
18547 DW_TAG_subrange_type's as we have dimensions in the
18548 array. */
18549 dw_die_ref subrange_die = NULL;
18550 if (child)
18551 while (1)
18552 {
18553 child = child->die_sib;
18554 if (child->die_tag == DW_TAG_subrange_type)
18555 subrange_die = child;
18556 if (child == type_die->die_child)
18557 {
18558 /* If we wrapped around, stop looking next time. */
18559 child = NULL;
18560 break;
18561 }
18562 if (child->die_tag == DW_TAG_subrange_type)
18563 break;
18564 }
18565 if (!subrange_die)
18566 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
18567
18568 if (domain)
18569 {
18570 /* We have an array type with specified bounds. */
18571 lower = TYPE_MIN_VALUE (domain);
18572 upper = TYPE_MAX_VALUE (domain);
18573
18574 /* Define the index type. */
18575 if (TREE_TYPE (domain)
18576 && !get_AT (subrange_die, DW_AT_type))
18577 {
18578 /* ??? This is probably an Ada unnamed subrange type. Ignore the
18579 TREE_TYPE field. We can't emit debug info for this
18580 because it is an unnamed integral type. */
18581 if (TREE_CODE (domain) == INTEGER_TYPE
18582 && TYPE_NAME (domain) == NULL_TREE
18583 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
18584 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
18585 ;
18586 else
18587 add_type_attribute (subrange_die, TREE_TYPE (domain),
18588 TYPE_UNQUALIFIED, false, type_die);
18589 }
18590
18591 /* ??? If upper is NULL, the array has unspecified length,
18592 but it does have a lower bound. This happens with Fortran
18593              dimension arr(N:*).
18594 Since the debugger is definitely going to need to know N
18595 to produce useful results, go ahead and output the lower
18596 bound solo, and hope the debugger can cope. */
18597
18598 if (!get_AT (subrange_die, DW_AT_lower_bound))
18599 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
18600 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
18601 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
18602 }
18603
18604 /* Otherwise we have an array type with an unspecified length. The
18605 DWARF-2 spec does not say how to handle this; let's just leave out the
18606 bounds. */
18607 }
18608 }
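/* Hedged illustration (hypothetical C source, not part of GCC): with
   COLLAPSE_P true, `int m[2][3];' is described by a single
   DW_TAG_array_type DIE whose element type is `int' and which has two
   DW_TAG_subrange_type children, one per dimension, with
   DW_AT_upper_bound 1 and DW_AT_upper_bound 2 respectively.  */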
18609
18610 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
18611
18612 static void
18613 add_byte_size_attribute (dw_die_ref die, tree tree_node)
18614 {
18615 dw_die_ref decl_die;
18616 HOST_WIDE_INT size;
18617 dw_loc_descr_ref size_expr = NULL;
18618
18619 switch (TREE_CODE (tree_node))
18620 {
18621 case ERROR_MARK:
18622 size = 0;
18623 break;
18624 case ENUMERAL_TYPE:
18625 case RECORD_TYPE:
18626 case UNION_TYPE:
18627 case QUAL_UNION_TYPE:
18628 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
18629 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
18630 {
18631 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
18632 return;
18633 }
18634 size_expr = type_byte_size (tree_node, &size);
18635 break;
18636 case FIELD_DECL:
18637 /* For a data member of a struct or union, the DW_AT_byte_size is
18638 generally given as the number of bytes normally allocated for an
18639 object of the *declared* type of the member itself. This is true
18640 even for bit-fields. */
18641 size = int_size_in_bytes (field_type (tree_node));
18642 break;
18643 default:
18644 gcc_unreachable ();
18645 }
18646
18647 /* Support for dynamically-sized objects was introduced by DWARFv3.
18648 At the moment, GDB does not handle variable byte sizes very well,
18649 though. */
18650 if ((dwarf_version >= 3 || !dwarf_strict)
18651 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
18652 && size_expr != NULL)
18653 add_AT_loc (die, DW_AT_byte_size, size_expr);
18654
18655 /* Note that `size' might be -1 when we get to this point. If it is, that
18656 indicates that the byte size of the entity in question is variable and
18657 that we could not generate a DWARF expression that computes it. */
18658 if (size >= 0)
18659 add_AT_unsigned (die, DW_AT_byte_size, size);
18660 }
18661
18662 /* For a FIELD_DECL node which represents a bit-field, output an attribute
18663 which specifies the distance in bits from the highest order bit of the
18664 "containing object" for the bit-field to the highest order bit of the
18665 bit-field itself.
18666
18667 For any given bit-field, the "containing object" is a hypothetical object
18668 (of some integral or enum type) within which the given bit-field lives. The
18669 type of this hypothetical "containing object" is always the same as the
18670 declared type of the individual bit-field itself. The determination of the
18671 exact location of the "containing object" for a bit-field is rather
18672 complicated. It's handled by the `field_byte_offset' function (above).
18673
18674 CTX is required: see the comment for VLR_CONTEXT.
18675
18676 Note that it is the size (in bytes) of the hypothetical "containing object"
18677 which will be given in the DW_AT_byte_size attribute for this bit-field.
18678 (See `byte_size_attribute' above). */
18679
18680 static inline void
18681 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
18682 {
18683 HOST_WIDE_INT object_offset_in_bytes;
18684 tree original_type = DECL_BIT_FIELD_TYPE (decl);
18685 HOST_WIDE_INT bitpos_int;
18686 HOST_WIDE_INT highest_order_object_bit_offset;
18687 HOST_WIDE_INT highest_order_field_bit_offset;
18688 HOST_WIDE_INT bit_offset;
18689
18690 field_byte_offset (decl, ctx, &object_offset_in_bytes);
18691
18692 /* Must be a field and a bit field. */
18693 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
18694
18695 /* We can't yet handle bit-fields whose offsets are variable, so if we
18696 encounter such things, just return without generating any attribute
18697 whatsoever. Likewise for variable or too large size. */
18698 if (! tree_fits_shwi_p (bit_position (decl))
18699 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
18700 return;
18701
18702 bitpos_int = int_bit_position (decl);
18703
18704 /* Note that the bit offset is always the distance (in bits) from the
18705 highest-order bit of the "containing object" to the highest-order bit of
18706 the bit-field itself. Since the "high-order end" of any object or field
18707 is different on big-endian and little-endian machines, the computation
18708 below must take account of these differences. */
18709 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
18710 highest_order_field_bit_offset = bitpos_int;
18711
18712 if (! BYTES_BIG_ENDIAN)
18713 {
18714 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
18715 highest_order_object_bit_offset +=
18716 simple_type_size_in_bits (original_type);
18717 }
18718
18719 bit_offset
18720 = (! BYTES_BIG_ENDIAN
18721 ? highest_order_object_bit_offset - highest_order_field_bit_offset
18722 : highest_order_field_bit_offset - highest_order_object_bit_offset);
18723
18724 if (bit_offset < 0)
18725 add_AT_int (die, DW_AT_bit_offset, bit_offset);
18726 else
18727 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
18728 }
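/* Worked example (assuming a little-endian target with 4-byte int and the
   usual bit-field layout; not taken from any particular ABI document): for

       struct s { unsigned a : 3; unsigned b : 5; };

   field `b' has int_bit_position 3 and DECL_SIZE 5, and its containing
   `unsigned int' object starts at byte 0.  The computation above gives
   highest_order_object_bit_offset = 0 + 32 and
   highest_order_field_bit_offset = 3 + 5 = 8, hence
   DW_AT_bit_offset = 32 - 8 = 24, alongside DW_AT_bit_size 5 and
   DW_AT_byte_size 4.  */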
18729
18730 /* For a FIELD_DECL node which represents a bit field, output an attribute
18731 which specifies the length in bits of the given field. */
18732
18733 static inline void
18734 add_bit_size_attribute (dw_die_ref die, tree decl)
18735 {
18736 /* Must be a field and a bit field. */
18737 gcc_assert (TREE_CODE (decl) == FIELD_DECL
18738 && DECL_BIT_FIELD_TYPE (decl));
18739
18740 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
18741 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
18742 }
18743
18744 /* If the compiled language is ANSI C, then add a 'prototyped'
18745    attribute if argument types are given for the parameters of a function.  */
18746
18747 static inline void
18748 add_prototyped_attribute (dw_die_ref die, tree func_type)
18749 {
18750 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18751 {
18752 case DW_LANG_C:
18753 case DW_LANG_C89:
18754 case DW_LANG_C99:
18755 case DW_LANG_C11:
18756 case DW_LANG_ObjC:
18757 if (prototype_p (func_type))
18758 add_AT_flag (die, DW_AT_prototyped, 1);
18759 break;
18760 default:
18761 break;
18762 }
18763 }
18764
18765 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
18766 by looking in the type declaration, the object declaration equate table or
18767 the block mapping. */
18768
18769 static inline dw_die_ref
18770 add_abstract_origin_attribute (dw_die_ref die, tree origin)
18771 {
18772 dw_die_ref origin_die = NULL;
18773
18774 if (TREE_CODE (origin) != FUNCTION_DECL
18775 && TREE_CODE (origin) != BLOCK)
18776 {
18777 /* We may have gotten separated from the block for the inlined
18778 function, if we're in an exception handler or some such; make
18779 sure that the abstract function has been written out.
18780
18781 Doing this for nested functions is wrong, however; functions are
18782 distinct units, and our context might not even be inline. */
18783 tree fn = origin;
18784
18785 if (TYPE_P (fn))
18786 fn = TYPE_STUB_DECL (fn);
18787
18788 fn = decl_function_context (fn);
18789 if (fn)
18790 dwarf2out_abstract_function (fn);
18791 }
18792
18793 if (DECL_P (origin))
18794 origin_die = lookup_decl_die (origin);
18795 else if (TYPE_P (origin))
18796 origin_die = lookup_type_die (origin);
18797 else if (TREE_CODE (origin) == BLOCK)
18798 origin_die = BLOCK_DIE (origin);
18799
18800   /* XXX: Functions that are never lowered don't always have correct block
18801      trees (in the case of Java they simply have no block tree; the same is true
18802      in some other languages).  For these functions, there is nothing we can really do to
18803 output correct debug info for inlined functions in all cases. Rather
18804 than die, we'll just produce deficient debug info now, in that we will
18805 have variables without a proper abstract origin. In the future, when all
18806 functions are lowered, we should re-add a gcc_assert (origin_die)
18807 here. */
18808
18809 if (origin_die)
18810 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
18811 return origin_die;
18812 }
18813
18814 /* We do not currently support the pure_virtual attribute. */
18815
18816 static inline void
18817 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
18818 {
18819 if (DECL_VINDEX (func_decl))
18820 {
18821 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
18822
18823 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
18824 add_AT_loc (die, DW_AT_vtable_elem_location,
18825 new_loc_descr (DW_OP_constu,
18826 tree_to_shwi (DECL_VINDEX (func_decl)),
18827 0));
18828
18829 /* GNU extension: Record what type this method came from originally. */
18830 if (debug_info_level > DINFO_LEVEL_TERSE
18831 && DECL_CONTEXT (func_decl))
18832 add_AT_die_ref (die, DW_AT_containing_type,
18833 lookup_type_die (DECL_CONTEXT (func_decl)));
18834 }
18835 }
18836 \f
18837 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
18838    given decl.  This was a vendor extension until DWARF 4
18839    standardized it.  */
18840
18841 static void
18842 add_linkage_attr (dw_die_ref die, tree decl)
18843 {
18844 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18845
18846 /* Mimic what assemble_name_raw does with a leading '*'. */
18847 if (name[0] == '*')
18848 name = &name[1];
18849
18850 if (dwarf_version >= 4)
18851 add_AT_string (die, DW_AT_linkage_name, name);
18852 else
18853 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
18854 }
18855
18856 /* Add source coordinate attributes for the given decl. */
18857
18858 static void
18859 add_src_coords_attributes (dw_die_ref die, tree decl)
18860 {
18861 expanded_location s;
18862
18863 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
18864 return;
18865 s = expand_location (DECL_SOURCE_LOCATION (decl));
18866 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
18867 add_AT_unsigned (die, DW_AT_decl_line, s.line);
18868 }
18869
18870 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
18871
18872 static void
18873 add_linkage_name_raw (dw_die_ref die, tree decl)
18874 {
18875 /* Defer until we have an assembler name set. */
18876 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
18877 {
18878 limbo_die_node *asm_name;
18879
18880 asm_name = ggc_cleared_alloc<limbo_die_node> ();
18881 asm_name->die = die;
18882 asm_name->created_for = decl;
18883 asm_name->next = deferred_asm_name;
18884 deferred_asm_name = asm_name;
18885 }
18886 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
18887 add_linkage_attr (die, decl);
18888 }
18889
18890 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
18891
18892 static void
18893 add_linkage_name (dw_die_ref die, tree decl)
18894 {
18895 if (debug_info_level > DINFO_LEVEL_NONE
18896 && VAR_OR_FUNCTION_DECL_P (decl)
18897 && TREE_PUBLIC (decl)
18898 && !(VAR_P (decl) && DECL_REGISTER (decl))
18899 && die->die_tag != DW_TAG_member)
18900 add_linkage_name_raw (die, decl);
18901 }
18902
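/* Hedged illustration (hypothetical C++ source, not part of GCC): for
   `namespace ns { int var; }' the variable is TREE_PUBLIC and its
   assembler name differs from DECL_NAME, so its DIE gets
   DW_AT_linkage_name "_ZN2ns3varE" (or DW_AT_MIPS_linkage_name when
   targeting DWARF versions before 4).  */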
18903 /* Add a DW_AT_name attribute and source coordinate attribute for the
18904 given decl, but only if it actually has a name. */
18905
18906 static void
18907 add_name_and_src_coords_attributes (dw_die_ref die, tree decl)
18908 {
18909 tree decl_name;
18910
18911 decl_name = DECL_NAME (decl);
18912 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
18913 {
18914 const char *name = dwarf2_name (decl, 0);
18915 if (name)
18916 add_name_attribute (die, name);
18917 if (! DECL_ARTIFICIAL (decl))
18918 add_src_coords_attributes (die, decl);
18919
18920 add_linkage_name (die, decl);
18921 }
18922
18923 #ifdef VMS_DEBUGGING_INFO
18924 /* Get the function's name, as described by its RTL. This may be different
18925 from the DECL_NAME name used in the source file. */
18926 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
18927 {
18928 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
18929 XEXP (DECL_RTL (decl), 0), false);
18930 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
18931 }
18932 #endif /* VMS_DEBUGGING_INFO */
18933 }
18934
18935 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
18936
18937 static void
18938 add_discr_value (dw_die_ref die, dw_discr_value *value)
18939 {
18940 dw_attr_node attr;
18941
18942 attr.dw_attr = DW_AT_discr_value;
18943 attr.dw_attr_val.val_class = dw_val_class_discr_value;
18944 attr.dw_attr_val.val_entry = NULL;
18945 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
18946 if (value->pos)
18947 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
18948 else
18949 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
18950 add_dwarf_attr (die, &attr);
18951 }
18952
18953 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
18954
18955 static void
18956 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
18957 {
18958 dw_attr_node attr;
18959
18960 attr.dw_attr = DW_AT_discr_list;
18961 attr.dw_attr_val.val_class = dw_val_class_discr_list;
18962 attr.dw_attr_val.val_entry = NULL;
18963 attr.dw_attr_val.v.val_discr_list = discr_list;
18964 add_dwarf_attr (die, &attr);
18965 }
18966
18967 static inline dw_discr_list_ref
18968 AT_discr_list (dw_attr_node *attr)
18969 {
18970 return attr->dw_attr_val.v.val_discr_list;
18971 }
18972
18973 #ifdef VMS_DEBUGGING_INFO
18974 /* Output the debug main pointer DIE for VMS.  */
18975
18976 void
18977 dwarf2out_vms_debug_main_pointer (void)
18978 {
18979 char label[MAX_ARTIFICIAL_LABEL_BYTES];
18980 dw_die_ref die;
18981
18982 /* Allocate the VMS debug main subprogram die. */
18983 die = ggc_cleared_alloc<die_node> ();
18984 die->die_tag = DW_TAG_subprogram;
18985 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
18986 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
18987 current_function_funcdef_no);
18988 add_AT_lbl_id (die, DW_AT_entry_pc, label);
18989
18990 /* Make it the first child of comp_unit_die (). */
18991 die->die_parent = comp_unit_die ();
18992 if (comp_unit_die ()->die_child)
18993 {
18994 die->die_sib = comp_unit_die ()->die_child->die_sib;
18995 comp_unit_die ()->die_child->die_sib = die;
18996 }
18997 else
18998 {
18999 die->die_sib = die;
19000 comp_unit_die ()->die_child = die;
19001 }
19002 }
19003 #endif /* VMS_DEBUGGING_INFO */
19004
19005 /* Push a new declaration scope. */
19006
19007 static void
19008 push_decl_scope (tree scope)
19009 {
19010 vec_safe_push (decl_scope_table, scope);
19011 }
19012
19013 /* Pop a declaration scope. */
19014
19015 static inline void
19016 pop_decl_scope (void)
19017 {
19018 decl_scope_table->pop ();
19019 }
19020
19021 /* walk_tree helper function for uses_local_type, below. */
19022
19023 static tree
19024 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
19025 {
19026 if (!TYPE_P (*tp))
19027 *walk_subtrees = 0;
19028 else
19029 {
19030 tree name = TYPE_NAME (*tp);
19031 if (name && DECL_P (name) && decl_function_context (name))
19032 return *tp;
19033 }
19034 return NULL_TREE;
19035 }
19036
19037 /* If TYPE involves a function-local type (including a local typedef to a
19038 non-local type), returns that type; otherwise returns NULL_TREE. */
19039
19040 static tree
19041 uses_local_type (tree type)
19042 {
19043 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
19044 return used;
19045 }
19046
19047 /* Return the DIE for the scope that immediately contains this type.
19048 Non-named types that do not involve a function-local type get global
19049 scope. Named types nested in namespaces or other types get their
19050 containing scope. All other types (i.e. function-local named types) get
19051 the current active scope. */
19052
19053 static dw_die_ref
19054 scope_die_for (tree t, dw_die_ref context_die)
19055 {
19056 dw_die_ref scope_die = NULL;
19057 tree containing_scope;
19058
19059 /* Non-types always go in the current scope. */
19060 gcc_assert (TYPE_P (t));
19061
19062 /* Use the scope of the typedef, rather than the scope of the type
19063 it refers to. */
19064 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
19065 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
19066 else
19067 containing_scope = TYPE_CONTEXT (t);
19068
19069 /* Use the containing namespace if there is one. */
19070 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
19071 {
19072 if (context_die == lookup_decl_die (containing_scope))
19073 /* OK */;
19074 else if (debug_info_level > DINFO_LEVEL_TERSE)
19075 context_die = get_context_die (containing_scope);
19076 else
19077 containing_scope = NULL_TREE;
19078 }
19079
19080 /* Ignore function type "scopes" from the C frontend. They mean that
19081 a tagged type is local to a parmlist of a function declarator, but
19082 that isn't useful to DWARF. */
19083 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
19084 containing_scope = NULL_TREE;
19085
19086 if (SCOPE_FILE_SCOPE_P (containing_scope))
19087 {
19088 /* If T uses a local type keep it local as well, to avoid references
19089 to function-local DIEs from outside the function. */
19090 if (current_function_decl && uses_local_type (t))
19091 scope_die = context_die;
19092 else
19093 scope_die = comp_unit_die ();
19094 }
19095 else if (TYPE_P (containing_scope))
19096 {
19097 /* For types, we can just look up the appropriate DIE. */
19098 if (debug_info_level > DINFO_LEVEL_TERSE)
19099 scope_die = get_context_die (containing_scope);
19100 else
19101 {
19102 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
19103 if (scope_die == NULL)
19104 scope_die = comp_unit_die ();
19105 }
19106 }
19107 else
19108 scope_die = context_die;
19109
19110 return scope_die;
19111 }
19112
19113 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
19114
19115 static inline int
19116 local_scope_p (dw_die_ref context_die)
19117 {
19118 for (; context_die; context_die = context_die->die_parent)
19119 if (context_die->die_tag == DW_TAG_inlined_subroutine
19120 || context_die->die_tag == DW_TAG_subprogram)
19121 return 1;
19122
19123 return 0;
19124 }
19125
19126 /* Returns nonzero if CONTEXT_DIE is a class. */
19127
19128 static inline int
19129 class_scope_p (dw_die_ref context_die)
19130 {
19131 return (context_die
19132 && (context_die->die_tag == DW_TAG_structure_type
19133 || context_die->die_tag == DW_TAG_class_type
19134 || context_die->die_tag == DW_TAG_interface_type
19135 || context_die->die_tag == DW_TAG_union_type));
19136 }
19137
19138 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
19139 whether or not to treat a DIE in this context as a declaration. */
19140
19141 static inline int
19142 class_or_namespace_scope_p (dw_die_ref context_die)
19143 {
19144 return (class_scope_p (context_die)
19145 || (context_die && context_die->die_tag == DW_TAG_namespace));
19146 }
19147
19148 /* Many forms of DIEs require a "type description" attribute. This
19149 routine locates the proper "type descriptor" die for the type given
19150 by 'type' plus any additional qualifiers given by 'cv_quals', and
19151 adds a DW_AT_type attribute below the given die. */
19152
19153 static void
19154 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
19155 bool reverse, dw_die_ref context_die)
19156 {
19157 enum tree_code code = TREE_CODE (type);
19158 dw_die_ref type_die = NULL;
19159
19160 /* ??? If this type is an unnamed subrange type of an integral, floating-point
19161 or fixed-point type, use the inner type. This is because we have no
19162 support for unnamed types in base_type_die. This can happen if this is
19163      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
19164 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
19165 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
19166 type = TREE_TYPE (type), code = TREE_CODE (type);
19167
19168 if (code == ERROR_MARK
19169 /* Handle a special case. For functions whose return type is void, we
19170 generate *no* type attribute. (Note that no object may have type
19171 `void', so this only applies to function return types). */
19172 || code == VOID_TYPE)
19173 return;
19174
19175 type_die = modified_type_die (type,
19176 cv_quals | TYPE_QUALS_NO_ADDR_SPACE (type),
19177 reverse,
19178 context_die);
19179
19180 if (type_die != NULL)
19181 add_AT_die_ref (object_die, DW_AT_type, type_die);
19182 }
19183
19184 /* Given an object die, add the calling convention attribute for the
19185 function call type. */
19186 static void
19187 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
19188 {
19189 enum dwarf_calling_convention value = DW_CC_normal;
19190
19191 value = ((enum dwarf_calling_convention)
19192 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
19193
19194 if (is_fortran ()
19195 && !strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "MAIN__"))
19196 {
19197 /* DWARF 2 doesn't provide a way to identify a program's source-level
19198 entry point. DW_AT_calling_convention attributes are only meant
19199 to describe functions' calling conventions. However, lacking a
19200 better way to signal the Fortran main program, we used this for
19201 a long time, following existing custom. Now, DWARF 4 has
19202 DW_AT_main_subprogram, which we add below, but some tools still
19203 rely on the old way, which we thus keep. */
19204 value = DW_CC_program;
19205
19206 if (dwarf_version >= 4 || !dwarf_strict)
19207 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
19208 }
19209
19210   /* Only add the attribute if the backend requests it, and the value
19211      is not DW_CC_normal.  */
19212 if (value && (value != DW_CC_normal))
19213 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
19214 }
19215
19216 /* Given a tree pointer to a struct, class, union, or enum type node, return
19217 a pointer to the (string) tag name for the given type, or zero if the type
19218 was declared without a tag. */
19219
19220 static const char *
19221 type_tag (const_tree type)
19222 {
19223 const char *name = 0;
19224
19225 if (TYPE_NAME (type) != 0)
19226 {
19227 tree t = 0;
19228
19229 /* Find the IDENTIFIER_NODE for the type name. */
19230 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
19231 && !TYPE_NAMELESS (type))
19232 t = TYPE_NAME (type);
19233
19234 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
19235 a TYPE_DECL node, regardless of whether or not a `typedef' was
19236 involved. */
19237 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
19238 && ! DECL_IGNORED_P (TYPE_NAME (type)))
19239 {
19240 /* We want to be extra verbose. Don't call dwarf_name if
19241 DECL_NAME isn't set. The default hook for decl_printable_name
19242 doesn't like that, and in this context it's correct to return
19243 0, instead of "<anonymous>" or the like. */
19244 if (DECL_NAME (TYPE_NAME (type))
19245 && !DECL_NAMELESS (TYPE_NAME (type)))
19246 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
19247 }
19248
19249 /* Now get the name as a string, or invent one. */
19250 if (!name && t != 0)
19251 name = IDENTIFIER_POINTER (t);
19252 }
19253
19254 return (name == 0 || *name == '\0') ? 0 : name;
19255 }
19256
19257 /* Return the type associated with a data member, make a special check
19258 for bit field types. */
19259
19260 static inline tree
19261 member_declared_type (const_tree member)
19262 {
19263 return (DECL_BIT_FIELD_TYPE (member)
19264 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
19265 }
19266
19267 /* Get the decl's label, as described by its RTL. This may be different
19268 from the DECL_NAME name used in the source file. */
19269
19270 #if 0
19271 static const char *
19272 decl_start_label (tree decl)
19273 {
19274 rtx x;
19275 const char *fnname;
19276
19277 x = DECL_RTL (decl);
19278 gcc_assert (MEM_P (x));
19279
19280 x = XEXP (x, 0);
19281 gcc_assert (GET_CODE (x) == SYMBOL_REF);
19282
19283 fnname = XSTR (x, 0);
19284 return fnname;
19285 }
19286 #endif
19287 \f
19288 /* For variable-length arrays that have been previously generated, but
19289 may be incomplete due to missing subscript info, fill the subscript
19290 info. Return TRUE if this is one of those cases. */
19291 static bool
19292 fill_variable_array_bounds (tree type)
19293 {
19294 if (TREE_ASM_WRITTEN (type)
19295 && TREE_CODE (type) == ARRAY_TYPE
19296 && variably_modified_type_p (type, NULL))
19297 {
19298 dw_die_ref array_die = lookup_type_die (type);
19299 if (!array_die)
19300 return false;
19301 add_subscript_info (array_die, type, !is_ada ());
19302 return true;
19303 }
19304 return false;
19305 }
19306
19307 /* These routines generate the internal representation of the DIE's for
19308 the compilation unit. Debugging information is collected by walking
19309 the declaration trees passed in from dwarf2out_decl(). */
19310
19311 static void
19312 gen_array_type_die (tree type, dw_die_ref context_die)
19313 {
19314 dw_die_ref array_die;
19315
19316 /* GNU compilers represent multidimensional array types as sequences of one
19317 dimensional array types whose element types are themselves array types.
19318 We sometimes squish that down to a single array_type DIE with multiple
19319 subscripts in the Dwarf debugging info. The draft Dwarf specification
19320 says that we are allowed to do this kind of compression in C, because
19321 there is no difference between an array of arrays and a multidimensional
19322 array. We don't do this for Ada to remain as close as possible to the
19323 actual representation, which is especially important given the language's
19324 flexibility with respect to arrays of variable size. */
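
/* An illustrative sketch (not emitted verbatim by this function): with
   collapsing enabled, a C declaration like "int a[2][3]" becomes a single
   DW_TAG_array_type DIE whose element type is "int" and which carries two
   DW_TAG_subrange_type children (upper bounds 1 and 2), instead of two
   nested one-dimensional array type DIEs. */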
19325
19326 bool collapse_nested_arrays = !is_ada ();
19327
19328 if (fill_variable_array_bounds (type))
19329 return;
19330
19331 dw_die_ref scope_die = scope_die_for (type, context_die);
19332 tree element_type;
19333
19334 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
19335 DW_TAG_string_type doesn't have DW_AT_type attribute). */
19336 if (TYPE_STRING_FLAG (type)
19337 && TREE_CODE (type) == ARRAY_TYPE
19338 && is_fortran ()
19339 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
19340 {
19341 HOST_WIDE_INT size;
19342
19343 array_die = new_die (DW_TAG_string_type, scope_die, type);
19344 add_name_attribute (array_die, type_tag (type));
19345 equate_type_number_to_die (type, array_die);
19346 size = int_size_in_bytes (type);
19347 if (size >= 0)
19348 add_AT_unsigned (array_die, DW_AT_byte_size, size);
19349 else if (TYPE_DOMAIN (type) != NULL_TREE
19350 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
19351 {
19352 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
19353 tree rszdecl = szdecl;
19354 HOST_WIDE_INT rsize = 0;
19355
19356 size = int_size_in_bytes (TREE_TYPE (szdecl));
19357 if (!DECL_P (szdecl))
19358 {
19359 if (TREE_CODE (szdecl) == INDIRECT_REF
19360 && DECL_P (TREE_OPERAND (szdecl, 0)))
19361 {
19362 rszdecl = TREE_OPERAND (szdecl, 0);
19363 rsize = int_size_in_bytes (TREE_TYPE (rszdecl));
19364 if (rsize <= 0)
19365 size = 0;
19366 }
19367 else
19368 size = 0;
19369 }
19370 if (size > 0)
19371 {
19372 dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
19373 if (loc == NULL
19374 && early_dwarf
19375 && current_function_decl
19376 && DECL_CONTEXT (rszdecl) == current_function_decl)
19377 {
19378 dw_die_ref ref = lookup_decl_die (rszdecl);
19379 dw_loc_descr_ref l = NULL;
19380 if (ref)
19381 {
19382 l = new_loc_descr (DW_OP_call4, 0, 0);
19383 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
19384 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
19385 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
19386 }
19387 else if (TREE_CODE (rszdecl) == PARM_DECL
19388 && string_types)
19389 {
19390 l = new_loc_descr (DW_OP_call4, 0, 0);
19391 l->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
19392 l->dw_loc_oprnd1.v.val_decl_ref = rszdecl;
19393 string_types->safe_push (array_die);
19394 }
19395 if (l && rszdecl != szdecl)
19396 {
19397 if (rsize == DWARF2_ADDR_SIZE)
19398 add_loc_descr (&l, new_loc_descr (DW_OP_deref,
19399 0, 0));
19400 else
19401 add_loc_descr (&l, new_loc_descr (DW_OP_deref_size,
19402 rsize, 0));
19403 }
19404 if (l)
19405 loc = new_loc_list (l, NULL, NULL, NULL);
19406 }
19407 if (loc)
19408 {
19409 add_AT_location_description (array_die, DW_AT_string_length,
19410 loc);
19411 if (size != DWARF2_ADDR_SIZE)
19412 add_AT_unsigned (array_die, dwarf_version >= 5
19413 ? DW_AT_string_length_byte_size
19414 : DW_AT_byte_size, size);
19415 }
19416 }
19417 }
19418 return;
19419 }
19420
19421 array_die = new_die (DW_TAG_array_type, scope_die, type);
19422 add_name_attribute (array_die, type_tag (type));
19423 equate_type_number_to_die (type, array_die);
19424
19425 if (TREE_CODE (type) == VECTOR_TYPE)
19426 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
19427
19428 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
19429 if (is_fortran ()
19430 && TREE_CODE (type) == ARRAY_TYPE
19431 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
19432 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
19433 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19434
19435 #if 0
19436 /* We default the array ordering. SDB will probably do
19437 the right things even if DW_AT_ordering is not present. It's not even
19438 an issue until we start to get into multidimensional arrays anyway. If
19439 SDB is ever caught doing the Wrong Thing for multi-dimensional arrays,
19440 then we'll have to put the DW_AT_ordering attribute back in. (But if
19441 and when we find out that we need to put these in, we will only do so
19442 for multidimensional arrays.) */
19443 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19444 #endif
19445
19446 if (TREE_CODE (type) == VECTOR_TYPE)
19447 {
19448 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
19449 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
19450 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
19451 add_bound_info (subrange_die, DW_AT_upper_bound,
19452 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
19453 }
19454 else
19455 add_subscript_info (array_die, type, collapse_nested_arrays);
19456
19457 /* Add representation of the type of the elements of this array type and
19458 emit the corresponding DIE if we haven't done it already. */
19459 element_type = TREE_TYPE (type);
19460 if (collapse_nested_arrays)
19461 while (TREE_CODE (element_type) == ARRAY_TYPE)
19462 {
19463 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
19464 break;
19465 element_type = TREE_TYPE (element_type);
19466 }
19467
19468 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
19469 TREE_CODE (type) == ARRAY_TYPE
19470 && TYPE_REVERSE_STORAGE_ORDER (type),
19471 context_die);
19472
19473 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19474 if (TYPE_ARTIFICIAL (type))
19475 add_AT_flag (array_die, DW_AT_artificial, 1);
19476
19477 if (get_AT (array_die, DW_AT_name))
19478 add_pubtype (type, array_die);
19479 }
19480
19481 /* After all arguments are created, adjust any DW_TAG_string_type
19482 DIEs' DW_AT_string_length attributes. */
19483
19484 static void
19485 adjust_string_types (void)
19486 {
19487 dw_die_ref array_die;
19488 unsigned int i;
19489 FOR_EACH_VEC_ELT (*string_types, i, array_die)
19490 {
19491 dw_attr_node *a = get_AT (array_die, DW_AT_string_length);
19492 if (a == NULL)
19493 continue;
19494 dw_loc_descr_ref loc = AT_loc (a);
19495 gcc_assert (loc->dw_loc_opc == DW_OP_call4
19496 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref);
19497 dw_die_ref ref = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
19498 if (ref)
19499 {
19500 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
19501 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
19502 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
19503 }
19504 else
19505 {
19506 remove_AT (array_die, DW_AT_string_length);
19507 remove_AT (array_die, dwarf_version >= 5
19508 ? DW_AT_string_length_byte_size
19509 : DW_AT_byte_size);
19510 }
19511 }
19512 }
19513
19514 /* This routine generates a DIE for an array with a hidden descriptor; the
19515 details are filled into *info by a langhook. */
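
/* Sketch of the typical result (details vary by language hook and DWARF
   version): a DW_TAG_array_type DIE carrying DW_AT_data_location (and
   possibly DW_AT_allocated / DW_AT_associated) expressions that read the
   descriptor at run time, with one DW_TAG_subrange_type child per dimension
   holding the (possibly dynamic) bounds and stride. */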
19516
19517 static void
19518 gen_descr_array_type_die (tree type, struct array_descr_info *info,
19519 dw_die_ref context_die)
19520 {
19521 const dw_die_ref scope_die = scope_die_for (type, context_die);
19522 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
19523 const struct loc_descr_context context = { type, info->base_decl, NULL };
19524 int dim;
19525
19526 add_name_attribute (array_die, type_tag (type));
19527 equate_type_number_to_die (type, array_die);
19528
19529 if (info->ndimensions > 1)
19530 switch (info->ordering)
19531 {
19532 case array_descr_ordering_row_major:
19533 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19534 break;
19535 case array_descr_ordering_column_major:
19536 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19537 break;
19538 default:
19539 break;
19540 }
19541
19542 if (dwarf_version >= 3 || !dwarf_strict)
19543 {
19544 if (info->data_location)
19545 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
19546 dw_scalar_form_exprloc, &context);
19547 if (info->associated)
19548 add_scalar_info (array_die, DW_AT_associated, info->associated,
19549 dw_scalar_form_constant
19550 | dw_scalar_form_exprloc
19551 | dw_scalar_form_reference, &context);
19552 if (info->allocated)
19553 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
19554 dw_scalar_form_constant
19555 | dw_scalar_form_exprloc
19556 | dw_scalar_form_reference, &context);
19557 if (info->stride)
19558 {
19559 const enum dwarf_attribute attr
19560 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
19561 const int forms
19562 = (info->stride_in_bits)
19563 ? dw_scalar_form_constant
19564 : (dw_scalar_form_constant
19565 | dw_scalar_form_exprloc
19566 | dw_scalar_form_reference);
19567
19568 add_scalar_info (array_die, attr, info->stride, forms, &context);
19569 }
19570 }
19571
19572 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19573
19574 for (dim = 0; dim < info->ndimensions; dim++)
19575 {
19576 dw_die_ref subrange_die
19577 = new_die (DW_TAG_subrange_type, array_die, NULL);
19578
19579 if (info->dimen[dim].bounds_type)
19580 add_type_attribute (subrange_die,
19581 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
19582 false, context_die);
19583 if (info->dimen[dim].lower_bound)
19584 add_bound_info (subrange_die, DW_AT_lower_bound,
19585 info->dimen[dim].lower_bound, &context);
19586 if (info->dimen[dim].upper_bound)
19587 add_bound_info (subrange_die, DW_AT_upper_bound,
19588 info->dimen[dim].upper_bound, &context);
19589 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
19590 add_scalar_info (subrange_die, DW_AT_byte_stride,
19591 info->dimen[dim].stride,
19592 dw_scalar_form_constant
19593 | dw_scalar_form_exprloc
19594 | dw_scalar_form_reference,
19595 &context);
19596 }
19597
19598 gen_type_die (info->element_type, context_die);
19599 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
19600 TREE_CODE (type) == ARRAY_TYPE
19601 && TYPE_REVERSE_STORAGE_ORDER (type),
19602 context_die);
19603
19604 if (get_AT (array_die, DW_AT_name))
19605 add_pubtype (type, array_die);
19606 }
19607
19608 #if 0
19609 static void
19610 gen_entry_point_die (tree decl, dw_die_ref context_die)
19611 {
19612 tree origin = decl_ultimate_origin (decl);
19613 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
19614
19615 if (origin != NULL)
19616 add_abstract_origin_attribute (decl_die, origin);
19617 else
19618 {
19619 add_name_and_src_coords_attributes (decl_die, decl);
19620 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
19621 TYPE_UNQUALIFIED, false, context_die);
19622 }
19623
19624 if (DECL_ABSTRACT_P (decl))
19625 equate_decl_number_to_die (decl, decl_die);
19626 else
19627 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
19628 }
19629 #endif
19630
19631 /* Walk through the list of incomplete types again, trying once more to
19632 emit full debugging info for them. */
19633
19634 static void
19635 retry_incomplete_types (void)
19636 {
19637 set_early_dwarf s;
19638 int i;
19639
19640 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
19641 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
19642 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
19643 vec_safe_truncate (incomplete_types, 0);
19644 }
19645
19646 /* Determine what tag to use for a record type. */
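
/* For example (front ends differ), the C++ hook classifies "class C {};" as
   RECORD_IS_CLASS, yielding DW_TAG_class_type, while a plain "struct S {};"
   yields DW_TAG_structure_type; interfaces fall back to DW_TAG_structure_type
   when strict pre-DWARF-3 output is requested. */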
19647
19648 static enum dwarf_tag
19649 record_type_tag (tree type)
19650 {
19651 if (! lang_hooks.types.classify_record)
19652 return DW_TAG_structure_type;
19653
19654 switch (lang_hooks.types.classify_record (type))
19655 {
19656 case RECORD_IS_STRUCT:
19657 return DW_TAG_structure_type;
19658
19659 case RECORD_IS_CLASS:
19660 return DW_TAG_class_type;
19661
19662 case RECORD_IS_INTERFACE:
19663 if (dwarf_version >= 3 || !dwarf_strict)
19664 return DW_TAG_interface_type;
19665 return DW_TAG_structure_type;
19666
19667 default:
19668 gcc_unreachable ();
19669 }
19670 }
19671
19672 /* Generate a DIE to represent an enumeration type. Note that these DIEs
19673 include all of the information about the enumeration values also. Each
19674 enumerated type name/value is listed as a child of the enumerated type
19675 DIE. */
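
/* For example, "enum E { A = 1, B = 2 };" is represented roughly as follows
   (illustrative only; the exact attributes depend on language and DWARF
   version):

     DW_TAG_enumeration_type
       DW_AT_name "E"  DW_AT_byte_size 4
       DW_TAG_enumerator  DW_AT_name "A"  DW_AT_const_value 1
       DW_TAG_enumerator  DW_AT_name "B"  DW_AT_const_value 2  */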
19676
19677 static dw_die_ref
19678 gen_enumeration_type_die (tree type, dw_die_ref context_die)
19679 {
19680 dw_die_ref type_die = lookup_type_die (type);
19681
19682 if (type_die == NULL)
19683 {
19684 type_die = new_die (DW_TAG_enumeration_type,
19685 scope_die_for (type, context_die), type);
19686 equate_type_number_to_die (type, type_die);
19687 add_name_attribute (type_die, type_tag (type));
19688 if (dwarf_version >= 4 || !dwarf_strict)
19689 {
19690 if (ENUM_IS_SCOPED (type))
19691 add_AT_flag (type_die, DW_AT_enum_class, 1);
19692 if (ENUM_IS_OPAQUE (type))
19693 add_AT_flag (type_die, DW_AT_declaration, 1);
19694 }
19695 }
19696 else if (! TYPE_SIZE (type))
19697 return type_die;
19698 else
19699 remove_AT (type_die, DW_AT_declaration);
19700
19701 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
19702 given enum type is incomplete, do not generate the DW_AT_byte_size
19703 attribute or the DW_AT_element_list attribute. */
19704 if (TYPE_SIZE (type))
19705 {
19706 tree link;
19707
19708 TREE_ASM_WRITTEN (type) = 1;
19709 add_byte_size_attribute (type_die, type);
19710 if (dwarf_version >= 3 || !dwarf_strict)
19711 {
19712 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
19713 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
19714 context_die);
19715 }
19716 if (TYPE_STUB_DECL (type) != NULL_TREE)
19717 {
19718 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
19719 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
19720 }
19721
19722 /* If the first reference to this type was as the return type of an
19723 inline function, then it may not have a parent. Fix this now. */
19724 if (type_die->die_parent == NULL)
19725 add_child_die (scope_die_for (type, context_die), type_die);
19726
19727 for (link = TYPE_VALUES (type);
19728 link != NULL; link = TREE_CHAIN (link))
19729 {
19730 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
19731 tree value = TREE_VALUE (link);
19732
19733 add_name_attribute (enum_die,
19734 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
19735
19736 if (TREE_CODE (value) == CONST_DECL)
19737 value = DECL_INITIAL (value);
19738
19739 if (simple_type_size_in_bits (TREE_TYPE (value))
19740 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
19741 {
19742 /* For constant forms created by add_AT_unsigned, DWARF
19743 consumers (GDB, elfutils, etc.) always zero-extend
19744 the value. Only when the actual value is negative
19745 do we need to use add_AT_int to generate a constant
19746 form that can represent negative values. */
19747 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
19748 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
19749 add_AT_unsigned (enum_die, DW_AT_const_value,
19750 (unsigned HOST_WIDE_INT) val);
19751 else
19752 add_AT_int (enum_die, DW_AT_const_value, val);
19753 }
19754 else
19755 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
19756 that here. TODO: This should be re-worked to use correct
19757 signed/unsigned double tags for all cases. */
19758 add_AT_wide (enum_die, DW_AT_const_value, value);
19759 }
19760
19761 add_gnat_descriptive_type_attribute (type_die, type, context_die);
19762 if (TYPE_ARTIFICIAL (type))
19763 add_AT_flag (type_die, DW_AT_artificial, 1);
19764 }
19765 else
19766 add_AT_flag (type_die, DW_AT_declaration, 1);
19767
19768 add_pubtype (type, type_die);
19769
19770 return type_die;
19771 }
19772
19773 /* Generate a DIE to represent either a real live formal parameter decl or to
19774 represent just the type of some formal parameter position in some function
19775 type.
19776
19777 Note that this routine is a bit unusual because its argument may be a
19778 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
19779 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
19780 node. If it's the former then this function is being called to output a
19781 DIE to represent a formal parameter object (or some inlining thereof). If
19782 it's the latter, then this function is only being called to output a
19783 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
19784 argument type of some subprogram type.
19785 If EMIT_NAME_P is true, name and source coordinate attributes
19786 are emitted. */
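
/* For instance (a rough sketch), a defined parameter "int x" of a function
   yields a DW_TAG_formal_parameter DIE with DW_AT_name "x", a DW_AT_type
   reference to "int" and, once locations are known, a DW_AT_location; when
   called for a bare ..._TYPE node, only the DW_AT_type reference is
   emitted. */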
19787
19788 static dw_die_ref
19789 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
19790 dw_die_ref context_die)
19791 {
19792 tree node_or_origin = node ? node : origin;
19793 tree ultimate_origin;
19794 dw_die_ref parm_die = NULL;
19795
19796 if (TREE_CODE_CLASS (TREE_CODE (node_or_origin)) == tcc_declaration)
19797 {
19798 parm_die = lookup_decl_die (node);
19799
19800 /* If the contexts differ, we may not be talking about the same
19801 thing. */
19802 if (parm_die && parm_die->die_parent != context_die)
19803 {
19804 if (!DECL_ABSTRACT_P (node))
19805 {
19806 /* This can happen when creating an inlined instance, in
19807 which case we need to create a new DIE that will get
19808 annotated with DW_AT_abstract_origin. */
19809 parm_die = NULL;
19810 }
19811 else
19812 {
19813 /* FIXME: Reuse DIE even with a differing context.
19814
19815 This can happen when calling
19816 dwarf2out_abstract_function to build debug info for
19817 the abstract instance of a function for which we have
19818 already generated a DIE in
19819 dwarf2out_early_global_decl.
19820
19821 Once we remove dwarf2out_abstract_function, we should
19822 have a call to gcc_unreachable here. */
19823 }
19824 }
19825
19826 if (parm_die && parm_die->die_parent == NULL)
19827 {
19828 /* Check that parm_die already has the right attributes that
19829 we would have added below. If any attributes are
19830 missing, fall through to add them. */
19831 if (! DECL_ABSTRACT_P (node_or_origin)
19832 && !get_AT (parm_die, DW_AT_location)
19833 && !get_AT (parm_die, DW_AT_const_value))
19834 /* We are missing location info, and are about to add it. */
19835 ;
19836 else
19837 {
19838 add_child_die (context_die, parm_die);
19839 return parm_die;
19840 }
19841 }
19842 }
19843
19844 /* If we have a previously generated DIE, use it, unless this is a
19845 concrete instance (origin != NULL), in which case we need a new
19846 DIE with a corresponding DW_AT_abstract_origin. */
19847 bool reusing_die;
19848 if (parm_die && origin == NULL)
19849 reusing_die = true;
19850 else
19851 {
19852 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
19853 reusing_die = false;
19854 }
19855
19856 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
19857 {
19858 case tcc_declaration:
19859 ultimate_origin = decl_ultimate_origin (node_or_origin);
19860 if (node || ultimate_origin)
19861 origin = ultimate_origin;
19862
19863 if (reusing_die)
19864 goto add_location;
19865
19866 if (origin != NULL)
19867 add_abstract_origin_attribute (parm_die, origin);
19868 else if (emit_name_p)
19869 add_name_and_src_coords_attributes (parm_die, node);
19870 if (origin == NULL
19871 || (! DECL_ABSTRACT_P (node_or_origin)
19872 && variably_modified_type_p (TREE_TYPE (node_or_origin),
19873 decl_function_context
19874 (node_or_origin))))
19875 {
19876 tree type = TREE_TYPE (node_or_origin);
19877 if (decl_by_reference_p (node_or_origin))
19878 add_type_attribute (parm_die, TREE_TYPE (type),
19879 TYPE_UNQUALIFIED,
19880 false, context_die);
19881 else
19882 add_type_attribute (parm_die, type,
19883 decl_quals (node_or_origin),
19884 false, context_die);
19885 }
19886 if (origin == NULL && DECL_ARTIFICIAL (node))
19887 add_AT_flag (parm_die, DW_AT_artificial, 1);
19888 add_location:
19889 if (node && node != origin)
19890 equate_decl_number_to_die (node, parm_die);
19891 if (! DECL_ABSTRACT_P (node_or_origin))
19892 add_location_or_const_value_attribute (parm_die, node_or_origin,
19893 node == NULL);
19894
19895 break;
19896
19897 case tcc_type:
19898 /* We were called with some kind of a ..._TYPE node. */
19899 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
19900 context_die);
19901 break;
19902
19903 default:
19904 gcc_unreachable ();
19905 }
19906
19907 return parm_die;
19908 }
19909
19910 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
19911 child DW_TAG_formal_parameter DIEs representing the arguments of the
19912 parameter pack.
19913
19914 PARM_PACK must be a function parameter pack.
19915 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
19916 must point to the subsequent arguments of the function PACK_ARG belongs to.
19917 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
19918 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
19919 following the last one for which a DIE was generated. */
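
/* For example (illustrative): for a C++ pack such as "template<typename... T>
   void f (T... args)", an instantiation with two arguments gets a single
   DW_TAG_GNU_formal_parameter_pack DIE containing two nameless
   DW_TAG_formal_parameter children, one per expanded argument. */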
19920
19921 static dw_die_ref
19922 gen_formal_parameter_pack_die (tree parm_pack,
19923 tree pack_arg,
19924 dw_die_ref subr_die,
19925 tree *next_arg)
19926 {
19927 tree arg;
19928 dw_die_ref parm_pack_die;
19929
19930 gcc_assert (parm_pack
19931 && lang_hooks.function_parameter_pack_p (parm_pack)
19932 && subr_die);
19933
19934 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
19935 add_src_coords_attributes (parm_pack_die, parm_pack);
19936
19937 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
19938 {
19939 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
19940 parm_pack))
19941 break;
19942 gen_formal_parameter_die (arg, NULL,
19943 false /* Don't emit name attribute. */,
19944 parm_pack_die);
19945 }
19946 if (next_arg)
19947 *next_arg = arg;
19948 return parm_pack_die;
19949 }
19950
19951 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
19952 at the end of an (ANSI prototyped) formal parameter list. */
19953
19954 static void
19955 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
19956 {
19957 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
19958 }
19959
19960 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
19961 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
19962 parameters as specified in some function type specification (except for
19963 those which appear as part of a function *definition*). */
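
/* For example (sketch only), a prototype "int f (int, char *, ...)" gives
   the subroutine type DIE two nameless DW_TAG_formal_parameter children
   followed by a DW_TAG_unspecified_parameters child for the ellipsis. */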
19964
19965 static void
19966 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
19967 {
19968 tree link;
19969 tree formal_type = NULL;
19970 tree first_parm_type;
19971 tree arg;
19972
19973 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
19974 {
19975 arg = DECL_ARGUMENTS (function_or_method_type);
19976 function_or_method_type = TREE_TYPE (function_or_method_type);
19977 }
19978 else
19979 arg = NULL_TREE;
19980
19981 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
19982
19983 /* Make our first pass over the list of formal parameter types and output a
19984 DW_TAG_formal_parameter DIE for each one. */
19985 for (link = first_parm_type; link; )
19986 {
19987 dw_die_ref parm_die;
19988
19989 formal_type = TREE_VALUE (link);
19990 if (formal_type == void_type_node)
19991 break;
19992
19993 /* Output a (nameless) DIE to represent the formal parameter itself. */
19994 if (!POINTER_BOUNDS_TYPE_P (formal_type))
19995 {
19996 parm_die = gen_formal_parameter_die (formal_type, NULL,
19997 true /* Emit name attribute. */,
19998 context_die);
19999 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
20000 && link == first_parm_type)
20001 {
20002 add_AT_flag (parm_die, DW_AT_artificial, 1);
20003 if (dwarf_version >= 3 || !dwarf_strict)
20004 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
20005 }
20006 else if (arg && DECL_ARTIFICIAL (arg))
20007 add_AT_flag (parm_die, DW_AT_artificial, 1);
20008 }
20009
20010 link = TREE_CHAIN (link);
20011 if (arg)
20012 arg = DECL_CHAIN (arg);
20013 }
20014
20015 /* If this function type has an ellipsis, add a
20016 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
20017 if (formal_type != void_type_node)
20018 gen_unspecified_parameters_die (function_or_method_type, context_die);
20019
20020 /* Make our second (and final) pass over the list of formal parameter types
20021 and output DIEs to represent those types (as necessary). */
20022 for (link = TYPE_ARG_TYPES (function_or_method_type);
20023 link && TREE_VALUE (link);
20024 link = TREE_CHAIN (link))
20025 gen_type_die (TREE_VALUE (link), context_die);
20026 }
20027
20028 /* We want to generate the DIE for TYPE so that we can generate the
20029 die for MEMBER, which has been defined; we will need to refer back
20030 to the member declaration nested within TYPE. If we're trying to
20031 generate minimal debug info for TYPE, processing TYPE won't do the
20032 trick; we need to attach the member declaration by hand. */
20033
20034 static void
20035 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
20036 {
20037 gen_type_die (type, context_die);
20038
20039 /* If we're trying to avoid duplicate debug info, we may not have
20040 emitted the member decl for this function. Emit it now. */
20041 if (TYPE_STUB_DECL (type)
20042 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
20043 && ! lookup_decl_die (member))
20044 {
20045 dw_die_ref type_die;
20046 gcc_assert (!decl_ultimate_origin (member));
20047
20048 push_decl_scope (type);
20049 type_die = lookup_type_die_strip_naming_typedef (type);
20050 if (TREE_CODE (member) == FUNCTION_DECL)
20051 gen_subprogram_die (member, type_die);
20052 else if (TREE_CODE (member) == FIELD_DECL)
20053 {
20054 /* Ignore the nameless fields that are used to skip bits but handle
20055 C++ anonymous unions and structs. */
20056 if (DECL_NAME (member) != NULL_TREE
20057 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
20058 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
20059 {
20060 struct vlr_context vlr_ctx = {
20061 DECL_CONTEXT (member), /* struct_type */
20062 NULL_TREE /* variant_part_offset */
20063 };
20064 gen_type_die (member_declared_type (member), type_die);
20065 gen_field_die (member, &vlr_ctx, type_die);
20066 }
20067 }
20068 else
20069 gen_variable_die (member, NULL_TREE, type_die);
20070
20071 pop_decl_scope ();
20072 }
20073 }
20074 \f
20075 /* Forward declare these functions, because they are mutually recursive
20076 with their set_block_* pairing functions. */
20077 static void set_decl_origin_self (tree);
20078 static void set_decl_abstract_flags (tree, vec<tree> &);
20079
20080 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
20081 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
20082 that it points to the node itself, thus indicating that the node is its
20083 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
20084 the given node is NULL, recursively descend the decl/block tree which
20085 it is the root of, and for each other ..._DECL or BLOCK node contained
20086 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
20087 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
20088 values to point to themselves. */
20089
20090 static void
20091 set_block_origin_self (tree stmt)
20092 {
20093 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
20094 {
20095 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
20096
20097 {
20098 tree local_decl;
20099
20100 for (local_decl = BLOCK_VARS (stmt);
20101 local_decl != NULL_TREE;
20102 local_decl = DECL_CHAIN (local_decl))
20103 /* Do not recurse on nested functions since the inlining status
20104 of parent and child can be different as per the DWARF spec. */
20105 if (TREE_CODE (local_decl) != FUNCTION_DECL
20106 && !DECL_EXTERNAL (local_decl))
20107 set_decl_origin_self (local_decl);
20108 }
20109
20110 {
20111 tree subblock;
20112
20113 for (subblock = BLOCK_SUBBLOCKS (stmt);
20114 subblock != NULL_TREE;
20115 subblock = BLOCK_CHAIN (subblock))
20116 set_block_origin_self (subblock); /* Recurse. */
20117 }
20118 }
20119 }
20120
20121 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
20122 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
20123 node so that it points to the node itself, thus indicating that the
20124 node represents its own (abstract) origin. Additionally, if the
20125 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
20126 the decl/block tree of which the given node is the root, and for
20127 each other ..._DECL or BLOCK node contained therein whose
20128 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
20129 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
20130 point to themselves. */
20131
20132 static void
20133 set_decl_origin_self (tree decl)
20134 {
20135 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
20136 {
20137 DECL_ABSTRACT_ORIGIN (decl) = decl;
20138 if (TREE_CODE (decl) == FUNCTION_DECL)
20139 {
20140 tree arg;
20141
20142 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
20143 DECL_ABSTRACT_ORIGIN (arg) = arg;
20144 if (DECL_INITIAL (decl) != NULL_TREE
20145 && DECL_INITIAL (decl) != error_mark_node)
20146 set_block_origin_self (DECL_INITIAL (decl));
20147 }
20148 }
20149 }
20150 \f
20151 /* Given a pointer to some BLOCK node, set the BLOCK_ABSTRACT flag to 1
20152 and, if it wasn't 1 before, push it onto the ABSTRACT_VEC vector.
20153 For all local decls and all local sub-blocks (recursively) do it
20154 too. */
20155
20156 static void
20157 set_block_abstract_flags (tree stmt, vec<tree> &abstract_vec)
20158 {
20159 tree local_decl;
20160 tree subblock;
20161 unsigned int i;
20162
20163 if (!BLOCK_ABSTRACT (stmt))
20164 {
20165 abstract_vec.safe_push (stmt);
20166 BLOCK_ABSTRACT (stmt) = 1;
20167 }
20168
20169 for (local_decl = BLOCK_VARS (stmt);
20170 local_decl != NULL_TREE;
20171 local_decl = DECL_CHAIN (local_decl))
20172 if (! DECL_EXTERNAL (local_decl))
20173 set_decl_abstract_flags (local_decl, abstract_vec);
20174
20175 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
20176 {
20177 local_decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
20178 if ((VAR_P (local_decl) && !TREE_STATIC (local_decl))
20179 || TREE_CODE (local_decl) == PARM_DECL)
20180 set_decl_abstract_flags (local_decl, abstract_vec);
20181 }
20182
20183 for (subblock = BLOCK_SUBBLOCKS (stmt);
20184 subblock != NULL_TREE;
20185 subblock = BLOCK_CHAIN (subblock))
20186 set_block_abstract_flags (subblock, abstract_vec);
20187 }
20188
20189 /* Given a pointer to some ..._DECL node, set the DECL_ABSTRACT_P flag on it
20190 to 1 and, if it wasn't 1 before, push it onto the ABSTRACT_VEC vector.
20191 In the case where the decl is a FUNCTION_DECL also set the abstract
20192 flags for all of the parameters, local vars, local
20193 blocks and sub-blocks (recursively). */
20194
20195 static void
20196 set_decl_abstract_flags (tree decl, vec<tree> &abstract_vec)
20197 {
20198 if (!DECL_ABSTRACT_P (decl))
20199 {
20200 abstract_vec.safe_push (decl);
20201 DECL_ABSTRACT_P (decl) = 1;
20202 }
20203
20204 if (TREE_CODE (decl) == FUNCTION_DECL)
20205 {
20206 tree arg;
20207
20208 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
20209 if (!DECL_ABSTRACT_P (arg))
20210 {
20211 abstract_vec.safe_push (arg);
20212 DECL_ABSTRACT_P (arg) = 1;
20213 }
20214 if (DECL_INITIAL (decl) != NULL_TREE
20215 && DECL_INITIAL (decl) != error_mark_node)
20216 set_block_abstract_flags (DECL_INITIAL (decl), abstract_vec);
20217 }
20218 }
20219
20220 /* Generate the DWARF2 info for the "abstract" instance of a function which we
20221 may later generate inlined and/or out-of-line instances of.
20222
20223 FIXME: In the early-dwarf world, this function, and most of the
20224 DECL_ABSTRACT code should be obsoleted. The early DIE _is_
20225 the abstract instance. All we would need to do is annotate
20226 the early DIE with the appropriate DW_AT_inline in late
20227 dwarf (perhaps in gen_inlined_subroutine_die).
20228
20229 However, we can't do this yet, because LTO streaming of DIEs
20230 has not been implemented yet. */
20231
20232 static void
20233 dwarf2out_abstract_function (tree decl)
20234 {
20235 dw_die_ref old_die;
20236 tree save_fn;
20237 tree context;
20238 hash_table<decl_loc_hasher> *old_decl_loc_table;
20239 hash_table<dw_loc_list_hasher> *old_cached_dw_loc_list_table;
20240 int old_call_site_count, old_tail_call_site_count;
20241 struct call_arg_loc_node *old_call_arg_locations;
20242
20243 /* Make sure we have the actual abstract inline, not a clone. */
20244 decl = DECL_ORIGIN (decl);
20245
20246 old_die = lookup_decl_die (decl);
20247 if (old_die && get_AT (old_die, DW_AT_inline))
20248 /* We've already generated the abstract instance. */
20249 return;
20250
20251 /* We can be called recursively when seeing a block defining an inlined
20252 subroutine DIE. Be sure not to clobber the outer location table nor use
20253 it, or we would get locations in abstract instances. */
20254 old_decl_loc_table = decl_loc_table;
20255 decl_loc_table = NULL;
20256 old_cached_dw_loc_list_table = cached_dw_loc_list_table;
20257 cached_dw_loc_list_table = NULL;
20258 old_call_arg_locations = call_arg_locations;
20259 call_arg_locations = NULL;
20260 old_call_site_count = call_site_count;
20261 call_site_count = -1;
20262 old_tail_call_site_count = tail_call_site_count;
20263 tail_call_site_count = -1;
20264
20265 /* Be sure we've emitted the in-class declaration DIE (if any) first, so
20266 we don't get confused by DECL_ABSTRACT_P. */
20267 if (debug_info_level > DINFO_LEVEL_TERSE)
20268 {
20269 context = decl_class_context (decl);
20270 if (context)
20271 gen_type_die_for_member
20272 (context, decl, decl_function_context (decl) ? NULL : comp_unit_die ());
20273 }
20274
20275 /* Pretend we've just finished compiling this function. */
20276 save_fn = current_function_decl;
20277 current_function_decl = decl;
20278
20279 auto_vec<tree, 64> abstract_vec;
20280 set_decl_abstract_flags (decl, abstract_vec);
20281 dwarf2out_decl (decl);
20282 unsigned int i;
20283 tree t;
20284 FOR_EACH_VEC_ELT (abstract_vec, i, t)
20285 if (TREE_CODE (t) == BLOCK)
20286 BLOCK_ABSTRACT (t) = 0;
20287 else
20288 DECL_ABSTRACT_P (t) = 0;
20289
20290 current_function_decl = save_fn;
20291 decl_loc_table = old_decl_loc_table;
20292 cached_dw_loc_list_table = old_cached_dw_loc_list_table;
20293 call_arg_locations = old_call_arg_locations;
20294 call_site_count = old_call_site_count;
20295 tail_call_site_count = old_tail_call_site_count;
20296 }
20297
20298 /* Helper function of premark_used_types() which gets called through
20299 htab_traverse.
20300
20301 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20302 marked as unused by prune_unused_types. */
20303
20304 bool
20305 premark_used_types_helper (tree const &type, void *)
20306 {
20307 dw_die_ref die;
20308
20309 die = lookup_type_die (type);
20310 if (die != NULL)
20311 die->die_perennial_p = 1;
20312 return true;
20313 }
20314
20315 /* Helper function of premark_types_used_by_global_vars which gets called
20316 through htab_traverse.
20317
20318 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20319 marked as unused by prune_unused_types. The DIE of the type is marked
20320 only if the global variable using the type will actually be emitted. */
20321
20322 int
20323 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
20324 void *)
20325 {
20326 struct types_used_by_vars_entry *entry;
20327 dw_die_ref die;
20328
20329 entry = (struct types_used_by_vars_entry *) *slot;
20330 gcc_assert (entry->type != NULL
20331 && entry->var_decl != NULL);
20332 die = lookup_type_die (entry->type);
20333 if (die)
20334 {
20335 /* Ask cgraph if the global variable really is to be emitted.
20336 If yes, then we'll keep the DIE of ENTRY->TYPE. */
20337 varpool_node *node = varpool_node::get (entry->var_decl);
20338 if (node && node->definition)
20339 {
20340 die->die_perennial_p = 1;
20341 /* Keep the parent DIEs as well. */
20342 while ((die = die->die_parent) && die->die_perennial_p == 0)
20343 die->die_perennial_p = 1;
20344 }
20345 }
20346 return 1;
20347 }
20348
20349 /* Mark all members of used_types_hash as perennial. */
20350
20351 static void
20352 premark_used_types (struct function *fun)
20353 {
20354 if (fun && fun->used_types_hash)
20355 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
20356 }
20357
20358 /* Mark all members of types_used_by_vars_entry as perennial. */
20359
20360 static void
20361 premark_types_used_by_global_vars (void)
20362 {
20363 if (types_used_by_vars_hash)
20364 types_used_by_vars_hash
20365 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
20366 }
20367
20368 /* Generate a DW_TAG_GNU_call_site DIE in function DECL under SUBR_DIE
20369 for CA_LOC call arg loc node. */
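
/* Roughly speaking, the resulting DIE records the call's return-address
   label in DW_AT_low_pc, marks tail calls with DW_AT_GNU_tail_call, and
   points DW_AT_abstract_origin at the callee's DIE (or its address) when
   the call target is known. */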
20370
20371 static dw_die_ref
20372 gen_call_site_die (tree decl, dw_die_ref subr_die,
20373 struct call_arg_loc_node *ca_loc)
20374 {
20375 dw_die_ref stmt_die = NULL, die;
20376 tree block = ca_loc->block;
20377
20378 while (block
20379 && block != DECL_INITIAL (decl)
20380 && TREE_CODE (block) == BLOCK)
20381 {
20382 stmt_die = BLOCK_DIE (block);
20383 if (stmt_die)
20384 break;
20385 block = BLOCK_SUPERCONTEXT (block);
20386 }
20387 if (stmt_die == NULL)
20388 stmt_die = subr_die;
20389 die = new_die (DW_TAG_GNU_call_site, stmt_die, NULL_TREE);
20390 add_AT_lbl_id (die, DW_AT_low_pc, ca_loc->label);
20391 if (ca_loc->tail_call_p)
20392 add_AT_flag (die, DW_AT_GNU_tail_call, 1);
20393 if (ca_loc->symbol_ref)
20394 {
20395 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
20396 if (tdie)
20397 add_AT_die_ref (die, DW_AT_abstract_origin, tdie);
20398 else
20399 add_AT_addr (die, DW_AT_abstract_origin, ca_loc->symbol_ref, false);
20400 }
20401 return die;
20402 }
20403
20404 /* Generate a DIE to represent a declared function (either file-scope or
20405 block-local). */
20406
20407 static void
20408 gen_subprogram_die (tree decl, dw_die_ref context_die)
20409 {
20410 tree origin = decl_ultimate_origin (decl);
20411 dw_die_ref subr_die;
20412 dw_die_ref old_die = lookup_decl_die (decl);
20413
20414 /* This function gets called multiple times for different stages of
20415 the debug process. For example, for func() in this code:
20416
20417 namespace S
20418 {
20419 void func() { ... }
20420 }
20421
20422 ...we get called 4 times. Twice in early debug and twice in
20423 late debug:
20424
20425 Early debug
20426 -----------
20427
20428 1. Once while generating func() within the namespace. This is
20429 the declaration. The declaration bit below is set, as the
20430 context is the namespace.
20431
20432 A new DIE will be generated with DW_AT_declaration set.
20433
20434 2. Once for func() itself. This is the specification. The
20435 declaration bit below is clear as the context is the CU.
20436
20437 We will use the cached DIE from (1) to create a new DIE with
20438 DW_AT_specification pointing to the declaration in (1).
20439
20440 Late debug via rest_of_handle_final()
20441 -------------------------------------
20442
20443 3. Once while generating func() within the namespace. This is also the
20444 declaration, as in (1), but this time we will early exit below
20445 as we have a cached DIE and a declaration needs no additional
20446 annotations (no locations), as the source declaration line
20447 info is enough.
20448
20449 4. Once for func() itself. As in (2), this is the specification,
20450 but this time we will re-use the cached DIE, and just annotate
20451 it with the location information that should now be available.
20452
20453 For something without namespaces, but with abstract instances, we
20454 are also called multiple times:
20455
20456 class Base
20457 {
20458 public:
20459 Base (); // constructor declaration (1)
20460 };
20461
20462 Base::Base () { } // constructor specification (2)
20463
20464 Early debug
20465 -----------
20466
20467 1. Once for the Base() constructor by virtue of it being a
20468 member of the Base class. This is done via
20469 rest_of_type_compilation.
20470
20471 This is a declaration, so a new DIE will be created with
20472 DW_AT_declaration.
20473
20474 2. Once for the Base() constructor definition, but this time
20475 while generating the abstract instance of the base
20476 constructor (__base_ctor) which is being generated via early
20477 debug of reachable functions.
20478
20479 Even though we have a cached version of the declaration (1),
20480 we will create a DW_AT_specification of the declaration DIE
20481 in (1).
20482
20483 3. Once for the __base_ctor itself, but this time, we generate
20484 a DW_AT_abstract_origin version of the DW_AT_specification in
20485 (2).
20486
20487 Late debug via rest_of_handle_final
20488 -----------------------------------
20489
20490 4. One final time for the __base_ctor (which will have a cached
20491 DIE with DW_AT_abstract_origin created in (3)). This time,
20492 we will just annotate the location information now
20493 available.
20494 */
20495 int declaration = (current_function_decl != decl
20496 || class_or_namespace_scope_p (context_die));
20497
20498 /* Now that the C++ front end lazily declares artificial member fns, we
20499 might need to retrofit the declaration into its class. */
20500 if (!declaration && !origin && !old_die
20501 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
20502 && !class_or_namespace_scope_p (context_die)
20503 && debug_info_level > DINFO_LEVEL_TERSE)
20504 old_die = force_decl_die (decl);
20505
20506 /* An inlined instance, tag a new DIE with DW_AT_abstract_origin. */
20507 if (origin != NULL)
20508 {
20509 gcc_assert (!declaration || local_scope_p (context_die));
20510
20511 /* Fixup die_parent for the abstract instance of a nested
20512 inline function. */
20513 if (old_die && old_die->die_parent == NULL)
20514 add_child_die (context_die, old_die);
20515
20516 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
20517 {
20518 /* If we have a DW_AT_abstract_origin we have a working
20519 cached version. */
20520 subr_die = old_die;
20521 }
20522 else
20523 {
20524 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20525 add_abstract_origin_attribute (subr_die, origin);
20526 /* This is where the actual code for a cloned function is.
20527 Let's emit a linkage name attribute for it. This helps
20528 debuggers to, e.g., set breakpoints in
20529 constructors/destructors when the user asks "break
20530 K::K". */
20531 add_linkage_name (subr_die, decl);
20532 }
20533 }
20534 /* A cached copy, possibly from early dwarf generation. Reuse as
20535 much as possible. */
20536 else if (old_die)
20537 {
20538 /* A declaration that has been previously dumped needs no
20539 additional information. */
20540 if (declaration)
20541 return;
20542
20543 if (!get_AT_flag (old_die, DW_AT_declaration)
20544 /* We can have a normal definition following an inline one in the
20545 case of redefinition of GNU C extern inlines.
20546 It seems reasonable to use AT_specification in this case. */
20547 && !get_AT (old_die, DW_AT_inline))
20548 {
20549 /* Detect and ignore this case, where we are trying to output
20550 something we have already output. */
20551 if (get_AT (old_die, DW_AT_low_pc)
20552 || get_AT (old_die, DW_AT_ranges))
20553 return;
20554
20555 /* If we have no location information, this must be a
20556 partially generated DIE from early dwarf generation.
20557 Fall through and generate it. */
20558 }
20559
20560 /* If the definition comes from the same place as the declaration,
20561 maybe use the old DIE. We always want the DIE for this function
20562 that has the *_pc attributes to be under comp_unit_die so the
20563 debugger can find it. We also need to do this for abstract
20564 instances of inlines, since the spec requires the out-of-line copy
20565 to have the same parent. For local class methods, this doesn't
20566 apply; we just use the old DIE. */
20567 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
20568 struct dwarf_file_data * file_index = lookup_filename (s.file);
20569 if ((is_cu_die (old_die->die_parent)
20570 /* This condition fixes the inconsistency/ICE with the
20571 following Fortran test (or some derivative thereof) while
20572 building libgfortran:
20573
20574 module some_m
20575 contains
20576 logical function funky (FLAG)
20577 funky = .true.
20578 end function
20579 end module
20580 */
20581 || (old_die->die_parent
20582 && old_die->die_parent->die_tag == DW_TAG_module)
20583 || context_die == NULL)
20584 && (DECL_ARTIFICIAL (decl)
20585 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
20586 && (get_AT_unsigned (old_die, DW_AT_decl_line)
20587 == (unsigned) s.line))))
20588 {
20589 subr_die = old_die;
20590
20591 /* Clear out the declaration attribute, but leave the
20592 parameters so they can be augmented with location
20593 information later. Unless this was a declaration, in
20594 which case, wipe out the nameless parameters and recreate
20595 them further down. */
20596 if (remove_AT (subr_die, DW_AT_declaration))
20597 {
20598
20599 remove_AT (subr_die, DW_AT_object_pointer);
20600 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
20601 }
20602 }
20603 /* Make a specification pointing to the previously built
20604 declaration. */
20605 else
20606 {
20607 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20608 add_AT_specification (subr_die, old_die);
20609 add_pubname (decl, subr_die);
20610 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
20611 add_AT_file (subr_die, DW_AT_decl_file, file_index);
20612 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
20613 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
20614
20615 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
20616 emit the real type on the definition die. */
20617 if (is_cxx() && debug_info_level > DINFO_LEVEL_TERSE)
20618 {
20619 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
20620 if (die == auto_die || die == decltype_auto_die)
20621 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20622 TYPE_UNQUALIFIED, false, context_die);
20623 }
20624
20625 /* When we process the method declaration, we haven't seen
20626 the out-of-class defaulted definition yet, so we have to
20627 recheck now. */
20628 int defaulted = lang_hooks.decls.function_decl_defaulted (decl);
20629 if (defaulted && (dwarf_version >= 5 || ! dwarf_strict)
20630 && !get_AT (subr_die, DW_AT_defaulted))
20631 switch (defaulted)
20632 {
20633 case 2:
20634 add_AT_unsigned (subr_die, DW_AT_defaulted,
20635 DW_DEFAULTED_out_of_class);
20636 break;
20637
20638 case 1: /* This must have been handled before. */
20639 default:
20640 gcc_unreachable ();
20641 }
20642 }
20643 }
20644 /* Create a fresh DIE for anything else. */
20645 else
20646 {
20647 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20648
20649 if (TREE_PUBLIC (decl))
20650 add_AT_flag (subr_die, DW_AT_external, 1);
20651
20652 add_name_and_src_coords_attributes (subr_die, decl);
20653 add_pubname (decl, subr_die);
20654 if (debug_info_level > DINFO_LEVEL_TERSE)
20655 {
20656 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
20657 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20658 TYPE_UNQUALIFIED, false, context_die);
20659 }
20660
20661 add_pure_or_virtual_attribute (subr_die, decl);
20662 if (DECL_ARTIFICIAL (decl))
20663 add_AT_flag (subr_die, DW_AT_artificial, 1);
20664
20665 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
20666 add_AT_flag (subr_die, DW_AT_noreturn, 1);
20667
20668 add_accessibility_attribute (subr_die, decl);
20669 }
20670
20671 /* Unless we have an existing non-declaration DIE, equate the new
20672 DIE. */
20673 if (!old_die || is_declaration_die (old_die))
20674 equate_decl_number_to_die (decl, subr_die);
20675
20676 if (declaration)
20677 {
20678 if (!old_die || !get_AT (old_die, DW_AT_inline))
20679 {
20680 add_AT_flag (subr_die, DW_AT_declaration, 1);
20681
20682 /* If this is an explicit function declaration then generate
20683 a DW_AT_explicit attribute. */
20684 if (lang_hooks.decls.function_decl_explicit_p (decl)
20685 && (dwarf_version >= 3 || !dwarf_strict))
20686 add_AT_flag (subr_die, DW_AT_explicit, 1);
20687
20688 /* If this is a C++11 deleted special function member then generate
20689 a DW_AT_deleted attribute. */
20690 if (lang_hooks.decls.function_decl_deleted_p (decl)
20691 && (dwarf_version >= 5 || ! dwarf_strict))
20692 add_AT_flag (subr_die, DW_AT_deleted, 1);
20693
20694 /* If this is a C++11 defaulted special function member then
20695 generate a DW_AT_defaulted attribute. */
20696 int defaulted = lang_hooks.decls.function_decl_defaulted (decl);
20697 if (defaulted && (dwarf_version >= 5 || ! dwarf_strict))
20698 switch (defaulted)
20699 {
20700 case 1:
20701 add_AT_unsigned (subr_die, DW_AT_defaulted,
20702 DW_DEFAULTED_in_class);
20703 break;
20704
20705 /* It is likely that this will never hit, since we
20706 don't have the out-of-class definition yet when we
20707 process the class definition and the method
20708 declaration. We recheck elsewhere, but leave it
20709 here just in case. */
20710 case 2:
20711 add_AT_unsigned (subr_die, DW_AT_defaulted,
20712 DW_DEFAULTED_out_of_class);
20713 break;
20714
20715 default:
20716 gcc_unreachable ();
20717 }
20718 }
20719 }
20720 /* Tag abstract instances with DW_AT_inline. */
20721 else if (DECL_ABSTRACT_P (decl))
20722 {
20723 if (DECL_DECLARED_INLINE_P (decl))
20724 {
20725 if (cgraph_function_possibly_inlined_p (decl))
20726 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_inlined);
20727 else
20728 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_not_inlined);
20729 }
20730 else
20731 {
20732 if (cgraph_function_possibly_inlined_p (decl))
20733 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_inlined);
20734 else
20735 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_not_inlined);
20736 }
20737
20738 if (DECL_DECLARED_INLINE_P (decl)
20739 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
20740 add_AT_flag (subr_die, DW_AT_artificial, 1);
20741 }
20742 /* For non DECL_EXTERNALs, if range information is available, fill
20743 the DIE with it. */
20744 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
20745 {
20746 HOST_WIDE_INT cfa_fb_offset;
20747
20748 struct function *fun = DECL_STRUCT_FUNCTION (decl);
20749
20750 if (!flag_reorder_blocks_and_partition)
20751 {
20752 dw_fde_ref fde = fun->fde;
20753 if (fde->dw_fde_begin)
20754 {
20755 /* We have already generated the labels. */
20756 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20757 fde->dw_fde_end, false);
20758 }
20759 else
20760 {
20761 /* Create start/end labels and add the range. */
20762 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
20763 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
20764 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
20765 current_function_funcdef_no);
20766 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
20767 current_function_funcdef_no);
20768 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
20769 false);
20770 }
20771
20772 #if VMS_DEBUGGING_INFO
20773 /* HP OpenVMS Industry Standard 64: DWARF Extensions
20774 Section 2.3 Prologue and Epilogue Attributes:
20775 When a breakpoint is set on entry to a function, it is generally
20776 desirable for execution to be suspended, not on the very first
20777 instruction of the function, but rather at a point after the
20778 function's frame has been set up, after any language defined local
20779 declaration processing has been completed, and before execution of
20780 the first statement of the function begins. Debuggers generally
20781 cannot properly determine where this point is. Similarly for a
20782 breakpoint set on exit from a function. The prologue and epilogue
20783 attributes allow a compiler to communicate the location(s) to use. */
20784
20785 {
20786 if (fde->dw_fde_vms_end_prologue)
20787 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
20788 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
20789
20790 if (fde->dw_fde_vms_begin_epilogue)
20791 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
20792 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
20793 }
20794 #endif
20795
20796 }
20797 else
20798 {
20799 /* Generate pubnames entries for the split function code ranges. */
20800 dw_fde_ref fde = fun->fde;
20801
20802 if (fde->dw_fde_second_begin)
20803 {
20804 if (dwarf_version >= 3 || !dwarf_strict)
20805 {
20806 /* We should use ranges for non-contiguous code section
20807 addresses. Use the actual code range for the initial
20808 section, since the HOT/COLD labels might precede an
20809 alignment offset. */
20810 bool range_list_added = false;
20811 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
20812 fde->dw_fde_end, &range_list_added,
20813 false);
20814 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
20815 fde->dw_fde_second_end,
20816 &range_list_added, false);
20817 if (range_list_added)
20818 add_ranges (NULL);
20819 }
20820 else
20821 {
20822 /* There is no real support in DW2 for this, so we make
20823 a work-around. First, emit the pub name for the segment
20824 containing the function label. Then make and emit a
20825 simplified subprogram DIE for the second segment with the
20826 name prefixed by __second_sect_of_. We use the same
20827 linkage name for the second DIE so that gdb will find both
20828 sections when given "b foo". */
20829 const char *name = NULL;
20830 tree decl_name = DECL_NAME (decl);
20831 dw_die_ref seg_die;
20832
20833 /* Do the 'primary' section. */
20834 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20835 fde->dw_fde_end, false);
20836
20837 /* Build a minimal DIE for the secondary section. */
20838 seg_die = new_die (DW_TAG_subprogram,
20839 subr_die->die_parent, decl);
20840
20841 if (TREE_PUBLIC (decl))
20842 add_AT_flag (seg_die, DW_AT_external, 1);
20843
20844 if (decl_name != NULL
20845 && IDENTIFIER_POINTER (decl_name) != NULL)
20846 {
20847 name = dwarf2_name (decl, 1);
20848 if (! DECL_ARTIFICIAL (decl))
20849 add_src_coords_attributes (seg_die, decl);
20850
20851 add_linkage_name (seg_die, decl);
20852 }
20853 gcc_assert (name != NULL);
20854 add_pure_or_virtual_attribute (seg_die, decl);
20855 if (DECL_ARTIFICIAL (decl))
20856 add_AT_flag (seg_die, DW_AT_artificial, 1);
20857
20858 name = concat ("__second_sect_of_", name, NULL);
20859 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
20860 fde->dw_fde_second_end, false);
20861 add_name_attribute (seg_die, name);
20862 if (want_pubnames ())
20863 add_pubname_string (name, seg_die);
20864 }
20865 }
20866 else
20867 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
20868 false);
20869 }
20870
20871 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
20872
20873 /* We define the "frame base" as the function's CFA. This is more
20874 convenient for several reasons: (1) It's stable across the prologue
20875 and epilogue, which makes it better than just a frame pointer,
20876 (2) With dwarf3, there exists a one-byte encoding that allows us
20877 to reference the .debug_frame data by proxy, but failing that,
20878 (3) We can at least reuse the code inspection and interpretation
20879 code that determines the CFA position at various points in the
20880 function. */
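  /* Illustrative sketch, not part of the original source: on the DWARF 3+
     path below the attribute is typically the single one-byte expression

         DW_AT_frame_base: DW_OP_call_frame_cfa

     while the fallback path may yield either a single expression or a
     location list that tracks the CFA through registers and offsets, for
     example (register numbers and offsets here are placeholders)

         DW_AT_frame_base: [lo, l1)  DW_OP_breg7 +8
                           [l1, hi)  DW_OP_breg6 +16  */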
20881 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
20882 {
20883 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
20884 add_AT_loc (subr_die, DW_AT_frame_base, op);
20885 }
20886 else
20887 {
20888 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
20889 if (list->dw_loc_next)
20890 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
20891 else
20892 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
20893 }
20894
20895 /* Compute a displacement from the "steady-state frame pointer" to
20896 the CFA. The former is what all stack slots and argument slots
20897 will reference in the rtl; the latter is what we've told the
20898 debugger about. We'll need to adjust all frame_base references
20899 by this displacement. */
20900 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
20901
20902 if (fun->static_chain_decl)
20903 {
20904 /* DWARF requires here a location expression that computes the
20905 address of the enclosing subprogram's frame base. The machinery
20906 in tree-nested.c is supposed to store this specific address in the
20907 last field of the FRAME record. */
20908 const tree frame_type
20909 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
20910 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
20911
20912 tree fb_expr
20913 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
20914 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
20915 fb_expr, fb_decl, NULL_TREE);
20916
20917 add_AT_location_description (subr_die, DW_AT_static_link,
20918 loc_list_from_tree (fb_expr, 0, NULL));
20919 }
20920 }
20921
20922 /* Generate child DIEs for template parameters. */
20923 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
20924 gen_generic_params_dies (decl);
20925
20926 /* Now output descriptions of the arguments for this function. This gets
20927 (unnecessarily?) complex because the DECL_ARGUMENTS list for a
20928 FUNCTION_DECL doesn't indicate cases where there was a trailing
20929 `...' at the end of the formal parameter list. In order to find out if
20930 there was a trailing ellipsis or not, we must instead look at the type
20931 associated with the FUNCTION_DECL. This will be a node of type
20932 FUNCTION_TYPE. If the chain of type nodes hanging off of this
20933 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
20934 an ellipsis at the end. */
20935
20936 /* In the case where we are describing a mere function declaration, all we
20937 need to do here (and all we *can* do here) is to describe the *types* of
20938 its formal parameters. */
20939 if (debug_info_level <= DINFO_LEVEL_TERSE)
20940 ;
20941 else if (declaration)
20942 gen_formal_types_die (decl, subr_die);
20943 else
20944 {
20945 /* Generate DIEs to represent all known formal parameters. */
20946 tree parm = DECL_ARGUMENTS (decl);
20947 tree generic_decl = early_dwarf
20948 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
20949 tree generic_decl_parm = generic_decl
20950 ? DECL_ARGUMENTS (generic_decl)
20951 : NULL;
20952 auto_vec<dw_die_ref> string_types_vec;
20953 if (string_types == NULL)
20954 string_types = &string_types_vec;
20955
20956 /* Now we want to walk the list of parameters of the function and
20957 emit their relevant DIEs.
20958
20959 We consider the case of DECL being an instance of a generic function
20960 as well as it being a normal function.
20961
20962 If DECL is an instance of a generic function we walk the
20963 parameters of the generic function declaration _and_ the parameters of
20964 DECL itself. This is useful because we want to emit specific DIEs for
20965 function parameter packs and those are declared as part of the
20966 generic function declaration. In that particular case,
20967 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
20968 That DIE has child DIEs representing the set of arguments
20969 of the pack. Note that the set of pack arguments can be empty.
20970 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have
20971 any child DIEs.
20972
20973 Otherwise, we just consider the parameters of DECL. */
20974 while (generic_decl_parm || parm)
20975 {
20976 if (generic_decl_parm
20977 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
20978 gen_formal_parameter_pack_die (generic_decl_parm,
20979 parm, subr_die,
20980 &parm);
20981 else if (parm && !POINTER_BOUNDS_P (parm))
20982 {
20983 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
20984
20985 if (parm == DECL_ARGUMENTS (decl)
20986 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
20987 && parm_die
20988 && (dwarf_version >= 3 || !dwarf_strict))
20989 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
20990
20991 parm = DECL_CHAIN (parm);
20992 }
20993 else if (parm)
20994 parm = DECL_CHAIN (parm);
20995
20996 if (generic_decl_parm)
20997 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
20998 }
20999
21000 /* Decide whether we need an unspecified_parameters DIE at the end.
21001 There are two cases in which to do this: 1) the ANSI `...' declaration,
21002 which is detectable when the end of the argument list is not a
21003 void_type_node, and 2) an unprototyped function declaration (not a
21004 definition), which just means that we have no info about the
21005 parameters at all. */
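  /* Illustrative examples, not part of the original source, assuming a C
     front end:

         int f (int a, ...);    <- prototyped and stdarg_p, so a
                                   DW_TAG_unspecified_parameters child is
                                   added after the named parameters
         int g ();              <- unprototyped; with no definition
                                   (DECL_INITIAL is NULL) the same DIE is
                                   emitted since nothing is known about
                                   the parameters
         int h (void);          <- prototyped and not stdarg_p, so no such
                                   DIE is needed  */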
21006 if (early_dwarf)
21007 {
21008 if (prototype_p (TREE_TYPE (decl)))
21009 {
21010 /* This is the prototyped case; check for a trailing `...'. */
21011 if (stdarg_p (TREE_TYPE (decl)))
21012 gen_unspecified_parameters_die (decl, subr_die);
21013 }
21014 else if (DECL_INITIAL (decl) == NULL_TREE)
21015 gen_unspecified_parameters_die (decl, subr_die);
21016 }
21017
21018 /* Adjust DW_TAG_string_type DIEs if needed, now that all arguments
21019 have DIEs. */
21020 if (string_types == &string_types_vec)
21021 {
21022 adjust_string_types ();
21023 string_types = NULL;
21024 }
21025 }
21026
21027 if (subr_die != old_die)
21028 /* Add the calling convention attribute if requested. */
21029 add_calling_convention_attribute (subr_die, decl);
21030
21031 /* Output Dwarf info for all of the stuff within the body of the function
21032 (if it has one - it may be just a declaration).
21033
21034 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
21035 a function. This BLOCK actually represents the outermost binding contour
21036 for the function, i.e. the contour in which the function's formal
21037 parameters and labels get declared. Curiously, it appears that the front
21038 end doesn't actually put the PARM_DECL nodes for the current function onto
21039 the BLOCK_VARS list for this outer scope; instead they are strung off
21040 the DECL_ARGUMENTS list for the function.
21041
21042 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
21043 the LABEL_DECL nodes for the function however, and we output DWARF info
21044 for those in decls_for_scope. Just within the `outer_scope' there will be
21045 a BLOCK node representing the function's outermost pair of curly braces,
21046 and any blocks used for the base and member initializers of a C++
21047 constructor function. */
21048 tree outer_scope = DECL_INITIAL (decl);
21049 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
21050 {
21051 int call_site_note_count = 0;
21052 int tail_call_site_note_count = 0;
21053
21054 /* Emit a DW_TAG_variable DIE for a named return value. */
21055 if (DECL_NAME (DECL_RESULT (decl)))
21056 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
21057
21058 /* The first time through decls_for_scope we will generate the
21059 DIEs for the locals. The second time, we fill in the
21060 location info. */
21061 decls_for_scope (outer_scope, subr_die);
21062
21063 if (call_arg_locations && !dwarf_strict)
21064 {
21065 struct call_arg_loc_node *ca_loc;
21066 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
21067 {
21068 dw_die_ref die = NULL;
21069 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
21070 rtx arg, next_arg;
21071
21072 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
21073 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
21074 : NULL_RTX);
21075 arg; arg = next_arg)
21076 {
21077 dw_loc_descr_ref reg, val;
21078 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
21079 dw_die_ref cdie, tdie = NULL;
21080
21081 next_arg = XEXP (arg, 1);
21082 if (REG_P (XEXP (XEXP (arg, 0), 0))
21083 && next_arg
21084 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
21085 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
21086 && REGNO (XEXP (XEXP (arg, 0), 0))
21087 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
21088 next_arg = XEXP (next_arg, 1);
21089 if (mode == VOIDmode)
21090 {
21091 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
21092 if (mode == VOIDmode)
21093 mode = GET_MODE (XEXP (arg, 0));
21094 }
21095 if (mode == VOIDmode || mode == BLKmode)
21096 continue;
21097 /* Get dynamic information about call target only if we
21098 have no static information: we cannot generate both
21099 DW_AT_abstract_origin and DW_AT_GNU_call_site_target
21100 attributes. */
21101 if (ca_loc->symbol_ref == NULL_RTX)
21102 {
21103 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
21104 {
21105 tloc = XEXP (XEXP (arg, 0), 1);
21106 continue;
21107 }
21108 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
21109 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
21110 {
21111 tlocc = XEXP (XEXP (arg, 0), 1);
21112 continue;
21113 }
21114 }
21115 reg = NULL;
21116 if (REG_P (XEXP (XEXP (arg, 0), 0)))
21117 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
21118 VAR_INIT_STATUS_INITIALIZED);
21119 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
21120 {
21121 rtx mem = XEXP (XEXP (arg, 0), 0);
21122 reg = mem_loc_descriptor (XEXP (mem, 0),
21123 get_address_mode (mem),
21124 GET_MODE (mem),
21125 VAR_INIT_STATUS_INITIALIZED);
21126 }
21127 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
21128 == DEBUG_PARAMETER_REF)
21129 {
21130 tree tdecl
21131 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
21132 tdie = lookup_decl_die (tdecl);
21133 if (tdie == NULL)
21134 continue;
21135 }
21136 else
21137 continue;
21138 if (reg == NULL
21139 && GET_CODE (XEXP (XEXP (arg, 0), 0))
21140 != DEBUG_PARAMETER_REF)
21141 continue;
21142 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
21143 VOIDmode,
21144 VAR_INIT_STATUS_INITIALIZED);
21145 if (val == NULL)
21146 continue;
21147 if (die == NULL)
21148 die = gen_call_site_die (decl, subr_die, ca_loc);
21149 cdie = new_die (DW_TAG_GNU_call_site_parameter, die,
21150 NULL_TREE);
21151 if (reg != NULL)
21152 add_AT_loc (cdie, DW_AT_location, reg);
21153 else if (tdie != NULL)
21154 add_AT_die_ref (cdie, DW_AT_abstract_origin, tdie);
21155 add_AT_loc (cdie, DW_AT_GNU_call_site_value, val);
21156 if (next_arg != XEXP (arg, 1))
21157 {
21158 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
21159 if (mode == VOIDmode)
21160 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
21161 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
21162 0), 1),
21163 mode, VOIDmode,
21164 VAR_INIT_STATUS_INITIALIZED);
21165 if (val != NULL)
21166 add_AT_loc (cdie, DW_AT_GNU_call_site_data_value, val);
21167 }
21168 }
21169 if (die == NULL
21170 && (ca_loc->symbol_ref || tloc))
21171 die = gen_call_site_die (decl, subr_die, ca_loc);
21172 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
21173 {
21174 dw_loc_descr_ref tval = NULL;
21175
21176 if (tloc != NULL_RTX)
21177 tval = mem_loc_descriptor (tloc,
21178 GET_MODE (tloc) == VOIDmode
21179 ? Pmode : GET_MODE (tloc),
21180 VOIDmode,
21181 VAR_INIT_STATUS_INITIALIZED);
21182 if (tval)
21183 add_AT_loc (die, DW_AT_GNU_call_site_target, tval);
21184 else if (tlocc != NULL_RTX)
21185 {
21186 tval = mem_loc_descriptor (tlocc,
21187 GET_MODE (tlocc) == VOIDmode
21188 ? Pmode : GET_MODE (tlocc),
21189 VOIDmode,
21190 VAR_INIT_STATUS_INITIALIZED);
21191 if (tval)
21192 add_AT_loc (die, DW_AT_GNU_call_site_target_clobbered,
21193 tval);
21194 }
21195 }
21196 if (die != NULL)
21197 {
21198 call_site_note_count++;
21199 if (ca_loc->tail_call_p)
21200 tail_call_site_note_count++;
21201 }
21202 }
21203 }
21204 call_arg_locations = NULL;
21205 call_arg_loc_last = NULL;
21206 if (tail_call_site_count >= 0
21207 && tail_call_site_count == tail_call_site_note_count
21208 && !dwarf_strict)
21209 {
21210 if (call_site_count >= 0
21211 && call_site_count == call_site_note_count)
21212 add_AT_flag (subr_die, DW_AT_GNU_all_call_sites, 1);
21213 else
21214 add_AT_flag (subr_die, DW_AT_GNU_all_tail_call_sites, 1);
21215 }
21216 call_site_count = -1;
21217 tail_call_site_count = -1;
21218 }
21219
21220 /* Mark used types after we have created DIEs for the function's scopes. */
21221 premark_used_types (DECL_STRUCT_FUNCTION (decl));
21222 }
21223
21224 /* Returns a hash value for X (which really is a die_struct). */
21225
21226 hashval_t
21227 block_die_hasher::hash (die_struct *d)
21228 {
21229 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
21230 }
21231
21232 /* Return nonzero if the decl_id and die_parent of die_struct X are the
21233 same as the decl_id and die_parent of die_struct Y. */
21234
21235 bool
21236 block_die_hasher::equal (die_struct *x, die_struct *y)
21237 {
21238 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
21239 }
21240
21241 /* Return TRUE if DECL, which may have been previously generated as
21242 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
21243 true if decl (or its origin) is either an extern declaration or a
21244 class/namespace scoped declaration.
21245
21246 The declare_in_namespace support causes us to get two DIEs for one
21247 variable, both of which are declarations. We want to avoid
21248 considering one to be a specification, so we must test for
21249 DECLARATION and DW_AT_declaration. */
21250 static inline bool
21251 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
21252 {
21253 return (old_die && TREE_STATIC (decl) && !declaration
21254 && get_AT_flag (old_die, DW_AT_declaration) == 1);
21255 }
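/* Illustrative example, not part of the original source: for a C++ static
   data member

       struct S { static int member; };   // in-class declaration
       int S::member;                     // namespace-scope definition

   the in-class declaration yields a DIE carrying DW_AT_declaration, and the
   later definition is expected to reference it via DW_AT_specification
   rather than repeating the name and type; that is the situation the
   predicate above is meant to detect.  */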
21256
21257 /* Return true if DECL is a local static. */
21258
21259 static inline bool
21260 local_function_static (tree decl)
21261 {
21262 gcc_assert (VAR_P (decl));
21263 return TREE_STATIC (decl)
21264 && DECL_CONTEXT (decl)
21265 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
21266 }
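/* Illustrative example, not part of the original source:

       void count_calls (void) { static int hits; }

   Here "hits" is TREE_STATIC and its DECL_CONTEXT is the FUNCTION_DECL for
   count_calls, so local_function_static returns true; a file-scope static
   or a class-level static member does not qualify.  */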
21267
21268 /* Generate a DIE to represent a declared data object.
21269 Either DECL or ORIGIN must be non-null. */
21270
21271 static void
21272 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
21273 {
21274 HOST_WIDE_INT off = 0;
21275 tree com_decl;
21276 tree decl_or_origin = decl ? decl : origin;
21277 tree ultimate_origin;
21278 dw_die_ref var_die;
21279 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
21280 dw_die_ref origin_die = NULL;
21281 bool declaration = (DECL_EXTERNAL (decl_or_origin)
21282 || class_or_namespace_scope_p (context_die));
21283 bool specialization_p = false;
21284
21285 ultimate_origin = decl_ultimate_origin (decl_or_origin);
21286 if (decl || ultimate_origin)
21287 origin = ultimate_origin;
21288 com_decl = fortran_common (decl_or_origin, &off);
21289
21290 /* A symbol in a common block gets emitted as a child of the common block
21291 DIE, in the form of a data member. */
21292 if (com_decl)
21293 {
21294 dw_die_ref com_die;
21295 dw_loc_list_ref loc = NULL;
21296 die_node com_die_arg;
21297
21298 var_die = lookup_decl_die (decl_or_origin);
21299 if (var_die)
21300 {
21301 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
21302 {
21303 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
21304 if (loc)
21305 {
21306 if (off)
21307 {
21308 /* Optimize the common case. */
21309 if (single_element_loc_list_p (loc)
21310 && loc->expr->dw_loc_opc == DW_OP_addr
21311 && loc->expr->dw_loc_next == NULL
21312 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
21313 == SYMBOL_REF)
21314 {
21315 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21316 loc->expr->dw_loc_oprnd1.v.val_addr
21317 = plus_constant (GET_MODE (x), x , off);
21318 }
21319 else
21320 loc_list_plus_const (loc, off);
21321 }
21322 add_AT_location_description (var_die, DW_AT_location, loc);
21323 remove_AT (var_die, DW_AT_declaration);
21324 }
21325 }
21326 return;
21327 }
21328
21329 if (common_block_die_table == NULL)
21330 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
21331
21332 com_die_arg.decl_id = DECL_UID (com_decl);
21333 com_die_arg.die_parent = context_die;
21334 com_die = common_block_die_table->find (&com_die_arg);
21335 if (! early_dwarf)
21336 loc = loc_list_from_tree (com_decl, 2, NULL);
21337 if (com_die == NULL)
21338 {
21339 const char *cnam
21340 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
21341 die_node **slot;
21342
21343 com_die = new_die (DW_TAG_common_block, context_die, decl);
21344 add_name_and_src_coords_attributes (com_die, com_decl);
21345 if (loc)
21346 {
21347 add_AT_location_description (com_die, DW_AT_location, loc);
21348 /* Avoid sharing the same loc descriptor between
21349 DW_TAG_common_block and DW_TAG_variable. */
21350 loc = loc_list_from_tree (com_decl, 2, NULL);
21351 }
21352 else if (DECL_EXTERNAL (decl_or_origin))
21353 add_AT_flag (com_die, DW_AT_declaration, 1);
21354 if (want_pubnames ())
21355 add_pubname_string (cnam, com_die); /* ??? needed? */
21356 com_die->decl_id = DECL_UID (com_decl);
21357 slot = common_block_die_table->find_slot (com_die, INSERT);
21358 *slot = com_die;
21359 }
21360 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
21361 {
21362 add_AT_location_description (com_die, DW_AT_location, loc);
21363 loc = loc_list_from_tree (com_decl, 2, NULL);
21364 remove_AT (com_die, DW_AT_declaration);
21365 }
21366 var_die = new_die (DW_TAG_variable, com_die, decl);
21367 add_name_and_src_coords_attributes (var_die, decl_or_origin);
21368 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
21369 decl_quals (decl_or_origin), false,
21370 context_die);
21371 add_AT_flag (var_die, DW_AT_external, 1);
21372 if (loc)
21373 {
21374 if (off)
21375 {
21376 /* Optimize the common case. */
21377 if (single_element_loc_list_p (loc)
21378 && loc->expr->dw_loc_opc == DW_OP_addr
21379 && loc->expr->dw_loc_next == NULL
21380 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
21381 {
21382 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21383 loc->expr->dw_loc_oprnd1.v.val_addr
21384 = plus_constant (GET_MODE (x), x, off);
21385 }
21386 else
21387 loc_list_plus_const (loc, off);
21388 }
21389 add_AT_location_description (var_die, DW_AT_location, loc);
21390 }
21391 else if (DECL_EXTERNAL (decl_or_origin))
21392 add_AT_flag (var_die, DW_AT_declaration, 1);
21393 if (decl)
21394 equate_decl_number_to_die (decl, var_die);
21395 return;
21396 }
21397
21398 if (old_die)
21399 {
21400 if (declaration)
21401 {
21402 /* A declaration that has been previously dumped needs no
21403 further annotations, since it doesn't need location info on
21404 the second pass. */
21405 return;
21406 }
21407 else if (decl_will_get_specification_p (old_die, decl, declaration)
21408 && !get_AT (old_die, DW_AT_specification))
21409 {
21410 /* Fall through so we can make a new variable DIE along with a
21411 DW_AT_specification. */
21412 }
21413 else if (origin && old_die->die_parent != context_die)
21414 {
21415 /* If we will be creating an inlined instance, we need a
21416 new DIE that will get annotated with
21417 DW_AT_abstract_origin. Clear things so we can get a
21418 new DIE. */
21419 gcc_assert (!DECL_ABSTRACT_P (decl));
21420 old_die = NULL;
21421 }
21422 else
21423 {
21424 /* If a DIE was dumped early, it still needs location info.
21425 Skip to where we fill the location bits. */
21426 var_die = old_die;
21427 goto gen_variable_die_location;
21428 }
21429 }
21430
21431 /* For static data members, the declaration in the class is supposed
21432 to have DW_TAG_member tag; the specification should still be
21433 DW_TAG_variable referencing the DW_TAG_member DIE. */
21434 if (declaration && class_scope_p (context_die))
21435 var_die = new_die (DW_TAG_member, context_die, decl);
21436 else
21437 var_die = new_die (DW_TAG_variable, context_die, decl);
21438
21439 if (origin != NULL)
21440 origin_die = add_abstract_origin_attribute (var_die, origin);
21441
21442 /* Loop unrolling can create multiple blocks that refer to the same
21443 static variable, so we must test for the DW_AT_declaration flag.
21444
21445 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
21446 copy decls and set the DECL_ABSTRACT_P flag on them instead of
21447 sharing them.
21448
21449 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
21450 else if (decl_will_get_specification_p (old_die, decl, declaration))
21451 {
21452 /* This is a definition of a C++ class level static. */
21453 add_AT_specification (var_die, old_die);
21454 specialization_p = true;
21455 if (DECL_NAME (decl))
21456 {
21457 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
21458 struct dwarf_file_data * file_index = lookup_filename (s.file);
21459
21460 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
21461 add_AT_file (var_die, DW_AT_decl_file, file_index);
21462
21463 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
21464 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
21465
21466 if (old_die->die_tag == DW_TAG_member)
21467 add_linkage_name (var_die, decl);
21468 }
21469 }
21470 else
21471 add_name_and_src_coords_attributes (var_die, decl);
21472
21473 if ((origin == NULL && !specialization_p)
21474 || (origin != NULL
21475 && !DECL_ABSTRACT_P (decl_or_origin)
21476 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
21477 decl_function_context
21478 (decl_or_origin))))
21479 {
21480 tree type = TREE_TYPE (decl_or_origin);
21481
21482 if (decl_by_reference_p (decl_or_origin))
21483 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21484 context_die);
21485 else
21486 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
21487 context_die);
21488 }
21489
21490 if (origin == NULL && !specialization_p)
21491 {
21492 if (TREE_PUBLIC (decl))
21493 add_AT_flag (var_die, DW_AT_external, 1);
21494
21495 if (DECL_ARTIFICIAL (decl))
21496 add_AT_flag (var_die, DW_AT_artificial, 1);
21497
21498 add_accessibility_attribute (var_die, decl);
21499 }
21500
21501 if (declaration)
21502 add_AT_flag (var_die, DW_AT_declaration, 1);
21503
21504 if (decl && (DECL_ABSTRACT_P (decl)
21505 || !old_die || is_declaration_die (old_die)))
21506 equate_decl_number_to_die (decl, var_die);
21507
21508 gen_variable_die_location:
21509 if (! declaration
21510 && (! DECL_ABSTRACT_P (decl_or_origin)
21511 /* Local static vars are shared between all clones/inlines,
21512 so emit DW_AT_location on the abstract DIE if DECL_RTL is
21513 already set. */
21514 || (VAR_P (decl_or_origin)
21515 && TREE_STATIC (decl_or_origin)
21516 && DECL_RTL_SET_P (decl_or_origin)))
21517 /* When abstract origin already has DW_AT_location attribute, no need
21518 to add it again. */
21519 && (origin_die == NULL || get_AT (origin_die, DW_AT_location) == NULL))
21520 {
21521 if (early_dwarf)
21522 add_pubname (decl_or_origin, var_die);
21523 else
21524 add_location_or_const_value_attribute (var_die, decl_or_origin,
21525 decl == NULL);
21526 }
21527 else
21528 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
21529 }
21530
21531 /* Generate a DIE to represent a named constant. */
21532
21533 static void
21534 gen_const_die (tree decl, dw_die_ref context_die)
21535 {
21536 dw_die_ref const_die;
21537 tree type = TREE_TYPE (decl);
21538
21539 const_die = lookup_decl_die (decl);
21540 if (const_die)
21541 return;
21542
21543 const_die = new_die (DW_TAG_constant, context_die, decl);
21544 equate_decl_number_to_die (decl, const_die);
21545 add_name_and_src_coords_attributes (const_die, decl);
21546 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
21547 if (TREE_PUBLIC (decl))
21548 add_AT_flag (const_die, DW_AT_external, 1);
21549 if (DECL_ARTIFICIAL (decl))
21550 add_AT_flag (const_die, DW_AT_artificial, 1);
21551 tree_add_const_value_attribute_for_decl (const_die, decl);
21552 }
21553
21554 /* Generate a DIE to represent a label identifier. */
21555
21556 static void
21557 gen_label_die (tree decl, dw_die_ref context_die)
21558 {
21559 tree origin = decl_ultimate_origin (decl);
21560 dw_die_ref lbl_die = lookup_decl_die (decl);
21561 rtx insn;
21562 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21563
21564 if (!lbl_die)
21565 {
21566 lbl_die = new_die (DW_TAG_label, context_die, decl);
21567 equate_decl_number_to_die (decl, lbl_die);
21568
21569 if (origin != NULL)
21570 add_abstract_origin_attribute (lbl_die, origin);
21571 else
21572 add_name_and_src_coords_attributes (lbl_die, decl);
21573 }
21574
21575 if (DECL_ABSTRACT_P (decl))
21576 equate_decl_number_to_die (decl, lbl_die);
21577 else if (! early_dwarf)
21578 {
21579 insn = DECL_RTL_IF_SET (decl);
21580
21581 /* Deleted labels are programmer-specified labels which have been
21582 eliminated because of various optimizations. We still emit them
21583 here so that it is possible to put breakpoints on them. */
21584 if (insn
21585 && (LABEL_P (insn)
21586 || ((NOTE_P (insn)
21587 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
21588 {
21589 /* When optimization is enabled (via -O) some parts of the compiler
21590 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
21591 represent source-level labels which were explicitly declared by
21592 the user. This really shouldn't be happening though, so catch
21593 it if it ever does happen. */
21594 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
21595
21596 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
21597 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21598 }
21599 else if (insn
21600 && NOTE_P (insn)
21601 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
21602 && CODE_LABEL_NUMBER (insn) != -1)
21603 {
21604 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
21605 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21606 }
21607 }
21608 }
21609
21610 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
21611 attributes to the DIE for a block STMT, to describe where the inlined
21612 function was called from. This is similar to add_src_coords_attributes. */
21613
21614 static inline void
21615 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
21616 {
21617 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
21618
21619 if (dwarf_version >= 3 || !dwarf_strict)
21620 {
21621 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
21622 add_AT_unsigned (die, DW_AT_call_line, s.line);
21623 }
21624 }
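/* Illustrative example, not part of the original source: if a function is
   inlined into a call made at foo.c line 42, the DW_TAG_inlined_subroutine
   DIE gets DW_AT_call_file referring to foo.c and DW_AT_call_line 42
   (emitted only for DWARF 3+ or non-strict DWARF, per the check above).
   The file name and line number here are placeholders.  */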
21625
21626
21627 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
21628 Add low_pc and high_pc attributes to the DIE for a block STMT. */
21629
21630 static inline void
21631 add_high_low_attributes (tree stmt, dw_die_ref die)
21632 {
21633 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21634
21635 if (BLOCK_FRAGMENT_CHAIN (stmt)
21636 && (dwarf_version >= 3 || !dwarf_strict))
21637 {
21638 tree chain, superblock = NULL_TREE;
21639 dw_die_ref pdie;
21640 dw_attr_node *attr = NULL;
21641
21642 if (inlined_function_outer_scope_p (stmt))
21643 {
21644 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21645 BLOCK_NUMBER (stmt));
21646 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21647 }
21648
21649 /* Optimize duplicate .debug_ranges lists or even tails of
21650 lists. If this BLOCK has the same ranges as its supercontext,
21651 look up the DW_AT_ranges attribute in the supercontext (and
21652 recursively so), verify that the ranges_table contains the
21653 right values and use it instead of adding a new .debug_ranges entry. */
21654 for (chain = stmt, pdie = die;
21655 BLOCK_SAME_RANGE (chain);
21656 chain = BLOCK_SUPERCONTEXT (chain))
21657 {
21658 dw_attr_node *new_attr;
21659
21660 pdie = pdie->die_parent;
21661 if (pdie == NULL)
21662 break;
21663 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
21664 break;
21665 new_attr = get_AT (pdie, DW_AT_ranges);
21666 if (new_attr == NULL
21667 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
21668 break;
21669 attr = new_attr;
21670 superblock = BLOCK_SUPERCONTEXT (chain);
21671 }
21672 if (attr != NULL
21673 && (ranges_table[attr->dw_attr_val.v.val_offset
21674 / 2 / DWARF2_ADDR_SIZE].num
21675 == BLOCK_NUMBER (superblock))
21676 && BLOCK_FRAGMENT_CHAIN (superblock))
21677 {
21678 unsigned long off = attr->dw_attr_val.v.val_offset
21679 / 2 / DWARF2_ADDR_SIZE;
21680 unsigned long supercnt = 0, thiscnt = 0;
21681 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
21682 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21683 {
21684 ++supercnt;
21685 gcc_checking_assert (ranges_table[off + supercnt].num
21686 == BLOCK_NUMBER (chain));
21687 }
21688 gcc_checking_assert (ranges_table[off + supercnt + 1].num == 0);
21689 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
21690 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21691 ++thiscnt;
21692 gcc_assert (supercnt >= thiscnt);
21693 add_AT_range_list (die, DW_AT_ranges,
21694 ((off + supercnt - thiscnt)
21695 * 2 * DWARF2_ADDR_SIZE),
21696 false);
21697 return;
21698 }
21699
21700 add_AT_range_list (die, DW_AT_ranges, add_ranges (stmt), false);
21701
21702 chain = BLOCK_FRAGMENT_CHAIN (stmt);
21703 do
21704 {
21705 add_ranges (chain);
21706 chain = BLOCK_FRAGMENT_CHAIN (chain);
21707 }
21708 while (chain);
21709 add_ranges (NULL);
21710 }
21711 else
21712 {
21713 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
21714 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21715 BLOCK_NUMBER (stmt));
21716 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
21717 BLOCK_NUMBER (stmt));
21718 add_AT_low_high_pc (die, label, label_high, false);
21719 }
21720 }
21721
21722 /* Generate a DIE for a lexical block. */
21723
21724 static void
21725 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
21726 {
21727 dw_die_ref old_die = BLOCK_DIE (stmt);
21728 dw_die_ref stmt_die = NULL;
21729 if (!old_die)
21730 {
21731 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21732 BLOCK_DIE (stmt) = stmt_die;
21733 }
21734
21735 if (BLOCK_ABSTRACT (stmt))
21736 {
21737 if (old_die)
21738 {
21739 /* This must have been generated early and it won't even
21740 need location information since it's part of a DW_AT_inline
21741 function. */
21742 if (flag_checking)
21743 for (dw_die_ref c = context_die; c; c = c->die_parent)
21744 if (c->die_tag == DW_TAG_inlined_subroutine
21745 || c->die_tag == DW_TAG_subprogram)
21746 {
21747 gcc_assert (get_AT (c, DW_AT_inline));
21748 break;
21749 }
21750 return;
21751 }
21752 }
21753 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
21754 {
21755 /* If this is an inlined instance, create a new lexical block DIE for
21756 anything below to attach DW_AT_abstract_origin to. */
21757 if (old_die)
21758 {
21759 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21760 BLOCK_DIE (stmt) = stmt_die;
21761 old_die = NULL;
21762 }
21763
21764 tree origin = block_ultimate_origin (stmt);
21765 if (origin != NULL_TREE && origin != stmt)
21766 add_abstract_origin_attribute (stmt_die, origin);
21767 }
21768
21769 if (old_die)
21770 stmt_die = old_die;
21771
21772 /* A non-abstract block whose blocks have already been reordered
21773 should have the instruction range for this block. If so, set the
21774 high/low attributes. */
21775 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
21776 {
21777 gcc_assert (stmt_die);
21778 add_high_low_attributes (stmt, stmt_die);
21779 }
21780
21781 decls_for_scope (stmt, stmt_die);
21782 }
21783
21784 /* Generate a DIE for an inlined subprogram. */
21785
21786 static void
21787 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
21788 {
21789 tree decl;
21790
21791 /* The instance of function that is effectively being inlined shall not
21792 be abstract. */
21793 gcc_assert (! BLOCK_ABSTRACT (stmt));
21794
21795 decl = block_ultimate_origin (stmt);
21796
21797 /* Make sure any inlined functions are known to be inlineable. */
21798 gcc_checking_assert (DECL_ABSTRACT_P (decl)
21799 || cgraph_function_possibly_inlined_p (decl));
21800
21801 /* Emit info for the abstract instance first, if we haven't yet. We
21802 must emit this even if the block is abstract, otherwise when we
21803 emit the block below (or elsewhere), we may end up trying to emit
21804 a DIE whose origin DIE hasn't been emitted, and crash. */
21805 dwarf2out_abstract_function (decl);
21806
21807 if (! BLOCK_ABSTRACT (stmt))
21808 {
21809 dw_die_ref subr_die
21810 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
21811
21812 if (call_arg_locations)
21813 BLOCK_DIE (stmt) = subr_die;
21814 add_abstract_origin_attribute (subr_die, decl);
21815 if (TREE_ASM_WRITTEN (stmt))
21816 add_high_low_attributes (stmt, subr_die);
21817 add_call_src_coords_attributes (stmt, subr_die);
21818
21819 decls_for_scope (stmt, subr_die);
21820 }
21821 }
21822
21823 /* Generate a DIE for a field in a record or structure. CTX is required: see
21824 the comment for VLR_CONTEXT. */
21825
21826 static void
21827 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
21828 {
21829 dw_die_ref decl_die;
21830
21831 if (TREE_TYPE (decl) == error_mark_node)
21832 return;
21833
21834 decl_die = new_die (DW_TAG_member, context_die, decl);
21835 add_name_and_src_coords_attributes (decl_die, decl);
21836 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
21837 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
21838 context_die);
21839
21840 if (DECL_BIT_FIELD_TYPE (decl))
21841 {
21842 add_byte_size_attribute (decl_die, decl);
21843 add_bit_size_attribute (decl_die, decl);
21844 add_bit_offset_attribute (decl_die, decl, ctx);
21845 }
21846
21847 /* If we have a variant part offset, then we are supposed to process a member
21848 of a QUAL_UNION_TYPE, which is how we represent variant parts in
21849 trees. */
21850 gcc_assert (ctx->variant_part_offset == NULL_TREE
21851 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
21852 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
21853 add_data_member_location_attribute (decl_die, decl, ctx);
21854
21855 if (DECL_ARTIFICIAL (decl))
21856 add_AT_flag (decl_die, DW_AT_artificial, 1);
21857
21858 add_accessibility_attribute (decl_die, decl);
21859
21860 /* Equate decl number to die, so that we can look up this decl later on. */
21861 equate_decl_number_to_die (decl, decl_die);
21862 }
21863
21864 #if 0
21865 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21866 Use modified_type_die instead.
21867 We keep this code here just in case these types of DIEs may be needed to
21868 represent certain things in other languages (e.g. Pascal) someday. */
21869
21870 static void
21871 gen_pointer_type_die (tree type, dw_die_ref context_die)
21872 {
21873 dw_die_ref ptr_die
21874 = new_die (DW_TAG_pointer_type, scope_die_for (type, context_die), type);
21875
21876 equate_type_number_to_die (type, ptr_die);
21877 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21878 context_die);
21879 add_AT_unsigned (ptr_die, DW_AT_byte_size, PTR_SIZE);
21880 }
21881
21882 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21883 Use modified_type_die instead.
21884 We keep this code here just in case these types of DIEs may be needed to
21885 represent certain things in other languages (e.g. Pascal) someday. */
21886
21887 static void
21888 gen_reference_type_die (tree type, dw_die_ref context_die)
21889 {
21890 dw_die_ref ref_die, scope_die = scope_die_for (type, context_die);
21891
21892 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
21893 ref_die = new_die (DW_TAG_rvalue_reference_type, scope_die, type);
21894 else
21895 ref_die = new_die (DW_TAG_reference_type, scope_die, type);
21896
21897 equate_type_number_to_die (type, ref_die);
21898 add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21899 context_die);
21900 add_AT_unsigned (ref_die, DW_AT_byte_size, PTR_SIZE);
21901 }
21902 #endif
21903
21904 /* Generate a DIE for a pointer to a member type. */
21905
21906 static void
21907 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
21908 {
21909 dw_die_ref ptr_die
21910 = new_die (DW_TAG_ptr_to_member_type,
21911 scope_die_for (type, context_die), type);
21912
21913 equate_type_number_to_die (type, ptr_die);
21914 add_AT_die_ref (ptr_die, DW_AT_containing_type,
21915 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
21916 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21917 context_die);
21918 }
21919
21920 static char *producer_string;
21921
21922 /* Return a heap-allocated producer string, including command-line options
21923 if -grecord-gcc-switches is in effect. */
21924
21925 static char *
21926 gen_producer_string (void)
21927 {
21928 size_t j;
21929 auto_vec<const char *> switches;
21930 const char *language_string = lang_hooks.name;
21931 char *producer, *tail;
21932 const char *p;
21933 size_t len = dwarf_record_gcc_switches ? 0 : 3;
21934 size_t plen = strlen (language_string) + 1 + strlen (version_string);
21935
21936 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
21937 switch (save_decoded_options[j].opt_index)
21938 {
21939 case OPT_o:
21940 case OPT_d:
21941 case OPT_dumpbase:
21942 case OPT_dumpdir:
21943 case OPT_auxbase:
21944 case OPT_auxbase_strip:
21945 case OPT_quiet:
21946 case OPT_version:
21947 case OPT_v:
21948 case OPT_w:
21949 case OPT_L:
21950 case OPT_D:
21951 case OPT_I:
21952 case OPT_U:
21953 case OPT_SPECIAL_unknown:
21954 case OPT_SPECIAL_ignore:
21955 case OPT_SPECIAL_program_name:
21956 case OPT_SPECIAL_input_file:
21957 case OPT_grecord_gcc_switches:
21958 case OPT_gno_record_gcc_switches:
21959 case OPT__output_pch_:
21960 case OPT_fdiagnostics_show_location_:
21961 case OPT_fdiagnostics_show_option:
21962 case OPT_fdiagnostics_show_caret:
21963 case OPT_fdiagnostics_color_:
21964 case OPT_fverbose_asm:
21965 case OPT____:
21966 case OPT__sysroot_:
21967 case OPT_nostdinc:
21968 case OPT_nostdinc__:
21969 case OPT_fpreprocessed:
21970 case OPT_fltrans_output_list_:
21971 case OPT_fresolution_:
21972 case OPT_fdebug_prefix_map_:
21973 /* Ignore these. */
21974 continue;
21975 default:
21976 if (cl_options[save_decoded_options[j].opt_index].flags
21977 & CL_NO_DWARF_RECORD)
21978 continue;
21979 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
21980 == '-');
21981 switch (save_decoded_options[j].canonical_option[0][1])
21982 {
21983 case 'M':
21984 case 'i':
21985 case 'W':
21986 continue;
21987 case 'f':
21988 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
21989 "dump", 4) == 0)
21990 continue;
21991 break;
21992 default:
21993 break;
21994 }
21995 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
21996 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
21997 break;
21998 }
21999
22000 producer = XNEWVEC (char, plen + 1 + len + 1);
22001 tail = producer;
22002 sprintf (tail, "%s %s", language_string, version_string);
22003 tail += plen;
22004
22005 FOR_EACH_VEC_ELT (switches, j, p)
22006 {
22007 len = strlen (p);
22008 *tail = ' ';
22009 memcpy (tail + 1, p, len);
22010 tail += len + 1;
22011 }
22012
22013 *tail = '\0';
22014 return producer;
22015 }
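/* Illustrative sketch, not part of the original source: with
   -grecord-gcc-switches the resulting producer string looks roughly like

       "GNU C11 6.3.0 -mtune=generic -O2 -g"

   i.e. the language name, the version string and the recorded switches
   separated by single spaces; the version number and options shown here
   are placeholders only.  */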
22016
22017 /* Given two C and/or C++ language/version strings, return the "highest".
22018 C++ is assumed to be "higher" than C in this case. Used for merging
22019 LTO translation unit languages. */
22020 static const char *
22021 highest_c_language (const char *lang1, const char *lang2)
22022 {
22023 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
22024 return "GNU C++14";
22025 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
22026 return "GNU C++11";
22027 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
22028 return "GNU C++98";
22029
22030 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
22031 return "GNU C11";
22032 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
22033 return "GNU C99";
22034 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
22035 return "GNU C89";
22036
22037 gcc_unreachable ();
22038 }
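/* Illustrative usage, not part of the original source:

       highest_c_language ("GNU C99", "GNU C++11")  => "GNU C++11"
       highest_c_language ("GNU C89", "GNU C11")    => "GNU C11"

   If neither argument matches a known GNU C/C++ version string, the
   function hits gcc_unreachable.  */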
22039
22040
22041 /* Generate the DIE for the compilation unit. */
22042
22043 static dw_die_ref
22044 gen_compile_unit_die (const char *filename)
22045 {
22046 dw_die_ref die;
22047 const char *language_string = lang_hooks.name;
22048 int language;
22049
22050 die = new_die (DW_TAG_compile_unit, NULL, NULL);
22051
22052 if (filename)
22053 {
22054 add_name_attribute (die, filename);
22055 /* Don't add cwd for <built-in>. */
22056 if (!IS_ABSOLUTE_PATH (filename) && filename[0] != '<')
22057 add_comp_dir_attribute (die);
22058 }
22059
22060 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
22061
22062 /* If our producer is LTO, try to figure out a common language to use
22063 from the global list of translation units. */
22064 if (strcmp (language_string, "GNU GIMPLE") == 0)
22065 {
22066 unsigned i;
22067 tree t;
22068 const char *common_lang = NULL;
22069
22070 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
22071 {
22072 if (!TRANSLATION_UNIT_LANGUAGE (t))
22073 continue;
22074 if (!common_lang)
22075 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
22076 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
22077 ;
22078 else if (strncmp (common_lang, "GNU C", 5) == 0
22079 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
22080 /* Mixing C and C++ is ok, use C++ in that case. */
22081 common_lang = highest_c_language (common_lang,
22082 TRANSLATION_UNIT_LANGUAGE (t));
22083 else
22084 {
22085 /* Fall back to C. */
22086 common_lang = NULL;
22087 break;
22088 }
22089 }
22090
22091 if (common_lang)
22092 language_string = common_lang;
22093 }
22094
22095 language = DW_LANG_C;
22096 if (strncmp (language_string, "GNU C", 5) == 0
22097 && ISDIGIT (language_string[5]))
22098 {
22099 language = DW_LANG_C89;
22100 if (dwarf_version >= 3 || !dwarf_strict)
22101 {
22102 if (strcmp (language_string, "GNU C89") != 0)
22103 language = DW_LANG_C99;
22104
22105 if (dwarf_version >= 5 /* || !dwarf_strict */)
22106 if (strcmp (language_string, "GNU C11") == 0)
22107 language = DW_LANG_C11;
22108 }
22109 }
22110 else if (strncmp (language_string, "GNU C++", 7) == 0)
22111 {
22112 language = DW_LANG_C_plus_plus;
22113 if (dwarf_version >= 5 /* || !dwarf_strict */)
22114 {
22115 if (strcmp (language_string, "GNU C++11") == 0)
22116 language = DW_LANG_C_plus_plus_11;
22117 else if (strcmp (language_string, "GNU C++14") == 0)
22118 language = DW_LANG_C_plus_plus_14;
22119 }
22120 }
22121 else if (strcmp (language_string, "GNU F77") == 0)
22122 language = DW_LANG_Fortran77;
22123 else if (strcmp (language_string, "GNU Pascal") == 0)
22124 language = DW_LANG_Pascal83;
22125 else if (dwarf_version >= 3 || !dwarf_strict)
22126 {
22127 if (strcmp (language_string, "GNU Ada") == 0)
22128 language = DW_LANG_Ada95;
22129 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
22130 {
22131 language = DW_LANG_Fortran95;
22132 if (dwarf_version >= 5 /* || !dwarf_strict */)
22133 {
22134 if (strcmp (language_string, "GNU Fortran2003") == 0)
22135 language = DW_LANG_Fortran03;
22136 else if (strcmp (language_string, "GNU Fortran2008") == 0)
22137 language = DW_LANG_Fortran08;
22138 }
22139 }
22140 else if (strcmp (language_string, "GNU Java") == 0)
22141 language = DW_LANG_Java;
22142 else if (strcmp (language_string, "GNU Objective-C") == 0)
22143 language = DW_LANG_ObjC;
22144 else if (strcmp (language_string, "GNU Objective-C++") == 0)
22145 language = DW_LANG_ObjC_plus_plus;
22146 else if (dwarf_version >= 5 || !dwarf_strict)
22147 {
22148 if (strcmp (language_string, "GNU Go") == 0)
22149 language = DW_LANG_Go;
22150 }
22151 }
22152 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
22153 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
22154 language = DW_LANG_Fortran90;
22155
22156 add_AT_unsigned (die, DW_AT_language, language);
22157
22158 switch (language)
22159 {
22160 case DW_LANG_Fortran77:
22161 case DW_LANG_Fortran90:
22162 case DW_LANG_Fortran95:
22163 case DW_LANG_Fortran03:
22164 case DW_LANG_Fortran08:
22165 /* Fortran has case-insensitive identifiers and the front-end
22166 lowercases everything. */
22167 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
22168 break;
22169 default:
22170 /* The default DW_ID_case_sensitive doesn't need to be specified. */
22171 break;
22172 }
22173 return die;
22174 }
22175
22176 /* Generate the DIE for a base class. */
22177
22178 static void
22179 gen_inheritance_die (tree binfo, tree access, tree type,
22180 dw_die_ref context_die)
22181 {
22182 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
22183 struct vlr_context ctx = { type, NULL };
22184
22185 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
22186 context_die);
22187 add_data_member_location_attribute (die, binfo, &ctx);
22188
22189 if (BINFO_VIRTUAL_P (binfo))
22190 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
22191
22192 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
22193 children, otherwise the default is DW_ACCESS_public. In DWARF2
22194 the default has always been DW_ACCESS_private. */
22195 if (access == access_public_node)
22196 {
22197 if (dwarf_version == 2
22198 || context_die->die_tag == DW_TAG_class_type)
22199 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
22200 }
22201 else if (access == access_protected_node)
22202 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
22203 else if (dwarf_version > 2
22204 && context_die->die_tag != DW_TAG_class_type)
22205 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
22206 }
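/* Illustrative example, not part of the original source: for

       class Base {};
       class Derived : public Base {};

   the DW_TAG_inheritance DIE for Base lives under a DW_TAG_class_type,
   where the DWARF 3+ default accessibility is private, so
   DW_AT_accessibility is emitted explicitly as DW_ACCESS_public; a public
   base of a plain struct under DWARF 3+ can omit the attribute because
   public is already the default there.  */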
22207
22208 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
22209 structure. */
22210 static bool
22211 is_variant_part (tree decl)
22212 {
22213 return (TREE_CODE (decl) == FIELD_DECL
22214 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
22215 }
22216
22217 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
22218 return the FIELD_DECL. Return NULL_TREE otherwise. */
22219
22220 static tree
22221 analyze_discr_in_predicate (tree operand, tree struct_type)
22222 {
22223 bool continue_stripping = true;
22224 while (continue_stripping)
22225 switch (TREE_CODE (operand))
22226 {
22227 CASE_CONVERT:
22228 operand = TREE_OPERAND (operand, 0);
22229 break;
22230 default:
22231 continue_stripping = false;
22232 break;
22233 }
22234
22235 /* Match field access to members of struct_type only. */
22236 if (TREE_CODE (operand) == COMPONENT_REF
22237 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
22238 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
22239 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
22240 return TREE_OPERAND (operand, 1);
22241 else
22242 return NULL_TREE;
22243 }
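/* Illustrative example, not part of the original source: for a variant
   guarded by a predicate of the form

       (PLACEHOLDER_EXPR of STRUCT_TYPE).disc == 1

   the operand passed here is the COMPONENT_REF on the left-hand side;
   after stripping conversions it matches the pattern above and the
   FIELD_DECL for "disc" is returned, while any other shape of operand
   yields NULL_TREE. The field name "disc" is a placeholder.  */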
22244
22245 /* Check that SRC is a constant integer that can be represented as a native
22246 integer constant (either signed or unsigned). If so, store it into DEST and
22247 return true. Return false otherwise. */
22248
22249 static bool
22250 get_discr_value (tree src, dw_discr_value *dest)
22251 {
22252 bool is_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
22253
22254 if (TREE_CODE (src) != INTEGER_CST
22255 || !(is_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
22256 return false;
22257
22258 dest->pos = is_unsigned;
22259 if (is_unsigned)
22260 dest->v.uval = tree_to_uhwi (src);
22261 else
22262 dest->v.sval = tree_to_shwi (src);
22263
22264 return true;
22265 }
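/* Illustrative behavior, not part of the original source: an INTEGER_CST
   of 7 with an unsigned type yields pos = 1 and v.uval = 7; -3 with a
   signed type yields pos = 0 and v.sval = -3. A constant that does not
   fit in a HOST_WIDE_INT, or an operand that is not an INTEGER_CST at all,
   makes the function return false.  */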
22266
22267 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
22268 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
22269 store NULL_TREE in DISCR_DECL. Otherwise:
22270
22271 - store the discriminant field in STRUCT_TYPE that controls the variant
22272 part to *DISCR_DECL
22273
22274 - put in *DISCR_LISTS_P an array where for each variant, the item
22275 represents the corresponding matching list of discriminant values.
22276
22277 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
22278 the above array.
22279
22280 Note that when the array is allocated (i.e. when the analysis is
22281 successful), it is up to the caller to free the array. */
22282
22283 static void
22284 analyze_variants_discr (tree variant_part_decl,
22285 tree struct_type,
22286 tree *discr_decl,
22287 dw_discr_list_ref **discr_lists_p,
22288 unsigned *discr_lists_length)
22289 {
22290 tree variant_part_type = TREE_TYPE (variant_part_decl);
22291 tree variant;
22292 dw_discr_list_ref *discr_lists;
22293 unsigned i;
22294
22295 /* Compute how many variants there are in this variant part. */
22296 *discr_lists_length = 0;
22297 for (variant = TYPE_FIELDS (variant_part_type);
22298 variant != NULL_TREE;
22299 variant = DECL_CHAIN (variant))
22300 ++*discr_lists_length;
22301
22302 *discr_decl = NULL_TREE;
22303 *discr_lists_p
22304 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
22305 sizeof (**discr_lists_p));
22306 discr_lists = *discr_lists_p;
22307
22308 /* And then analyze all variants to extract discriminant information for all
22309 of them. This analysis is conservative: as soon as we detect something we
22310 do not support, abort everything and pretend we found nothing. */
22311 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
22312 variant != NULL_TREE;
22313 variant = DECL_CHAIN (variant), ++i)
22314 {
22315 tree match_expr = DECL_QUALIFIER (variant);
22316
22317 /* Now, try to analyze the predicate and deduce a discriminant for
22318 it. */
22319 if (match_expr == boolean_true_node)
22320 /* Typically happens for the default variant: it matches all cases that
22321 previous variants rejected. Don't output any matching value for
22322 this one. */
22323 continue;
22324
22325 /* The following loop tries to iterate over each discriminant
22326 possibility: single values or ranges. */
22327 while (match_expr != NULL_TREE)
22328 {
22329 tree next_round_match_expr;
22330 tree candidate_discr = NULL_TREE;
22331 dw_discr_list_ref new_node = NULL;
22332
22333 /* Possibilities are matched one after the other by nested
22334 TRUTH_ORIF_EXPR expressions. Process the current possibility and
22335 continue with the rest at next iteration. */
22336 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
22337 {
22338 next_round_match_expr = TREE_OPERAND (match_expr, 0);
22339 match_expr = TREE_OPERAND (match_expr, 1);
22340 }
22341 else
22342 next_round_match_expr = NULL_TREE;
22343
22344 if (match_expr == boolean_false_node)
22345 /* This sub-expression matches nothing: just wait for the next
22346 one. */
22347 ;
22348
22349 else if (TREE_CODE (match_expr) == EQ_EXPR)
22350 {
22351 /* We are matching: <discr_field> == <integer_cst>
22352 This sub-expression matches a single value. */
22353 tree integer_cst = TREE_OPERAND (match_expr, 1);
22354
22355 candidate_discr
22356 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
22357 struct_type);
22358
22359 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22360 if (!get_discr_value (integer_cst,
22361 &new_node->dw_discr_lower_bound))
22362 goto abort;
22363 new_node->dw_discr_range = false;
22364 }
22365
22366 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
22367 {
22368 /* We are matching:
22369 <discr_field> > <integer_cst>
22370 && <discr_field> < <integer_cst>.
22371 This sub-expression matches the range of values between the
22372 two matched integer constants. Note that comparisons can be
22373 inclusive or exclusive. */
22374 tree candidate_discr_1, candidate_discr_2;
22375 tree lower_cst, upper_cst;
22376 bool lower_cst_included, upper_cst_included;
22377 tree lower_op = TREE_OPERAND (match_expr, 0);
22378 tree upper_op = TREE_OPERAND (match_expr, 1);
22379
22380 /* When the comparison is exclusive, the integer constant is not
22381 the discriminant range bound we are looking for: we will have
22382 to increment or decrement it. */
22383 if (TREE_CODE (lower_op) == GE_EXPR)
22384 lower_cst_included = true;
22385 else if (TREE_CODE (lower_op) == GT_EXPR)
22386 lower_cst_included = false;
22387 else
22388 goto abort;
22389
22390 if (TREE_CODE (upper_op) == LE_EXPR)
22391 upper_cst_included = true;
22392 else if (TREE_CODE (upper_op) == LT_EXPR)
22393 upper_cst_included = false;
22394 else
22395 goto abort;
22396
22397 /* Extract the discriminant from the first operand and check it
22398 is consistent with the same analysis in the second
22399 operand. */
22400 candidate_discr_1
22401 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
22402 struct_type);
22403 candidate_discr_2
22404 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
22405 struct_type);
22406 if (candidate_discr_1 == candidate_discr_2)
22407 candidate_discr = candidate_discr_1;
22408 else
22409 goto abort;
22410
22411 /* Extract bounds from both. */
22412 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22413 lower_cst = TREE_OPERAND (lower_op, 1);
22414 upper_cst = TREE_OPERAND (upper_op, 1);
22415
22416 if (!lower_cst_included)
22417 lower_cst
22418 = fold (build2 (PLUS_EXPR, TREE_TYPE (lower_cst),
22419 lower_cst,
22420 build_int_cst (TREE_TYPE (lower_cst), 1)));
22421 if (!upper_cst_included)
22422 upper_cst
22423 = fold (build2 (MINUS_EXPR, TREE_TYPE (upper_cst),
22424 upper_cst,
22425 build_int_cst (TREE_TYPE (upper_cst), 1)));
22426
22427 if (!get_discr_value (lower_cst,
22428 &new_node->dw_discr_lower_bound)
22429 || !get_discr_value (upper_cst,
22430 &new_node->dw_discr_upper_bound))
22431 goto abort;
22432
22433 new_node->dw_discr_range = true;
22434 }
22435
22436 else
22437 /* Unsupported sub-expression: we cannot determine the set of
22438 matching discriminant values. Abort everything. */
22439 goto abort;
22440
22441 /* If the discriminant info is not consistent with what we saw so
22442 far, consider the analysis failed and abort everything. */
22443 if (candidate_discr == NULL_TREE
22444 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
22445 goto abort;
22446 else
22447 *discr_decl = candidate_discr;
22448
22449 if (new_node != NULL)
22450 {
22451 new_node->dw_discr_next = discr_lists[i];
22452 discr_lists[i] = new_node;
22453 }
22454 match_expr = next_round_match_expr;
22455 }
22456 }
22457
22458 /* If we reach this point, we could match everything we were interested
22459 in. */
22460 return;
22461
22462 abort:
22463 /* Clean up all data structures and return no result. */
22464 free (*discr_lists_p);
22465 *discr_lists_p = NULL;
22466 *discr_decl = NULL_TREE;
22467 }
22468
22469 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
22470 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
22471 under CONTEXT_DIE.
22472
22473 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
22474 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
22475 this type, which are record types, represent the available variants and each
22476 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
22477 values are inferred from these attributes.
22478
22479 In trees, the offsets for the fields inside these sub-records are relative
22480 to the variant part itself, whereas the corresponding DIEs should have
22481 offset attributes that are relative to the embedding record base address.
22482 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
22483 must be an expression that computes the offset of the variant part to
22484 describe in DWARF. */
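/* As an illustration (a rough sketch, not taken from any particular front
   end), a variant part with discriminant D and two variants is expected to
   look roughly like this in GENERIC:

       FIELD_DECL  (the variant part, i.e. VARIANT_PART_DECL)
         type: QUAL_UNION_TYPE
           RECORD_TYPE   -- variant for D == 1, DECL_QUALIFIER: D == 1
           RECORD_TYPE   -- default variant, no DECL_QUALIFIER

   and to yield DWARF along these lines:

       DW_TAG_variant_part
         DW_AT_discr       -> DIE for D
         DW_TAG_variant
           DW_AT_discr_value 1
           <member DIEs>
         DW_TAG_variant    (default: no discriminant value/list attribute)
           <member DIEs>  */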
22485
22486 static void
22487 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
22488 dw_die_ref context_die)
22489 {
22490 const tree variant_part_type = TREE_TYPE (variant_part_decl);
22491 tree variant_part_offset = vlr_ctx->variant_part_offset;
22492 struct loc_descr_context ctx = {
22493 vlr_ctx->struct_type, /* context_type */
22494 NULL_TREE, /* base_decl */
22495 NULL /* dpi */
22496 };
22497
22498 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
22499 NULL_TREE if there is no such field. */
22500 tree discr_decl = NULL_TREE;
22501 dw_discr_list_ref *discr_lists;
22502 unsigned discr_lists_length = 0;
22503 unsigned i;
22504
22505 dw_die_ref dwarf_proc_die = NULL;
22506 dw_die_ref variant_part_die
22507 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
22508
22509 equate_decl_number_to_die (variant_part_decl, variant_part_die);
22510
22511 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
22512 &discr_decl, &discr_lists, &discr_lists_length);
22513
22514 if (discr_decl != NULL_TREE)
22515 {
22516 dw_die_ref discr_die = lookup_decl_die (discr_decl);
22517
22518 if (discr_die)
22519 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
22520 else
22521 /* We have no DIE for the discriminant, so just discard all
22522 discriminant information in the output. */
22523 discr_decl = NULL_TREE;
22524 }
22525
22526 /* If the offset for this variant part is more complex than a constant,
22527 create a DWARF procedure for it so that we will not have to generate DWARF
22528 expressions for it for each member. */
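     /* For example (illustrative only): when the variant part follows a field
	whose size depends on another member, its offset is a non-constant
	expression; emitting that expression once as a DW_TAG_dwarf_procedure
	and having each member's location description call it keeps the
	per-member DWARF small.  */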
22529 if (TREE_CODE (variant_part_offset) != INTEGER_CST
22530 && (dwarf_version >= 3 || !dwarf_strict))
22531 {
22532 const tree dwarf_proc_fndecl
22533 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
22534 build_function_type (TREE_TYPE (variant_part_offset),
22535 NULL_TREE));
22536 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
22537 const dw_loc_descr_ref dwarf_proc_body
22538 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
22539
22540 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
22541 dwarf_proc_fndecl, context_die);
22542 if (dwarf_proc_die != NULL)
22543 variant_part_offset = dwarf_proc_call;
22544 }
22545
22546 /* Output DIEs for all variants. */
22547 i = 0;
22548 for (tree variant = TYPE_FIELDS (variant_part_type);
22549 variant != NULL_TREE;
22550 variant = DECL_CHAIN (variant), ++i)
22551 {
22552 tree variant_type = TREE_TYPE (variant);
22553 dw_die_ref variant_die;
22554
22555 /* All variants (i.e. members of a variant part) are supposed to be
22556 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
22557 under these records. */
22558 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
22559
22560 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
22561 equate_decl_number_to_die (variant, variant_die);
22562
22563 /* Output discriminant values this variant matches, if any. */
22564 if (discr_decl == NULL || discr_lists[i] == NULL)
22565 /* If we have no discriminant information at all, or if this variant
22566 matches no specific values, it is probably the default variant: as
22567 the standard says, don't output any discriminant value/list attribute. */
22568 ;
22569 else if (discr_lists[i]->dw_discr_next == NULL
22570 && !discr_lists[i]->dw_discr_range)
22571 /* If there is only one accepted value, don't bother outputting a
22572 list. */
22573 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
22574 else
22575 add_discr_list (variant_die, discr_lists[i]);
22576
22577 for (tree member = TYPE_FIELDS (variant_type);
22578 member != NULL_TREE;
22579 member = DECL_CHAIN (member))
22580 {
22581 struct vlr_context vlr_sub_ctx = {
22582 vlr_ctx->struct_type, /* struct_type */
22583 NULL /* variant_part_offset */
22584 };
22585 if (is_variant_part (member))
22586 {
22587 /* All offsets for fields inside variant parts are relative to
22588 the top-level embedding RECORD_TYPE's base address. On the
22589 other hand, offsets in GCC's types are relative to the
22590 nested-most variant part. So we have to sum offsets each time
22591 we recurse. */
22592
22593 vlr_sub_ctx.variant_part_offset
22594 = fold (build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
22595 variant_part_offset, byte_position (member)));
22596 gen_variant_part (member, &vlr_sub_ctx, variant_die);
22597 }
22598 else
22599 {
22600 vlr_sub_ctx.variant_part_offset = variant_part_offset;
22601 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
22602 }
22603 }
22604 }
22605
22606 free (discr_lists);
22607 }
22608
22609 /* Generate a DIE for a class member. */
22610
22611 static void
22612 gen_member_die (tree type, dw_die_ref context_die)
22613 {
22614 tree member;
22615 tree binfo = TYPE_BINFO (type);
22616 dw_die_ref child;
22617
22618 /* If this is not an incomplete type, output descriptions of each of its
22619 members. Note that as we output the DIEs necessary to represent the
22620 members of this record or union type, we will also be trying to output
22621 DIEs to represent the *types* of those members. However the `type'
22622 function (above) will specifically avoid generating type DIEs for member
22623 types *within* the list of member DIEs for this (containing) type except
22624 for those types (of members) which are explicitly marked as also being
22625 members of this (containing) type themselves. The g++ front-end can
22626 force any given type to be treated as a member of some other (containing)
22627 type by setting the TYPE_CONTEXT of the given (member) type to point to
22628 the TREE node representing the appropriate (containing) type. */
22629
22630 /* First output info about the base classes. */
22631 if (binfo)
22632 {
22633 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
22634 int i;
22635 tree base;
22636
22637 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
22638 gen_inheritance_die (base,
22639 (accesses ? (*accesses)[i] : access_public_node),
22640 type,
22641 context_die);
22642 }
22643
22644 /* Now output info about the data members and type members. */
22645 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
22646 {
22647 struct vlr_context vlr_ctx = { type, NULL_TREE };
22648
22649 /* If we thought we were generating minimal debug info for TYPE
22650 and then changed our minds, some of the member declarations
22651 may have already been defined. Don't define them again, but
22652 do put them in the right order. */
22653
22654 child = lookup_decl_die (member);
22655 if (child)
22656 splice_child_die (context_die, child);
22657
22658 /* Do not generate standard DWARF for variant parts if we are generating
22659 the corresponding GNAT encodings: DIEs generated for both would
22660 conflict in our mappings. */
22661 else if (is_variant_part (member)
22662 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
22663 {
22664 vlr_ctx.variant_part_offset = byte_position (member);
22665 gen_variant_part (member, &vlr_ctx, context_die);
22666 }
22667 else
22668 {
22669 vlr_ctx.variant_part_offset = NULL_TREE;
22670 gen_decl_die (member, NULL, &vlr_ctx, context_die);
22671 }
22672 }
22673
22674 /* We do not keep type methods in type variants. */
22675 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
22676 /* Now output info about the function members (if any). */
22677 if (TYPE_METHODS (type) != error_mark_node)
22678 for (member = TYPE_METHODS (type); member; member = DECL_CHAIN (member))
22679 {
22680 /* Don't include clones in the member list. */
22681 if (DECL_ABSTRACT_ORIGIN (member))
22682 continue;
22683 /* Nor constructors for anonymous classes. */
22684 if (DECL_ARTIFICIAL (member)
22685 && dwarf2_name (member, 0) == NULL)
22686 continue;
22687
22688 child = lookup_decl_die (member);
22689 if (child)
22690 splice_child_die (context_die, child);
22691 else
22692 gen_decl_die (member, NULL, NULL, context_die);
22693 }
22694 }
22695
22696 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
22697 is set, we pretend that the type was never defined, so we only get the
22698 member DIEs needed by later specification DIEs. */
22699
22700 static void
22701 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
22702 enum debug_info_usage usage)
22703 {
22704 if (TREE_ASM_WRITTEN (type))
22705 {
22706 /* Fill in the bound of variable-length fields in late dwarf if
22707 still incomplete. */
22708 if (!early_dwarf && variably_modified_type_p (type, NULL))
22709 for (tree member = TYPE_FIELDS (type);
22710 member;
22711 member = DECL_CHAIN (member))
22712 fill_variable_array_bounds (TREE_TYPE (member));
22713 return;
22714 }
22715
22716 dw_die_ref type_die = lookup_type_die (type);
22717 dw_die_ref scope_die = 0;
22718 int nested = 0;
22719 int complete = (TYPE_SIZE (type)
22720 && (! TYPE_STUB_DECL (type)
22721 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
22722 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
22723 complete = complete && should_emit_struct_debug (type, usage);
22724
22725 if (type_die && ! complete)
22726 return;
22727
22728 if (TYPE_CONTEXT (type) != NULL_TREE
22729 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22730 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
22731 nested = 1;
22732
22733 scope_die = scope_die_for (type, context_die);
22734
22735 /* Generate child DIEs for template parameters. */
22736 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
22737 schedule_generic_params_dies_gen (type);
22738
22739 if (! type_die || (nested && is_cu_die (scope_die)))
22740 /* First occurrence of type or toplevel definition of nested class. */
22741 {
22742 dw_die_ref old_die = type_die;
22743
22744 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
22745 ? record_type_tag (type) : DW_TAG_union_type,
22746 scope_die, type);
22747 equate_type_number_to_die (type, type_die);
22748 if (old_die)
22749 add_AT_specification (type_die, old_die);
22750 else
22751 add_name_attribute (type_die, type_tag (type));
22752 }
22753 else
22754 remove_AT (type_die, DW_AT_declaration);
22755
22756 /* If this type has been completed, then give it a byte_size attribute and
22757 then give a list of members. */
22758 if (complete && !ns_decl)
22759 {
22760 /* Prevent infinite recursion in cases where the type of some member of
22761 this type is expressed in terms of this type itself. */
22762 TREE_ASM_WRITTEN (type) = 1;
22763 add_byte_size_attribute (type_die, type);
22764 if (TYPE_STUB_DECL (type) != NULL_TREE)
22765 {
22766 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22767 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22768 }
22769
22770 /* If the first reference to this type was as the return type of an
22771 inline function, then it may not have a parent. Fix this now. */
22772 if (type_die->die_parent == NULL)
22773 add_child_die (scope_die, type_die);
22774
22775 push_decl_scope (type);
22776 gen_member_die (type, type_die);
22777 pop_decl_scope ();
22778
22779 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22780 if (TYPE_ARTIFICIAL (type))
22781 add_AT_flag (type_die, DW_AT_artificial, 1);
22782
22783 /* GNU extension: Record what type our vtable lives in. */
22784 if (TYPE_VFIELD (type))
22785 {
22786 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
22787
22788 gen_type_die (vtype, context_die);
22789 add_AT_die_ref (type_die, DW_AT_containing_type,
22790 lookup_type_die (vtype));
22791 }
22792 }
22793 else
22794 {
22795 add_AT_flag (type_die, DW_AT_declaration, 1);
22796
22797 /* We don't need to do this for function-local types. */
22798 if (TYPE_STUB_DECL (type)
22799 && ! decl_function_context (TYPE_STUB_DECL (type)))
22800 vec_safe_push (incomplete_types, type);
22801 }
22802
22803 if (get_AT (type_die, DW_AT_name))
22804 add_pubtype (type, type_die);
22805 }
22806
22807 /* Generate a DIE for a subroutine _type_. */
22808
22809 static void
22810 gen_subroutine_type_die (tree type, dw_die_ref context_die)
22811 {
22812 tree return_type = TREE_TYPE (type);
22813 dw_die_ref subr_die
22814 = new_die (DW_TAG_subroutine_type,
22815 scope_die_for (type, context_die), type);
22816
22817 equate_type_number_to_die (type, subr_die);
22818 add_prototyped_attribute (subr_die, type);
22819 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
22820 context_die);
22821 gen_formal_types_die (type, subr_die);
22822
22823 if (get_AT (subr_die, DW_AT_name))
22824 add_pubtype (type, subr_die);
22825 }
22826
22827 /* Generate a DIE for a type definition. */
22828
22829 static void
22830 gen_typedef_die (tree decl, dw_die_ref context_die)
22831 {
22832 dw_die_ref type_die;
22833 tree origin;
22834
22835 if (TREE_ASM_WRITTEN (decl))
22836 {
22837 if (DECL_ORIGINAL_TYPE (decl))
22838 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
22839 return;
22840 }
22841
22842 TREE_ASM_WRITTEN (decl) = 1;
22843 type_die = new_die (DW_TAG_typedef, context_die, decl);
22844 origin = decl_ultimate_origin (decl);
22845 if (origin != NULL)
22846 add_abstract_origin_attribute (type_die, origin);
22847 else
22848 {
22849 tree type;
22850
22851 add_name_and_src_coords_attributes (type_die, decl);
22852 if (DECL_ORIGINAL_TYPE (decl))
22853 {
22854 type = DECL_ORIGINAL_TYPE (decl);
22855
22856 if (type == error_mark_node)
22857 return;
22858
22859 gcc_assert (type != TREE_TYPE (decl));
22860 equate_type_number_to_die (TREE_TYPE (decl), type_die);
22861 }
22862 else
22863 {
22864 type = TREE_TYPE (decl);
22865
22866 if (type == error_mark_node)
22867 return;
22868
22869 if (is_naming_typedef_decl (TYPE_NAME (type)))
22870 {
22871 /* Here, we are in the case of decl being a typedef naming
22872 an anonymous type, e.g.:
22873 typedef struct {...} foo;
22874 In that case TREE_TYPE (decl) is not a typedef variant
22875 type and TYPE_NAME of the anonymous type is set to the
22876 TYPE_DECL of the typedef. This construct is emitted by
22877 the C++ FE.
22878
22879 TYPE is the anonymous struct named by the typedef
22880 DECL. As we need the DW_AT_type attribute of the
22881 DW_TAG_typedef to point to the DIE of TYPE, let's
22882 generate that DIE right away. add_type_attribute
22883 called below will then pick (via lookup_type_die) that
22884 anonymous struct DIE. */
22885 if (!TREE_ASM_WRITTEN (type))
22886 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
22887
22888 /* This is a GNU Extension. We are adding a
22889 DW_AT_linkage_name attribute to the DIE of the
22890 anonymous struct TYPE. The value of that attribute
22891 is the name of the typedef decl naming the anonymous
22892 struct. This greatly eases the work of consumers of
22893 this debug info. */
22894 add_linkage_name_raw (lookup_type_die (type), decl);
22895 }
22896 }
22897
22898 add_type_attribute (type_die, type, decl_quals (decl), false,
22899 context_die);
22900
22901 if (is_naming_typedef_decl (decl))
22902 /* We want that all subsequent calls to lookup_type_die with
22903 TYPE in argument yield the DW_TAG_typedef we have just
22904 created. */
22905 equate_type_number_to_die (type, type_die);
22906
22907 add_accessibility_attribute (type_die, decl);
22908 }
22909
22910 if (DECL_ABSTRACT_P (decl))
22911 equate_decl_number_to_die (decl, type_die);
22912
22913 if (get_AT (type_die, DW_AT_name))
22914 add_pubtype (decl, type_die);
22915 }
22916
22917 /* Generate a DIE for a struct, class, enum or union type. */
22918
22919 static void
22920 gen_tagged_type_die (tree type,
22921 dw_die_ref context_die,
22922 enum debug_info_usage usage)
22923 {
22924 int need_pop;
22925
22926 if (type == NULL_TREE
22927 || !is_tagged_type (type))
22928 return;
22929
22930 if (TREE_ASM_WRITTEN (type))
22931 need_pop = 0;
22932 /* If this is a nested type whose containing class hasn't been written
22933 out yet, writing it out will cover this one, too. This does not apply
22934 to instantiations of member class templates; they need to be added to
22935 the containing class as they are generated. FIXME: This hurts the
22936 idea of combining type decls from multiple TUs, since we can't predict
22937 what set of template instantiations we'll get. */
22938 else if (TYPE_CONTEXT (type)
22939 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22940 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
22941 {
22942 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
22943
22944 if (TREE_ASM_WRITTEN (type))
22945 return;
22946
22947 /* If that failed, attach ourselves to the stub. */
22948 push_decl_scope (TYPE_CONTEXT (type));
22949 context_die = lookup_type_die (TYPE_CONTEXT (type));
22950 need_pop = 1;
22951 }
22952 else if (TYPE_CONTEXT (type) != NULL_TREE
22953 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
22954 {
22955 /* If this type is local to a function that hasn't been written
22956 out yet, use a NULL context for now; it will be fixed up in
22957 decls_for_scope. */
22958 context_die = lookup_decl_die (TYPE_CONTEXT (type));
22959 /* A declaration DIE doesn't count; nested types need to go in the
22960 specification. */
22961 if (context_die && is_declaration_die (context_die))
22962 context_die = NULL;
22963 need_pop = 0;
22964 }
22965 else
22966 {
22967 context_die = declare_in_namespace (type, context_die);
22968 need_pop = 0;
22969 }
22970
22971 if (TREE_CODE (type) == ENUMERAL_TYPE)
22972 {
22973 /* This might have been written out by the call to
22974 declare_in_namespace. */
22975 if (!TREE_ASM_WRITTEN (type))
22976 gen_enumeration_type_die (type, context_die);
22977 }
22978 else
22979 gen_struct_or_union_type_die (type, context_die, usage);
22980
22981 if (need_pop)
22982 pop_decl_scope ();
22983
22984 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
22985 it up if it is ever completed. gen_*_type_die will set it for us
22986 when appropriate. */
22987 }
22988
22989 /* Generate a type description DIE. */
22990
22991 static void
22992 gen_type_die_with_usage (tree type, dw_die_ref context_die,
22993 enum debug_info_usage usage)
22994 {
22995 struct array_descr_info info;
22996
22997 if (type == NULL_TREE || type == error_mark_node)
22998 return;
22999
23000 if (flag_checking && type)
23001 verify_type (type);
23002
23003 if (TYPE_NAME (type) != NULL_TREE
23004 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
23005 && is_redundant_typedef (TYPE_NAME (type))
23006 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
23007 /* The DECL of this type is a typedef we don't want to emit debug
23008 info for but we want debug info for its underlying typedef.
23009 This can happen, e.g., for the injected-class-name of a C++
23010 type. */
23011 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
23012
23013 /* If TYPE is a typedef type variant, let's generate debug info
23014 for the parent typedef which TYPE is a type of. */
23015 if (typedef_variant_p (type))
23016 {
23017 if (TREE_ASM_WRITTEN (type))
23018 return;
23019
23020 /* Prevent broken recursion; we can't hand off to the same type. */
23021 gcc_assert (DECL_ORIGINAL_TYPE (TYPE_NAME (type)) != type);
23022
23023 /* Give typedefs the right scope. */
23024 context_die = scope_die_for (type, context_die);
23025
23026 TREE_ASM_WRITTEN (type) = 1;
23027
23028 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
23029 return;
23030 }
23031
23032 /* If type is an anonymous tagged type named by a typedef, let's
23033 generate debug info for the typedef. */
23034 if (is_naming_typedef_decl (TYPE_NAME (type)))
23035 {
23036 /* Use the DIE of the containing namespace as the parent DIE of
23037 the type description DIE we want to generate. */
23038 if (DECL_CONTEXT (TYPE_NAME (type))
23039 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
23040 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
23041
23042 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
23043 return;
23044 }
23045
23046 /* We are going to output a DIE to represent the unqualified version
23047 of this type (i.e. without any const or volatile qualifiers) so
23048 get the main variant (i.e. the unqualified version) of this type
23049 now. (Vectors and arrays are special because the debugging info is in the
23050 cloned type itself). */
23051 if (TREE_CODE (type) != VECTOR_TYPE
23052 && TREE_CODE (type) != ARRAY_TYPE)
23053 type = type_main_variant (type);
23054
23055 /* If this is an array type with hidden descriptor, handle it first. */
23056 if (!TREE_ASM_WRITTEN (type)
23057 && lang_hooks.types.get_array_descr_info)
23058 {
23059 memset (&info, 0, sizeof (info));
23060 if (lang_hooks.types.get_array_descr_info (type, &info))
23061 {
23062 /* Fortran sometimes emits array types with no dimension. */
23063 gcc_assert (info.ndimensions >= 0
23064 && (info.ndimensions
23065 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
23066 gen_descr_array_type_die (type, &info, context_die);
23067 TREE_ASM_WRITTEN (type) = 1;
23068 return;
23069 }
23070 }
23071
23072 if (TREE_ASM_WRITTEN (type))
23073 {
23074 /* Variable-length types may be incomplete even if
23075 TREE_ASM_WRITTEN. For such types, fall through to
23076 gen_array_type_die() and possibly fill in
23077 DW_AT_{upper,lower}_bound attributes. */
23078 if ((TREE_CODE (type) != ARRAY_TYPE
23079 && TREE_CODE (type) != RECORD_TYPE
23080 && TREE_CODE (type) != UNION_TYPE
23081 && TREE_CODE (type) != QUAL_UNION_TYPE)
23082 || !variably_modified_type_p (type, NULL))
23083 return;
23084 }
23085
23086 switch (TREE_CODE (type))
23087 {
23088 case ERROR_MARK:
23089 break;
23090
23091 case POINTER_TYPE:
23092 case REFERENCE_TYPE:
23093 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
23094 ensures that the gen_type_die recursion will terminate even if the
23095 type is recursive. Recursive types are possible in Ada. */
23096 /* ??? We could perhaps do this for all types before the switch
23097 statement. */
23098 TREE_ASM_WRITTEN (type) = 1;
23099
23100 /* For these types, all that is required is that we output a DIE (or a
23101 set of DIEs) to represent the "basis" type. */
23102 gen_type_die_with_usage (TREE_TYPE (type), context_die,
23103 DINFO_USAGE_IND_USE);
23104 break;
23105
23106 case OFFSET_TYPE:
23107 /* This code is used for C++ pointer-to-data-member types.
23108 Output a description of the relevant class type. */
23109 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
23110 DINFO_USAGE_IND_USE);
23111
23112 /* Output a description of the type of the object pointed to. */
23113 gen_type_die_with_usage (TREE_TYPE (type), context_die,
23114 DINFO_USAGE_IND_USE);
23115
23116 /* Now output a DIE to represent this pointer-to-data-member type
23117 itself. */
23118 gen_ptr_to_mbr_type_die (type, context_die);
23119 break;
23120
23121 case FUNCTION_TYPE:
23122 /* Force out return type (in case it wasn't forced out already). */
23123 gen_type_die_with_usage (TREE_TYPE (type), context_die,
23124 DINFO_USAGE_DIR_USE);
23125 gen_subroutine_type_die (type, context_die);
23126 break;
23127
23128 case METHOD_TYPE:
23129 /* Force out return type (in case it wasn't forced out already). */
23130 gen_type_die_with_usage (TREE_TYPE (type), context_die,
23131 DINFO_USAGE_DIR_USE);
23132 gen_subroutine_type_die (type, context_die);
23133 break;
23134
23135 case ARRAY_TYPE:
23136 case VECTOR_TYPE:
23137 gen_array_type_die (type, context_die);
23138 break;
23139
23140 case ENUMERAL_TYPE:
23141 case RECORD_TYPE:
23142 case UNION_TYPE:
23143 case QUAL_UNION_TYPE:
23144 gen_tagged_type_die (type, context_die, usage);
23145 return;
23146
23147 case VOID_TYPE:
23148 case INTEGER_TYPE:
23149 case REAL_TYPE:
23150 case FIXED_POINT_TYPE:
23151 case COMPLEX_TYPE:
23152 case BOOLEAN_TYPE:
23153 case POINTER_BOUNDS_TYPE:
23154 /* No DIEs needed for fundamental types. */
23155 break;
23156
23157 case NULLPTR_TYPE:
23158 case LANG_TYPE:
23159 /* Just use DW_TAG_unspecified_type. */
23160 {
23161 dw_die_ref type_die = lookup_type_die (type);
23162 if (type_die == NULL)
23163 {
23164 tree name = TYPE_IDENTIFIER (type);
23165 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
23166 type);
23167 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
23168 equate_type_number_to_die (type, type_die);
23169 }
23170 }
23171 break;
23172
23173 default:
23174 if (is_cxx_auto (type))
23175 {
23176 tree name = TYPE_IDENTIFIER (type);
23177 dw_die_ref *die = (name == get_identifier ("auto")
23178 ? &auto_die : &decltype_auto_die);
23179 if (!*die)
23180 {
23181 *die = new_die (DW_TAG_unspecified_type,
23182 comp_unit_die (), NULL_TREE);
23183 add_name_attribute (*die, IDENTIFIER_POINTER (name));
23184 }
23185 equate_type_number_to_die (type, *die);
23186 break;
23187 }
23188 gcc_unreachable ();
23189 }
23190
23191 TREE_ASM_WRITTEN (type) = 1;
23192 }
23193
23194 static void
23195 gen_type_die (tree type, dw_die_ref context_die)
23196 {
23197 if (type != error_mark_node)
23198 {
23199 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
23200 if (flag_checking)
23201 {
23202 dw_die_ref die = lookup_type_die (type);
23203 if (die)
23204 check_die (die);
23205 }
23206 }
23207 }
23208
23209 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
23210 things which are local to the given block. */
23211
23212 static void
23213 gen_block_die (tree stmt, dw_die_ref context_die)
23214 {
23215 int must_output_die = 0;
23216 bool inlined_func;
23217
23218 /* Ignore blocks that are NULL. */
23219 if (stmt == NULL_TREE)
23220 return;
23221
23222 inlined_func = inlined_function_outer_scope_p (stmt);
23223
23224 /* If the block is one fragment of a non-contiguous block, do not
23225 process the variables, since they will have been done by the
23226 origin block. Do process subblocks. */
23227 if (BLOCK_FRAGMENT_ORIGIN (stmt))
23228 {
23229 tree sub;
23230
23231 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
23232 gen_block_die (sub, context_die);
23233
23234 return;
23235 }
23236
23237 /* Determine if we need to output any Dwarf DIEs at all to represent this
23238 block. */
23239 if (inlined_func)
23240 /* The outer scopes for inlinings *must* always be represented. We
23241 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
23242 must_output_die = 1;
23243 else
23244 {
23245 /* Determine if this block directly contains any "significant"
23246 local declarations which we will need to output DIEs for. */
23247 if (debug_info_level > DINFO_LEVEL_TERSE)
23248 /* We are not in terse mode so *any* local declaration counts
23249 as being a "significant" one. */
23250 must_output_die = ((BLOCK_VARS (stmt) != NULL
23251 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
23252 && (TREE_USED (stmt)
23253 || TREE_ASM_WRITTEN (stmt)
23254 || BLOCK_ABSTRACT (stmt)));
23255 else if ((TREE_USED (stmt)
23256 || TREE_ASM_WRITTEN (stmt)
23257 || BLOCK_ABSTRACT (stmt))
23258 && !dwarf2out_ignore_block (stmt))
23259 must_output_die = 1;
23260 }
23261
23262 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
23263 DIE for any block which contains no significant local declarations at
23264 all. Rather, in such cases we just call `decls_for_scope' so that any
23265 needed Dwarf info for any sub-blocks will get properly generated. Note
23266 that in terse mode, our definition of what constitutes a "significant"
23267 local declaration gets restricted to include only inlined function
23268 instances and local (nested) function definitions. */
23269 if (must_output_die)
23270 {
23271 if (inlined_func)
23272 {
23273 /* If STMT block is abstract, that means we have been called
23274 indirectly from dwarf2out_abstract_function.
23275 That function rightfully marks the descendant blocks (of
23276 the abstract function it is dealing with) as being abstract,
23277 precisely to prevent us from emitting any
23278 DW_TAG_inlined_subroutine DIE as a descendant
23279 of an abstract function instance. So in that case, we should
23280 not call gen_inlined_subroutine_die.
23281
23282 Later though, when cgraph asks dwarf2out to emit info
23283 for the concrete instance of the function decl into which
23284 the concrete instance of STMT got inlined, the latter will lead
23285 to the generation of a DW_TAG_inlined_subroutine DIE. */
23286 if (! BLOCK_ABSTRACT (stmt))
23287 gen_inlined_subroutine_die (stmt, context_die);
23288 }
23289 else
23290 gen_lexical_block_die (stmt, context_die);
23291 }
23292 else
23293 decls_for_scope (stmt, context_die);
23294 }
23295
23296 /* Process variable DECL (or variable with origin ORIGIN) within
23297 block STMT and add it to CONTEXT_DIE. */
23298 static void
23299 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
23300 {
23301 dw_die_ref die;
23302 tree decl_or_origin = decl ? decl : origin;
23303
23304 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
23305 die = lookup_decl_die (decl_or_origin);
23306 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
23307 {
23308 if (TYPE_DECL_IS_STUB (decl_or_origin))
23309 die = lookup_type_die (TREE_TYPE (decl_or_origin));
23310 else
23311 die = lookup_decl_die (decl_or_origin);
23312 /* Avoid re-creating the DIE late if it was optimized as unused early. */
23313 if (! die && ! early_dwarf)
23314 return;
23315 }
23316 else
23317 die = NULL;
23318
23319 if (die != NULL && die->die_parent == NULL)
23320 add_child_die (context_die, die);
23321 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
23322 {
23323 if (early_dwarf)
23324 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
23325 stmt, context_die);
23326 }
23327 else
23328 gen_decl_die (decl, origin, NULL, context_die);
23329 }
23330
23331 /* Generate all of the decls declared within a given scope and (recursively)
23332 all of its sub-blocks. */
23333
23334 static void
23335 decls_for_scope (tree stmt, dw_die_ref context_die)
23336 {
23337 tree decl;
23338 unsigned int i;
23339 tree subblocks;
23340
23341 /* Ignore NULL blocks. */
23342 if (stmt == NULL_TREE)
23343 return;
23344
23345 /* Output the DIEs to represent all of the data objects and typedefs
23346 declared directly within this block but not within any nested
23347 sub-blocks. Also, nested function and tag DIEs have been
23348 generated with a parent of NULL; fix that up now. We don't
23349 have to do this if we're at -g1. */
23350 if (debug_info_level > DINFO_LEVEL_TERSE)
23351 {
23352 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
23353 process_scope_var (stmt, decl, NULL_TREE, context_die);
23354 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
23355 origin - avoid doing this twice as we have no good way to see
23356 if we've done it once already. */
23357 if (! early_dwarf)
23358 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
23359 process_scope_var (stmt, NULL, BLOCK_NONLOCALIZED_VAR (stmt, i),
23360 context_die);
23361 }
23362
23363 /* Even if we're at -g1, we need to process the subblocks in order to get
23364 inlined call information. */
23365
23366 /* Output the DIEs to represent all sub-blocks (and the items declared
23367 therein) of this block. */
23368 for (subblocks = BLOCK_SUBBLOCKS (stmt);
23369 subblocks != NULL;
23370 subblocks = BLOCK_CHAIN (subblocks))
23371 gen_block_die (subblocks, context_die);
23372 }
23373
23374 /* Is this a typedef we can avoid emitting? */
23375
23376 bool
23377 is_redundant_typedef (const_tree decl)
23378 {
23379 if (TYPE_DECL_IS_STUB (decl))
23380 return true;
23381
23382 if (DECL_ARTIFICIAL (decl)
23383 && DECL_CONTEXT (decl)
23384 && is_tagged_type (DECL_CONTEXT (decl))
23385 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
23386 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
23387 /* Also ignore the artificial member typedef for the class name. */
23388 return true;
23389
23390 return false;
23391 }
23392
23393 /* Return TRUE if TYPE is a typedef that names a type for linkage
23394 purposes. This kind of typedefs is produced by the C++ FE for
23395 constructs like:
23396
23397 typedef struct {...} foo;
23398
23399 In that case, there is no typedef variant type produced for foo.
23400 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
23401 struct type. */
23402
23403 static bool
23404 is_naming_typedef_decl (const_tree decl)
23405 {
23406 if (decl == NULL_TREE
23407 || TREE_CODE (decl) != TYPE_DECL
23408 || DECL_NAMELESS (decl)
23409 || !is_tagged_type (TREE_TYPE (decl))
23410 || DECL_IS_BUILTIN (decl)
23411 || is_redundant_typedef (decl)
23412 /* It looks like Ada produces TYPE_DECLs that are very similar
23413 to C++ naming typedefs but that have different
23414 semantics. Let's be specific to C++ for now. */
23415 || !is_cxx ())
23416 return FALSE;
23417
23418 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
23419 && TYPE_NAME (TREE_TYPE (decl)) == decl
23420 && (TYPE_STUB_DECL (TREE_TYPE (decl))
23421 != TYPE_NAME (TREE_TYPE (decl))));
23422 }
23423
23424 /* Looks up the DIE for a context. */
23425
23426 static inline dw_die_ref
23427 lookup_context_die (tree context)
23428 {
23429 if (context)
23430 {
23431 /* Find die that represents this context. */
23432 if (TYPE_P (context))
23433 {
23434 context = TYPE_MAIN_VARIANT (context);
23435 dw_die_ref ctx = lookup_type_die (context);
23436 if (!ctx)
23437 return NULL;
23438 return strip_naming_typedef (context, ctx);
23439 }
23440 else
23441 return lookup_decl_die (context);
23442 }
23443 return comp_unit_die ();
23444 }
23445
23446 /* Returns the DIE for a context. */
23447
23448 static inline dw_die_ref
23449 get_context_die (tree context)
23450 {
23451 if (context)
23452 {
23453 /* Find die that represents this context. */
23454 if (TYPE_P (context))
23455 {
23456 context = TYPE_MAIN_VARIANT (context);
23457 return strip_naming_typedef (context, force_type_die (context));
23458 }
23459 else
23460 return force_decl_die (context);
23461 }
23462 return comp_unit_die ();
23463 }
23464
23465 /* Returns the DIE for decl. A DIE will always be returned. */
23466
23467 static dw_die_ref
23468 force_decl_die (tree decl)
23469 {
23470 dw_die_ref decl_die;
23471 unsigned saved_external_flag;
23472 tree save_fn = NULL_TREE;
23473 decl_die = lookup_decl_die (decl);
23474 if (!decl_die)
23475 {
23476 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
23477
23478 decl_die = lookup_decl_die (decl);
23479 if (decl_die)
23480 return decl_die;
23481
23482 switch (TREE_CODE (decl))
23483 {
23484 case FUNCTION_DECL:
23485 /* Clear current_function_decl, so that gen_subprogram_die thinks
23486 that this is a declaration. At this point, we just want to force
23487 declaration die. */
23488 save_fn = current_function_decl;
23489 current_function_decl = NULL_TREE;
23490 gen_subprogram_die (decl, context_die);
23491 current_function_decl = save_fn;
23492 break;
23493
23494 case VAR_DECL:
23495 /* Set external flag to force declaration die. Restore it after
23496 gen_decl_die() call. */
23497 saved_external_flag = DECL_EXTERNAL (decl);
23498 DECL_EXTERNAL (decl) = 1;
23499 gen_decl_die (decl, NULL, NULL, context_die);
23500 DECL_EXTERNAL (decl) = saved_external_flag;
23501 break;
23502
23503 case NAMESPACE_DECL:
23504 if (dwarf_version >= 3 || !dwarf_strict)
23505 dwarf2out_decl (decl);
23506 else
23507 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
23508 decl_die = comp_unit_die ();
23509 break;
23510
23511 case TRANSLATION_UNIT_DECL:
23512 decl_die = comp_unit_die ();
23513 break;
23514
23515 default:
23516 gcc_unreachable ();
23517 }
23518
23519 /* We should be able to find the DIE now. */
23520 if (!decl_die)
23521 decl_die = lookup_decl_die (decl);
23522 gcc_assert (decl_die);
23523 }
23524
23525 return decl_die;
23526 }
23527
23528 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
23529 always returned. */
23530
23531 static dw_die_ref
23532 force_type_die (tree type)
23533 {
23534 dw_die_ref type_die;
23535
23536 type_die = lookup_type_die (type);
23537 if (!type_die)
23538 {
23539 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
23540
23541 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
23542 false, context_die);
23543 gcc_assert (type_die);
23544 }
23545 return type_die;
23546 }
23547
23548 /* Force out any required namespaces to be able to output DECL,
23549 and return the new context_die for it, if it's changed. */
23550
23551 static dw_die_ref
23552 setup_namespace_context (tree thing, dw_die_ref context_die)
23553 {
23554 tree context = (DECL_P (thing)
23555 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
23556 if (context && TREE_CODE (context) == NAMESPACE_DECL)
23557 /* Force out the namespace. */
23558 context_die = force_decl_die (context);
23559
23560 return context_die;
23561 }
23562
23563 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
23564 type) within its namespace, if appropriate.
23565
23566 For compatibility with older debuggers, namespace DIEs only contain
23567 declarations; all definitions are emitted at CU scope, with
23568 DW_AT_specification pointing to the declaration (like with class
23569 members). */
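/* Illustrative sketch (an assumption about the intended DIE shape, not a dump
   from a real compilation): for "namespace N { int i; }", the namespace DIE
   contains only a declaration of i, and the defining DIE lives at CU scope:

       DW_TAG_namespace "N"
	 DW_TAG_variable "i"  (DW_AT_declaration)        <-- (1)
       DW_TAG_variable
	 DW_AT_specification -> (1)
	 DW_AT_location ...  */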
23570
23571 static dw_die_ref
23572 declare_in_namespace (tree thing, dw_die_ref context_die)
23573 {
23574 dw_die_ref ns_context;
23575
23576 if (debug_info_level <= DINFO_LEVEL_TERSE)
23577 return context_die;
23578
23579 /* External declarations in the local scope only need to be emitted
23580 once, not once in the namespace and once in the scope.
23581
23582 This avoids declaring the `extern' below in the
23583 namespace DIE as well as in the innermost scope:
23584
23585 namespace S
23586 {
23587 int i=5;
23588 int foo()
23589 {
23590 int i=8;
23591 extern int i;
23592 return i;
23593 }
23594 }
23595 */
23596 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
23597 return context_die;
23598
23599 /* If this decl is from an inlined function, then don't try to emit it in its
23600 namespace, as we will get confused. It would have already been emitted
23601 when the abstract instance of the inline function was emitted anyway. */
23602 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
23603 return context_die;
23604
23605 ns_context = setup_namespace_context (thing, context_die);
23606
23607 if (ns_context != context_die)
23608 {
23609 if (is_fortran ())
23610 return ns_context;
23611 if (DECL_P (thing))
23612 gen_decl_die (thing, NULL, NULL, ns_context);
23613 else
23614 gen_type_die (thing, ns_context);
23615 }
23616 return context_die;
23617 }
23618
23619 /* Generate a DIE for a namespace or namespace alias. */
23620
23621 static void
23622 gen_namespace_die (tree decl, dw_die_ref context_die)
23623 {
23624 dw_die_ref namespace_die;
23625
23626 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
23627 they are an alias of. */
23628 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
23629 {
23630 /* Output a real namespace or module. */
23631 context_die = setup_namespace_context (decl, comp_unit_die ());
23632 namespace_die = new_die (is_fortran ()
23633 ? DW_TAG_module : DW_TAG_namespace,
23634 context_die, decl);
23635 /* For Fortran modules defined in a different CU, don't add src coords. */
23636 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
23637 {
23638 const char *name = dwarf2_name (decl, 0);
23639 if (name)
23640 add_name_attribute (namespace_die, name);
23641 }
23642 else
23643 add_name_and_src_coords_attributes (namespace_die, decl);
23644 if (DECL_EXTERNAL (decl))
23645 add_AT_flag (namespace_die, DW_AT_declaration, 1);
23646 equate_decl_number_to_die (decl, namespace_die);
23647 }
23648 else
23649 {
23650 /* Output a namespace alias. */
23651
23652 /* Force out the namespace we are an alias of, if necessary. */
23653 dw_die_ref origin_die
23654 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
23655
23656 if (DECL_FILE_SCOPE_P (decl)
23657 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
23658 context_die = setup_namespace_context (decl, comp_unit_die ());
23659 /* Now create the namespace alias DIE. */
23660 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
23661 add_name_and_src_coords_attributes (namespace_die, decl);
23662 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
23663 equate_decl_number_to_die (decl, namespace_die);
23664 }
23665 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
23666 if (want_pubnames ())
23667 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
23668 }
23669
23670 /* Generate Dwarf debug information for a decl described by DECL.
23671 The return value is currently only meaningful for PARM_DECLs,
23672 for all other decls it returns NULL.
23673
23674 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
23675 It can be NULL otherwise. */
23676
23677 static dw_die_ref
23678 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
23679 dw_die_ref context_die)
23680 {
23681 tree decl_or_origin = decl ? decl : origin;
23682 tree class_origin = NULL, ultimate_origin;
23683
23684 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
23685 return NULL;
23686
23687 /* Ignore pointer bounds decls. */
23688 if (DECL_P (decl_or_origin)
23689 && TREE_TYPE (decl_or_origin)
23690 && POINTER_BOUNDS_P (decl_or_origin))
23691 return NULL;
23692
23693 switch (TREE_CODE (decl_or_origin))
23694 {
23695 case ERROR_MARK:
23696 break;
23697
23698 case CONST_DECL:
23699 if (!is_fortran () && !is_ada ())
23700 {
23701 /* The individual enumerators of an enum type get output when we output
23702 the Dwarf representation of the relevant enum type itself. */
23703 break;
23704 }
23705
23706 /* Emit its type. */
23707 gen_type_die (TREE_TYPE (decl), context_die);
23708
23709 /* And its containing namespace. */
23710 context_die = declare_in_namespace (decl, context_die);
23711
23712 gen_const_die (decl, context_die);
23713 break;
23714
23715 case FUNCTION_DECL:
23716 /* Don't output any DIEs to represent mere function declarations,
23717 unless they are class members or explicit block externs. */
23718 if (DECL_INITIAL (decl_or_origin) == NULL_TREE
23719 && DECL_FILE_SCOPE_P (decl_or_origin)
23720 && (current_function_decl == NULL_TREE
23721 || DECL_ARTIFICIAL (decl_or_origin)))
23722 break;
23723
23724 #if 0
23725 /* FIXME */
23726 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
23727 on local redeclarations of global functions. That seems broken. */
23728 if (current_function_decl != decl)
23729 /* This is only a declaration. */;
23730 #endif
23731
23732 /* If we're emitting a clone, emit info for the abstract instance. */
23733 if (origin || DECL_ORIGIN (decl) != decl)
23734 dwarf2out_abstract_function (origin
23735 ? DECL_ORIGIN (origin)
23736 : DECL_ABSTRACT_ORIGIN (decl));
23737
23738 /* If we're emitting an out-of-line copy of an inline function,
23739 emit info for the abstract instance and set up to refer to it. */
23740 else if (cgraph_function_possibly_inlined_p (decl)
23741 && ! DECL_ABSTRACT_P (decl)
23742 && ! class_or_namespace_scope_p (context_die)
23743 /* dwarf2out_abstract_function won't emit a die if this is just
23744 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
23745 that case, because that works only if we have a die. */
23746 && DECL_INITIAL (decl) != NULL_TREE)
23747 {
23748 dwarf2out_abstract_function (decl);
23749 set_decl_origin_self (decl);
23750 }
23751
23752 /* Otherwise we're emitting the primary DIE for this decl. */
23753 else if (debug_info_level > DINFO_LEVEL_TERSE)
23754 {
23755 /* Before we describe the FUNCTION_DECL itself, make sure that we
23756 have its containing type. */
23757 if (!origin)
23758 origin = decl_class_context (decl);
23759 if (origin != NULL_TREE)
23760 gen_type_die (origin, context_die);
23761
23762 /* And its return type. */
23763 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
23764
23765 /* And its virtual context. */
23766 if (DECL_VINDEX (decl) != NULL_TREE)
23767 gen_type_die (DECL_CONTEXT (decl), context_die);
23768
23769 /* Make sure we have a member DIE for decl. */
23770 if (origin != NULL_TREE)
23771 gen_type_die_for_member (origin, decl, context_die);
23772
23773 /* And its containing namespace. */
23774 context_die = declare_in_namespace (decl, context_die);
23775 }
23776
23777 /* Now output a DIE to represent the function itself. */
23778 if (decl)
23779 gen_subprogram_die (decl, context_die);
23780 break;
23781
23782 case TYPE_DECL:
23783 /* If we are in terse mode, don't generate any DIEs to represent any
23784 actual typedefs. */
23785 if (debug_info_level <= DINFO_LEVEL_TERSE)
23786 break;
23787
23788 /* In the special case of a TYPE_DECL node representing the declaration
23789 of some type tag, if the given TYPE_DECL is marked as having been
23790 instantiated from some other (original) TYPE_DECL node (e.g. one which
23791 was generated within the original definition of an inline function) we
23792 used to generate a special (abbreviated) DW_TAG_structure_type,
23793 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
23794 should be actually referencing those DIEs, as variable DIEs with that
23795 type would be emitted already in the abstract origin, so it was always
23796 removed during unused type pruning. Don't add anything in this
23797 case. */
23798 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
23799 break;
23800
23801 if (is_redundant_typedef (decl))
23802 gen_type_die (TREE_TYPE (decl), context_die);
23803 else
23804 /* Output a DIE to represent the typedef itself. */
23805 gen_typedef_die (decl, context_die);
23806 break;
23807
23808 case LABEL_DECL:
23809 if (debug_info_level >= DINFO_LEVEL_NORMAL)
23810 gen_label_die (decl, context_die);
23811 break;
23812
23813 case VAR_DECL:
23814 case RESULT_DECL:
23815 /* If we are in terse mode, don't generate any DIEs to represent any
23816 variable declarations or definitions. */
23817 if (debug_info_level <= DINFO_LEVEL_TERSE)
23818 break;
23819
23820 /* Output any DIEs that are needed to specify the type of this data
23821 object. */
23822 if (decl_by_reference_p (decl_or_origin))
23823 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23824 else
23825 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23826
23827 /* And its containing type. */
23828 class_origin = decl_class_context (decl_or_origin);
23829 if (class_origin != NULL_TREE)
23830 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
23831
23832 /* And its containing namespace. */
23833 context_die = declare_in_namespace (decl_or_origin, context_die);
23834
23835 /* Now output the DIE to represent the data object itself. This gets
23836 complicated because of the possibility that the VAR_DECL really
23837 represents an inlined instance of a formal parameter for an inline
23838 function. */
23839 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23840 if (ultimate_origin != NULL_TREE
23841 && TREE_CODE (ultimate_origin) == PARM_DECL)
23842 gen_formal_parameter_die (decl, origin,
23843 true /* Emit name attribute. */,
23844 context_die);
23845 else
23846 gen_variable_die (decl, origin, context_die);
23847 break;
23848
23849 case FIELD_DECL:
23850 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
23851 /* Ignore the nameless fields that are used to skip bits but handle C++
23852 anonymous unions and structs. */
23853 if (DECL_NAME (decl) != NULL_TREE
23854 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
23855 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
23856 {
23857 gen_type_die (member_declared_type (decl), context_die);
23858 gen_field_die (decl, ctx, context_die);
23859 }
23860 break;
23861
23862 case PARM_DECL:
23863 if (DECL_BY_REFERENCE (decl_or_origin))
23864 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23865 else
23866 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23867 return gen_formal_parameter_die (decl, origin,
23868 true /* Emit name attribute. */,
23869 context_die);
23870
23871 case NAMESPACE_DECL:
23872 if (dwarf_version >= 3 || !dwarf_strict)
23873 gen_namespace_die (decl, context_die);
23874 break;
23875
23876 case IMPORTED_DECL:
23877 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
23878 DECL_CONTEXT (decl), context_die);
23879 break;
23880
23881 case NAMELIST_DECL:
23882 gen_namelist_decl (DECL_NAME (decl), context_die,
23883 NAMELIST_DECL_ASSOCIATED_DECL (decl));
23884 break;
23885
23886 default:
23887 /* Probably some frontend-internal decl. Assume we don't care. */
23888 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
23889 break;
23890 }
23891
23892 return NULL;
23893 }
23894 \f
23895 /* Output initial debug information for global DECL. Called at the
23896 end of the parsing process.
23897
23898 This is the initial debug generation process. As such, the DIEs
23899 generated may be incomplete. A later debug generation pass
23900 (dwarf2out_late_global_decl) will augment the information generated
23901 in this pass (e.g., with complete location info). */
23902
23903 static void
23904 dwarf2out_early_global_decl (tree decl)
23905 {
23906 set_early_dwarf s;
23907
23908 /* gen_decl_die() will set DECL_ABSTRACT because
23909 cgraph_function_possibly_inlined_p() returns true. This in
23910 turn will cause DW_AT_inline attributes to be set.
23911
23912 This happens because at early dwarf generation, there is no
23913 cgraph information, causing cgraph_function_possibly_inlined_p()
23914 to return true. Trick cgraph_function_possibly_inlined_p()
23915 while we generate dwarf early. */
23916 bool save = symtab->global_info_ready;
23917 symtab->global_info_ready = true;
23918
23919 /* We don't handle TYPE_DECLs. If required, they'll be reached via
23920 other DECLs and they can point to template types or other things
23921 that dwarf2out can't handle when done via dwarf2out_decl. */
23922 if (TREE_CODE (decl) != TYPE_DECL
23923 && TREE_CODE (decl) != PARM_DECL)
23924 {
23925 tree save_fndecl = current_function_decl;
23926 if (TREE_CODE (decl) == FUNCTION_DECL)
23927 {
23928 /* No cfun means the symbol has no body, so there's nothing
23929 to emit. */
23930 if (!DECL_STRUCT_FUNCTION (decl))
23931 goto early_decl_exit;
23932
23933 /* For nested functions, emit DIEs for the parents first so that all
23934 nested DIEs are generated at the proper scope in the first
23935 shot. */
23936 tree context = decl_function_context (decl);
23937 if (context != NULL)
23938 {
23939 current_function_decl = context;
23940 dwarf2out_decl (context);
23941 }
23942
23943 current_function_decl = decl;
23944 }
23945 dwarf2out_decl (decl);
23946 if (TREE_CODE (decl) == FUNCTION_DECL)
23947 current_function_decl = save_fndecl;
23948 }
23949 early_decl_exit:
23950 symtab->global_info_ready = save;
23951 }
23952
23953 /* Output debug information for global decl DECL. Called from
23954 toplev.c after compilation proper has finished. */
23955
23956 static void
23957 dwarf2out_late_global_decl (tree decl)
23958 {
23959 /* Fill-in any location information we were unable to determine
23960 on the first pass. */
23961 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
23962 {
23963 dw_die_ref die = lookup_decl_die (decl);
23964
23965 /* We have to generate early debug late for LTO. */
23966 if (! die && in_lto_p)
23967 {
23968 dwarf2out_decl (decl);
23969 die = lookup_decl_die (decl);
23970 }
23971
23972 if (die)
23973 {
23974 /* We get called during the early debug phase via the symtab
23975 code invoking late_global_decl for symbols that are optimized
23976 out. When the early phase is not finished, do not add
23977 locations. */
23978 if (! early_dwarf_finished)
23979 tree_add_const_value_attribute_for_decl (die, decl);
23980 else
23981 add_location_or_const_value_attribute (die, decl, false);
23982 }
23983 }
23984 }
23985
23986 /* Output debug information for type decl DECL. Called from toplev.c
23987 and from language front ends (to record built-in types). */
23988 static void
23989 dwarf2out_type_decl (tree decl, int local)
23990 {
23991 if (!local)
23992 {
23993 set_early_dwarf s;
23994 dwarf2out_decl (decl);
23995 }
23996 }
23997
23998 /* Output debug information for imported module or decl DECL.
23999 NAME is non-NULL name in the lexical block if the decl has been renamed.
24000 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
24001 that DECL belongs to.
24002 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
24003 static void
24004 dwarf2out_imported_module_or_decl_1 (tree decl,
24005 tree name,
24006 tree lexical_block,
24007 dw_die_ref lexical_block_die)
24008 {
24009 expanded_location xloc;
24010 dw_die_ref imported_die = NULL;
24011 dw_die_ref at_import_die;
24012
24013 if (TREE_CODE (decl) == IMPORTED_DECL)
24014 {
24015 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
24016 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
24017 gcc_assert (decl);
24018 }
24019 else
24020 xloc = expand_location (input_location);
24021
24022 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
24023 {
24024 at_import_die = force_type_die (TREE_TYPE (decl));
24025 /* For namespace N { typedef void T; } using N::T; base_type_die
24026 returns NULL, but DW_TAG_imported_declaration requires
24027 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
24028 if (!at_import_die)
24029 {
24030 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
24031 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
24032 at_import_die = lookup_type_die (TREE_TYPE (decl));
24033 gcc_assert (at_import_die);
24034 }
24035 }
24036 else
24037 {
24038 at_import_die = lookup_decl_die (decl);
24039 if (!at_import_die)
24040 {
24041 /* If we're trying to avoid duplicate debug info, we may not have
24042 emitted the member decl for this field. Emit it now. */
24043 if (TREE_CODE (decl) == FIELD_DECL)
24044 {
24045 tree type = DECL_CONTEXT (decl);
24046
24047 if (TYPE_CONTEXT (type)
24048 && TYPE_P (TYPE_CONTEXT (type))
24049 && !should_emit_struct_debug (TYPE_CONTEXT (type),
24050 DINFO_USAGE_DIR_USE))
24051 return;
24052 gen_type_die_for_member (type, decl,
24053 get_context_die (TYPE_CONTEXT (type)));
24054 }
24055 if (TREE_CODE (decl) == NAMELIST_DECL)
24056 at_import_die = gen_namelist_decl (DECL_NAME (decl),
24057 get_context_die (DECL_CONTEXT (decl)),
24058 NULL_TREE);
24059 else
24060 at_import_die = force_decl_die (decl);
24061 }
24062 }
24063
24064 if (TREE_CODE (decl) == NAMESPACE_DECL)
24065 {
24066 if (dwarf_version >= 3 || !dwarf_strict)
24067 imported_die = new_die (DW_TAG_imported_module,
24068 lexical_block_die,
24069 lexical_block);
24070 else
24071 return;
24072 }
24073 else
24074 imported_die = new_die (DW_TAG_imported_declaration,
24075 lexical_block_die,
24076 lexical_block);
24077
24078 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
24079 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
24080 if (name)
24081 add_AT_string (imported_die, DW_AT_name,
24082 IDENTIFIER_POINTER (name));
24083 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
24084 }
24085
24086 /* Output debug information for imported module or decl DECL.
24087 NAME is non-NULL name in context if the decl has been renamed.
24088 CHILD is true if decl is one of the renamed decls as part of
24089 importing whole module. */
24090
24091 static void
24092 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
24093 bool child)
24094 {
24095 /* dw_die_ref at_import_die; */
24096 dw_die_ref scope_die;
24097
24098 if (debug_info_level <= DINFO_LEVEL_TERSE)
24099 return;
24100
24101 gcc_assert (decl);
24102
24103 set_early_dwarf s;
24104
24105 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
24106 DIEs: the decl DIE for the DW_AT_import reference and the scope DIE that will
24107 parent the imported DIE. First, get the DIE for the decl itself. */
24108
24109 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
24110 module or decl. If no DIE is found for a non-global, force a new one. */
24111 if (context
24112 && TYPE_P (context)
24113 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
24114 return;
24115
24116 scope_die = get_context_die (context);
24117
24118 if (child)
24119 {
24120 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
24121 there is nothing we can do here. */
24122 if (dwarf_version < 3 && dwarf_strict)
24123 return;
24124
24125 gcc_assert (scope_die->die_child);
24126 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
24127 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
24128 scope_die = scope_die->die_child;
24129 }
24130
24131 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
24132 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
24133 }
24134
24135 /* Output debug information for namelists. */
24136
24137 static dw_die_ref
24138 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
24139 {
24140 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
24141 tree value;
24142 unsigned i;
24143
24144 if (debug_info_level <= DINFO_LEVEL_TERSE)
24145 return NULL;
24146
24147 gcc_assert (scope_die != NULL);
24148 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
24149 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
24150
24151 /* If there are no item_decls, we have a nondefining namelist, e.g.
24152 with USE association; hence, set DW_AT_declaration. */
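/* A purely illustrative Fortran example: a module declaring
"NAMELIST /cfg/ a, b" that another unit pulls in via USE gives that other
unit a nondefining namelist with no item decls, so only a declaration DIE
is generated there. */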
24153 if (item_decls == NULL_TREE)
24154 {
24155 add_AT_flag (nml_die, DW_AT_declaration, 1);
24156 return nml_die;
24157 }
24158
24159 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
24160 {
24161 nml_item_ref_die = lookup_decl_die (value);
24162 if (!nml_item_ref_die)
24163 nml_item_ref_die = force_decl_die (value);
24164
24165 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
24166 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
24167 }
24168 return nml_die;
24169 }
24170
24171
24172 /* Write the debugging output for DECL and return the DIE. */
24173
24174 static void
24175 dwarf2out_decl (tree decl)
24176 {
24177 dw_die_ref context_die = comp_unit_die ();
24178
24179 switch (TREE_CODE (decl))
24180 {
24181 case ERROR_MARK:
24182 return;
24183
24184 case FUNCTION_DECL:
24185 /* What we would really like to do here is to filter out all mere
24186 file-scope declarations of file-scope functions which are never
24187 referenced later within this translation unit (and keep all of the ones
24188 that *are* referenced later on), but we aren't clairvoyant, so we have
24189 no idea which functions will be referenced in the future (i.e. later
24190 on within the current translation unit). So here we just ignore all
24191 file-scope function declarations which are not also definitions. If
24192 and when the debugger needs to know something about these functions,
24193 it will have to hunt around and find the DWARF information associated
24194 with the definition of the function.
24195
24196 We can't just check DECL_EXTERNAL to find out which FUNCTION_DECL
24197 nodes represent definitions and which ones represent mere
24198 declarations. We have to check DECL_INITIAL instead. That's because
24199 the C front-end supports some weird semantics for "extern inline"
24200 function definitions. These can get inlined within the current
24201 translation unit (and thus, we need to generate Dwarf info for their
24202 abstract instances so that the Dwarf info for the concrete inlined
24203 instances can have something to refer to) but the compiler never
24204 generates any out-of-line instances of such things (despite the fact
24205 that they *are* definitions).
24206
24207 The important point is that the C front-end marks these "extern
24208 inline" functions as DECL_EXTERNAL, but we need to generate DWARF for
24209 them anyway. Note that the C++ front-end also plays some similar games
24210 for inline function definitions appearing within include files which
24211 also contain `#pragma interface' pragmas.
24212
24213 If we are called from dwarf2out_abstract_function, output a DIE
24214 anyway. We can end up here this way with early inlining and LTO
24215 where the inlined function is output in a different LTRANS unit
24216 or not at all. */
24217 if (DECL_INITIAL (decl) == NULL_TREE
24218 && ! DECL_ABSTRACT_P (decl))
24219 return;
24220
24221 /* If we're a nested function, initially use a parent of NULL; if we're
24222 a plain function, this will be fixed up in decls_for_scope. If
24223 we're a method, it will be ignored, since we already have a DIE. */
24224 if (decl_function_context (decl)
24225 /* But if we're in terse mode, we don't care about scope. */
24226 && debug_info_level > DINFO_LEVEL_TERSE)
24227 context_die = NULL;
24228 break;
24229
24230 case VAR_DECL:
24231 /* For local statics, look up the proper context DIE. */
24232 if (local_function_static (decl))
24233 context_die = lookup_decl_die (DECL_CONTEXT (decl));
24234
24235 /* If we are in terse mode, don't generate any DIEs to represent any
24236 variable declarations or definitions. */
24237 if (debug_info_level <= DINFO_LEVEL_TERSE)
24238 return;
24239 break;
24240
24241 case CONST_DECL:
24242 if (debug_info_level <= DINFO_LEVEL_TERSE)
24243 return;
24244 if (!is_fortran () && !is_ada ())
24245 return;
24246 if (TREE_STATIC (decl) && decl_function_context (decl))
24247 context_die = lookup_decl_die (DECL_CONTEXT (decl));
24248 break;
24249
24250 case NAMESPACE_DECL:
24251 case IMPORTED_DECL:
24252 if (debug_info_level <= DINFO_LEVEL_TERSE)
24253 return;
24254 if (lookup_decl_die (decl) != NULL)
24255 return;
24256 break;
24257
24258 case TYPE_DECL:
24259 /* Don't emit stubs for types unless they are needed by other DIEs. */
24260 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
24261 return;
24262
24263 /* Don't bother trying to generate any DIEs to represent any of the
24264 normal built-in types for the language we are compiling. */
24265 if (DECL_IS_BUILTIN (decl))
24266 return;
24267
24268 /* If we are in terse mode, don't generate any DIEs for types. */
24269 if (debug_info_level <= DINFO_LEVEL_TERSE)
24270 return;
24271
24272 /* If we're a function-scope tag, initially use a parent of NULL;
24273 this will be fixed up in decls_for_scope. */
24274 if (decl_function_context (decl))
24275 context_die = NULL;
24276
24277 break;
24278
24279 case NAMELIST_DECL:
24280 break;
24281
24282 default:
24283 return;
24284 }
24285
24286 gen_decl_die (decl, NULL, NULL, context_die);
24287
24288 if (flag_checking)
24289 {
24290 dw_die_ref die = lookup_decl_die (decl);
24291 if (die)
24292 check_die (die);
24293 }
24294 }
24295
24296 /* Write the debugging output for DECL. */
24297
24298 static void
24299 dwarf2out_function_decl (tree decl)
24300 {
24301 dwarf2out_decl (decl);
24302 call_arg_locations = NULL;
24303 call_arg_loc_last = NULL;
24304 call_site_count = -1;
24305 tail_call_site_count = -1;
24306 decl_loc_table->empty ();
24307 cached_dw_loc_list_table->empty ();
24308 }
24309
24310 /* Output a marker (i.e. a label) for the beginning of the generated code for
24311 a lexical block. */
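/* A rough sketch of the emitted assembly (label prefixes depend on
BLOCK_BEGIN_LABEL/BLOCK_END_LABEL and the target): block number 2 would be
bracketed roughly as
.LBB2:
... code generated for the block ...
.LBE2:
and the DW_TAG_lexical_block DIE later refers to these labels for its
address range attributes. */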
24312
24313 static void
24314 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
24315 unsigned int blocknum)
24316 {
24317 switch_to_section (current_function_section ());
24318 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
24319 }
24320
24321 /* Output a marker (i.e. a label) for the end of the generated code for a
24322 lexical block. */
24323
24324 static void
24325 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
24326 {
24327 switch_to_section (current_function_section ());
24328 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
24329 }
24330
24331 /* Returns nonzero if it is appropriate not to emit any debugging
24332 information for BLOCK, because it doesn't contain any instructions.
24333
24334 Don't allow this for blocks with nested functions or local classes
24335 as we would end up with orphans, and in the presence of scheduling
24336 we may end up calling them anyway. */
24337
24338 static bool
24339 dwarf2out_ignore_block (const_tree block)
24340 {
24341 tree decl;
24342 unsigned int i;
24343
24344 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
24345 if (TREE_CODE (decl) == FUNCTION_DECL
24346 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24347 return 0;
24348 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
24349 {
24350 decl = BLOCK_NONLOCALIZED_VAR (block, i);
24351 if (TREE_CODE (decl) == FUNCTION_DECL
24352 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24353 return 0;
24354 }
24355
24356 return 1;
24357 }
24358
24359 /* Hash table routines for file_hash. */
24360
24361 bool
24362 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
24363 {
24364 return filename_cmp (p1->filename, p2) == 0;
24365 }
24366
24367 hashval_t
24368 dwarf_file_hasher::hash (dwarf_file_data *p)
24369 {
24370 return htab_hash_string (p->filename);
24371 }
24372
24373 /* Lookup FILE_NAME (in the list of filenames that we know about here in
24374 dwarf2out.c) and return its "index". The index of each (known) filename is
24375 just a unique number which is associated with only that one filename. We
24376 need such numbers for the sake of generating labels (in the .debug_sfnames
24377 section) and references to those file numbers (in the .debug_srcinfo
24378 and .debug_macinfo sections). If the filename given as an argument is not
24379 found in our current list, add it to the list and assign it the next
24380 available unique index number. */
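/* Typical use, as in dwarf2out_imported_module_or_decl_1 above:
add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file)); */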
24381
24382 static struct dwarf_file_data *
24383 lookup_filename (const char *file_name)
24384 {
24385 struct dwarf_file_data * created;
24386
24387 if (!file_name)
24388 return NULL;
24389
24390 dwarf_file_data **slot
24391 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
24392 INSERT);
24393 if (*slot)
24394 return *slot;
24395
24396 created = ggc_alloc<dwarf_file_data> ();
24397 created->filename = file_name;
24398 created->emitted_number = 0;
24399 *slot = created;
24400 return created;
24401 }
24402
24403 /* If the assembler will construct the file table, then translate the compiler
24404 internal file table number into the assembler file table number, and emit
24405 a .file directive if we haven't emitted one yet. The file table
24406 numbers are different because we prune debug info for unused variables and
24407 types, which may include filenames. */
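/* Sketch of the effect (file name purely illustrative): the first call for
a given file emits a directive along the lines of
.file 1 "foo.c"
and returns 1; subsequent .loc directives then refer to that assembler
file number. */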
24408
24409 static int
24410 maybe_emit_file (struct dwarf_file_data * fd)
24411 {
24412 if (! fd->emitted_number)
24413 {
24414 if (last_emitted_file)
24415 fd->emitted_number = last_emitted_file->emitted_number + 1;
24416 else
24417 fd->emitted_number = 1;
24418 last_emitted_file = fd;
24419
24420 if (DWARF2_ASM_LINE_DEBUG_INFO)
24421 {
24422 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
24423 output_quoted_string (asm_out_file,
24424 remap_debug_filename (fd->filename));
24425 fputc ('\n', asm_out_file);
24426 }
24427 }
24428
24429 return fd->emitted_number;
24430 }
24431
24432 /* Schedule generation of a DW_AT_const_value attribute to DIE.
24433 That generation should happen after function debug info has been
24434 generated. The value of the attribute is the constant value of ARG. */
24435
24436 static void
24437 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
24438 {
24439 die_arg_entry entry;
24440
24441 if (!die || !arg)
24442 return;
24443
24444 gcc_assert (early_dwarf);
24445
24446 if (!tmpl_value_parm_die_table)
24447 vec_alloc (tmpl_value_parm_die_table, 32);
24448
24449 entry.die = die;
24450 entry.arg = arg;
24451 vec_safe_push (tmpl_value_parm_die_table, entry);
24452 }
24453
24454 /* Return TRUE if T is an instance of generic type, FALSE
24455 otherwise. */
24456
24457 static bool
24458 generic_type_p (tree t)
24459 {
24460 if (t == NULL_TREE || !TYPE_P (t))
24461 return false;
24462 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
24463 }
24464
24465 /* Schedule the generation of the generic parameter dies for the
24466 instance of generic type T. The proper generation itself is later
24467 done by gen_scheduled_generic_parms_dies. */
24468
24469 static void
24470 schedule_generic_params_dies_gen (tree t)
24471 {
24472 if (!generic_type_p (t))
24473 return;
24474
24475 gcc_assert (early_dwarf);
24476
24477 if (!generic_type_instances)
24478 vec_alloc (generic_type_instances, 256);
24479
24480 vec_safe_push (generic_type_instances, t);
24481 }
24482
24483 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
24484 by append_entry_to_tmpl_value_parm_die_table. This function must
24485 be called after function DIEs have been generated. */
24486
24487 static void
24488 gen_remaining_tmpl_value_param_die_attribute (void)
24489 {
24490 if (tmpl_value_parm_die_table)
24491 {
24492 unsigned i, j;
24493 die_arg_entry *e;
24494
24495 /* We do this in two phases - first get the cases we can
24496 handle during early-finish, preserving those we cannot
24497 (containing symbolic constants where we don't yet know
24498 whether we are going to output the referenced symbols).
24499 For those we try again at late-finish. */
24500 j = 0;
24501 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
24502 {
24503 if (!tree_add_const_value_attribute (e->die, e->arg))
24504 {
24505 dw_loc_descr_ref loc = NULL;
24506 if (! early_dwarf
24507 && (dwarf_version >= 5 || !dwarf_strict))
24508 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
24509 if (loc)
24510 add_AT_loc (e->die, DW_AT_location, loc);
24511 else
24512 (*tmpl_value_parm_die_table)[j++] = *e;
24513 }
24514 }
24515 tmpl_value_parm_die_table->truncate (j);
24516 }
24517 }
24518
24519 /* Generate generic parameters DIEs for instances of generic types
24520 that have been previously scheduled by
24521 schedule_generic_params_dies_gen. This function must be called
24522 after all the types of the CU have been laid out. */
24523
24524 static void
24525 gen_scheduled_generic_parms_dies (void)
24526 {
24527 unsigned i;
24528 tree t;
24529
24530 if (!generic_type_instances)
24531 return;
24532
24533 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
24534 if (COMPLETE_TYPE_P (t))
24535 gen_generic_params_dies (t);
24536
24537 generic_type_instances = NULL;
24538 }
24539
24540
24541 /* Replace DW_AT_name for the decl with name. */
24542
24543 static void
24544 dwarf2out_set_name (tree decl, tree name)
24545 {
24546 dw_die_ref die;
24547 dw_attr_node *attr;
24548 const char *dname;
24549
24550 die = TYPE_SYMTAB_DIE (decl);
24551 if (!die)
24552 return;
24553
24554 dname = dwarf2_name (name, 0);
24555 if (!dname)
24556 return;
24557
24558 attr = get_AT (die, DW_AT_name);
24559 if (attr)
24560 {
24561 struct indirect_string_node *node;
24562
24563 node = find_AT_string (dname);
24564 /* Replace the string. */
24565 attr->dw_attr_val.v.val_str = node;
24566 }
24567
24568 else
24569 add_name_attribute (die, dname);
24570 }
24571
24572 /* True if before or during processing of the first function being emitted. */
24573 static bool in_first_function_p = true;
24574 /* True if loc_note during dwarf2out_var_location call might still be
24575 before first real instruction at address equal to .Ltext0. */
24576 static bool maybe_at_text_label_p = true;
24577 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
24578 static unsigned int first_loclabel_num_not_at_text_label;
24579
24580 /* Called by the final INSN scan whenever we see a var location. We
24581 use it to drop labels in the right places, and throw the location in
24582 our lookup table. */
24583
24584 static void
24585 dwarf2out_var_location (rtx_insn *loc_note)
24586 {
24587 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
24588 struct var_loc_node *newloc;
24589 rtx_insn *next_real, *next_note;
24590 rtx_insn *call_insn = NULL;
24591 static const char *last_label;
24592 static const char *last_postcall_label;
24593 static bool last_in_cold_section_p;
24594 static rtx_insn *expected_next_loc_note;
24595 tree decl;
24596 bool var_loc_p;
24597
24598 if (!NOTE_P (loc_note))
24599 {
24600 if (CALL_P (loc_note))
24601 {
24602 call_site_count++;
24603 if (SIBLING_CALL_P (loc_note))
24604 tail_call_site_count++;
24605 if (optimize == 0 && !flag_var_tracking)
24606 {
24607 /* When the var-tracking pass is not running, there is no note
24608 for indirect calls whose target is compile-time known. In this
24609 case, process such calls specifically so that we generate call
24610 sites for them anyway. */
24611 rtx x = PATTERN (loc_note);
24612 if (GET_CODE (x) == PARALLEL)
24613 x = XVECEXP (x, 0, 0);
24614 if (GET_CODE (x) == SET)
24615 x = SET_SRC (x);
24616 if (GET_CODE (x) == CALL)
24617 x = XEXP (x, 0);
24618 if (!MEM_P (x)
24619 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
24620 || !SYMBOL_REF_DECL (XEXP (x, 0))
24621 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
24622 != FUNCTION_DECL))
24623 {
24624 call_insn = loc_note;
24625 loc_note = NULL;
24626 var_loc_p = false;
24627
24628 next_real = next_real_insn (call_insn);
24629 next_note = NULL;
24630 cached_next_real_insn = NULL;
24631 goto create_label;
24632 }
24633 }
24634 }
24635 return;
24636 }
24637
24638 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
24639 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
24640 return;
24641
24642 /* Optimize processing a large consecutive sequence of location
24643 notes so we don't spend too much time in next_real_insn. If the
24644 next insn is another location note, remember the next_real_insn
24645 calculation for next time. */
24646 next_real = cached_next_real_insn;
24647 if (next_real)
24648 {
24649 if (expected_next_loc_note != loc_note)
24650 next_real = NULL;
24651 }
24652
24653 next_note = NEXT_INSN (loc_note);
24654 if (! next_note
24655 || next_note->deleted ()
24656 || ! NOTE_P (next_note)
24657 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
24658 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
24659 next_note = NULL;
24660
24661 if (! next_real)
24662 next_real = next_real_insn (loc_note);
24663
24664 if (next_note)
24665 {
24666 expected_next_loc_note = next_note;
24667 cached_next_real_insn = next_real;
24668 }
24669 else
24670 cached_next_real_insn = NULL;
24671
24672 /* If there are no instructions which would be affected by this note,
24673 don't do anything. */
24674 if (var_loc_p
24675 && next_real == NULL_RTX
24676 && !NOTE_DURING_CALL_P (loc_note))
24677 return;
24678
24679 create_label:
24680
24681 if (next_real == NULL_RTX)
24682 next_real = get_last_insn ();
24683
24684 /* If there were any real insns between the note we processed last time
24685 and this note (or if it is the first note), clear
24686 last_{,postcall_}label so that they are not reused this time. */
24687 if (last_var_location_insn == NULL_RTX
24688 || last_var_location_insn != next_real
24689 || last_in_cold_section_p != in_cold_section_p)
24690 {
24691 last_label = NULL;
24692 last_postcall_label = NULL;
24693 }
24694
24695 if (var_loc_p)
24696 {
24697 decl = NOTE_VAR_LOCATION_DECL (loc_note);
24698 newloc = add_var_loc_to_decl (decl, loc_note,
24699 NOTE_DURING_CALL_P (loc_note)
24700 ? last_postcall_label : last_label);
24701 if (newloc == NULL)
24702 return;
24703 }
24704 else
24705 {
24706 decl = NULL_TREE;
24707 newloc = NULL;
24708 }
24709
24710 /* If there were no real insns between the note we processed last time
24711 and this note, use the label we emitted last time. Otherwise
24712 create a new label and emit it. */
24713 if (last_label == NULL)
24714 {
24715 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
24716 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
24717 loclabel_num++;
24718 last_label = ggc_strdup (loclabel);
24719 /* See if loclabel might be equal to .Ltext0. If yes,
24720 bump first_loclabel_num_not_at_text_label. */
24721 if (!have_multiple_function_sections
24722 && in_first_function_p
24723 && maybe_at_text_label_p)
24724 {
24725 static rtx_insn *last_start;
24726 rtx_insn *insn;
24727 for (insn = loc_note; insn; insn = previous_insn (insn))
24728 if (insn == last_start)
24729 break;
24730 else if (!NONDEBUG_INSN_P (insn))
24731 continue;
24732 else
24733 {
24734 rtx body = PATTERN (insn);
24735 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
24736 continue;
24737 /* Inline asm could occupy zero bytes. */
24738 else if (GET_CODE (body) == ASM_INPUT
24739 || asm_noperands (body) >= 0)
24740 continue;
24741 #ifdef HAVE_attr_length
24742 else if (get_attr_min_length (insn) == 0)
24743 continue;
24744 #endif
24745 else
24746 {
24747 /* Assume insn has non-zero length. */
24748 maybe_at_text_label_p = false;
24749 break;
24750 }
24751 }
24752 if (maybe_at_text_label_p)
24753 {
24754 last_start = loc_note;
24755 first_loclabel_num_not_at_text_label = loclabel_num;
24756 }
24757 }
24758 }
24759
24760 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
24761 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
24762
24763 if (!var_loc_p)
24764 {
24765 struct call_arg_loc_node *ca_loc
24766 = ggc_cleared_alloc<call_arg_loc_node> ();
24767 rtx_insn *prev
24768 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
24769
24770 ca_loc->call_arg_loc_note = loc_note;
24771 ca_loc->next = NULL;
24772 ca_loc->label = last_label;
24773 gcc_assert (prev
24774 && (CALL_P (prev)
24775 || (NONJUMP_INSN_P (prev)
24776 && GET_CODE (PATTERN (prev)) == SEQUENCE
24777 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
24778 if (!CALL_P (prev))
24779 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
24780 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
24781
24782 /* Look for a SYMBOL_REF in the "prev" instruction. */
24783 rtx x = get_call_rtx_from (PATTERN (prev));
24784 if (x)
24785 {
24786 /* Try to get the call symbol, if any. */
24787 if (MEM_P (XEXP (x, 0)))
24788 x = XEXP (x, 0);
24789 /* First, look for a memory access to a symbol_ref. */
24790 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
24791 && SYMBOL_REF_DECL (XEXP (x, 0))
24792 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
24793 ca_loc->symbol_ref = XEXP (x, 0);
24794 /* Otherwise, look at a compile-time known user-level function
24795 declaration. */
24796 else if (MEM_P (x)
24797 && MEM_EXPR (x)
24798 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
24799 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
24800 }
24801
24802 ca_loc->block = insn_scope (prev);
24803 if (call_arg_locations)
24804 call_arg_loc_last->next = ca_loc;
24805 else
24806 call_arg_locations = ca_loc;
24807 call_arg_loc_last = ca_loc;
24808 }
24809 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
24810 newloc->label = last_label;
24811 else
24812 {
24813 if (!last_postcall_label)
24814 {
24815 sprintf (loclabel, "%s-1", last_label);
24816 last_postcall_label = ggc_strdup (loclabel);
24817 }
24818 newloc->label = last_postcall_label;
24819 }
24820
24821 last_var_location_insn = next_real;
24822 last_in_cold_section_p = in_cold_section_p;
24823 }
24824
24825 /* Called from finalize_size_functions for size functions so that their body
24826 can be encoded in the debug info to describe the layout of variable-length
24827 structures. */
24828
24829 static void
24830 dwarf2out_size_function (tree decl)
24831 {
24832 function_to_dwarf_procedure (decl);
24833 }
24834
24835 /* Note in one location list that the text section has changed. */
24836
24837 int
24838 var_location_switch_text_section_1 (var_loc_list **slot, void *)
24839 {
24840 var_loc_list *list = *slot;
24841 if (list->first)
24842 list->last_before_switch
24843 = list->last->next ? list->last->next : list->last;
24844 return 1;
24845 }
24846
24847 /* Note in all location lists that the text section has changed. */
24848
24849 static void
24850 var_location_switch_text_section (void)
24851 {
24852 if (decl_loc_table == NULL)
24853 return;
24854
24855 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
24856 }
24857
24858 /* Create a new line number table. */
24859
24860 static dw_line_info_table *
24861 new_line_info_table (void)
24862 {
24863 dw_line_info_table *table;
24864
24865 table = ggc_cleared_alloc<dw_line_info_table> ();
24866 table->file_num = 1;
24867 table->line_num = 1;
24868 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
24869
24870 return table;
24871 }
24872
24873 /* Look up the "current" table into which we emit line info, so
24874 that we don't have to do it for every source line. */
24875
24876 static void
24877 set_cur_line_info_table (section *sec)
24878 {
24879 dw_line_info_table *table;
24880
24881 if (sec == text_section)
24882 table = text_section_line_info;
24883 else if (sec == cold_text_section)
24884 {
24885 table = cold_text_section_line_info;
24886 if (!table)
24887 {
24888 cold_text_section_line_info = table = new_line_info_table ();
24889 table->end_label = cold_end_label;
24890 }
24891 }
24892 else
24893 {
24894 const char *end_label;
24895
24896 if (flag_reorder_blocks_and_partition)
24897 {
24898 if (in_cold_section_p)
24899 end_label = crtl->subsections.cold_section_end_label;
24900 else
24901 end_label = crtl->subsections.hot_section_end_label;
24902 }
24903 else
24904 {
24905 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24906 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
24907 current_function_funcdef_no);
24908 end_label = ggc_strdup (label);
24909 }
24910
24911 table = new_line_info_table ();
24912 table->end_label = end_label;
24913
24914 vec_safe_push (separate_line_info, table);
24915 }
24916
24917 if (DWARF2_ASM_LINE_DEBUG_INFO)
24918 table->is_stmt = (cur_line_info_table
24919 ? cur_line_info_table->is_stmt
24920 : DWARF_LINE_DEFAULT_IS_STMT_START);
24921 cur_line_info_table = table;
24922 }
24923
24924
24925 /* We need to reset the locations at the beginning of each
24926 function. We can't do this in the end_function hook, because the
24927 declarations that use the locations won't have been output when
24928 that hook is called. Also compute have_multiple_function_sections here. */
24929
24930 static void
24931 dwarf2out_begin_function (tree fun)
24932 {
24933 section *sec = function_section (fun);
24934
24935 if (sec != text_section)
24936 have_multiple_function_sections = true;
24937
24938 if (flag_reorder_blocks_and_partition && !cold_text_section)
24939 {
24940 gcc_assert (current_function_decl == fun);
24941 cold_text_section = unlikely_text_section ();
24942 switch_to_section (cold_text_section);
24943 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
24944 switch_to_section (sec);
24945 }
24946
24947 dwarf2out_note_section_used ();
24948 call_site_count = 0;
24949 tail_call_site_count = 0;
24950
24951 set_cur_line_info_table (sec);
24952 }
24953
24954 /* Helper function of dwarf2out_end_function, called only after emitting
24955 the very first function into assembly. Check if some .debug_loc range
24956 might end with a .LVL* label that could be equal to .Ltext0.
24957 In that case we must force using absolute addresses in .debug_loc ranges,
24958 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
24959 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
24960 list terminator.
24961 Set have_multiple_function_sections to true in that case and
24962 terminate htab traversal. */
24963
24964 int
24965 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
24966 {
24967 var_loc_list *entry = *slot;
24968 struct var_loc_node *node;
24969
24970 node = entry->first;
24971 if (node && node->next && node->next->label)
24972 {
24973 unsigned int i;
24974 const char *label = node->next->label;
24975 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
24976
24977 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
24978 {
24979 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
24980 if (strcmp (label, loclabel) == 0)
24981 {
24982 have_multiple_function_sections = true;
24983 return 0;
24984 }
24985 }
24986 }
24987 return 1;
24988 }
24989
24990 /* Hook called after emitting a function into assembly.
24991 This does something only for the very first function emitted. */
24992
24993 static void
24994 dwarf2out_end_function (unsigned int)
24995 {
24996 if (in_first_function_p
24997 && !have_multiple_function_sections
24998 && first_loclabel_num_not_at_text_label
24999 && decl_loc_table)
25000 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
25001 in_first_function_p = false;
25002 maybe_at_text_label_p = false;
25003 }
25004
25005 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
25006 front-ends register a translation unit even before dwarf2out_init is
25007 called. */
25008 static tree main_translation_unit = NULL_TREE;
25009
25010 /* Hook called by front-ends after they built their main translation unit.
25011 Associate comp_unit_die to UNIT. */
25012
25013 static void
25014 dwarf2out_register_main_translation_unit (tree unit)
25015 {
25016 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
25017 && main_translation_unit == NULL_TREE);
25018 main_translation_unit = unit;
25019 /* If dwarf2out_init has not been called yet, it will perform the association
25020 itself looking at main_translation_unit. */
25021 if (decl_die_table != NULL)
25022 equate_decl_number_to_die (unit, comp_unit_die ());
25023 }
25024
25025 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
25026
25027 static void
25028 push_dw_line_info_entry (dw_line_info_table *table,
25029 enum dw_line_info_opcode opcode, unsigned int val)
25030 {
25031 dw_line_info_entry e;
25032 e.opcode = opcode;
25033 e.val = val;
25034 vec_safe_push (table->entries, e);
25035 }
25036
25037 /* Output a label to mark the beginning of a source code line entry
25038 and record information relating to this source line, in
25039 'line_info_table' for later output of the .debug_line section. */
25040 /* ??? The discriminator parameter ought to be unsigned. */
25041
25042 static void
25043 dwarf2out_source_line (unsigned int line, const char *filename,
25044 int discriminator, bool is_stmt)
25045 {
25046 unsigned int file_num;
25047 dw_line_info_table *table;
25048
25049 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
25050 return;
25051
25052 /* The discriminator column was added in DWARF 4. Simplify the code below
25053 by clearing the discriminator if we're not supposed to output it. */
25054 if (dwarf_version < 4 && dwarf_strict)
25055 discriminator = 0;
25056
25057 table = cur_line_info_table;
25058 file_num = maybe_emit_file (lookup_filename (filename));
25059
25060 /* ??? TODO: Elide duplicate line number entries. Traditionally,
25061 the debugger has used the second (possibly duplicate) line number
25062 at the beginning of the function to mark the end of the prologue.
25063 We could eliminate any other duplicates within the function. For
25064 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
25065 that second line number entry. */
25066 /* Recall that this end-of-prologue indication is *not* the same thing
25067 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
25068 to which the hook corresponds, follows the last insn that was
25069 emitted by gen_prologue. What we need is to precede the first insn
25070 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
25071 insn that corresponds to something the user wrote. These may be
25072 very different locations once scheduling is enabled. */
25073
25074 if (0 && file_num == table->file_num
25075 && line == table->line_num
25076 && discriminator == table->discrim_num
25077 && is_stmt == table->is_stmt)
25078 return;
25079
25080 switch_to_section (current_function_section ());
25081
25082 /* If requested, emit something human-readable. */
25083 if (flag_debug_asm)
25084 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START, filename, line);
25085
25086 if (DWARF2_ASM_LINE_DEBUG_INFO)
25087 {
25088 /* Emit the .loc directive understood by GNU as. */
25089 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
25090 file_num, line, is_stmt, discriminator */
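/* For example (values purely illustrative), file 1, line 42 with is_stmt
toggled off and discriminator 3 would come out as
.loc 1 42 0 is_stmt 0 discriminator 3
where the is_stmt and discriminator parts are only printed when needed,
as below. */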
25091 fputs ("\t.loc ", asm_out_file);
25092 fprint_ul (asm_out_file, file_num);
25093 putc (' ', asm_out_file);
25094 fprint_ul (asm_out_file, line);
25095 putc (' ', asm_out_file);
25096 putc ('0', asm_out_file);
25097
25098 if (is_stmt != table->is_stmt)
25099 {
25100 fputs (" is_stmt ", asm_out_file);
25101 putc (is_stmt ? '1' : '0', asm_out_file);
25102 }
25103 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
25104 {
25105 gcc_assert (discriminator > 0);
25106 fputs (" discriminator ", asm_out_file);
25107 fprint_ul (asm_out_file, (unsigned long) discriminator);
25108 }
25109 putc ('\n', asm_out_file);
25110 }
25111 else
25112 {
25113 unsigned int label_num = ++line_info_label_num;
25114
25115 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
25116
25117 push_dw_line_info_entry (table, LI_set_address, label_num);
25118 if (file_num != table->file_num)
25119 push_dw_line_info_entry (table, LI_set_file, file_num);
25120 if (discriminator != table->discrim_num)
25121 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
25122 if (is_stmt != table->is_stmt)
25123 push_dw_line_info_entry (table, LI_negate_stmt, 0);
25124 push_dw_line_info_entry (table, LI_set_line, line);
25125 }
25126
25127 table->file_num = file_num;
25128 table->line_num = line;
25129 table->discrim_num = discriminator;
25130 table->is_stmt = is_stmt;
25131 table->in_use = true;
25132 }
25133
25134 /* Record the beginning of a new source file. */
25135
25136 static void
25137 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
25138 {
25139 if (flag_eliminate_dwarf2_dups)
25140 {
25141 /* Record the beginning of the file for break_out_includes. */
25142 dw_die_ref bincl_die;
25143
25144 bincl_die = new_die (DW_TAG_GNU_BINCL, comp_unit_die (), NULL);
25145 add_AT_string (bincl_die, DW_AT_name, remap_debug_filename (filename));
25146 }
25147
25148 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25149 {
25150 macinfo_entry e;
25151 e.code = DW_MACINFO_start_file;
25152 e.lineno = lineno;
25153 e.info = ggc_strdup (filename);
25154 vec_safe_push (macinfo_table, e);
25155 }
25156 }
25157
25158 /* Record the end of a source file. */
25159
25160 static void
25161 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
25162 {
25163 if (flag_eliminate_dwarf2_dups)
25164 /* Record the end of the file for break_out_includes. */
25165 new_die (DW_TAG_GNU_EINCL, comp_unit_die (), NULL);
25166
25167 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25168 {
25169 macinfo_entry e;
25170 e.code = DW_MACINFO_end_file;
25171 e.lineno = lineno;
25172 e.info = NULL;
25173 vec_safe_push (macinfo_table, e);
25174 }
25175 }
25176
25177 /* Called from debug_define in toplev.c. The `buffer' parameter contains
25178 the tail part of the directive line, i.e. the part which is past the
25179 initial whitespace, #, whitespace, directive-name, whitespace part. */
25180
25181 static void
25182 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
25183 const char *buffer ATTRIBUTE_UNUSED)
25184 {
25185 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25186 {
25187 macinfo_entry e;
25188 /* Insert a dummy first entry to be able to optimize the whole
25189 predefined macro block using DW_MACRO_GNU_transparent_include. */
25190 if (macinfo_table->is_empty () && lineno <= 1)
25191 {
25192 e.code = 0;
25193 e.lineno = 0;
25194 e.info = NULL;
25195 vec_safe_push (macinfo_table, e);
25196 }
25197 e.code = DW_MACINFO_define;
25198 e.lineno = lineno;
25199 e.info = ggc_strdup (buffer);
25200 vec_safe_push (macinfo_table, e);
25201 }
25202 }
25203
25204 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
25205 the tail part of the directive line, i.e. the part which is past the
25206 initial whitespace, #, whitespace, directive-name, whitespace part. */
25207
25208 static void
25209 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
25210 const char *buffer ATTRIBUTE_UNUSED)
25211 {
25212 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25213 {
25214 macinfo_entry e;
25215 /* Insert a dummy first entry to be able to optimize the whole
25216 predefined macro block using DW_MACRO_GNU_transparent_include. */
25217 if (macinfo_table->is_empty () && lineno <= 1)
25218 {
25219 e.code = 0;
25220 e.lineno = 0;
25221 e.info = NULL;
25222 vec_safe_push (macinfo_table, e);
25223 }
25224 e.code = DW_MACINFO_undef;
25225 e.lineno = lineno;
25226 e.info = ggc_strdup (buffer);
25227 vec_safe_push (macinfo_table, e);
25228 }
25229 }
25230
25231 /* Helpers to manipulate hash table of CUs. */
25232
25233 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
25234 {
25235 static inline hashval_t hash (const macinfo_entry *);
25236 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
25237 };
25238
25239 inline hashval_t
25240 macinfo_entry_hasher::hash (const macinfo_entry *entry)
25241 {
25242 return htab_hash_string (entry->info);
25243 }
25244
25245 inline bool
25246 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
25247 const macinfo_entry *entry2)
25248 {
25249 return !strcmp (entry1->info, entry2->info);
25250 }
25251
25252 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
25253
25254 /* Output a single .debug_macinfo entry. */
25255
25256 static void
25257 output_macinfo_op (macinfo_entry *ref)
25258 {
25259 int file_num;
25260 size_t len;
25261 struct indirect_string_node *node;
25262 char label[MAX_ARTIFICIAL_LABEL_BYTES];
25263 struct dwarf_file_data *fd;
25264
25265 switch (ref->code)
25266 {
25267 case DW_MACINFO_start_file:
25268 fd = lookup_filename (ref->info);
25269 file_num = maybe_emit_file (fd);
25270 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
25271 dw2_asm_output_data_uleb128 (ref->lineno,
25272 "Included from line number %lu",
25273 (unsigned long) ref->lineno);
25274 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
25275 break;
25276 case DW_MACINFO_end_file:
25277 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
25278 break;
25279 case DW_MACINFO_define:
25280 case DW_MACINFO_undef:
25281 len = strlen (ref->info) + 1;
25282 if (!dwarf_strict
25283 && len > DWARF_OFFSET_SIZE
25284 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
25285 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
25286 {
25287 ref->code = ref->code == DW_MACINFO_define
25288 ? DW_MACRO_GNU_define_indirect
25289 : DW_MACRO_GNU_undef_indirect;
25290 output_macinfo_op (ref);
25291 return;
25292 }
25293 dw2_asm_output_data (1, ref->code,
25294 ref->code == DW_MACINFO_define
25295 ? "Define macro" : "Undefine macro");
25296 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
25297 (unsigned long) ref->lineno);
25298 dw2_asm_output_nstring (ref->info, -1, "The macro");
25299 break;
25300 case DW_MACRO_GNU_define_indirect:
25301 case DW_MACRO_GNU_undef_indirect:
25302 node = find_AT_string (ref->info);
25303 gcc_assert (node
25304 && ((node->form == DW_FORM_strp)
25305 || (node->form == DW_FORM_GNU_str_index)));
25306 dw2_asm_output_data (1, ref->code,
25307 ref->code == DW_MACRO_GNU_define_indirect
25308 ? "Define macro indirect"
25309 : "Undefine macro indirect");
25310 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
25311 (unsigned long) ref->lineno);
25312 if (node->form == DW_FORM_strp)
25313 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
25314 debug_str_section, "The macro: \"%s\"",
25315 ref->info);
25316 else
25317 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
25318 ref->info);
25319 break;
25320 case DW_MACRO_GNU_transparent_include:
25321 dw2_asm_output_data (1, ref->code, "Transparent include");
25322 ASM_GENERATE_INTERNAL_LABEL (label,
25323 DEBUG_MACRO_SECTION_LABEL, ref->lineno);
25324 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
25325 break;
25326 default:
25327 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
25328 ASM_COMMENT_START, (unsigned long) ref->code);
25329 break;
25330 }
25331 }
25332
25333 /* Attempt to make a sequence of define/undef macinfo ops shareable with
25334 other compilation units' .debug_macinfo sections. IDX is the index of the
25335 first define/undef op; return the number of ops that should be
25336 emitted in a comdat .debug_macinfo section and emit
25337 a DW_MACRO_GNU_transparent_include entry referencing it.
25338 If the define/undef entry should be emitted normally, return 0. */
25339
25340 static unsigned
25341 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
25342 macinfo_hash_type **macinfo_htab)
25343 {
25344 macinfo_entry *first, *second, *cur, *inc;
25345 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
25346 unsigned char checksum[16];
25347 struct md5_ctx ctx;
25348 char *grp_name, *tail;
25349 const char *base;
25350 unsigned int i, count, encoded_filename_len, linebuf_len;
25351 macinfo_entry **slot;
25352
25353 first = &(*macinfo_table)[idx];
25354 second = &(*macinfo_table)[idx + 1];
25355
25356 /* Optimize only if there are at least two consecutive define/undef ops,
25357 and either all of them are before the first DW_MACINFO_start_file
25358 with lineno {0,1} (i.e. predefined macro block), or all of them are
25359 in some included header file. */
25360 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
25361 return 0;
25362 if (vec_safe_is_empty (files))
25363 {
25364 if (first->lineno > 1 || second->lineno > 1)
25365 return 0;
25366 }
25367 else if (first->lineno == 0)
25368 return 0;
25369
25370 /* Find the last define/undef entry that can be grouped together
25371 with first and at the same time compute md5 checksum of their
25372 codes, linenumbers and strings. */
25373 md5_init_ctx (&ctx);
25374 for (i = idx; macinfo_table->iterate (i, &cur); i++)
25375 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
25376 break;
25377 else if (vec_safe_is_empty (files) && cur->lineno > 1)
25378 break;
25379 else
25380 {
25381 unsigned char code = cur->code;
25382 md5_process_bytes (&code, 1, &ctx);
25383 checksum_uleb128 (cur->lineno, &ctx);
25384 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
25385 }
25386 md5_finish_ctx (&ctx, checksum);
25387 count = i - idx;
25388
25389 /* From the containing include filename (if any) pick up just
25390 usable characters from its basename. */
25391 if (vec_safe_is_empty (files))
25392 base = "";
25393 else
25394 base = lbasename (files->last ().info);
25395 for (encoded_filename_len = 0, i = 0; base[i]; i++)
25396 if (ISIDNUM (base[i]) || base[i] == '.')
25397 encoded_filename_len++;
25398 /* Count . at the end. */
25399 if (encoded_filename_len)
25400 encoded_filename_len++;
25401
25402 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
25403 linebuf_len = strlen (linebuf);
25404
25405 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
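/* E.g. (purely illustrative), with a 4-byte DWARF offset size a run of
defines starting at line 3 of a header whose basename is "foo.h" would be
named "wm4.foo.h.3." followed by the 32 hex digits of the md5 checksum. */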
25406 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
25407 + 16 * 2 + 1);
25408 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
25409 tail = grp_name + 4;
25410 if (encoded_filename_len)
25411 {
25412 for (i = 0; base[i]; i++)
25413 if (ISIDNUM (base[i]) || base[i] == '.')
25414 *tail++ = base[i];
25415 *tail++ = '.';
25416 }
25417 memcpy (tail, linebuf, linebuf_len);
25418 tail += linebuf_len;
25419 *tail++ = '.';
25420 for (i = 0; i < 16; i++)
25421 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
25422
25423 /* Construct a macinfo_entry for DW_MACRO_GNU_transparent_include
25424 in the empty vector entry before the first define/undef. */
25425 inc = &(*macinfo_table)[idx - 1];
25426 inc->code = DW_MACRO_GNU_transparent_include;
25427 inc->lineno = 0;
25428 inc->info = ggc_strdup (grp_name);
25429 if (!*macinfo_htab)
25430 *macinfo_htab = new macinfo_hash_type (10);
25431 /* Avoid emitting duplicates. */
25432 slot = (*macinfo_htab)->find_slot (inc, INSERT);
25433 if (*slot != NULL)
25434 {
25435 inc->code = 0;
25436 inc->info = NULL;
25437 /* If such an entry has been used before, just emit
25438 a DW_MACRO_GNU_transparent_include op. */
25439 inc = *slot;
25440 output_macinfo_op (inc);
25441 /* And clear all macinfo_entry in the range to avoid emitting them
25442 in the second pass. */
25443 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
25444 {
25445 cur->code = 0;
25446 cur->info = NULL;
25447 }
25448 }
25449 else
25450 {
25451 *slot = inc;
25452 inc->lineno = (*macinfo_htab)->elements ();
25453 output_macinfo_op (inc);
25454 }
25455 return count;
25456 }
25457
25458 /* Save any strings needed by the macinfo table in the debug str
25459 table. All strings must be collected into the table by the time
25460 index_string is called. */
25461
25462 static void
25463 save_macinfo_strings (void)
25464 {
25465 unsigned len;
25466 unsigned i;
25467 macinfo_entry *ref;
25468
25469 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
25470 {
25471 switch (ref->code)
25472 {
25473 /* Match the logic in output_macinfo_op to decide on
25474 indirect strings. */
25475 case DW_MACINFO_define:
25476 case DW_MACINFO_undef:
25477 len = strlen (ref->info) + 1;
25478 if (!dwarf_strict
25479 && len > DWARF_OFFSET_SIZE
25480 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
25481 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
25482 set_indirect_string (find_AT_string (ref->info));
25483 break;
25484 case DW_MACRO_GNU_define_indirect:
25485 case DW_MACRO_GNU_undef_indirect:
25486 set_indirect_string (find_AT_string (ref->info));
25487 break;
25488 default:
25489 break;
25490 }
25491 }
25492 }
25493
25494 /* Output macinfo section(s). */
25495
25496 static void
25497 output_macinfo (void)
25498 {
25499 unsigned i;
25500 unsigned long length = vec_safe_length (macinfo_table);
25501 macinfo_entry *ref;
25502 vec<macinfo_entry, va_gc> *files = NULL;
25503 macinfo_hash_type *macinfo_htab = NULL;
25504
25505 if (! length)
25506 return;
25507
25508 /* output_macinfo* uses these interchangeably. */
25509 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_GNU_define
25510 && (int) DW_MACINFO_undef == (int) DW_MACRO_GNU_undef
25511 && (int) DW_MACINFO_start_file == (int) DW_MACRO_GNU_start_file
25512 && (int) DW_MACINFO_end_file == (int) DW_MACRO_GNU_end_file);
25513
25514 /* For .debug_macro emit the section header. */
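/* The flags byte emitted below follows the GNU .debug_macro layout (later
standardized in DWARF 5): bit 0 selects a 64-bit offset size and bit 1
says that a line-table offset (the lineptr) follows, hence the values
3 and 2. */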
25515 if (!dwarf_strict)
25516 {
25517 dw2_asm_output_data (2, 4, "DWARF macro version number");
25518 if (DWARF_OFFSET_SIZE == 8)
25519 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
25520 else
25521 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
25522 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
25523 (!dwarf_split_debug_info ? debug_line_section_label
25524 : debug_skeleton_line_section_label),
25525 debug_line_section, NULL);
25526 }
25527
25528 /* The first loop emits the primary .debug_macinfo section; after each
25529 emitted op the macinfo_entry is cleared. If a longer range of
25530 define/undef ops can be optimized using
25531 DW_MACRO_GNU_transparent_include, the DW_MACRO_GNU_transparent_include
25532 op is emitted and kept in the vector entry just before the first
25533 define/undef in the range, while that whole range of define/undef ops is
25534 not emitted here but kept for the second pass below. */
25535 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25536 {
25537 switch (ref->code)
25538 {
25539 case DW_MACINFO_start_file:
25540 vec_safe_push (files, *ref);
25541 break;
25542 case DW_MACINFO_end_file:
25543 if (!vec_safe_is_empty (files))
25544 files->pop ();
25545 break;
25546 case DW_MACINFO_define:
25547 case DW_MACINFO_undef:
25548 if (!dwarf_strict
25549 && HAVE_COMDAT_GROUP
25550 && vec_safe_length (files) != 1
25551 && i > 0
25552 && i + 1 < length
25553 && (*macinfo_table)[i - 1].code == 0)
25554 {
25555 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
25556 if (count)
25557 {
25558 i += count - 1;
25559 continue;
25560 }
25561 }
25562 break;
25563 case 0:
25564 /* A dummy entry may be inserted at the beginning to be able
25565 to optimize the whole block of predefined macros. */
25566 if (i == 0)
25567 continue;
25568 default:
25569 break;
25570 }
25571 output_macinfo_op (ref);
25572 ref->info = NULL;
25573 ref->code = 0;
25574 }
25575
25576 if (!macinfo_htab)
25577 return;
25578
25579 delete macinfo_htab;
25580 macinfo_htab = NULL;
25581
25582 /* If any DW_MACRO_GNU_transparent_include entries were used, terminate
25583 the current chain at each of them, switch to a new comdat
25584 .debug_macinfo section, and emit the corresponding define/undef
25585 entries within it. */
25586 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25587 switch (ref->code)
25588 {
25589 case 0:
25590 continue;
25591 case DW_MACRO_GNU_transparent_include:
25592 {
25593 char label[MAX_ARTIFICIAL_LABEL_BYTES];
25594 tree comdat_key = get_identifier (ref->info);
25595 /* Terminate the previous .debug_macinfo section. */
25596 dw2_asm_output_data (1, 0, "End compilation unit");
25597 targetm.asm_out.named_section (debug_macinfo_section_name,
25598 SECTION_DEBUG
25599 | SECTION_LINKONCE,
25600 comdat_key);
25601 ASM_GENERATE_INTERNAL_LABEL (label,
25602 DEBUG_MACRO_SECTION_LABEL,
25603 ref->lineno);
25604 ASM_OUTPUT_LABEL (asm_out_file, label);
25605 ref->code = 0;
25606 ref->info = NULL;
25607 dw2_asm_output_data (2, 4, "DWARF macro version number");
25608 if (DWARF_OFFSET_SIZE == 8)
25609 dw2_asm_output_data (1, 1, "Flags: 64-bit");
25610 else
25611 dw2_asm_output_data (1, 0, "Flags: 32-bit");
25612 }
25613 break;
25614 case DW_MACINFO_define:
25615 case DW_MACINFO_undef:
25616 output_macinfo_op (ref);
25617 ref->code = 0;
25618 ref->info = NULL;
25619 break;
25620 default:
25621 gcc_unreachable ();
25622 }
25623 }
25624
25625 /* Initialize the various sections and labels for dwarf output. */
25626
25627 static void
25628 init_sections_and_labels (void)
25629 {
25630 if (!dwarf_split_debug_info)
25631 {
25632 debug_info_section = get_section (DEBUG_INFO_SECTION,
25633 SECTION_DEBUG, NULL);
25634 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25635 SECTION_DEBUG, NULL);
25636 debug_loc_section = get_section (DEBUG_LOC_SECTION,
25637 SECTION_DEBUG, NULL);
25638 debug_macinfo_section_name
25639 = dwarf_strict ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION;
25640 debug_macinfo_section = get_section (debug_macinfo_section_name,
25641 SECTION_DEBUG, NULL);
25642 }
25643 else
25644 {
25645 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
25646 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25647 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
25648 SECTION_DEBUG | SECTION_EXCLUDE,
25649 NULL);
25650 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
25651 SECTION_DEBUG, NULL);
25652 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
25653 SECTION_DEBUG, NULL);
25654 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25655 SECTION_DEBUG, NULL);
25656 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
25657 DEBUG_SKELETON_ABBREV_SECTION_LABEL, 0);
25658
25659 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections stay in
25660 the main .o, but the skeleton_line goes into the split off dwo. */
25661 debug_skeleton_line_section
25662 = get_section (DEBUG_DWO_LINE_SECTION,
25663 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25664 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
25665 DEBUG_SKELETON_LINE_SECTION_LABEL, 0);
25666 debug_str_offsets_section = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
25667 SECTION_DEBUG | SECTION_EXCLUDE,
25668 NULL);
25669 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
25670 DEBUG_SKELETON_INFO_SECTION_LABEL, 0);
25671 debug_loc_section = get_section (DEBUG_DWO_LOC_SECTION,
25672 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25673 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
25674 DEBUG_STR_DWO_SECTION_FLAGS, NULL);
25675 debug_macinfo_section_name
25676 = dwarf_strict ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION;
25677 debug_macinfo_section = get_section (debug_macinfo_section_name,
25678 SECTION_DEBUG | SECTION_EXCLUDE,
25679 NULL);
25680 }
25681 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
25682 SECTION_DEBUG, NULL);
25683 debug_line_section = get_section (DEBUG_LINE_SECTION,
25684 SECTION_DEBUG, NULL);
25685 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
25686 SECTION_DEBUG, NULL);
25687 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
25688 SECTION_DEBUG, NULL);
25689 debug_str_section = get_section (DEBUG_STR_SECTION,
25690 DEBUG_STR_SECTION_FLAGS, NULL);
25691 debug_ranges_section = get_section (DEBUG_RANGES_SECTION,
25692 SECTION_DEBUG, NULL);
25693 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
25694 SECTION_DEBUG, NULL);
25695
25696 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
25697 DEBUG_ABBREV_SECTION_LABEL, 0);
25698 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
25699 DEBUG_INFO_SECTION_LABEL, 0);
25700 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
25701 DEBUG_LINE_SECTION_LABEL, 0);
25702 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
25703 DEBUG_RANGES_SECTION_LABEL, 0);
25704 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
25705 DEBUG_ADDR_SECTION_LABEL, 0);
25706 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
25707 dwarf_strict
25708 ? DEBUG_MACINFO_SECTION_LABEL
25709 : DEBUG_MACRO_SECTION_LABEL, 0);
25710 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, 0);
25711 }
25712
25713 /* Set up for Dwarf output at the start of compilation. */
25714
25715 static void
25716 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
25717 {
25718 /* This option is currently broken; see PR53118 and PR46102. */
25719 if (flag_eliminate_dwarf2_dups
25720 && strstr (lang_hooks.name, "C++"))
25721 {
25722 warning (0, "-feliminate-dwarf2-dups is broken for C++, ignoring");
25723 flag_eliminate_dwarf2_dups = 0;
25724 }
25725
25726 /* Allocate the file_table. */
25727 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
25728
25729 #ifndef DWARF2_LINENO_DEBUGGING_INFO
25730 /* Allocate the decl_die_table. */
25731 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
25732
25733 /* Allocate the decl_loc_table. */
25734 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
25735
25736 /* Allocate the cached_dw_loc_list_table. */
25737 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
25738
25739 /* Allocate the initial hunk of the decl_scope_table. */
25740 vec_alloc (decl_scope_table, 256);
25741
25742 /* Allocate the initial hunk of the abbrev_die_table. */
25743 abbrev_die_table = ggc_cleared_vec_alloc<dw_die_ref>
25744 (ABBREV_DIE_TABLE_INCREMENT);
25745 abbrev_die_table_allocated = ABBREV_DIE_TABLE_INCREMENT;
25746 /* Zero-th entry is allocated, but unused. */
25747 abbrev_die_table_in_use = 1;
25748
25749 /* Allocate the dwarf_proc_stack_usage_map. */
25750 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
25751
25752 /* Allocate the pubtypes and pubnames vectors. */
25753 vec_alloc (pubname_table, 32);
25754 vec_alloc (pubtype_table, 32);
25755
25756 vec_alloc (incomplete_types, 64);
25757
25758 vec_alloc (used_rtx_array, 32);
25759
25760 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25761 vec_alloc (macinfo_table, 64);
25762 #endif
25763
25764 /* If front-ends already registered a main translation unit but we were not
25765 ready to perform the association, do this now. */
25766 if (main_translation_unit != NULL_TREE)
25767 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
25768 }
25769
25770 /* Called before compile () starts outputting functions, variables
25771 and toplevel asms into assembly. */
25772
25773 static void
25774 dwarf2out_assembly_start (void)
25775 {
25776 #ifndef DWARF2_LINENO_DEBUGGING_INFO
25777 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
25778 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
25779 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
25780 COLD_TEXT_SECTION_LABEL, 0);
25781 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
25782
25783 switch_to_section (text_section);
25784 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
25785 #endif
25786
25787 /* Make sure the line number table for .text always exists. */
25788 text_section_line_info = new_line_info_table ();
25789 text_section_line_info->end_label = text_end_label;
25790
25791 #ifdef DWARF2_LINENO_DEBUGGING_INFO
25792 cur_line_info_table = text_section_line_info;
25793 #endif
25794
25795 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
25796 && dwarf2out_do_cfi_asm ()
25797 && (!(flag_unwind_tables || flag_exceptions)
25798 || targetm_common.except_unwind_info (&global_options) != UI_DWARF2))
25799 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
25800 }
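
/* An illustrative note (the assembler output shown is an example, not a
   transcript): when the target does not need DWARF2-based exception
   unwinding, the directive emitted above,

       .cfi_sections   .debug_frame

   tells GAS to turn the .cfi_* directives it sees into .debug_frame
   contents instead of the default .eh_frame.  */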
25801
25802 /* A helper function for dwarf2out_finish called through
25803 htab_traverse. Assign a string its index. All strings must be
25804 collected into the table by the time index_string is called,
25805 because the indexing code relies on htab_traverse to traverse nodes
25806 in the same order for each run. */
25807
25808 int
25809 index_string (indirect_string_node **h, unsigned int *index)
25810 {
25811 indirect_string_node *node = *h;
25812
25813 find_string_form (node);
25814 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25815 {
25816 gcc_assert (node->index == NO_INDEX_ASSIGNED);
25817 node->index = *index;
25818 *index += 1;
25819 }
25820 return 1;
25821 }
25822
25823 /* A helper function for output_indirect_strings called through
25824 htab_traverse. Output the offset to a string and update the
25825 current offset. */
25826
25827 int
25828 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
25829 {
25830 indirect_string_node *node = *h;
25831
25832 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25833 {
25834 /* Assert that this node has been assigned an index. */
25835 gcc_assert (node->index != NO_INDEX_ASSIGNED
25836 && node->index != NOT_INDEXED);
25837 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
25838 "indexed string 0x%x: %s", node->index, node->str);
25839 *offset += strlen (node->str) + 1;
25840 }
25841 return 1;
25842 }
25843
25844 /* A helper function for dwarf2out_finish called through
25845 htab_traverse. Output the indexed string. */
25846
25847 int
25848 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
25849 {
25850 struct indirect_string_node *node = *h;
25851
25852 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25853 {
25854 /* Assert that the strings are output in the same order as their
25855 indexes were assigned. */
25856 gcc_assert (*cur_idx == node->index);
25857 assemble_string (node->str, strlen (node->str) + 1);
25858 *cur_idx += 1;
25859 }
25860 return 1;
25861 }
25862
25863 /* A helper function for dwarf2out_finish called through
25864 htab_traverse. Emit one queued .debug_str string. */
25865
25866 int
25867 output_indirect_string (indirect_string_node **h, void *)
25868 {
25869 struct indirect_string_node *node = *h;
25870
25871 node->form = find_string_form (node);
25872 if (node->form == DW_FORM_strp && node->refcount > 0)
25873 {
25874 ASM_OUTPUT_LABEL (asm_out_file, node->label);
25875 assemble_string (node->str, strlen (node->str) + 1);
25876 }
25877
25878 return 1;
25879 }
25880
25881 /* Output the indexed string table. */
25882
25883 static void
25884 output_indirect_strings (void)
25885 {
25886 switch_to_section (debug_str_section);
25887 if (!dwarf_split_debug_info)
25888 debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25889 else
25890 {
25891 unsigned int offset = 0;
25892 unsigned int cur_idx = 0;
25893
25894 skeleton_debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25895
25896 switch_to_section (debug_str_offsets_section);
25897 debug_str_hash->traverse_noresize
25898 <unsigned int *, output_index_string_offset> (&offset);
25899 switch_to_section (debug_str_dwo_section);
25900 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
25901 (&cur_idx);
25902 }
25903 }
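
/* A sketch of the split-DWARF string layout produced above (section
   names follow the GNU Fission convention; offsets and strings are
   made up):

       .debug_str_offsets.dwo:   0x0  0x4  0x9     <- one offset per index
       .debug_str.dwo:           "int\0main\0x\0"
       DIE attribute:            DW_FORM_GNU_str_index, index 1 -> "main"

   index_string assigns the indexes, output_index_string_offset emits
   the offsets and output_index_string emits the strings; all three rely
   on htab_traverse visiting the nodes in the same order each time.  */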
25904
25905 /* Callback for htab_traverse to assign an index to an entry in the
25906 table, and to write that entry to the .debug_addr section. */
25907
25908 int
25909 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
25910 {
25911 addr_table_entry *entry = *slot;
25912
25913 if (entry->refcount == 0)
25914 {
25915 gcc_assert (entry->index == NO_INDEX_ASSIGNED
25916 || entry->index == NOT_INDEXED);
25917 return 1;
25918 }
25919
25920 gcc_assert (entry->index == *cur_index);
25921 (*cur_index)++;
25922
25923 switch (entry->kind)
25924 {
25925 case ate_kind_rtx:
25926 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
25927 "0x%x", entry->index);
25928 break;
25929 case ate_kind_rtx_dtprel:
25930 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
25931 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
25932 DWARF2_ADDR_SIZE,
25933 entry->addr.rtl);
25934 fputc ('\n', asm_out_file);
25935 break;
25936 case ate_kind_label:
25937 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
25938 "0x%x", entry->index);
25939 break;
25940 default:
25941 gcc_unreachable ();
25942 }
25943 return 1;
25944 }
25945
25946 /* Produce the .debug_addr section. */
25947
25948 static void
25949 output_addr_table (void)
25950 {
25951 unsigned int index = 0;
25952 if (addr_index_table == NULL || addr_index_table->size () == 0)
25953 return;
25954
25955 switch_to_section (debug_addr_section);
25956 addr_index_table
25957 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
25958 }
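
/* A sketch of the address table emitted above (symbols and indexes are
   invented).  Each entry is DWARF2_ADDR_SIZE bytes and location
   expressions refer to entries by index:

       .debug_addr:    [0] global_a   [1] global_b   [2] .Ltext0
       exprloc:        DW_OP_GNU_addr_index 1        <- address of global_b

   With the GNU split-DWARF extension the skeleton CU's
   DW_AT_GNU_addr_base gives the start of this table, so a consumer
   finds the address at base + index * DWARF2_ADDR_SIZE.  */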
25959
25960 #if ENABLE_ASSERT_CHECKING
25961 /* Verify that all marks are clear. */
25962
25963 static void
25964 verify_marks_clear (dw_die_ref die)
25965 {
25966 dw_die_ref c;
25967
25968 gcc_assert (! die->die_mark);
25969 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
25970 }
25971 #endif /* ENABLE_ASSERT_CHECKING */
25972
25973 /* Clear the marks for a die and its children.
25974 Do nothing if the mark isn't set. */
25975
25976 static void
25977 prune_unmark_dies (dw_die_ref die)
25978 {
25979 dw_die_ref c;
25980
25981 if (die->die_mark)
25982 die->die_mark = 0;
25983 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
25984 }
25985
25986 /* Given LOC that is referenced by a DIE we're marking as used, find all
25987 DWARF procedures it references and mark them as used. */
25988
25989 static void
25990 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
25991 {
25992 for (; loc != NULL; loc = loc->dw_loc_next)
25993 switch (loc->dw_loc_opc)
25994 {
25995 case DW_OP_GNU_implicit_pointer:
25996 case DW_OP_GNU_convert:
25997 case DW_OP_GNU_reinterpret:
25998 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
25999 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
26000 break;
26001 case DW_OP_call2:
26002 case DW_OP_call4:
26003 case DW_OP_call_ref:
26004 case DW_OP_GNU_const_type:
26005 case DW_OP_GNU_parameter_ref:
26006 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
26007 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
26008 break;
26009 case DW_OP_GNU_regval_type:
26010 case DW_OP_GNU_deref_type:
26011 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
26012 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
26013 break;
26014 case DW_OP_GNU_entry_value:
26015 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
26016 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
26017 break;
26018 default:
26019 break;
26020 }
26021 }
26022
26023 /* Given DIE that we're marking as used, find any other dies
26024 it references as attributes and mark them as used. */
26025
26026 static void
26027 prune_unused_types_walk_attribs (dw_die_ref die)
26028 {
26029 dw_attr_node *a;
26030 unsigned ix;
26031
26032 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
26033 {
26034 switch (AT_class (a))
26035 {
26036 /* Make sure DWARF procedures referenced by location descriptions will
26037 get emitted. */
26038 case dw_val_class_loc:
26039 prune_unused_types_walk_loc_descr (AT_loc (a));
26040 break;
26041 case dw_val_class_loc_list:
26042 for (dw_loc_list_ref list = AT_loc_list (a);
26043 list != NULL;
26044 list = list->dw_loc_next)
26045 prune_unused_types_walk_loc_descr (list->expr);
26046 break;
26047
26048 case dw_val_class_die_ref:
26049 /* A reference to another DIE.
26050 Make sure that it will get emitted.
26051 If it was broken out into a comdat group, don't follow it. */
26052 if (! AT_ref (a)->comdat_type_p
26053 || a->dw_attr == DW_AT_specification)
26054 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
26055 break;
26056
26057 case dw_val_class_str:
26058 /* Set the string's refcount to 0 so that prune_unused_types_mark
26059 accounts properly for it. */
26060 a->dw_attr_val.v.val_str->refcount = 0;
26061 break;
26062
26063 default:
26064 break;
26065 }
26066 }
26067 }
26068
26069 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
26070
26071 static void
26072 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
26073 {
26074 dw_die_ref c;
26075
26076 if (die == NULL || die->die_child == NULL)
26077 return;
26078 c = die->die_child;
26079 do
26080 {
26081 if (is_template_parameter (c))
26082 prune_unused_types_mark (c, 1);
26083 c = c->die_sib;
26084 } while (c && c != die->die_child);
26085 }
26086
26087 /* Mark DIE as being used. If DOKIDS is true, then walk down
26088 to DIE's children. */
26089
26090 static void
26091 prune_unused_types_mark (dw_die_ref die, int dokids)
26092 {
26093 dw_die_ref c;
26094
26095 if (die->die_mark == 0)
26096 {
26097 /* We haven't done this node yet. Mark it as used. */
26098 die->die_mark = 1;
26099 /* If this is the DIE of a generic type instantiation,
26100 mark the children DIEs that describe its generic parms and
26101 args. */
26102 prune_unused_types_mark_generic_parms_dies (die);
26103
26104 /* We also have to mark its parents as used.
26105 (But we don't want to mark our parent's kids due to this,
26106 unless it is a class.) */
26107 if (die->die_parent)
26108 prune_unused_types_mark (die->die_parent,
26109 class_scope_p (die->die_parent));
26110
26111 /* Mark any referenced nodes. */
26112 prune_unused_types_walk_attribs (die);
26113
26114 /* If this node is a specification,
26115 also mark the definition, if it exists. */
26116 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
26117 prune_unused_types_mark (die->die_definition, 1);
26118 }
26119
26120 if (dokids && die->die_mark != 2)
26121 {
26122 /* We need to walk the children, but haven't done so yet.
26123 Remember that we've walked the kids. */
26124 die->die_mark = 2;
26125
26126 /* If this is an array type, we need to make sure our
26127 kids get marked, even if they're types. If we're
26128 breaking out types into comdat sections, do this
26129 for all type definitions. */
26130 if (die->die_tag == DW_TAG_array_type
26131 || (use_debug_types
26132 && is_type_die (die) && ! is_declaration_die (die)))
26133 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
26134 else
26135 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
26136 }
26137 }
26138
26139 /* For local classes, check whether any static member functions were emitted
26140 and, if so, mark them. */
26141
26142 static void
26143 prune_unused_types_walk_local_classes (dw_die_ref die)
26144 {
26145 dw_die_ref c;
26146
26147 if (die->die_mark == 2)
26148 return;
26149
26150 switch (die->die_tag)
26151 {
26152 case DW_TAG_structure_type:
26153 case DW_TAG_union_type:
26154 case DW_TAG_class_type:
26155 break;
26156
26157 case DW_TAG_subprogram:
26158 if (!get_AT_flag (die, DW_AT_declaration)
26159 || die->die_definition != NULL)
26160 prune_unused_types_mark (die, 1);
26161 return;
26162
26163 default:
26164 return;
26165 }
26166
26167 /* Mark children. */
26168 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
26169 }
26170
26171 /* Walk the tree DIE and mark types that we actually use. */
26172
26173 static void
26174 prune_unused_types_walk (dw_die_ref die)
26175 {
26176 dw_die_ref c;
26177
26178 /* Don't do anything if this node is already marked and
26179 children have been marked as well. */
26180 if (die->die_mark == 2)
26181 return;
26182
26183 switch (die->die_tag)
26184 {
26185 case DW_TAG_structure_type:
26186 case DW_TAG_union_type:
26187 case DW_TAG_class_type:
26188 if (die->die_perennial_p)
26189 break;
26190
26191 for (c = die->die_parent; c; c = c->die_parent)
26192 if (c->die_tag == DW_TAG_subprogram)
26193 break;
26194
26195 /* Finding used static member functions inside of classes
26196 is needed just for local classes, because for other classes
26197 static member function DIEs with DW_AT_specification
26198 are emitted outside of the DW_TAG_*_type. If we ever change
26199 it, we'd need to call this even for non-local classes. */
26200 if (c)
26201 prune_unused_types_walk_local_classes (die);
26202
26203 /* It's a type node --- don't mark it. */
26204 return;
26205
26206 case DW_TAG_const_type:
26207 case DW_TAG_packed_type:
26208 case DW_TAG_pointer_type:
26209 case DW_TAG_reference_type:
26210 case DW_TAG_rvalue_reference_type:
26211 case DW_TAG_volatile_type:
26212 case DW_TAG_typedef:
26213 case DW_TAG_array_type:
26214 case DW_TAG_interface_type:
26215 case DW_TAG_friend:
26216 case DW_TAG_enumeration_type:
26217 case DW_TAG_subroutine_type:
26218 case DW_TAG_string_type:
26219 case DW_TAG_set_type:
26220 case DW_TAG_subrange_type:
26221 case DW_TAG_ptr_to_member_type:
26222 case DW_TAG_file_type:
26223 /* Type nodes are useful only when other DIEs reference them --- don't
26224 mark them. */
26225 /* FALLTHROUGH */
26226
26227 case DW_TAG_dwarf_procedure:
26228 /* Likewise for DWARF procedures. */
26229
26230 if (die->die_perennial_p)
26231 break;
26232
26233 return;
26234
26235 default:
26236 /* Mark everything else. */
26237 break;
26238 }
26239
26240 if (die->die_mark == 0)
26241 {
26242 die->die_mark = 1;
26243
26244 /* Now, mark any dies referenced from here. */
26245 prune_unused_types_walk_attribs (die);
26246 }
26247
26248 die->die_mark = 2;
26249
26250 /* Mark children. */
26251 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
26252 }
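
/* A worked example of the marking scheme (the source is invented):

       struct used_s { int i; };    // referenced below, DIE is kept
       typedef long unused_t;       // never referenced, DIE is pruned
       void f (void) { struct used_s v; v.i = 0; }

   prune_unused_types_walk skips both type DIEs (die_mark stays 0), but
   walking the DW_TAG_variable for "v" follows its DW_AT_type reference,
   and prune_unused_types_mark sets the mark on the used_s DIE
   (1 = used, 2 = children walked as well).  The DW_TAG_typedef for
   unused_t keeps die_mark == 0 and is unlinked later by
   prune_unused_types_prune.  */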
26253
26254 /* Increment the string counts on strings referred to from DIE's
26255 attributes. */
26256
26257 static void
26258 prune_unused_types_update_strings (dw_die_ref die)
26259 {
26260 dw_attr_node *a;
26261 unsigned ix;
26262
26263 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
26264 if (AT_class (a) == dw_val_class_str)
26265 {
26266 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
26267 s->refcount++;
26268 /* Avoid unnecessarily putting strings that are used less than
26269 twice in the hash table. */
26270 if (s->refcount
26271 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
26272 {
26273 indirect_string_node **slot
26274 = debug_str_hash->find_slot_with_hash (s->str,
26275 htab_hash_string (s->str),
26276 INSERT);
26277 gcc_assert (*slot == NULL);
26278 *slot = s;
26279 }
26280 }
26281 }
26282
26283 /* Mark DIE and its children as removed. */
26284
26285 static void
26286 mark_removed (dw_die_ref die)
26287 {
26288 dw_die_ref c;
26289 die->removed = true;
26290 FOR_EACH_CHILD (die, c, mark_removed (c));
26291 }
26292
26293 /* Remove from the tree DIE any dies that aren't marked. */
26294
26295 static void
26296 prune_unused_types_prune (dw_die_ref die)
26297 {
26298 dw_die_ref c;
26299
26300 gcc_assert (die->die_mark);
26301 prune_unused_types_update_strings (die);
26302
26303 if (! die->die_child)
26304 return;
26305
26306 c = die->die_child;
26307 do {
26308 dw_die_ref prev = c, next;
26309 for (c = c->die_sib; ! c->die_mark; c = next)
26310 if (c == die->die_child)
26311 {
26312 /* No marked children between 'prev' and the end of the list. */
26313 if (prev == c)
26314 /* No marked children at all. */
26315 die->die_child = NULL;
26316 else
26317 {
26318 prev->die_sib = c->die_sib;
26319 die->die_child = prev;
26320 }
26321 c->die_sib = NULL;
26322 mark_removed (c);
26323 return;
26324 }
26325 else
26326 {
26327 next = c->die_sib;
26328 c->die_sib = NULL;
26329 mark_removed (c);
26330 }
26331
26332 if (c != prev->die_sib)
26333 prev->die_sib = c;
26334 prune_unused_types_prune (c);
26335 } while (c != die->die_child);
26336 }
26337
26338 /* Remove dies representing declarations that we never use. */
26339
26340 static void
26341 prune_unused_types (void)
26342 {
26343 unsigned int i;
26344 limbo_die_node *node;
26345 comdat_type_node *ctnode;
26346 pubname_entry *pub;
26347 dw_die_ref base_type;
26348
26349 #if ENABLE_ASSERT_CHECKING
26350 /* All the marks should already be clear. */
26351 verify_marks_clear (comp_unit_die ());
26352 for (node = limbo_die_list; node; node = node->next)
26353 verify_marks_clear (node->die);
26354 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26355 verify_marks_clear (ctnode->root_die);
26356 #endif /* ENABLE_ASSERT_CHECKING */
26357
26358 /* Mark types that are used in global variables. */
26359 premark_types_used_by_global_vars ();
26360
26361 /* Set the mark on nodes that are actually used. */
26362 prune_unused_types_walk (comp_unit_die ());
26363 for (node = limbo_die_list; node; node = node->next)
26364 prune_unused_types_walk (node->die);
26365 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26366 {
26367 prune_unused_types_walk (ctnode->root_die);
26368 prune_unused_types_mark (ctnode->type_die, 1);
26369 }
26370
26371 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
26372 are unusual in that they are pubnames that are the children of pubtypes.
26373 They should only be marked via their parent DW_TAG_enumeration_type die,
26374 not as roots in themselves. */
26375 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
26376 if (pub->die->die_tag != DW_TAG_enumerator)
26377 prune_unused_types_mark (pub->die, 1);
26378 for (i = 0; base_types.iterate (i, &base_type); i++)
26379 prune_unused_types_mark (base_type, 1);
26380
26381 if (debug_str_hash)
26382 debug_str_hash->empty ();
26383 if (skeleton_debug_str_hash)
26384 skeleton_debug_str_hash->empty ();
26385 prune_unused_types_prune (comp_unit_die ());
26386 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
26387 {
26388 node = *pnode;
26389 if (!node->die->die_mark)
26390 *pnode = node->next;
26391 else
26392 {
26393 prune_unused_types_prune (node->die);
26394 pnode = &node->next;
26395 }
26396 }
26397 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26398 prune_unused_types_prune (ctnode->root_die);
26399
26400 /* Leave the marks clear. */
26401 prune_unmark_dies (comp_unit_die ());
26402 for (node = limbo_die_list; node; node = node->next)
26403 prune_unmark_dies (node->die);
26404 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26405 prune_unmark_dies (ctnode->root_die);
26406 }
26407
26408 /* Set the parameter to true if there are any relative pathnames in
26409 the file table. */
26410 int
26411 file_table_relative_p (dwarf_file_data **slot, bool *p)
26412 {
26413 struct dwarf_file_data *d = *slot;
26414 if (!IS_ABSOLUTE_PATH (d->filename))
26415 {
26416 *p = true;
26417 return 0;
26418 }
26419 return 1;
26420 }
26421
26422 /* Helpers to manipulate hash table of comdat type units. */
26423
26424 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
26425 {
26426 static inline hashval_t hash (const comdat_type_node *);
26427 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
26428 };
26429
26430 inline hashval_t
26431 comdat_type_hasher::hash (const comdat_type_node *type_node)
26432 {
26433 hashval_t h;
26434 memcpy (&h, type_node->signature, sizeof (h));
26435 return h;
26436 }
26437
26438 inline bool
26439 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
26440 const comdat_type_node *type_node_2)
26441 {
26442 return (! memcmp (type_node_1->signature, type_node_2->signature,
26443 DWARF_TYPE_SIGNATURE_SIZE));
26444 }
26445
26446 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
26447 to the location it would have been added, should we know its
26448 DECL_ASSEMBLER_NAME when we added other attributes. This will
26449 probably improve compactness of debug info, removing equivalent
26450 abbrevs, and hide any differences caused by deferring the
26451 computation of the assembler name, triggered by e.g. PCH. */
26452
26453 static inline void
26454 move_linkage_attr (dw_die_ref die)
26455 {
26456 unsigned ix = vec_safe_length (die->die_attr);
26457 dw_attr_node linkage = (*die->die_attr)[ix - 1];
26458
26459 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
26460 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
26461
26462 while (--ix > 0)
26463 {
26464 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
26465
26466 if (prev->dw_attr == DW_AT_decl_line || prev->dw_attr == DW_AT_name)
26467 break;
26468 }
26469
26470 if (ix != vec_safe_length (die->die_attr) - 1)
26471 {
26472 die->die_attr->pop ();
26473 die->die_attr->quick_insert (ix, linkage);
26474 }
26475 }
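
/* A sketch of the reordering (the attribute lists are examples only):

       before:  DW_AT_name, DW_AT_decl_file, DW_AT_decl_line,
                DW_AT_type, DW_AT_external, DW_AT_linkage_name
       after:   DW_AT_name, DW_AT_decl_file, DW_AT_decl_line,
                DW_AT_linkage_name, DW_AT_type, DW_AT_external

   The late-added linkage name is inserted right after the last
   DW_AT_decl_line/DW_AT_name attribute, roughly where it would have
   been added had DECL_ASSEMBLER_NAME been known up front, so two
   otherwise identical DIEs end up with the same abbrev.  */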
26476
26477 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
26478 referenced from typed stack ops and count how often they are used. */
26479
26480 static void
26481 mark_base_types (dw_loc_descr_ref loc)
26482 {
26483 dw_die_ref base_type = NULL;
26484
26485 for (; loc; loc = loc->dw_loc_next)
26486 {
26487 switch (loc->dw_loc_opc)
26488 {
26489 case DW_OP_GNU_regval_type:
26490 case DW_OP_GNU_deref_type:
26491 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
26492 break;
26493 case DW_OP_GNU_convert:
26494 case DW_OP_GNU_reinterpret:
26495 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
26496 continue;
26497 /* FALLTHRU */
26498 case DW_OP_GNU_const_type:
26499 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
26500 break;
26501 case DW_OP_GNU_entry_value:
26502 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
26503 continue;
26504 default:
26505 continue;
26506 }
26507 gcc_assert (base_type->die_parent == comp_unit_die ());
26508 if (base_type->die_mark)
26509 base_type->die_mark++;
26510 else
26511 {
26512 base_types.safe_push (base_type);
26513 base_type->die_mark = 1;
26514 }
26515 }
26516 }
26517
26518 /* Comparison function for sorting marked base types. */
26519
26520 static int
26521 base_type_cmp (const void *x, const void *y)
26522 {
26523 dw_die_ref dx = *(const dw_die_ref *) x;
26524 dw_die_ref dy = *(const dw_die_ref *) y;
26525 unsigned int byte_size1, byte_size2;
26526 unsigned int encoding1, encoding2;
26527 if (dx->die_mark > dy->die_mark)
26528 return -1;
26529 if (dx->die_mark < dy->die_mark)
26530 return 1;
26531 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
26532 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
26533 if (byte_size1 < byte_size2)
26534 return 1;
26535 if (byte_size1 > byte_size2)
26536 return -1;
26537 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
26538 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
26539 if (encoding1 < encoding2)
26540 return 1;
26541 if (encoding1 > encoding2)
26542 return -1;
26543 return 0;
26544 }
26545
26546 /* Move base types marked by mark_base_types as early as possible
26547 in the CU, sorted by decreasing usage count both to make the
26548 uleb128 references as small as possible and to make sure they
26549 will have die_offset already computed by calc_die_sizes when
26550 the sizes of typed stack loc ops are computed. */
26551
26552 static void
26553 move_marked_base_types (void)
26554 {
26555 unsigned int i;
26556 dw_die_ref base_type, die, c;
26557
26558 if (base_types.is_empty ())
26559 return;
26560
26561 /* Sort by decreasing usage count, they will be added again in that
26562 order later on. */
26563 base_types.qsort (base_type_cmp);
26564 die = comp_unit_die ();
26565 c = die->die_child;
26566 do
26567 {
26568 dw_die_ref prev = c;
26569 c = c->die_sib;
26570 while (c->die_mark)
26571 {
26572 remove_child_with_prev (c, prev);
26573 /* As base types got marked, there must be at least
26574 one node other than DW_TAG_base_type. */
26575 gcc_assert (die->die_child != NULL);
26576 c = prev->die_sib;
26577 }
26578 }
26579 while (c != die->die_child);
26580 gcc_assert (die->die_child);
26581 c = die->die_child;
26582 for (i = 0; base_types.iterate (i, &base_type); i++)
26583 {
26584 base_type->die_mark = 0;
26585 base_type->die_sib = c->die_sib;
26586 c->die_sib = base_type;
26587 c = base_type;
26588 }
26589 }
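
/* A sketch of the effect (usage counts are made up): if the marked base
   types are "int" (used 7 times), "float" (3) and "char" (1), they are
   re-linked in that order as the first children of the CU.  Their DIE
   offsets therefore stay small, so the uleb128 operand of e.g.
   DW_OP_GNU_convert <ref to "int"> usually fits in a single byte.  */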
26590
26591 /* Helper function for resolve_addr: attempt to resolve
26592 one CONST_STRING and return true if successful. Similarly verify that
26593 SYMBOL_REFs refer to variables emitted in the current CU. */
26594
26595 static bool
26596 resolve_one_addr (rtx *addr)
26597 {
26598 rtx rtl = *addr;
26599
26600 if (GET_CODE (rtl) == CONST_STRING)
26601 {
26602 size_t len = strlen (XSTR (rtl, 0)) + 1;
26603 tree t = build_string (len, XSTR (rtl, 0));
26604 tree tlen = size_int (len - 1);
26605 TREE_TYPE (t)
26606 = build_array_type (char_type_node, build_index_type (tlen));
26607 rtl = lookup_constant_def (t);
26608 if (!rtl || !MEM_P (rtl))
26609 return false;
26610 rtl = XEXP (rtl, 0);
26611 if (GET_CODE (rtl) == SYMBOL_REF
26612 && SYMBOL_REF_DECL (rtl)
26613 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26614 return false;
26615 vec_safe_push (used_rtx_array, rtl);
26616 *addr = rtl;
26617 return true;
26618 }
26619
26620 if (GET_CODE (rtl) == SYMBOL_REF
26621 && SYMBOL_REF_DECL (rtl))
26622 {
26623 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
26624 {
26625 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
26626 return false;
26627 }
26628 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26629 return false;
26630 }
26631
26632 if (GET_CODE (rtl) == CONST)
26633 {
26634 subrtx_ptr_iterator::array_type array;
26635 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
26636 if (!resolve_one_addr (*iter))
26637 return false;
26638 }
26639
26640 return true;
26641 }
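
/* An illustrative example (the pool symbol is invented): a location
   operand holding CONST_STRING "abc" is only usable if the literal was
   really emitted, so the operand is rewritten to its constant-pool
   symbol when that symbol exists:

       before:  DW_OP_addr (CONST_STRING "abc")
       after:   DW_OP_addr .LC0       <- emitted "abc\0" in the pool

   If the pool entry was never written out (TREE_ASM_WRITTEN unset),
   resolve_one_addr returns false and the caller drops or rewrites the
   enclosing expression.  */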
26642
26643 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
26644 if possible, and create DW_TAG_dwarf_procedure that can be referenced
26645 from DW_OP_GNU_implicit_pointer if the string hasn't been seen yet. */
26646
26647 static rtx
26648 string_cst_pool_decl (tree t)
26649 {
26650 rtx rtl = output_constant_def (t, 1);
26651 unsigned char *array;
26652 dw_loc_descr_ref l;
26653 tree decl;
26654 size_t len;
26655 dw_die_ref ref;
26656
26657 if (!rtl || !MEM_P (rtl))
26658 return NULL_RTX;
26659 rtl = XEXP (rtl, 0);
26660 if (GET_CODE (rtl) != SYMBOL_REF
26661 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
26662 return NULL_RTX;
26663
26664 decl = SYMBOL_REF_DECL (rtl);
26665 if (!lookup_decl_die (decl))
26666 {
26667 len = TREE_STRING_LENGTH (t);
26668 vec_safe_push (used_rtx_array, rtl);
26669 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
26670 array = ggc_vec_alloc<unsigned char> (len);
26671 memcpy (array, TREE_STRING_POINTER (t), len);
26672 l = new_loc_descr (DW_OP_implicit_value, len, 0);
26673 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
26674 l->dw_loc_oprnd2.v.val_vec.length = len;
26675 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
26676 l->dw_loc_oprnd2.v.val_vec.array = array;
26677 add_AT_loc (ref, DW_AT_location, l);
26678 equate_decl_number_to_die (decl, ref);
26679 }
26680 return rtl;
26681 }
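
/* A sketch of the DIE created above for a literal whose pool decl has
   no DIE yet (contents invented):

       DW_TAG_dwarf_procedure
         DW_AT_location: DW_OP_implicit_value 4  "abc\0"

   DW_OP_GNU_implicit_pointer operands can then refer to this DIE, which
   lets a consumer print the string even when it has no address of its
   own.  */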
26682
26683 /* Helper function of resolve_addr_in_expr. LOC is
26684 a DW_OP_addr followed by DW_OP_stack_value, either at the start
26685 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
26686 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
26687 with DW_OP_GNU_implicit_pointer if possible
26688 and return true; if unsuccessful, return false. */
26689
26690 static bool
26691 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
26692 {
26693 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
26694 HOST_WIDE_INT offset = 0;
26695 dw_die_ref ref = NULL;
26696 tree decl;
26697
26698 if (GET_CODE (rtl) == CONST
26699 && GET_CODE (XEXP (rtl, 0)) == PLUS
26700 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
26701 {
26702 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
26703 rtl = XEXP (XEXP (rtl, 0), 0);
26704 }
26705 if (GET_CODE (rtl) == CONST_STRING)
26706 {
26707 size_t len = strlen (XSTR (rtl, 0)) + 1;
26708 tree t = build_string (len, XSTR (rtl, 0));
26709 tree tlen = size_int (len - 1);
26710
26711 TREE_TYPE (t)
26712 = build_array_type (char_type_node, build_index_type (tlen));
26713 rtl = string_cst_pool_decl (t);
26714 if (!rtl)
26715 return false;
26716 }
26717 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
26718 {
26719 decl = SYMBOL_REF_DECL (rtl);
26720 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
26721 {
26722 ref = lookup_decl_die (decl);
26723 if (ref && (get_AT (ref, DW_AT_location)
26724 || get_AT (ref, DW_AT_const_value)))
26725 {
26726 loc->dw_loc_opc = DW_OP_GNU_implicit_pointer;
26727 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26728 loc->dw_loc_oprnd1.val_entry = NULL;
26729 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26730 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26731 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26732 loc->dw_loc_oprnd2.v.val_int = offset;
26733 return true;
26734 }
26735 }
26736 }
26737 return false;
26738 }
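
/* A sketch of the rewrite performed above (the DIE reference is
   symbolic):

       before:  DW_OP_addr <var that was never emitted>, DW_OP_stack_value
       after:   DW_OP_GNU_implicit_pointer <DIE of var> <offset>

   The DW_OP_stack_value is spliced out of the list, and any byte offset
   folded into the original CONST/PLUS address survives as the second
   operand.  */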
26739
26740 /* Helper function for resolve_addr, handle one location
26741 expression, return false if at least one CONST_STRING or SYMBOL_REF in
26742 the location list couldn't be resolved. */
26743
26744 static bool
26745 resolve_addr_in_expr (dw_loc_descr_ref loc)
26746 {
26747 dw_loc_descr_ref keep = NULL;
26748 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
26749 switch (loc->dw_loc_opc)
26750 {
26751 case DW_OP_addr:
26752 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26753 {
26754 if ((prev == NULL
26755 || prev->dw_loc_opc == DW_OP_piece
26756 || prev->dw_loc_opc == DW_OP_bit_piece)
26757 && loc->dw_loc_next
26758 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
26759 && !dwarf_strict
26760 && optimize_one_addr_into_implicit_ptr (loc))
26761 break;
26762 return false;
26763 }
26764 break;
26765 case DW_OP_GNU_addr_index:
26766 case DW_OP_GNU_const_index:
26767 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
26768 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
26769 {
26770 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
26771 if (!resolve_one_addr (&rtl))
26772 return false;
26773 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
26774 loc->dw_loc_oprnd1.val_entry =
26775 add_addr_table_entry (rtl, ate_kind_rtx);
26776 }
26777 break;
26778 case DW_OP_const4u:
26779 case DW_OP_const8u:
26780 if (loc->dtprel
26781 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26782 return false;
26783 break;
26784 case DW_OP_plus_uconst:
26785 if (size_of_loc_descr (loc)
26786 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
26787 + 1
26788 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
26789 {
26790 dw_loc_descr_ref repl
26791 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
26792 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
26793 add_loc_descr (&repl, loc->dw_loc_next);
26794 *loc = *repl;
26795 }
26796 break;
26797 case DW_OP_implicit_value:
26798 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
26799 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
26800 return false;
26801 break;
26802 case DW_OP_GNU_implicit_pointer:
26803 case DW_OP_GNU_parameter_ref:
26804 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
26805 {
26806 dw_die_ref ref
26807 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
26808 if (ref == NULL)
26809 return false;
26810 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26811 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26812 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26813 }
26814 break;
26815 case DW_OP_GNU_const_type:
26816 case DW_OP_GNU_regval_type:
26817 case DW_OP_GNU_deref_type:
26818 case DW_OP_GNU_convert:
26819 case DW_OP_GNU_reinterpret:
26820 while (loc->dw_loc_next
26821 && loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert)
26822 {
26823 dw_die_ref base1, base2;
26824 unsigned enc1, enc2, size1, size2;
26825 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26826 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26827 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
26828 else if (loc->dw_loc_oprnd1.val_class
26829 == dw_val_class_unsigned_const)
26830 break;
26831 else
26832 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
26833 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
26834 == dw_val_class_unsigned_const)
26835 break;
26836 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
26837 gcc_assert (base1->die_tag == DW_TAG_base_type
26838 && base2->die_tag == DW_TAG_base_type);
26839 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
26840 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
26841 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
26842 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
26843 if (size1 == size2
26844 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
26845 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
26846 && loc != keep)
26847 || enc1 == enc2))
26848 {
26849 /* Optimize away next DW_OP_GNU_convert after
26850 adjusting LOC's base type die reference. */
26851 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26852 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26853 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
26854 else
26855 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
26856 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26857 continue;
26858 }
26859 /* Don't change integer DW_OP_GNU_convert after e.g. floating
26860 point typed stack entry. */
26861 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
26862 keep = loc->dw_loc_next;
26863 break;
26864 }
26865 break;
26866 default:
26867 break;
26868 }
26869 return true;
26870 }
26871
26872 /* Helper function of resolve_addr. DIE had DW_AT_location of
26873 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
26874 and DW_OP_addr couldn't be resolved. resolve_addr has already
26875 removed the DW_AT_location attribute. This function attempts to
26876 add a new DW_AT_location attribute with DW_OP_GNU_implicit_pointer
26877 to it or DW_AT_const_value attribute, if possible. */
26878
26879 static void
26880 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
26881 {
26882 if (!VAR_P (decl)
26883 || lookup_decl_die (decl) != die
26884 || DECL_EXTERNAL (decl)
26885 || !TREE_STATIC (decl)
26886 || DECL_INITIAL (decl) == NULL_TREE
26887 || DECL_P (DECL_INITIAL (decl))
26888 || get_AT (die, DW_AT_const_value))
26889 return;
26890
26891 tree init = DECL_INITIAL (decl);
26892 HOST_WIDE_INT offset = 0;
26893 /* For variables that have been optimized away and thus
26894 don't have a memory location, see if we can emit
26895 DW_AT_const_value instead. */
26896 if (tree_add_const_value_attribute (die, init))
26897 return;
26898 if (dwarf_strict)
26899 return;
26900 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
26901 and ADDR_EXPR refers to a decl that has DW_AT_location or
26902 DW_AT_const_value (but isn't addressable, otherwise
26903 resolving the original DW_OP_addr wouldn't fail), see if
26904 we can add DW_OP_GNU_implicit_pointer. */
26905 STRIP_NOPS (init);
26906 if (TREE_CODE (init) == POINTER_PLUS_EXPR
26907 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
26908 {
26909 offset = tree_to_shwi (TREE_OPERAND (init, 1));
26910 init = TREE_OPERAND (init, 0);
26911 STRIP_NOPS (init);
26912 }
26913 if (TREE_CODE (init) != ADDR_EXPR)
26914 return;
26915 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
26916 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
26917 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
26918 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
26919 && TREE_OPERAND (init, 0) != decl))
26920 {
26921 dw_die_ref ref;
26922 dw_loc_descr_ref l;
26923
26924 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
26925 {
26926 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
26927 if (!rtl)
26928 return;
26929 decl = SYMBOL_REF_DECL (rtl);
26930 }
26931 else
26932 decl = TREE_OPERAND (init, 0);
26933 ref = lookup_decl_die (decl);
26934 if (ref == NULL
26935 || (!get_AT (ref, DW_AT_location)
26936 && !get_AT (ref, DW_AT_const_value)))
26937 return;
26938 l = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
26939 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26940 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
26941 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
26942 add_AT_loc (die, DW_AT_location, l);
26943 }
26944 }
26945
26946 /* Return NULL if L is a valid DWARF expression, or the first op that is
26947 not valid as part of a DWARF expression. */
26948
26949 static dw_loc_descr_ref
26950 non_dwarf_expression (dw_loc_descr_ref l)
26951 {
26952 while (l)
26953 {
26954 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
26955 return l;
26956 switch (l->dw_loc_opc)
26957 {
26958 case DW_OP_regx:
26959 case DW_OP_implicit_value:
26960 case DW_OP_stack_value:
26961 case DW_OP_GNU_implicit_pointer:
26962 case DW_OP_GNU_parameter_ref:
26963 case DW_OP_piece:
26964 case DW_OP_bit_piece:
26965 return l;
26966 default:
26967 break;
26968 }
26969 l = l->dw_loc_next;
26970 }
26971 return NULL;
26972 }
26973
26974 /* Return adjusted copy of EXPR:
26975 If it is empty DWARF expression, return it.
26976 If it is valid non-empty DWARF expression,
26977 return copy of EXPR with copy of DEREF appended to it.
26978 If it is DWARF expression followed by DW_OP_reg{N,x}, return
26979 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended
26980 and no DEREF.
26981 If it is DWARF expression followed by DW_OP_stack_value, return
26982 copy of the DWARF expression without anything appended.
26983 Otherwise, return NULL. */
26984
26985 static dw_loc_descr_ref
26986 copy_deref_exprloc (dw_loc_descr_ref expr, dw_loc_descr_ref deref)
26987 {
26988
26989 if (expr == NULL)
26990 return NULL;
26991
26992 dw_loc_descr_ref l = non_dwarf_expression (expr);
26993 if (l && l->dw_loc_next)
26994 return NULL;
26995
26996 if (l)
26997 {
26998 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
26999 deref = new_loc_descr ((enum dwarf_location_atom)
27000 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
27001 0, 0);
27002 else
27003 switch (l->dw_loc_opc)
27004 {
27005 case DW_OP_regx:
27006 deref = new_loc_descr (DW_OP_bregx,
27007 l->dw_loc_oprnd1.v.val_unsigned, 0);
27008 break;
27009 case DW_OP_stack_value:
27010 deref = NULL;
27011 break;
27012 default:
27013 return NULL;
27014 }
27015 }
27016 else
27017 deref = new_loc_descr (deref->dw_loc_opc,
27018 deref->dw_loc_oprnd1.v.val_int, 0);
27019
27020 dw_loc_descr_ref ret = NULL, *p = &ret;
27021 while (expr != l)
27022 {
27023 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
27024 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
27025 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
27026 p = &(*p)->dw_loc_next;
27027 expr = expr->dw_loc_next;
27028 }
27029 *p = deref;
27030 return ret;
27031 }
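
/* Examples of the cases handled above (purely illustrative):

       EXPR                               result
       DW_OP_fbreg -16                    DW_OP_fbreg -16, <copy of DEREF>
       DW_OP_reg3                         DW_OP_breg3 0
       DW_OP_regx 37                      DW_OP_bregx 37 0
       DW_OP_breg5 8, DW_OP_stack_value   DW_OP_breg5 8   (no deref added)
       DW_OP_piece 4                      NULL            (not an expression)  */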
27032
27033 /* For DW_AT_string_length attribute with DW_OP_call4 reference to a variable
27034 or argument, adjust it if needed and return:
27035 -1 if the DW_AT_string_length attribute (and any
27036 DW_AT_{string_length_,}byte_size attribute) should be removed,
27037 0 if the attribute should be kept as is, because the referenced var or
27038 argument has only a DWARF expression that covers all ranges,
27039 1 if the attribute has been successfully adjusted. */
27040
27041 static int
27042 optimize_string_length (dw_attr_node *a)
27043 {
27044 dw_loc_descr_ref l = AT_loc (a), lv;
27045 dw_die_ref die = l->dw_loc_oprnd1.v.val_die_ref.die;
27046 dw_attr_node *av = get_AT (die, DW_AT_location);
27047 dw_loc_list_ref d;
27048 bool non_dwarf_expr = false;
27049
27050 if (av == NULL)
27051 return -1;
27052 switch (AT_class (av))
27053 {
27054 case dw_val_class_loc_list:
27055 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
27056 if (d->expr && non_dwarf_expression (d->expr))
27057 non_dwarf_expr = true;
27058 break;
27059 case dw_val_class_loc:
27060 lv = AT_loc (av);
27061 if (lv == NULL)
27062 return -1;
27063 if (non_dwarf_expression (lv))
27064 non_dwarf_expr = true;
27065 break;
27066 default:
27067 return -1;
27068 }
27069
27070 /* If it is safe to keep DW_OP_call4 in, keep it. */
27071 if (!non_dwarf_expr
27072 && (l->dw_loc_next == NULL || AT_class (av) == dw_val_class_loc))
27073 return 0;
27074
27075 /* If not dereferencing the DW_OP_call4 afterwards, we can just
27076 copy over the DW_AT_location attribute from die to a. */
27077 if (l->dw_loc_next == NULL)
27078 {
27079 a->dw_attr_val = av->dw_attr_val;
27080 return 1;
27081 }
27082
27083 dw_loc_list_ref list, *p;
27084 switch (AT_class (av))
27085 {
27086 case dw_val_class_loc_list:
27087 p = &list;
27088 list = NULL;
27089 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
27090 {
27091 lv = copy_deref_exprloc (d->expr, l->dw_loc_next);
27092 if (lv)
27093 {
27094 *p = new_loc_list (lv, d->begin, d->end, d->section);
27095 p = &(*p)->dw_loc_next;
27096 }
27097 }
27098 if (list == NULL)
27099 return -1;
27100 a->dw_attr_val.val_class = dw_val_class_loc_list;
27101 gen_llsym (list);
27102 *AT_loc_list_ptr (a) = list;
27103 return 1;
27104 case dw_val_class_loc:
27105 lv = copy_deref_exprloc (AT_loc (av), l->dw_loc_next);
27106 if (lv == NULL)
27107 return -1;
27108 a->dw_attr_val.v.val_loc = lv;
27109 return 1;
27110 default:
27111 gcc_unreachable ();
27112 }
27113 }
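
/* An illustrative mapping of the return values above (the setup is
   invented; DW_AT_string_length with DW_OP_call4 is what e.g. the
   Fortran front end's character lengths end up as):

     - the length variable has a plain DWARF-expression location
       covering all ranges: the DW_OP_call4 can stay        -> return 0
     - the length lives in a register (DW_OP_reg5): its location is not
       valid inside another expression, so the call [+ deref] is
       replaced by a copy such as DW_OP_breg5 0             -> return 1
     - the length has no usable location at all: the attribute (and the
       related byte_size attribute) must be dropped         -> return -1  */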
27114
27115 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
27116 an address in .rodata section if the string literal is emitted there,
27117 or remove the containing location list or replace DW_AT_const_value
27118 with DW_AT_location and empty location expression, if it isn't found
27119 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
27120 to something that has been emitted in the current CU. */
27121
27122 static void
27123 resolve_addr (dw_die_ref die)
27124 {
27125 dw_die_ref c;
27126 dw_attr_node *a;
27127 dw_loc_list_ref *curr, *start, loc;
27128 unsigned ix;
27129 bool remove_AT_byte_size = false;
27130
27131 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27132 switch (AT_class (a))
27133 {
27134 case dw_val_class_loc_list:
27135 start = curr = AT_loc_list_ptr (a);
27136 loc = *curr;
27137 gcc_assert (loc);
27138 /* The same list can be referenced more than once. See if we have
27139 already recorded the result from a previous pass. */
27140 if (loc->replaced)
27141 *curr = loc->dw_loc_next;
27142 else if (!loc->resolved_addr)
27143 {
27144 /* As things stand, we do not expect or allow one die to
27145 reference a suffix of another die's location list chain.
27146 References must be identical or completely separate.
27147 There is therefore no need to cache the result of this
27148 pass on any list other than the first; doing so
27149 would lead to unnecessary writes. */
27150 while (*curr)
27151 {
27152 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
27153 if (!resolve_addr_in_expr ((*curr)->expr))
27154 {
27155 dw_loc_list_ref next = (*curr)->dw_loc_next;
27156 dw_loc_descr_ref l = (*curr)->expr;
27157
27158 if (next && (*curr)->ll_symbol)
27159 {
27160 gcc_assert (!next->ll_symbol);
27161 next->ll_symbol = (*curr)->ll_symbol;
27162 }
27163 if (dwarf_split_debug_info)
27164 remove_loc_list_addr_table_entries (l);
27165 *curr = next;
27166 }
27167 else
27168 {
27169 mark_base_types ((*curr)->expr);
27170 curr = &(*curr)->dw_loc_next;
27171 }
27172 }
27173 if (loc == *start)
27174 loc->resolved_addr = 1;
27175 else
27176 {
27177 loc->replaced = 1;
27178 loc->dw_loc_next = *start;
27179 }
27180 }
27181 if (!*start)
27182 {
27183 remove_AT (die, a->dw_attr);
27184 ix--;
27185 }
27186 break;
27187 case dw_val_class_loc:
27188 {
27189 dw_loc_descr_ref l = AT_loc (a);
27190 /* Using DW_OP_call4 or DW_OP_call4 DW_OP_deref in
27191 DW_AT_string_length is only a rough approximation; unfortunately
27192 DW_AT_string_length can't be a reference to a DIE. DW_OP_call4
27193 needs a DWARF expression, while DW_AT_location of the referenced
27194 variable or argument might be any location description. */
27195 if (a->dw_attr == DW_AT_string_length
27196 && l
27197 && l->dw_loc_opc == DW_OP_call4
27198 && l->dw_loc_oprnd1.val_class == dw_val_class_die_ref
27199 && (l->dw_loc_next == NULL
27200 || (l->dw_loc_next->dw_loc_next == NULL
27201 && (l->dw_loc_next->dw_loc_opc == DW_OP_deref
27202 || l->dw_loc_next->dw_loc_opc == DW_OP_deref_size))))
27203 {
27204 switch (optimize_string_length (a))
27205 {
27206 case -1:
27207 remove_AT (die, a->dw_attr);
27208 ix--;
27209 /* If we drop DW_AT_string_length, we need to drop also
27210 DW_AT_{string_length_,}byte_size. */
27211 remove_AT_byte_size = true;
27212 continue;
27213 default:
27214 break;
27215 case 1:
27216 /* Even if we keep the optimized DW_AT_string_length,
27217 it might have changed AT_class, so process it again. */
27218 ix--;
27219 continue;
27220 }
27221 }
27222 /* For -gdwarf-2 don't attempt to optimize
27223 DW_AT_data_member_location containing
27224 DW_OP_plus_uconst - older consumers might
27225 rely on it being that op instead of a more complex,
27226 but shorter, location description. */
27227 if ((dwarf_version > 2
27228 || a->dw_attr != DW_AT_data_member_location
27229 || l == NULL
27230 || l->dw_loc_opc != DW_OP_plus_uconst
27231 || l->dw_loc_next != NULL)
27232 && !resolve_addr_in_expr (l))
27233 {
27234 if (dwarf_split_debug_info)
27235 remove_loc_list_addr_table_entries (l);
27236 if (l != NULL
27237 && l->dw_loc_next == NULL
27238 && l->dw_loc_opc == DW_OP_addr
27239 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
27240 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
27241 && a->dw_attr == DW_AT_location)
27242 {
27243 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
27244 remove_AT (die, a->dw_attr);
27245 ix--;
27246 optimize_location_into_implicit_ptr (die, decl);
27247 break;
27248 }
27249 remove_AT (die, a->dw_attr);
27250 ix--;
27251 }
27252 else
27253 mark_base_types (l);
27254 }
27255 break;
27256 case dw_val_class_addr:
27257 if (a->dw_attr == DW_AT_const_value
27258 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
27259 {
27260 if (AT_index (a) != NOT_INDEXED)
27261 remove_addr_table_entry (a->dw_attr_val.val_entry);
27262 remove_AT (die, a->dw_attr);
27263 ix--;
27264 }
27265 if (die->die_tag == DW_TAG_GNU_call_site
27266 && a->dw_attr == DW_AT_abstract_origin)
27267 {
27268 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
27269 dw_die_ref tdie = lookup_decl_die (tdecl);
27270 dw_die_ref cdie;
27271 if (tdie == NULL
27272 && DECL_EXTERNAL (tdecl)
27273 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
27274 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
27275 {
27276 /* Creating a full DIE for tdecl is overly expensive and
27277 at this point even wrong when in the LTO phase
27278 as it can end up generating new type DIEs we didn't
27279 output and thus optimize_external_refs will crash. */
27280 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
27281 add_AT_flag (tdie, DW_AT_external, 1);
27282 add_AT_flag (tdie, DW_AT_declaration, 1);
27283 add_linkage_attr (tdie, tdecl);
27284 add_name_and_src_coords_attributes (tdie, tdecl);
27285 equate_decl_number_to_die (tdecl, tdie);
27286 }
27287 if (tdie)
27288 {
27289 a->dw_attr_val.val_class = dw_val_class_die_ref;
27290 a->dw_attr_val.v.val_die_ref.die = tdie;
27291 a->dw_attr_val.v.val_die_ref.external = 0;
27292 }
27293 else
27294 {
27295 if (AT_index (a) != NOT_INDEXED)
27296 remove_addr_table_entry (a->dw_attr_val.val_entry);
27297 remove_AT (die, a->dw_attr);
27298 ix--;
27299 }
27300 }
27301 break;
27302 default:
27303 break;
27304 }
27305
27306 if (remove_AT_byte_size)
27307 remove_AT (die, dwarf_version >= 5
27308 ? DW_AT_string_length_byte_size
27309 : DW_AT_byte_size);
27310
27311 FOR_EACH_CHILD (die, c, resolve_addr (c));
27312 }
27313 \f
27314 /* Helper routines for optimize_location_lists.
27315 This pass tries to share identical local lists in .debug_loc
27316 section. */
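
/* For instance (a sketch): two locals that live in the same register
   over the same address ranges produce byte-identical location lists.
   hash_loc_list gives both lists the same hash, the comparison helpers
   below confirm they are equal, and optimize_location_lists then lets
   both DW_AT_location attributes share a single .debug_loc entry
   instead of emitting the bytes twice.  */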
27317
27318 /* Iteratively hash operands of LOC opcode into HSTATE. */
27319
27320 static void
27321 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
27322 {
27323 dw_val_ref val1 = &loc->dw_loc_oprnd1;
27324 dw_val_ref val2 = &loc->dw_loc_oprnd2;
27325
27326 switch (loc->dw_loc_opc)
27327 {
27328 case DW_OP_const4u:
27329 case DW_OP_const8u:
27330 if (loc->dtprel)
27331 goto hash_addr;
27332 /* FALLTHRU */
27333 case DW_OP_const1u:
27334 case DW_OP_const1s:
27335 case DW_OP_const2u:
27336 case DW_OP_const2s:
27337 case DW_OP_const4s:
27338 case DW_OP_const8s:
27339 case DW_OP_constu:
27340 case DW_OP_consts:
27341 case DW_OP_pick:
27342 case DW_OP_plus_uconst:
27343 case DW_OP_breg0:
27344 case DW_OP_breg1:
27345 case DW_OP_breg2:
27346 case DW_OP_breg3:
27347 case DW_OP_breg4:
27348 case DW_OP_breg5:
27349 case DW_OP_breg6:
27350 case DW_OP_breg7:
27351 case DW_OP_breg8:
27352 case DW_OP_breg9:
27353 case DW_OP_breg10:
27354 case DW_OP_breg11:
27355 case DW_OP_breg12:
27356 case DW_OP_breg13:
27357 case DW_OP_breg14:
27358 case DW_OP_breg15:
27359 case DW_OP_breg16:
27360 case DW_OP_breg17:
27361 case DW_OP_breg18:
27362 case DW_OP_breg19:
27363 case DW_OP_breg20:
27364 case DW_OP_breg21:
27365 case DW_OP_breg22:
27366 case DW_OP_breg23:
27367 case DW_OP_breg24:
27368 case DW_OP_breg25:
27369 case DW_OP_breg26:
27370 case DW_OP_breg27:
27371 case DW_OP_breg28:
27372 case DW_OP_breg29:
27373 case DW_OP_breg30:
27374 case DW_OP_breg31:
27375 case DW_OP_regx:
27376 case DW_OP_fbreg:
27377 case DW_OP_piece:
27378 case DW_OP_deref_size:
27379 case DW_OP_xderef_size:
27380 hstate.add_object (val1->v.val_int);
27381 break;
27382 case DW_OP_skip:
27383 case DW_OP_bra:
27384 {
27385 int offset;
27386
27387 gcc_assert (val1->val_class == dw_val_class_loc);
27388 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
27389 hstate.add_object (offset);
27390 }
27391 break;
27392 case DW_OP_implicit_value:
27393 hstate.add_object (val1->v.val_unsigned);
27394 switch (val2->val_class)
27395 {
27396 case dw_val_class_const:
27397 hstate.add_object (val2->v.val_int);
27398 break;
27399 case dw_val_class_vec:
27400 {
27401 unsigned int elt_size = val2->v.val_vec.elt_size;
27402 unsigned int len = val2->v.val_vec.length;
27403
27404 hstate.add_int (elt_size);
27405 hstate.add_int (len);
27406 hstate.add (val2->v.val_vec.array, len * elt_size);
27407 }
27408 break;
27409 case dw_val_class_const_double:
27410 hstate.add_object (val2->v.val_double.low);
27411 hstate.add_object (val2->v.val_double.high);
27412 break;
27413 case dw_val_class_wide_int:
27414 hstate.add (val2->v.val_wide->get_val (),
27415 get_full_len (*val2->v.val_wide)
27416 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
27417 break;
27418 case dw_val_class_addr:
27419 inchash::add_rtx (val2->v.val_addr, hstate);
27420 break;
27421 default:
27422 gcc_unreachable ();
27423 }
27424 break;
27425 case DW_OP_bregx:
27426 case DW_OP_bit_piece:
27427 hstate.add_object (val1->v.val_int);
27428 hstate.add_object (val2->v.val_int);
27429 break;
27430 case DW_OP_addr:
27431 hash_addr:
27432 if (loc->dtprel)
27433 {
27434 unsigned char dtprel = 0xd1;
27435 hstate.add_object (dtprel);
27436 }
27437 inchash::add_rtx (val1->v.val_addr, hstate);
27438 break;
27439 case DW_OP_GNU_addr_index:
27440 case DW_OP_GNU_const_index:
27441 {
27442 if (loc->dtprel)
27443 {
27444 unsigned char dtprel = 0xd1;
27445 hstate.add_object (dtprel);
27446 }
27447 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
27448 }
27449 break;
27450 case DW_OP_GNU_implicit_pointer:
27451 hstate.add_int (val2->v.val_int);
27452 break;
27453 case DW_OP_GNU_entry_value:
27454 hstate.add_object (val1->v.val_loc);
27455 break;
27456 case DW_OP_GNU_regval_type:
27457 case DW_OP_GNU_deref_type:
27458 {
27459 unsigned int byte_size
27460 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
27461 unsigned int encoding
27462 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
27463 hstate.add_object (val1->v.val_int);
27464 hstate.add_object (byte_size);
27465 hstate.add_object (encoding);
27466 }
27467 break;
27468 case DW_OP_GNU_convert:
27469 case DW_OP_GNU_reinterpret:
27470 if (val1->val_class == dw_val_class_unsigned_const)
27471 {
27472 hstate.add_object (val1->v.val_unsigned);
27473 break;
27474 }
27475 /* FALLTHRU */
27476 case DW_OP_GNU_const_type:
27477 {
27478 unsigned int byte_size
27479 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
27480 unsigned int encoding
27481 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
27482 hstate.add_object (byte_size);
27483 hstate.add_object (encoding);
27484 if (loc->dw_loc_opc != DW_OP_GNU_const_type)
27485 break;
27486 hstate.add_object (val2->val_class);
27487 switch (val2->val_class)
27488 {
27489 case dw_val_class_const:
27490 hstate.add_object (val2->v.val_int);
27491 break;
27492 case dw_val_class_vec:
27493 {
27494 unsigned int elt_size = val2->v.val_vec.elt_size;
27495 unsigned int len = val2->v.val_vec.length;
27496
27497 hstate.add_object (elt_size);
27498 hstate.add_object (len);
27499 hstate.add (val2->v.val_vec.array, len * elt_size);
27500 }
27501 break;
27502 case dw_val_class_const_double:
27503 hstate.add_object (val2->v.val_double.low);
27504 hstate.add_object (val2->v.val_double.high);
27505 break;
27506 case dw_val_class_wide_int:
27507 hstate.add (val2->v.val_wide->get_val (),
27508 get_full_len (*val2->v.val_wide)
27509 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
27510 break;
27511 default:
27512 gcc_unreachable ();
27513 }
27514 }
27515 break;
27516
27517 default:
27518 /* Other codes have no operands. */
27519 break;
27520 }
27521 }
27522
27523 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
27524
27525 static inline void
27526 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
27527 {
27528 dw_loc_descr_ref l;
27529 bool sizes_computed = false;
27530 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
27531 size_of_locs (loc);
27532
27533 for (l = loc; l != NULL; l = l->dw_loc_next)
27534 {
27535 enum dwarf_location_atom opc = l->dw_loc_opc;
27536 hstate.add_object (opc);
27537 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
27538 {
27539 size_of_locs (loc);
27540 sizes_computed = true;
27541 }
27542 hash_loc_operands (l, hstate);
27543 }
27544 }
27545
27546 /* Compute hash of the whole location list LIST_HEAD. */
27547
27548 static inline void
27549 hash_loc_list (dw_loc_list_ref list_head)
27550 {
27551 dw_loc_list_ref curr = list_head;
27552 inchash::hash hstate;
27553
27554 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
27555 {
27556 hstate.add (curr->begin, strlen (curr->begin) + 1);
27557 hstate.add (curr->end, strlen (curr->end) + 1);
27558 if (curr->section)
27559 hstate.add (curr->section, strlen (curr->section) + 1);
27560 hash_locs (curr->expr, hstate);
27561 }
27562 list_head->hash = hstate.end ();
27563 }
27564
27565 /* Return true if X and Y opcodes have the same operands. */
27566
27567 static inline bool
27568 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
27569 {
27570 dw_val_ref valx1 = &x->dw_loc_oprnd1;
27571 dw_val_ref valx2 = &x->dw_loc_oprnd2;
27572 dw_val_ref valy1 = &y->dw_loc_oprnd1;
27573 dw_val_ref valy2 = &y->dw_loc_oprnd2;
27574
27575 switch (x->dw_loc_opc)
27576 {
27577 case DW_OP_const4u:
27578 case DW_OP_const8u:
27579 if (x->dtprel)
27580 goto hash_addr;
27581 /* FALLTHRU */
27582 case DW_OP_const1u:
27583 case DW_OP_const1s:
27584 case DW_OP_const2u:
27585 case DW_OP_const2s:
27586 case DW_OP_const4s:
27587 case DW_OP_const8s:
27588 case DW_OP_constu:
27589 case DW_OP_consts:
27590 case DW_OP_pick:
27591 case DW_OP_plus_uconst:
27592 case DW_OP_breg0:
27593 case DW_OP_breg1:
27594 case DW_OP_breg2:
27595 case DW_OP_breg3:
27596 case DW_OP_breg4:
27597 case DW_OP_breg5:
27598 case DW_OP_breg6:
27599 case DW_OP_breg7:
27600 case DW_OP_breg8:
27601 case DW_OP_breg9:
27602 case DW_OP_breg10:
27603 case DW_OP_breg11:
27604 case DW_OP_breg12:
27605 case DW_OP_breg13:
27606 case DW_OP_breg14:
27607 case DW_OP_breg15:
27608 case DW_OP_breg16:
27609 case DW_OP_breg17:
27610 case DW_OP_breg18:
27611 case DW_OP_breg19:
27612 case DW_OP_breg20:
27613 case DW_OP_breg21:
27614 case DW_OP_breg22:
27615 case DW_OP_breg23:
27616 case DW_OP_breg24:
27617 case DW_OP_breg25:
27618 case DW_OP_breg26:
27619 case DW_OP_breg27:
27620 case DW_OP_breg28:
27621 case DW_OP_breg29:
27622 case DW_OP_breg30:
27623 case DW_OP_breg31:
27624 case DW_OP_regx:
27625 case DW_OP_fbreg:
27626 case DW_OP_piece:
27627 case DW_OP_deref_size:
27628 case DW_OP_xderef_size:
27629 return valx1->v.val_int == valy1->v.val_int;
27630 case DW_OP_skip:
27631 case DW_OP_bra:
27632 /* If splitting debug info, the use of DW_OP_GNU_addr_index
27633 can cause irrelevant differences in dw_loc_addr. */
27634 gcc_assert (valx1->val_class == dw_val_class_loc
27635 && valy1->val_class == dw_val_class_loc
27636 && (dwarf_split_debug_info
27637 || x->dw_loc_addr == y->dw_loc_addr));
27638 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
27639 case DW_OP_implicit_value:
27640 if (valx1->v.val_unsigned != valy1->v.val_unsigned
27641 || valx2->val_class != valy2->val_class)
27642 return false;
27643 switch (valx2->val_class)
27644 {
27645 case dw_val_class_const:
27646 return valx2->v.val_int == valy2->v.val_int;
27647 case dw_val_class_vec:
27648 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27649 && valx2->v.val_vec.length == valy2->v.val_vec.length
27650 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27651 valx2->v.val_vec.elt_size
27652 * valx2->v.val_vec.length) == 0;
27653 case dw_val_class_const_double:
27654 return valx2->v.val_double.low == valy2->v.val_double.low
27655 && valx2->v.val_double.high == valy2->v.val_double.high;
27656 case dw_val_class_wide_int:
27657 return *valx2->v.val_wide == *valy2->v.val_wide;
27658 case dw_val_class_addr:
27659 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
27660 default:
27661 gcc_unreachable ();
27662 }
27663 case DW_OP_bregx:
27664 case DW_OP_bit_piece:
27665 return valx1->v.val_int == valy1->v.val_int
27666 && valx2->v.val_int == valy2->v.val_int;
27667 case DW_OP_addr:
27668 hash_addr:
27669 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
27670 case DW_OP_GNU_addr_index:
27671 case DW_OP_GNU_const_index:
27672 {
27673 rtx ax1 = valx1->val_entry->addr.rtl;
27674 rtx ay1 = valy1->val_entry->addr.rtl;
27675 return rtx_equal_p (ax1, ay1);
27676 }
27677 case DW_OP_GNU_implicit_pointer:
27678 return valx1->val_class == dw_val_class_die_ref
27679 && valx1->val_class == valy1->val_class
27680 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
27681 && valx2->v.val_int == valy2->v.val_int;
27682 case DW_OP_GNU_entry_value:
27683 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
27684 case DW_OP_GNU_const_type:
27685 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
27686 || valx2->val_class != valy2->val_class)
27687 return false;
27688 switch (valx2->val_class)
27689 {
27690 case dw_val_class_const:
27691 return valx2->v.val_int == valy2->v.val_int;
27692 case dw_val_class_vec:
27693 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27694 && valx2->v.val_vec.length == valy2->v.val_vec.length
27695 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27696 valx2->v.val_vec.elt_size
27697 * valx2->v.val_vec.length) == 0;
27698 case dw_val_class_const_double:
27699 return valx2->v.val_double.low == valy2->v.val_double.low
27700 && valx2->v.val_double.high == valy2->v.val_double.high;
27701 case dw_val_class_wide_int:
27702 return *valx2->v.val_wide == *valy2->v.val_wide;
27703 default:
27704 gcc_unreachable ();
27705 }
27706 case DW_OP_GNU_regval_type:
27707 case DW_OP_GNU_deref_type:
27708 return valx1->v.val_int == valy1->v.val_int
27709 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
27710 case DW_OP_GNU_convert:
27711 case DW_OP_GNU_reinterpret:
27712 if (valx1->val_class != valy1->val_class)
27713 return false;
27714 if (valx1->val_class == dw_val_class_unsigned_const)
27715 return valx1->v.val_unsigned == valy1->v.val_unsigned;
27716 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27717 case DW_OP_GNU_parameter_ref:
27718 return valx1->val_class == dw_val_class_die_ref
27719 && valx1->val_class == valy1->val_class
27720 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27721 default:
27722 /* Other codes have no operands. */
27723 return true;
27724 }
27725 }
27726
27727 /* Return true if DWARF location expressions X and Y are the same. */
27728
27729 static inline bool
27730 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
27731 {
27732 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
27733 if (x->dw_loc_opc != y->dw_loc_opc
27734 || x->dtprel != y->dtprel
27735 || !compare_loc_operands (x, y))
27736 break;
27737 return x == NULL && y == NULL;
27738 }
27739
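/* A sketch (hypothetical helper, compiled out) of the contract that
   compare_locs and hash_locs must maintain for the hash table defined
   below: expressions that compare equal must also hash to the same
   value, otherwise duplicate location lists would not be shared.  */
#if 0
static bool
locs_hash_consistent_p (dw_loc_descr_ref x, dw_loc_descr_ref y)
{
  if (!compare_locs (x, y))
    return true;  /* Unequal expressions may hash however they like.  */

  inchash::hash hx, hy;
  hash_locs (x, hx);
  hash_locs (y, hy);
  return hx.end () == hy.end ();
}
#endif
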
27740 /* Hashtable helpers. */
27741
27742 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
27743 {
27744 static inline hashval_t hash (const dw_loc_list_struct *);
27745 static inline bool equal (const dw_loc_list_struct *,
27746 const dw_loc_list_struct *);
27747 };
27748
27749 /* Return precomputed hash of location list X. */
27750
27751 inline hashval_t
27752 loc_list_hasher::hash (const dw_loc_list_struct *x)
27753 {
27754 return x->hash;
27755 }
27756
27757 /* Return true if location lists A and B are the same. */
27758
27759 inline bool
27760 loc_list_hasher::equal (const dw_loc_list_struct *a,
27761 const dw_loc_list_struct *b)
27762 {
27763 if (a == b)
27764 return 1;
27765 if (a->hash != b->hash)
27766 return 0;
27767 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
27768 if (strcmp (a->begin, b->begin) != 0
27769 || strcmp (a->end, b->end) != 0
27770 || (a->section == NULL) != (b->section == NULL)
27771 || (a->section && strcmp (a->section, b->section) != 0)
27772 || !compare_locs (a->expr, b->expr))
27773 break;
27774 return a == NULL && b == NULL;
27775 }
27776
27777 typedef hash_table<loc_list_hasher> loc_list_hash_type;
27778
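/* loc_list_hasher and loc_list_hash_type above follow GCC's generic
   hashtable-helpers pattern from hash-table.h / hash-traits.h: derive a
   descriptor from nofree_ptr_hash, provide static hash () and equal (),
   and instantiate hash_table<> with it.  A minimal sketch of the same
   pattern with a made-up entry type (hypothetical, compiled out):  */
#if 0
struct example_entry
{
  const char *key;
  hashval_t hash;
};

struct example_hasher : nofree_ptr_hash <example_entry>
{
  static inline hashval_t hash (const example_entry *e) { return e->hash; }
  static inline bool equal (const example_entry *a, const example_entry *b)
  { return strcmp (a->key, b->key) == 0; }
};

/* Find-or-insert idiom, as optimize_location_lists_1 below uses it.  */
static example_entry *
intern_example (hash_table<example_hasher> *tab, example_entry *e)
{
  example_entry **slot = tab->find_slot_with_hash (e, e->hash, INSERT);
  if (*slot == NULL)
    *slot = e;
  return *slot;
}
#endif
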
27779
27780 /* Recursively optimize location lists referenced from DIE
27781 children and share them whenever possible. */
27782
27783 static void
27784 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
27785 {
27786 dw_die_ref c;
27787 dw_attr_node *a;
27788 unsigned ix;
27789 dw_loc_list_struct **slot;
27790
27791 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27792 if (AT_class (a) == dw_val_class_loc_list)
27793 {
27794 dw_loc_list_ref list = AT_loc_list (a);
27795 /* TODO: perform some optimizations here, before hashing
27796 it and storing into the hash table. */
27797 hash_loc_list (list);
27798 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
27799 if (*slot == NULL)
27800 *slot = list;
27801 else
27802 a->dw_attr_val.v.val_loc_list = *slot;
27803 }
27804
27805 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
27806 }
27807
27808
27809 /* Recursively assign each location list a unique index into the debug_addr
27810 section. */
27811
27812 static void
27813 index_location_lists (dw_die_ref die)
27814 {
27815 dw_die_ref c;
27816 dw_attr_node *a;
27817 unsigned ix;
27818
27819 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27820 if (AT_class (a) == dw_val_class_loc_list)
27821 {
27822 dw_loc_list_ref list = AT_loc_list (a);
27823 dw_loc_list_ref curr;
27824 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
27825 {
27826 /* Don't index an entry that has already been indexed
27827 or won't be output. */
27828 if (curr->begin_entry != NULL
27829 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
27830 continue;
27831
27832 curr->begin_entry
27833 = add_addr_table_entry (xstrdup (curr->begin),
27834 ate_kind_label);
27835 }
27836 }
27837
27838 FOR_EACH_CHILD (die, c, index_location_lists (c));
27839 }
27840
27841 /* Optimize location lists referenced from DIE
27842 children and share them whenever possible. */
27843
27844 static void
27845 optimize_location_lists (dw_die_ref die)
27846 {
27847 loc_list_hash_type htab (500);
27848 optimize_location_lists_1 (die, &htab);
27849 }
27850 \f
27851 /* Traverse the limbo die list, and add parent/child links. The only
27852 dies without parents that should be here are concrete instances of
27853 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
27854 For concrete instances, we can get the parent die from the abstract
27855 instance. */
27856
27857 static void
27858 flush_limbo_die_list (void)
27859 {
27860 limbo_die_node *node;
27861
27862 /* get_context_die calls force_decl_die, which can put new DIEs on the
27863 limbo list in LTO mode when nested functions are put in a different
27864 partition than that of their parent function. */
27865 while ((node = limbo_die_list))
27866 {
27867 dw_die_ref die = node->die;
27868 limbo_die_list = node->next;
27869
27870 if (die->die_parent == NULL)
27871 {
27872 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
27873
27874 if (origin && origin->die_parent)
27875 add_child_die (origin->die_parent, die);
27876 else if (is_cu_die (die))
27877 ;
27878 else if (seen_error ())
27879 /* It's OK to be confused by errors in the input. */
27880 add_child_die (comp_unit_die (), die);
27881 else
27882 {
27883 /* In certain situations, the lexical block containing a
27884 nested function can be optimized away, which results
27885 in the nested function die being orphaned. Likewise
27886 with the return type of that nested function. Force
27887 this to be a child of the containing function.
27888
27889 It may happen that even the containing function got fully
27890 inlined and optimized out. In that case we are lost and
27891 simply pick the best parent DIE we can find. This should not be
27892 a big issue, as the function is likely unreachable too. */
27893 gcc_assert (node->created_for);
27894
27895 if (DECL_P (node->created_for))
27896 origin = get_context_die (DECL_CONTEXT (node->created_for));
27897 else if (TYPE_P (node->created_for))
27898 origin = scope_die_for (node->created_for, comp_unit_die ());
27899 else
27900 origin = comp_unit_die ();
27901
27902 add_child_die (origin, die);
27903 }
27904 }
27905 }
27906 }
27907
27908 /* Output stuff that dwarf requires at the end of every file,
27909 and generate the DWARF-2 debugging info. */
27910
27911 static void
27912 dwarf2out_finish (const char *)
27913 {
27914 comdat_type_node *ctnode;
27915 dw_die_ref main_comp_unit_die;
27916
27917 /* Flush out any latecomers to the limbo party. */
27918 flush_limbo_die_list ();
27919
27920 if (flag_checking)
27921 {
27922 verify_die (comp_unit_die ());
27923 for (limbo_die_node *node = cu_die_list; node; node = node->next)
27924 verify_die (node->die);
27925 }
27926
27927 /* We shouldn't have any symbols with delayed asm names for
27928 DIEs generated after early finish. */
27929 gcc_assert (deferred_asm_name == NULL);
27930
27931 gen_remaining_tmpl_value_param_die_attribute ();
27932
27933 #if ENABLE_ASSERT_CHECKING
27934 {
27935 dw_die_ref die = comp_unit_die (), c;
27936 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
27937 }
27938 #endif
27939 resolve_addr (comp_unit_die ());
27940 move_marked_base_types ();
27941
27942 /* Initialize sections and labels used for actual assembler output. */
27943 init_sections_and_labels ();
27944
27945 /* Traverse the DIE's and add sibling attributes to those DIE's that
27946 have children. */
27947 add_sibling_attributes (comp_unit_die ());
27948 limbo_die_node *node;
27949 for (node = cu_die_list; node; node = node->next)
27950 add_sibling_attributes (node->die);
27951 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27952 add_sibling_attributes (ctnode->root_die);
27953
27954 /* When splitting DWARF info, we put some attributes in the
27955 skeleton compile_unit DIE that remains in the .o, while
27956 most attributes go in the DWO compile_unit_die. */
27957 if (dwarf_split_debug_info)
27958 {
27959 limbo_die_node *cu;
27960 main_comp_unit_die = gen_compile_unit_die (NULL);
27961 cu = limbo_die_list;
27962 gcc_assert (cu->die == main_comp_unit_die);
27963 limbo_die_list = limbo_die_list->next;
27964 cu->next = cu_die_list;
27965 cu_die_list = cu;
27966 }
27967 else
27968 main_comp_unit_die = comp_unit_die ();
27969
27970 /* Output a terminator label for the .text section. */
27971 switch_to_section (text_section);
27972 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
27973 if (cold_text_section)
27974 {
27975 switch_to_section (cold_text_section);
27976 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
27977 }
27978
27979 /* We can only use the low/high_pc attributes if all of the code was
27980 in .text. */
27981 if (!have_multiple_function_sections
27982 || (dwarf_version < 3 && dwarf_strict))
27983 {
27984 /* Don't add if the CU has no associated code. */
27985 if (text_section_used)
27986 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
27987 text_end_label, true);
27988 }
27989 else
27990 {
27991 unsigned fde_idx;
27992 dw_fde_ref fde;
27993 bool range_list_added = false;
27994
27995 if (text_section_used)
27996 add_ranges_by_labels (main_comp_unit_die, text_section_label,
27997 text_end_label, &range_list_added, true);
27998 if (cold_text_section_used)
27999 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
28000 cold_end_label, &range_list_added, true);
28001
28002 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
28003 {
28004 if (DECL_IGNORED_P (fde->decl))
28005 continue;
28006 if (!fde->in_std_section)
28007 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
28008 fde->dw_fde_end, &range_list_added,
28009 true);
28010 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
28011 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
28012 fde->dw_fde_second_end, &range_list_added,
28013 true);
28014 }
28015
28016 if (range_list_added)
28017 {
28018 /* We need to give .debug_loc and .debug_ranges an appropriate
28019 "base address". Use zero so that these addresses become
28020 absolute. Historically, we've emitted the unexpected
28021 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
28022 Emit both to give time for other tools to adapt. */
28023 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
28024 if (! dwarf_strict && dwarf_version < 4)
28025 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
28026
28027 add_ranges (NULL);
28028 }
28029 }
28030
28031 if (debug_info_level >= DINFO_LEVEL_TERSE)
28032 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
28033 debug_line_section_label);
28034
28035 if (have_macinfo)
28036 add_AT_macptr (comp_unit_die (),
28037 dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros,
28038 macinfo_section_label);
28039
28040 if (dwarf_split_debug_info)
28041 {
28042 /* optimize_location_lists calculates the size of the lists,
28043 so index them first, and assign indices to the entries.
28044 Although optimize_location_lists will remove entries from
28045 the table, it only does so for duplicates, and therefore
28046 only reduces ref_counts to 1. */
28047 index_location_lists (comp_unit_die ());
28048
28049 if (addr_index_table != NULL)
28050 {
28051 unsigned int index = 0;
28052 addr_index_table
28053 ->traverse_noresize<unsigned int *, index_addr_table_entry>
28054 (&index);
28055 }
28056 }
28057
28058 if (have_location_lists)
28059 optimize_location_lists (comp_unit_die ());
28060
28061 save_macinfo_strings ();
28062
28063 if (dwarf_split_debug_info)
28064 {
28065 unsigned int index = 0;
28066
28067 /* Add attributes common to skeleton compile_units and
28068 type_units. Because these attributes include strings, this
28069 must be done before freezing the string table. Top-level
28070 skeleton die attrs are added when the skeleton type unit is
28071 created, so ensure it is created by this point. */
28072 add_top_level_skeleton_die_attrs (main_comp_unit_die);
28073 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
28074 }
28075
28076 /* Output all of the compilation units. We put the main one last so that
28077 the offsets are available to output_pubnames. */
28078 for (node = cu_die_list; node; node = node->next)
28079 output_comp_unit (node->die, 0);
28080
28081 hash_table<comdat_type_hasher> comdat_type_table (100);
28082 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
28083 {
28084 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
28085
28086 /* Don't output duplicate types. */
28087 if (*slot != HTAB_EMPTY_ENTRY)
28088 continue;
28089
28090 /* Add a pointer to the line table for the main compilation unit
28091 so that the debugger can make sense of DW_AT_decl_file
28092 attributes. */
28093 if (debug_info_level >= DINFO_LEVEL_TERSE)
28094 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
28095 (!dwarf_split_debug_info
28096 ? debug_line_section_label
28097 : debug_skeleton_line_section_label));
28098
28099 output_comdat_type_unit (ctnode);
28100 *slot = ctnode;
28101 }
28102
28103 /* The AT_pubnames attribute needs to go in all skeleton dies, including
28104 both the main_cu and all skeleton TUs. Making this call unconditional
28105 would end up either adding a second copy of the AT_pubnames attribute, or
28106 requiring a special case in add_top_level_skeleton_die_attrs. */
28107 if (!dwarf_split_debug_info)
28108 add_AT_pubnames (comp_unit_die ());
28109
28110 if (dwarf_split_debug_info)
28111 {
28112 int mark;
28113 unsigned char checksum[16];
28114 struct md5_ctx ctx;
28115
28116 /* Compute a checksum of the comp_unit to use as the dwo_id. */
28117 md5_init_ctx (&ctx);
28118 mark = 0;
28119 die_checksum (comp_unit_die (), &ctx, &mark);
28120 unmark_all_dies (comp_unit_die ());
28121 md5_finish_ctx (&ctx, checksum);
28122
28123 /* Use the first 8 bytes of the checksum as the dwo_id,
28124 and add it to both comp-unit DIEs. */
28125 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
28126 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
28127
28128 /* Add the base offset of the ranges table to the skeleton
28129 comp-unit DIE. */
28130 if (ranges_table_in_use)
28131 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
28132 ranges_section_label);
28133
28134 switch_to_section (debug_addr_section);
28135 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
28136 output_addr_table ();
28137 }
28138
28139 /* Output the main compilation unit if non-empty or if .debug_macinfo
28140 or .debug_macro will be emitted. */
28141 output_comp_unit (comp_unit_die (), have_macinfo);
28142
28143 if (dwarf_split_debug_info && info_section_emitted)
28144 output_skeleton_debug_sections (main_comp_unit_die);
28145
28146 /* Output the abbreviation table. */
28147 if (abbrev_die_table_in_use != 1)
28148 {
28149 switch_to_section (debug_abbrev_section);
28150 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
28151 output_abbrev_section ();
28152 }
28153
28154 /* Output location list section if necessary. */
28155 if (have_location_lists)
28156 {
28157 /* Output the location lists info. */
28158 switch_to_section (debug_loc_section);
28159 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
28160 output_location_lists (comp_unit_die ());
28161 }
28162
28163 output_pubtables ();
28164
28165 /* Output the address range information if a CU (.debug_info section)
28166 was emitted. We output an empty table even if we had no functions
28167 to put in it. This is because the consumer has no way to tell the
28168 difference between an empty table that we omitted and failure to
28169 generate a table that would have contained data. */
28170 if (info_section_emitted)
28171 {
28172 switch_to_section (debug_aranges_section);
28173 output_aranges ();
28174 }
28175
28176 /* Output ranges section if necessary. */
28177 if (ranges_table_in_use)
28178 {
28179 switch_to_section (debug_ranges_section);
28180 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
28181 output_ranges ();
28182 }
28183
28184 /* Have to end the macro section. */
28185 if (have_macinfo)
28186 {
28187 switch_to_section (debug_macinfo_section);
28188 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
28189 output_macinfo ();
28190 dw2_asm_output_data (1, 0, "End compilation unit");
28191 }
28192
28193 /* Output the source line correspondence table. We must do this
28194 even if there is no line information. Otherwise, on an empty
28195 translation unit, we will generate a present, but empty,
28196 .debug_info section. IRIX 6.5 `nm' will then complain when
28197 examining the file. This is done late so that any filenames
28198 used by the debug_info section are marked as 'used'. */
28199 switch_to_section (debug_line_section);
28200 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
28201 if (! DWARF2_ASM_LINE_DEBUG_INFO)
28202 output_line_info (false);
28203
28204 if (dwarf_split_debug_info && info_section_emitted)
28205 {
28206 switch_to_section (debug_skeleton_line_section);
28207 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
28208 output_line_info (true);
28209 }
28210
28211 /* If we emitted any indirect strings, output the string table too. */
28212 if (debug_str_hash || skeleton_debug_str_hash)
28213 output_indirect_strings ();
28214 }
28215
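/* A standalone sketch (hypothetical helper, compiled out) of the dwo_id
   derivation performed in dwarf2out_finish above for split DWARF: the
   compilation unit is checksummed with MD5 (die_checksum feeds the DIE
   tree into the context) and only the first 8 bytes of the digest are
   kept, since DW_AT_GNU_dwo_id is emitted as a data8 attribute.  */
#if 0
static void
dwo_id_from_bytes_sketch (const unsigned char *unit_bytes, size_t len,
                          unsigned char dwo_id[8])
{
  struct md5_ctx ctx;
  unsigned char checksum[16];

  md5_init_ctx (&ctx);
  md5_process_bytes (unit_bytes, len, &ctx);
  md5_finish_ctx (&ctx, checksum);

  /* Keep only the leading 8 bytes; add_AT_data8 copies exactly 8.  */
  memcpy (dwo_id, checksum, 8);
}
#endif
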
28216 /* Perform any cleanups needed after the early debug generation pass
28217 has run. */
28218
28219 static void
28220 dwarf2out_early_finish (const char *filename)
28221 {
28222 set_early_dwarf s;
28223
28224 /* PCH might result in the DW_AT_producer string being restored from the
28225 header compilation, so always fill it with an empty string initially
28226 and overwrite it only here. */
28227 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
28228 producer_string = gen_producer_string ();
28229 producer->dw_attr_val.v.val_str->refcount--;
28230 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
28231
28232 /* Add the name for the main input file now. We delayed this from
28233 dwarf2out_init to avoid complications with PCH. */
28234 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
28235 if (!IS_ABSOLUTE_PATH (filename) || targetm.force_at_comp_dir)
28236 add_comp_dir_attribute (comp_unit_die ());
28237 else if (get_AT (comp_unit_die (), DW_AT_comp_dir) == NULL)
28238 {
28239 bool p = false;
28240 file_table->traverse<bool *, file_table_relative_p> (&p);
28241 if (p)
28242 add_comp_dir_attribute (comp_unit_die ());
28243 }
28244
28245 /* With LTO, early dwarf was really finished at compile time, so make
28246 sure to adjust the phase after annotating the LTRANS CU DIE. */
28247 if (in_lto_p)
28248 {
28249 early_dwarf_finished = true;
28250 return;
28251 }
28252
28253 /* Walk through the list of incomplete types again, trying once more to
28254 emit full debugging info for them. */
28255 retry_incomplete_types ();
28256
28257 /* The point here is to flush out the limbo list so that it is empty
28258 and we don't need to stream it for LTO. */
28259 flush_limbo_die_list ();
28260
28261 gen_scheduled_generic_parms_dies ();
28262 gen_remaining_tmpl_value_param_die_attribute ();
28263
28264 /* Add DW_AT_linkage_name for all deferred DIEs. */
28265 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
28266 {
28267 tree decl = node->created_for;
28268 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
28269 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
28270 ended up in deferred_asm_name before we knew it was
28271 constant and never written to disk. */
28272 && DECL_ASSEMBLER_NAME (decl))
28273 {
28274 add_linkage_attr (node->die, decl);
28275 move_linkage_attr (node->die);
28276 }
28277 }
28278 deferred_asm_name = NULL;
28279
28280 if (flag_eliminate_unused_debug_types)
28281 prune_unused_types ();
28282
28283 /* Generate separate COMDAT sections for type DIEs. */
28284 if (use_debug_types)
28285 {
28286 break_out_comdat_types (comp_unit_die ());
28287
28288 /* Each new type_unit DIE was added to the limbo die list when created.
28289 Since these have all been added to comdat_type_list, clear the
28290 limbo die list. */
28291 limbo_die_list = NULL;
28292
28293 /* For each new comdat type unit, copy declarations for incomplete
28294 types to make the new unit self-contained (i.e., no direct
28295 references to the main compile unit). */
28296 for (comdat_type_node *ctnode = comdat_type_list;
28297 ctnode != NULL; ctnode = ctnode->next)
28298 copy_decls_for_unworthy_types (ctnode->root_die);
28299 copy_decls_for_unworthy_types (comp_unit_die ());
28300
28301 /* In the process of copying declarations from one unit to another,
28302 we may have left some declarations behind that are no longer
28303 referenced. Prune them. */
28304 prune_unused_types ();
28305 }
28306
28307 /* Generate separate CUs for each of the include files we've seen.
28308 They will go into limbo_die_list and from there to cu_die_list. */
28309 if (flag_eliminate_dwarf2_dups)
28310 {
28311 gcc_assert (limbo_die_list == NULL);
28312 break_out_includes (comp_unit_die ());
28313 limbo_die_node *cu;
28314 while ((cu = limbo_die_list))
28315 {
28316 limbo_die_list = cu->next;
28317 cu->next = cu_die_list;
28318 cu_die_list = cu;
28319 }
28320 }
28321
28322 /* The early debug phase is now finished. */
28323 early_dwarf_finished = true;
28324 }
28325
28326 /* Reset all state within dwarf2out.c so that we can rerun the compiler
28327 within the same process. For use by toplev::finalize. */
28328
28329 void
28330 dwarf2out_c_finalize (void)
28331 {
28332 last_var_location_insn = NULL;
28333 cached_next_real_insn = NULL;
28334 used_rtx_array = NULL;
28335 incomplete_types = NULL;
28336 decl_scope_table = NULL;
28337 debug_info_section = NULL;
28338 debug_skeleton_info_section = NULL;
28339 debug_abbrev_section = NULL;
28340 debug_skeleton_abbrev_section = NULL;
28341 debug_aranges_section = NULL;
28342 debug_addr_section = NULL;
28343 debug_macinfo_section = NULL;
28344 debug_line_section = NULL;
28345 debug_skeleton_line_section = NULL;
28346 debug_loc_section = NULL;
28347 debug_pubnames_section = NULL;
28348 debug_pubtypes_section = NULL;
28349 debug_str_section = NULL;
28350 debug_str_dwo_section = NULL;
28351 debug_str_offsets_section = NULL;
28352 debug_ranges_section = NULL;
28353 debug_frame_section = NULL;
28354 fde_vec = NULL;
28355 debug_str_hash = NULL;
28356 skeleton_debug_str_hash = NULL;
28357 dw2_string_counter = 0;
28358 have_multiple_function_sections = false;
28359 text_section_used = false;
28360 cold_text_section_used = false;
28361 cold_text_section = NULL;
28362 current_unit_personality = NULL;
28363
28364 next_die_offset = 0;
28365 single_comp_unit_die = NULL;
28366 comdat_type_list = NULL;
28367 limbo_die_list = NULL;
28368 file_table = NULL;
28369 decl_die_table = NULL;
28370 common_block_die_table = NULL;
28371 decl_loc_table = NULL;
28372 call_arg_locations = NULL;
28373 call_arg_loc_last = NULL;
28374 call_site_count = -1;
28375 tail_call_site_count = -1;
28376 cached_dw_loc_list_table = NULL;
28377 abbrev_die_table = NULL;
28378 abbrev_die_table_allocated = 0;
28379 abbrev_die_table_in_use = 0;
28380 delete dwarf_proc_stack_usage_map;
28381 dwarf_proc_stack_usage_map = NULL;
28382 line_info_label_num = 0;
28383 cur_line_info_table = NULL;
28384 text_section_line_info = NULL;
28385 cold_text_section_line_info = NULL;
28386 separate_line_info = NULL;
28387 info_section_emitted = false;
28388 pubname_table = NULL;
28389 pubtype_table = NULL;
28390 macinfo_table = NULL;
28391 ranges_table = NULL;
28392 ranges_table_allocated = 0;
28393 ranges_table_in_use = 0;
28394 ranges_by_label = 0;
28395 ranges_by_label_allocated = 0;
28396 ranges_by_label_in_use = 0;
28397 have_location_lists = false;
28398 loclabel_num = 0;
28399 poc_label_num = 0;
28400 last_emitted_file = NULL;
28401 label_num = 0;
28402 tmpl_value_parm_die_table = NULL;
28403 generic_type_instances = NULL;
28404 frame_pointer_fb_offset = 0;
28405 frame_pointer_fb_offset_valid = false;
28406 base_types.release ();
28407 XDELETEVEC (producer_string);
28408 producer_string = NULL;
28409 }
28410
28411 #include "gt-dwarf2out.h"