1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and for each physical register a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such a note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
96 #include "alloc-pool.h"
97 #include "tree-pass.h"
100 #include "insn-config.h"
102 #include "emit-rtl.h"
104 #include "diagnostic.h"
106 #include "stor-layout.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
115 #include "tree-pretty-print.h"
116 #include "rtl-iter.h"
117 #include "fibonacci_heap.h"
119 typedef fibonacci_heap
<long, basic_block_def
> bb_heap_t
;
120 typedef fibonacci_node
<long, basic_block_def
> bb_heap_node_t
;
122 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
123 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
124 Currently the value is the same as IDENTIFIER_NODE, which has such
125 a property. If this compile time assertion ever fails, make sure that
126 the new tree code that equals (int) VALUE has the same property. */
127 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
129 /* Type of micro operation. */
130 enum micro_operation_type
132 MO_USE
, /* Use location (REG or MEM). */
133 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
134 or the variable is not trackable. */
135 MO_VAL_USE
, /* Use location which is associated with a value. */
136 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
137 MO_VAL_SET
, /* Set location associated with a value. */
138 MO_SET
, /* Set location. */
139 MO_COPY
, /* Copy the same portion of a variable from one
140 location to another. */
141 MO_CLOBBER
, /* Clobber location. */
142 MO_CALL
, /* Call insn. */
143 MO_ADJUST
/* Adjust stack pointer. */
147 static const char * const ATTRIBUTE_UNUSED
148 micro_operation_type_name
[] = {
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
171 /* Structure holding information about micro operation. */
172 struct micro_operation
174 /* Type of micro operation. */
175 enum micro_operation_type type
;
177 /* The instruction which the micro operation is in, for MO_USE,
178 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
179 instruction or note in the original flow (before any var-tracking
180 notes are inserted, to simplify emission of notes), for MO_SET
185 /* Location. For MO_SET and MO_COPY, this is the SET that
186 performs the assignment, if known, otherwise it is the target
187 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
188 CONCAT of the VALUE and the LOC associated with it. For
189 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
190 associated with it. */
193 /* Stack adjustment. */
194 HOST_WIDE_INT adjust
;
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;
203 /* Return true if a decl_or_value DV is a DECL or NULL. */
205 dv_is_decl_p (decl_or_value dv
)
207 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
210 /* Return true if a decl_or_value is a VALUE rtl. */
212 dv_is_value_p (decl_or_value dv
)
214 return dv
&& !dv_is_decl_p (dv
);
217 /* Return the decl in the decl_or_value. */
219 dv_as_decl (decl_or_value dv
)
221 gcc_checking_assert (dv_is_decl_p (dv
));
225 /* Return the value in the decl_or_value. */
227 dv_as_value (decl_or_value dv
)
229 gcc_checking_assert (dv_is_value_p (dv
));
233 /* Return the opaque pointer in the decl_or_value. */
235 dv_as_opaque (decl_or_value dv
)
241 /* Description of location of a part of a variable. The content of a physical
242 register is described by a chain of these structures.
243 The chains are pretty short (usually 1 or 2 elements) and thus
244 chain is the best data structure. */
247 /* Pointer to next member of the list. */
250 /* The rtx of register. */
253 /* The declaration corresponding to LOC. */
256 /* Offset from start of DECL. */
257 HOST_WIDE_INT offset
;
260 /* Structure for chaining the locations. */
261 struct location_chain
263 /* Next element in the chain. */
264 location_chain
*next
;
266 /* The location (REG, MEM or VALUE). */
269 /* The "value" stored in this location. */
273 enum var_init_status init
;
276 /* A vector of loc_exp_dep holds the active dependencies of a one-part
277 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
278 location of DV. Each entry is also part of VALUE' s linked-list of
279 backlinks back to DV. */
282 /* The dependent DV. */
284 /* The dependency VALUE or DECL_DEBUG. */
286 /* The next entry in VALUE's backlinks list. */
287 struct loc_exp_dep
*next
;
288 /* A pointer to the pointer to this entry (head or prev's next) in
289 the doubly-linked list. */
290 struct loc_exp_dep
**pprev
;
/* This data structure holds information about the depth of a variable
   expansion.  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};
307 /* This data structure is allocated for one-part variables at the time
308 of emitting notes. */
311 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
312 computation used the expansion of this variable, and that ought
313 to be notified should this variable change. If the DV's cur_loc
314 expanded to NULL, all components of the loc list are regarded as
315 active, so that any changes in them give us a chance to get a
316 location. Otherwise, only components of the loc that expanded to
317 non-NULL are regarded as active dependencies. */
318 loc_exp_dep
*backlinks
;
319 /* This holds the LOC that was expanded into cur_loc. We need only
320 mark a one-part variable as changed if the FROM loc is removed,
321 or if it has no known location and a loc is added, or if it gets
322 a change notification from any of its active dependencies. */
324 /* The depth of the cur_loc expression. */
326 /* Dependencies actively used when expand FROM into cur_loc. */
327 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
330 /* Structure describing one part of variable. */
333 /* Chain of locations of the part. */
334 location_chain
*loc_chain
;
336 /* Location which was last emitted to location list. */
341 /* The offset in the variable, if !var->onepart. */
342 HOST_WIDE_INT offset
;
344 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
345 struct onepart_aux
*onepaux
;
349 /* Maximum number of location parts. */
350 #define MAX_VAR_PARTS 16
/* Enumeration type used to discriminate various types of one-part
   variables.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};
366 /* Structure describing where the variable is located. */
369 /* The declaration of the variable, or an RTL value being handled
370 like a declaration. */
373 /* Reference count. */
376 /* Number of variable parts. */
379 /* What type of DV this is, according to enum onepart_enum. */
380 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
382 /* True if this variable_def struct is currently in the
383 changed_variables hash table. */
384 bool in_changed_variables
;
386 /* The variable parts. */
387 variable_part var_part
[1];
390 /* Pointer to the BB's information specific to variable tracking pass. */
391 #define VTI(BB) ((variable_tracking_info *) (BB)->aux)
393 /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't. */
395 static inline HOST_WIDE_INT
396 int_mem_offset (const_rtx mem
)
398 if (MEM_OFFSET_KNOWN_P (mem
))
399 return MEM_OFFSET (mem
);
#if CHECKING_P && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		      \
			      ? VAR_LOC_1PAUX (var)->backlinks	      \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		      \
			       ? &VAR_LOC_1PAUX (var)->backlinks      \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		      \
			      ? &VAR_LOC_1PAUX (var)->deps	      \
			      : NULL)
/* Unique id of a decl_or_value: DECL_UID for decls, cselib uid for VALUEs.  */
typedef unsigned int dvuid;
443 /* Return the uid of DV. */
446 dv_uid (decl_or_value dv
)
448 if (dv_is_value_p (dv
))
449 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
451 return DECL_UID (dv_as_decl (dv
));
454 /* Compute the hash from the uid. */
456 static inline hashval_t
457 dv_uid2hash (dvuid uid
)
462 /* The hash function for a mask table in a shared_htab chain. */
464 static inline hashval_t
465 dv_htab_hash (decl_or_value dv
)
467 return dv_uid2hash (dv_uid (dv
));
470 static void variable_htab_free (void *);
472 /* Variable hashtable helpers. */
474 struct variable_hasher
: pointer_hash
<variable
>
476 typedef void *compare_type
;
477 static inline hashval_t
hash (const variable
*);
478 static inline bool equal (const variable
*, const void *);
479 static inline void remove (variable
*);
482 /* The hash function for variable_htab, computes the hash value
483 from the declaration of variable X. */
486 variable_hasher::hash (const variable
*v
)
488 return dv_htab_hash (v
->dv
);
491 /* Compare the declaration of variable X with declaration Y. */
494 variable_hasher::equal (const variable
*v
, const void *y
)
496 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
498 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
501 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
504 variable_hasher::remove (variable
*var
)
506 variable_htab_free (var
);
509 typedef hash_table
<variable_hasher
> variable_table_type
;
510 typedef variable_table_type::iterator variable_iterator_type
;
512 /* Structure for passing some other parameters to function
513 emit_note_insn_var_location. */
514 struct emit_note_data
516 /* The instruction which the note will be emitted before/after. */
519 /* Where the note will be emitted (before/after insn)? */
520 enum emit_note_where where
;
522 /* The variables and values active at this point. */
523 variable_table_type
*vars
;
526 /* Structure holding a refcounted hash table. If refcount > 1,
527 it must be first unshared before modified. */
530 /* Reference count. */
533 /* Actual hash table. */
534 variable_table_type
*htab
;
537 /* Structure holding the IN or OUT set for a basic block. */
540 /* Adjustment of stack offset. */
541 HOST_WIDE_INT stack_adjust
;
543 /* Attributes for registers (lists of attrs). */
544 attrs
*regs
[FIRST_PSEUDO_REGISTER
];
546 /* Variable locations. */
549 /* Vars that is being traversed. */
550 shared_hash
*traversed_vars
;
553 /* The structure (one for each basic block) containing the information
554 needed for variable tracking. */
555 struct variable_tracking_info
557 /* The vector of micro operations. */
558 vec
<micro_operation
> mos
;
560 /* The IN and OUT set for dataflow analysis. */
564 /* The permanent-in dataflow set for this block. This is used to
565 hold values for which we had to compute entry values. ??? This
566 should probably be dynamically allocated, to avoid using more
567 memory in non-debug builds. */
570 /* Has the block been visited in DFS? */
573 /* Has the block been flooded in VTA? */
578 /* Alloc pool for struct attrs_def. */
579 object_allocator
<attrs
> attrs_pool ("attrs pool");
581 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
583 static pool_allocator var_pool
584 ("variable_def pool", sizeof (variable
) +
585 (MAX_VAR_PARTS
- 1) * sizeof (((variable
*)NULL
)->var_part
[0]));
587 /* Alloc pool for struct variable_def with a single var_part entry. */
588 static pool_allocator valvar_pool
589 ("small variable_def pool", sizeof (variable
));
591 /* Alloc pool for struct location_chain. */
592 static object_allocator
<location_chain
> location_chain_pool
593 ("location_chain pool");
595 /* Alloc pool for struct shared_hash. */
596 static object_allocator
<shared_hash
> shared_hash_pool ("shared_hash pool");
598 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
599 object_allocator
<loc_exp_dep
> loc_exp_dep_pool ("loc_exp_dep pool");
601 /* Changed variables, notes will be emitted for them. */
602 static variable_table_type
*changed_variables
;
604 /* Shall notes be emitted? */
605 static bool emit_notes
;
607 /* Values whose dynamic location lists have gone empty, but whose
608 cselib location lists are still usable. Use this to hold the
609 current location, the backlinks, etc, during emit_notes. */
610 static variable_table_type
*dropped_values
;
612 /* Empty shared hashtable. */
613 static shared_hash
*empty_shared_hash
;
615 /* Scratch register bitmap used by cselib_expand_value_rtx. */
616 static bitmap scratch_regs
= NULL
;
#ifdef HAVE_window_save
/* A (outgoing, incoming) register pair for a windowed parameter.  */
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
632 /* Local function prototypes. */
633 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
635 static void insn_stack_adjust_offset_pre_post (rtx_insn
*, HOST_WIDE_INT
*,
637 static bool vt_stack_adjustments (void);
639 static void init_attrs_list_set (attrs
**);
640 static void attrs_list_clear (attrs
**);
641 static attrs
*attrs_list_member (attrs
*, decl_or_value
, HOST_WIDE_INT
);
642 static void attrs_list_insert (attrs
**, decl_or_value
, HOST_WIDE_INT
, rtx
);
643 static void attrs_list_copy (attrs
**, attrs
*);
644 static void attrs_list_union (attrs
**, attrs
*);
646 static variable
**unshare_variable (dataflow_set
*set
, variable
**slot
,
647 variable
*var
, enum var_init_status
);
648 static void vars_copy (variable_table_type
*, variable_table_type
*);
649 static tree
var_debug_decl (tree
);
650 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
651 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
652 enum var_init_status
, rtx
);
653 static void var_reg_delete (dataflow_set
*, rtx
, bool);
654 static void var_regno_delete (dataflow_set
*, int);
655 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
656 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
657 enum var_init_status
, rtx
);
658 static void var_mem_delete (dataflow_set
*, rtx
, bool);
660 static void dataflow_set_init (dataflow_set
*);
661 static void dataflow_set_clear (dataflow_set
*);
662 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
663 static int variable_union_info_cmp_pos (const void *, const void *);
664 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
665 static location_chain
*find_loc_in_1pdv (rtx
, variable
*,
666 variable_table_type
*);
667 static bool canon_value_cmp (rtx
, rtx
);
668 static int loc_cmp (rtx
, rtx
);
669 static bool variable_part_different_p (variable_part
*, variable_part
*);
670 static bool onepart_variable_different_p (variable
*, variable
*);
671 static bool variable_different_p (variable
*, variable
*);
672 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
673 static void dataflow_set_destroy (dataflow_set
*);
675 static bool track_expr_p (tree
, bool);
676 static void add_uses_1 (rtx
*, void *);
677 static void add_stores (rtx
, const_rtx
, void *);
678 static bool compute_bb_dataflow (basic_block
);
679 static bool vt_find_locations (void);
681 static void dump_attrs_list (attrs
*);
682 static void dump_var (variable
*);
683 static void dump_vars (variable_table_type
*);
684 static void dump_dataflow_set (dataflow_set
*);
685 static void dump_dataflow_sets (void);
687 static void set_dv_changed (decl_or_value
, bool);
688 static void variable_was_changed (variable
*, dataflow_set
*);
689 static variable
**set_slot_part (dataflow_set
*, rtx
, variable
**,
690 decl_or_value
, HOST_WIDE_INT
,
691 enum var_init_status
, rtx
);
692 static void set_variable_part (dataflow_set
*, rtx
,
693 decl_or_value
, HOST_WIDE_INT
,
694 enum var_init_status
, rtx
, enum insert_option
);
695 static variable
**clobber_slot_part (dataflow_set
*, rtx
,
696 variable
**, HOST_WIDE_INT
, rtx
);
697 static void clobber_variable_part (dataflow_set
*, rtx
,
698 decl_or_value
, HOST_WIDE_INT
, rtx
);
699 static variable
**delete_slot_part (dataflow_set
*, rtx
, variable
**,
701 static void delete_variable_part (dataflow_set
*, rtx
,
702 decl_or_value
, HOST_WIDE_INT
);
703 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
704 static void vt_emit_notes (void);
706 static void vt_add_function_parameters (void);
707 static bool vt_initialize (void);
708 static void vt_finalize (void);
710 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
713 stack_adjust_offset_pre_post_cb (rtx
, rtx op
, rtx dest
, rtx src
, rtx srcoff
,
716 if (dest
!= stack_pointer_rtx
)
719 switch (GET_CODE (op
))
723 ((HOST_WIDE_INT
*)arg
)[0] -= INTVAL (srcoff
);
727 ((HOST_WIDE_INT
*)arg
)[1] -= INTVAL (srcoff
);
731 /* We handle only adjustments by constant amount. */
732 gcc_assert (GET_CODE (src
) == PLUS
733 && CONST_INT_P (XEXP (src
, 1))
734 && XEXP (src
, 0) == stack_pointer_rtx
);
735 ((HOST_WIDE_INT
*)arg
)[GET_CODE (op
) == POST_MODIFY
]
736 -= INTVAL (XEXP (src
, 1));
743 /* Given a SET, calculate the amount of stack adjustment it contains
744 PRE- and POST-modifying stack pointer.
745 This function is similar to stack_adjust_offset. */
748 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
751 rtx src
= SET_SRC (pattern
);
752 rtx dest
= SET_DEST (pattern
);
755 if (dest
== stack_pointer_rtx
)
757 /* (set (reg sp) (plus (reg sp) (const_int))) */
758 code
= GET_CODE (src
);
759 if (! (code
== PLUS
|| code
== MINUS
)
760 || XEXP (src
, 0) != stack_pointer_rtx
761 || !CONST_INT_P (XEXP (src
, 1)))
765 *post
+= INTVAL (XEXP (src
, 1));
767 *post
-= INTVAL (XEXP (src
, 1));
770 HOST_WIDE_INT res
[2] = { 0, 0 };
771 for_each_inc_dec (pattern
, stack_adjust_offset_pre_post_cb
, res
);
776 /* Given an INSN, calculate the amount of stack adjustment it contains
777 PRE- and POST-modifying stack pointer. */
780 insn_stack_adjust_offset_pre_post (rtx_insn
*insn
, HOST_WIDE_INT
*pre
,
788 pattern
= PATTERN (insn
);
789 if (RTX_FRAME_RELATED_P (insn
))
791 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
793 pattern
= XEXP (expr
, 0);
796 if (GET_CODE (pattern
) == SET
)
797 stack_adjust_offset_pre_post (pattern
, pre
, post
);
798 else if (GET_CODE (pattern
) == PARALLEL
799 || GET_CODE (pattern
) == SEQUENCE
)
803 /* There may be stack adjustments inside compound insns. Search
805 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
806 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
807 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
811 /* Compute stack adjustments for all blocks by traversing DFS tree.
812 Return true when the adjustments on all incoming edges are consistent.
813 Heavily borrowed from pre_and_rev_post_order_compute. */
816 vt_stack_adjustments (void)
818 edge_iterator
*stack
;
821 /* Initialize entry block. */
822 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->visited
= true;
823 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->in
.stack_adjust
824 = INCOMING_FRAME_SP_OFFSET
;
825 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
.stack_adjust
826 = INCOMING_FRAME_SP_OFFSET
;
828 /* Allocate stack for back-tracking up CFG. */
829 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
832 /* Push the first edge on to the stack. */
833 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
841 /* Look at the edge on the top of the stack. */
843 src
= ei_edge (ei
)->src
;
844 dest
= ei_edge (ei
)->dest
;
846 /* Check if the edge destination has been visited yet. */
847 if (!VTI (dest
)->visited
)
850 HOST_WIDE_INT pre
, post
, offset
;
851 VTI (dest
)->visited
= true;
852 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
854 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
855 for (insn
= BB_HEAD (dest
);
856 insn
!= NEXT_INSN (BB_END (dest
));
857 insn
= NEXT_INSN (insn
))
860 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
861 offset
+= pre
+ post
;
864 VTI (dest
)->out
.stack_adjust
= offset
;
866 if (EDGE_COUNT (dest
->succs
) > 0)
867 /* Since the DEST node has been visited for the first
868 time, check its successors. */
869 stack
[sp
++] = ei_start (dest
->succs
);
873 /* We can end up with different stack adjustments for the exit block
874 of a shrink-wrapped function if stack_adjust_offset_pre_post
875 doesn't understand the rtx pattern used to restore the stack
876 pointer in the epilogue. For example, on s390(x), the stack
877 pointer is often restored via a load-multiple instruction
878 and so no stack_adjust offset is recorded for it. This means
879 that the stack offset at the end of the epilogue block is the
880 same as the offset before the epilogue, whereas other paths
881 to the exit block will have the correct stack_adjust.
883 It is safe to ignore these differences because (a) we never
884 use the stack_adjust for the exit block in this pass and
885 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
886 function are correct.
888 We must check whether the adjustments on other edges are
890 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
891 && VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
897 if (! ei_one_before_end_p (ei
))
898 /* Go to the next edge. */
899 ei_next (&stack
[sp
- 1]);
901 /* Return to previous level if there are no more edges. */
910 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
911 hard_frame_pointer_rtx is being mapped to it and offset for it. */
912 static rtx cfa_base_rtx
;
913 static HOST_WIDE_INT cfa_base_offset
;
915 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
916 or hard_frame_pointer_rtx. */
919 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
921 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
924 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
925 or -1 if the replacement shouldn't be done. */
926 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
928 /* Data for adjust_mems callback. */
930 struct adjust_mem_data
933 machine_mode mem_mode
;
934 HOST_WIDE_INT stack_adjust
;
935 auto_vec
<rtx
> side_effects
;
938 /* Helper for adjust_mems. Return true if X is suitable for
939 transformation of wider mode arithmetics to narrower mode. */
942 use_narrower_mode_test (rtx x
, const_rtx subreg
)
944 subrtx_var_iterator::array_type array
;
945 FOR_EACH_SUBRTX_VAR (iter
, array
, x
, NONCONST
)
949 iter
.skip_subrtxes ();
951 switch (GET_CODE (x
))
954 if (cselib_lookup (x
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
956 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (x
), x
,
957 subreg_lowpart_offset (GET_MODE (subreg
),
966 iter
.substitute (XEXP (x
, 0));
975 /* Transform X into narrower mode MODE from wider mode WMODE. */
978 use_narrower_mode (rtx x
, scalar_int_mode mode
, scalar_int_mode wmode
)
982 return lowpart_subreg (mode
, x
, wmode
);
983 switch (GET_CODE (x
))
986 return lowpart_subreg (mode
, x
, wmode
);
990 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
991 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
992 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
994 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
996 /* Ensure shift amount is not wider than mode. */
997 if (GET_MODE (op1
) == VOIDmode
)
998 op1
= lowpart_subreg (mode
, op1
, wmode
);
999 else if (GET_MODE_PRECISION (mode
)
1000 < GET_MODE_PRECISION (as_a
<scalar_int_mode
> (GET_MODE (op1
))))
1001 op1
= lowpart_subreg (mode
, op1
, GET_MODE (op1
));
1002 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
1008 /* Helper function for adjusting used MEMs. */
1011 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1013 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1014 rtx mem
, addr
= loc
, tem
;
1015 machine_mode mem_mode_save
;
1017 scalar_int_mode tem_mode
, tem_subreg_mode
;
1018 switch (GET_CODE (loc
))
1021 /* Don't do any sp or fp replacements outside of MEM addresses
1023 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1025 if (loc
== stack_pointer_rtx
1026 && !frame_pointer_needed
1028 return compute_cfa_pointer (amd
->stack_adjust
);
1029 else if (loc
== hard_frame_pointer_rtx
1030 && frame_pointer_needed
1031 && hard_frame_pointer_adjustment
!= -1
1033 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1034 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1040 mem
= targetm
.delegitimize_address (mem
);
1041 if (mem
!= loc
&& !MEM_P (mem
))
1042 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1045 addr
= XEXP (mem
, 0);
1046 mem_mode_save
= amd
->mem_mode
;
1047 amd
->mem_mode
= GET_MODE (mem
);
1048 store_save
= amd
->store
;
1050 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1051 amd
->store
= store_save
;
1052 amd
->mem_mode
= mem_mode_save
;
1054 addr
= targetm
.delegitimize_address (addr
);
1055 if (addr
!= XEXP (mem
, 0))
1056 mem
= replace_equiv_address_nv (mem
, addr
);
1058 mem
= avoid_constant_pool_reference (mem
);
1062 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1063 gen_int_mode (GET_CODE (loc
) == PRE_INC
1064 ? GET_MODE_SIZE (amd
->mem_mode
)
1065 : -GET_MODE_SIZE (amd
->mem_mode
),
1071 addr
= XEXP (loc
, 0);
1072 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1073 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1074 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1075 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1076 || GET_CODE (loc
) == POST_INC
)
1077 ? GET_MODE_SIZE (amd
->mem_mode
)
1078 : -GET_MODE_SIZE (amd
->mem_mode
),
1080 store_save
= amd
->store
;
1082 tem
= simplify_replace_fn_rtx (tem
, old_rtx
, adjust_mems
, data
);
1083 amd
->store
= store_save
;
1084 amd
->side_effects
.safe_push (gen_rtx_SET (XEXP (loc
, 0), tem
));
1087 addr
= XEXP (loc
, 1);
1091 addr
= XEXP (loc
, 0);
1092 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1093 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1094 store_save
= amd
->store
;
1096 tem
= simplify_replace_fn_rtx (XEXP (loc
, 1), old_rtx
,
1098 amd
->store
= store_save
;
1099 amd
->side_effects
.safe_push (gen_rtx_SET (XEXP (loc
, 0), tem
));
1102 /* First try without delegitimization of whole MEMs and
1103 avoid_constant_pool_reference, which is more likely to succeed. */
1104 store_save
= amd
->store
;
1106 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1108 amd
->store
= store_save
;
1109 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1110 if (mem
== SUBREG_REG (loc
))
1115 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1116 GET_MODE (SUBREG_REG (loc
)),
1120 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1121 GET_MODE (SUBREG_REG (loc
)),
1123 if (tem
== NULL_RTX
)
1124 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1126 if (MAY_HAVE_DEBUG_BIND_INSNS
1127 && GET_CODE (tem
) == SUBREG
1128 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1129 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1130 || GET_CODE (SUBREG_REG (tem
)) == MULT
1131 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1132 && is_a
<scalar_int_mode
> (GET_MODE (tem
), &tem_mode
)
1133 && is_a
<scalar_int_mode
> (GET_MODE (SUBREG_REG (tem
)),
1135 && (GET_MODE_PRECISION (tem_mode
)
1136 < GET_MODE_PRECISION (tem_subreg_mode
))
1137 && subreg_lowpart_p (tem
)
1138 && use_narrower_mode_test (SUBREG_REG (tem
), tem
))
1139 return use_narrower_mode (SUBREG_REG (tem
), tem_mode
, tem_subreg_mode
);
1142 /* Don't do any replacements in second and following
1143 ASM_OPERANDS of inline-asm with multiple sets.
1144 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1145 and ASM_OPERANDS_LABEL_VEC need to be equal between
1146 all the ASM_OPERANDs in the insn and adjust_insn will
1148 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1157 /* Helper function for replacement of uses. */
1160 adjust_mem_uses (rtx
*x
, void *data
)
1162 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1164 validate_change (NULL_RTX
, x
, new_x
, true);
1167 /* Helper function for replacement of stores. */
1170 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1174 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1176 if (new_dest
!= SET_DEST (expr
))
1178 rtx xexpr
= CONST_CAST_RTX (expr
);
1179 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1184 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1185 replace them with their value in the insn and add the side-effects
1186 as other sets to the insn. */
1189 adjust_insn (basic_block bb
, rtx_insn
*insn
)
1193 #ifdef HAVE_window_save
1194 /* If the target machine has an explicit window save instruction, the
1195 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1196 if (RTX_FRAME_RELATED_P (insn
)
1197 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1199 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1200 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1203 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1205 XVECEXP (rtl
, 0, i
* 2)
1206 = gen_rtx_SET (p
->incoming
, p
->outgoing
);
1207 /* Do not clobber the attached DECL, but only the REG. */
1208 XVECEXP (rtl
, 0, i
* 2 + 1)
1209 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1210 gen_raw_REG (GET_MODE (p
->outgoing
),
1211 REGNO (p
->outgoing
)));
1214 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1219 adjust_mem_data amd
;
1220 amd
.mem_mode
= VOIDmode
;
1221 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1224 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1227 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1228 && asm_noperands (PATTERN (insn
)) > 0
1229 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1234 /* inline-asm with multiple sets is tiny bit more complicated,
1235 because the 3 vectors in ASM_OPERANDS need to be shared between
1236 all ASM_OPERANDS in the instruction. adjust_mems will
1237 not touch ASM_OPERANDS other than the first one, asm_noperands
1238 test above needs to be called before that (otherwise it would fail)
1239 and afterwards this code fixes it up. */
1240 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1241 body
= PATTERN (insn
);
1242 set0
= XVECEXP (body
, 0, 0);
1243 gcc_checking_assert (GET_CODE (set0
) == SET
1244 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1245 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1246 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1247 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1251 set
= XVECEXP (body
, 0, i
);
1252 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1253 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1255 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1256 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1257 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1258 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1259 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1260 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1262 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1263 ASM_OPERANDS_INPUT_VEC (newsrc
)
1264 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1265 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1266 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1267 ASM_OPERANDS_LABEL_VEC (newsrc
)
1268 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1269 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1274 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1276 /* For read-only MEMs containing some constant, prefer those
1278 set
= single_set (insn
);
1279 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1281 rtx note
= find_reg_equal_equiv_note (insn
);
1283 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1284 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1287 if (!amd
.side_effects
.is_empty ())
1292 pat
= &PATTERN (insn
);
1293 if (GET_CODE (*pat
) == COND_EXEC
)
1294 pat
= &COND_EXEC_CODE (*pat
);
1295 if (GET_CODE (*pat
) == PARALLEL
)
1296 oldn
= XVECLEN (*pat
, 0);
1299 unsigned int newn
= amd
.side_effects
.length ();
1300 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1301 if (GET_CODE (*pat
) == PARALLEL
)
1302 for (i
= 0; i
< oldn
; i
++)
1303 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1305 XVECEXP (new_pat
, 0, 0) = *pat
;
1309 FOR_EACH_VEC_ELT_REVERSE (amd
.side_effects
, j
, effect
)
1310 XVECEXP (new_pat
, 0, j
+ oldn
) = effect
;
1311 validate_change (NULL_RTX
, pat
, new_pat
, true);
1315 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1317 dv_as_rtx (decl_or_value dv
)
1321 if (dv_is_value_p (dv
))
1322 return dv_as_value (dv
);
1324 decl
= dv_as_decl (dv
);
1326 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1327 return DECL_RTL_KNOWN_SET (decl
);
1330 /* Return nonzero if a decl_or_value must not have more than one
1331 variable part. The returned value discriminates among various
1332 kinds of one-part DVs ccording to enum onepart_enum. */
1333 static inline onepart_enum
1334 dv_onepart_p (decl_or_value dv
)
1338 if (!MAY_HAVE_DEBUG_BIND_INSNS
)
1341 if (dv_is_value_p (dv
))
1342 return ONEPART_VALUE
;
1344 decl
= dv_as_decl (dv
);
1346 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1347 return ONEPART_DEXPR
;
1349 if (target_for_debug_bind (decl
) != NULL_TREE
)
1350 return ONEPART_VDECL
;
1355 /* Return the variable pool to be used for a dv of type ONEPART. */
1356 static inline pool_allocator
&
1357 onepart_pool (onepart_enum onepart
)
1359 return onepart
? valvar_pool
: var_pool
;
1362 /* Allocate a variable_def from the corresponding variable pool. */
1363 static inline variable
*
1364 onepart_pool_allocate (onepart_enum onepart
)
1366 return (variable
*) onepart_pool (onepart
).allocate ();
1369 /* Build a decl_or_value out of a decl. */
1370 static inline decl_or_value
1371 dv_from_decl (tree decl
)
1375 gcc_checking_assert (dv_is_decl_p (dv
));
1379 /* Build a decl_or_value out of a value. */
1380 static inline decl_or_value
1381 dv_from_value (rtx value
)
1385 gcc_checking_assert (dv_is_value_p (dv
));
1389 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1390 static inline decl_or_value
1395 switch (GET_CODE (x
))
1398 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1399 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1403 dv
= dv_from_value (x
);
1413 extern void debug_dv (decl_or_value dv
);
1416 debug_dv (decl_or_value dv
)
1418 if (dv_is_value_p (dv
))
1419 debug_rtx (dv_as_value (dv
));
1421 debug_generic_stmt (dv_as_decl (dv
));
1424 static void loc_exp_dep_clear (variable
*var
);
1426 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1429 variable_htab_free (void *elem
)
1432 variable
*var
= (variable
*) elem
;
1433 location_chain
*node
, *next
;
1435 gcc_checking_assert (var
->refcount
> 0);
1438 if (var
->refcount
> 0)
1441 for (i
= 0; i
< var
->n_var_parts
; i
++)
1443 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1448 var
->var_part
[i
].loc_chain
= NULL
;
1450 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1452 loc_exp_dep_clear (var
);
1453 if (VAR_LOC_DEP_LST (var
))
1454 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1455 XDELETE (VAR_LOC_1PAUX (var
));
1456 /* These may be reused across functions, so reset
1458 if (var
->onepart
== ONEPART_DEXPR
)
1459 set_dv_changed (var
->dv
, true);
1461 onepart_pool (var
->onepart
).remove (var
);
1464 /* Initialize the set (array) SET of attrs to empty lists. */
1467 init_attrs_list_set (attrs
**set
)
1471 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1475 /* Make the list *LISTP empty. */
1478 attrs_list_clear (attrs
**listp
)
1482 for (list
= *listp
; list
; list
= next
)
1490 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1493 attrs_list_member (attrs
*list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1495 for (; list
; list
= list
->next
)
1496 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1501 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1504 attrs_list_insert (attrs
**listp
, decl_or_value dv
,
1505 HOST_WIDE_INT offset
, rtx loc
)
1507 attrs
*list
= new attrs
;
1510 list
->offset
= offset
;
1511 list
->next
= *listp
;
1515 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1518 attrs_list_copy (attrs
**dstp
, attrs
*src
)
1520 attrs_list_clear (dstp
);
1521 for (; src
; src
= src
->next
)
1523 attrs
*n
= new attrs
;
1526 n
->offset
= src
->offset
;
1532 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1535 attrs_list_union (attrs
**dstp
, attrs
*src
)
1537 for (; src
; src
= src
->next
)
1539 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1540 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1544 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1548 attrs_list_mpdv_union (attrs
**dstp
, attrs
*src
, attrs
*src2
)
1550 gcc_assert (!*dstp
);
1551 for (; src
; src
= src
->next
)
1553 if (!dv_onepart_p (src
->dv
))
1554 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1556 for (src
= src2
; src
; src
= src
->next
)
1558 if (!dv_onepart_p (src
->dv
)
1559 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1560 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1564 /* Shared hashtable support. */
1566 /* Return true if VARS is shared. */
1569 shared_hash_shared (shared_hash
*vars
)
1571 return vars
->refcount
> 1;
1574 /* Return the hash table for VARS. */
1576 static inline variable_table_type
*
1577 shared_hash_htab (shared_hash
*vars
)
1582 /* Return true if VAR is shared, or maybe because VARS is shared. */
1585 shared_var_p (variable
*var
, shared_hash
*vars
)
1587 /* Don't count an entry in the changed_variables table as a duplicate. */
1588 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1589 || shared_hash_shared (vars
));
1592 /* Copy variables into a new hash table. */
1594 static shared_hash
*
1595 shared_hash_unshare (shared_hash
*vars
)
1597 shared_hash
*new_vars
= new shared_hash
;
1598 gcc_assert (vars
->refcount
> 1);
1599 new_vars
->refcount
= 1;
1600 new_vars
->htab
= new variable_table_type (vars
->htab
->elements () + 3);
1601 vars_copy (new_vars
->htab
, vars
->htab
);
1606 /* Increment reference counter on VARS and return it. */
1608 static inline shared_hash
*
1609 shared_hash_copy (shared_hash
*vars
)
1615 /* Decrement reference counter and destroy hash table if not shared
1619 shared_hash_destroy (shared_hash
*vars
)
1621 gcc_checking_assert (vars
->refcount
> 0);
1622 if (--vars
->refcount
== 0)
1629 /* Unshare *PVARS if shared and return slot for DV. If INS is
1630 INSERT, insert it if not already present. */
1632 static inline variable
**
1633 shared_hash_find_slot_unshare_1 (shared_hash
**pvars
, decl_or_value dv
,
1634 hashval_t dvhash
, enum insert_option ins
)
1636 if (shared_hash_shared (*pvars
))
1637 *pvars
= shared_hash_unshare (*pvars
);
1638 return shared_hash_htab (*pvars
)->find_slot_with_hash (dv
, dvhash
, ins
);
1641 static inline variable
**
1642 shared_hash_find_slot_unshare (shared_hash
**pvars
, decl_or_value dv
,
1643 enum insert_option ins
)
1645 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1648 /* Return slot for DV, if it is already present in the hash table.
1649 If it is not present, insert it only VARS is not shared, otherwise
1652 static inline variable
**
1653 shared_hash_find_slot_1 (shared_hash
*vars
, decl_or_value dv
, hashval_t dvhash
)
1655 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
,
1656 shared_hash_shared (vars
)
1657 ? NO_INSERT
: INSERT
);
1660 static inline variable
**
1661 shared_hash_find_slot (shared_hash
*vars
, decl_or_value dv
)
1663 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1666 /* Return slot for DV only if it is already present in the hash table. */
1668 static inline variable
**
1669 shared_hash_find_slot_noinsert_1 (shared_hash
*vars
, decl_or_value dv
,
1672 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1675 static inline variable
**
1676 shared_hash_find_slot_noinsert (shared_hash
*vars
, decl_or_value dv
)
1678 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1681 /* Return variable for DV or NULL if not already present in the hash
1684 static inline variable
*
1685 shared_hash_find_1 (shared_hash
*vars
, decl_or_value dv
, hashval_t dvhash
)
1687 return shared_hash_htab (vars
)->find_with_hash (dv
, dvhash
);
1690 static inline variable
*
1691 shared_hash_find (shared_hash
*vars
, decl_or_value dv
)
1693 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1696 /* Return true if TVAL is better than CVAL as a canonival value. We
1697 choose lowest-numbered VALUEs, using the RTX address as a
1698 tie-breaker. The idea is to arrange them into a star topology,
1699 such that all of them are at most one step away from the canonical
1700 value, and the canonical value has backlinks to all of them, in
1701 addition to all the actual locations. We don't enforce this
1702 topology throughout the entire dataflow analysis, though.
1706 canon_value_cmp (rtx tval
, rtx cval
)
1709 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1712 static bool dst_can_be_shared
;
1714 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1717 unshare_variable (dataflow_set
*set
, variable
**slot
, variable
*var
,
1718 enum var_init_status initialized
)
1723 new_var
= onepart_pool_allocate (var
->onepart
);
1724 new_var
->dv
= var
->dv
;
1725 new_var
->refcount
= 1;
1727 new_var
->n_var_parts
= var
->n_var_parts
;
1728 new_var
->onepart
= var
->onepart
;
1729 new_var
->in_changed_variables
= false;
1731 if (! flag_var_tracking_uninit
)
1732 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1734 for (i
= 0; i
< var
->n_var_parts
; i
++)
1736 location_chain
*node
;
1737 location_chain
**nextp
;
1739 if (i
== 0 && var
->onepart
)
1741 /* One-part auxiliary data is only used while emitting
1742 notes, so propagate it to the new variable in the active
1743 dataflow set. If we're not emitting notes, this will be
1745 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1746 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1747 VAR_LOC_1PAUX (var
) = NULL
;
1750 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1751 nextp
= &new_var
->var_part
[i
].loc_chain
;
1752 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1754 location_chain
*new_lc
;
1756 new_lc
= new location_chain
;
1757 new_lc
->next
= NULL
;
1758 if (node
->init
> initialized
)
1759 new_lc
->init
= node
->init
;
1761 new_lc
->init
= initialized
;
1762 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1763 new_lc
->set_src
= node
->set_src
;
1765 new_lc
->set_src
= NULL
;
1766 new_lc
->loc
= node
->loc
;
1769 nextp
= &new_lc
->next
;
1772 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1775 dst_can_be_shared
= false;
1776 if (shared_hash_shared (set
->vars
))
1777 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1778 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1779 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1781 if (var
->in_changed_variables
)
1784 = changed_variables
->find_slot_with_hash (var
->dv
,
1785 dv_htab_hash (var
->dv
),
1787 gcc_assert (*cslot
== (void *) var
);
1788 var
->in_changed_variables
= false;
1789 variable_htab_free (var
);
1791 new_var
->in_changed_variables
= true;
1796 /* Copy all variables from hash table SRC to hash table DST. */
1799 vars_copy (variable_table_type
*dst
, variable_table_type
*src
)
1801 variable_iterator_type hi
;
1804 FOR_EACH_HASH_TABLE_ELEMENT (*src
, var
, variable
, hi
)
1808 dstp
= dst
->find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
),
1814 /* Map a decl to its main debug decl. */
1817 var_debug_decl (tree decl
)
1819 if (decl
&& VAR_P (decl
) && DECL_HAS_DEBUG_EXPR_P (decl
))
1821 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1822 if (DECL_P (debugdecl
))
1829 /* Set the register LOC to contain DV, OFFSET. */
1832 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1833 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1834 enum insert_option iopt
)
1837 bool decl_p
= dv_is_decl_p (dv
);
1840 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1842 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1843 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1844 && node
->offset
== offset
)
1847 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1848 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1851 /* Return true if we should track a location that is OFFSET bytes from
1852 a variable. Store the constant offset in *OFFSET_OUT if so. */
1855 track_offset_p (poly_int64 offset
, HOST_WIDE_INT
*offset_out
)
1857 HOST_WIDE_INT const_offset
;
1858 if (!offset
.is_constant (&const_offset
)
1859 || !IN_RANGE (const_offset
, 0, MAX_VAR_PARTS
- 1))
1861 *offset_out
= const_offset
;
1865 /* Return the offset of a register that track_offset_p says we
1868 static HOST_WIDE_INT
1869 get_tracked_reg_offset (rtx loc
)
1871 HOST_WIDE_INT offset
;
1872 if (!track_offset_p (REG_OFFSET (loc
), &offset
))
1877 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1880 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1883 tree decl
= REG_EXPR (loc
);
1884 HOST_WIDE_INT offset
= get_tracked_reg_offset (loc
);
1886 var_reg_decl_set (set
, loc
, initialized
,
1887 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1890 static enum var_init_status
1891 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1895 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1897 if (! flag_var_tracking_uninit
)
1898 return VAR_INIT_STATUS_INITIALIZED
;
1900 var
= shared_hash_find (set
->vars
, dv
);
1903 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1905 location_chain
*nextp
;
1906 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1907 if (rtx_equal_p (nextp
->loc
, loc
))
1909 ret_val
= nextp
->init
;
1918 /* Delete current content of register LOC in dataflow set SET and set
1919 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1920 MODIFY is true, any other live copies of the same variable part are
1921 also deleted from the dataflow set, otherwise the variable part is
1922 assumed to be copied from another location holding the same
1926 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1927 enum var_init_status initialized
, rtx set_src
)
1929 tree decl
= REG_EXPR (loc
);
1930 HOST_WIDE_INT offset
= get_tracked_reg_offset (loc
);
1934 decl
= var_debug_decl (decl
);
1936 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1937 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1939 nextp
= &set
->regs
[REGNO (loc
)];
1940 for (node
= *nextp
; node
; node
= next
)
1943 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1945 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1952 nextp
= &node
->next
;
1956 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1957 var_reg_set (set
, loc
, initialized
, set_src
);
1960 /* Delete the association of register LOC in dataflow set SET with any
1961 variables that aren't onepart. If CLOBBER is true, also delete any
1962 other live copies of the same variable part, and delete the
1963 association with onepart dvs too. */
1966 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1968 attrs
**nextp
= &set
->regs
[REGNO (loc
)];
1971 HOST_WIDE_INT offset
;
1972 if (clobber
&& track_offset_p (REG_OFFSET (loc
), &offset
))
1974 tree decl
= REG_EXPR (loc
);
1976 decl
= var_debug_decl (decl
);
1978 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1981 for (node
= *nextp
; node
; node
= next
)
1984 if (clobber
|| !dv_onepart_p (node
->dv
))
1986 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1991 nextp
= &node
->next
;
1995 /* Delete content of register with number REGNO in dataflow set SET. */
1998 var_regno_delete (dataflow_set
*set
, int regno
)
2000 attrs
**reg
= &set
->regs
[regno
];
2003 for (node
= *reg
; node
; node
= next
)
2006 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2012 /* Return true if I is the negated value of a power of two. */
2014 negative_power_of_two_p (HOST_WIDE_INT i
)
2016 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
2017 return pow2_or_zerop (x
);
2020 /* Strip constant offsets and alignments off of LOC. Return the base
2024 vt_get_canonicalize_base (rtx loc
)
2026 while ((GET_CODE (loc
) == PLUS
2027 || GET_CODE (loc
) == AND
)
2028 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2029 && (GET_CODE (loc
) != AND
2030 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
2031 loc
= XEXP (loc
, 0);
2036 /* This caches canonicalized addresses for VALUEs, computed using
2037 information in the global cselib table. */
2038 static hash_map
<rtx
, rtx
> *global_get_addr_cache
;
2040 /* This caches canonicalized addresses for VALUEs, computed using
2041 information from the global cache and information pertaining to a
2042 basic block being analyzed. */
2043 static hash_map
<rtx
, rtx
> *local_get_addr_cache
;
2045 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2047 /* Return the canonical address for LOC, that must be a VALUE, using a
2048 cached global equivalence or computing it and storing it in the
2052 get_addr_from_global_cache (rtx
const loc
)
2056 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2059 rtx
*slot
= &global_get_addr_cache
->get_or_insert (loc
, &existed
);
2063 x
= canon_rtx (get_addr (loc
));
2065 /* Tentative, avoiding infinite recursion. */
2070 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2073 /* The table may have moved during recursion, recompute
2075 *global_get_addr_cache
->get (loc
) = x
= nx
;
2082 /* Return the canonical address for LOC, that must be a VALUE, using a
2083 cached local equivalence or computing it and storing it in the
2087 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2094 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2097 rtx
*slot
= &local_get_addr_cache
->get_or_insert (loc
, &existed
);
2101 x
= get_addr_from_global_cache (loc
);
2103 /* Tentative, avoiding infinite recursion. */
2106 /* Recurse to cache local expansion of X, or if we need to search
2107 for a VALUE in the expansion. */
2110 rtx nx
= vt_canonicalize_addr (set
, x
);
2113 slot
= local_get_addr_cache
->get (loc
);
2119 dv
= dv_from_rtx (x
);
2120 var
= shared_hash_find (set
->vars
, dv
);
2124 /* Look for an improved equivalent expression. */
2125 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2127 rtx base
= vt_get_canonicalize_base (l
->loc
);
2128 if (GET_CODE (base
) == VALUE
2129 && canon_value_cmp (base
, loc
))
2131 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2134 slot
= local_get_addr_cache
->get (loc
);
2144 /* Canonicalize LOC using equivalences from SET in addition to those
2145 in the cselib static table. It expects a VALUE-based expression,
2146 and it will only substitute VALUEs with other VALUEs or
2147 function-global equivalences, so that, if two addresses have base
2148 VALUEs that are locally or globally related in ways that
2149 memrefs_conflict_p cares about, they will both canonicalize to
2150 expressions that have the same base VALUE.
2152 The use of VALUEs as canonical base addresses enables the canonical
2153 RTXs to remain unchanged globally, if they resolve to a constant,
2154 or throughout a basic block otherwise, so that they can be cached
2155 and the cache needs not be invalidated when REGs, MEMs or such
2159 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2161 HOST_WIDE_INT ofst
= 0;
2162 machine_mode mode
= GET_MODE (oloc
);
2169 while (GET_CODE (loc
) == PLUS
2170 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2172 ofst
+= INTVAL (XEXP (loc
, 1));
2173 loc
= XEXP (loc
, 0);
2176 /* Alignment operations can't normally be combined, so just
2177 canonicalize the base and we're done. We'll normally have
2178 only one stack alignment anyway. */
2179 if (GET_CODE (loc
) == AND
2180 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2181 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2183 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2184 if (x
!= XEXP (loc
, 0))
2185 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2189 if (GET_CODE (loc
) == VALUE
)
2192 loc
= get_addr_from_local_cache (set
, loc
);
2194 loc
= get_addr_from_global_cache (loc
);
2196 /* Consolidate plus_constants. */
2197 while (ofst
&& GET_CODE (loc
) == PLUS
2198 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2200 ofst
+= INTVAL (XEXP (loc
, 1));
2201 loc
= XEXP (loc
, 0);
2208 x
= canon_rtx (loc
);
2215 /* Add OFST back in. */
2218 /* Don't build new RTL if we can help it. */
2219 if (GET_CODE (oloc
) == PLUS
2220 && XEXP (oloc
, 0) == loc
2221 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2224 loc
= plus_constant (mode
, loc
, ofst
);
2230 /* Return true iff there's a true dependence between MLOC and LOC.
2231 MADDR must be a canonicalized version of MLOC's address. */
2234 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2236 if (GET_CODE (loc
) != MEM
)
2239 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2240 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2246 /* Hold parameters for the hashtab traversal function
2247 drop_overlapping_mem_locs, see below. */
2249 struct overlapping_mems
2255 /* Remove all MEMs that overlap with COMS->LOC from the location list
2256 of a hash table entry for a onepart variable. COMS->ADDR must be a
2257 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2258 canonicalized itself. */
2261 drop_overlapping_mem_locs (variable
**slot
, overlapping_mems
*coms
)
2263 dataflow_set
*set
= coms
->set
;
2264 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2265 variable
*var
= *slot
;
2267 if (var
->onepart
!= NOT_ONEPART
)
2269 location_chain
*loc
, **locp
;
2270 bool changed
= false;
2273 gcc_assert (var
->n_var_parts
== 1);
2275 if (shared_var_p (var
, set
->vars
))
2277 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2278 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2284 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2286 gcc_assert (var
->n_var_parts
== 1);
2289 if (VAR_LOC_1PAUX (var
))
2290 cur_loc
= VAR_LOC_FROM (var
);
2292 cur_loc
= var
->var_part
[0].cur_loc
;
2294 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2297 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2304 /* If we have deleted the location which was last emitted
2305 we have to emit new location so add the variable to set
2306 of changed variables. */
2307 if (cur_loc
== loc
->loc
)
2310 var
->var_part
[0].cur_loc
= NULL
;
2311 if (VAR_LOC_1PAUX (var
))
2312 VAR_LOC_FROM (var
) = NULL
;
2317 if (!var
->var_part
[0].loc_chain
)
2323 variable_was_changed (var
, set
);
2329 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2332 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2334 struct overlapping_mems coms
;
2336 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2339 coms
.loc
= canon_rtx (loc
);
2340 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2342 set
->traversed_vars
= set
->vars
;
2343 shared_hash_htab (set
->vars
)
2344 ->traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2345 set
->traversed_vars
= NULL
;
2348 /* Set the location of DV, OFFSET as the MEM LOC. */
2351 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2352 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2353 enum insert_option iopt
)
2355 if (dv_is_decl_p (dv
))
2356 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2358 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2361 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2363 Adjust the address first if it is stack pointer based. */
2366 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2369 tree decl
= MEM_EXPR (loc
);
2370 HOST_WIDE_INT offset
= int_mem_offset (loc
);
2372 var_mem_decl_set (set
, loc
, initialized
,
2373 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2376 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2377 dataflow set SET to LOC. If MODIFY is true, any other live copies
2378 of the same variable part are also deleted from the dataflow set,
2379 otherwise the variable part is assumed to be copied from another
2380 location holding the same part.
2381 Adjust the address first if it is stack pointer based. */
2384 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2385 enum var_init_status initialized
, rtx set_src
)
2387 tree decl
= MEM_EXPR (loc
);
2388 HOST_WIDE_INT offset
= int_mem_offset (loc
);
2390 clobber_overlapping_mems (set
, loc
);
2391 decl
= var_debug_decl (decl
);
2393 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2394 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2397 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2398 var_mem_set (set
, loc
, initialized
, set_src
);
2401 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2402 true, also delete any other live copies of the same variable part.
2403 Adjust the address first if it is stack pointer based. */
2406 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2408 tree decl
= MEM_EXPR (loc
);
2409 HOST_WIDE_INT offset
= int_mem_offset (loc
);
2411 clobber_overlapping_mems (set
, loc
);
2412 decl
= var_debug_decl (decl
);
2414 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2415 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2418 /* Return true if LOC should not be expanded for location expressions,
2422 unsuitable_loc (rtx loc
)
2424 switch (GET_CODE (loc
))
2438 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2442 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2447 var_regno_delete (set
, REGNO (loc
));
2448 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2449 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2451 else if (MEM_P (loc
))
2453 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2456 clobber_overlapping_mems (set
, loc
);
2458 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2459 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2461 /* If this MEM is a global constant, we don't need it in the
2462 dynamic tables. ??? We should test this before emitting the
2463 micro-op in the first place. */
2465 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2471 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2472 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2476 /* Other kinds of equivalences are necessarily static, at least
2477 so long as we do not perform substitutions while merging
2480 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2481 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2485 /* Bind a value to a location it was just stored in. If MODIFIED
2486 holds, assume the location was modified, detaching it from any
2487 values bound to it. */
2490 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
,
2493 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2495 gcc_assert (cselib_preserved_value_p (v
));
2499 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2500 print_inline_rtx (dump_file
, loc
, 0);
2501 fprintf (dump_file
, " evaluates to ");
2502 print_inline_rtx (dump_file
, val
, 0);
2505 struct elt_loc_list
*l
;
2506 for (l
= v
->locs
; l
; l
= l
->next
)
2508 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2509 print_inline_rtx (dump_file
, l
->loc
, 0);
2512 fprintf (dump_file
, "\n");
2515 gcc_checking_assert (!unsuitable_loc (loc
));
2517 val_bind (set
, val
, loc
, modified
);
2520 /* Clear (canonical address) slots that reference X. */
2523 local_get_addr_clear_given_value (rtx
const &, rtx
*slot
, rtx x
)
2525 if (vt_get_canonicalize_base (*slot
) == x
)
2530 /* Reset this node, detaching all its equivalences. Return the slot
2531 in the variable hash table that holds dv, if there is one. */
2534 val_reset (dataflow_set
*set
, decl_or_value dv
)
2536 variable
*var
= shared_hash_find (set
->vars
, dv
) ;
2537 location_chain
*node
;
2540 if (!var
|| !var
->n_var_parts
)
2543 gcc_assert (var
->n_var_parts
== 1);
2545 if (var
->onepart
== ONEPART_VALUE
)
2547 rtx x
= dv_as_value (dv
);
2549 /* Relationships in the global cache don't change, so reset the
2550 local cache entry only. */
2551 rtx
*slot
= local_get_addr_cache
->get (x
);
2554 /* If the value resolved back to itself, odds are that other
2555 values may have cached it too. These entries now refer
2556 to the old X, so detach them too. Entries that used the
2557 old X but resolved to something else remain ok as long as
2558 that something else isn't also reset. */
2560 local_get_addr_cache
2561 ->traverse
<rtx
, local_get_addr_clear_given_value
> (x
);
2567 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2568 if (GET_CODE (node
->loc
) == VALUE
2569 && canon_value_cmp (node
->loc
, cval
))
2572 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2573 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2575 /* Redirect the equivalence link to the new canonical
2576 value, or simply remove it if it would point at
2579 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2580 0, node
->init
, node
->set_src
, NO_INSERT
);
2581 delete_variable_part (set
, dv_as_value (dv
),
2582 dv_from_value (node
->loc
), 0);
2587 decl_or_value cdv
= dv_from_value (cval
);
2589 /* Keep the remaining values connected, accumulating links
2590 in the canonical value. */
2591 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2593 if (node
->loc
== cval
)
2595 else if (GET_CODE (node
->loc
) == REG
)
2596 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2597 node
->set_src
, NO_INSERT
);
2598 else if (GET_CODE (node
->loc
) == MEM
)
2599 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2600 node
->set_src
, NO_INSERT
);
2602 set_variable_part (set
, node
->loc
, cdv
, 0,
2603 node
->init
, node
->set_src
, NO_INSERT
);
2607 /* We remove this last, to make sure that the canonical value is not
2608 removed to the point of requiring reinsertion. */
2610 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
2612 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2615 /* Find the values in a given location and map the val to another
2616 value, if it is unique, or add the location as one holding the
2620 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
)
2622 decl_or_value dv
= dv_from_value (val
);
2624 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2627 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2629 fprintf (dump_file
, "head: ");
2630 print_inline_rtx (dump_file
, val
, 0);
2631 fputs (" is at ", dump_file
);
2632 print_inline_rtx (dump_file
, loc
, 0);
2633 fputc ('\n', dump_file
);
2636 val_reset (set
, dv
);
2638 gcc_checking_assert (!unsuitable_loc (loc
));
2642 attrs
*node
, *found
= NULL
;
2644 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2645 if (dv_is_value_p (node
->dv
)
2646 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2650 /* Map incoming equivalences. ??? Wouldn't it be nice if
2651 we just started sharing the location lists? Maybe a
2652 circular list ending at the value itself or some
2654 set_variable_part (set
, dv_as_value (node
->dv
),
2655 dv_from_value (val
), node
->offset
,
2656 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2657 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2658 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2661 /* If we didn't find any equivalence, we need to remember that
2662 this value is held in the named register. */
2666 /* ??? Attempt to find and merge equivalent MEMs or other
2669 val_bind (set
, val
, loc
, false);
2672 /* Initialize dataflow set SET to be empty.
2673 VARS_SIZE is the initial size of hash table VARS. */
2676 dataflow_set_init (dataflow_set
*set
)
2678 init_attrs_list_set (set
->regs
);
2679 set
->vars
= shared_hash_copy (empty_shared_hash
);
2680 set
->stack_adjust
= 0;
2681 set
->traversed_vars
= NULL
;
2684 /* Delete the contents of dataflow set SET. */
2687 dataflow_set_clear (dataflow_set
*set
)
2691 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2692 attrs_list_clear (&set
->regs
[i
]);
2694 shared_hash_destroy (set
->vars
);
2695 set
->vars
= shared_hash_copy (empty_shared_hash
);
2698 /* Copy the contents of dataflow set SRC to DST. */
2701 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2705 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2706 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2708 shared_hash_destroy (dst
->vars
);
2709 dst
->vars
= shared_hash_copy (src
->vars
);
2710 dst
->stack_adjust
= src
->stack_adjust
;
2713 /* Information for merging lists of locations for a given offset of variable.
2715 struct variable_union_info
2717 /* Node of the location chain. */
2720 /* The sum of positions in the input chains. */
2723 /* The position in the chain of DST dataflow set. */
2727 /* Buffer for location list sorting and its allocated size. */
2728 static struct variable_union_info
*vui_vec
;
2729 static int vui_allocated
;
2731 /* Compare function for qsort, order the structures by POS element. */
2734 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2736 const struct variable_union_info
*const i1
=
2737 (const struct variable_union_info
*) n1
;
2738 const struct variable_union_info
*const i2
=
2739 ( const struct variable_union_info
*) n2
;
2741 if (i1
->pos
!= i2
->pos
)
2742 return i1
->pos
- i2
->pos
;
2744 return (i1
->pos_dst
- i2
->pos_dst
);
2747 /* Compute union of location parts of variable *SLOT and the same variable
2748 from hash table DATA. Compute "sorted" union of the location chains
2749 for common offsets, i.e. the locations of a variable part are sorted by
2750 a priority where the priority is the sum of the positions in the 2 chains
2751 (if a location is only in one list the position in the second list is
2752 defined to be larger than the length of the chains).
2753 When we are updating the location parts the newest location is in the
2754 beginning of the chain, so when we do the described "sorted" union
2755 we keep the newest locations in the beginning. */
2758 variable_union (variable
*src
, dataflow_set
*set
)
2764 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2765 if (!dstp
|| !*dstp
)
2769 dst_can_be_shared
= false;
2771 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2775 /* Continue traversing the hash table. */
2781 gcc_assert (src
->n_var_parts
);
2782 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2784 /* We can combine one-part variables very efficiently, because their
2785 entries are in canonical order. */
2788 location_chain
**nodep
, *dnode
, *snode
;
2790 gcc_assert (src
->n_var_parts
== 1
2791 && dst
->n_var_parts
== 1);
2793 snode
= src
->var_part
[0].loc_chain
;
2796 restart_onepart_unshared
:
2797 nodep
= &dst
->var_part
[0].loc_chain
;
2803 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2807 location_chain
*nnode
;
2809 if (shared_var_p (dst
, set
->vars
))
2811 dstp
= unshare_variable (set
, dstp
, dst
,
2812 VAR_INIT_STATUS_INITIALIZED
);
2814 goto restart_onepart_unshared
;
2817 *nodep
= nnode
= new location_chain
;
2818 nnode
->loc
= snode
->loc
;
2819 nnode
->init
= snode
->init
;
2820 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2821 nnode
->set_src
= NULL
;
2823 nnode
->set_src
= snode
->set_src
;
2824 nnode
->next
= dnode
;
2828 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2831 snode
= snode
->next
;
2833 nodep
= &dnode
->next
;
2840 gcc_checking_assert (!src
->onepart
);
2842 /* Count the number of location parts, result is K. */
2843 for (i
= 0, j
= 0, k
= 0;
2844 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2846 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2851 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2856 k
+= src
->n_var_parts
- i
;
2857 k
+= dst
->n_var_parts
- j
;
2859 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2860 thus there are at most MAX_VAR_PARTS different offsets. */
2861 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2863 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2865 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2869 i
= src
->n_var_parts
- 1;
2870 j
= dst
->n_var_parts
- 1;
2871 dst
->n_var_parts
= k
;
2873 for (k
--; k
>= 0; k
--)
2875 location_chain
*node
, *node2
;
2877 if (i
>= 0 && j
>= 0
2878 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2880 /* Compute the "sorted" union of the chains, i.e. the locations which
2881 are in both chains go first, they are sorted by the sum of
2882 positions in the chains. */
2885 struct variable_union_info
*vui
;
2887 /* If DST is shared compare the location chains.
2888 If they are different we will modify the chain in DST with
2889 high probability so make a copy of DST. */
2890 if (shared_var_p (dst
, set
->vars
))
2892 for (node
= src
->var_part
[i
].loc_chain
,
2893 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2894 node
= node
->next
, node2
= node2
->next
)
2896 if (!((REG_P (node2
->loc
)
2897 && REG_P (node
->loc
)
2898 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2899 || rtx_equal_p (node2
->loc
, node
->loc
)))
2901 if (node2
->init
< node
->init
)
2902 node2
->init
= node
->init
;
2908 dstp
= unshare_variable (set
, dstp
, dst
,
2909 VAR_INIT_STATUS_UNKNOWN
);
2910 dst
= (variable
*)*dstp
;
2915 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2918 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2923 /* The most common case, much simpler, no qsort is needed. */
2924 location_chain
*dstnode
= dst
->var_part
[j
].loc_chain
;
2925 dst
->var_part
[k
].loc_chain
= dstnode
;
2926 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2928 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2929 if (!((REG_P (dstnode
->loc
)
2930 && REG_P (node
->loc
)
2931 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2932 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2934 location_chain
*new_node
;
2936 /* Copy the location from SRC. */
2937 new_node
= new location_chain
;
2938 new_node
->loc
= node
->loc
;
2939 new_node
->init
= node
->init
;
2940 if (!node
->set_src
|| MEM_P (node
->set_src
))
2941 new_node
->set_src
= NULL
;
2943 new_node
->set_src
= node
->set_src
;
2944 node2
->next
= new_node
;
2951 if (src_l
+ dst_l
> vui_allocated
)
2953 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2954 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2959 /* Fill in the locations from DST. */
2960 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2961 node
= node
->next
, jj
++)
2964 vui
[jj
].pos_dst
= jj
;
2966 /* Pos plus value larger than a sum of 2 valid positions. */
2967 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2970 /* Fill in the locations from SRC. */
2972 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2973 node
= node
->next
, ii
++)
2975 /* Find location from NODE. */
2976 for (jj
= 0; jj
< dst_l
; jj
++)
2978 if ((REG_P (vui
[jj
].lc
->loc
)
2979 && REG_P (node
->loc
)
2980 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2981 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2983 vui
[jj
].pos
= jj
+ ii
;
2987 if (jj
>= dst_l
) /* The location has not been found. */
2989 location_chain
*new_node
;
2991 /* Copy the location from SRC. */
2992 new_node
= new location_chain
;
2993 new_node
->loc
= node
->loc
;
2994 new_node
->init
= node
->init
;
2995 if (!node
->set_src
|| MEM_P (node
->set_src
))
2996 new_node
->set_src
= NULL
;
2998 new_node
->set_src
= node
->set_src
;
2999 vui
[n
].lc
= new_node
;
3000 vui
[n
].pos_dst
= src_l
+ dst_l
;
3001 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
3008 /* Special case still very common case. For dst_l == 2
3009 all entries dst_l ... n-1 are sorted, with for i >= dst_l
3010 vui[i].pos == i + src_l + dst_l. */
3011 if (vui
[0].pos
> vui
[1].pos
)
3013 /* Order should be 1, 0, 2... */
3014 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
3015 vui
[1].lc
->next
= vui
[0].lc
;
3018 vui
[0].lc
->next
= vui
[2].lc
;
3019 vui
[n
- 1].lc
->next
= NULL
;
3022 vui
[0].lc
->next
= NULL
;
3027 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3028 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
3030 /* Order should be 0, 2, 1, 3... */
3031 vui
[0].lc
->next
= vui
[2].lc
;
3032 vui
[2].lc
->next
= vui
[1].lc
;
3035 vui
[1].lc
->next
= vui
[3].lc
;
3036 vui
[n
- 1].lc
->next
= NULL
;
3039 vui
[1].lc
->next
= NULL
;
3044 /* Order should be 0, 1, 2... */
3046 vui
[n
- 1].lc
->next
= NULL
;
3049 for (; ii
< n
; ii
++)
3050 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3054 qsort (vui
, n
, sizeof (struct variable_union_info
),
3055 variable_union_info_cmp_pos
);
3057 /* Reconnect the nodes in sorted order. */
3058 for (ii
= 1; ii
< n
; ii
++)
3059 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3060 vui
[n
- 1].lc
->next
= NULL
;
3061 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3064 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3069 else if ((i
>= 0 && j
>= 0
3070 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3073 dst
->var_part
[k
] = dst
->var_part
[j
];
3076 else if ((i
>= 0 && j
>= 0
3077 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3080 location_chain
**nextp
;
3082 /* Copy the chain from SRC. */
3083 nextp
= &dst
->var_part
[k
].loc_chain
;
3084 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3086 location_chain
*new_lc
;
3088 new_lc
= new location_chain
;
3089 new_lc
->next
= NULL
;
3090 new_lc
->init
= node
->init
;
3091 if (!node
->set_src
|| MEM_P (node
->set_src
))
3092 new_lc
->set_src
= NULL
;
3094 new_lc
->set_src
= node
->set_src
;
3095 new_lc
->loc
= node
->loc
;
3098 nextp
= &new_lc
->next
;
3101 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3104 dst
->var_part
[k
].cur_loc
= NULL
;
3107 if (flag_var_tracking_uninit
)
3108 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3110 location_chain
*node
, *node2
;
3111 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3112 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3113 if (rtx_equal_p (node
->loc
, node2
->loc
))
3115 if (node
->init
> node2
->init
)
3116 node2
->init
= node
->init
;
3120 /* Continue traversing the hash table. */
3124 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3127 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
3131 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3132 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
3134 if (dst
->vars
== empty_shared_hash
)
3136 shared_hash_destroy (dst
->vars
);
3137 dst
->vars
= shared_hash_copy (src
->vars
);
3141 variable_iterator_type hi
;
3144 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src
->vars
),
3146 variable_union (var
, dst
);
3150 /* Whether the value is currently being expanded. */
3151 #define VALUE_RECURSED_INTO(x) \
3152 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3154 /* Whether no expansion was found, saving useless lookups.
3155 It must only be set when VALUE_CHANGED is clear. */
3156 #define NO_LOC_P(x) \
3157 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3159 /* Whether cur_loc in the value needs to be (re)computed. */
3160 #define VALUE_CHANGED(x) \
3161 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3162 /* Whether cur_loc in the decl needs to be (re)computed. */
3163 #define DECL_CHANGED(x) TREE_VISITED (x)
3165 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3166 user DECLs, this means they're in changed_variables. Values and
3167 debug exprs may be left with this flag set if no user variable
3168 requires them to be evaluated. */
3171 set_dv_changed (decl_or_value dv
, bool newv
)
3173 switch (dv_onepart_p (dv
))
3177 NO_LOC_P (dv_as_value (dv
)) = false;
3178 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
3183 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3187 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3192 /* Return true if DV needs to have its cur_loc recomputed. */
3195 dv_changed_p (decl_or_value dv
)
3197 return (dv_is_value_p (dv
)
3198 ? VALUE_CHANGED (dv_as_value (dv
))
3199 : DECL_CHANGED (dv_as_decl (dv
)));
3202 /* Return a location list node whose loc is rtx_equal to LOC, in the
3203 location list of a one-part variable or value VAR, or in that of
3204 any values recursively mentioned in the location lists. VARS must
3205 be in star-canonical form. */
3207 static location_chain
*
3208 find_loc_in_1pdv (rtx loc
, variable
*var
, variable_table_type
*vars
)
3210 location_chain
*node
;
3211 enum rtx_code loc_code
;
3216 gcc_checking_assert (var
->onepart
);
3218 if (!var
->n_var_parts
)
3221 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3223 loc_code
= GET_CODE (loc
);
3224 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3229 if (GET_CODE (node
->loc
) != loc_code
)
3231 if (GET_CODE (node
->loc
) != VALUE
)
3234 else if (loc
== node
->loc
)
3236 else if (loc_code
!= VALUE
)
3238 if (rtx_equal_p (loc
, node
->loc
))
3243 /* Since we're in star-canonical form, we don't need to visit
3244 non-canonical nodes: one-part variables and non-canonical
3245 values would only point back to the canonical node. */
3246 if (dv_is_value_p (var
->dv
)
3247 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3249 /* Skip all subsequent VALUEs. */
3250 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3253 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3254 dv_as_value (var
->dv
)));
3255 if (loc
== node
->loc
)
3261 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3262 gcc_checking_assert (!node
->next
);
3264 dv
= dv_from_value (node
->loc
);
3265 rvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
3266 return find_loc_in_1pdv (loc
, rvar
, vars
);
3269 /* ??? Gotta look in cselib_val locations too. */
3274 /* Hash table iteration argument passed to variable_merge. */
3277 /* The set in which the merge is to be inserted. */
3279 /* The set that we're iterating in. */
3281 /* The set that may contain the other dv we are to merge with. */
3283 /* Number of onepart dvs in src. */
3284 int src_onepart_cnt
;
3287 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3288 loc_cmp order, and it is maintained as such. */
3291 insert_into_intersection (location_chain
**nodep
, rtx loc
,
3292 enum var_init_status status
)
3294 location_chain
*node
;
3297 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3298 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
3300 node
->init
= MIN (node
->init
, status
);
3306 node
= new location_chain
;
3309 node
->set_src
= NULL
;
3310 node
->init
= status
;
3311 node
->next
= *nodep
;
3315 /* Insert in DEST the intersection of the locations present in both
3316 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3317 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3321 intersect_loc_chains (rtx val
, location_chain
**dest
, struct dfset_merge
*dsm
,
3322 location_chain
*s1node
, variable
*s2var
)
3324 dataflow_set
*s1set
= dsm
->cur
;
3325 dataflow_set
*s2set
= dsm
->src
;
3326 location_chain
*found
;
3330 location_chain
*s2node
;
3332 gcc_checking_assert (s2var
->onepart
);
3334 if (s2var
->n_var_parts
)
3336 s2node
= s2var
->var_part
[0].loc_chain
;
3338 for (; s1node
&& s2node
;
3339 s1node
= s1node
->next
, s2node
= s2node
->next
)
3340 if (s1node
->loc
!= s2node
->loc
)
3342 else if (s1node
->loc
== val
)
3345 insert_into_intersection (dest
, s1node
->loc
,
3346 MIN (s1node
->init
, s2node
->init
));
3350 for (; s1node
; s1node
= s1node
->next
)
3352 if (s1node
->loc
== val
)
3355 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3356 shared_hash_htab (s2set
->vars
))))
3358 insert_into_intersection (dest
, s1node
->loc
,
3359 MIN (s1node
->init
, found
->init
));
3363 if (GET_CODE (s1node
->loc
) == VALUE
3364 && !VALUE_RECURSED_INTO (s1node
->loc
))
3366 decl_or_value dv
= dv_from_value (s1node
->loc
);
3367 variable
*svar
= shared_hash_find (s1set
->vars
, dv
);
3370 if (svar
->n_var_parts
== 1)
3372 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3373 intersect_loc_chains (val
, dest
, dsm
,
3374 svar
->var_part
[0].loc_chain
,
3376 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3381 /* ??? gotta look in cselib_val locations too. */
3383 /* ??? if the location is equivalent to any location in src,
3384 searched recursively
3386 add to dst the values needed to represent the equivalence
3388 telling whether locations S is equivalent to another dv's
3391 for each location D in the list
3393 if S and D satisfy rtx_equal_p, then it is present
3395 else if D is a value, recurse without cycles
3397 else if S and D have the same CODE and MODE
3399 for each operand oS and the corresponding oD
3401 if oS and oD are not equivalent, then S an D are not equivalent
3403 else if they are RTX vectors
3405 if any vector oS element is not equivalent to its respective oD,
3406 then S and D are not equivalent
3414 /* Return -1 if X should be before Y in a location list for a 1-part
3415 variable, 1 if Y should be before X, and 0 if they're equivalent
3416 and should not appear in the list. */
3419 loc_cmp (rtx x
, rtx y
)
3422 RTX_CODE code
= GET_CODE (x
);
3432 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3433 if (REGNO (x
) == REGNO (y
))
3435 else if (REGNO (x
) < REGNO (y
))
3448 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3449 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
3455 if (GET_CODE (x
) == VALUE
)
3457 if (GET_CODE (y
) != VALUE
)
3459 /* Don't assert the modes are the same, that is true only
3460 when not recursing. (subreg:QI (value:SI 1:1) 0)
3461 and (subreg:QI (value:DI 2:2) 0) can be compared,
3462 even when the modes are different. */
3463 if (canon_value_cmp (x
, y
))
3469 if (GET_CODE (y
) == VALUE
)
3472 /* Entry value is the least preferable kind of expression. */
3473 if (GET_CODE (x
) == ENTRY_VALUE
)
3475 if (GET_CODE (y
) != ENTRY_VALUE
)
3477 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3478 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3481 if (GET_CODE (y
) == ENTRY_VALUE
)
3484 if (GET_CODE (x
) == GET_CODE (y
))
3485 /* Compare operands below. */;
3486 else if (GET_CODE (x
) < GET_CODE (y
))
3491 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3493 if (GET_CODE (x
) == DEBUG_EXPR
)
3495 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3496 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3498 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3499 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
3503 fmt
= GET_RTX_FORMAT (code
);
3504 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3508 if (XWINT (x
, i
) == XWINT (y
, i
))
3510 else if (XWINT (x
, i
) < XWINT (y
, i
))
3517 if (XINT (x
, i
) == XINT (y
, i
))
3519 else if (XINT (x
, i
) < XINT (y
, i
))
3526 /* Compare the vector length first. */
3527 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3528 /* Compare the vectors elements. */;
3529 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3534 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3535 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3536 XVECEXP (y
, i
, j
))))
3541 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3547 if (XSTR (x
, i
) == XSTR (y
, i
))
3553 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3561 /* These are just backpointers, so they don't matter. */
3568 /* It is believed that rtx's at this level will never
3569 contain anything but integers and other rtx's,
3570 except for within LABEL_REFs and SYMBOL_REFs. */
3574 if (CONST_WIDE_INT_P (x
))
3576 /* Compare the vector length first. */
3577 if (CONST_WIDE_INT_NUNITS (x
) >= CONST_WIDE_INT_NUNITS (y
))
3579 else if (CONST_WIDE_INT_NUNITS (x
) < CONST_WIDE_INT_NUNITS (y
))
3582 /* Compare the vectors elements. */;
3583 for (j
= CONST_WIDE_INT_NUNITS (x
) - 1; j
>= 0 ; j
--)
3585 if (CONST_WIDE_INT_ELT (x
, j
) < CONST_WIDE_INT_ELT (y
, j
))
3587 if (CONST_WIDE_INT_ELT (x
, j
) > CONST_WIDE_INT_ELT (y
, j
))
3595 /* Check the order of entries in one-part variables. */
3598 canonicalize_loc_order_check (variable
**slot
,
3599 dataflow_set
*data ATTRIBUTE_UNUSED
)
3601 variable
*var
= *slot
;
3602 location_chain
*node
, *next
;
3604 #ifdef ENABLE_RTL_CHECKING
3606 for (i
= 0; i
< var
->n_var_parts
; i
++)
3607 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3608 gcc_assert (!var
->in_changed_variables
);
3614 gcc_assert (var
->n_var_parts
== 1);
3615 node
= var
->var_part
[0].loc_chain
;
3618 while ((next
= node
->next
))
3620 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3627 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3628 more likely to be chosen as canonical for an equivalence set.
3629 Ensure less likely values can reach more likely neighbors, making
3630 the connections bidirectional. */
3633 canonicalize_values_mark (variable
**slot
, dataflow_set
*set
)
3635 variable
*var
= *slot
;
3636 decl_or_value dv
= var
->dv
;
3638 location_chain
*node
;
3640 if (!dv_is_value_p (dv
))
3643 gcc_checking_assert (var
->n_var_parts
== 1);
3645 val
= dv_as_value (dv
);
3647 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3648 if (GET_CODE (node
->loc
) == VALUE
)
3650 if (canon_value_cmp (node
->loc
, val
))
3651 VALUE_RECURSED_INTO (val
) = true;
3654 decl_or_value odv
= dv_from_value (node
->loc
);
3656 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3658 set_slot_part (set
, val
, oslot
, odv
, 0,
3659 node
->init
, NULL_RTX
);
3661 VALUE_RECURSED_INTO (node
->loc
) = true;
3668 /* Remove redundant entries from equivalence lists in onepart
3669 variables, canonicalizing equivalence sets into star shapes. */
3672 canonicalize_values_star (variable
**slot
, dataflow_set
*set
)
3674 variable
*var
= *slot
;
3675 decl_or_value dv
= var
->dv
;
3676 location_chain
*node
;
3686 gcc_checking_assert (var
->n_var_parts
== 1);
3688 if (dv_is_value_p (dv
))
3690 cval
= dv_as_value (dv
);
3691 if (!VALUE_RECURSED_INTO (cval
))
3693 VALUE_RECURSED_INTO (cval
) = false;
3703 gcc_assert (var
->n_var_parts
== 1);
3705 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3706 if (GET_CODE (node
->loc
) == VALUE
)
3709 if (VALUE_RECURSED_INTO (node
->loc
))
3711 if (canon_value_cmp (node
->loc
, cval
))
3720 if (!has_marks
|| dv_is_decl_p (dv
))
3723 /* Keep it marked so that we revisit it, either after visiting a
3724 child node, or after visiting a new parent that might be
3726 VALUE_RECURSED_INTO (val
) = true;
3728 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3729 if (GET_CODE (node
->loc
) == VALUE
3730 && VALUE_RECURSED_INTO (node
->loc
))
3734 VALUE_RECURSED_INTO (cval
) = false;
3735 dv
= dv_from_value (cval
);
3736 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3739 gcc_assert (dv_is_decl_p (var
->dv
));
3740 /* The canonical value was reset and dropped.
3742 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3746 gcc_assert (dv_is_value_p (var
->dv
));
3747 if (var
->n_var_parts
== 0)
3749 gcc_assert (var
->n_var_parts
== 1);
3753 VALUE_RECURSED_INTO (val
) = false;
3758 /* Push values to the canonical one. */
3759 cdv
= dv_from_value (cval
);
3760 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3762 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3763 if (node
->loc
!= cval
)
3765 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3766 node
->init
, NULL_RTX
);
3767 if (GET_CODE (node
->loc
) == VALUE
)
3769 decl_or_value ndv
= dv_from_value (node
->loc
);
3771 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3774 if (canon_value_cmp (node
->loc
, val
))
3776 /* If it could have been a local minimum, it's not any more,
3777 since it's now neighbor to cval, so it may have to push
3778 to it. Conversely, if it wouldn't have prevailed over
3779 val, then whatever mark it has is fine: if it was to
3780 push, it will now push to a more canonical node, but if
3781 it wasn't, then it has already pushed any values it might
3783 VALUE_RECURSED_INTO (node
->loc
) = true;
3784 /* Make sure we visit node->loc by ensuring we cval is
3786 VALUE_RECURSED_INTO (cval
) = true;
3788 else if (!VALUE_RECURSED_INTO (node
->loc
))
3789 /* If we have no need to "recurse" into this node, it's
3790 already "canonicalized", so drop the link to the old
3792 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3794 else if (GET_CODE (node
->loc
) == REG
)
3796 attrs
*list
= set
->regs
[REGNO (node
->loc
)], **listp
;
3798 /* Change an existing attribute referring to dv so that it
3799 refers to cdv, removing any duplicate this might
3800 introduce, and checking that no previous duplicates
3801 existed, all in a single pass. */
3805 if (list
->offset
== 0
3806 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3807 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3814 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3817 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3822 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3824 *listp
= list
->next
;
3830 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3833 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3835 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3840 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3842 *listp
= list
->next
;
3848 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3857 if (list
->offset
== 0
3858 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3859 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3868 set_slot_part (set
, val
, cslot
, cdv
, 0,
3869 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3871 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3873 /* Variable may have been unshared. */
3875 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3876 && var
->var_part
[0].loc_chain
->next
== NULL
);
3878 if (VALUE_RECURSED_INTO (cval
))
3879 goto restart_with_cval
;
3884 /* Bind one-part variables to the canonical value in an equivalence
3885 set. Not doing this causes dataflow convergence failure in rare
3886 circumstances, see PR42873. Unfortunately we can't do this
3887 efficiently as part of canonicalize_values_star, since we may not
3888 have determined or even seen the canonical value of a set when we
3889 get to a variable that references another member of the set. */
3892 canonicalize_vars_star (variable
**slot
, dataflow_set
*set
)
3894 variable
*var
= *slot
;
3895 decl_or_value dv
= var
->dv
;
3896 location_chain
*node
;
3901 location_chain
*cnode
;
3903 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3906 gcc_assert (var
->n_var_parts
== 1);
3908 node
= var
->var_part
[0].loc_chain
;
3910 if (GET_CODE (node
->loc
) != VALUE
)
3913 gcc_assert (!node
->next
);
3916 /* Push values to the canonical one. */
3917 cdv
= dv_from_value (cval
);
3918 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3922 gcc_assert (cvar
->n_var_parts
== 1);
3924 cnode
= cvar
->var_part
[0].loc_chain
;
3926 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3927 that are not “more canonical” than it. */
3928 if (GET_CODE (cnode
->loc
) != VALUE
3929 || !canon_value_cmp (cnode
->loc
, cval
))
3932 /* CVAL was found to be non-canonical. Change the variable to point
3933 to the canonical VALUE. */
3934 gcc_assert (!cnode
->next
);
3937 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3938 node
->init
, node
->set_src
);
3939 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3944 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3945 corresponding entry in DSM->src. Multi-part variables are combined
3946 with variable_union, whereas onepart dvs are combined with
3950 variable_merge_over_cur (variable
*s1var
, struct dfset_merge
*dsm
)
3952 dataflow_set
*dst
= dsm
->dst
;
3954 variable
*s2var
, *dvar
= NULL
;
3955 decl_or_value dv
= s1var
->dv
;
3956 onepart_enum onepart
= s1var
->onepart
;
3959 location_chain
*node
, **nodep
;
3961 /* If the incoming onepart variable has an empty location list, then
3962 the intersection will be just as empty. For other variables,
3963 it's always union. */
3964 gcc_checking_assert (s1var
->n_var_parts
3965 && s1var
->var_part
[0].loc_chain
);
3968 return variable_union (s1var
, dst
);
3970 gcc_checking_assert (s1var
->n_var_parts
== 1);
3972 dvhash
= dv_htab_hash (dv
);
3973 if (dv_is_value_p (dv
))
3974 val
= dv_as_value (dv
);
3978 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3981 dst_can_be_shared
= false;
3985 dsm
->src_onepart_cnt
--;
3986 gcc_assert (s2var
->var_part
[0].loc_chain
3987 && s2var
->onepart
== onepart
3988 && s2var
->n_var_parts
== 1);
3990 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3994 gcc_assert (dvar
->refcount
== 1
3995 && dvar
->onepart
== onepart
3996 && dvar
->n_var_parts
== 1);
3997 nodep
= &dvar
->var_part
[0].loc_chain
;
4005 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
4007 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
4009 *dstslot
= dvar
= s2var
;
4014 dst_can_be_shared
= false;
4016 intersect_loc_chains (val
, nodep
, dsm
,
4017 s1var
->var_part
[0].loc_chain
, s2var
);
4023 dvar
= onepart_pool_allocate (onepart
);
4026 dvar
->n_var_parts
= 1;
4027 dvar
->onepart
= onepart
;
4028 dvar
->in_changed_variables
= false;
4029 dvar
->var_part
[0].loc_chain
= node
;
4030 dvar
->var_part
[0].cur_loc
= NULL
;
4032 VAR_LOC_1PAUX (dvar
) = NULL
;
4034 VAR_PART_OFFSET (dvar
, 0) = 0;
4037 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
4039 gcc_assert (!*dstslot
);
4047 nodep
= &dvar
->var_part
[0].loc_chain
;
4048 while ((node
= *nodep
))
4050 location_chain
**nextp
= &node
->next
;
4052 if (GET_CODE (node
->loc
) == REG
)
4056 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4057 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4058 && dv_is_value_p (list
->dv
))
4062 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4064 /* If this value became canonical for another value that had
4065 this register, we want to leave it alone. */
4066 else if (dv_as_value (list
->dv
) != val
)
4068 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4070 node
->init
, NULL_RTX
);
4071 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4073 /* Since nextp points into the removed node, we can't
4074 use it. The pointer to the next node moved to nodep.
4075 However, if the variable we're walking is unshared
4076 during our walk, we'll keep walking the location list
4077 of the previously-shared variable, in which case the
4078 node won't have been removed, and we'll want to skip
4079 it. That's why we test *nodep here. */
4085 /* Canonicalization puts registers first, so we don't have to
4091 if (dvar
!= *dstslot
)
4093 nodep
= &dvar
->var_part
[0].loc_chain
;
4097 /* Mark all referenced nodes for canonicalization, and make sure
4098 we have mutual equivalence links. */
4099 VALUE_RECURSED_INTO (val
) = true;
4100 for (node
= *nodep
; node
; node
= node
->next
)
4101 if (GET_CODE (node
->loc
) == VALUE
)
4103 VALUE_RECURSED_INTO (node
->loc
) = true;
4104 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4105 node
->init
, NULL
, INSERT
);
4108 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4109 gcc_assert (*dstslot
== dvar
);
4110 canonicalize_values_star (dstslot
, dst
);
4111 gcc_checking_assert (dstslot
4112 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4118 bool has_value
= false, has_other
= false;
4120 /* If we have one value and anything else, we're going to
4121 canonicalize this, so make sure all values have an entry in
4122 the table and are marked for canonicalization. */
4123 for (node
= *nodep
; node
; node
= node
->next
)
4125 if (GET_CODE (node
->loc
) == VALUE
)
4127 /* If this was marked during register canonicalization,
4128 we know we have to canonicalize values. */
4143 if (has_value
&& has_other
)
4145 for (node
= *nodep
; node
; node
= node
->next
)
4147 if (GET_CODE (node
->loc
) == VALUE
)
4149 decl_or_value dv
= dv_from_value (node
->loc
);
4150 variable
**slot
= NULL
;
4152 if (shared_hash_shared (dst
->vars
))
4153 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4155 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4159 variable
*var
= onepart_pool_allocate (ONEPART_VALUE
);
4162 var
->n_var_parts
= 1;
4163 var
->onepart
= ONEPART_VALUE
;
4164 var
->in_changed_variables
= false;
4165 var
->var_part
[0].loc_chain
= NULL
;
4166 var
->var_part
[0].cur_loc
= NULL
;
4167 VAR_LOC_1PAUX (var
) = NULL
;
4171 VALUE_RECURSED_INTO (node
->loc
) = true;
4175 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4176 gcc_assert (*dstslot
== dvar
);
4177 canonicalize_values_star (dstslot
, dst
);
4178 gcc_checking_assert (dstslot
4179 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4185 if (!onepart_variable_different_p (dvar
, s2var
))
4187 variable_htab_free (dvar
);
4188 *dstslot
= dvar
= s2var
;
4191 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4193 variable_htab_free (dvar
);
4194 *dstslot
= dvar
= s1var
;
4196 dst_can_be_shared
= false;
4199 dst_can_be_shared
= false;
4204 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4205 multi-part variable. Unions of multi-part variables and
4206 intersections of one-part ones will be handled in
4207 variable_merge_over_cur(). */
4210 variable_merge_over_src (variable
*s2var
, struct dfset_merge
*dsm
)
4212 dataflow_set
*dst
= dsm
->dst
;
4213 decl_or_value dv
= s2var
->dv
;
4215 if (!s2var
->onepart
)
4217 variable
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4223 dsm
->src_onepart_cnt
++;
4227 /* Combine dataflow set information from SRC2 into DST, using PDST
4228 to carry over information across passes. */
4231 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4233 dataflow_set cur
= *dst
;
4234 dataflow_set
*src1
= &cur
;
4235 struct dfset_merge dsm
;
4237 size_t src1_elems
, src2_elems
;
4238 variable_iterator_type hi
;
4241 src1_elems
= shared_hash_htab (src1
->vars
)->elements ();
4242 src2_elems
= shared_hash_htab (src2
->vars
)->elements ();
4243 dataflow_set_init (dst
);
4244 dst
->stack_adjust
= cur
.stack_adjust
;
4245 shared_hash_destroy (dst
->vars
);
4246 dst
->vars
= new shared_hash
;
4247 dst
->vars
->refcount
= 1;
4248 dst
->vars
->htab
= new variable_table_type (MAX (src1_elems
, src2_elems
));
4250 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4251 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4256 dsm
.src_onepart_cnt
= 0;
4258 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.src
->vars
),
4260 variable_merge_over_src (var
, &dsm
);
4261 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.cur
->vars
),
4263 variable_merge_over_cur (var
, &dsm
);
4265 if (dsm
.src_onepart_cnt
)
4266 dst_can_be_shared
= false;
4268 dataflow_set_destroy (src1
);
4271 /* Mark register equivalences. */
4274 dataflow_set_equiv_regs (dataflow_set
*set
)
4277 attrs
*list
, **listp
;
4279 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4281 rtx canon
[NUM_MACHINE_MODES
];
4283 /* If the list is empty or one entry, no need to canonicalize
4285 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4288 memset (canon
, 0, sizeof (canon
));
4290 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4291 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4293 rtx val
= dv_as_value (list
->dv
);
4294 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4297 if (canon_value_cmp (val
, cval
))
4301 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4302 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4304 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4309 if (dv_is_value_p (list
->dv
))
4311 rtx val
= dv_as_value (list
->dv
);
4316 VALUE_RECURSED_INTO (val
) = true;
4317 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4318 VAR_INIT_STATUS_INITIALIZED
,
4322 VALUE_RECURSED_INTO (cval
) = true;
4323 set_variable_part (set
, cval
, list
->dv
, 0,
4324 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4327 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4328 listp
= list
? &list
->next
: listp
)
4329 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4331 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4337 if (dv_is_value_p (list
->dv
))
4339 rtx val
= dv_as_value (list
->dv
);
4340 if (!VALUE_RECURSED_INTO (val
))
4344 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4345 canonicalize_values_star (slot
, set
);
4352 /* Remove any redundant values in the location list of VAR, which must
4353 be unshared and 1-part. */
4356 remove_duplicate_values (variable
*var
)
4358 location_chain
*node
, **nodep
;
4360 gcc_assert (var
->onepart
);
4361 gcc_assert (var
->n_var_parts
== 1);
4362 gcc_assert (var
->refcount
== 1);
4364 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4366 if (GET_CODE (node
->loc
) == VALUE
)
4368 if (VALUE_RECURSED_INTO (node
->loc
))
4370 /* Remove duplicate value node. */
4371 *nodep
= node
->next
;
4376 VALUE_RECURSED_INTO (node
->loc
) = true;
4378 nodep
= &node
->next
;
4381 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4382 if (GET_CODE (node
->loc
) == VALUE
)
4384 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4385 VALUE_RECURSED_INTO (node
->loc
) = false;
4390 /* Hash table iteration argument passed to variable_post_merge. */
4391 struct dfset_post_merge
4393 /* The new input set for the current block. */
4395 /* Pointer to the permanent input set for the current block, or
4397 dataflow_set
**permp
;
4400 /* Create values for incoming expressions associated with one-part
4401 variables that don't have value numbers for them. */
4404 variable_post_merge_new_vals (variable
**slot
, dfset_post_merge
*dfpm
)
4406 dataflow_set
*set
= dfpm
->set
;
4407 variable
*var
= *slot
;
4408 location_chain
*node
;
4410 if (!var
->onepart
|| !var
->n_var_parts
)
4413 gcc_assert (var
->n_var_parts
== 1);
4415 if (dv_is_decl_p (var
->dv
))
4417 bool check_dupes
= false;
4420 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4422 if (GET_CODE (node
->loc
) == VALUE
)
4423 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4424 else if (GET_CODE (node
->loc
) == REG
)
4426 attrs
*att
, **attp
, **curp
= NULL
;
4428 if (var
->refcount
!= 1)
4430 slot
= unshare_variable (set
, slot
, var
,
4431 VAR_INIT_STATUS_INITIALIZED
);
4436 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4438 if (att
->offset
== 0
4439 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4441 if (dv_is_value_p (att
->dv
))
4443 rtx cval
= dv_as_value (att
->dv
);
4448 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4456 if ((*curp
)->offset
== 0
4457 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4458 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4461 curp
= &(*curp
)->next
;
4472 *dfpm
->permp
= XNEW (dataflow_set
);
4473 dataflow_set_init (*dfpm
->permp
);
4476 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4477 att
; att
= att
->next
)
4478 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4480 gcc_assert (att
->offset
== 0
4481 && dv_is_value_p (att
->dv
));
4482 val_reset (set
, att
->dv
);
4489 cval
= dv_as_value (cdv
);
4493 /* Create a unique value to hold this register,
4494 that ought to be found and reused in
4495 subsequent rounds. */
4497 gcc_assert (!cselib_lookup (node
->loc
,
4498 GET_MODE (node
->loc
), 0,
4500 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4502 cselib_preserve_value (v
);
4503 cselib_invalidate_rtx (node
->loc
);
4505 cdv
= dv_from_value (cval
);
4508 "Created new value %u:%u for reg %i\n",
4509 v
->uid
, v
->hash
, REGNO (node
->loc
));
4512 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4513 VAR_INIT_STATUS_INITIALIZED
,
4514 cdv
, 0, NULL
, INSERT
);
4520 /* Remove attribute referring to the decl, which now
4521 uses the value for the register, already existing or
4522 to be added when we bring perm in. */
4530 remove_duplicate_values (var
);
4536 /* Reset values in the permanent set that are not associated with the
4537 chosen expression. */
4540 variable_post_merge_perm_vals (variable
**pslot
, dfset_post_merge
*dfpm
)
4542 dataflow_set
*set
= dfpm
->set
;
4543 variable
*pvar
= *pslot
, *var
;
4544 location_chain
*pnode
;
4548 gcc_assert (dv_is_value_p (pvar
->dv
)
4549 && pvar
->n_var_parts
== 1);
4550 pnode
= pvar
->var_part
[0].loc_chain
;
4553 && REG_P (pnode
->loc
));
4557 var
= shared_hash_find (set
->vars
, dv
);
4560 /* Although variable_post_merge_new_vals may have made decls
4561 non-star-canonical, values that pre-existed in canonical form
4562 remain canonical, and newly-created values reference a single
4563 REG, so they are canonical as well. Since VAR has the
4564 location list for a VALUE, using find_loc_in_1pdv for it is
4565 fine, since VALUEs don't map back to DECLs. */
4566 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4568 val_reset (set
, dv
);
4571 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4572 if (att
->offset
== 0
4573 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4574 && dv_is_value_p (att
->dv
))
4577 /* If there is a value associated with this register already, create
4579 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4581 rtx cval
= dv_as_value (att
->dv
);
4582 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4583 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4588 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4590 variable_union (pvar
, set
);
4596 /* Just checking stuff and registering register attributes for
4600 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4602 struct dfset_post_merge dfpm
;
4607 shared_hash_htab (set
->vars
)
4608 ->traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4610 shared_hash_htab ((*permp
)->vars
)
4611 ->traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4612 shared_hash_htab (set
->vars
)
4613 ->traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4614 shared_hash_htab (set
->vars
)
4615 ->traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4618 /* Return a node whose loc is a MEM that refers to EXPR in the
4619 location list of a one-part variable or value VAR, or in that of
4620 any values recursively mentioned in the location lists. */
4622 static location_chain
*
4623 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type
*vars
)
4625 location_chain
*node
;
4628 location_chain
*where
= NULL
;
4633 gcc_assert (GET_CODE (val
) == VALUE
4634 && !VALUE_RECURSED_INTO (val
));
4636 dv
= dv_from_value (val
);
4637 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
4642 gcc_assert (var
->onepart
);
4644 if (!var
->n_var_parts
)
4647 VALUE_RECURSED_INTO (val
) = true;
4649 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4650 if (MEM_P (node
->loc
)
4651 && MEM_EXPR (node
->loc
) == expr
4652 && int_mem_offset (node
->loc
) == 0)
4657 else if (GET_CODE (node
->loc
) == VALUE
4658 && !VALUE_RECURSED_INTO (node
->loc
)
4659 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4662 VALUE_RECURSED_INTO (val
) = false;
4667 /* Return TRUE if the value of MEM may vary across a call. */
4670 mem_dies_at_call (rtx mem
)
4672 tree expr
= MEM_EXPR (mem
);
4678 decl
= get_base_address (expr
);
4686 return (may_be_aliased (decl
)
4687 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4690 /* Remove all MEMs from the location list of a hash table entry for a
4691 one-part variable, except those whose MEM attributes map back to
4692 the variable itself, directly or within a VALUE. */
4695 dataflow_set_preserve_mem_locs (variable
**slot
, dataflow_set
*set
)
4697 variable
*var
= *slot
;
4699 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4701 tree decl
= dv_as_decl (var
->dv
);
4702 location_chain
*loc
, **locp
;
4703 bool changed
= false;
4705 if (!var
->n_var_parts
)
4708 gcc_assert (var
->n_var_parts
== 1);
4710 if (shared_var_p (var
, set
->vars
))
4712 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4714 /* We want to remove dying MEMs that don't refer to DECL. */
4715 if (GET_CODE (loc
->loc
) == MEM
4716 && (MEM_EXPR (loc
->loc
) != decl
4717 || int_mem_offset (loc
->loc
) != 0)
4718 && mem_dies_at_call (loc
->loc
))
4720 /* We want to move here MEMs that do refer to DECL. */
4721 else if (GET_CODE (loc
->loc
) == VALUE
4722 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4723 shared_hash_htab (set
->vars
)))
4730 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4732 gcc_assert (var
->n_var_parts
== 1);
4735 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4738 rtx old_loc
= loc
->loc
;
4739 if (GET_CODE (old_loc
) == VALUE
)
4741 location_chain
*mem_node
4742 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4743 shared_hash_htab (set
->vars
));
4745 /* ??? This picks up only one out of multiple MEMs that
4746 refer to the same variable. Do we ever need to be
4747 concerned about dealing with more than one, or, given
4748 that they should all map to the same variable
4749 location, their addresses will have been merged and
4750 they will be regarded as equivalent? */
4753 loc
->loc
= mem_node
->loc
;
4754 loc
->set_src
= mem_node
->set_src
;
4755 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4759 if (GET_CODE (loc
->loc
) != MEM
4760 || (MEM_EXPR (loc
->loc
) == decl
4761 && int_mem_offset (loc
->loc
) == 0)
4762 || !mem_dies_at_call (loc
->loc
))
4764 if (old_loc
!= loc
->loc
&& emit_notes
)
4766 if (old_loc
== var
->var_part
[0].cur_loc
)
4769 var
->var_part
[0].cur_loc
= NULL
;
4778 if (old_loc
== var
->var_part
[0].cur_loc
)
4781 var
->var_part
[0].cur_loc
= NULL
;
4788 if (!var
->var_part
[0].loc_chain
)
4794 variable_was_changed (var
, set
);
4800 /* Remove all MEMs from the location list of a hash table entry for a
4801 onepart variable. */
4804 dataflow_set_remove_mem_locs (variable
**slot
, dataflow_set
*set
)
4806 variable
*var
= *slot
;
4808 if (var
->onepart
!= NOT_ONEPART
)
4810 location_chain
*loc
, **locp
;
4811 bool changed
= false;
4814 gcc_assert (var
->n_var_parts
== 1);
4816 if (shared_var_p (var
, set
->vars
))
4818 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4819 if (GET_CODE (loc
->loc
) == MEM
4820 && mem_dies_at_call (loc
->loc
))
4826 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4828 gcc_assert (var
->n_var_parts
== 1);
4831 if (VAR_LOC_1PAUX (var
))
4832 cur_loc
= VAR_LOC_FROM (var
);
4834 cur_loc
= var
->var_part
[0].cur_loc
;
4836 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4839 if (GET_CODE (loc
->loc
) != MEM
4840 || !mem_dies_at_call (loc
->loc
))
4847 /* If we have deleted the location which was last emitted
4848 we have to emit new location so add the variable to set
4849 of changed variables. */
4850 if (cur_loc
== loc
->loc
)
4853 var
->var_part
[0].cur_loc
= NULL
;
4854 if (VAR_LOC_1PAUX (var
))
4855 VAR_LOC_FROM (var
) = NULL
;
4860 if (!var
->var_part
[0].loc_chain
)
4866 variable_was_changed (var
, set
);
4872 /* Remove all variable-location information about call-clobbered
4873 registers, as well as associations between MEMs and VALUEs. */
4876 dataflow_set_clear_at_call (dataflow_set
*set
, rtx_insn
*call_insn
)
4879 hard_reg_set_iterator hrsi
;
4880 HARD_REG_SET invalidated_regs
;
4882 get_call_reg_set_usage (call_insn
, &invalidated_regs
,
4883 regs_invalidated_by_call
);
4885 EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs
, 0, r
, hrsi
)
4886 var_regno_delete (set
, r
);
4888 if (MAY_HAVE_DEBUG_BIND_INSNS
)
4890 set
->traversed_vars
= set
->vars
;
4891 shared_hash_htab (set
->vars
)
4892 ->traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4893 set
->traversed_vars
= set
->vars
;
4894 shared_hash_htab (set
->vars
)
4895 ->traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4896 set
->traversed_vars
= NULL
;
4901 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4903 location_chain
*lc1
, *lc2
;
4905 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4907 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4909 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4911 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4914 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4923 /* Return true if one-part variables VAR1 and VAR2 are different.
4924 They must be in canonical order. */
4927 onepart_variable_different_p (variable
*var1
, variable
*var2
)
4929 location_chain
*lc1
, *lc2
;
4934 gcc_assert (var1
->n_var_parts
== 1
4935 && var2
->n_var_parts
== 1);
4937 lc1
= var1
->var_part
[0].loc_chain
;
4938 lc2
= var2
->var_part
[0].loc_chain
;
4940 gcc_assert (lc1
&& lc2
);
4944 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4953 /* Return true if one-part variables VAR1 and VAR2 are different.
4954 They must be in canonical order. */
4957 dump_onepart_variable_differences (variable
*var1
, variable
*var2
)
4959 location_chain
*lc1
, *lc2
;
4961 gcc_assert (var1
!= var2
);
4962 gcc_assert (dump_file
);
4963 gcc_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
));
4964 gcc_assert (var1
->n_var_parts
== 1
4965 && var2
->n_var_parts
== 1);
4967 lc1
= var1
->var_part
[0].loc_chain
;
4968 lc2
= var2
->var_part
[0].loc_chain
;
4970 gcc_assert (lc1
&& lc2
);
4974 switch (loc_cmp (lc1
->loc
, lc2
->loc
))
4977 fprintf (dump_file
, "removed: ");
4978 print_rtl_single (dump_file
, lc1
->loc
);
4984 fprintf (dump_file
, "added: ");
4985 print_rtl_single (dump_file
, lc2
->loc
);
4997 fprintf (dump_file
, "removed: ");
4998 print_rtl_single (dump_file
, lc1
->loc
);
5004 fprintf (dump_file
, "added: ");
5005 print_rtl_single (dump_file
, lc2
->loc
);
5010 /* Return true if variables VAR1 and VAR2 are different. */
5013 variable_different_p (variable
*var1
, variable
*var2
)
5020 if (var1
->onepart
!= var2
->onepart
)
5023 if (var1
->n_var_parts
!= var2
->n_var_parts
)
5026 if (var1
->onepart
&& var1
->n_var_parts
)
5028 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
5029 && var1
->n_var_parts
== 1);
5030 /* One-part values have locations in a canonical order. */
5031 return onepart_variable_different_p (var1
, var2
);
5034 for (i
= 0; i
< var1
->n_var_parts
; i
++)
5036 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
5038 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
5040 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
5046 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5049 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
5051 variable_iterator_type hi
;
5053 bool diffound
= false;
5054 bool details
= (dump_file
&& (dump_flags
& TDF_DETAILS
));
5066 if (old_set
->vars
== new_set
->vars
)
5069 if (shared_hash_htab (old_set
->vars
)->elements ()
5070 != shared_hash_htab (new_set
->vars
)->elements ())
5073 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set
->vars
),
5076 variable_table_type
*htab
= shared_hash_htab (new_set
->vars
);
5077 variable
*var2
= htab
->find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
5081 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5083 fprintf (dump_file
, "dataflow difference found: removal of:\n");
5088 else if (variable_different_p (var1
, var2
))
5092 fprintf (dump_file
, "dataflow difference found: "
5093 "old and new follow:\n");
5095 if (dv_onepart_p (var1
->dv
))
5096 dump_onepart_variable_differences (var1
, var2
);
5103 /* There's no need to traverse the second hashtab unless we want to
5104 print the details. If both have the same number of elements and
5105 the second one had all entries found in the first one, then the
5106 second can't have any extra entries. */
5110 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set
->vars
),
5113 variable_table_type
*htab
= shared_hash_htab (old_set
->vars
);
5114 variable
*var2
= htab
->find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
5119 fprintf (dump_file
, "dataflow difference found: addition of:\n");
5131 /* Free the contents of dataflow set SET. */
5134 dataflow_set_destroy (dataflow_set
*set
)
5138 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5139 attrs_list_clear (&set
->regs
[i
]);
5141 shared_hash_destroy (set
->vars
);
5145 /* Return true if T is a tracked parameter with non-degenerate record type. */
5148 tracked_record_parameter_p (tree t
)
5150 if (TREE_CODE (t
) != PARM_DECL
)
5153 if (DECL_MODE (t
) == BLKmode
)
5156 tree type
= TREE_TYPE (t
);
5157 if (TREE_CODE (type
) != RECORD_TYPE
)
5160 if (TYPE_FIELDS (type
) == NULL_TREE
5161 || DECL_CHAIN (TYPE_FIELDS (type
)) == NULL_TREE
)
5167 /* Shall EXPR be tracked? */
5170 track_expr_p (tree expr
, bool need_rtl
)
5175 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5176 return DECL_RTL_SET_P (expr
);
5178 /* If EXPR is not a parameter or a variable do not track it. */
5179 if (!VAR_P (expr
) && TREE_CODE (expr
) != PARM_DECL
)
5182 /* It also must have a name... */
5183 if (!DECL_NAME (expr
) && need_rtl
)
5186 /* ... and a RTL assigned to it. */
5187 decl_rtl
= DECL_RTL_IF_SET (expr
);
5188 if (!decl_rtl
&& need_rtl
)
5191 /* If this expression is really a debug alias of some other declaration, we
5192 don't need to track this expression if the ultimate declaration is
5195 if (VAR_P (realdecl
) && DECL_HAS_DEBUG_EXPR_P (realdecl
))
5197 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5198 if (!DECL_P (realdecl
))
5200 if (handled_component_p (realdecl
)
5201 || (TREE_CODE (realdecl
) == MEM_REF
5202 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5204 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5207 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5208 &maxsize
, &reverse
);
5209 if (!DECL_P (innerdecl
)
5210 || DECL_IGNORED_P (innerdecl
)
5211 /* Do not track declarations for parts of tracked record
5212 parameters since we want to track them as a whole. */
5213 || tracked_record_parameter_p (innerdecl
)
5214 || TREE_STATIC (innerdecl
)
5216 || bitpos
+ bitsize
> 256
5217 || bitsize
!= maxsize
)
5227 /* Do not track EXPR if REALDECL it should be ignored for debugging
5229 if (DECL_IGNORED_P (realdecl
))
5232 /* Do not track global variables until we are able to emit correct location
5234 if (TREE_STATIC (realdecl
))
5237 /* When the EXPR is a DECL for alias of some variable (see example)
5238 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5239 DECL_RTL contains SYMBOL_REF.
5242 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5245 if (decl_rtl
&& MEM_P (decl_rtl
)
5246 && contains_symbol_ref_p (XEXP (decl_rtl
, 0)))
5249 /* If RTX is a memory it should not be very large (because it would be
5250 an array or struct). */
5251 if (decl_rtl
&& MEM_P (decl_rtl
))
5253 /* Do not track structures and arrays. */
5254 if ((GET_MODE (decl_rtl
) == BLKmode
5255 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5256 && !tracked_record_parameter_p (realdecl
))
5258 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5259 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5263 DECL_CHANGED (expr
) = 0;
5264 DECL_CHANGED (realdecl
) = 0;
5268 /* Determine whether a given LOC refers to the same variable part as
5272 same_variable_part_p (rtx loc
, tree expr
, poly_int64 offset
)
5277 if (! DECL_P (expr
))
5282 expr2
= REG_EXPR (loc
);
5283 offset2
= REG_OFFSET (loc
);
5285 else if (MEM_P (loc
))
5287 expr2
= MEM_EXPR (loc
);
5288 offset2
= int_mem_offset (loc
);
5293 if (! expr2
|| ! DECL_P (expr2
))
5296 expr
= var_debug_decl (expr
);
5297 expr2
= var_debug_decl (expr2
);
5299 return (expr
== expr2
&& known_eq (offset
, offset2
));
5302 /* LOC is a REG or MEM that we would like to track if possible.
5303 If EXPR is null, we don't know what expression LOC refers to,
5304 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5305 LOC is an lvalue register.
5307 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5308 is something we can track. When returning true, store the mode of
5309 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5310 from EXPR in *OFFSET_OUT (if nonnull). */
5313 track_loc_p (rtx loc
, tree expr
, poly_int64 offset
, bool store_reg_p
,
5314 machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5318 if (expr
== NULL
|| !track_expr_p (expr
, true))
5321 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5322 whole subreg, but only the old inner part is really relevant. */
5323 mode
= GET_MODE (loc
);
5324 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5326 machine_mode pseudo_mode
;
5328 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5329 if (paradoxical_subreg_p (mode
, pseudo_mode
))
5331 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5336 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5337 Do the same if we are storing to a register and EXPR occupies
5338 the whole of register LOC; in that case, the whole of EXPR is
5339 being changed. We exclude complex modes from the second case
5340 because the real and imaginary parts are represented as separate
5341 pseudo registers, even if the whole complex value fits into one
5343 if ((paradoxical_subreg_p (mode
, DECL_MODE (expr
))
5345 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5346 && hard_regno_nregs (REGNO (loc
), DECL_MODE (expr
)) == 1))
5347 && known_eq (offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
), 0))
5349 mode
= DECL_MODE (expr
);
5353 HOST_WIDE_INT const_offset
;
5354 if (!track_offset_p (offset
, &const_offset
))
5360 *offset_out
= const_offset
;
5364 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5365 want to track. When returning nonnull, make sure that the attributes
5366 on the returned value are updated. */
5369 var_lowpart (machine_mode mode
, rtx loc
)
5371 unsigned int offset
, reg_offset
, regno
;
5373 if (GET_MODE (loc
) == mode
)
5376 if (!REG_P (loc
) && !MEM_P (loc
))
5379 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5382 return adjust_address_nv (loc
, mode
, offset
);
5384 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5385 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5387 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5390 /* Carry information about uses and stores while walking rtx. */
5392 struct count_use_info
5394 /* The insn where the RTX is. */
5397 /* The basic block where insn is. */
5400 /* The array of n_sets sets in the insn, as determined by cselib. */
5401 struct cselib_set
*sets
;
5404 /* True if we're counting stores, false otherwise. */
5408 /* Find a VALUE corresponding to X. */
5410 static inline cselib_val
*
5411 find_use_val (rtx x
, machine_mode mode
, struct count_use_info
*cui
)
5417 /* This is called after uses are set up and before stores are
5418 processed by cselib, so it's safe to look up srcs, but not
5419 dsts. So we look up expressions that appear in srcs or in
5420 dest expressions, but we search the sets array for dests of
5424 /* Some targets represent memset and memcpy patterns
5425 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5426 (set (mem:BLK ...) (const_int ...)) or
5427 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5428 in that case, otherwise we end up with mode mismatches. */
5429 if (mode
== BLKmode
&& MEM_P (x
))
5431 for (i
= 0; i
< cui
->n_sets
; i
++)
5432 if (cui
->sets
[i
].dest
== x
)
5433 return cui
->sets
[i
].src_elt
;
5436 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5442 /* Replace all registers and addresses in an expression with VALUE
5443 expressions that map back to them, unless the expression is a
5444 register. If no mapping is or can be performed, returns NULL. */
5447 replace_expr_with_values (rtx loc
)
5449 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5451 else if (MEM_P (loc
))
5453 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5454 get_address_mode (loc
), 0,
5457 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5462 return cselib_subst_to_values (loc
, VOIDmode
);
5465 /* Return true if X contains a DEBUG_EXPR. */
5468 rtx_debug_expr_p (const_rtx x
)
5470 subrtx_iterator::array_type array
;
5471 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5472 if (GET_CODE (*iter
) == DEBUG_EXPR
)
5477 /* Determine what kind of micro operation to choose for a USE. Return
5478 MO_CLOBBER if no micro operation is to be generated. */
5480 static enum micro_operation_type
5481 use_type (rtx loc
, struct count_use_info
*cui
, machine_mode
*modep
)
5485 if (cui
&& cui
->sets
)
5487 if (GET_CODE (loc
) == VAR_LOCATION
)
5489 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5491 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5492 if (! VAR_LOC_UNKNOWN_P (ploc
))
5494 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5497 /* ??? flag_float_store and volatile mems are never
5498 given values, but we could in theory use them for
5500 gcc_assert (val
|| 1);
5508 if (REG_P (loc
) || MEM_P (loc
))
5511 *modep
= GET_MODE (loc
);
5515 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5516 && cselib_lookup (XEXP (loc
, 0),
5517 get_address_mode (loc
), 0,
5523 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5525 if (val
&& !cselib_preserved_value_p (val
))
5533 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5535 if (loc
== cfa_base_rtx
)
5537 expr
= REG_EXPR (loc
);
5540 return MO_USE_NO_VAR
;
5541 else if (target_for_debug_bind (var_debug_decl (expr
)))
5543 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5544 false, modep
, NULL
))
5547 return MO_USE_NO_VAR
;
5549 else if (MEM_P (loc
))
5551 expr
= MEM_EXPR (loc
);
5555 else if (target_for_debug_bind (var_debug_decl (expr
)))
5557 else if (track_loc_p (loc
, expr
, int_mem_offset (loc
),
5559 /* Multi-part variables shouldn't refer to one-part
5560 variable names such as VALUEs (never happens) or
5561 DEBUG_EXPRs (only happens in the presence of debug
5563 && (!MAY_HAVE_DEBUG_BIND_INSNS
5564 || !rtx_debug_expr_p (XEXP (loc
, 0))))
5573 /* Log to OUT information about micro-operation MOPT involving X in
5577 log_op_type (rtx x
, basic_block bb
, rtx_insn
*insn
,
5578 enum micro_operation_type mopt
, FILE *out
)
5580 fprintf (out
, "bb %i op %i insn %i %s ",
5581 bb
->index
, VTI (bb
)->mos
.length (),
5582 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5583 print_inline_rtx (out
, x
, 2);
5587 /* Tell whether the CONCAT used to holds a VALUE and its location
5588 needs value resolution, i.e., an attempt of mapping the location
5589 back to other incoming values. */
5590 #define VAL_NEEDS_RESOLUTION(x) \
5591 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5592 /* Whether the location in the CONCAT is a tracked expression, that
5593 should also be handled like a MO_USE. */
5594 #define VAL_HOLDS_TRACK_EXPR(x) \
5595 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5596 /* Whether the location in the CONCAT should be handled like a MO_COPY
5598 #define VAL_EXPR_IS_COPIED(x) \
5599 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5600 /* Whether the location in the CONCAT should be handled like a
5601 MO_CLOBBER as well. */
5602 #define VAL_EXPR_IS_CLOBBERED(x) \
5603 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5605 /* All preserved VALUEs. */
5606 static vec
<rtx
> preserved_values
;
5608 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5611 preserve_value (cselib_val
*val
)
5613 cselib_preserve_value (val
);
5614 preserved_values
.safe_push (val
->val_rtx
);
5617 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5618 any rtxes not suitable for CONST use not replaced by VALUEs
5622 non_suitable_const (const_rtx x
)
5624 subrtx_iterator::array_type array
;
5625 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5627 const_rtx x
= *iter
;
5628 switch (GET_CODE (x
))
5639 if (!MEM_READONLY_P (x
))
5649 /* Add uses (register and memory references) LOC which will be tracked
5650 to VTI (bb)->mos. */
5653 add_uses (rtx loc
, struct count_use_info
*cui
)
5655 machine_mode mode
= VOIDmode
;
5656 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5658 if (type
!= MO_CLOBBER
)
5660 basic_block bb
= cui
->bb
;
5664 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5665 mo
.insn
= cui
->insn
;
5667 if (type
== MO_VAL_LOC
)
5670 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5673 gcc_assert (cui
->sets
);
5676 && !REG_P (XEXP (vloc
, 0))
5677 && !MEM_P (XEXP (vloc
, 0)))
5680 machine_mode address_mode
= get_address_mode (mloc
);
5682 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5685 if (val
&& !cselib_preserved_value_p (val
))
5686 preserve_value (val
);
5689 if (CONSTANT_P (vloc
)
5690 && (GET_CODE (vloc
) != CONST
|| non_suitable_const (vloc
)))
5691 /* For constants don't look up any value. */;
5692 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5693 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5696 enum micro_operation_type type2
;
5698 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5701 nloc
= replace_expr_with_values (vloc
);
5705 oloc
= shallow_copy_rtx (oloc
);
5706 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5709 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5711 type2
= use_type (vloc
, 0, &mode2
);
5713 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5714 || type2
== MO_CLOBBER
);
5716 if (type2
== MO_CLOBBER
5717 && !cselib_preserved_value_p (val
))
5719 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5720 preserve_value (val
);
5723 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5725 oloc
= shallow_copy_rtx (oloc
);
5726 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5731 else if (type
== MO_VAL_USE
)
5733 machine_mode mode2
= VOIDmode
;
5734 enum micro_operation_type type2
;
5735 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5736 rtx vloc
, oloc
= loc
, nloc
;
5738 gcc_assert (cui
->sets
);
5741 && !REG_P (XEXP (oloc
, 0))
5742 && !MEM_P (XEXP (oloc
, 0)))
5745 machine_mode address_mode
= get_address_mode (mloc
);
5747 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5750 if (val
&& !cselib_preserved_value_p (val
))
5751 preserve_value (val
);
5754 type2
= use_type (loc
, 0, &mode2
);
5756 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5757 || type2
== MO_CLOBBER
);
5759 if (type2
== MO_USE
)
5760 vloc
= var_lowpart (mode2
, loc
);
5764 /* The loc of a MO_VAL_USE may have two forms:
5766 (concat val src): val is at src, a value-based
5769 (concat (concat val use) src): same as above, with use as
5770 the MO_USE tracked value, if it differs from src.
5774 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5775 nloc
= replace_expr_with_values (loc
);
5780 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5782 oloc
= val
->val_rtx
;
5784 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5786 if (type2
== MO_USE
)
5787 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5788 if (!cselib_preserved_value_p (val
))
5790 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5791 preserve_value (val
);
5795 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5797 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5798 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5799 VTI (bb
)->mos
.safe_push (mo
);
5803 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5806 add_uses_1 (rtx
*x
, void *cui
)
5808 subrtx_var_iterator::array_type array
;
5809 FOR_EACH_SUBRTX_VAR (iter
, array
, *x
, NONCONST
)
5810 add_uses (*iter
, (struct count_use_info
*) cui
);
5813 /* This is the value used during expansion of locations. We want it
5814 to be unbounded, so that variables expanded deep in a recursion
5815 nest are fully evaluated, so that their values are cached
5816 correctly. We avoid recursion cycles through other means, and we
5817 don't unshare RTL, so excess complexity is not a problem. */
5818 #define EXPR_DEPTH (INT_MAX)
5819 /* We use this to keep too-complex expressions from being emitted as
5820 location notes, and then to debug information. Users can trade
5821 compile time for ridiculously complex expressions, although they're
5822 seldom useful, and they may often have to be discarded as not
5823 representable anyway. */
5824 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5826 /* Attempt to reverse the EXPR operation in the debug info and record
5827 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5828 no longer live we can express its value as VAL - 6. */
5831 reverse_op (rtx val
, const_rtx expr
, rtx_insn
*insn
)
5835 struct elt_loc_list
*l
;
5839 if (GET_CODE (expr
) != SET
)
5842 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5845 src
= SET_SRC (expr
);
5846 switch (GET_CODE (src
))
5853 if (!REG_P (XEXP (src
, 0)))
5858 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5865 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
5868 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5869 if (!v
|| !cselib_preserved_value_p (v
))
5872 /* Use canonical V to avoid creating multiple redundant expressions
5873 for different VALUES equivalent to V. */
5874 v
= canonical_cselib_val (v
);
5876 /* Adding a reverse op isn't useful if V already has an always valid
5877 location. Ignore ENTRY_VALUE, while it is always constant, we should
5878 prefer non-ENTRY_VALUE locations whenever possible. */
5879 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5880 if (CONSTANT_P (l
->loc
)
5881 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5883 /* Avoid creating too large locs lists. */
5884 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
5887 switch (GET_CODE (src
))
5891 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5893 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5897 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5909 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5911 arg
= XEXP (src
, 1);
5912 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5914 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5915 if (arg
== NULL_RTX
)
5917 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5920 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5926 cselib_add_permanent_equiv (v
, ret
, insn
);
5929 /* Add stores (register and memory references) LOC which will be tracked
5930 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5931 CUIP->insn is instruction which the LOC is part of. */
5934 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5936 machine_mode mode
= VOIDmode
, mode2
;
5937 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5938 basic_block bb
= cui
->bb
;
5940 rtx oloc
= loc
, nloc
, src
= NULL
;
5941 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5942 bool track_p
= false;
5944 bool resolve
, preserve
;
5946 if (type
== MO_CLOBBER
)
5953 gcc_assert (loc
!= cfa_base_rtx
);
5954 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5955 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5956 || GET_CODE (expr
) == CLOBBER
)
5958 mo
.type
= MO_CLOBBER
;
5960 if (GET_CODE (expr
) == SET
5961 && SET_DEST (expr
) == loc
5962 && !unsuitable_loc (SET_SRC (expr
))
5963 && find_use_val (loc
, mode
, cui
))
5965 gcc_checking_assert (type
== MO_VAL_SET
);
5966 mo
.u
.loc
= gen_rtx_SET (loc
, SET_SRC (expr
));
5971 if (GET_CODE (expr
) == SET
5972 && SET_DEST (expr
) == loc
5973 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5974 src
= var_lowpart (mode2
, SET_SRC (expr
));
5975 loc
= var_lowpart (mode2
, loc
);
5984 rtx xexpr
= gen_rtx_SET (loc
, src
);
5985 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5987 /* If this is an instruction copying (part of) a parameter
5988 passed by invisible reference to its register location,
5989 pretend it's a SET so that the initial memory location
5990 is discarded, as the parameter register can be reused
5991 for other purposes and we do not track locations based
5992 on generic registers. */
5995 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5996 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5997 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5998 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
6009 mo
.insn
= cui
->insn
;
6011 else if (MEM_P (loc
)
6012 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
6015 if (MEM_P (loc
) && type
== MO_VAL_SET
6016 && !REG_P (XEXP (loc
, 0))
6017 && !MEM_P (XEXP (loc
, 0)))
6020 machine_mode address_mode
= get_address_mode (mloc
);
6021 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
6025 if (val
&& !cselib_preserved_value_p (val
))
6026 preserve_value (val
);
6029 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
6031 mo
.type
= MO_CLOBBER
;
6032 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
6036 if (GET_CODE (expr
) == SET
6037 && SET_DEST (expr
) == loc
6038 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
6039 src
= var_lowpart (mode2
, SET_SRC (expr
));
6040 loc
= var_lowpart (mode2
, loc
);
6049 rtx xexpr
= gen_rtx_SET (loc
, src
);
6050 if (same_variable_part_p (SET_SRC (xexpr
),
6052 int_mem_offset (loc
)))
6059 mo
.insn
= cui
->insn
;
6064 if (type
!= MO_VAL_SET
)
6065 goto log_and_return
;
6067 v
= find_use_val (oloc
, mode
, cui
);
6070 goto log_and_return
;
6072 resolve
= preserve
= !cselib_preserved_value_p (v
);
6074 /* We cannot track values for multiple-part variables, so we track only
6075 locations for tracked record parameters. */
6079 && tracked_record_parameter_p (REG_EXPR (loc
)))
6081 /* Although we don't use the value here, it could be used later by the
6082 mere virtue of its existence as the operand of the reverse operation
6083 that gave rise to it (typically extension/truncation). Make sure it
6084 is preserved as required by vt_expand_var_loc_chain. */
6087 goto log_and_return
;
6090 if (loc
== stack_pointer_rtx
6091 && hard_frame_pointer_adjustment
!= -1
6093 cselib_set_value_sp_based (v
);
6095 nloc
= replace_expr_with_values (oloc
);
6099 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
6101 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
6105 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
6107 if (oval
&& !cselib_preserved_value_p (oval
))
6109 micro_operation moa
;
6111 preserve_value (oval
);
6113 moa
.type
= MO_VAL_USE
;
6114 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
6115 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
6116 moa
.insn
= cui
->insn
;
6118 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6119 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
6120 moa
.type
, dump_file
);
6121 VTI (bb
)->mos
.safe_push (moa
);
6126 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
6128 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
6129 nloc
= replace_expr_with_values (SET_SRC (expr
));
6133 /* Avoid the mode mismatch between oexpr and expr. */
6134 if (!nloc
&& mode
!= mode2
)
6136 nloc
= SET_SRC (expr
);
6137 gcc_assert (oloc
== SET_DEST (expr
));
6140 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
6141 oloc
= gen_rtx_SET (oloc
, nloc
);
6144 if (oloc
== SET_DEST (mo
.u
.loc
))
6145 /* No point in duplicating. */
6147 if (!REG_P (SET_SRC (mo
.u
.loc
)))
6153 if (GET_CODE (mo
.u
.loc
) == SET
6154 && oloc
== SET_DEST (mo
.u
.loc
))
6155 /* No point in duplicating. */
6161 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6163 if (mo
.u
.loc
!= oloc
)
6164 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6166 /* The loc of a MO_VAL_SET may have various forms:
6168 (concat val dst): dst now holds val
6170 (concat val (set dst src)): dst now holds val, copied from src
6172 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6173 after replacing mems and non-top-level regs with values.
6175 (concat (concat val dstv) (set dst src)): dst now holds val,
6176 copied from src. dstv is a value-based representation of dst, if
6177 it differs from dst. If resolution is needed, src is a REG, and
6178 its mode is the same as that of val.
6180 (concat (concat val (set dstv srcv)) (set dst src)): src
6181 copied to dst, holding val. dstv and srcv are value-based
6182 representations of dst and src, respectively.
6186 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6187 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6192 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6195 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6198 if (mo
.type
== MO_CLOBBER
)
6199 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6200 if (mo
.type
== MO_COPY
)
6201 VAL_EXPR_IS_COPIED (loc
) = 1;
6203 mo
.type
= MO_VAL_SET
;
6206 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6207 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6208 VTI (bb
)->mos
.safe_push (mo
);
6211 /* Arguments to the call. */
6212 static rtx call_arguments
;
6214 /* Compute call_arguments. */
6217 prepare_call_arguments (basic_block bb
, rtx_insn
*insn
)
6220 rtx prev
, cur
, next
;
6221 rtx this_arg
= NULL_RTX
;
6222 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6223 tree obj_type_ref
= NULL_TREE
;
6224 CUMULATIVE_ARGS args_so_far_v
;
6225 cumulative_args_t args_so_far
;
6227 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6228 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6229 call
= get_call_rtx_from (insn
);
6232 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6234 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6235 if (SYMBOL_REF_DECL (symbol
))
6236 fndecl
= SYMBOL_REF_DECL (symbol
);
6238 if (fndecl
== NULL_TREE
)
6239 fndecl
= MEM_EXPR (XEXP (call
, 0));
6241 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6242 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6244 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6245 type
= TREE_TYPE (fndecl
);
6246 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6248 if (TREE_CODE (fndecl
) == INDIRECT_REF
6249 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6250 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6255 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6257 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6258 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6260 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6264 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6265 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6266 #ifndef PCC_STATIC_STRUCT_RETURN
6267 if (aggregate_value_p (TREE_TYPE (type
), type
)
6268 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6270 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6271 machine_mode mode
= TYPE_MODE (struct_addr
);
6273 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6275 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6277 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6279 if (reg
== NULL_RTX
)
6281 for (; link
; link
= XEXP (link
, 1))
6282 if (GET_CODE (XEXP (link
, 0)) == USE
6283 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6285 link
= XEXP (link
, 1);
6292 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6294 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6297 t
= TYPE_ARG_TYPES (type
);
6298 mode
= TYPE_MODE (TREE_VALUE (t
));
6299 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6300 TREE_VALUE (t
), true);
6301 if (this_arg
&& !REG_P (this_arg
))
6302 this_arg
= NULL_RTX
;
6303 else if (this_arg
== NULL_RTX
)
6305 for (; link
; link
= XEXP (link
, 1))
6306 if (GET_CODE (XEXP (link
, 0)) == USE
6307 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6309 this_arg
= XEXP (XEXP (link
, 0), 0);
6317 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6319 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6320 if (GET_CODE (XEXP (link
, 0)) == USE
)
6322 rtx item
= NULL_RTX
;
6323 x
= XEXP (XEXP (link
, 0), 0);
6324 if (GET_MODE (link
) == VOIDmode
6325 || GET_MODE (link
) == BLKmode
6326 || (GET_MODE (link
) != GET_MODE (x
)
6327 && ((GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6328 && GET_MODE_CLASS (GET_MODE (link
)) != MODE_PARTIAL_INT
)
6329 || (GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
6330 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_PARTIAL_INT
))))
6331 /* Can't do anything for these, if the original type mode
6332 isn't known or can't be converted. */;
6335 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6336 scalar_int_mode mode
;
6337 if (val
&& cselib_preserved_value_p (val
))
6338 item
= val
->val_rtx
;
6339 else if (is_a
<scalar_int_mode
> (GET_MODE (x
), &mode
))
6341 opt_scalar_int_mode mode_iter
;
6342 FOR_EACH_WIDER_MODE (mode_iter
, mode
)
6344 mode
= mode_iter
.require ();
6345 if (GET_MODE_BITSIZE (mode
) > BITS_PER_WORD
)
6348 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6349 if (reg
== NULL_RTX
|| !REG_P (reg
))
6351 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6352 if (val
&& cselib_preserved_value_p (val
))
6354 item
= val
->val_rtx
;
6365 if (!frame_pointer_needed
)
6367 struct adjust_mem_data amd
;
6368 amd
.mem_mode
= VOIDmode
;
6369 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6371 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6373 gcc_assert (amd
.side_effects
.is_empty ());
6375 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6376 if (val
&& cselib_preserved_value_p (val
))
6377 item
= val
->val_rtx
;
6378 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
6379 && GET_MODE_CLASS (GET_MODE (mem
)) != MODE_PARTIAL_INT
)
6381 /* For non-integer stack argument see also if they weren't
6382 initialized by integers. */
6383 scalar_int_mode imode
;
6384 if (int_mode_for_mode (GET_MODE (mem
)).exists (&imode
)
6385 && imode
!= GET_MODE (mem
))
6387 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6388 imode
, 0, VOIDmode
);
6389 if (val
&& cselib_preserved_value_p (val
))
6390 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6398 if (GET_MODE (item
) != GET_MODE (link
))
6399 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6400 if (GET_MODE (x2
) != GET_MODE (link
))
6401 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6402 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6404 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6406 if (t
&& t
!= void_list_node
)
6408 tree argtype
= TREE_VALUE (t
);
6409 machine_mode mode
= TYPE_MODE (argtype
);
6411 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6413 argtype
= build_pointer_type (argtype
);
6414 mode
= TYPE_MODE (argtype
);
6416 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6418 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6419 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6422 && GET_MODE (reg
) == mode
6423 && (GET_MODE_CLASS (mode
) == MODE_INT
6424 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
6426 && REGNO (x
) == REGNO (reg
)
6427 && GET_MODE (x
) == mode
6430 machine_mode indmode
6431 = TYPE_MODE (TREE_TYPE (argtype
));
6432 rtx mem
= gen_rtx_MEM (indmode
, x
);
6433 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6434 if (val
&& cselib_preserved_value_p (val
))
6436 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6437 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6442 struct elt_loc_list
*l
;
6445 /* Try harder, when passing address of a constant
6446 pool integer it can be easily read back. */
6447 item
= XEXP (item
, 1);
6448 if (GET_CODE (item
) == SUBREG
)
6449 item
= SUBREG_REG (item
);
6450 gcc_assert (GET_CODE (item
) == VALUE
);
6451 val
= CSELIB_VAL_PTR (item
);
6452 for (l
= val
->locs
; l
; l
= l
->next
)
6453 if (GET_CODE (l
->loc
) == SYMBOL_REF
6454 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6455 && SYMBOL_REF_DECL (l
->loc
)
6456 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6458 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6459 if (tree_fits_shwi_p (initial
))
6461 item
= GEN_INT (tree_to_shwi (initial
));
6462 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6464 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6471 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6477 /* Add debug arguments. */
6479 && TREE_CODE (fndecl
) == FUNCTION_DECL
6480 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6482 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6487 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6490 tree dtemp
= (**debug_args
)[ix
+ 1];
6491 machine_mode mode
= DECL_MODE (dtemp
);
6492 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6493 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6494 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6500 /* Reverse call_arguments chain. */
6502 for (cur
= call_arguments
; cur
; cur
= next
)
6504 next
= XEXP (cur
, 1);
6505 XEXP (cur
, 1) = prev
;
6508 call_arguments
= prev
;
6510 x
= get_call_rtx_from (insn
);
6513 x
= XEXP (XEXP (x
, 0), 0);
6514 if (GET_CODE (x
) == SYMBOL_REF
)
6515 /* Don't record anything. */;
6516 else if (CONSTANT_P (x
))
6518 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6521 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6525 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6526 if (val
&& cselib_preserved_value_p (val
))
6528 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6530 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6537 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6538 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6540 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6542 clobbered
= plus_constant (mode
, clobbered
,
6543 token
* GET_MODE_SIZE (mode
));
6544 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6545 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6547 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6551 /* Callback for cselib_record_sets_hook, that records as micro
6552 operations uses and stores in an insn after cselib_record_sets has
6553 analyzed the sets in an insn, but before it modifies the stored
6554 values in the internal tables, unless cselib_record_sets doesn't
6555 call it directly (perhaps because we're not doing cselib in the
6556 first place, in which case sets and n_sets will be 0). */
6559 add_with_sets (rtx_insn
*insn
, struct cselib_set
*sets
, int n_sets
)
6561 basic_block bb
= BLOCK_FOR_INSN (insn
);
6563 struct count_use_info cui
;
6564 micro_operation
*mos
;
6566 cselib_hook_called
= true;
6571 cui
.n_sets
= n_sets
;
6573 n1
= VTI (bb
)->mos
.length ();
6574 cui
.store_p
= false;
6575 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6576 n2
= VTI (bb
)->mos
.length () - 1;
6577 mos
= VTI (bb
)->mos
.address ();
6579 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6583 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6585 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6588 std::swap (mos
[n1
], mos
[n2
]);
6591 n2
= VTI (bb
)->mos
.length () - 1;
6594 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6596 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6599 std::swap (mos
[n1
], mos
[n2
]);
6608 mo
.u
.loc
= call_arguments
;
6609 call_arguments
= NULL_RTX
;
6611 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6612 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6613 VTI (bb
)->mos
.safe_push (mo
);
6616 n1
= VTI (bb
)->mos
.length ();
6617 /* This will record NEXT_INSN (insn), such that we can
6618 insert notes before it without worrying about any
6619 notes that MO_USEs might emit after the insn. */
6621 note_stores (PATTERN (insn
), add_stores
, &cui
);
6622 n2
= VTI (bb
)->mos
.length () - 1;
6623 mos
= VTI (bb
)->mos
.address ();
6625 /* Order the MO_VAL_USEs first (note_stores does nothing
6626 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6627 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6630 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6632 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6635 std::swap (mos
[n1
], mos
[n2
]);
6638 n2
= VTI (bb
)->mos
.length () - 1;
6641 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6643 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6646 std::swap (mos
[n1
], mos
[n2
]);
6650 static enum var_init_status
6651 find_src_status (dataflow_set
*in
, rtx src
)
6653 tree decl
= NULL_TREE
;
6654 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6656 if (! flag_var_tracking_uninit
)
6657 status
= VAR_INIT_STATUS_INITIALIZED
;
6659 if (src
&& REG_P (src
))
6660 decl
= var_debug_decl (REG_EXPR (src
));
6661 else if (src
&& MEM_P (src
))
6662 decl
= var_debug_decl (MEM_EXPR (src
));
6665 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6670 /* SRC is the source of an assignment. Use SET to try to find what
6671 was ultimately assigned to SRC. Return that value if known,
6672 otherwise return SRC itself. */
6675 find_src_set_src (dataflow_set
*set
, rtx src
)
6677 tree decl
= NULL_TREE
; /* The variable being copied around. */
6678 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6680 location_chain
*nextp
;
6684 if (src
&& REG_P (src
))
6685 decl
= var_debug_decl (REG_EXPR (src
));
6686 else if (src
&& MEM_P (src
))
6687 decl
= var_debug_decl (MEM_EXPR (src
));
6691 decl_or_value dv
= dv_from_decl (decl
);
6693 var
= shared_hash_find (set
->vars
, dv
);
6697 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6698 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6699 nextp
= nextp
->next
)
6700 if (rtx_equal_p (nextp
->loc
, src
))
6702 set_src
= nextp
->set_src
;
6712 /* Compute the changes of variable locations in the basic block BB. */
6715 compute_bb_dataflow (basic_block bb
)
6718 micro_operation
*mo
;
6720 dataflow_set old_out
;
6721 dataflow_set
*in
= &VTI (bb
)->in
;
6722 dataflow_set
*out
= &VTI (bb
)->out
;
6724 dataflow_set_init (&old_out
);
6725 dataflow_set_copy (&old_out
, out
);
6726 dataflow_set_copy (out
, in
);
6728 if (MAY_HAVE_DEBUG_BIND_INSNS
)
6729 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
6731 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6733 rtx_insn
*insn
= mo
->insn
;
6738 dataflow_set_clear_at_call (out
, insn
);
6743 rtx loc
= mo
->u
.loc
;
6746 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6747 else if (MEM_P (loc
))
6748 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6754 rtx loc
= mo
->u
.loc
;
6758 if (GET_CODE (loc
) == CONCAT
)
6760 val
= XEXP (loc
, 0);
6761 vloc
= XEXP (loc
, 1);
6769 var
= PAT_VAR_LOCATION_DECL (vloc
);
6771 clobber_variable_part (out
, NULL_RTX
,
6772 dv_from_decl (var
), 0, NULL_RTX
);
6775 if (VAL_NEEDS_RESOLUTION (loc
))
6776 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6777 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6778 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6781 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6782 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6783 dv_from_decl (var
), 0,
6784 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6791 rtx loc
= mo
->u
.loc
;
6792 rtx val
, vloc
, uloc
;
6794 vloc
= uloc
= XEXP (loc
, 1);
6795 val
= XEXP (loc
, 0);
6797 if (GET_CODE (val
) == CONCAT
)
6799 uloc
= XEXP (val
, 1);
6800 val
= XEXP (val
, 0);
6803 if (VAL_NEEDS_RESOLUTION (loc
))
6804 val_resolve (out
, val
, vloc
, insn
);
6806 val_store (out
, val
, uloc
, insn
, false);
6808 if (VAL_HOLDS_TRACK_EXPR (loc
))
6810 if (GET_CODE (uloc
) == REG
)
6811 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6813 else if (GET_CODE (uloc
) == MEM
)
6814 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6822 rtx loc
= mo
->u
.loc
;
6823 rtx val
, vloc
, uloc
;
6827 uloc
= XEXP (vloc
, 1);
6828 val
= XEXP (vloc
, 0);
6831 if (GET_CODE (uloc
) == SET
)
6833 dstv
= SET_DEST (uloc
);
6834 srcv
= SET_SRC (uloc
);
6842 if (GET_CODE (val
) == CONCAT
)
6844 dstv
= vloc
= XEXP (val
, 1);
6845 val
= XEXP (val
, 0);
6848 if (GET_CODE (vloc
) == SET
)
6850 srcv
= SET_SRC (vloc
);
6852 gcc_assert (val
!= srcv
);
6853 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6855 dstv
= vloc
= SET_DEST (vloc
);
6857 if (VAL_NEEDS_RESOLUTION (loc
))
6858 val_resolve (out
, val
, srcv
, insn
);
6860 else if (VAL_NEEDS_RESOLUTION (loc
))
6862 gcc_assert (GET_CODE (uloc
) == SET
6863 && GET_CODE (SET_SRC (uloc
)) == REG
);
6864 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6867 if (VAL_HOLDS_TRACK_EXPR (loc
))
6869 if (VAL_EXPR_IS_CLOBBERED (loc
))
6872 var_reg_delete (out
, uloc
, true);
6873 else if (MEM_P (uloc
))
6875 gcc_assert (MEM_P (dstv
));
6876 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6877 var_mem_delete (out
, dstv
, true);
6882 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6883 rtx src
= NULL
, dst
= uloc
;
6884 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6886 if (GET_CODE (uloc
) == SET
)
6888 src
= SET_SRC (uloc
);
6889 dst
= SET_DEST (uloc
);
6894 if (flag_var_tracking_uninit
)
6896 status
= find_src_status (in
, src
);
6898 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6899 status
= find_src_status (out
, src
);
6902 src
= find_src_set_src (in
, src
);
6906 var_reg_delete_and_set (out
, dst
, !copied_p
,
6908 else if (MEM_P (dst
))
6910 gcc_assert (MEM_P (dstv
));
6911 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6912 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6917 else if (REG_P (uloc
))
6918 var_regno_delete (out
, REGNO (uloc
));
6919 else if (MEM_P (uloc
))
6921 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6922 gcc_checking_assert (dstv
== vloc
);
6924 clobber_overlapping_mems (out
, vloc
);
6927 val_store (out
, val
, dstv
, insn
, true);
6933 rtx loc
= mo
->u
.loc
;
6936 if (GET_CODE (loc
) == SET
)
6938 set_src
= SET_SRC (loc
);
6939 loc
= SET_DEST (loc
);
6943 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6945 else if (MEM_P (loc
))
6946 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6953 rtx loc
= mo
->u
.loc
;
6954 enum var_init_status src_status
;
6957 if (GET_CODE (loc
) == SET
)
6959 set_src
= SET_SRC (loc
);
6960 loc
= SET_DEST (loc
);
6963 if (! flag_var_tracking_uninit
)
6964 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6967 src_status
= find_src_status (in
, set_src
);
6969 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6970 src_status
= find_src_status (out
, set_src
);
6973 set_src
= find_src_set_src (in
, set_src
);
6976 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6977 else if (MEM_P (loc
))
6978 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6984 rtx loc
= mo
->u
.loc
;
6987 var_reg_delete (out
, loc
, false);
6988 else if (MEM_P (loc
))
6989 var_mem_delete (out
, loc
, false);
6995 rtx loc
= mo
->u
.loc
;
6998 var_reg_delete (out
, loc
, true);
6999 else if (MEM_P (loc
))
7000 var_mem_delete (out
, loc
, true);
7005 out
->stack_adjust
+= mo
->u
.adjust
;
7010 if (MAY_HAVE_DEBUG_BIND_INSNS
)
7012 delete local_get_addr_cache
;
7013 local_get_addr_cache
= NULL
;
7015 dataflow_set_equiv_regs (out
);
7016 shared_hash_htab (out
->vars
)
7017 ->traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
7018 shared_hash_htab (out
->vars
)
7019 ->traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
7021 shared_hash_htab (out
->vars
)
7022 ->traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
7024 changed
= dataflow_set_different (&old_out
, out
);
7025 dataflow_set_destroy (&old_out
);
7029 /* Find the locations of variables in the whole function. */
7032 vt_find_locations (void)
7034 bb_heap_t
*worklist
= new bb_heap_t (LONG_MIN
);
7035 bb_heap_t
*pending
= new bb_heap_t (LONG_MIN
);
7036 sbitmap in_worklist
, in_pending
;
7043 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
7044 bool success
= true;
7046 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
7047 /* Compute reverse completion order of depth first search of the CFG
7048 so that the data-flow runs faster. */
7049 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
7050 bb_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
7051 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
7052 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
7053 bb_order
[rc_order
[i
]] = i
;
7056 auto_sbitmap
visited (last_basic_block_for_fn (cfun
));
7057 in_worklist
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7058 in_pending
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7059 bitmap_clear (in_worklist
);
7061 FOR_EACH_BB_FN (bb
, cfun
)
7062 pending
->insert (bb_order
[bb
->index
], bb
);
7063 bitmap_ones (in_pending
);
7065 while (success
&& !pending
->empty ())
7067 std::swap (worklist
, pending
);
7068 std::swap (in_worklist
, in_pending
);
7070 bitmap_clear (visited
);
7072 while (!worklist
->empty ())
7074 bb
= worklist
->extract_min ();
7075 bitmap_clear_bit (in_worklist
, bb
->index
);
7076 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
7077 if (!bitmap_bit_p (visited
, bb
->index
))
7081 int oldinsz
, oldoutsz
;
7083 bitmap_set_bit (visited
, bb
->index
);
7085 if (VTI (bb
)->in
.vars
)
7088 -= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7089 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7090 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
)->elements ();
7092 = shared_hash_htab (VTI (bb
)->out
.vars
)->elements ();
7095 oldinsz
= oldoutsz
= 0;
7097 if (MAY_HAVE_DEBUG_BIND_INSNS
)
7099 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
7100 bool first
= true, adjust
= false;
7102 /* Calculate the IN set as the intersection of
7103 predecessor OUT sets. */
7105 dataflow_set_clear (in
);
7106 dst_can_be_shared
= true;
7108 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7109 if (!VTI (e
->src
)->flooded
)
7110 gcc_assert (bb_order
[bb
->index
]
7111 <= bb_order
[e
->src
->index
]);
7114 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
7115 first_out
= &VTI (e
->src
)->out
;
7120 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
7126 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
7129 /* Merge and merge_adjust should keep entries in
7131 shared_hash_htab (in
->vars
)
7132 ->traverse
<dataflow_set
*,
7133 canonicalize_loc_order_check
> (in
);
7135 if (dst_can_be_shared
)
7137 shared_hash_destroy (in
->vars
);
7138 in
->vars
= shared_hash_copy (first_out
->vars
);
7142 VTI (bb
)->flooded
= true;
7146 /* Calculate the IN set as union of predecessor OUT sets. */
7147 dataflow_set_clear (&VTI (bb
)->in
);
7148 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7149 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7152 changed
= compute_bb_dataflow (bb
);
7153 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7154 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7156 if (htabmax
&& htabsz
> htabmax
)
7158 if (MAY_HAVE_DEBUG_BIND_INSNS
)
7159 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7160 "variable tracking size limit exceeded with "
7161 "-fvar-tracking-assignments, retrying without");
7163 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7164 "variable tracking size limit exceeded");
7171 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7173 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7176 if (bitmap_bit_p (visited
, e
->dest
->index
))
7178 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7180 /* Send E->DEST to next round. */
7181 bitmap_set_bit (in_pending
, e
->dest
->index
);
7182 pending
->insert (bb_order
[e
->dest
->index
],
7186 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7188 /* Add E->DEST to current round. */
7189 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7190 worklist
->insert (bb_order
[e
->dest
->index
],
7198 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7200 (int)shared_hash_htab (VTI (bb
)->in
.vars
)->size (),
7202 (int)shared_hash_htab (VTI (bb
)->out
.vars
)->size (),
7204 (int)worklist
->nodes (), (int)pending
->nodes (),
7207 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7209 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7210 dump_dataflow_set (&VTI (bb
)->in
);
7211 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7212 dump_dataflow_set (&VTI (bb
)->out
);
7218 if (success
&& MAY_HAVE_DEBUG_BIND_INSNS
)
7219 FOR_EACH_BB_FN (bb
, cfun
)
7220 gcc_assert (VTI (bb
)->flooded
);
7225 sbitmap_free (in_worklist
);
7226 sbitmap_free (in_pending
);
7228 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7232 /* Print the content of the LIST to dump file. */
/* Print each entry of the singly-linked attrs LIST to dump_file:
   the tracked decl (print_mem_expr) or VALUE rtx (print_rtl_single),
   followed by "+<offset>", ending with one newline.
   NOTE(review): this extraction is missing interior lines (original
   numbering jumps 7240 -> 7242), so the decl/value branch join is
   not fully visible here.  */
7235 dump_attrs_list (attrs
*list
)
/* Walk the chain of attribute nodes.  */
7237 for (; list
; list
= list
->next
)
/* Decl-based dv: print the declaration itself.  */
7239 if (dv_is_decl_p (list
->dv
))
7240 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
/* Otherwise the dv is a VALUE rtx.  */
7242 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
/* Append the part offset of this attribute.  */
7243 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7245 fprintf (dump_file
, "\n");
7248 /* Print the information about variable *SLOT to dump file. */
/* Hash-table traversal callback: print the variable stored in *SLOT
   to dump_file.  DATA is unused.
   NOTE(review): the call that actually prints (presumably dump_var)
   is on lines dropped by this extraction (7254-7256); confirm against
   the full source.  */
7251 dump_var_tracking_slot (variable
**slot
, void *data ATTRIBUTE_UNUSED
)
/* Fetch the variable held in this slot.  */
7253 variable
*var
= *slot
;
7257 /* Continue traversing the hash table. */
7261 /* Print the information about variable VAR to dump file. */
/* Print the information about variable VAR to dump_file: its name
   (decl name, D#<uid> for debug-expr decls, or D.<uid> otherwise) or
   its VALUE rtx, then each variable part's offset and location chain,
   flagging uninitialized locations with "[uninit]".
   NOTE(review): interior lines are missing from this extraction
   (original numbering has gaps, e.g. 7284 -> 7288), so the exact
   branch structure between the name and rtx cases is not fully
   visible here.  */
7264 dump_var (variable
*var
)
7267 location_chain
*node
;
/* Decl-based dv: print a human-readable name.  */
7269 if (dv_is_decl_p (var
->dv
))
7271 const_tree decl
= dv_as_decl (var
->dv
)
;
7273 if (DECL_NAME (decl
))
7275 fprintf (dump_file
, " name: %s",
7276 IDENTIFIER_POINTER (DECL_NAME (decl
)));
/* Optionally append the DECL_UID when -fdump-...-uid is on.  */
7277 if (dump_flags
& TDF_UID
)
7278 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
/* Debug temporaries print as D#<uid>.  */
7280 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7281 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7283 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7284 fprintf (dump_file
, "\n");
/* Value-based dv: print the VALUE rtx itself.  */
7288 fputc (' ', dump_file
);
7289 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
/* Dump every variable part and its chain of locations.  */
7292 for (i
= 0; i
< var
->n_var_parts
; i
++)
7294 fprintf (dump_file
, " offset %ld\n",
7295 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7296 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7298 fprintf (dump_file
, " ");
/* Mark locations whose init status is unknown/uninitialized.  */
7299 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7300 fprintf (dump_file
, "[uninit]");
7301 print_rtl_single (dump_file
, node
->loc
);
7306 /* Print the information about variables from hash table VARS to dump file. */
/* Print every variable in hash table VARS to dump_file (header line
   "Variables:" followed by one dump_var_tracking_slot traversal).
   Nothing is printed for an empty table.  */
7309 dump_vars (variable_table_type
*vars
)
7311 if (vars
->elements () > 0)
7313 fprintf (dump_file
, "Variables:\n");
/* Traverse every slot, printing each via dump_var_tracking_slot.  */
7314 vars
->traverse
<void *, dump_var_tracking_slot
> (NULL
);
7318 /* Print the dataflow set SET to dump file. */
/* Print dataflow set SET to dump_file: the stack adjustment, the
   attribute list of each hard register that has one, and the variable
   table.  NOTE(review): the condition guarding the per-register print
   (presumably set->regs[i] non-null) is on a line dropped by this
   extraction (7327 -> 7331 gap); confirm against the full source.  */
7321 dump_dataflow_set (dataflow_set
*set
)
7325 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
/* Walk all hard registers.  */
7327 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7331 fprintf (dump_file
, "Reg %d:", i
);
7332 dump_attrs_list (set
->regs
[i
]);
/* Finally dump the variable hash table of this set.  */
7335 dump_vars (shared_hash_htab (set
->vars
));
7336 fprintf (dump_file
, "\n");
7339 /* Print the IN and OUT sets for each basic block to dump file. */
/* Print the IN and OUT dataflow sets of every basic block in the
   current function to dump_file, one "Basic block N" section each.  */
7342 dump_dataflow_sets (void)
7346 FOR_EACH_BB_FN (bb
, cfun
)
7348 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
/* The set on entry to BB...  */
7349 fprintf (dump_file
, "IN:\n");
7350 dump_dataflow_set (&VTI (bb
)->in
);
/* ...and the set on exit from BB.  */
7351 fprintf (dump_file
, "OUT:\n");
7352 dump_dataflow_set (&VTI (bb
)->out
);
7356 /* Return the variable for DV in dropped_values, inserting one if
7357 requested with INSERT. */
/* Return the variable for DV in dropped_values, inserting one if
   requested with INSERT.
   NOTE(review): this extraction is missing interior lines (original
   numbering jumps 7366 -> 7374), so the early return of an existing
   *slot and the final return are not visible here.  */
7359 static inline variable
*
7360 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7363 variable
*empty_var
;
7364 onepart_enum onepart
;
/* Look up (or reserve, per INSERT) the slot for DV.  */
7366 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
/* An empty slot here only makes sense if insertion was requested.  */
7374 gcc_checking_assert (insert
== INSERT
);
7376 onepart
= dv_onepart_p (dv
);
/* Only VALUEs and debug exprs are tracked in dropped_values.  */
7378 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
/* Allocate a fresh, empty one-part variable for DV.  */
7380 empty_var
= onepart_pool_allocate (onepart
);
7382 empty_var
->refcount
= 1;
7383 empty_var
->n_var_parts
= 0;
7384 empty_var
->onepart
= onepart
;
7385 empty_var
->in_changed_variables
= false;
7386 empty_var
->var_part
[0].loc_chain
= NULL
;
7387 empty_var
->var_part
[0].cur_loc
= NULL
;
7388 VAR_LOC_1PAUX (empty_var
) = NULL
;
/* Mark DV changed so later passes reconsider it.  */
7389 set_dv_changed (dv
, true);
7396 /* Recover the one-part aux from dropped_values. */
/* Recover the one-part aux data of VAR from dropped_values: if VAR
   already has VAR_LOC_1PAUX, return it; otherwise, for non-VDECL
   one-part variables, steal the aux from the corresponding entry in
   dropped_values (if any) and return it.
   NOTE(review): lines 7409-7415 are missing from this extraction,
   so the ONEPART_VDECL early-return path and the NULL-dvar check are
   not visible here.  */
7398 static struct onepart_aux
*
7399 recover_dropped_1paux (variable
*var
)
/* Only one-part variables carry this auxiliary structure.  */
7403 gcc_checking_assert (var
->onepart
);
/* Already present: nothing to recover.  */
7405 if (VAR_LOC_1PAUX (var
))
7406 return VAR_LOC_1PAUX (var
);
/* Plain variable decls are not kept in dropped_values.  */
7408 if (var
->onepart
== ONEPART_VDECL
)
7411 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
/* Transfer ownership of the aux from the dropped entry to VAR.  */
7416 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7417 VAR_LOC_1PAUX (dvar
) = NULL
;
7419 return VAR_LOC_1PAUX (var
);
7422 /* Add variable VAR to the hash table of changed variables and
7423 if it has no locations delete it from SET's hash table. */
7426 variable_was_changed (variable
*var
, dataflow_set
*set
)
7428 hashval_t hash
= dv_htab_hash (var
->dv
);
7434 /* Remember this decl or VALUE has been added to changed_variables. */
7435 set_dv_changed (var
->dv
, true);
7437 slot
= changed_variables
->find_slot_with_hash (var
->dv
, hash
, INSERT
);
7441 variable
*old_var
= *slot
;
7442 gcc_assert (old_var
->in_changed_variables
);
7443 old_var
->in_changed_variables
= false;
7444 if (var
!= old_var
&& var
->onepart
)
7446 /* Restore the auxiliary info from an empty variable
7447 previously created for changed_variables, so it is
7449 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7450 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7451 VAR_LOC_1PAUX (old_var
) = NULL
;
7453 variable_htab_free (*slot
);
7456 if (set
&& var
->n_var_parts
== 0)
7458 onepart_enum onepart
= var
->onepart
;
7459 variable
*empty_var
= NULL
;
7460 variable
**dslot
= NULL
;
7462 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7464 dslot
= dropped_values
->find_slot_with_hash (var
->dv
,
7465 dv_htab_hash (var
->dv
),
7471 gcc_checking_assert (!empty_var
->in_changed_variables
);
7472 if (!VAR_LOC_1PAUX (var
))
7474 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7475 VAR_LOC_1PAUX (empty_var
) = NULL
;
7478 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7484 empty_var
= onepart_pool_allocate (onepart
);
7485 empty_var
->dv
= var
->dv
;
7486 empty_var
->refcount
= 1;
7487 empty_var
->n_var_parts
= 0;
7488 empty_var
->onepart
= onepart
;
7491 empty_var
->refcount
++;
7496 empty_var
->refcount
++;
7497 empty_var
->in_changed_variables
= true;
7501 empty_var
->var_part
[0].loc_chain
= NULL
;
7502 empty_var
->var_part
[0].cur_loc
= NULL
;
7503 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7504 VAR_LOC_1PAUX (var
) = NULL
;
7510 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7511 recover_dropped_1paux (var
);
7513 var
->in_changed_variables
= true;
7520 if (var
->n_var_parts
== 0)
7525 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7528 if (shared_hash_shared (set
->vars
))
7529 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7531 shared_hash_htab (set
->vars
)->clear_slot (slot
);
7537 /* Look for the index in VAR->var_part corresponding to OFFSET.
7538 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7539 referenced int will be set to the index that the part has or should
7540 have, if it should be inserted. */
/* Look for the index in VAR->var_part corresponding to OFFSET via
   binary search; return -1 if not found.  If INSERTION_POINT is
   non-NULL, store the index the part has or should have if inserted.
   NOTE(review): several structural lines (the onepart fast path
   guard, loop header, low/high updates and both return statements)
   are missing from this extraction; the annotations below describe
   only what the surviving fragments show.  */
7543 find_variable_location_part (variable
*var
, HOST_WIDE_INT offset
,
7544 int *insertion_point
)
/* Fast path (presumably for one-part variables): the only possible
   slot is index 0.  */
7553 if (insertion_point
)
7554 *insertion_point
= 0;
7556 return var
->n_var_parts
- 1;
7559 /* Find the location part. */
/* Binary search over the sorted VAR_PART_OFFSET array.  */
7561 high
= var
->n_var_parts
;
7564 pos
= (low
+ high
) / 2;
7565 if (VAR_PART_OFFSET (var
, pos
) < offset
)
/* POS is where the part is, or where it would be inserted.  */
7572 if (insertion_point
)
7573 *insertion_point
= pos
;
/* Report a hit only when an existing part matches OFFSET exactly.  */
7575 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
7582 set_slot_part (dataflow_set
*set
, rtx loc
, variable
**slot
,
7583 decl_or_value dv
, HOST_WIDE_INT offset
,
7584 enum var_init_status initialized
, rtx set_src
)
7587 location_chain
*node
, *next
;
7588 location_chain
**nextp
;
7590 onepart_enum onepart
;
7595 onepart
= var
->onepart
;
7597 onepart
= dv_onepart_p (dv
);
7599 gcc_checking_assert (offset
== 0 || !onepart
);
7600 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7602 if (! flag_var_tracking_uninit
)
7603 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7607 /* Create new variable information. */
7608 var
= onepart_pool_allocate (onepart
);
7611 var
->n_var_parts
= 1;
7612 var
->onepart
= onepart
;
7613 var
->in_changed_variables
= false;
7615 VAR_LOC_1PAUX (var
) = NULL
;
7617 VAR_PART_OFFSET (var
, 0) = offset
;
7618 var
->var_part
[0].loc_chain
= NULL
;
7619 var
->var_part
[0].cur_loc
= NULL
;
7622 nextp
= &var
->var_part
[0].loc_chain
;
7628 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
7632 if (GET_CODE (loc
) == VALUE
)
7634 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7635 nextp
= &node
->next
)
7636 if (GET_CODE (node
->loc
) == VALUE
)
7638 if (node
->loc
== loc
)
7643 if (canon_value_cmp (node
->loc
, loc
))
7651 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
7659 else if (REG_P (loc
))
7661 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7662 nextp
= &node
->next
)
7663 if (REG_P (node
->loc
))
7665 if (REGNO (node
->loc
) < REGNO (loc
))
7669 if (REGNO (node
->loc
) == REGNO (loc
))
7682 else if (MEM_P (loc
))
7684 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7685 nextp
= &node
->next
)
7686 if (REG_P (node
->loc
))
7688 else if (MEM_P (node
->loc
))
7690 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
7702 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7703 nextp
= &node
->next
)
7704 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
7712 if (shared_var_p (var
, set
->vars
))
7714 slot
= unshare_variable (set
, slot
, var
, initialized
);
7716 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7717 nextp
= &(*nextp
)->next
)
7719 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7726 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7728 pos
= find_variable_location_part (var
, offset
, &inspos
);
7732 node
= var
->var_part
[pos
].loc_chain
;
7735 && ((REG_P (node
->loc
) && REG_P (loc
)
7736 && REGNO (node
->loc
) == REGNO (loc
))
7737 || rtx_equal_p (node
->loc
, loc
)))
7739 /* LOC is in the beginning of the chain so we have nothing
7741 if (node
->init
< initialized
)
7742 node
->init
= initialized
;
7743 if (set_src
!= NULL
)
7744 node
->set_src
= set_src
;
7750 /* We have to make a copy of a shared variable. */
7751 if (shared_var_p (var
, set
->vars
))
7753 slot
= unshare_variable (set
, slot
, var
, initialized
);
7760 /* We have not found the location part, new one will be created. */
7762 /* We have to make a copy of the shared variable. */
7763 if (shared_var_p (var
, set
->vars
))
7765 slot
= unshare_variable (set
, slot
, var
, initialized
);
7769 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7770 thus there are at most MAX_VAR_PARTS different offsets. */
7771 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7772 && (!var
->n_var_parts
|| !onepart
));
7774 /* We have to move the elements of array starting at index
7775 inspos to the next position. */
7776 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7777 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7780 gcc_checking_assert (!onepart
);
7781 VAR_PART_OFFSET (var
, pos
) = offset
;
7782 var
->var_part
[pos
].loc_chain
= NULL
;
7783 var
->var_part
[pos
].cur_loc
= NULL
;
7786 /* Delete the location from the list. */
7787 nextp
= &var
->var_part
[pos
].loc_chain
;
7788 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7791 if ((REG_P (node
->loc
) && REG_P (loc
)
7792 && REGNO (node
->loc
) == REGNO (loc
))
7793 || rtx_equal_p (node
->loc
, loc
))
7795 /* Save these values, to assign to the new node, before
7796 deleting this one. */
7797 if (node
->init
> initialized
)
7798 initialized
= node
->init
;
7799 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7800 set_src
= node
->set_src
;
7801 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7802 var
->var_part
[pos
].cur_loc
= NULL
;
7808 nextp
= &node
->next
;
7811 nextp
= &var
->var_part
[pos
].loc_chain
;
7814 /* Add the location to the beginning. */
7815 node
= new location_chain
;
7817 node
->init
= initialized
;
7818 node
->set_src
= set_src
;
7819 node
->next
= *nextp
;
7822 /* If no location was emitted do so. */
7823 if (var
->var_part
[pos
].cur_loc
== NULL
)
7824 variable_was_changed (var
, set
);
7829 /* Set the part of variable's location in the dataflow set SET. The
7830 variable part is specified by variable's declaration in DV and
7831 offset OFFSET and the part's location by LOC. IOPT should be
7832 NO_INSERT if the variable is known to be in SET already and the
7833 variable hash table must not be resized, and INSERT otherwise. */
/* Set the part of variable's location in the dataflow set SET.  The
   variable part is specified by variable's declaration in DV and
   offset OFFSET and the part's location by LOC.  IOPT should be
   NO_INSERT if the variable is known to be in SET already and the
   variable hash table must not be resized, and INSERT otherwise.
   (See the original comment block just above this definition.)  */
7836 set_variable_part (dataflow_set
*set
, rtx loc
,
7837 decl_or_value dv
, HOST_WIDE_INT offset
,
7838 enum var_init_status initialized
, rtx set_src
,
7839 enum insert_option iopt
)
/* NO_INSERT: look up without resizing the table.  */
7843 if (iopt
== NO_INSERT
)
7844 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
/* Otherwise find the slot, unsharing the hash table if needed
   (the condition choosing between these two calls is on a line
   dropped by this extraction — 7845/7846 gap).  */
7847 slot
= shared_hash_find_slot (set
->vars
, dv
);
7849 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
/* Delegate the actual location-chain update to set_slot_part.  */
7851 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7854 /* Remove all recorded register locations for the given variable part
7855 from dataflow set SET, except for those that are identical to loc.
7856 The variable part is specified by variable's declaration or value
7857 DV and offset OFFSET. */
7860 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable
**slot
,
7861 HOST_WIDE_INT offset
, rtx set_src
)
7863 variable
*var
= *slot
;
7864 int pos
= find_variable_location_part (var
, offset
, NULL
);
7868 location_chain
*node
, *next
;
7870 /* Remove the register locations from the dataflow set. */
7871 next
= var
->var_part
[pos
].loc_chain
;
7872 for (node
= next
; node
; node
= next
)
7875 if (node
->loc
!= loc
7876 && (!flag_var_tracking_uninit
7879 || !rtx_equal_p (set_src
, node
->set_src
)))
7881 if (REG_P (node
->loc
))
7883 attrs
*anode
, *anext
;
7886 /* Remove the variable part from the register's
7887 list, but preserve any other variable parts
7888 that might be regarded as live in that same
7890 anextp
= &set
->regs
[REGNO (node
->loc
)];
7891 for (anode
= *anextp
; anode
; anode
= anext
)
7893 anext
= anode
->next
;
7894 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7895 && anode
->offset
== offset
)
7901 anextp
= &anode
->next
;
7905 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7913 /* Remove all recorded register locations for the given variable part
7914 from dataflow set SET, except for those that are identical to loc.
7915 The variable part is specified by variable's declaration or value
7916 DV and offset OFFSET. */
/* Remove all recorded register locations for the given variable part
   from dataflow set SET, except those identical to LOC.  The part is
   specified by DV (decl or value) and OFFSET; the actual removal is
   done by clobber_slot_part.  NOTE(review): the early return for an
   absent slot is on lines dropped by this extraction (7929-7931).  */
7919 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7920 HOST_WIDE_INT offset
, rtx set_src
)
/* Ignore dvs that are neither values nor real declarations.  */
7924 if (!dv_as_opaque (dv
)
7925 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7928 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7932 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7935 /* Delete the part of variable's location from dataflow set SET. The
7936 variable part is specified by its SET->vars slot SLOT and offset
7937 OFFSET and the part's location by LOC. */
7940 delete_slot_part (dataflow_set
*set
, rtx loc
, variable
**slot
,
7941 HOST_WIDE_INT offset
)
7943 variable
*var
= *slot
;
7944 int pos
= find_variable_location_part (var
, offset
, NULL
);
7948 location_chain
*node
, *next
;
7949 location_chain
**nextp
;
7953 if (shared_var_p (var
, set
->vars
))
7955 /* If the variable contains the location part we have to
7956 make a copy of the variable. */
7957 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7960 if ((REG_P (node
->loc
) && REG_P (loc
)
7961 && REGNO (node
->loc
) == REGNO (loc
))
7962 || rtx_equal_p (node
->loc
, loc
))
7964 slot
= unshare_variable (set
, slot
, var
,
7965 VAR_INIT_STATUS_UNKNOWN
);
7972 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7973 cur_loc
= VAR_LOC_FROM (var
);
7975 cur_loc
= var
->var_part
[pos
].cur_loc
;
7977 /* Delete the location part. */
7979 nextp
= &var
->var_part
[pos
].loc_chain
;
7980 for (node
= *nextp
; node
; node
= next
)
7983 if ((REG_P (node
->loc
) && REG_P (loc
)
7984 && REGNO (node
->loc
) == REGNO (loc
))
7985 || rtx_equal_p (node
->loc
, loc
))
7987 /* If we have deleted the location which was last emitted
7988 we have to emit new location so add the variable to set
7989 of changed variables. */
7990 if (cur_loc
== node
->loc
)
7993 var
->var_part
[pos
].cur_loc
= NULL
;
7994 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7995 VAR_LOC_FROM (var
) = NULL
;
8002 nextp
= &node
->next
;
8005 if (var
->var_part
[pos
].loc_chain
== NULL
)
8009 while (pos
< var
->n_var_parts
)
8011 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
8016 variable_was_changed (var
, set
);
8022 /* Delete the part of variable's location from dataflow set SET. The
8023 variable part is specified by variable's declaration or value DV
8024 and offset OFFSET and the part's location by LOC. */
/* Delete the part of variable's location from dataflow set SET.  The
   part is specified by DV (decl or value) and OFFSET, the location by
   LOC; delete_slot_part does the actual work.  NOTE(review): the
   guard for a missing slot is on lines dropped by this extraction
   (8031-8033).  */
8027 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
8028 HOST_WIDE_INT offset
)
/* Look up DV without growing the (possibly shared) hash table.  */
8030 variable
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
8034 delete_slot_part (set
, loc
, slot
, offset
);
8038 /* Structure for passing some other parameters to function
8039 vt_expand_loc_callback. */
/* Parameter bundle passed (via the void *data argument) to
   vt_expand_loc_callback.  NOTE(review): the trailing member (the
   depth field described by the last comment) and the closing brace
   are on lines dropped by this extraction.  */
8040 struct expand_loc_callback_data
8042 /* The variables and values active at this point. */
8043 variable_table_type
*vars
;
8045 /* Stack of values and debug_exprs under expansion, and their
8047 auto_vec
<rtx
, 4> expanding
;
8049 /* Stack of values and debug_exprs whose expansion hit recursion
8050 cycles. They will have VALUE_RECURSED_INTO marked when added to
8051 this list. This flag will be cleared if any of its dependencies
8052 resolves to a valid location. So, if the flag remains set at the
8053 end of the search, we know no valid location for this one can
8055 auto_vec
<rtx
, 4> pending
;
8057 /* The maximum depth among the sub-expressions under expansion.
8058 Zero indicates no expansion so far. */
8062 /* Allocate the one-part auxiliary data structure for VAR, with enough
8063 room for COUNT dependencies. */
8066 loc_exp_dep_alloc (variable
*var
, int count
)
8070 gcc_checking_assert (var
->onepart
);
8072 /* We can be called with COUNT == 0 to allocate the data structure
8073 without any dependencies, e.g. for the backlinks only. However,
8074 if we are specifying a COUNT, then the dependency list must have
8075 been emptied before. It would be possible to adjust pointers or
8076 force it empty here, but this is better done at an earlier point
8077 in the algorithm, so we instead leave an assertion to catch
8079 gcc_checking_assert (!count
8080 || VAR_LOC_DEP_VEC (var
) == NULL
8081 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8083 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
8086 allocsize
= offsetof (struct onepart_aux
, deps
)
8087 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
8089 if (VAR_LOC_1PAUX (var
))
8091 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
8092 VAR_LOC_1PAUX (var
), allocsize
);
8093 /* If the reallocation moves the onepaux structure, the
8094 back-pointer to BACKLINKS in the first list member will still
8095 point to its old location. Adjust it. */
8096 if (VAR_LOC_DEP_LST (var
))
8097 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
8101 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
8102 *VAR_LOC_DEP_LSTP (var
) = NULL
;
8103 VAR_LOC_FROM (var
) = NULL
;
8104 VAR_LOC_DEPTH (var
).complexity
= 0;
8105 VAR_LOC_DEPTH (var
).entryvals
= 0;
8107 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
8110 /* Remove all entries from the vector of active dependencies of VAR,
8111 removing them from the back-links lists too. */
/* Remove all entries from the vector of active dependencies of VAR,
   unlinking each from its doubly-linked back-links list too.
   NOTE(review): the guard around the "led->next->pprev" update
   (presumably "if (led->next)") is on a line dropped by this
   extraction (8119/8121 gap).  */
8114 loc_exp_dep_clear (variable
*var
)
/* Pop dependencies until the vector is empty (or absent).  */
8116 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
8118 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
/* Unlink LED from the back-links list: fix the successor's back
   pointer, then make the predecessor skip LED.  */
8120 led
->next
->pprev
= led
->pprev
;
8122 *led
->pprev
= led
->next
;
8123 VAR_LOC_DEP_VEC (var
)->pop ();
8127 /* Insert an active dependency from VAR on X to the vector of
8128 dependencies, and add the corresponding back-link to X's list of
8129 back-links in VARS. */
8132 loc_exp_insert_dep (variable
*var
, rtx x
, variable_table_type
*vars
)
8138 dv
= dv_from_rtx (x
);
8140 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8141 an additional look up? */
8142 xvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8146 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8147 gcc_checking_assert (xvar
);
8150 /* No point in adding the same backlink more than once. This may
8151 arise if say the same value appears in two complex expressions in
8152 the same loc_list, or even more than once in a single
8154 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
8157 if (var
->onepart
== NOT_ONEPART
)
8158 led
= new loc_exp_dep
;
8162 memset (&empty
, 0, sizeof (empty
));
8163 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8164 led
= &VAR_LOC_DEP_VEC (var
)->last ();
8169 loc_exp_dep_alloc (xvar
, 0);
8170 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8171 led
->next
= *led
->pprev
;
8173 led
->next
->pprev
= &led
->next
;
8177 /* Create active dependencies of VAR on COUNT values starting at
8178 VALUE, and corresponding back-links to the entries in VARS. Return
8179 true if we found any pending-recursion results. */
/* Create active dependencies of VAR on COUNT values starting at
   VALUE, with corresponding back-links in VARS.  Return true if any
   dependency is still pending recursion resolution.  RESULT is the
   expansion result so far (NULL when unresolved).
   NOTE(review): the loop header that binds X to each of the COUNT
   values is on lines dropped by this extraction (8193-8197).  */
8182 loc_exp_dep_set (variable
*var
, rtx result
, rtx
*value
, int count
,
8183 variable_table_type
*vars
)
8185 bool pending_recursion
= false;
/* The dependency vector must have been emptied beforehand.  */
8187 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8188 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8190 /* Set up all dependencies from last_child (as set up at the end of
8191 the loop above) to the end. */
8192 loc_exp_dep_alloc (var
, count
);
/* A dependency is pending when no result was found and X is marked
   as recursed-into.  */
8198 if (!pending_recursion
)
8199 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8201 loc_exp_insert_dep (var
, x
, vars
);
8204 return pending_recursion
;
8207 /* Notify the back-links of IVAR that are pending recursion that we
8208 have found a non-NIL value for it, so they are cleared for another
8209 attempt to compute a current location. */
/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  Recurses through VARS for
   non-value dvs.  NOTE(review): several flow lines are missing from
   this extraction (e.g. the "next = led->next" step and the guard on
   NEXT before the final pprev fixup), so the exact unlink sequence
   cannot be fully confirmed here.  */
8212 notify_dependents_of_resolved_value (variable
*ivar
, variable_table_type
*vars
)
8214 loc_exp_dep
*led
, *next
;
/* Walk IVAR's back-links list.  */
8216 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8218 decl_or_value dv
= led
->dv
;
/* Value dv: clear its recursion/no-location marks so it can be
   re-expanded.  */
8223 if (dv_is_value_p (dv
))
8225 rtx value
= dv_as_value (dv
);
8227 /* If we have already resolved it, leave it alone. */
8228 if (!VALUE_RECURSED_INTO (value
))
8231 /* Check that VALUE_RECURSED_INTO, true from the test above,
8232 implies NO_LOC_P. */
8233 gcc_checking_assert (NO_LOC_P (value
));
8235 /* We won't notify variables that are being expanded,
8236 because their dependency list is cleared before
8238 NO_LOC_P (value
) = false;
8239 VALUE_RECURSED_INTO (value
) = false;
8241 gcc_checking_assert (dv_changed_p (dv
));
/* Non-value dv: must still be a one-part entity; skip unchanged
   ones, otherwise find its variable (falling back to the
   dropped_values table) and recurse.  */
8245 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8246 if (!dv_changed_p (dv
))
8250 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8253 var
= variable_from_dropped (dv
, NO_INSERT
);
8256 notify_dependents_of_resolved_value (var
, vars
);
/* Keep the back-pointer of the following node consistent.  */
8259 next
->pprev
= led
->pprev
;
8267 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8268 int max_depth
, void *data
);
8270 /* Return the combined depth, when one sub-expression evaluated to
8271 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8273 static inline expand_depth
8274 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8276 /* If we didn't find anything, stick with what we had. */
8277 if (!best_depth
.complexity
)
8280 /* If we found hadn't found anything, use the depth of the current
8281 expression. Do NOT add one extra level, we want to compute the
8282 maximum depth among sub-expressions. We'll increment it later,
8284 if (!saved_depth
.complexity
)
8287 /* Combine the entryval count so that regardless of which one we
8288 return, the entryval count is accurate. */
8289 best_depth
.entryvals
= saved_depth
.entryvals
8290 = best_depth
.entryvals
+ saved_depth
.entryvals
;
8292 if (saved_depth
.complexity
< best_depth
.complexity
)
8298 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8299 DATA for cselib expand callback. If PENDRECP is given, indicate in
8300 it whether any sub-expression couldn't be fully evaluated because
8301 it is pending recursion resolution. */
8304 vt_expand_var_loc_chain (variable
*var
, bitmap regs
, void *data
,
8307 struct expand_loc_callback_data
*elcd
8308 = (struct expand_loc_callback_data
*) data
;
8309 location_chain
*loc
, *next
;
8311 int first_child
, result_first_child
, last_child
;
8312 bool pending_recursion
;
8313 rtx loc_from
= NULL
;
8314 struct elt_loc_list
*cloc
= NULL
;
8315 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8316 int wanted_entryvals
, found_entryvals
= 0;
8318 /* Clear all backlinks pointing at this, so that we're not notified
8319 while we're active. */
8320 loc_exp_dep_clear (var
);
8323 if (var
->onepart
== ONEPART_VALUE
)
8325 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8327 gcc_checking_assert (cselib_preserved_value_p (val
));
8332 first_child
= result_first_child
= last_child
8333 = elcd
->expanding
.length ();
8335 wanted_entryvals
= found_entryvals
;
8337 /* Attempt to expand each available location in turn. */
8338 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8339 loc
|| cloc
; loc
= next
)
8341 result_first_child
= last_child
;
8345 loc_from
= cloc
->loc
;
8348 if (unsuitable_loc (loc_from
))
8353 loc_from
= loc
->loc
;
8357 gcc_checking_assert (!unsuitable_loc (loc_from
));
8359 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8360 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8361 vt_expand_loc_callback
, data
);
8362 last_child
= elcd
->expanding
.length ();
8366 depth
= elcd
->depth
;
8368 gcc_checking_assert (depth
.complexity
8369 || result_first_child
== last_child
);
8371 if (last_child
- result_first_child
!= 1)
8373 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8378 if (depth
.complexity
<= EXPR_USE_DEPTH
)
8380 if (depth
.entryvals
<= wanted_entryvals
)
8382 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8383 found_entryvals
= depth
.entryvals
;
8389 /* Set it up in case we leave the loop. */
8390 depth
.complexity
= depth
.entryvals
= 0;
8392 result_first_child
= first_child
;
8395 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8397 /* We found entries with ENTRY_VALUEs and skipped them. Since
8398 we could not find any expansions without ENTRY_VALUEs, but we
8399 found at least one with them, go back and get an entry with
8400 the minimum number ENTRY_VALUE count that we found. We could
8401 avoid looping, but since each sub-loc is already resolved,
8402 the re-expansion should be trivial. ??? Should we record all
8403 attempted locs as dependencies, so that we retry the
8404 expansion should any of them change, in the hope it can give
8405 us a new entry without an ENTRY_VALUE? */
8406 elcd
->expanding
.truncate (first_child
);
8410 /* Register all encountered dependencies as active. */
8411 pending_recursion
= loc_exp_dep_set
8412 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8413 last_child
- result_first_child
, elcd
->vars
);
8415 elcd
->expanding
.truncate (first_child
);
8417 /* Record where the expansion came from. */
8418 gcc_checking_assert (!result
|| !pending_recursion
);
8419 VAR_LOC_FROM (var
) = loc_from
;
8420 VAR_LOC_DEPTH (var
) = depth
;
8422 gcc_checking_assert (!depth
.complexity
== !result
);
8424 elcd
->depth
= update_depth (saved_depth
, depth
);
8426 /* Indicate whether any of the dependencies are pending recursion
8429 *pendrecp
= pending_recursion
;
8431 if (!pendrecp
|| !pending_recursion
)
8432 var
->var_part
[0].cur_loc
= result
;
8437 /* Callback for cselib_expand_value, that looks for expressions
8438 holding the value in the var-tracking hash tables. Return X for
8439 standard processing, anything else is to be used as-is. */
8442 vt_expand_loc_callback (rtx x
, bitmap regs
,
8443 int max_depth ATTRIBUTE_UNUSED
,
8446 struct expand_loc_callback_data
*elcd
8447 = (struct expand_loc_callback_data
*) data
;
8451 bool pending_recursion
= false;
8452 bool from_empty
= false;
8454 switch (GET_CODE (x
))
8457 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8459 vt_expand_loc_callback
, data
);
8464 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8465 GET_MODE (SUBREG_REG (x
)),
8468 /* Invalid SUBREGs are ok in debug info. ??? We could try
8469 alternate expansions for the VALUE as well. */
8471 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
8477 dv
= dv_from_rtx (x
);
8484 elcd
->expanding
.safe_push (x
);
8486 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8487 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8491 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8495 var
= elcd
->vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8500 var
= variable_from_dropped (dv
, INSERT
);
8503 gcc_checking_assert (var
);
8505 if (!dv_changed_p (dv
))
8507 gcc_checking_assert (!NO_LOC_P (x
));
8508 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8509 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8510 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8512 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8514 return var
->var_part
[0].cur_loc
;
8517 VALUE_RECURSED_INTO (x
) = true;
8518 /* This is tentative, but it makes some tests simpler. */
8519 NO_LOC_P (x
) = true;
8521 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8523 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8525 if (pending_recursion
)
8527 gcc_checking_assert (!result
);
8528 elcd
->pending
.safe_push (x
);
8532 NO_LOC_P (x
) = !result
;
8533 VALUE_RECURSED_INTO (x
) = false;
8534 set_dv_changed (dv
, false);
8537 notify_dependents_of_resolved_value (var
, elcd
->vars
);
8543 /* While expanding variables, we may encounter recursion cycles
8544 because of mutual (possibly indirect) dependencies between two
8545 particular variables (or values), say A and B. If we're trying to
8546 expand A when we get to B, which in turn attempts to expand A, if
8547 we can't find any other expansion for B, we'll add B to this
8548 pending-recursion stack, and tentatively return NULL for its
8549 location. This tentative value will be used for any other
8550 occurrences of B, unless A gets some other location, in which case
8551 it will notify B that it is worth another try at computing a
8552 location for it, and it will use the location computed for A then.
8553 At the end of the expansion, the tentative NULL locations become
8554 final for all members of PENDING that didn't get a notification.
8555 This function performs this finalization of NULL locations. */
8558 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8560 while (!pending
->is_empty ())
8562 rtx x
= pending
->pop ();
8565 if (!VALUE_RECURSED_INTO (x
))
8568 gcc_checking_assert (NO_LOC_P (x
));
8569 VALUE_RECURSED_INTO (x
) = false;
8570 dv
= dv_from_rtx (x
);
8571 gcc_checking_assert (dv_changed_p (dv
));
8572 set_dv_changed (dv
, false);
8576 /* Initialize expand_loc_callback_data D with variable hash table V.
8577 It must be a macro because of alloca (vec stack). */
8578 #define INIT_ELCD(d, v) \
8582 (d).depth.complexity = (d).depth.entryvals = 0; \
8585 /* Finalize expand_loc_callback_data D, resolved to location L. */
8586 #define FINI_ELCD(d, l) \
8589 resolve_expansions_pending_recursion (&(d).pending); \
8590 (d).pending.release (); \
8591 (d).expanding.release (); \
8593 if ((l) && MEM_P (l)) \
8594 (l) = targetm.delegitimize_address (l); \
8598 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8599 equivalences in VARS, updating their CUR_LOCs in the process. */
8602 vt_expand_loc (rtx loc
, variable_table_type
*vars
)
8604 struct expand_loc_callback_data data
;
8607 if (!MAY_HAVE_DEBUG_BIND_INSNS
)
8610 INIT_ELCD (data
, vars
);
8612 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8613 vt_expand_loc_callback
, &data
);
8615 FINI_ELCD (data
, result
);
8620 /* Expand the one-part VARiable to a location, using the equivalences
8621 in VARS, updating their CUR_LOCs in the process. */
8624 vt_expand_1pvar (variable
*var
, variable_table_type
*vars
)
8626 struct expand_loc_callback_data data
;
8629 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
8631 if (!dv_changed_p (var
->dv
))
8632 return var
->var_part
[0].cur_loc
;
8634 INIT_ELCD (data
, vars
);
8636 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8638 gcc_checking_assert (data
.expanding
.is_empty ());
8640 FINI_ELCD (data
, loc
);
8645 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8646 additional parameters: WHERE specifies whether the note shall be emitted
8647 before or after instruction INSN. */
8650 emit_note_insn_var_location (variable
**varp
, emit_note_data
*data
)
8652 variable
*var
= *varp
;
8653 rtx_insn
*insn
= data
->insn
;
8654 enum emit_note_where where
= data
->where
;
8655 variable_table_type
*vars
= data
->vars
;
8658 int i
, j
, n_var_parts
;
8660 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8661 HOST_WIDE_INT last_limit
;
8662 tree type_size_unit
;
8663 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8664 rtx loc
[MAX_VAR_PARTS
];
8668 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8669 || var
->onepart
== ONEPART_VDECL
);
8671 decl
= dv_as_decl (var
->dv
);
8677 for (i
= 0; i
< var
->n_var_parts
; i
++)
8678 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8679 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
8680 for (i
= 0; i
< var
->n_var_parts
; i
++)
8682 machine_mode mode
, wider_mode
;
8684 HOST_WIDE_INT offset
;
8686 if (i
== 0 && var
->onepart
)
8688 gcc_checking_assert (var
->n_var_parts
== 1);
8690 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8691 loc2
= vt_expand_1pvar (var
, vars
);
8695 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8700 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8702 offset
= VAR_PART_OFFSET (var
, i
);
8703 loc2
= var
->var_part
[i
].cur_loc
;
8704 if (loc2
&& GET_CODE (loc2
) == MEM
8705 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8707 rtx depval
= XEXP (loc2
, 0);
8709 loc2
= vt_expand_loc (loc2
, vars
);
8712 loc_exp_insert_dep (var
, depval
, vars
);
8719 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
8720 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8721 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8723 initialized
= lc
->init
;
8729 offsets
[n_var_parts
] = offset
;
8735 loc
[n_var_parts
] = loc2
;
8736 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8737 if (mode
== VOIDmode
&& var
->onepart
)
8738 mode
= DECL_MODE (decl
);
8739 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8741 /* Attempt to merge adjacent registers or memory. */
8742 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8743 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8745 if (j
< var
->n_var_parts
8746 && GET_MODE_WIDER_MODE (mode
).exists (&wider_mode
)
8747 && var
->var_part
[j
].cur_loc
8748 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8749 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8750 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8751 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8752 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
8756 if (REG_P (loc
[n_var_parts
])
8757 && hard_regno_nregs (REGNO (loc
[n_var_parts
]), mode
) * 2
8758 == hard_regno_nregs (REGNO (loc
[n_var_parts
]), wider_mode
)
8759 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8762 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8763 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8765 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8766 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8769 if (!REG_P (new_loc
)
8770 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8773 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
8776 else if (MEM_P (loc
[n_var_parts
])
8777 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8778 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8779 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8781 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8782 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8783 XEXP (XEXP (loc2
, 0), 0))
8784 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8785 == GET_MODE_SIZE (mode
))
8786 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8787 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8788 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8789 XEXP (XEXP (loc2
, 0), 0))
8790 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8791 + GET_MODE_SIZE (mode
)
8792 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8793 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8799 loc
[n_var_parts
] = new_loc
;
8801 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8807 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8808 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8811 if (! flag_var_tracking_uninit
)
8812 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8816 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
, initialized
);
8817 else if (n_var_parts
== 1)
8821 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8822 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8826 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
, initialized
);
8828 else if (n_var_parts
)
8832 for (i
= 0; i
< n_var_parts
; i
++)
8834 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8836 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8837 gen_rtvec_v (n_var_parts
, loc
));
8838 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8839 parallel
, initialized
);
8842 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8844 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8845 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8846 NOTE_DURING_CALL_P (note
) = true;
8850 /* Make sure that the call related notes come first. */
8851 while (NEXT_INSN (insn
)
8853 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8854 && NOTE_DURING_CALL_P (insn
))
8855 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8856 insn
= NEXT_INSN (insn
);
8858 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8859 && NOTE_DURING_CALL_P (insn
))
8860 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8861 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8863 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8865 NOTE_VAR_LOCATION (note
) = note_vl
;
8867 set_dv_changed (var
->dv
, false);
8868 gcc_assert (var
->in_changed_variables
);
8869 var
->in_changed_variables
= false;
8870 changed_variables
->clear_slot (varp
);
8872 /* Continue traversing the hash table. */
8876 /* While traversing changed_variables, push onto DATA (a stack of RTX
8877 values) entries that aren't user variables. */
8880 var_track_values_to_stack (variable
**slot
,
8881 vec
<rtx
, va_heap
> *changed_values_stack
)
8883 variable
*var
= *slot
;
8885 if (var
->onepart
== ONEPART_VALUE
)
8886 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8887 else if (var
->onepart
== ONEPART_DEXPR
)
8888 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
8893 /* Remove from changed_variables the entry whose DV corresponds to
8894 value or debug_expr VAL. */
8896 remove_value_from_changed_variables (rtx val
)
8898 decl_or_value dv
= dv_from_rtx (val
);
8902 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8905 var
->in_changed_variables
= false;
8906 changed_variables
->clear_slot (slot
);
8909 /* If VAL (a value or debug_expr) has backlinks to variables actively
8910 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8911 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8912 have dependencies of their own to notify. */
8915 notify_dependents_of_changed_value (rtx val
, variable_table_type
*htab
,
8916 vec
<rtx
, va_heap
> *changed_values_stack
)
8921 decl_or_value dv
= dv_from_rtx (val
);
8923 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8926 slot
= htab
->find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8928 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8932 while ((led
= VAR_LOC_DEP_LST (var
)))
8934 decl_or_value ldv
= led
->dv
;
8937 /* Deactivate and remove the backlink, as it was “used up”. It
8938 makes no sense to attempt to notify the same entity again:
8939 either it will be recomputed and re-register an active
8940 dependency, or it will still have the changed mark. */
8942 led
->next
->pprev
= led
->pprev
;
8944 *led
->pprev
= led
->next
;
8948 if (dv_changed_p (ldv
))
8951 switch (dv_onepart_p (ldv
))
8955 set_dv_changed (ldv
, true);
8956 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8960 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8961 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8962 variable_was_changed (ivar
, NULL
);
8967 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8970 int i
= ivar
->n_var_parts
;
8973 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8975 if (loc
&& GET_CODE (loc
) == MEM
8976 && XEXP (loc
, 0) == val
)
8978 variable_was_changed (ivar
, NULL
);
8991 /* Take out of changed_variables any entries that don't refer to use
8992 variables. Back-propagate change notifications from values and
8993 debug_exprs to their active dependencies in HTAB or in
8994 CHANGED_VARIABLES. */
8997 process_changed_values (variable_table_type
*htab
)
9001 auto_vec
<rtx
, 20> changed_values_stack
;
9003 /* Move values from changed_variables to changed_values_stack. */
9005 ->traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
9006 (&changed_values_stack
);
9008 /* Back-propagate change notifications in values while popping
9009 them from the stack. */
9010 for (n
= i
= changed_values_stack
.length ();
9011 i
> 0; i
= changed_values_stack
.length ())
9013 val
= changed_values_stack
.pop ();
9014 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
9016 /* This condition will hold when visiting each of the entries
9017 originally in changed_variables. We can't remove them
9018 earlier because this could drop the backlinks before we got a
9019 chance to use them. */
9022 remove_value_from_changed_variables (val
);
9028 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9029 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9030 the notes shall be emitted before of after instruction INSN. */
9033 emit_notes_for_changes (rtx_insn
*insn
, enum emit_note_where where
,
9036 emit_note_data data
;
9037 variable_table_type
*htab
= shared_hash_htab (vars
);
9039 if (!changed_variables
->elements ())
9042 if (MAY_HAVE_DEBUG_BIND_INSNS
)
9043 process_changed_values (htab
);
9050 ->traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
9053 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9054 same variable in hash table DATA or is not there at all. */
9057 emit_notes_for_differences_1 (variable
**slot
, variable_table_type
*new_vars
)
9059 variable
*old_var
, *new_var
;
9062 new_var
= new_vars
->find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
9066 /* Variable has disappeared. */
9067 variable
*empty_var
= NULL
;
9069 if (old_var
->onepart
== ONEPART_VALUE
9070 || old_var
->onepart
== ONEPART_DEXPR
)
9072 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
9075 gcc_checking_assert (!empty_var
->in_changed_variables
);
9076 if (!VAR_LOC_1PAUX (old_var
))
9078 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
9079 VAR_LOC_1PAUX (empty_var
) = NULL
;
9082 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
9088 empty_var
= onepart_pool_allocate (old_var
->onepart
);
9089 empty_var
->dv
= old_var
->dv
;
9090 empty_var
->refcount
= 0;
9091 empty_var
->n_var_parts
= 0;
9092 empty_var
->onepart
= old_var
->onepart
;
9093 empty_var
->in_changed_variables
= false;
9096 if (empty_var
->onepart
)
9098 /* Propagate the auxiliary data to (ultimately)
9099 changed_variables. */
9100 empty_var
->var_part
[0].loc_chain
= NULL
;
9101 empty_var
->var_part
[0].cur_loc
= NULL
;
9102 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
9103 VAR_LOC_1PAUX (old_var
) = NULL
;
9105 variable_was_changed (empty_var
, NULL
);
9106 /* Continue traversing the hash table. */
9109 /* Update cur_loc and one-part auxiliary data, before new_var goes
9110 through variable_was_changed. */
9111 if (old_var
!= new_var
&& new_var
->onepart
)
9113 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
9114 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
9115 VAR_LOC_1PAUX (old_var
) = NULL
;
9116 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
9118 if (variable_different_p (old_var
, new_var
))
9119 variable_was_changed (new_var
, NULL
);
9121 /* Continue traversing the hash table. */
9125 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9129 emit_notes_for_differences_2 (variable
**slot
, variable_table_type
*old_vars
)
9131 variable
*old_var
, *new_var
;
9134 old_var
= old_vars
->find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
9138 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9139 new_var
->var_part
[i
].cur_loc
= NULL
;
9140 variable_was_changed (new_var
, NULL
);
9143 /* Continue traversing the hash table. */
9147 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9151 emit_notes_for_differences (rtx_insn
*insn
, dataflow_set
*old_set
,
9152 dataflow_set
*new_set
)
9154 shared_hash_htab (old_set
->vars
)
9155 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9156 (shared_hash_htab (new_set
->vars
));
9157 shared_hash_htab (new_set
->vars
)
9158 ->traverse
<variable_table_type
*, emit_notes_for_differences_2
>
9159 (shared_hash_htab (old_set
->vars
));
9160 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
9163 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9166 next_non_note_insn_var_location (rtx_insn
*insn
)
9170 insn
= NEXT_INSN (insn
);
9173 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
9180 /* Emit the notes for changes of location parts in the basic block BB. */
9183 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9186 micro_operation
*mo
;
9188 dataflow_set_clear (set
);
9189 dataflow_set_copy (set
, &VTI (bb
)->in
);
9191 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9193 rtx_insn
*insn
= mo
->insn
;
9194 rtx_insn
*next_insn
= next_non_note_insn_var_location (insn
);
9199 dataflow_set_clear_at_call (set
, insn
);
9200 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9202 rtx arguments
= mo
->u
.loc
, *p
= &arguments
;
9206 XEXP (XEXP (*p
, 0), 1)
9207 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9208 shared_hash_htab (set
->vars
));
9209 /* If expansion is successful, keep it in the list. */
9210 if (XEXP (XEXP (*p
, 0), 1))
9212 /* Otherwise, if the following item is data_value for it,
9214 else if (XEXP (*p
, 1)
9215 && REG_P (XEXP (XEXP (*p
, 0), 0))
9216 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9217 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9219 && REGNO (XEXP (XEXP (*p
, 0), 0))
9220 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9222 *p
= XEXP (XEXP (*p
, 1), 1);
9223 /* Just drop this item. */
9227 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9228 NOTE_VAR_LOCATION (note
) = arguments
;
9234 rtx loc
= mo
->u
.loc
;
9237 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9239 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9241 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9247 rtx loc
= mo
->u
.loc
;
9251 if (GET_CODE (loc
) == CONCAT
)
9253 val
= XEXP (loc
, 0);
9254 vloc
= XEXP (loc
, 1);
9262 var
= PAT_VAR_LOCATION_DECL (vloc
);
9264 clobber_variable_part (set
, NULL_RTX
,
9265 dv_from_decl (var
), 0, NULL_RTX
);
9268 if (VAL_NEEDS_RESOLUTION (loc
))
9269 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9270 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9271 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9274 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9275 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9276 dv_from_decl (var
), 0,
9277 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9280 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9286 rtx loc
= mo
->u
.loc
;
9287 rtx val
, vloc
, uloc
;
9289 vloc
= uloc
= XEXP (loc
, 1);
9290 val
= XEXP (loc
, 0);
9292 if (GET_CODE (val
) == CONCAT
)
9294 uloc
= XEXP (val
, 1);
9295 val
= XEXP (val
, 0);
9298 if (VAL_NEEDS_RESOLUTION (loc
))
9299 val_resolve (set
, val
, vloc
, insn
);
9301 val_store (set
, val
, uloc
, insn
, false);
9303 if (VAL_HOLDS_TRACK_EXPR (loc
))
9305 if (GET_CODE (uloc
) == REG
)
9306 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9308 else if (GET_CODE (uloc
) == MEM
)
9309 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9313 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9319 rtx loc
= mo
->u
.loc
;
9320 rtx val
, vloc
, uloc
;
9324 uloc
= XEXP (vloc
, 1);
9325 val
= XEXP (vloc
, 0);
9328 if (GET_CODE (uloc
) == SET
)
9330 dstv
= SET_DEST (uloc
);
9331 srcv
= SET_SRC (uloc
);
9339 if (GET_CODE (val
) == CONCAT
)
9341 dstv
= vloc
= XEXP (val
, 1);
9342 val
= XEXP (val
, 0);
9345 if (GET_CODE (vloc
) == SET
)
9347 srcv
= SET_SRC (vloc
);
9349 gcc_assert (val
!= srcv
);
9350 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9352 dstv
= vloc
= SET_DEST (vloc
);
9354 if (VAL_NEEDS_RESOLUTION (loc
))
9355 val_resolve (set
, val
, srcv
, insn
);
9357 else if (VAL_NEEDS_RESOLUTION (loc
))
9359 gcc_assert (GET_CODE (uloc
) == SET
9360 && GET_CODE (SET_SRC (uloc
)) == REG
);
9361 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9364 if (VAL_HOLDS_TRACK_EXPR (loc
))
9366 if (VAL_EXPR_IS_CLOBBERED (loc
))
9369 var_reg_delete (set
, uloc
, true);
9370 else if (MEM_P (uloc
))
9372 gcc_assert (MEM_P (dstv
));
9373 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9374 var_mem_delete (set
, dstv
, true);
9379 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9380 rtx src
= NULL
, dst
= uloc
;
9381 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9383 if (GET_CODE (uloc
) == SET
)
9385 src
= SET_SRC (uloc
);
9386 dst
= SET_DEST (uloc
);
9391 status
= find_src_status (set
, src
);
9393 src
= find_src_set_src (set
, src
);
9397 var_reg_delete_and_set (set
, dst
, !copied_p
,
9399 else if (MEM_P (dst
))
9401 gcc_assert (MEM_P (dstv
));
9402 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9403 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9408 else if (REG_P (uloc
))
9409 var_regno_delete (set
, REGNO (uloc
));
9410 else if (MEM_P (uloc
))
9412 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9413 gcc_checking_assert (vloc
== dstv
);
9415 clobber_overlapping_mems (set
, vloc
);
9418 val_store (set
, val
, dstv
, insn
, true);
9420 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9427 rtx loc
= mo
->u
.loc
;
9430 if (GET_CODE (loc
) == SET
)
9432 set_src
= SET_SRC (loc
);
9433 loc
= SET_DEST (loc
);
9437 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9440 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9443 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9450 rtx loc
= mo
->u
.loc
;
9451 enum var_init_status src_status
;
9454 if (GET_CODE (loc
) == SET
)
9456 set_src
= SET_SRC (loc
);
9457 loc
= SET_DEST (loc
);
9460 src_status
= find_src_status (set
, set_src
);
9461 set_src
= find_src_set_src (set
, set_src
);
9464 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9466 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9468 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9475 rtx loc
= mo
->u
.loc
;
9478 var_reg_delete (set
, loc
, false);
9480 var_mem_delete (set
, loc
, false);
9482 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9488 rtx loc
= mo
->u
.loc
;
9491 var_reg_delete (set
, loc
, true);
9493 var_mem_delete (set
, loc
, true);
9495 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9501 set
->stack_adjust
+= mo
->u
.adjust
;
9507 /* Return BB's head, unless BB is the block that succeeds ENTRY_BLOCK,
9508 in which case it searches back from BB's head for the very first
9509 insn. Use [get_first_insn (bb), BB_HEAD (bb->next_bb)[ as a range
9510 to iterate over all insns of a function while iterating over its
9514 get_first_insn (basic_block bb
)
9516 rtx_insn
*insn
= BB_HEAD (bb
);
9518 if (bb
->prev_bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
9519 while (rtx_insn
*prev
= PREV_INSN (insn
))
9525 /* Emit notes for the whole function. */
9528 vt_emit_notes (void)
9533 gcc_assert (!changed_variables
->elements ());
9535 /* Free memory occupied by the out hash tables, as they aren't used
9537 FOR_EACH_BB_FN (bb
, cfun
)
9538 dataflow_set_clear (&VTI (bb
)->out
);
9540 /* Enable emitting notes by functions (mainly by set_variable_part and
9541 delete_variable_part). */
9544 if (MAY_HAVE_DEBUG_BIND_INSNS
)
9545 dropped_values
= new variable_table_type (cselib_get_next_uid () * 2);
9547 dataflow_set_init (&cur
);
9549 FOR_EACH_BB_FN (bb
, cfun
)
9551 /* Emit the notes for changes of variable locations between two
9552 subsequent basic blocks. */
9553 emit_notes_for_differences (get_first_insn (bb
),
9554 &cur
, &VTI (bb
)->in
);
9556 if (MAY_HAVE_DEBUG_BIND_INSNS
)
9557 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9559 /* Emit the notes for the changes in the basic block itself. */
9560 emit_notes_in_bb (bb
, &cur
);
9562 if (MAY_HAVE_DEBUG_BIND_INSNS
)
9563 delete local_get_addr_cache
;
9564 local_get_addr_cache
= NULL
;
9566 /* Free memory occupied by the in hash table, we won't need it
9568 dataflow_set_clear (&VTI (bb
)->in
);
9572 shared_hash_htab (cur
.vars
)
9573 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9574 (shared_hash_htab (empty_shared_hash
));
9576 dataflow_set_destroy (&cur
);
9578 if (MAY_HAVE_DEBUG_BIND_INSNS
)
9579 delete dropped_values
;
9580 dropped_values
= NULL
;
9585 /* If there is a declaration and offset associated with register/memory RTL
9586 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
/* NOTE(review): this extract has gaps in the embedded original line numbers
   (e.g. 9587-9588, 9597-9599, 9619-9627); the function's return type line,
   braces and return statements appear to be elided -- verify against the
   upstream var-tracking.c before relying on this text.  */
9589 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, poly_int64
*offsetp
)
/* Case 1: a REG carrying attributes -- decl and offset come straight
   from REG_EXPR / REG_OFFSET.  */
9593 if (REG_ATTRS (rtl
))
9595 *declp
= REG_EXPR (rtl
);
9596 *offsetp
= REG_OFFSET (rtl
);
/* Case 2: a PARALLEL of (reg, offset) entries.  Each entry's register
   must have attributes, and apparently all entries must share a single
   REG_EXPR (the bail-out branches are elided in this extract).  OFFSET
   starts at the MAX_VAR_PARTS sentinel and is narrowed to the minimum
   tracked offset seen.  */
9600 else if (GET_CODE (rtl
) == PARALLEL
)
9602 tree decl
= NULL_TREE
;
9603 HOST_WIDE_INT offset
= MAX_VAR_PARTS
;
9604 int len
= XVECLEN (rtl
, 0), i
;
9606 for (i
= 0; i
< len
; i
++)
9608 rtx reg
= XEXP (XVECEXP (rtl
, 0, i
), 0);
9609 if (!REG_P (reg
) || !REG_ATTRS (reg
))
9612 decl
= REG_EXPR (reg
);
9613 if (REG_EXPR (reg
) != decl
)
9615 HOST_WIDE_INT this_offset
;
/* Only offsets accepted by track_offset_p participate; others are
   presumably rejected (rejection branch elided here).  */
9616 if (!track_offset_p (REG_OFFSET (reg
), &this_offset
))
9618 offset
= MIN (offset
, this_offset
);
/* Case 3: a MEM carrying attributes -- use MEM_EXPR / int_mem_offset.  */
9628 else if (MEM_P (rtl
))
9630 if (MEM_ATTRS (rtl
))
9632 *declp
= MEM_EXPR (rtl
);
9633 *offsetp
= int_mem_offset (rtl
);
9640 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
   of VAL: build an ENTRY_VALUE rtx in RTL's mode, point its
   ENTRY_VALUE_EXP at RTL, and register it with cselib as a permanent
   equivalence anchored at the function's first insns (get_insns ()).
   NOTE(review): lines 9641-9643/9645/9647/9649 of the original are
   elided from this extract (signature return type, braces, and possibly
   an early-out) -- verify against upstream.  */
9644 record_entry_value (cselib_val
*val
, rtx rtl
)
9646 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9648 ENTRY_VALUE_EXP (ev
) = rtl
;
/* Permanent equivalences survive cselib_reset_table between blocks.  */
9650 cselib_add_permanent_equiv (val
, ev
, get_insns ());
9653 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
/* NOTE(review): this extract elides many original lines (local variable
   declarations such as DECL/OFFSET/MODE/OUT/DV, braces, early returns).
   The comments below describe only what the visible code shows; verify
   the elided control flow against the upstream file.  */
9656 vt_add_function_parameter (tree parm
)
9658 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9659 rtx incoming
= DECL_INCOMING_RTL (parm
);
/* Guard clauses: only genuine PARM_DECLs with both a DECL_RTL and an
   incoming RTL, and neither in BLKmode, are tracked (the early-return
   bodies are elided in this extract).  */
9666 if (TREE_CODE (parm
) != PARM_DECL
)
9669 if (!decl_rtl
|| !incoming
)
9672 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9675 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9676 rewrite the incoming location of parameters passed on the stack
9677 into MEMs based on the argument pointer, so that incoming doesn't
9678 depend on a pseudo. */
9679 if (MEM_P (incoming
)
9680 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9681 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9682 && XEXP (XEXP (incoming
, 0), 0)
9683 == crtl
->args
.internal_arg_pointer
9684 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9686 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9687 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9688 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
/* Rebase the MEM address onto arg_pointer_rtx + OFF (assignment target
   line is elided here; presumably `incoming =`).  */
9690 = replace_equiv_address_nv (incoming
,
9691 plus_constant (Pmode
,
9692 arg_pointer_rtx
, off
));
9695 #ifdef HAVE_window_save
9696 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9697 If the target machine has an explicit window save instruction, the
9698 actual entry value is the corresponding OUTGOING_REGNO instead. */
9699 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
/* Three shapes are rewritten: a bare hard REG, a PARALLEL of REGs, and
   a MEM whose address is a hard REG.  Each rewritten pair is pushed on
   windowed_parm_regs so notes can be fixed up later.  */
9701 if (REG_P (incoming
)
9702 && HARD_REGISTER_P (incoming
)
9703 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9706 p
.incoming
= incoming
;
9708 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9709 OUTGOING_REGNO (REGNO (incoming
)), 0);
9710 p
.outgoing
= incoming
;
9711 vec_safe_push (windowed_parm_regs
, p
);
9713 else if (GET_CODE (incoming
) == PARALLEL
)
9716 = gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (XVECLEN (incoming
, 0)));
9719 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9721 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9724 reg
= gen_rtx_REG_offset (reg
, GET_MODE (reg
),
9725 OUTGOING_REGNO (REGNO (reg
)), 0);
9727 XVECEXP (outgoing
, 0, i
)
9728 = gen_rtx_EXPR_LIST (VOIDmode
, reg
,
9729 XEXP (XVECEXP (incoming
, 0, i
), 1));
9730 vec_safe_push (windowed_parm_regs
, p
);
9733 incoming
= outgoing
;
9735 else if (MEM_P (incoming
)
9736 && REG_P (XEXP (incoming
, 0))
9737 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9739 rtx reg
= XEXP (incoming
, 0);
9740 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9744 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9746 vec_safe_push (windowed_parm_regs
, p
);
9747 incoming
= replace_equiv_address_nv (incoming
, reg
);
/* Determine the decl/offset to track.  If the incoming RTL has none,
   fall back to DECL_RTL (invisible reference / spilled-pseudo cases);
   the intermediate branches are partially elided in this extract.  */
9753 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9755 if (MEM_P (incoming
))
9757 /* This means argument is passed by invisible reference. */
9763 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9765 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9766 GET_MODE (decl_rtl
));
9775 /* If that DECL_RTL wasn't a pseudo that got spilled to
9776 memory, bail out. Otherwise, the spill slot sharing code
9777 will force the memory to reference spill_slot_decl (%sfp),
9778 so we don't match above. That's ok, the pseudo must have
9779 referenced the entire parameter, so just reset OFFSET. */
9780 if (decl
!= get_spill_slot_decl (false))
9785 HOST_WIDE_INT const_offset
;
9786 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &const_offset
))
/* The parameter is registered in the ENTRY block's OUT set.  */
9789 out
= &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
;
9791 dv
= dv_from_decl (parm
);
9793 if (target_for_debug_bind (parm
)
9794 /* We can't deal with these right now, because this kind of
9795 variable is single-part. ??? We could handle parallels
9796 that describe multiple locations for the same single
9797 value, but ATM we don't. */
9798 && GET_CODE (incoming
) != PARALLEL
)
9803 /* ??? We shouldn't ever hit this, but it may happen because
9804 arguments passed by invisible reference aren't dealt with
9805 above: incoming-rtl will have Pmode rather than the
9806 expected mode for the type. */
/* For bind targets, bind the parameter decl to the cselib VALUE of the
   lowpart of its incoming location, then continue tracking by value.  */
9810 lowpart
= var_lowpart (mode
, incoming
);
9814 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9815 VOIDmode
, get_insns ());
9817 /* ??? Float-typed values in memory are not handled by
9821 preserve_value (val
);
9822 set_variable_part (out
, val
->val_rtx
, dv
, const_offset
,
9823 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9824 dv
= dv_from_value (val
->val_rtx
);
9827 if (MEM_P (incoming
))
9829 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9830 VOIDmode
, get_insns ());
9833 preserve_value (val
);
9834 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
/* Finally, record the incoming location itself: as a hard register,
   as each register of a multi-part PARALLEL, or as a memory slot.  */
9839 if (REG_P (incoming
))
9841 incoming
= var_lowpart (mode
, incoming
);
9842 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9843 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, const_offset
,
9845 set_variable_part (out
, incoming
, dv
, const_offset
,
9846 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9847 if (dv_is_value_p (dv
))
9849 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
/* Pointer-to-integer REFERENCE_TYPE parameters: also record an
   entry value for the pointed-to object (*parm).  */
9850 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9851 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9853 machine_mode indmode
9854 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9855 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9856 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9861 preserve_value (val
);
9862 record_entry_value (val
, mem
);
9863 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9864 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9869 else if (GET_CODE (incoming
) == PARALLEL
&& !dv_onepart_p (dv
))
9873 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9875 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9876 /* vt_get_decl_and_offset has already checked that the offset
9877 is a valid variable part. */
9878 const_offset
= get_tracked_reg_offset (reg
);
9879 gcc_assert (REGNO (reg
) < FIRST_PSEUDO_REGISTER
);
9880 attrs_list_insert (&out
->regs
[REGNO (reg
)], dv
, const_offset
, reg
);
9881 set_variable_part (out
, reg
, dv
, const_offset
,
9882 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9885 else if (MEM_P (incoming
))
9887 incoming
= var_lowpart (mode
, incoming
);
9888 set_variable_part (out
, incoming
, dv
, const_offset
,
9889 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9893 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
/* Walks DECL_ARGUMENTS of the current function and registers each
   non-pointer-bounds parameter via vt_add_function_parameter.  Also
   handles an artificial, nameless PARM_DECL hidden inside the
   DECL_RESULT's value expression (the sret-style result pointer).
   NOTE(review): the signature's return-type line and braces are elided
   from this extract.  */
9896 vt_add_function_parameters (void)
9900 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9901 parm
; parm
= DECL_CHAIN (parm
))
9902 if (!POINTER_BOUNDS_P (parm
))
9903 vt_add_function_parameter (parm
);
9905 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9907 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
/* Look through an INDIRECT_REF wrapper to find the underlying decl.  */
9909 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9910 vexpr
= TREE_OPERAND (vexpr
, 0);
9912 if (TREE_CODE (vexpr
) == PARM_DECL
9913 && DECL_ARTIFICIAL (vexpr
)
9914 && !DECL_IGNORED_P (vexpr
)
9915 && DECL_NAMELESS (vexpr
))
9916 vt_add_function_parameter (vexpr
);
9920 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9921 ensure it isn't flushed during cselib_reset_table.
9922 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9923 has been eliminated. */
/* NOTE(review): the #else/#endif of the FRAME_POINTER_CFA_OFFSET
   conditional and several braces/returns are elided from this
   extract.  */
9926 vt_init_cfa_base (void)
9930 #ifdef FRAME_POINTER_CFA_OFFSET
9931 cfa_base_rtx
= frame_pointer_rtx
;
9932 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9934 cfa_base_rtx
= arg_pointer_rtx
;
9935 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
/* The CFA base must be a fixed register distinct from the hard frame
   pointer; otherwise give up on using a CFA base.  */
9937 if (cfa_base_rtx
== hard_frame_pointer_rtx
9938 || !fixed_regs
[REGNO (cfa_base_rtx
)])
9940 cfa_base_rtx
= NULL_RTX
;
9943 if (!MAY_HAVE_DEBUG_BIND_INSNS
)
9946 /* Tell alias analysis that cfa_base_rtx should share
9947 find_base_term value with stack pointer or hard frame pointer. */
9948 if (!frame_pointer_needed
)
9949 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
9950 else if (!crtl
->stack_realign_tried
)
9951 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
/* Create and preserve the cselib VALUE for the CFA base so it survives
   cselib_reset_table between basic blocks.  */
9953 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
9954 VOIDmode
, get_insns ());
9955 preserve_value (val
);
9956 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
9959 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
/* Converts a debug-marker insn into the corresponding NOTE (here only
   the NOTE_INSN_BEGIN_STMT case is visible; the switch statement, other
   cases, the deletion of INSN, and the return are elided from this
   extract).  BB, if non-null, receives the note's BLOCK_FOR_INSN.  */
9962 reemit_marker_as_note (rtx_insn
*insn
, basic_block
*bb
)
9964 gcc_checking_assert (DEBUG_MARKER_INSN_P (insn
));
9966 enum insn_note kind
= INSN_DEBUG_MARKER_KIND (insn
);
9970 case NOTE_INSN_BEGIN_STMT
:
9972 rtx_insn
*note
= NULL
;
/* Only emit the note when the function actually keeps non-bind
   markers; the marker's source location carries over to the note.  */
9973 if (cfun
->debug_nonbind_markers
)
9975 note
= emit_note_before (kind
, insn
);
9976 NOTE_MARKER_LOCATION (note
) = INSN_LOCATION (insn
);
9978 BLOCK_FOR_INSN (note
) = *bb
;
9989 /* Allocate and initialize the data structures for variable tracking
9990 and parse the RTL to get the micro operations. */
/* NOTE(review): this extract elides many original lines (the return
   type, braces, several local declarations such as REG/OFST/ELIM/VAL/
   EXPR/INSN, the #else branches of the FRAME_POINTER_CFA_OFFSET
   conditionals, early returns, and the final `return true;`).  The
   comments below describe only the visible code; verify the elided
   control flow against the upstream file.  */
9993 vt_initialize (void)
9996 HOST_WIDE_INT fp_cfa_offset
= -1;
/* Per-basic-block aux data (variable_tracking_info) and the shared
   hash tables used by the dataflow sets.  */
9998 alloc_aux_for_blocks (sizeof (variable_tracking_info
));
10000 empty_shared_hash
= shared_hash_pool
.allocate ();
10001 empty_shared_hash
->refcount
= 1;
10002 empty_shared_hash
->htab
= new variable_table_type (1);
10003 changed_variables
= new variable_table_type (10);
10005 /* Init the IN and OUT sets. */
10006 FOR_ALL_BB_FN (bb
, cfun
)
10008 VTI (bb
)->visited
= false;
10009 VTI (bb
)->flooded
= false;
10010 dataflow_set_init (&VTI (bb
)->in
);
10011 dataflow_set_init (&VTI (bb
)->out
);
10012 VTI (bb
)->permp
= NULL
;
/* cselib and its caches are only needed when debug-bind insns may
   exist; otherwise the pointers stay null.  */
10015 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10017 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
)
;
10018 scratch_regs
= BITMAP_ALLOC (NULL
);
10019 preserved_values
.create (256);
10020 global_get_addr_cache
= new hash_map
<rtx
, rtx
>;
10024 scratch_regs
= NULL
;
10025 global_get_addr_cache
= NULL
;
/* Pre-seed cselib with permanent equivalences between the CFA base
   register and the stack pointer (offset by the incoming CFA-to-SP
   delta), in both directions.  */
10028 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10034 #ifdef FRAME_POINTER_CFA_OFFSET
10035 reg
= frame_pointer_rtx
;
10036 ofst
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
10038 reg
= arg_pointer_rtx
;
10039 ofst
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
10042 ofst
-= INCOMING_FRAME_SP_OFFSET
;
10044 val
= cselib_lookup_from_insn (reg
, GET_MODE (reg
), 1,
10045 VOIDmode
, get_insns ());
10046 preserve_value (val
);
10047 if (reg
!= hard_frame_pointer_rtx
&& fixed_regs
[REGNO (reg
)])
10048 cselib_preserve_cfa_base_value (val
, REGNO (reg
));
10049 expr
= plus_constant (GET_MODE (stack_pointer_rtx
),
10050 stack_pointer_rtx
, -ofst
);
10051 cselib_add_permanent_equiv (val
, expr
, get_insns ());
10055 val
= cselib_lookup_from_insn (stack_pointer_rtx
,
10056 GET_MODE (stack_pointer_rtx
), 1,
10057 VOIDmode
, get_insns ());
10058 preserve_value (val
);
10059 expr
= plus_constant (GET_MODE (reg
), reg
, ofst
);
10060 cselib_add_permanent_equiv (val
, expr
, get_insns ());
10064 /* In order to factor out the adjustments made to the stack pointer or to
10065 the hard frame pointer and thus be able to use DW_OP_fbreg operations
10066 instead of individual location lists, we're going to rewrite MEMs based
10067 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10068 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10069 resp. arg_pointer_rtx. We can do this either when there is no frame
10070 pointer in the function and stack adjustments are consistent for all
10071 basic blocks or when there is a frame pointer and no stack realignment.
10072 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10073 has been eliminated. */
10074 if (!frame_pointer_needed
)
10078 if (!vt_stack_adjustments ())
10081 #ifdef FRAME_POINTER_CFA_OFFSET
10082 reg
= frame_pointer_rtx
;
10084 reg
= arg_pointer_rtx
;
10086 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10089 if (GET_CODE (elim
) == PLUS
)
10090 elim
= XEXP (elim
, 0);
10091 if (elim
== stack_pointer_rtx
)
10092 vt_init_cfa_base ();
/* Frame pointer present, no stack realignment: compute the constant
   offset from the hard frame pointer to the CFA (or -1 if the CFA
   register did not eliminate to the hard frame pointer).  */
10095 else if (!crtl
->stack_realign_tried
)
10099 #ifdef FRAME_POINTER_CFA_OFFSET
10100 reg
= frame_pointer_rtx
;
10101 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
10103 reg
= arg_pointer_rtx
;
10104 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
10106 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10109 if (GET_CODE (elim
) == PLUS
)
10111 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
10112 elim
= XEXP (elim
, 0);
10114 if (elim
!= hard_frame_pointer_rtx
)
10115 fp_cfa_offset
= -1;
10118 fp_cfa_offset
= -1;
10121 /* If the stack is realigned and a DRAP register is used, we're going to
10122 rewrite MEMs based on it representing incoming locations of parameters
10123 passed on the stack into MEMs based on the argument pointer. Although
10124 we aren't going to rewrite other MEMs, we still need to initialize the
10125 virtual CFA pointer in order to ensure that the argument pointer will
10126 be seen as a constant throughout the function.
10128 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10129 else if (stack_realign_drap
)
10133 #ifdef FRAME_POINTER_CFA_OFFSET
10134 reg
= frame_pointer_rtx
;
10136 reg
= arg_pointer_rtx
;
10138 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10141 if (GET_CODE (elim
) == PLUS
)
10142 elim
= XEXP (elim
, 0);
10143 if (elim
== hard_frame_pointer_rtx
)
10144 vt_init_cfa_base ();
10148 hard_frame_pointer_adjustment
= -1;
10150 vt_add_function_parameters ();
/* Main scan: for each basic block (grouped into maximal fallthru
   chains between first_bb and last_bb), walk its insns and record the
   micro-operations into VTI (bb)->mos.  */
10152 FOR_EACH_BB_FN (bb
, cfun
)
10155 HOST_WIDE_INT pre
, post
= 0;
10156 basic_block first_bb
, last_bb
;
10158 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10160 cselib_record_sets_hook
= add_with_sets
;
10161 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10162 fprintf (dump_file
, "first value: %i\n",
10163 cselib_get_next_uid ());
/* Extend the chain while the next block is a single-predecessor
   fallthru successor.  */
10170 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
10171 || ! single_pred_p (bb
->next_bb
))
10173 e
= find_edge (bb
, bb
->next_bb
);
10174 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
10180 /* Add the micro-operations to the vector. */
10181 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
10183 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
10184 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
10186 /* If we are walking the first basic block, walk any HEADER
10187 insns that might be before it too. Unfortunately,
10188 BB_HEADER and BB_FOOTER are not set while we run this
10191 bool outside_bb
= true;
10192 for (insn
= get_first_insn (bb
); insn
!= BB_HEAD (bb
->next_bb
);
10195 if (insn
== BB_HEAD (bb
))
10196 outside_bb
= false;
10197 else if (insn
== NEXT_INSN (BB_END (bb
)))
10199 next
= NEXT_INSN (insn
);
10204 /* Ignore non-debug insns outside of basic blocks. */
10205 if (!DEBUG_INSN_P (insn
))
10207 /* Debug binds shouldn't appear outside of bbs. */
10208 gcc_assert (!DEBUG_BIND_INSN_P (insn
));
/* Temporarily claim header/footer insns for BB so the recording
   hooks see a consistent BLOCK_FOR_INSN; restored below.  */
10210 basic_block save_bb
= BLOCK_FOR_INSN (insn
);
10211 if (!BLOCK_FOR_INSN (insn
))
10213 gcc_assert (outside_bb
);
10214 BLOCK_FOR_INSN (insn
) = bb
;
10217 gcc_assert (BLOCK_FOR_INSN (insn
) == bb
);
/* Pre-modifying stack adjustment becomes an MO_ADJUST emitted
   before the insn's own micro-operations.  */
10219 if (!frame_pointer_needed
)
10221 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
10224 micro_operation mo
;
10225 mo
.type
= MO_ADJUST
;
10228 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10229 log_op_type (PATTERN (insn
), bb
, insn
,
10230 MO_ADJUST
, dump_file
);
10231 VTI (bb
)->mos
.safe_push (mo
);
10232 VTI (bb
)->out
.stack_adjust
+= pre
;
10236 cselib_hook_called
= false;
10237 adjust_insn (bb
, insn
);
10238 if (DEBUG_MARKER_INSN_P (insn
))
10240 insn
= reemit_marker_as_note (insn
, &save_bb
);
10244 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10247 prepare_call_arguments (bb
, insn
);
10248 cselib_process_insn (insn
);
10249 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10251 print_rtl_single (dump_file
, insn
);
10252 dump_cselib_table (dump_file
);
/* If cselib did not invoke our sets hook, record manually.  */
10255 if (!cselib_hook_called
)
10256 add_with_sets (insn
, 0, 0);
10257 cancel_changes (0);
/* Post-modifying stack adjustment becomes an MO_ADJUST emitted
   after the insn's own micro-operations.  */
10259 if (!frame_pointer_needed
&& post
)
10261 micro_operation mo
;
10262 mo
.type
= MO_ADJUST
;
10263 mo
.u
.adjust
= post
;
10265 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10266 log_op_type (PATTERN (insn
), bb
, insn
,
10267 MO_ADJUST
, dump_file
);
10268 VTI (bb
)->mos
.safe_push (mo
);
10269 VTI (bb
)->out
.stack_adjust
+= post
;
/* Once the frame pointer is established by this insn, switch the
   CFA base over and break the sp<->fp cselib association.  */
10272 if (fp_cfa_offset
!= -1
10273 && hard_frame_pointer_adjustment
== -1
10274 && fp_setter_insn (insn
))
10276 vt_init_cfa_base ();
10277 hard_frame_pointer_adjustment
= fp_cfa_offset
;
10278 /* Disassociate sp from fp now. */
10279 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10282 cselib_invalidate_rtx (stack_pointer_rtx
);
10283 v
= cselib_lookup (stack_pointer_rtx
, Pmode
, 1,
10285 if (v
&& !cselib_preserved_value_p (v
))
10287 cselib_set_value_sp_based (v
);
10288 preserve_value (v
);
10292 BLOCK_FOR_INSN (insn
) = save_bb
;
/* The scan must reproduce exactly the OUT stack adjustment that
   vt_stack_adjustments computed.  */
10295 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
10300 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10302 cselib_preserve_only_values ();
10303 cselib_reset_table (cselib_get_next_uid ());
10304 cselib_record_sets_hook
= NULL
;
10308 hard_frame_pointer_adjustment
= -1;
10309 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->flooded
= true;
10310 cfa_base_rtx
= NULL_RTX
;
10314 /* This is *not* reset after each function. It gives each
10315 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10316 a unique label number. */
10318 static int debug_label_num
= 1;
10320 /* Remove from the insn stream all debug insns used for variable
10321 tracking at assignments. */
/* NOTE(review): the return type, braces and several control-flow lines
   are elided from this extract (gaps in the embedded numbering, e.g.
   10328, 10337, 10343-10345) -- verify against upstream.  */
10324 delete_vta_debug_insns (void)
10327 rtx_insn
*insn
, *next
;
/* Nothing to do when the function can contain no debug insns.  */
10329 if (!MAY_HAVE_DEBUG_INSNS
)
10332 FOR_EACH_BB_FN (bb
, cfun
)
/* NEXT is captured before INSN may be deleted/rewritten below, so
   iteration survives removal of the current insn.  */
10334 for (insn
= get_first_insn (bb
);
10335 insn
!= BB_HEAD (bb
->next_bb
)
10336 ? next
= NEXT_INSN (insn
), true : false;
10338 if (DEBUG_INSN_P (insn
))
/* Markers are converted to notes instead of being dropped.  */
10340 if (DEBUG_MARKER_INSN_P (insn
))
10342 insn
= reemit_marker_as_note (insn
, NULL
);
/* A bind of a named label decl without DECL_RTL is turned in place
   into a NOTE_INSN_DELETED_DEBUG_LABEL carrying the label's name and
   a compilation-wide unique number.  */
10346 tree decl
= INSN_VAR_LOCATION_DECL (insn
);
10347 if (TREE_CODE (decl
) == LABEL_DECL
10348 && DECL_NAME (decl
)
10349 && !DECL_RTL_SET_P (decl
))
10351 PUT_CODE (insn
, NOTE
);
10352 NOTE_KIND (insn
) = NOTE_INSN_DELETED_DEBUG_LABEL
;
10353 NOTE_DELETED_LABEL_NAME (insn
)
10354 = IDENTIFIER_POINTER (DECL_NAME (decl
));
10355 SET_DECL_RTL (decl
, insn
);
10356 CODE_LABEL_NUMBER (insn
) = debug_label_num
++;
10359 delete_insn (insn
);
10364 /* Run a fast, BB-local only version of var tracking, to take care of
10365 information that we don't do global analysis on, such that not all
10366 information is lost. If SKIPPED holds, we're skipping the global
10367 pass entirely, so we should try to use information it would have
10368 handled as well.. */
/* Currently a stub: it simply drops all VTA debug insns (SKIPPED is
   unused, hence ATTRIBUTE_UNUSED).  */
10371 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
10373 /* ??? Just skip it all for now. */
10374 delete_vta_debug_insns ();
10377 /* Free the data structures needed for variable tracking. */
/* NOTE(review): the function's signature (presumably vt_finalize) is
   elided from this extract along with several braces and the cselib
   teardown lines (gaps 10378-10383, 10420-10421, 10424-10431) --
   verify against upstream.  Releases everything vt_initialize
   allocated: per-bb micro-op vectors and dataflow sets, the shared
   hash tables, the object pools, and the debug-bind caches.  */
10384 FOR_EACH_BB_FN (bb
, cfun
)
10386 VTI (bb
)->mos
.release ();
10389 FOR_ALL_BB_FN (bb
, cfun
)
10391 dataflow_set_destroy (&VTI (bb
)->in
);
10392 dataflow_set_destroy (&VTI (bb
)->out
);
/* permp is only allocated on demand, hence the null check.  */
10393 if (VTI (bb
)->permp
)
10395 dataflow_set_destroy (VTI (bb
)->permp
);
10396 XDELETE (VTI (bb
)->permp
);
10399 free_aux_for_blocks ();
10400 delete empty_shared_hash
->htab
;
10401 empty_shared_hash
->htab
= NULL
;
10402 delete changed_variables
;
10403 changed_variables
= NULL
;
10404 attrs_pool
.release ();
10405 var_pool
.release ();
10406 location_chain_pool
.release ();
10407 shared_hash_pool
.release ();
/* These were only allocated when debug-bind insns were possible;
   mirror of the conditional setup in vt_initialize.  */
10409 if (MAY_HAVE_DEBUG_BIND_INSNS
)
10411 if (global_get_addr_cache
)
10412 delete global_get_addr_cache
;
10413 global_get_addr_cache
= NULL
;
10414 loc_exp_dep_pool
.release ();
10415 valvar_pool
.release ();
10416 preserved_values
.release ();
10418 BITMAP_FREE (scratch_regs
);
10419 scratch_regs
= NULL
;
10422 #ifdef HAVE_window_save
10423 vec_free (windowed_parm_regs
);
10427 XDELETEVEC (vui_vec
);
10432 /* The entry point to variable tracking pass. */
/* Orchestrates the whole pass: bail-out checks, vt_initialize,
   vt_find_locations (with a one-shot retry with VTA disabled on
   failure), note emission, and cleanup.  NOTE(review): braces, the
   `return` statements, the vt_finalize/vt_emit_notes calls and some
   intermediate lines are elided from this extract (gaps in the
   embedded numbering) -- verify against upstream.  */
10434 static inline unsigned int
10435 variable_tracking_main_1 (void)
10439 /* We won't be called as a separate pass if flag_var_tracking is not
10440 set, but final may call us to turn debug markers into notes. */
10441 if ((!flag_var_tracking
&& MAY_HAVE_DEBUG_INSNS
)
10442 || flag_var_tracking_assignments
< 0
10443 /* Var-tracking right now assumes the IR doesn't contain
10444 any pseudos at this point. */
10445 || targetm
.no_register_allocation
)
10447 delete_vta_debug_insns ();
10451 if (!flag_var_tracking
)
/* Size cut-off: very large, very dense CFGs skip the global analysis
   and run only the local fallback.  */
10454 if (n_basic_blocks_for_fn (cfun
) > 500
10455 && n_edges_for_fn (cfun
) / n_basic_blocks_for_fn (cfun
) >= 20)
10457 vt_debug_insns_local (true);
10461 mark_dfs_back_edges ();
10462 if (!vt_initialize ())
10465 vt_debug_insns_local (true);
10469 success
= vt_find_locations ();
/* If the dataflow analysis gave up and VTA was on, retry once with
   assignment tracking disabled (the debug insns are deleted first).  */
10471 if (!success
&& flag_var_tracking_assignments
> 0)
10475 delete_vta_debug_insns ();
10477 /* This is later restored by our caller. */
10478 flag_var_tracking_assignments
= 0;
10480 success
= vt_initialize ();
10481 gcc_assert (success
);
10483 success
= vt_find_locations ();
10489 vt_debug_insns_local (false);
10493 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10495 dump_dataflow_sets ();
10496 dump_reg_info (dump_file
);
10497 dump_flow_info (dump_file
, dump_flags
);
/* Note emission is timed separately (the emitting call between push
   and pop is elided from this extract).  */
10500 timevar_push (TV_VAR_TRACKING_EMIT
);
10502 timevar_pop (TV_VAR_TRACKING_EMIT
);
10505 vt_debug_insns_local (false);
/* Wrapper around variable_tracking_main_1 that saves and restores
   flag_var_tracking_assignments, which the worker may clear on its
   retry path.  NOTE(review): the return type line and the `return ret;`
   are elided from this extract.  */
10510 variable_tracking_main (void)
10513 int save
= flag_var_tracking_assignments
;
10515 ret
= variable_tracking_main_1 ();
10517 flag_var_tracking_assignments
= save
;
/* Static pass descriptor for the "vartrack" RTL pass; consumed by the
   pass_variable_tracking class below.  */
10524 const pass_data pass_data_variable_tracking
=
10526 RTL_PASS
, /* type */
10527 "vartrack", /* name */
10528 OPTGROUP_NONE
, /* optinfo_flags */
10529 TV_VAR_TRACKING
, /* tv_id */
10530 0, /* properties_required */
10531 0, /* properties_provided */
10532 0, /* properties_destroyed */
10533 0, /* todo_flags_start */
10534 0, /* todo_flags_finish */
/* Pass wrapper: gates on -fvar-tracking unless the target defers the
   pass (targetm.delay_vartrack), and dispatches to
   variable_tracking_main.  */
10537 class pass_variable_tracking
: public rtl_opt_pass
10540 pass_variable_tracking (gcc::context
*ctxt
)
10541 : rtl_opt_pass (pass_data_variable_tracking
, ctxt
)
10544 /* opt_pass methods: */
10545 virtual bool gate (function
*)
10547 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
10550 virtual unsigned int execute (function
*)
10552 return variable_tracking_main ();
10555 }; // class pass_variable_tracking
10557 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.
   NOTE(review): the `rtl_opt_pass *` return-type line is elided from
   this extract.  */
10560 make_pass_variable_tracking (gcc::context
*ctxt
)
10562 return new pass_variable_tracking (ctxt
);