5923a5e7 1/* Variable tracking routines for the GNU compiler.
d1a21c35 2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
cfaf579d 3 Free Software Foundation, Inc.
5923a5e7 4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
8c4c00c1 9 the Free Software Foundation; either version 3, or (at your option)
5923a5e7 10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
8c4c00c1 18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
5923a5e7 20
21/* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
28
29 How does the variable tracking pass work?
30
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
8ee59e4e 37 clobber < set < post-modifying stack adjustment
5923a5e7 38
39 Then, a forward dataflow analysis is performed to find out how the locations
 40 of variables change through the code and to propagate the variable locations
 41 along the control flow graph.
 42 The IN set for basic block BB is computed as the union of the OUT sets of BB's
 43 predecessors; the OUT set for BB is copied from the IN set for BB and is then
 44 changed according to the micro operations in BB.
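   Schematically:
     IN(BB)  = union of OUT(P) over all predecessors P of BB
     OUT(BB) = result of applying BB's micro operations to IN(BB)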
45
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
 47 (used for adjusting the offset of variables addressed using the stack pointer),
 48 the table of structures describing the locations of parts of a variable,
 49 and, for each physical register, a linked list of attributes.
 50 The linked list is a list of variable parts stored in the register,
 51 i.e. it is a list of triplets (reg, decl, offset) where decl is
 52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
 53 effectively deleting the appropriate variable parts when we set or clobber the
 54 register.
55
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example, in the following code the register allocator may assign the same
 59 register to variables A and B, and both of them are stored in the same
 60 register in CODE:
61
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
71
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted at appropriate positions in the RTL code. Each such note describes
 74 the location of one variable at the point in the instruction stream where the
 75 note is. There is no need to emit a note for each variable before each
 76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
79
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
86
87*/
88
89#include "config.h"
90#include "system.h"
91#include "coretypes.h"
92#include "tm.h"
93#include "rtl.h"
94#include "tree.h"
58869e7f 95#include "tm_p.h"
5923a5e7 96#include "hard-reg-set.h"
97#include "basic-block.h"
98#include "flags.h"
5923a5e7 99#include "insn-config.h"
100#include "reload.h"
101#include "sbitmap.h"
102#include "alloc-pool.h"
103#include "fibheap.h"
104#include "hashtab.h"
4719779b 105#include "regs.h"
106#include "expr.h"
77fce4cd 107#include "timevar.h"
108#include "tree-pass.h"
0358713f 109#include "tree-flow.h"
9845d120 110#include "cselib.h"
111#include "target.h"
76f4ab1c 112#include "params.h"
b4d01854 113#include "diagnostic.h"
ce084dfc 114#include "tree-pretty-print.h"
bc95df68 115#include "pointer-set.h"
35af0188 116#include "recog.h"
4143d08b 117#include "tm_p.h"
c573c1b5 118#include "alias.h"
5923a5e7 119
65683f79 120/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
126
5923a5e7 127/* Type of micro operation. */
128enum micro_operation_type
129{
130 MO_USE, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR,/* Use location which is not associated with a variable
132 or the variable is not trackable. */
9845d120 133 MO_VAL_USE, /* Use location which is associated with a value. */
134 MO_VAL_LOC, /* Use location which appears in a debug insn. */
135 MO_VAL_SET, /* Set location associated with a value. */
5923a5e7 136 MO_SET, /* Set location. */
96414f01 137 MO_COPY, /* Copy the same portion of a variable from one
a8b99ef6 138 location to another. */
5923a5e7 139 MO_CLOBBER, /* Clobber location. */
140 MO_CALL, /* Call insn. */
7c2df241 141 MO_ADJUST /* Adjust stack pointer. */
9845d120 142
143};
144
145static const char * const ATTRIBUTE_UNUSED
146micro_operation_type_name[] = {
147 "MO_USE",
148 "MO_USE_NO_VAR",
149 "MO_VAL_USE",
150 "MO_VAL_LOC",
151 "MO_VAL_SET",
152 "MO_SET",
153 "MO_COPY",
154 "MO_CLOBBER",
155 "MO_CALL",
156 "MO_ADJUST"
5923a5e7 157};
158
9845d120 159/* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
5923a5e7 162enum emit_note_where
163{
164 EMIT_NOTE_BEFORE_INSN,
9845d120 165 EMIT_NOTE_AFTER_INSN,
166 EMIT_NOTE_AFTER_CALL_INSN
5923a5e7 167};
168
169/* Structure holding information about micro operation. */
170typedef struct micro_operation_def
171{
172 /* Type of micro operation. */
173 enum micro_operation_type type;
174
c77c64d8 175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
179 and MO_CLOBBER. */
180 rtx insn;
181
5923a5e7 182 union {
9845d120 183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
5923a5e7 189 rtx loc;
190
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
193 } u;
5923a5e7 194} micro_operation;
195
c77c64d8 196DEF_VEC_O(micro_operation);
197DEF_VEC_ALLOC_O(micro_operation,heap);
198
9845d120 199/* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201typedef void *decl_or_value;
202
5923a5e7 203/* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205typedef struct emit_note_data_def
206{
207 /* The instruction which the note will be emitted before/after. */
208 rtx insn;
209
210 /* Where will the note be emitted (before/after insn)? */
211 enum emit_note_where where;
9845d120 212
213 /* The variables and values active at this point. */
214 htab_t vars;
5923a5e7 215} emit_note_data;
216
217/* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 a chain is the best data structure. */
221typedef struct attrs_def
222{
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
225
226 /* The rtx of register. */
227 rtx loc;
228
229 /* The declaration corresponding to LOC. */
9845d120 230 decl_or_value dv;
5923a5e7 231
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
234} *attrs;
235
a8f6ad2b 236/* Structure holding a refcounted hash table. If refcount > 1,
237 it must first be unshared before being modified. */
238typedef struct shared_hash_def
239{
240 /* Reference count. */
241 int refcount;
242
243 /* Actual hash table. */
244 htab_t htab;
245} *shared_hash;
246
5923a5e7 247/* Structure holding the IN or OUT set for a basic block. */
248typedef struct dataflow_set_def
249{
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
252
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
255
256 /* Variable locations. */
a8f6ad2b 257 shared_hash vars;
9845d120 258
259 /* Vars that are being traversed. */
260 shared_hash traversed_vars;
5923a5e7 261} dataflow_set;
262
263/* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265typedef struct variable_tracking_info_def
266{
c77c64d8 267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
5923a5e7 269
270 /* The IN and OUT set for dataflow analysis. */
271 dataflow_set in;
272 dataflow_set out;
273
9845d120 274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
278 dataflow_set *permp;
279
5923a5e7 280 /* Has the block been visited in DFS? */
281 bool visited;
9845d120 282
283 /* Has the block been flooded in VTA? */
284 bool flooded;
285
5923a5e7 286} *variable_tracking_info;
287
288/* Structure for chaining the locations. */
289typedef struct location_chain_def
290{
291 /* Next element in the chain. */
292 struct location_chain_def *next;
293
9845d120 294 /* The location (REG, MEM or VALUE). */
5923a5e7 295 rtx loc;
d53bb226 296
297 /* The "value" stored in this location. */
298 rtx set_src;
299
300 /* Initialized? */
301 enum var_init_status init;
5923a5e7 302} *location_chain;
303
72fdb379 304/* A vector of loc_exp_dep holds the active dependencies of a one-part
305 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
306 location of DV. Each entry is also part of VALUE's linked list of
307 backlinks back to DV. */
308typedef struct loc_exp_dep_s
309{
310 /* The dependent DV. */
311 decl_or_value dv;
312 /* The dependency VALUE or DECL_DEBUG. */
313 rtx value;
314 /* The next entry in VALUE's backlinks list. */
315 struct loc_exp_dep_s *next;
316 /* A pointer to the pointer to this entry (head or prev's next) in
317 the doubly-linked list. */
318 struct loc_exp_dep_s **pprev;
319} loc_exp_dep;
320
321DEF_VEC_O (loc_exp_dep);
322
11643610 323/* This data structure holds information about the depth of a variable
324 expansion. */
325typedef struct expand_depth_struct
326{
327 /* This measures the complexity of the expanded expression. It
328 grows by one for each level of expansion that adds more than one
329 operand. */
330 int complexity;
331 /* This counts the number of ENTRY_VALUE expressions in an
332 expansion. We want to minimize their use. */
333 int entryvals;
334} expand_depth;
335
72fdb379 336/* This data structure is allocated for one-part variables at the time
337 of emitting notes. */
338struct onepart_aux
339{
340 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
341 computation used the expansion of this variable, and that ought
342 to be notified should this variable change. If the DV's cur_loc
343 expanded to NULL, all components of the loc list are regarded as
344 active, so that any changes in them give us a chance to get a
345 location. Otherwise, only components of the loc that expanded to
346 non-NULL are regarded as active dependencies. */
347 loc_exp_dep *backlinks;
348 /* This holds the LOC that was expanded into cur_loc. We need only
349 mark a one-part variable as changed if the FROM loc is removed,
350 or if it has no known location and a loc is added, or if it gets
351 a change notification from any of its active dependencies. */
352 rtx from;
353 /* The depth of the cur_loc expression. */
11643610 354 expand_depth depth;
72fdb379 355 /* Dependencies actively used when expanding FROM into cur_loc. */
356 VEC (loc_exp_dep, none) deps;
357};
358
5923a5e7 359/* Structure describing one part of a variable. */
360typedef struct variable_part_def
361{
362 /* Chain of locations of the part. */
363 location_chain loc_chain;
364
365 /* Location which was last emitted to location list. */
366 rtx cur_loc;
367
72fdb379 368 union variable_aux
369 {
370 /* The offset in the variable, if !var->onepart. */
371 HOST_WIDE_INT offset;
372
373 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
374 struct onepart_aux *onepaux;
375 } aux;
5923a5e7 376} variable_part;
377
378/* Maximum number of location parts. */
379#define MAX_VAR_PARTS 16
380
72fdb379 381/* Enumeration type used to discriminate various types of one-part
382 variables. */
383typedef enum onepart_enum
384{
385 /* Not a one-part variable. */
386 NOT_ONEPART = 0,
387 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
388 ONEPART_VDECL = 1,
389 /* A DEBUG_EXPR_DECL. */
390 ONEPART_DEXPR = 2,
391 /* A VALUE. */
392 ONEPART_VALUE = 3
393} onepart_enum_t;
394
5923a5e7 395/* Structure describing where the variable is located. */
396typedef struct variable_def
397{
9845d120 398 /* The declaration of the variable, or an RTL value being handled
399 like a declaration. */
400 decl_or_value dv;
5923a5e7 401
85bbdb3f 402 /* Reference count. */
403 int refcount;
404
5923a5e7 405 /* Number of variable parts. */
bc95df68 406 char n_var_parts;
407
72fdb379 408 /* What type of DV this is, according to enum onepart_enum. */
409 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
bc95df68 410
411 /* True if this variable_def struct is currently in the
412 changed_variables hash table. */
413 bool in_changed_variables;
5923a5e7 414
415 /* The variable parts. */
9845d120 416 variable_part var_part[1];
5923a5e7 417} *variable;
aae87fc3 418typedef const struct variable_def *const_variable;
5923a5e7 419
5923a5e7 420/* Pointer to the BB's information specific to variable tracking pass. */
421#define VTI(BB) ((variable_tracking_info) (BB)->aux)
422
eeb0ae23 423/* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
da443c27 424#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
eeb0ae23 425
72fdb379 426#if ENABLE_CHECKING && (GCC_VERSION >= 2007)
427
428/* Access VAR's Ith part's offset, checking that it's not a one-part
429 variable. */
430#define VAR_PART_OFFSET(var, i) __extension__ \
431(*({ variable const __v = (var); \
432 gcc_checking_assert (!__v->onepart); \
433 &__v->var_part[(i)].aux.offset; }))
434
435/* Access VAR's one-part auxiliary data, checking that it is a
436 one-part variable. */
437#define VAR_LOC_1PAUX(var) __extension__ \
438(*({ variable const __v = (var); \
439 gcc_checking_assert (__v->onepart); \
440 &__v->var_part[0].aux.onepaux; }))
441
442#else
443#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
444#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
445#endif
446
447/* These are accessor macros for the one-part auxiliary data. When
448 convenient for users, they're guarded by tests that the data was
449 allocated. */
450#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
451 ? VAR_LOC_1PAUX (var)->backlinks \
452 : NULL)
453#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
454 ? &VAR_LOC_1PAUX (var)->backlinks \
455 : NULL)
456#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
457#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
458#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
459 ? &VAR_LOC_1PAUX (var)->deps \
460 : NULL)
461
5923a5e7 462/* Alloc pool for struct attrs_def. */
463static alloc_pool attrs_pool;
464
9845d120 465/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
5923a5e7 466static alloc_pool var_pool;
467
9845d120 468/* Alloc pool for struct variable_def with a single var_part entry. */
469static alloc_pool valvar_pool;
470
5923a5e7 471/* Alloc pool for struct location_chain_def. */
472static alloc_pool loc_chain_pool;
473
a8f6ad2b 474/* Alloc pool for struct shared_hash_def. */
475static alloc_pool shared_hash_pool;
476
df226854 477/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
478static alloc_pool loc_exp_dep_pool;
479
5923a5e7 480/* Changed variables, notes will be emitted for them. */
481static htab_t changed_variables;
482
483/* Shall notes be emitted? */
484static bool emit_notes;
485
72fdb379 486/* Values whose dynamic location lists have gone empty, but whose
487 cselib location lists are still usable. Use this to hold the
488 current location, the backlinks, etc, during emit_notes. */
489static htab_t dropped_values;
490
a8f6ad2b 491/* Empty shared hashtable. */
492static shared_hash empty_shared_hash;
493
9845d120 494/* Scratch register bitmap used by cselib_expand_value_rtx. */
495static bitmap scratch_regs = NULL;
496
72fdb379 497#ifdef HAVE_window_save
8ee59e4e 498typedef struct GTY(()) parm_reg {
499 rtx outgoing;
500 rtx incoming;
501} parm_reg_t;
502
503DEF_VEC_O(parm_reg_t);
504DEF_VEC_ALLOC_O(parm_reg_t, gc);
505
506/* Vector of windowed parameter registers, if any. */
507static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
72fdb379 508#endif
8ee59e4e 509
9845d120 510/* Variable used to tell whether cselib_process_insn called our hook. */
511static bool cselib_hook_called;
512
5923a5e7 513/* Local function prototypes. */
514static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
515 HOST_WIDE_INT *);
516static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
517 HOST_WIDE_INT *);
5923a5e7 518static bool vt_stack_adjustments (void);
5923a5e7 519static hashval_t variable_htab_hash (const void *);
520static int variable_htab_eq (const void *, const void *);
521static void variable_htab_free (void *);
522
523static void init_attrs_list_set (attrs *);
524static void attrs_list_clear (attrs *);
9845d120 525static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
526static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
5923a5e7 527static void attrs_list_copy (attrs *, attrs);
528static void attrs_list_union (attrs *, attrs);
529
9845d120 530static void **unshare_variable (dataflow_set *set, void **slot, variable var,
531 enum var_init_status);
5923a5e7 532static void vars_copy (htab_t, htab_t);
96414f01 533static tree var_debug_decl (tree);
d53bb226 534static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
48e1416a 535static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
d53bb226 536 enum var_init_status, rtx);
96414f01 537static void var_reg_delete (dataflow_set *, rtx, bool);
5923a5e7 538static void var_regno_delete (dataflow_set *, int);
d53bb226 539static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
48e1416a 540static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
d53bb226 541 enum var_init_status, rtx);
96414f01 542static void var_mem_delete (dataflow_set *, rtx, bool);
5923a5e7 543
a8f6ad2b 544static void dataflow_set_init (dataflow_set *);
5923a5e7 545static void dataflow_set_clear (dataflow_set *);
546static void dataflow_set_copy (dataflow_set *, dataflow_set *);
547static int variable_union_info_cmp_pos (const void *, const void *);
5923a5e7 548static void dataflow_set_union (dataflow_set *, dataflow_set *);
9845d120 549static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
550static bool canon_value_cmp (rtx, rtx);
551static int loc_cmp (rtx, rtx);
5923a5e7 552static bool variable_part_different_p (variable_part *, variable_part *);
9845d120 553static bool onepart_variable_different_p (variable, variable);
bc95df68 554static bool variable_different_p (variable, variable);
5923a5e7 555static bool dataflow_set_different (dataflow_set *, dataflow_set *);
556static void dataflow_set_destroy (dataflow_set *);
557
558static bool contains_symbol_ref (rtx);
9845d120 559static bool track_expr_p (tree, bool);
96414f01 560static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
5923a5e7 561static int add_uses (rtx *, void *);
562static void add_uses_1 (rtx *, void *);
81a410b1 563static void add_stores (rtx, const_rtx, void *);
5923a5e7 564static bool compute_bb_dataflow (basic_block);
76f4ab1c 565static bool vt_find_locations (void);
5923a5e7 566
567static void dump_attrs_list (attrs);
0358713f 568static int dump_var_slot (void **, void *);
569static void dump_var (variable);
5923a5e7 570static void dump_vars (htab_t);
571static void dump_dataflow_set (dataflow_set *);
572static void dump_dataflow_sets (void);
573
72fdb379 574static void set_dv_changed (decl_or_value, bool);
a8f6ad2b 575static void variable_was_changed (variable, dataflow_set *);
9845d120 576static void **set_slot_part (dataflow_set *, rtx, void **,
577 decl_or_value, HOST_WIDE_INT,
578 enum var_init_status, rtx);
579static void set_variable_part (dataflow_set *, rtx,
580 decl_or_value, HOST_WIDE_INT,
581 enum var_init_status, rtx, enum insert_option);
582static void **clobber_slot_part (dataflow_set *, rtx,
583 void **, HOST_WIDE_INT, rtx);
584static void clobber_variable_part (dataflow_set *, rtx,
585 decl_or_value, HOST_WIDE_INT, rtx);
586static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
587static void delete_variable_part (dataflow_set *, rtx,
588 decl_or_value, HOST_WIDE_INT);
5923a5e7 589static int emit_note_insn_var_location (void **, void *);
9845d120 590static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
5923a5e7 591static int emit_notes_for_differences_1 (void **, void *);
592static int emit_notes_for_differences_2 (void **, void *);
593static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
9845d120 594static void emit_notes_in_bb (basic_block, dataflow_set *);
5923a5e7 595static void vt_emit_notes (void);
596
597static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
598static void vt_add_function_parameters (void);
35af0188 599static bool vt_initialize (void);
5923a5e7 600static void vt_finalize (void);
601
602/* Given a SET, calculate the amount of stack adjustment it contains,
 603 both PRE- and POST-modifying the stack pointer.
 604 This function is similar to stack_adjust_offset. */
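/* For instance, reading off the cases handled below:
     (set (reg sp) (plus (reg sp) (const_int -16)))
   adds 16 to *POST, while
     (set (mem (pre_dec (reg sp))) (reg X))
   adds the size of the pushed mode to *PRE; a positive adjustment therefore
   means the stack pointer has moved down, i.e. the stack has grown.  */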
605
606static void
607stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
608 HOST_WIDE_INT *post)
609{
610 rtx src = SET_SRC (pattern);
611 rtx dest = SET_DEST (pattern);
612 enum rtx_code code;
613
614 if (dest == stack_pointer_rtx)
615 {
616 /* (set (reg sp) (plus (reg sp) (const_int))) */
617 code = GET_CODE (src);
618 if (! (code == PLUS || code == MINUS)
619 || XEXP (src, 0) != stack_pointer_rtx
971ba038 620 || !CONST_INT_P (XEXP (src, 1)))
5923a5e7 621 return;
622
623 if (code == MINUS)
624 *post += INTVAL (XEXP (src, 1));
625 else
626 *post -= INTVAL (XEXP (src, 1));
627 }
e16ceb8e 628 else if (MEM_P (dest))
5923a5e7 629 {
630 /* (set (mem (pre_dec (reg sp))) (foo)) */
631 src = XEXP (dest, 0);
632 code = GET_CODE (src);
633
634 switch (code)
635 {
636 case PRE_MODIFY:
637 case POST_MODIFY:
638 if (XEXP (src, 0) == stack_pointer_rtx)
639 {
640 rtx val = XEXP (XEXP (src, 1), 1);
641 /* We handle only adjustments by constant amount. */
22167fd5 642 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
971ba038 643 CONST_INT_P (val));
48e1416a 644
5923a5e7 645 if (code == PRE_MODIFY)
646 *pre -= INTVAL (val);
647 else
648 *post -= INTVAL (val);
649 break;
650 }
651 return;
652
653 case PRE_DEC:
654 if (XEXP (src, 0) == stack_pointer_rtx)
655 {
656 *pre += GET_MODE_SIZE (GET_MODE (dest));
657 break;
658 }
659 return;
660
661 case POST_DEC:
662 if (XEXP (src, 0) == stack_pointer_rtx)
663 {
664 *post += GET_MODE_SIZE (GET_MODE (dest));
665 break;
666 }
667 return;
668
669 case PRE_INC:
670 if (XEXP (src, 0) == stack_pointer_rtx)
671 {
672 *pre -= GET_MODE_SIZE (GET_MODE (dest));
673 break;
674 }
675 return;
676
677 case POST_INC:
678 if (XEXP (src, 0) == stack_pointer_rtx)
679 {
680 *post -= GET_MODE_SIZE (GET_MODE (dest));
681 break;
682 }
683 return;
684
685 default:
686 return;
687 }
688 }
689}
690
691/* Given an INSN, calculate the amount of stack adjustment it contains,
 692 both PRE- and POST-modifying the stack pointer. */
693
694static void
695insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
696 HOST_WIDE_INT *post)
697{
7acb838b 698 rtx pattern;
699
5923a5e7 700 *pre = 0;
701 *post = 0;
702
7acb838b 703 pattern = PATTERN (insn);
704 if (RTX_FRAME_RELATED_P (insn))
705 {
706 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
707 if (expr)
708 pattern = XEXP (expr, 0);
709 }
710
711 if (GET_CODE (pattern) == SET)
712 stack_adjust_offset_pre_post (pattern, pre, post);
713 else if (GET_CODE (pattern) == PARALLEL
714 || GET_CODE (pattern) == SEQUENCE)
5923a5e7 715 {
716 int i;
717
718 /* There may be stack adjustments inside compound insns. Search
719 for them. */
7acb838b 720 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
721 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
722 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
5923a5e7 723 }
724}
725
5923a5e7 726/* Compute stack adjustments for all blocks by traversing the DFS tree.
727 Return true when the adjustments on all incoming edges are consistent.
6180f28d 728 Heavily borrowed from pre_and_rev_post_order_compute. */
5923a5e7 729
730static bool
731vt_stack_adjustments (void)
732{
cd665a06 733 edge_iterator *stack;
5923a5e7 734 int sp;
735
1fba1f6c 736 /* Initialize entry block. */
5923a5e7 737 VTI (ENTRY_BLOCK_PTR)->visited = true;
35af0188 738 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
12d886b8 739 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
5923a5e7 740
741 /* Allocate stack for back-tracking up CFG. */
4c36ffe6 742 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
5923a5e7 743 sp = 0;
744
745 /* Push the first edge on to the stack. */
cd665a06 746 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
5923a5e7 747
748 while (sp)
749 {
cd665a06 750 edge_iterator ei;
5923a5e7 751 basic_block src;
752 basic_block dest;
753
754 /* Look at the edge on the top of the stack. */
cd665a06 755 ei = stack[sp - 1];
756 src = ei_edge (ei)->src;
757 dest = ei_edge (ei)->dest;
5923a5e7 758
759 /* Check if the edge destination has been visited yet. */
760 if (!VTI (dest)->visited)
761 {
35af0188 762 rtx insn;
763 HOST_WIDE_INT pre, post, offset;
5923a5e7 764 VTI (dest)->visited = true;
35af0188 765 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
766
767 if (dest != EXIT_BLOCK_PTR)
768 for (insn = BB_HEAD (dest);
769 insn != NEXT_INSN (BB_END (dest));
770 insn = NEXT_INSN (insn))
72fdb379 771 if (INSN_P (insn))
772 {
773 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
774 offset += pre + post;
775 }
35af0188 776
777 VTI (dest)->out.stack_adjust = offset;
5923a5e7 778
cd665a06 779 if (EDGE_COUNT (dest->succs) > 0)
5923a5e7 780 /* Since the DEST node has been visited for the first
781 time, check its successors. */
cd665a06 782 stack[sp++] = ei_start (dest->succs);
5923a5e7 783 }
784 else
785 {
786 /* Check whether the adjustments on the edges are the same. */
787 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
788 {
789 free (stack);
790 return false;
791 }
792
cd665a06 793 if (! ei_one_before_end_p (ei))
5923a5e7 794 /* Go to the next edge. */
cd665a06 795 ei_next (&stack[sp - 1]);
5923a5e7 796 else
797 /* Return to previous level if there are no more edges. */
798 sp--;
799 }
800 }
801
802 free (stack);
803 return true;
804}
805
4afc3056 806/* The register (arg_pointer_rtx or frame_pointer_rtx) that stack_pointer_rtx or
 807 hard_frame_pointer_rtx is mapped to, and the offset used for the mapping. */
808static rtx cfa_base_rtx;
809static HOST_WIDE_INT cfa_base_offset;
810
c69a7a6b 811/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
812 or hard_frame_pointer_rtx. */
5923a5e7 813
4afc3056 814static inline rtx
35af0188 815compute_cfa_pointer (HOST_WIDE_INT adjustment)
5923a5e7 816{
29c05e22 817 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
35af0188 818}
819
820/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
821 or -1 if the replacement shouldn't be done. */
822static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
823
824/* Data for adjust_mems callback. */
825
826struct adjust_mem_data
827{
828 bool store;
829 enum machine_mode mem_mode;
830 HOST_WIDE_INT stack_adjust;
831 rtx side_effects;
832};
833
910193a5 834/* Helper for adjust_mems. Return 1 if *loc is unsuitable for
835 transformation of wider mode arithmetic to narrower mode,
836 -1 if it is suitable and subexpressions shouldn't be
837 traversed and 0 if it is suitable and subexpressions should
838 be traversed. Called through for_each_rtx. */
839
840static int
841use_narrower_mode_test (rtx *loc, void *data)
842{
843 rtx subreg = (rtx) data;
844
845 if (CONSTANT_P (*loc))
846 return -1;
847 switch (GET_CODE (*loc))
848 {
849 case REG:
1f864115 850 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
910193a5 851 return 1;
ca88a6ea 852 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
853 *loc, subreg_lowpart_offset (GET_MODE (subreg),
854 GET_MODE (*loc))))
855 return 1;
910193a5 856 return -1;
857 case PLUS:
858 case MINUS:
859 case MULT:
860 return 0;
861 case ASHIFT:
862 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
863 return 1;
864 else
865 return -1;
866 default:
867 return 1;
868 }
869}
870
871/* Transform X into narrower mode MODE from wider mode WMODE. */
872
873static rtx
874use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
875{
876 rtx op0, op1;
877 if (CONSTANT_P (x))
878 return lowpart_subreg (mode, x, wmode);
879 switch (GET_CODE (x))
880 {
881 case REG:
882 return lowpart_subreg (mode, x, wmode);
883 case PLUS:
884 case MINUS:
885 case MULT:
886 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
887 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
888 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
889 case ASHIFT:
890 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
891 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
892 default:
893 gcc_unreachable ();
894 }
895}
896
35af0188 897/* Helper function for adjusting used MEMs. */
898
899static rtx
900adjust_mems (rtx loc, const_rtx old_rtx, void *data)
901{
902 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
903 rtx mem, addr = loc, tem;
904 enum machine_mode mem_mode_save;
905 bool store_save;
906 switch (GET_CODE (loc))
907 {
908 case REG:
4573d576 909 /* Don't do any sp or fp replacements outside of MEM addresses
910 on the LHS. */
911 if (amd->mem_mode == VOIDmode && amd->store)
35af0188 912 return loc;
913 if (loc == stack_pointer_rtx
4afc3056 914 && !frame_pointer_needed
915 && cfa_base_rtx)
35af0188 916 return compute_cfa_pointer (amd->stack_adjust);
917 else if (loc == hard_frame_pointer_rtx
918 && frame_pointer_needed
4afc3056 919 && hard_frame_pointer_adjustment != -1
920 && cfa_base_rtx)
35af0188 921 return compute_cfa_pointer (hard_frame_pointer_adjustment);
95b985e5 922 gcc_checking_assert (loc != virtual_incoming_args_rtx);
35af0188 923 return loc;
924 case MEM:
925 mem = loc;
926 if (!amd->store)
927 {
928 mem = targetm.delegitimize_address (mem);
929 if (mem != loc && !MEM_P (mem))
930 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
931 }
932
933 addr = XEXP (mem, 0);
934 mem_mode_save = amd->mem_mode;
935 amd->mem_mode = GET_MODE (mem);
936 store_save = amd->store;
937 amd->store = false;
938 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
939 amd->store = store_save;
940 amd->mem_mode = mem_mode_save;
941 if (mem == loc)
942 addr = targetm.delegitimize_address (addr);
943 if (addr != XEXP (mem, 0))
944 mem = replace_equiv_address_nv (mem, addr);
945 if (!amd->store)
946 mem = avoid_constant_pool_reference (mem);
947 return mem;
948 case PRE_INC:
949 case PRE_DEC:
950 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
951 GEN_INT (GET_CODE (loc) == PRE_INC
952 ? GET_MODE_SIZE (amd->mem_mode)
953 : -GET_MODE_SIZE (amd->mem_mode)));
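      /* Fall through into the POST_INC/POST_DEC handling with the
	 replacement address already computed.  */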
954 case POST_INC:
955 case POST_DEC:
956 if (addr == loc)
957 addr = XEXP (loc, 0);
958 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
959 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
960 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
961 GEN_INT ((GET_CODE (loc) == PRE_INC
962 || GET_CODE (loc) == POST_INC)
963 ? GET_MODE_SIZE (amd->mem_mode)
964 : -GET_MODE_SIZE (amd->mem_mode)));
965 amd->side_effects = alloc_EXPR_LIST (0,
966 gen_rtx_SET (VOIDmode,
967 XEXP (loc, 0),
968 tem),
969 amd->side_effects);
970 return addr;
971 case PRE_MODIFY:
972 addr = XEXP (loc, 1);
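      /* Fall through into the POST_MODIFY handling with ADDR set to the
	 new address.  */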
973 case POST_MODIFY:
974 if (addr == loc)
975 addr = XEXP (loc, 0);
967370ed 976 gcc_assert (amd->mem_mode != VOIDmode);
35af0188 977 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
978 amd->side_effects = alloc_EXPR_LIST (0,
979 gen_rtx_SET (VOIDmode,
980 XEXP (loc, 0),
981 XEXP (loc, 1)),
982 amd->side_effects);
983 return addr;
984 case SUBREG:
985 /* First try without delegitimization of whole MEMs and
986 avoid_constant_pool_reference, which is more likely to succeed. */
987 store_save = amd->store;
988 amd->store = true;
989 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
990 data);
991 amd->store = store_save;
992 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
993 if (mem == SUBREG_REG (loc))
910193a5 994 {
995 tem = loc;
996 goto finish_subreg;
997 }
35af0188 998 tem = simplify_gen_subreg (GET_MODE (loc), mem,
999 GET_MODE (SUBREG_REG (loc)),
1000 SUBREG_BYTE (loc));
1001 if (tem)
910193a5 1002 goto finish_subreg;
35af0188 1003 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1004 GET_MODE (SUBREG_REG (loc)),
1005 SUBREG_BYTE (loc));
910193a5 1006 if (tem == NULL_RTX)
1007 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1008 finish_subreg:
1009 if (MAY_HAVE_DEBUG_INSNS
1010 && GET_CODE (tem) == SUBREG
1011 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1012 || GET_CODE (SUBREG_REG (tem)) == MINUS
1013 || GET_CODE (SUBREG_REG (tem)) == MULT
1014 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1015 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1017 && GET_MODE_SIZE (GET_MODE (tem))
1018 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1019 && subreg_lowpart_p (tem)
1020 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1021 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1022 GET_MODE (SUBREG_REG (tem)));
1023 return tem;
e0d031fa 1024 case ASM_OPERANDS:
1025 /* Don't do any replacements in second and following
1026 ASM_OPERANDS of inline-asm with multiple sets.
1027 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1028 and ASM_OPERANDS_LABEL_VEC need to be equal between
1029 all the ASM_OPERANDs in the insn and adjust_insn will
1030 fix this up. */
1031 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1032 return loc;
1033 break;
35af0188 1034 default:
1035 break;
1036 }
1037 return NULL_RTX;
1038}
1039
1040/* Helper function for replacement of uses. */
1041
1042static void
1043adjust_mem_uses (rtx *x, void *data)
1044{
1045 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1046 if (new_x != *x)
1047 validate_change (NULL_RTX, x, new_x, true);
1048}
1049
1050/* Helper function for replacement of stores. */
1051
1052static void
1053adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1054{
1055 if (MEM_P (loc))
1056 {
1057 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1058 adjust_mems, data);
1059 if (new_dest != SET_DEST (expr))
1060 {
1061 rtx xexpr = CONST_CAST_RTX (expr);
1062 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1063 }
1064 }
1065}
1066
1067/* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1068 replace them with their value in the insn and add the side-effects
1069 as other sets to the insn. */
1070
1071static void
1072adjust_insn (basic_block bb, rtx insn)
1073{
1074 struct adjust_mem_data amd;
1075 rtx set;
8ee59e4e 1076
1077#ifdef HAVE_window_save
1078 /* If the target machine has an explicit window save instruction, the
1079 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1080 if (RTX_FRAME_RELATED_P (insn)
1081 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1082 {
1083 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
1084 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1085 parm_reg_t *p;
1086
1087 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
1088 {
1089 XVECEXP (rtl, 0, i * 2)
1090 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1091 /* Do not clobber the attached DECL, but only the REG. */
1092 XVECEXP (rtl, 0, i * 2 + 1)
1093 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1094 gen_raw_REG (GET_MODE (p->outgoing),
1095 REGNO (p->outgoing)));
1096 }
1097
1098 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1099 return;
1100 }
1101#endif
1102
35af0188 1103 amd.mem_mode = VOIDmode;
1104 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1105 amd.side_effects = NULL_RTX;
1106
1107 amd.store = true;
1108 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1109
1110 amd.store = false;
e0d031fa 1111 if (GET_CODE (PATTERN (insn)) == PARALLEL
1112 && asm_noperands (PATTERN (insn)) > 0
1113 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1114 {
1115 rtx body, set0;
1116 int i;
1117
1118 /* inline-asm with multiple sets is a tiny bit more complicated,
1119 because the 3 vectors in ASM_OPERANDS need to be shared between
1120 all ASM_OPERANDS in the instruction. adjust_mems will
1121 not touch ASM_OPERANDS other than the first one, asm_noperands
1122 test above needs to be called before that (otherwise it would fail)
1123 and afterwards this code fixes it up. */
1124 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1125 body = PATTERN (insn);
1126 set0 = XVECEXP (body, 0, 0);
1127 gcc_checking_assert (GET_CODE (set0) == SET
1128 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1129 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1130 for (i = 1; i < XVECLEN (body, 0); i++)
1131 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1132 break;
1133 else
1134 {
1135 set = XVECEXP (body, 0, i);
1136 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1137 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1138 == i);
1139 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1140 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1141 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1142 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1143 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1144 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1145 {
1146 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1147 ASM_OPERANDS_INPUT_VEC (newsrc)
1148 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1149 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1150 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1151 ASM_OPERANDS_LABEL_VEC (newsrc)
1152 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1153 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1154 }
1155 }
1156 }
1157 else
1158 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
35af0188 1159
1160 /* For read-only MEMs containing some constant, prefer those
1161 constants. */
1162 set = single_set (insn);
1163 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1164 {
1165 rtx note = find_reg_equal_equiv_note (insn);
1166
1167 if (note && CONSTANT_P (XEXP (note, 0)))
1168 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1169 }
1170
1171 if (amd.side_effects)
1172 {
1173 rtx *pat, new_pat, s;
1174 int i, oldn, newn;
5923a5e7 1175
35af0188 1176 pat = &PATTERN (insn);
1177 if (GET_CODE (*pat) == COND_EXEC)
1178 pat = &COND_EXEC_CODE (*pat);
1179 if (GET_CODE (*pat) == PARALLEL)
1180 oldn = XVECLEN (*pat, 0);
1181 else
1182 oldn = 1;
1183 for (s = amd.side_effects, newn = 0; s; newn++)
1184 s = XEXP (s, 1);
1185 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1186 if (GET_CODE (*pat) == PARALLEL)
1187 for (i = 0; i < oldn; i++)
1188 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1189 else
1190 XVECEXP (new_pat, 0, 0) = *pat;
1191 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1192 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1193 free_EXPR_LIST_list (&amd.side_effects);
1194 validate_change (NULL_RTX, pat, new_pat, true);
1195 }
5923a5e7 1196}
1197
9845d120 1198/* Return true if a decl_or_value DV is a DECL or NULL. */
1199static inline bool
1200dv_is_decl_p (decl_or_value dv)
1201{
65683f79 1202 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
9845d120 1203}
1204
1205/* Return true if a decl_or_value is a VALUE rtl. */
1206static inline bool
1207dv_is_value_p (decl_or_value dv)
1208{
1209 return dv && !dv_is_decl_p (dv);
1210}
1211
1212/* Return the decl in the decl_or_value. */
1213static inline tree
1214dv_as_decl (decl_or_value dv)
1215{
1b4345f7 1216 gcc_checking_assert (dv_is_decl_p (dv));
9845d120 1217 return (tree) dv;
1218}
1219
1220/* Return the value in the decl_or_value. */
1221static inline rtx
1222dv_as_value (decl_or_value dv)
1223{
1b4345f7 1224 gcc_checking_assert (dv_is_value_p (dv));
9845d120 1225 return (rtx)dv;
1226}
1227
72fdb379 1228/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1229static inline rtx
1230dv_as_rtx (decl_or_value dv)
1231{
1232 tree decl;
1233
1234 if (dv_is_value_p (dv))
1235 return dv_as_value (dv);
1236
1237 decl = dv_as_decl (dv);
1238
1239 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1240 return DECL_RTL_KNOWN_SET (decl);
1241}
1242
9845d120 1243/* Return the opaque pointer in the decl_or_value. */
1244static inline void *
1245dv_as_opaque (decl_or_value dv)
1246{
1247 return dv;
1248}
1249
72fdb379 1250/* Return nonzero if a decl_or_value must not have more than one
1251 variable part. The returned value discriminates among various
1252 kinds of one-part DVs according to enum onepart_enum. */
1253static inline onepart_enum_t
9845d120 1254dv_onepart_p (decl_or_value dv)
1255{
1256 tree decl;
1257
1258 if (!MAY_HAVE_DEBUG_INSNS)
72fdb379 1259 return NOT_ONEPART;
9845d120 1260
1261 if (dv_is_value_p (dv))
72fdb379 1262 return ONEPART_VALUE;
9845d120 1263
1264 decl = dv_as_decl (dv);
1265
e55f862a 1266 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
72fdb379 1267 return ONEPART_DEXPR;
e55f862a 1268
72fdb379 1269 if (target_for_debug_bind (decl) != NULL_TREE)
1270 return ONEPART_VDECL;
1271
1272 return NOT_ONEPART;
9845d120 1273}
1274
72fdb379 1275/* Return the variable pool to be used for a dv of type ONEPART. */
9845d120 1276static inline alloc_pool
72fdb379 1277onepart_pool (onepart_enum_t onepart)
9845d120 1278{
72fdb379 1279 return onepart ? valvar_pool : var_pool;
9845d120 1280}
1281
9845d120 1282/* Build a decl_or_value out of a decl. */
1283static inline decl_or_value
1284dv_from_decl (tree decl)
1285{
1286 decl_or_value dv;
9845d120 1287 dv = decl;
1b4345f7 1288 gcc_checking_assert (dv_is_decl_p (dv));
9845d120 1289 return dv;
1290}
1291
1292/* Build a decl_or_value out of a value. */
1293static inline decl_or_value
1294dv_from_value (rtx value)
1295{
1296 decl_or_value dv;
9845d120 1297 dv = value;
1b4345f7 1298 gcc_checking_assert (dv_is_value_p (dv));
9845d120 1299 return dv;
1300}
1301
72fdb379 1302/* Return a value or the decl of a debug_expr as a decl_or_value. */
1303static inline decl_or_value
1304dv_from_rtx (rtx x)
1305{
1306 decl_or_value dv;
1307
1308 switch (GET_CODE (x))
1309 {
1310 case DEBUG_EXPR:
1311 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1312 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1313 break;
1314
1315 case VALUE:
1316 dv = dv_from_value (x);
1317 break;
1318
1319 default:
1320 gcc_unreachable ();
1321 }
1322
1323 return dv;
1324}
1325
b4d01854 1326extern void debug_dv (decl_or_value dv);
1327
4b987fac 1328DEBUG_FUNCTION void
b4d01854 1329debug_dv (decl_or_value dv)
1330{
1331 if (dv_is_value_p (dv))
1332 debug_rtx (dv_as_value (dv));
1333 else
1334 debug_generic_stmt (dv_as_decl (dv));
1335}
1336
01df1184 1337typedef unsigned int dvuid;
1338
1339/* Return the uid of DV. */
1340
1341static inline dvuid
1342dv_uid (decl_or_value dv)
9845d120 1343{
1344 if (dv_is_value_p (dv))
01df1184 1345 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
9845d120 1346 else
01df1184 1347 return DECL_UID (dv_as_decl (dv));
1348}
1349
1350/* Compute the hash from the uid. */
1351
1352static inline hashval_t
1353dv_uid2hash (dvuid uid)
1354{
1355 return uid;
1356}
1357
1358/* The hash function for DV entries in the variable hash tables. */
1359
1360static inline hashval_t
1361dv_htab_hash (decl_or_value dv)
1362{
1363 return dv_uid2hash (dv_uid (dv));
9845d120 1364}
1365
5923a5e7 1366/* The hash function for variable_htab, computes the hash value
1367 from the declaration of variable X. */
1368
1369static hashval_t
1370variable_htab_hash (const void *x)
1371{
aae87fc3 1372 const_variable const v = (const_variable) x;
5923a5e7 1373
9845d120 1374 return dv_htab_hash (v->dv);
5923a5e7 1375}
1376
1377/* Compare the declaration of variable X with declaration Y. */
1378
1379static int
1380variable_htab_eq (const void *x, const void *y)
1381{
aae87fc3 1382 const_variable const v = (const_variable) x;
9845d120 1383 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1384
01df1184 1385 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
5923a5e7 1386}
1387
72fdb379 1388static void loc_exp_dep_clear (variable var);
1389
5923a5e7 1390/* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1391
1392static void
1393variable_htab_free (void *elem)
1394{
1395 int i;
1396 variable var = (variable) elem;
1397 location_chain node, next;
1398
0ea2d350 1399 gcc_checking_assert (var->refcount > 0);
85bbdb3f 1400
1401 var->refcount--;
1402 if (var->refcount > 0)
1403 return;
1404
5923a5e7 1405 for (i = 0; i < var->n_var_parts; i++)
1406 {
1407 for (node = var->var_part[i].loc_chain; node; node = next)
1408 {
1409 next = node->next;
1410 pool_free (loc_chain_pool, node);
1411 }
1412 var->var_part[i].loc_chain = NULL;
1413 }
72fdb379 1414 if (var->onepart && VAR_LOC_1PAUX (var))
1415 {
1416 loc_exp_dep_clear (var);
1417 if (VAR_LOC_DEP_LST (var))
1418 VAR_LOC_DEP_LST (var)->pprev = NULL;
1419 XDELETE (VAR_LOC_1PAUX (var));
1420 /* These may be reused across functions, so reset
1421 e.g. NO_LOC_P. */
1422 if (var->onepart == ONEPART_DEXPR)
1423 set_dv_changed (var->dv, true);
1424 }
1425 pool_free (onepart_pool (var->onepart), var);
5923a5e7 1426}
1427
1428/* Initialize the set (array) SET of attrs to empty lists. */
1429
1430static void
1431init_attrs_list_set (attrs *set)
1432{
1433 int i;
1434
1435 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1436 set[i] = NULL;
1437}
1438
1439/* Make the list *LISTP empty. */
1440
1441static void
1442attrs_list_clear (attrs *listp)
1443{
1444 attrs list, next;
1445
1446 for (list = *listp; list; list = next)
1447 {
1448 next = list->next;
1449 pool_free (attrs_pool, list);
1450 }
1451 *listp = NULL;
1452}
1453
1454/* Return the node of LIST whose DV and OFFSET match, or NULL if none does. */
1455
1456static attrs
9845d120 1457attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
5923a5e7 1458{
1459 for (; list; list = list->next)
9845d120 1460 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
5923a5e7 1461 return list;
1462 return NULL;
1463}
1464
1465/* Insert the triplet DV, OFFSET, LOC at the start of the list *LISTP. */
1466
1467static void
9845d120 1468attrs_list_insert (attrs *listp, decl_or_value dv,
1469 HOST_WIDE_INT offset, rtx loc)
5923a5e7 1470{
1471 attrs list;
1472
45ba1503 1473 list = (attrs) pool_alloc (attrs_pool);
5923a5e7 1474 list->loc = loc;
9845d120 1475 list->dv = dv;
5923a5e7 1476 list->offset = offset;
1477 list->next = *listp;
1478 *listp = list;
1479}
1480
1481/* Copy all nodes from SRC and create a list *DSTP of the copies. */
1482
1483static void
1484attrs_list_copy (attrs *dstp, attrs src)
1485{
1486 attrs n;
1487
1488 attrs_list_clear (dstp);
1489 for (; src; src = src->next)
1490 {
45ba1503 1491 n = (attrs) pool_alloc (attrs_pool);
5923a5e7 1492 n->loc = src->loc;
9845d120 1493 n->dv = src->dv;
5923a5e7 1494 n->offset = src->offset;
1495 n->next = *dstp;
1496 *dstp = n;
1497 }
1498}
1499
1500/* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1501
1502static void
1503attrs_list_union (attrs *dstp, attrs src)
1504{
1505 for (; src; src = src->next)
1506 {
9845d120 1507 if (!attrs_list_member (*dstp, src->dv, src->offset))
1508 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1509 }
1510}
1511
1512/* Combine nodes that are not onepart nodes from SRC and SRC2 into
1513 *DSTP. */
1514
1515static void
1516attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1517{
1518 gcc_assert (!*dstp);
1519 for (; src; src = src->next)
1520 {
1521 if (!dv_onepart_p (src->dv))
1522 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1523 }
1524 for (src = src2; src; src = src->next)
1525 {
1526 if (!dv_onepart_p (src->dv)
1527 && !attrs_list_member (*dstp, src->dv, src->offset))
1528 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
5923a5e7 1529 }
1530}
1531
a8f6ad2b 1532/* Shared hashtable support. */
1533
1534/* Return true if VARS is shared. */
1535
1536static inline bool
1537shared_hash_shared (shared_hash vars)
1538{
1539 return vars->refcount > 1;
1540}
1541
1542/* Return the hash table for VARS. */
1543
1544static inline htab_t
1545shared_hash_htab (shared_hash vars)
1546{
1547 return vars->htab;
1548}
1549
bc95df68 1550/* Return true if VAR may be shared, either directly or because VARS is shared. */
1551
1552static inline bool
1553shared_var_p (variable var, shared_hash vars)
1554{
1555 /* Don't count an entry in the changed_variables table as a duplicate. */
1556 return ((var->refcount > 1 + (int) var->in_changed_variables)
1557 || shared_hash_shared (vars));
1558}
1559
a8f6ad2b 1560/* Copy variables into a new hash table. */
1561
1562static shared_hash
1563shared_hash_unshare (shared_hash vars)
1564{
1565 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1566 gcc_assert (vars->refcount > 1);
1567 new_vars->refcount = 1;
1568 new_vars->htab
1569 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1570 variable_htab_eq, variable_htab_free);
1571 vars_copy (new_vars->htab, vars->htab);
1572 vars->refcount--;
1573 return new_vars;
1574}
1575
1576/* Increment reference counter on VARS and return it. */
1577
1578static inline shared_hash
1579shared_hash_copy (shared_hash vars)
1580{
1581 vars->refcount++;
1582 return vars;
1583}
1584
1585/* Decrement reference counter and destroy hash table if not shared
1586 anymore. */
5923a5e7 1587
1588static void
a8f6ad2b 1589shared_hash_destroy (shared_hash vars)
5923a5e7 1590{
0ea2d350 1591 gcc_checking_assert (vars->refcount > 0);
a8f6ad2b 1592 if (--vars->refcount == 0)
1593 {
1594 htab_delete (vars->htab);
1595 pool_free (shared_hash_pool, vars);
1596 }
1597}
1598
9845d120 1599/* Unshare *PVARS if shared and return slot for DV. If INS is
a8f6ad2b 1600 INSERT, insert it if not already present. */
1601
1602static inline void **
9845d120 1603shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1604 hashval_t dvhash, enum insert_option ins)
a8f6ad2b 1605{
1606 if (shared_hash_shared (*pvars))
1607 *pvars = shared_hash_unshare (*pvars);
9845d120 1608 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1609}
1610
1611static inline void **
1612shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1613 enum insert_option ins)
1614{
1615 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
a8f6ad2b 1616}
1617
9845d120 1618/* Return slot for DV, if it is already present in the hash table.
a8f6ad2b 1619 If it is not present, insert it only if VARS is not shared, otherwise
1620 return NULL. */
1621
1622static inline void **
9845d120 1623shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
a8f6ad2b 1624{
9845d120 1625 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
a8f6ad2b 1626 shared_hash_shared (vars)
1627 ? NO_INSERT : INSERT);
1628}
1629
9845d120 1630static inline void **
1631shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1632{
1633 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1634}
1635
1636/* Return slot for DV only if it is already present in the hash table. */
1637
1638static inline void **
1639shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1640 hashval_t dvhash)
1641{
1642 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1643 NO_INSERT);
1644}
a8f6ad2b 1645
1646static inline void **
9845d120 1647shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
a8f6ad2b 1648{
9845d120 1649 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
a8f6ad2b 1650}
1651
9845d120 1652/* Return variable for DV or NULL if not already present in the hash
a8f6ad2b 1653 table. */
1654
1655static inline variable
9845d120 1656shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1657{
1658 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1659}
1660
1661static inline variable
1662shared_hash_find (shared_hash vars, decl_or_value dv)
1663{
1664 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1665}
1666
9845d120 1667/* Return true if TVAL is better than CVAL as a canonical value. We
1668 choose lowest-numbered VALUEs, using the RTX address as a
1669 tie-breaker. The idea is to arrange them into a star topology,
1670 such that all of them are at most one step away from the canonical
1671 value, and the canonical value has backlinks to all of them, in
1672 addition to all the actual locations. We don't enforce this
1673 topology throughout the entire dataflow analysis, though.
1674 */
1675
1676static inline bool
1677canon_value_cmp (rtx tval, rtx cval)
1678{
1679 return !cval
01df1184 1680 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
5923a5e7 1681}
1682
9845d120 1683static bool dst_can_be_shared;
1684
85bbdb3f 1685/* Make a copy of variable VAR, insert it into dataflow set SET and return the slot for it. */
5923a5e7 1686
9845d120 1687static void **
1688unshare_variable (dataflow_set *set, void **slot, variable var,
d53bb226 1689 enum var_init_status initialized)
5923a5e7 1690{
85bbdb3f 1691 variable new_var;
5923a5e7 1692 int i;
1693
72fdb379 1694 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
9845d120 1695 new_var->dv = var->dv;
85bbdb3f 1696 new_var->refcount = 1;
1697 var->refcount--;
1698 new_var->n_var_parts = var->n_var_parts;
72fdb379 1699 new_var->onepart = var->onepart;
bc95df68 1700 new_var->in_changed_variables = false;
5923a5e7 1701
331cf53a 1702 if (! flag_var_tracking_uninit)
1703 initialized = VAR_INIT_STATUS_INITIALIZED;
1704
5923a5e7 1705 for (i = 0; i < var->n_var_parts; i++)
1706 {
e0473d22 1707 location_chain node;
1708 location_chain *nextp;
5923a5e7 1709
72fdb379 1710 if (i == 0 && var->onepart)
1711 {
1712 /* One-part auxiliary data is only used while emitting
1713 notes, so propagate it to the new variable in the active
1714 dataflow set. If we're not emitting notes, this will be
1715 a no-op. */
1716 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1717 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1718 VAR_LOC_1PAUX (var) = NULL;
1719 }
1720 else
1721 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
85bbdb3f 1722 nextp = &new_var->var_part[i].loc_chain;
1723 for (node = var->var_part[i].loc_chain; node; node = node->next)
5923a5e7 1724 {
1725 location_chain new_lc;
1726
45ba1503 1727 new_lc = (location_chain) pool_alloc (loc_chain_pool);
5923a5e7 1728 new_lc->next = NULL;
d53bb226 1729 if (node->init > initialized)
1730 new_lc->init = node->init;
1731 else
1732 new_lc->init = initialized;
1733 if (node->set_src && !(MEM_P (node->set_src)))
1734 new_lc->set_src = node->set_src;
1735 else
1736 new_lc->set_src = NULL;
5923a5e7 1737 new_lc->loc = node->loc;
1738
e0473d22 1739 *nextp = new_lc;
1740 nextp = &new_lc->next;
5923a5e7 1741 }
1742
bc95df68 1743 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
5923a5e7 1744 }
1745
9845d120 1746 dst_can_be_shared = false;
1747 if (shared_hash_shared (set->vars))
1748 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1749 else if (set->traversed_vars && set->vars != set->traversed_vars)
1750 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
85bbdb3f 1751 *slot = new_var;
bc95df68 1752 if (var->in_changed_variables)
1753 {
1754 void **cslot
1755 = htab_find_slot_with_hash (changed_variables, var->dv,
1756 dv_htab_hash (var->dv), NO_INSERT);
1757 gcc_assert (*cslot == (void *) var);
1758 var->in_changed_variables = false;
1759 variable_htab_free (var);
1760 *cslot = new_var;
1761 new_var->in_changed_variables = true;
1762 }
9845d120 1763 return slot;
85bbdb3f 1764}
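/* Copy-on-write usage sketch (illustrative, condensed from callers such
   as variable_union and drop_overlapping_mem_locs below): a possibly
   shared variable is unshared before its location chains are modified
   in place, and callers go on to use the refreshed VAR and SLOT.  */
#if 0
static void **
unshare_before_modify_sketch (dataflow_set *set, void **slot)
{
  variable var = (variable) *slot;

  if (shared_var_p (var, set->vars))
    {
      slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
      var = (variable) *slot;
    }

  /* VAR now has a refcount of 1 within SET, so its location chains may
     be modified without affecting other dataflow sets.  */
  return slot;
}
#endif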
1765
5923a5e7 1766/* Copy all variables from hash table SRC to hash table DST. */
1767
1768static void
1769vars_copy (htab_t dst, htab_t src)
1770{
a868678a 1771 htab_iterator hi;
1772 variable var;
1773
1774 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1775 {
1776 void **dstp;
1777 var->refcount++;
1778 dstp = htab_find_slot_with_hash (dst, var->dv,
1779 dv_htab_hash (var->dv),
1780 INSERT);
1781 *dstp = var;
1782 }
5923a5e7 1783}
1784
96414f01 1785/* Map a decl to its main debug decl. */
1786
1787static inline tree
1788var_debug_decl (tree decl)
1789{
1790 if (decl && DECL_P (decl)
4e9d80c4 1791 && DECL_DEBUG_EXPR_IS_FROM (decl))
1792 {
1793 tree debugdecl = DECL_DEBUG_EXPR (decl);
1794 if (debugdecl && DECL_P (debugdecl))
1795 decl = debugdecl;
1796 }
96414f01 1797
1798 return decl;
1799}
1800
9845d120 1801/* Set the register LOC to contain DV, OFFSET. */
9a8a3ff4 1802
1803static void
9845d120 1804var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1805 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1806 enum insert_option iopt)
9a8a3ff4 1807{
96414f01 1808 attrs node;
9845d120 1809 bool decl_p = dv_is_decl_p (dv);
96414f01 1810
9845d120 1811 if (decl_p)
1812 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
9a8a3ff4 1813
96414f01 1814 for (node = set->regs[REGNO (loc)]; node; node = node->next)
9845d120 1815 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1816 && node->offset == offset)
96414f01 1817 break;
1818 if (!node)
9845d120 1819 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1820 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1821}
1822
1823/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1824
1825static void
1826var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1827 rtx set_src)
1828{
1829 tree decl = REG_EXPR (loc);
1830 HOST_WIDE_INT offset = REG_OFFSET (loc);
1831
1832 var_reg_decl_set (set, loc, initialized,
1833 dv_from_decl (decl), offset, set_src, INSERT);
d53bb226 1834}
1835
8458f4ca 1836static enum var_init_status
9845d120 1837get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
d53bb226 1838{
d53bb226 1839 variable var;
1840 int i;
8458f4ca 1841 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
d53bb226 1842
1843 if (! flag_var_tracking_uninit)
1844 return VAR_INIT_STATUS_INITIALIZED;
1845
9845d120 1846 var = shared_hash_find (set->vars, dv);
a8f6ad2b 1847 if (var)
d53bb226 1848 {
d53bb226 1849 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1850 {
1851 location_chain nextp;
1852 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1853 if (rtx_equal_p (nextp->loc, loc))
1854 {
1855 ret_val = nextp->init;
1856 break;
1857 }
1858 }
1859 }
1860
1861 return ret_val;
9a8a3ff4 1862}
1863
96414f01 1864/* Delete current content of register LOC in dataflow set SET and set
1865 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1866 MODIFY is true, any other live copies of the same variable part are
1867 also deleted from the dataflow set, otherwise the variable part is
1868 assumed to be copied from another location holding the same
1869 part. */
5923a5e7 1870
1871static void
48e1416a 1872var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
d53bb226 1873 enum var_init_status initialized, rtx set_src)
5923a5e7 1874{
5923a5e7 1875 tree decl = REG_EXPR (loc);
1876 HOST_WIDE_INT offset = REG_OFFSET (loc);
e0473d22 1877 attrs node, next;
1878 attrs *nextp;
5923a5e7 1879
96414f01 1880 decl = var_debug_decl (decl);
1881
d53bb226 1882 if (initialized == VAR_INIT_STATUS_UNKNOWN)
9845d120 1883 initialized = get_init_value (set, loc, dv_from_decl (decl));
d53bb226 1884
e0473d22 1885 nextp = &set->regs[REGNO (loc)];
1886 for (node = *nextp; node; node = next)
5923a5e7 1887 {
1888 next = node->next;
9845d120 1889 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
5923a5e7 1890 {
9845d120 1891 delete_variable_part (set, node->loc, node->dv, node->offset);
5923a5e7 1892 pool_free (attrs_pool, node);
e0473d22 1893 *nextp = next;
5923a5e7 1894 }
1895 else
1896 {
1897 node->loc = loc;
e0473d22 1898 nextp = &node->next;
5923a5e7 1899 }
1900 }
96414f01 1901 if (modify)
9845d120 1902 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
d53bb226 1903 var_reg_set (set, loc, initialized, set_src);
5923a5e7 1904}
1905
e25fd798 1906/* Delete the association of register LOC in dataflow set SET with any
1907 variables that aren't onepart. If CLOBBER is true, also delete any
1908 other live copies of the same variable part, and delete the
1909 association with onepart dvs too. */
5923a5e7 1910
1911static void
96414f01 1912var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
5923a5e7 1913{
e25fd798 1914 attrs *nextp = &set->regs[REGNO (loc)];
5923a5e7 1915 attrs node, next;
1916
96414f01 1917 if (clobber)
1918 {
1919 tree decl = REG_EXPR (loc);
1920 HOST_WIDE_INT offset = REG_OFFSET (loc);
1921
1922 decl = var_debug_decl (decl);
1923
9845d120 1924 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
96414f01 1925 }
1926
e25fd798 1927 for (node = *nextp; node; node = next)
5923a5e7 1928 {
1929 next = node->next;
e25fd798 1930 if (clobber || !dv_onepart_p (node->dv))
1931 {
1932 delete_variable_part (set, node->loc, node->dv, node->offset);
1933 pool_free (attrs_pool, node);
1934 *nextp = next;
1935 }
1936 else
1937 nextp = &node->next;
5923a5e7 1938 }
5923a5e7 1939}
1940
1941/* Delete content of register with number REGNO in dataflow set SET. */
1942
1943static void
1944var_regno_delete (dataflow_set *set, int regno)
1945{
1946 attrs *reg = &set->regs[regno];
1947 attrs node, next;
1948
1949 for (node = *reg; node; node = next)
1950 {
1951 next = node->next;
9845d120 1952 delete_variable_part (set, node->loc, node->dv, node->offset);
5923a5e7 1953 pool_free (attrs_pool, node);
1954 }
1955 *reg = NULL;
1956}
1957
d806488d 1958/* Strip constant offsets and alignments off of LOC. Return the base
1959 expression. */
1960
1961static rtx
1962vt_get_canonicalize_base (rtx loc)
1963{
1964 while ((GET_CODE (loc) == PLUS
1965 || GET_CODE (loc) == AND)
1966 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1967 && (GET_CODE (loc) != AND
1968 || INTVAL (XEXP (loc, 1)) < 0))
1969 loc = XEXP (loc, 0);
1970
1971 return loc;
1972}
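/* Worked example (illustrative): for
     (and:P (plus:P (value:P V) (const_int 12)) (const_int -8))
   both the negative-constant AND (a stack alignment) and the constant
   PLUS are stripped, so the returned base is (value:P V).  An AND with
   a non-negative constant is not treated as an alignment and stops the
   stripping.  */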
1973
1974/* Canonicalize LOC using equivalences from SET in addition to those
1975 in the cselib static table. */
1976
1977static rtx
1978vt_canonicalize_addr (dataflow_set *set, rtx oloc)
1979{
1980 HOST_WIDE_INT ofst = 0;
1981 enum machine_mode mode = GET_MODE (oloc);
1982 rtx loc = canon_rtx (get_addr (oloc));
1983
1984 /* Try to substitute a base VALUE for equivalent expressions as much
1985 as possible. The goal here is to expand stack-related addresses
1986 to one of the stack base registers, so that we can compare
1987 addresses for overlaps. */
1988 while (GET_CODE (vt_get_canonicalize_base (loc)) == VALUE)
1989 {
1990 rtx x;
1991 decl_or_value dv;
1992 variable var;
1993 location_chain l;
1994
1995 while (GET_CODE (loc) == PLUS)
1996 {
1997 ofst += INTVAL (XEXP (loc, 1));
1998 loc = XEXP (loc, 0);
1999 continue;
2000 }
2001
2002 /* Alignment operations can't normally be combined, so just
2003 canonicalize the base and we're done. We'll normally have
2004 only one stack alignment anyway. */
2005 if (GET_CODE (loc) == AND)
2006 {
2007 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2008 if (x != XEXP (loc, 0))
2009 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2010 loc = canon_rtx (get_addr (loc));
2011 break;
2012 }
2013
2014 x = canon_rtx (get_addr (loc));
2015
2016 /* We've made progress! Start over. */
2017 if (x != loc || GET_CODE (x) != VALUE)
2018 {
2019 loc = x;
2020 continue;
2021 }
2022
2023 dv = dv_from_rtx (x);
2024 var = (variable) htab_find_with_hash (shared_hash_htab (set->vars),
2025 dv, dv_htab_hash (dv));
2026 if (!var)
2027 break;
2028
2029 /* Look for an improved equivalent expression. */
2030 for (l = var->var_part[0].loc_chain; l; l = l->next)
2031 {
2032 rtx base = vt_get_canonicalize_base (l->loc);
2033 if (GET_CODE (base) == REG
2034 || (GET_CODE (base) == VALUE
2035 && canon_value_cmp (base, loc)))
2036 {
2037 loc = l->loc;
2038 break;
2039 }
2040 }
2041
2042 /* No luck with the dataflow set, so we're done. */
2043 if (!l)
2044 break;
2045 }
2046
2047 /* Add OFST back in. */
2048 if (ofst)
2049 {
2050 /* Don't build new RTL if we can help it. */
2051 if (GET_CODE (oloc) == PLUS
2052 && XEXP (oloc, 0) == loc
2053 && INTVAL (XEXP (oloc, 1)) == ofst)
2054 return oloc;
2055
2056 loc = plus_constant (mode, loc, ofst);
2057 }
2058
2059 return loc;
2060}
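/* Worked example (illustrative): suppose OLOC is
     (plus:P (value:P V) (const_int 4))
   and SET records for V an equivalent location
     (plus:P (reg:P sp) (const_int 16)).
   The loop peels the constant into OFST (4), substitutes the
   register-based equivalent for V, and the function returns
   (plus:P (reg:P sp) (const_int 20)), so that overlap checks against
   other stack-based addresses become possible.  */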
2061
2062/* Return true iff ADDR has a stack register as the base address. */
2063
2064static inline bool
2065vt_stack_offset_p (rtx addr)
2066{
2067 rtx base = vt_get_canonicalize_base (addr);
2068
2069 if (GET_CODE (base) != REG)
2070 return false;
2071
2072 return REGNO_PTR_FRAME_P (REGNO (base));
2073}
2074
2075/* Return true iff there's a true dependence between MLOC and LOC.
2076 MADDR must be a canonicalized version of MLOC's address. */
2077
2078static inline bool
2079vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2080{
2081 if (GET_CODE (loc) != MEM)
2082 return false;
2083
2084 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, NULL))
2085 return false;
2086
2087 if (!MEM_EXPR (loc) && vt_stack_offset_p (maddr))
2088 {
2089 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2090 return canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr);
2091 }
2092
2093 return true;
2094}
2095
c573c1b5 2096/* Hold parameters for the hashtab traversal function
2097 drop_overlapping_mem_locs, see below. */
2098
2099struct overlapping_mems
2100{
2101 dataflow_set *set;
2102 rtx loc, addr;
2103};
2104
2105/* Remove all MEMs that overlap with COMS->LOC from the location list
2106 of a hash table entry for a value. COMS->ADDR must be a
2107 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2108 canonicalized itself. */
2109
2110static int
2111drop_overlapping_mem_locs (void **slot, void *data)
2112{
2113 struct overlapping_mems *coms = (struct overlapping_mems *)data;
2114 dataflow_set *set = coms->set;
2115 rtx mloc = coms->loc, addr = coms->addr;
2116 variable var = (variable) *slot;
2117
2118 if (var->onepart == ONEPART_VALUE)
2119 {
2120 location_chain loc, *locp;
2121 bool changed = false;
2122 rtx cur_loc;
2123
2124 gcc_assert (var->n_var_parts == 1);
2125
2126 if (shared_var_p (var, set->vars))
2127 {
2128 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
d806488d 2129 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
c573c1b5 2130 break;
2131
2132 if (!loc)
2133 return 1;
2134
2135 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2136 var = (variable)*slot;
2137 gcc_assert (var->n_var_parts == 1);
2138 }
2139
2140 if (VAR_LOC_1PAUX (var))
2141 cur_loc = VAR_LOC_FROM (var);
2142 else
2143 cur_loc = var->var_part[0].cur_loc;
2144
2145 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2146 loc; loc = *locp)
2147 {
d806488d 2148 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
c573c1b5 2149 {
2150 locp = &loc->next;
2151 continue;
2152 }
2153
2154 *locp = loc->next;
 2155 /* If we have deleted the location which was last emitted
 2156 we have to emit a new location, so add the variable to the
 2157 set of changed variables. */
2158 if (cur_loc == loc->loc)
2159 {
2160 changed = true;
2161 var->var_part[0].cur_loc = NULL;
2162 if (VAR_LOC_1PAUX (var))
2163 VAR_LOC_FROM (var) = NULL;
2164 }
2165 pool_free (loc_chain_pool, loc);
2166 }
2167
2168 if (!var->var_part[0].loc_chain)
2169 {
2170 var->n_var_parts--;
2171 changed = true;
2172 }
2173 if (changed)
2174 variable_was_changed (var, set);
2175 }
2176
2177 return 1;
2178}
2179
2180/* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2181
2182static void
2183clobber_overlapping_mems (dataflow_set *set, rtx loc)
2184{
2185 struct overlapping_mems coms;
2186
2187 coms.set = set;
2188 coms.loc = canon_rtx (loc);
d806488d 2189 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
c573c1b5 2190
2191 set->traversed_vars = set->vars;
2192 htab_traverse (shared_hash_htab (set->vars),
2193 drop_overlapping_mem_locs, &coms);
2194 set->traversed_vars = NULL;
2195}
2196
9845d120 2197/* Set the location of DV, OFFSET as the MEM LOC. */
2198
2199static void
2200var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2201 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2202 enum insert_option iopt)
2203{
2204 if (dv_is_decl_p (dv))
2205 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2206
2207 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2208}
2209
9a8a3ff4 2210/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2211 SET to LOC.
5923a5e7 2212 Adjust the address first if it is stack pointer based. */
2213
2214static void
48e1416a 2215var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
d53bb226 2216 rtx set_src)
5923a5e7 2217{
2218 tree decl = MEM_EXPR (loc);
eeb0ae23 2219 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
5923a5e7 2220
9845d120 2221 var_mem_decl_set (set, loc, initialized,
2222 dv_from_decl (decl), offset, set_src, INSERT);
5923a5e7 2223}
2224
96414f01 2225/* Delete and set the location part of variable MEM_EXPR (LOC) in
2226 dataflow set SET to LOC. If MODIFY is true, any other live copies
2227 of the same variable part are also deleted from the dataflow set,
2228 otherwise the variable part is assumed to be copied from another
2229 location holding the same part.
9a8a3ff4 2230 Adjust the address first if it is stack pointer based. */
2231
2232static void
48e1416a 2233var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
d53bb226 2234 enum var_init_status initialized, rtx set_src)
9a8a3ff4 2235{
96414f01 2236 tree decl = MEM_EXPR (loc);
eeb0ae23 2237 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
96414f01 2238
c573c1b5 2239 clobber_overlapping_mems (set, loc);
96414f01 2240 decl = var_debug_decl (decl);
2241
d53bb226 2242 if (initialized == VAR_INIT_STATUS_UNKNOWN)
9845d120 2243 initialized = get_init_value (set, loc, dv_from_decl (decl));
d53bb226 2244
96414f01 2245 if (modify)
9845d120 2246 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
d53bb226 2247 var_mem_set (set, loc, initialized, set_src);
9a8a3ff4 2248}
2249
96414f01 2250/* Delete the location part LOC from dataflow set SET. If CLOBBER is
2251 true, also delete any other live copies of the same variable part.
5923a5e7 2252 Adjust the address first if it is stack pointer based. */
2253
2254static void
96414f01 2255var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
5923a5e7 2256{
2257 tree decl = MEM_EXPR (loc);
eeb0ae23 2258 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
5923a5e7 2259
c573c1b5 2260 clobber_overlapping_mems (set, loc);
96414f01 2261 decl = var_debug_decl (decl);
2262 if (clobber)
9845d120 2263 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2264 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2265}
2266
72fdb379 2267/* Return true if LOC should not be expanded for location expressions,
2268 or used in them. */
2269
2270static inline bool
2271unsuitable_loc (rtx loc)
2272{
2273 switch (GET_CODE (loc))
2274 {
2275 case PC:
2276 case SCRATCH:
2277 case CC0:
2278 case ASM_INPUT:
2279 case ASM_OPERANDS:
2280 return true;
2281
2282 default:
2283 return false;
2284 }
2285}
2286
8081d3a6 2287/* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2288 bound to it. */
2289
2290static inline void
2291val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2292{
2293 if (REG_P (loc))
2294 {
2295 if (modified)
2296 var_regno_delete (set, REGNO (loc));
2297 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2298 dv_from_value (val), 0, NULL_RTX, INSERT);
2299 }
2300 else if (MEM_P (loc))
2301 {
2302 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2303
c573c1b5 2304 if (modified)
2305 clobber_overlapping_mems (set, loc);
2306
8081d3a6 2307 if (l && GET_CODE (l->loc) == VALUE)
2308 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2309
2310 /* If this MEM is a global constant, we don't need it in the
2311 dynamic tables. ??? We should test this before emitting the
2312 micro-op in the first place. */
2313 while (l)
2314 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2315 break;
2316 else
2317 l = l->next;
2318
2319 if (!l)
2320 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2321 dv_from_value (val), 0, NULL_RTX, INSERT);
2322 }
2323 else
2324 {
2325 /* Other kinds of equivalences are necessarily static, at least
2326 so long as we do not perform substitutions while merging
2327 expressions. */
2328 gcc_unreachable ();
2329 set_variable_part (set, loc, dv_from_value (val), 0,
2330 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2331 }
2332}
2333
bf262632 2334/* Bind a value to a location it was just stored in. If MODIFIED
2335 holds, assume the location was modified, detaching it from any
2336 values bound to it. */
9845d120 2337
2338static void
bf262632 2339val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
9845d120 2340{
2341 cselib_val *v = CSELIB_VAL_PTR (val);
2342
2343 gcc_assert (cselib_preserved_value_p (v));
2344
2345 if (dump_file)
2346 {
72fdb379 2347 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
9845d120 2348 print_inline_rtx (dump_file, loc, 0);
72fdb379 2349 fprintf (dump_file, " evaluates to ");
2350 print_inline_rtx (dump_file, val, 0);
9845d120 2351 if (v->locs)
2352 {
2353 struct elt_loc_list *l;
2354 for (l = v->locs; l; l = l->next)
2355 {
2356 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2357 print_inline_rtx (dump_file, l->loc, 0);
2358 }
2359 }
2360 fprintf (dump_file, "\n");
2361 }
2362
72fdb379 2363 gcc_checking_assert (!unsuitable_loc (loc));
2364
8081d3a6 2365 val_bind (set, val, loc, modified);
9845d120 2366}
2367
 2368/* Reset this node, detaching all its equivalences. */
2370
2371static void
2372val_reset (dataflow_set *set, decl_or_value dv)
2373{
 2374 variable var = shared_hash_find (set->vars, dv);
2375 location_chain node;
2376 rtx cval;
2377
2378 if (!var || !var->n_var_parts)
2379 return;
2380
2381 gcc_assert (var->n_var_parts == 1);
2382
2383 cval = NULL;
2384 for (node = var->var_part[0].loc_chain; node; node = node->next)
2385 if (GET_CODE (node->loc) == VALUE
2386 && canon_value_cmp (node->loc, cval))
2387 cval = node->loc;
2388
2389 for (node = var->var_part[0].loc_chain; node; node = node->next)
2390 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2391 {
2392 /* Redirect the equivalence link to the new canonical
2393 value, or simply remove it if it would point at
2394 itself. */
2395 if (cval)
2396 set_variable_part (set, cval, dv_from_value (node->loc),
2397 0, node->init, node->set_src, NO_INSERT);
2398 delete_variable_part (set, dv_as_value (dv),
2399 dv_from_value (node->loc), 0);
2400 }
2401
2402 if (cval)
2403 {
2404 decl_or_value cdv = dv_from_value (cval);
2405
 2406 /* Keep the remaining values connected, accumulating links
2407 in the canonical value. */
2408 for (node = var->var_part[0].loc_chain; node; node = node->next)
2409 {
2410 if (node->loc == cval)
2411 continue;
2412 else if (GET_CODE (node->loc) == REG)
2413 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2414 node->set_src, NO_INSERT);
2415 else if (GET_CODE (node->loc) == MEM)
2416 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2417 node->set_src, NO_INSERT);
2418 else
2419 set_variable_part (set, node->loc, cdv, 0,
2420 node->init, node->set_src, NO_INSERT);
2421 }
2422 }
2423
2424 /* We remove this last, to make sure that the canonical value is not
2425 removed to the point of requiring reinsertion. */
2426 if (cval)
2427 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2428
2429 clobber_variable_part (set, NULL, dv, 0, NULL);
9845d120 2430}
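/* Example (illustrative): resetting V whose location chain is
   {V1, V2, (reg:P 1)}, where V1 is the most canonical VALUE, picks
   CVAL = V1, redirects V2's equivalence link from V to V1, records
   (reg:P 1) as holding V1, accumulates back-links to V2 and the
   register in V1's own chain, removes V from V1's chain, and finally
   drops all of V's own locations.  The equivalence set stays connected
   through V1.  */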
2431
2432/* Find the values in a given location and map the val to another
2433 value, if it is unique, or add the location as one holding the
2434 value. */
2435
2436static void
2437val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2438{
2439 decl_or_value dv = dv_from_value (val);
2440
2441 if (dump_file && (dump_flags & TDF_DETAILS))
2442 {
2443 if (insn)
2444 fprintf (dump_file, "%i: ", INSN_UID (insn));
2445 else
2446 fprintf (dump_file, "head: ");
2447 print_inline_rtx (dump_file, val, 0);
2448 fputs (" is at ", dump_file);
2449 print_inline_rtx (dump_file, loc, 0);
2450 fputc ('\n', dump_file);
2451 }
2452
2453 val_reset (set, dv);
2454
72fdb379 2455 gcc_checking_assert (!unsuitable_loc (loc));
2456
9845d120 2457 if (REG_P (loc))
2458 {
2459 attrs node, found = NULL;
2460
2461 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2462 if (dv_is_value_p (node->dv)
2463 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2464 {
2465 found = node;
2466
2467 /* Map incoming equivalences. ??? Wouldn't it be nice if
2468 we just started sharing the location lists? Maybe a
2469 circular list ending at the value itself or some
2470 such. */
2471 set_variable_part (set, dv_as_value (node->dv),
2472 dv_from_value (val), node->offset,
2473 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2474 set_variable_part (set, val, node->dv, node->offset,
2475 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2476 }
2477
2478 /* If we didn't find any equivalence, we need to remember that
2479 this value is held in the named register. */
8081d3a6 2480 if (found)
2481 return;
9845d120 2482 }
8081d3a6 2483 /* ??? Attempt to find and merge equivalent MEMs or other
2484 expressions too. */
2485
2486 val_bind (set, val, loc, false);
5923a5e7 2487}
2488
48e1416a 2489/* Initialize dataflow set SET to be empty. */
2491
2492static void
a8f6ad2b 2493dataflow_set_init (dataflow_set *set)
5923a5e7 2494{
2495 init_attrs_list_set (set->regs);
a8f6ad2b 2496 set->vars = shared_hash_copy (empty_shared_hash);
5923a5e7 2497 set->stack_adjust = 0;
9845d120 2498 set->traversed_vars = NULL;
5923a5e7 2499}
2500
2501/* Delete the contents of dataflow set SET. */
2502
2503static void
2504dataflow_set_clear (dataflow_set *set)
2505{
2506 int i;
2507
2508 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2509 attrs_list_clear (&set->regs[i]);
2510
a8f6ad2b 2511 shared_hash_destroy (set->vars);
2512 set->vars = shared_hash_copy (empty_shared_hash);
5923a5e7 2513}
2514
2515/* Copy the contents of dataflow set SRC to DST. */
2516
2517static void
2518dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2519{
2520 int i;
2521
2522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2523 attrs_list_copy (&dst->regs[i], src->regs[i]);
2524
a8f6ad2b 2525 shared_hash_destroy (dst->vars);
2526 dst->vars = shared_hash_copy (src->vars);
5923a5e7 2527 dst->stack_adjust = src->stack_adjust;
2528}
2529
 2530/* Information for merging lists of locations for a given offset of
 2531 a variable. */
2532struct variable_union_info
2533{
2534 /* Node of the location chain. */
2535 location_chain lc;
2536
2537 /* The sum of positions in the input chains. */
2538 int pos;
2539
331cf53a 2540 /* The position in the chain of DST dataflow set. */
5923a5e7 2541 int pos_dst;
2542};
2543
331cf53a 2544/* Buffer for location list sorting and its allocated size. */
2545static struct variable_union_info *vui_vec;
2546static int vui_allocated;
2547
5923a5e7 2548/* Compare function for qsort, order the structures by POS element. */
2549
2550static int
2551variable_union_info_cmp_pos (const void *n1, const void *n2)
2552{
45ba1503 2553 const struct variable_union_info *const i1 =
2554 (const struct variable_union_info *) n1;
2555 const struct variable_union_info *const i2 =
2556 ( const struct variable_union_info *) n2;
5923a5e7 2557
2558 if (i1->pos != i2->pos)
2559 return i1->pos - i2->pos;
48e1416a 2560
5923a5e7 2561 return (i1->pos_dst - i2->pos_dst);
2562}
2563
 2564/* Compute the union of the location parts of variable SRC and of the
 2565 same variable in dataflow set SET. Compute a "sorted" union of the
 2566 location chains for common offsets, i.e. the locations of a variable
 2567 part are sorted by a priority, where the priority is the sum of the
 2568 positions in the two chains (if a location is only in one list, its
 2569 position in the other list is defined to be larger than the length
 2570 of the chains).
 2571 When we are updating the location parts, the newest location is at
 2572 the beginning of the chain, so the described "sorted" union keeps the newest locations at the beginning. */
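/* Worked example of the priorities (illustrative): with source chain
   {A, B} and destination chain {B, C} (src_l = dst_l = 2), B appears
   in both chains and gets priority 0 + 1 = 1, while the single-chain
   entries get a position past any such sum: A gets 0 + 4 = 4 and C
   gets 1 + 4 = 5.  After sorting, the united chain is {B, A, C}.  */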
2573
2574static int
a868678a 2575variable_union (variable src, dataflow_set *set)
5923a5e7 2576{
a868678a 2577 variable dst;
a8f6ad2b 2578 void **dstp;
5923a5e7 2579 int i, j, k;
2580
9845d120 2581 dstp = shared_hash_find_slot (set->vars, src->dv);
a8f6ad2b 2582 if (!dstp || !*dstp)
5923a5e7 2583 {
85bbdb3f 2584 src->refcount++;
2585
9845d120 2586 dst_can_be_shared = false;
2587 if (!dstp)
2588 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2589
2590 *dstp = src;
2591
85bbdb3f 2592 /* Continue traversing the hash table. */
2593 return 1;
5923a5e7 2594 }
2595 else
a8f6ad2b 2596 dst = (variable) *dstp;
5923a5e7 2597
22167fd5 2598 gcc_assert (src->n_var_parts);
72fdb379 2599 gcc_checking_assert (src->onepart == dst->onepart);
5923a5e7 2600
9845d120 2601 /* We can combine one-part variables very efficiently, because their
2602 entries are in canonical order. */
72fdb379 2603 if (src->onepart)
9845d120 2604 {
2605 location_chain *nodep, dnode, snode;
2606
a868678a 2607 gcc_assert (src->n_var_parts == 1
2608 && dst->n_var_parts == 1);
9845d120 2609
2610 snode = src->var_part[0].loc_chain;
2611 gcc_assert (snode);
2612
2613 restart_onepart_unshared:
2614 nodep = &dst->var_part[0].loc_chain;
2615 dnode = *nodep;
2616 gcc_assert (dnode);
2617
2618 while (snode)
2619 {
2620 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2621
2622 if (r > 0)
2623 {
2624 location_chain nnode;
2625
bc95df68 2626 if (shared_var_p (dst, set->vars))
9845d120 2627 {
2628 dstp = unshare_variable (set, dstp, dst,
2629 VAR_INIT_STATUS_INITIALIZED);
2630 dst = (variable)*dstp;
2631 goto restart_onepart_unshared;
2632 }
2633
2634 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2635 nnode->loc = snode->loc;
2636 nnode->init = snode->init;
2637 if (!snode->set_src || MEM_P (snode->set_src))
2638 nnode->set_src = NULL;
2639 else
2640 nnode->set_src = snode->set_src;
2641 nnode->next = dnode;
2642 dnode = nnode;
2643 }
9845d120 2644 else if (r == 0)
1b4345f7 2645 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
9845d120 2646
2647 if (r >= 0)
2648 snode = snode->next;
2649
2650 nodep = &dnode->next;
2651 dnode = *nodep;
2652 }
2653
9845d120 2654 return 1;
2655 }
2656
72fdb379 2657 gcc_checking_assert (!src->onepart);
2658
5923a5e7 2659 /* Count the number of location parts, result is K. */
2660 for (i = 0, j = 0, k = 0;
2661 i < src->n_var_parts && j < dst->n_var_parts; k++)
2662 {
72fdb379 2663 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
5923a5e7 2664 {
2665 i++;
2666 j++;
2667 }
72fdb379 2668 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
5923a5e7 2669 i++;
2670 else
2671 j++;
2672 }
85bbdb3f 2673 k += src->n_var_parts - i;
2674 k += dst->n_var_parts - j;
22167fd5 2675
5923a5e7 2676 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2677 thus there are at most MAX_VAR_PARTS different offsets. */
72fdb379 2678 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
5923a5e7 2679
bc95df68 2680 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
9845d120 2681 {
2682 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2683 dst = (variable)*dstp;
2684 }
2685
5923a5e7 2686 i = src->n_var_parts - 1;
2687 j = dst->n_var_parts - 1;
2688 dst->n_var_parts = k;
2689
2690 for (k--; k >= 0; k--)
2691 {
85bbdb3f 2692 location_chain node, node2;
5923a5e7 2693
2694 if (i >= 0 && j >= 0
72fdb379 2695 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
5923a5e7 2696 {
 2697 /* Compute the "sorted" union of the chains, i.e. the locations which
 2698 are in both chains go first; they are sorted by the sum of their
 2699 positions in the chains. */
2700 int dst_l, src_l;
2701 int ii, jj, n;
2702 struct variable_union_info *vui;
85bbdb3f 2703
 2704 /* If DST is shared, compare the location chains.
 2705 If they are different, we will most likely modify the chain in
 2706 DST, so make a copy of DST. */
bc95df68 2707 if (shared_var_p (dst, set->vars))
85bbdb3f 2708 {
2709 for (node = src->var_part[i].loc_chain,
2710 node2 = dst->var_part[j].loc_chain; node && node2;
2711 node = node->next, node2 = node2->next)
2712 {
8ad4c111 2713 if (!((REG_P (node2->loc)
2714 && REG_P (node->loc)
85bbdb3f 2715 && REGNO (node2->loc) == REGNO (node->loc))
2716 || rtx_equal_p (node2->loc, node->loc)))
4236f54f 2717 {
2718 if (node2->init < node->init)
2719 node2->init = node->init;
2720 break;
2721 }
85bbdb3f 2722 }
2723 if (node || node2)
9845d120 2724 {
2725 dstp = unshare_variable (set, dstp, dst,
2726 VAR_INIT_STATUS_UNKNOWN);
2727 dst = (variable)*dstp;
2728 }
85bbdb3f 2729 }
2730
5923a5e7 2731 src_l = 0;
2732 for (node = src->var_part[i].loc_chain; node; node = node->next)
2733 src_l++;
2734 dst_l = 0;
2735 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2736 dst_l++;
5923a5e7 2737
331cf53a 2738 if (dst_l == 1)
5923a5e7 2739 {
331cf53a 2740 /* The most common case, and much simpler: no qsort is needed. */
2741 location_chain dstnode = dst->var_part[j].loc_chain;
2742 dst->var_part[k].loc_chain = dstnode;
72fdb379 2743 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
331cf53a 2744 node2 = dstnode;
2745 for (node = src->var_part[i].loc_chain; node; node = node->next)
2746 if (!((REG_P (dstnode->loc)
2747 && REG_P (node->loc)
2748 && REGNO (dstnode->loc) == REGNO (node->loc))
2749 || rtx_equal_p (dstnode->loc, node->loc)))
2750 {
2751 location_chain new_node;
2752
2753 /* Copy the location from SRC. */
2754 new_node = (location_chain) pool_alloc (loc_chain_pool);
2755 new_node->loc = node->loc;
2756 new_node->init = node->init;
2757 if (!node->set_src || MEM_P (node->set_src))
2758 new_node->set_src = NULL;
2759 else
2760 new_node->set_src = node->set_src;
2761 node2->next = new_node;
2762 node2 = new_node;
2763 }
2764 node2->next = NULL;
5923a5e7 2765 }
331cf53a 2766 else
5923a5e7 2767 {
331cf53a 2768 if (src_l + dst_l > vui_allocated)
5923a5e7 2769 {
331cf53a 2770 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2771 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2772 vui_allocated);
2773 }
2774 vui = vui_vec;
2775
2776 /* Fill in the locations from DST. */
2777 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2778 node = node->next, jj++)
2779 {
2780 vui[jj].lc = node;
2781 vui[jj].pos_dst = jj;
2782
 2783 /* Its position, plus a value larger than any sum of two valid positions. */
2784 vui[jj].pos = jj + src_l + dst_l;
2785 }
2786
2787 /* Fill in the locations from SRC. */
2788 n = dst_l;
2789 for (node = src->var_part[i].loc_chain, ii = 0; node;
2790 node = node->next, ii++)
2791 {
2792 /* Find location from NODE. */
2793 for (jj = 0; jj < dst_l; jj++)
5923a5e7 2794 {
331cf53a 2795 if ((REG_P (vui[jj].lc->loc)
2796 && REG_P (node->loc)
2797 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2798 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2799 {
2800 vui[jj].pos = jj + ii;
2801 break;
2802 }
2803 }
2804 if (jj >= dst_l) /* The location has not been found. */
2805 {
2806 location_chain new_node;
2807
2808 /* Copy the location from SRC. */
2809 new_node = (location_chain) pool_alloc (loc_chain_pool);
2810 new_node->loc = node->loc;
2811 new_node->init = node->init;
2812 if (!node->set_src || MEM_P (node->set_src))
2813 new_node->set_src = NULL;
2814 else
2815 new_node->set_src = node->set_src;
2816 vui[n].lc = new_node;
2817 vui[n].pos_dst = src_l + dst_l;
2818 vui[n].pos = ii + src_l + dst_l;
2819 n++;
5923a5e7 2820 }
2821 }
331cf53a 2822
2823 if (dst_l == 2)
5923a5e7 2824 {
331cf53a 2825 /* Special-case another still very common case. For dst_l == 2,
 2826 all entries dst_l ... n-1 are already sorted, since for i >= dst_l
 2827 vui[i].pos == i + src_l + dst_l. */
2828 if (vui[0].pos > vui[1].pos)
2829 {
2830 /* Order should be 1, 0, 2... */
2831 dst->var_part[k].loc_chain = vui[1].lc;
2832 vui[1].lc->next = vui[0].lc;
2833 if (n >= 3)
2834 {
2835 vui[0].lc->next = vui[2].lc;
2836 vui[n - 1].lc->next = NULL;
2837 }
2838 else
2839 vui[0].lc->next = NULL;
2840 ii = 3;
2841 }
d53bb226 2842 else
331cf53a 2843 {
2844 dst->var_part[k].loc_chain = vui[0].lc;
2845 if (n >= 3 && vui[2].pos < vui[1].pos)
2846 {
2847 /* Order should be 0, 2, 1, 3... */
2848 vui[0].lc->next = vui[2].lc;
2849 vui[2].lc->next = vui[1].lc;
2850 if (n >= 4)
2851 {
2852 vui[1].lc->next = vui[3].lc;
2853 vui[n - 1].lc->next = NULL;
2854 }
2855 else
2856 vui[1].lc->next = NULL;
2857 ii = 4;
2858 }
2859 else
2860 {
2861 /* Order should be 0, 1, 2... */
2862 ii = 1;
2863 vui[n - 1].lc->next = NULL;
2864 }
2865 }
2866 for (; ii < n; ii++)
2867 vui[ii - 1].lc->next = vui[ii].lc;
2868 }
2869 else
2870 {
2871 qsort (vui, n, sizeof (struct variable_union_info),
2872 variable_union_info_cmp_pos);
2873
2874 /* Reconnect the nodes in sorted order. */
2875 for (ii = 1; ii < n; ii++)
2876 vui[ii - 1].lc->next = vui[ii].lc;
2877 vui[n - 1].lc->next = NULL;
2878 dst->var_part[k].loc_chain = vui[0].lc;
5923a5e7 2879 }
5923a5e7 2880
72fdb379 2881 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
331cf53a 2882 }
5923a5e7 2883 i--;
2884 j--;
2885 }
2886 else if ((i >= 0 && j >= 0
72fdb379 2887 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
5923a5e7 2888 || i < 0)
2889 {
2890 dst->var_part[k] = dst->var_part[j];
2891 j--;
2892 }
2893 else if ((i >= 0 && j >= 0
72fdb379 2894 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
5923a5e7 2895 || j < 0)
2896 {
e0473d22 2897 location_chain *nextp;
5923a5e7 2898
2899 /* Copy the chain from SRC. */
e0473d22 2900 nextp = &dst->var_part[k].loc_chain;
5923a5e7 2901 for (node = src->var_part[i].loc_chain; node; node = node->next)
2902 {
2903 location_chain new_lc;
2904
45ba1503 2905 new_lc = (location_chain) pool_alloc (loc_chain_pool);
5923a5e7 2906 new_lc->next = NULL;
d53bb226 2907 new_lc->init = node->init;
2908 if (!node->set_src || MEM_P (node->set_src))
2909 new_lc->set_src = NULL;
2910 else
2911 new_lc->set_src = node->set_src;
5923a5e7 2912 new_lc->loc = node->loc;
2913
e0473d22 2914 *nextp = new_lc;
2915 nextp = &new_lc->next;
5923a5e7 2916 }
2917
72fdb379 2918 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
5923a5e7 2919 i--;
2920 }
bc95df68 2921 dst->var_part[k].cur_loc = NULL;
5923a5e7 2922 }
2923
331cf53a 2924 if (flag_var_tracking_uninit)
2925 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2926 {
2927 location_chain node, node2;
2928 for (node = src->var_part[i].loc_chain; node; node = node->next)
2929 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2930 if (rtx_equal_p (node->loc, node2->loc))
2931 {
2932 if (node->init > node2->init)
2933 node2->init = node->init;
2934 }
2935 }
d53bb226 2936
5923a5e7 2937 /* Continue traversing the hash table. */
2938 return 1;
2939}
2940
2941/* Compute union of dataflow sets SRC and DST and store it to DST. */
2942
2943static void
2944dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2945{
2946 int i;
2947
2948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2949 attrs_list_union (&dst->regs[i], src->regs[i]);
2950
a8f6ad2b 2951 if (dst->vars == empty_shared_hash)
2952 {
2953 shared_hash_destroy (dst->vars);
2954 dst->vars = shared_hash_copy (src->vars);
a8f6ad2b 2955 }
2956 else
a868678a 2957 {
2958 htab_iterator hi;
2959 variable var;
2960
2961 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2962 variable_union (var, dst);
2963 }
5923a5e7 2964}
2965
9845d120 2966/* Whether the value is currently being expanded. */
2967#define VALUE_RECURSED_INTO(x) \
688ff29b 2968 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
72fdb379 2969
2970/* Whether no expansion was found, saving useless lookups.
2971 It must only be set when VALUE_CHANGED is clear. */
2972#define NO_LOC_P(x) \
2973 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
2974
2975/* Whether cur_loc in the value needs to be (re)computed. */
9845d120 2976#define VALUE_CHANGED(x) \
2977 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
72fdb379 2978/* Whether cur_loc in the decl needs to be (re)computed. */
9845d120 2979#define DECL_CHANGED(x) TREE_VISITED (x)
5923a5e7 2980
72fdb379 2981/* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2982 user DECLs, this means they're in changed_variables. Values and
2983 debug exprs may be left with this flag set if no user variable
2984 requires them to be evaluated. */
5923a5e7 2985
9845d120 2986static inline void
2987set_dv_changed (decl_or_value dv, bool newv)
2988{
72fdb379 2989 switch (dv_onepart_p (dv))
2990 {
2991 case ONEPART_VALUE:
2992 if (newv)
2993 NO_LOC_P (dv_as_value (dv)) = false;
2994 VALUE_CHANGED (dv_as_value (dv)) = newv;
2995 break;
2996
2997 case ONEPART_DEXPR:
2998 if (newv)
2999 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3000 /* Fall through... */
3001
3002 default:
3003 DECL_CHANGED (dv_as_decl (dv)) = newv;
3004 break;
3005 }
5923a5e7 3006}
3007
72fdb379 3008/* Return true if DV needs to have its cur_loc recomputed. */
5923a5e7 3009
9845d120 3010static inline bool
3011dv_changed_p (decl_or_value dv)
5923a5e7 3012{
9845d120 3013 return (dv_is_value_p (dv)
3014 ? VALUE_CHANGED (dv_as_value (dv))
3015 : DECL_CHANGED (dv_as_decl (dv)));
5923a5e7 3016}
3017
493a60c8 3018/* Return a location list node whose loc is rtx_equal to LOC, in the
9845d120 3019 location list of a one-part variable or value VAR, or in that of
ec87b3ce 3020 any values recursively mentioned in the location lists. VARS must
3021 be in star-canonical form. */
5923a5e7 3022
9845d120 3023static location_chain
493a60c8 3024find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
5923a5e7 3025{
9845d120 3026 location_chain node;
4b32df91 3027 enum rtx_code loc_code;
5923a5e7 3028
9845d120 3029 if (!var)
ec87b3ce 3030 return NULL;
5923a5e7 3031
72fdb379 3032 gcc_checking_assert (var->onepart);
5923a5e7 3033
9845d120 3034 if (!var->n_var_parts)
ec87b3ce 3035 return NULL;
5923a5e7 3036
1b4345f7 3037 gcc_checking_assert (loc != dv_as_opaque (var->dv));
5923a5e7 3038
4b32df91 3039 loc_code = GET_CODE (loc);
9845d120 3040 for (node = var->var_part[0].loc_chain; node; node = node->next)
4b32df91 3041 {
ec87b3ce 3042 decl_or_value dv;
3043 variable rvar;
3044
4b32df91 3045 if (GET_CODE (node->loc) != loc_code)
3046 {
3047 if (GET_CODE (node->loc) != VALUE)
3048 continue;
3049 }
3050 else if (loc == node->loc)
ec87b3ce 3051 return node;
4b32df91 3052 else if (loc_code != VALUE)
3053 {
3054 if (rtx_equal_p (loc, node->loc))
ec87b3ce 3055 return node;
4b32df91 3056 continue;
3057 }
9845d120 3058
ec87b3ce 3059 /* Since we're in star-canonical form, we don't need to visit
3060 non-canonical nodes: one-part variables and non-canonical
3061 values would only point back to the canonical node. */
3062 if (dv_is_value_p (var->dv)
3063 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3064 {
3065 /* Skip all subsequent VALUEs. */
3066 while (node->next && GET_CODE (node->next->loc) == VALUE)
4b32df91 3067 {
ec87b3ce 3068 node = node->next;
1b4345f7 3069 gcc_checking_assert (!canon_value_cmp (node->loc,
3070 dv_as_value (var->dv)));
ec87b3ce 3071 if (loc == node->loc)
3072 return node;
4b32df91 3073 }
ec87b3ce 3074 continue;
4b32df91 3075 }
5923a5e7 3076
1b4345f7 3077 gcc_checking_assert (node == var->var_part[0].loc_chain);
3078 gcc_checking_assert (!node->next);
ec87b3ce 3079
3080 dv = dv_from_value (node->loc);
3081 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3082 return find_loc_in_1pdv (loc, rvar, vars);
685ead0b 3083 }
3084
72fdb379 3085 /* ??? Gotta look in cselib_val locations too. */
3086
ec87b3ce 3087 return NULL;
9845d120 3088}
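/* Example (illustrative): looking up (reg:P 1) in the chain of a
   non-canonical value V2, whose only remaining entry is its canonical
   value V1, recurses exactly once into V1's chain in VARS and finds
   the register there; star-canonical form guarantees at most that one
   hop.  */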
5923a5e7 3089
9845d120 3090/* Hash table iteration argument passed to variable_merge. */
3091struct dfset_merge
5923a5e7 3092{
9845d120 3093 /* The set in which the merge is to be inserted. */
3094 dataflow_set *dst;
3095 /* The set that we're iterating in. */
3096 dataflow_set *cur;
3097 /* The set that may contain the other dv we are to merge with. */
3098 dataflow_set *src;
3099 /* Number of onepart dvs in src. */
3100 int src_onepart_cnt;
3101};
5923a5e7 3102
9845d120 3103/* Insert LOC in *DNODE, if it's not there yet. The list must be in
3104 loc_cmp order, and it is maintained as such. */
5923a5e7 3105
3106static void
9845d120 3107insert_into_intersection (location_chain *nodep, rtx loc,
3108 enum var_init_status status)
5923a5e7 3109{
9845d120 3110 location_chain node;
3111 int r;
5923a5e7 3112
9845d120 3113 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3114 if ((r = loc_cmp (node->loc, loc)) == 0)
3115 {
3116 node->init = MIN (node->init, status);
3117 return;
3118 }
3119 else if (r > 0)
3120 break;
5923a5e7 3121
9845d120 3122 node = (location_chain) pool_alloc (loc_chain_pool);
3123
3124 node->loc = loc;
3125 node->set_src = NULL;
3126 node->init = status;
3127 node->next = *nodep;
3128 *nodep = node;
5923a5e7 3129}
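/* Example (illustrative): because the list is kept in loc_cmp order,
   inserting (reg:P 3) into a list already holding (reg:P 1) and
   (mem:P (reg:P 1)) places it between the REG and the MEM; inserting a
   location that is already present only merges the init status,
   keeping the MIN of the two.  */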
3130
72fdb379 3131/* Insert in DEST the intersection of the locations present in both
9845d120 3132 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3133 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3134 DSM->dst. */
5923a5e7 3135
9845d120 3136static void
3137intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3138 location_chain s1node, variable s2var)
5923a5e7 3139{
9845d120 3140 dataflow_set *s1set = dsm->cur;
3141 dataflow_set *s2set = dsm->src;
3142 location_chain found;
5923a5e7 3143
a8c57873 3144 if (s2var)
3145 {
3146 location_chain s2node;
3147
72fdb379 3148 gcc_checking_assert (s2var->onepart);
a8c57873 3149
3150 if (s2var->n_var_parts)
3151 {
a8c57873 3152 s2node = s2var->var_part[0].loc_chain;
3153
3154 for (; s1node && s2node;
3155 s1node = s1node->next, s2node = s2node->next)
3156 if (s1node->loc != s2node->loc)
3157 break;
3158 else if (s1node->loc == val)
3159 continue;
3160 else
3161 insert_into_intersection (dest, s1node->loc,
3162 MIN (s1node->init, s2node->init));
3163 }
3164 }
3165
9845d120 3166 for (; s1node; s1node = s1node->next)
5923a5e7 3167 {
9845d120 3168 if (s1node->loc == val)
3169 continue;
3170
3171 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3172 shared_hash_htab (s2set->vars))))
5923a5e7 3173 {
9845d120 3174 insert_into_intersection (dest, s1node->loc,
3175 MIN (s1node->init, found->init));
3176 continue;
5923a5e7 3177 }
9845d120 3178
3179 if (GET_CODE (s1node->loc) == VALUE
3180 && !VALUE_RECURSED_INTO (s1node->loc))
5923a5e7 3181 {
9845d120 3182 decl_or_value dv = dv_from_value (s1node->loc);
3183 variable svar = shared_hash_find (s1set->vars, dv);
3184 if (svar)
3185 {
3186 if (svar->n_var_parts == 1)
3187 {
3188 VALUE_RECURSED_INTO (s1node->loc) = true;
3189 intersect_loc_chains (val, dest, dsm,
3190 svar->var_part[0].loc_chain,
3191 s2var);
3192 VALUE_RECURSED_INTO (s1node->loc) = false;
3193 }
3194 }
5923a5e7 3195 }
5923a5e7 3196
72fdb379 3197 /* ??? gotta look in cselib_val locations too. */
3198
9845d120 3199 /* ??? if the location is equivalent to any location in src,
3200 searched recursively
5923a5e7 3201
9845d120 3202 add to dst the values needed to represent the equivalence
5923a5e7 3203
9845d120 3204 telling whether locations S is equivalent to another dv's
3205 location list:
5923a5e7 3206
9845d120 3207 for each location D in the list
5923a5e7 3208
9845d120 3209 if S and D satisfy rtx_equal_p, then it is present
5923a5e7 3210
9845d120 3211 else if D is a value, recurse without cycles
bbc7bce1 3212
9845d120 3213 else if S and D have the same CODE and MODE
b77c03bd 3214
9845d120 3215 for each operand oS and the corresponding oD
5923a5e7 3216
9845d120 3217 if oS and oD are not equivalent, then S an D are not equivalent
5923a5e7 3218
9845d120 3219 else if they are RTX vectors
5923a5e7 3220
9845d120 3221 if any vector oS element is not equivalent to its respective oD,
3222 then S and D are not equivalent
5923a5e7 3223
9845d120 3224 */
3225
3226
3227 }
5923a5e7 3228}
3229
9845d120 3230/* Return -1 if X should be before Y in a location list for a 1-part
3231 variable, 1 if Y should be before X, and 0 if they're equivalent
3232 and should not appear in the list. */
96414f01 3233
9845d120 3234static int
3235loc_cmp (rtx x, rtx y)
96414f01 3236{
9845d120 3237 int i, j, r;
3238 RTX_CODE code = GET_CODE (x);
3239 const char *fmt;
96414f01 3240
9845d120 3241 if (x == y)
3242 return 0;
96414f01 3243
9845d120 3244 if (REG_P (x))
96414f01 3245 {
9845d120 3246 if (!REG_P (y))
3247 return -1;
3248 gcc_assert (GET_MODE (x) == GET_MODE (y));
3249 if (REGNO (x) == REGNO (y))
3250 return 0;
3251 else if (REGNO (x) < REGNO (y))
3252 return -1;
3253 else
3254 return 1;
96414f01 3255 }
9845d120 3256
3257 if (REG_P (y))
3258 return 1;
3259
3260 if (MEM_P (x))
96414f01 3261 {
9845d120 3262 if (!MEM_P (y))
3263 return -1;
3264 gcc_assert (GET_MODE (x) == GET_MODE (y));
3265 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
96414f01 3266 }
96414f01 3267
9845d120 3268 if (MEM_P (y))
3269 return 1;
96414f01 3270
9845d120 3271 if (GET_CODE (x) == VALUE)
3272 {
3273 if (GET_CODE (y) != VALUE)
3274 return -1;
a243dd11 3275 /* Don't assert the modes are the same; that is true only
 3276 when not recursing. (subreg:QI (value:SI 1:1) 0)
 3277 and (subreg:QI (value:DI 2:2) 0) can be compared,
 3278 even when the modes are different. */
9845d120 3279 if (canon_value_cmp (x, y))
3280 return -1;
3281 else
3282 return 1;
3283 }
96414f01 3284
9845d120 3285 if (GET_CODE (y) == VALUE)
3286 return 1;
96414f01 3287
72fdb379 3288 /* Entry value is the least preferable kind of expression. */
3289 if (GET_CODE (x) == ENTRY_VALUE)
3290 {
3291 if (GET_CODE (y) != ENTRY_VALUE)
3292 return 1;
3293 gcc_assert (GET_MODE (x) == GET_MODE (y));
f14f4d03 3294 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
72fdb379 3295 }
3296
3297 if (GET_CODE (y) == ENTRY_VALUE)
3298 return -1;
3299
9845d120 3300 if (GET_CODE (x) == GET_CODE (y))
3301 /* Compare operands below. */;
3302 else if (GET_CODE (x) < GET_CODE (y))
3303 return -1;
3304 else
3305 return 1;
3306
3307 gcc_assert (GET_MODE (x) == GET_MODE (y));
3308
bc95df68 3309 if (GET_CODE (x) == DEBUG_EXPR)
3310 {
3311 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3312 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3313 return -1;
1b4345f7 3314 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3315 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
bc95df68 3316 return 1;
3317 }
3318
9845d120 3319 fmt = GET_RTX_FORMAT (code);
3320 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3321 switch (fmt[i])
3322 {
3323 case 'w':
3324 if (XWINT (x, i) == XWINT (y, i))
3325 break;
3326 else if (XWINT (x, i) < XWINT (y, i))
3327 return -1;
3328 else
3329 return 1;
3330
3331 case 'n':
3332 case 'i':
3333 if (XINT (x, i) == XINT (y, i))
3334 break;
3335 else if (XINT (x, i) < XINT (y, i))
3336 return -1;
3337 else
3338 return 1;
3339
3340 case 'V':
3341 case 'E':
3342 /* Compare the vector length first. */
3343 if (XVECLEN (x, i) == XVECLEN (y, i))
 3344 /* Compare the vector elements. */;
3345 else if (XVECLEN (x, i) < XVECLEN (y, i))
3346 return -1;
3347 else
3348 return 1;
3349
3350 for (j = 0; j < XVECLEN (x, i); j++)
3351 if ((r = loc_cmp (XVECEXP (x, i, j),
3352 XVECEXP (y, i, j))))
3353 return r;
3354 break;
3355
3356 case 'e':
3357 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3358 return r;
3359 break;
3360
3361 case 'S':
3362 case 's':
3363 if (XSTR (x, i) == XSTR (y, i))
3364 break;
3365 if (!XSTR (x, i))
3366 return -1;
3367 if (!XSTR (y, i))
3368 return 1;
3369 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3370 break;
3371 else if (r < 0)
3372 return -1;
3373 else
3374 return 1;
3375
3376 case 'u':
3377 /* These are just backpointers, so they don't matter. */
3378 break;
3379
3380 case '0':
3381 case 't':
3382 break;
3383
3384 /* It is believed that rtx's at this level will never
3385 contain anything but integers and other rtx's,
3386 except for within LABEL_REFs and SYMBOL_REFs. */
3387 default:
3388 gcc_unreachable ();
3389 }
3390
3391 return 0;
3392}
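/* Summary of the resulting total order (illustrative): REGs sort first
   (by REGNO), then MEMs (by their addresses), then VALUEs (more
   canonical first), then all remaining codes by GET_CODE and their
   operands, with ENTRY_VALUEs sorting after everything else.  For
   instance, loc_cmp ((reg:P 1), (mem:P (reg:P 1))) < 0 and
   loc_cmp ((value:P V), (reg:P 1)) > 0.  */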
3393
ace62c8c 3394#if ENABLE_CHECKING
9845d120 3395/* Check the order of entries in one-part variables. */
3396
3397static int
3398canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3399{
3400 variable var = (variable) *slot;
9845d120 3401 location_chain node, next;
3402
bc95df68 3403#ifdef ENABLE_RTL_CHECKING
3404 int i;
3405 for (i = 0; i < var->n_var_parts; i++)
3406 gcc_assert (var->var_part[0].cur_loc == NULL);
72fdb379 3407 gcc_assert (!var->in_changed_variables);
bc95df68 3408#endif
3409
72fdb379 3410 if (!var->onepart)
9845d120 3411 return 1;
3412
3413 gcc_assert (var->n_var_parts == 1);
3414 node = var->var_part[0].loc_chain;
3415 gcc_assert (node);
3416
3417 while ((next = node->next))
3418 {
3419 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3420 node = next;
3421 }
3422
3423 return 1;
3424}
3425#endif
3426
3427/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3428 more likely to be chosen as canonical for an equivalence set.
3429 Ensure less likely values can reach more likely neighbors, making
3430 the connections bidirectional. */
3431
3432static int
3433canonicalize_values_mark (void **slot, void *data)
3434{
3435 dataflow_set *set = (dataflow_set *)data;
3436 variable var = (variable) *slot;
3437 decl_or_value dv = var->dv;
3438 rtx val;
3439 location_chain node;
3440
3441 if (!dv_is_value_p (dv))
3442 return 1;
3443
0ea2d350 3444 gcc_checking_assert (var->n_var_parts == 1);
9845d120 3445
3446 val = dv_as_value (dv);
3447
3448 for (node = var->var_part[0].loc_chain; node; node = node->next)
3449 if (GET_CODE (node->loc) == VALUE)
3450 {
3451 if (canon_value_cmp (node->loc, val))
3452 VALUE_RECURSED_INTO (val) = true;
3453 else
3454 {
3455 decl_or_value odv = dv_from_value (node->loc);
3456 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3457
8f9c21d9 3458 set_slot_part (set, val, oslot, odv, 0,
3459 node->init, NULL_RTX);
9845d120 3460
3461 VALUE_RECURSED_INTO (node->loc) = true;
3462 }
3463 }
3464
3465 return 1;
3466}
3467
3468/* Remove redundant entries from equivalence lists in onepart
3469 variables, canonicalizing equivalence sets into star shapes. */
3470
3471static int
3472canonicalize_values_star (void **slot, void *data)
3473{
3474 dataflow_set *set = (dataflow_set *)data;
3475 variable var = (variable) *slot;
3476 decl_or_value dv = var->dv;
3477 location_chain node;
3478 decl_or_value cdv;
3479 rtx val, cval;
3480 void **cslot;
3481 bool has_value;
3482 bool has_marks;
3483
72fdb379 3484 if (!var->onepart)
9845d120 3485 return 1;
3486
0ea2d350 3487 gcc_checking_assert (var->n_var_parts == 1);
9845d120 3488
3489 if (dv_is_value_p (dv))
3490 {
3491 cval = dv_as_value (dv);
3492 if (!VALUE_RECURSED_INTO (cval))
3493 return 1;
3494 VALUE_RECURSED_INTO (cval) = false;
3495 }
3496 else
3497 cval = NULL_RTX;
3498
3499 restart:
3500 val = cval;
3501 has_value = false;
3502 has_marks = false;
3503
3504 gcc_assert (var->n_var_parts == 1);
3505
3506 for (node = var->var_part[0].loc_chain; node; node = node->next)
3507 if (GET_CODE (node->loc) == VALUE)
3508 {
3509 has_value = true;
3510 if (VALUE_RECURSED_INTO (node->loc))
3511 has_marks = true;
3512 if (canon_value_cmp (node->loc, cval))
3513 cval = node->loc;
3514 }
3515
3516 if (!has_value)
3517 return 1;
3518
3519 if (cval == val)
3520 {
3521 if (!has_marks || dv_is_decl_p (dv))
3522 return 1;
3523
 3524 /* Keep it marked so that we revisit it, either after visiting a
 3525 child node, or after visiting a new parent that might later be
 3526 discovered. */
3527 VALUE_RECURSED_INTO (val) = true;
3528
3529 for (node = var->var_part[0].loc_chain; node; node = node->next)
3530 if (GET_CODE (node->loc) == VALUE
3531 && VALUE_RECURSED_INTO (node->loc))
3532 {
3533 cval = node->loc;
3534 restart_with_cval:
3535 VALUE_RECURSED_INTO (cval) = false;
3536 dv = dv_from_value (cval);
3537 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3538 if (!slot)
3539 {
3540 gcc_assert (dv_is_decl_p (var->dv));
3541 /* The canonical value was reset and dropped.
3542 Remove it. */
3543 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3544 return 1;
3545 }
3546 var = (variable)*slot;
3547 gcc_assert (dv_is_value_p (var->dv));
3548 if (var->n_var_parts == 0)
3549 return 1;
3550 gcc_assert (var->n_var_parts == 1);
3551 goto restart;
3552 }
3553
3554 VALUE_RECURSED_INTO (val) = false;
3555
3556 return 1;
3557 }
3558
3559 /* Push values to the canonical one. */
3560 cdv = dv_from_value (cval);
3561 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3562
3563 for (node = var->var_part[0].loc_chain; node; node = node->next)
3564 if (node->loc != cval)
3565 {
3566 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3567 node->init, NULL_RTX);
3568 if (GET_CODE (node->loc) == VALUE)
3569 {
3570 decl_or_value ndv = dv_from_value (node->loc);
3571
3572 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3573 NO_INSERT);
3574
3575 if (canon_value_cmp (node->loc, val))
3576 {
 3577 /* If it could have been a local minimum, it's not any more,
 3578 since it's now a neighbor of cval, so it may have to push
3579 to it. Conversely, if it wouldn't have prevailed over
3580 val, then whatever mark it has is fine: if it was to
3581 push, it will now push to a more canonical node, but if
3582 it wasn't, then it has already pushed any values it might
3583 have to. */
3584 VALUE_RECURSED_INTO (node->loc) = true;
 3585 /* Make sure we visit node->loc by ensuring that cval is
 3586 visited too. */
3587 VALUE_RECURSED_INTO (cval) = true;
3588 }
3589 else if (!VALUE_RECURSED_INTO (node->loc))
3590 /* If we have no need to "recurse" into this node, it's
3591 already "canonicalized", so drop the link to the old
3592 parent. */
3593 clobber_variable_part (set, cval, ndv, 0, NULL);
3594 }
3595 else if (GET_CODE (node->loc) == REG)
3596 {
3597 attrs list = set->regs[REGNO (node->loc)], *listp;
3598
3599 /* Change an existing attribute referring to dv so that it
3600 refers to cdv, removing any duplicate this might
3601 introduce, and checking that no previous duplicates
3602 existed, all in a single pass. */
3603
3604 while (list)
3605 {
3606 if (list->offset == 0
3607 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3608 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3609 break;
3610
3611 list = list->next;
3612 }
3613
3614 gcc_assert (list);
3615 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3616 {
3617 list->dv = cdv;
3618 for (listp = &list->next; (list = *listp); listp = &list->next)
3619 {
3620 if (list->offset)
3621 continue;
3622
3623 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3624 {
3625 *listp = list->next;
3626 pool_free (attrs_pool, list);
3627 list = *listp;
3628 break;
3629 }
3630
3631 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3632 }
3633 }
3634 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3635 {
3636 for (listp = &list->next; (list = *listp); listp = &list->next)
3637 {
3638 if (list->offset)
3639 continue;
3640
3641 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3642 {
3643 *listp = list->next;
3644 pool_free (attrs_pool, list);
3645 list = *listp;
3646 break;
3647 }
3648
3649 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3650 }
3651 }
3652 else
3653 gcc_unreachable ();
3654
3655#if ENABLE_CHECKING
3656 while (list)
3657 {
3658 if (list->offset == 0
3659 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3660 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3661 gcc_unreachable ();
3662
3663 list = list->next;
3664 }
3665#endif
3666 }
3667 }
3668
3669 if (val)
8f9c21d9 3670 set_slot_part (set, val, cslot, cdv, 0,
3671 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
9845d120 3672
3673 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3674
3675 /* Variable may have been unshared. */
3676 var = (variable)*slot;
0ea2d350 3677 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3678 && var->var_part[0].loc_chain->next == NULL);
9845d120 3679
3680 if (VALUE_RECURSED_INTO (cval))
3681 goto restart_with_cval;
3682
3683 return 1;
3684}
3685
ed5ab8eb 3686/* Bind one-part variables to the canonical value in an equivalence
3687 set. Not doing this causes dataflow convergence failure in rare
3688 circumstances, see PR42873. Unfortunately we can't do this
3689 efficiently as part of canonicalize_values_star, since we may not
3690 have determined or even seen the canonical value of a set when we
3691 get to a variable that references another member of the set. */
3692
3693static int
3694canonicalize_vars_star (void **slot, void *data)
3695{
3696 dataflow_set *set = (dataflow_set *)data;
3697 variable var = (variable) *slot;
3698 decl_or_value dv = var->dv;
3699 location_chain node;
3700 rtx cval;
3701 decl_or_value cdv;
3702 void **cslot;
3703 variable cvar;
3704 location_chain cnode;
3705
72fdb379 3706 if (!var->onepart || var->onepart == ONEPART_VALUE)
ed5ab8eb 3707 return 1;
3708
3709 gcc_assert (var->n_var_parts == 1);
3710
3711 node = var->var_part[0].loc_chain;
3712
3713 if (GET_CODE (node->loc) != VALUE)
3714 return 1;
3715
3716 gcc_assert (!node->next);
3717 cval = node->loc;
3718
3719 /* Push values to the canonical one. */
3720 cdv = dv_from_value (cval);
3721 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3722 if (!cslot)
3723 return 1;
3724 cvar = (variable)*cslot;
3725 gcc_assert (cvar->n_var_parts == 1);
3726
3727 cnode = cvar->var_part[0].loc_chain;
3728
3729 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3730 that are not "more canonical" than it. */
3731 if (GET_CODE (cnode->loc) != VALUE
3732 || !canon_value_cmp (cnode->loc, cval))
3733 return 1;
3734
3735 /* CVAL was found to be non-canonical. Change the variable to point
3736 to the canonical VALUE. */
3737 gcc_assert (!cnode->next);
3738 cval = cnode->loc;
3739
3740 slot = set_slot_part (set, cval, slot, dv, 0,
3741 node->init, node->set_src);
8f9c21d9 3742 clobber_slot_part (set, cval, slot, 0, node->set_src);
ed5ab8eb 3743
3744 return 1;
3745}
3746
9845d120 3747/* Combine variable or value in *S1SLOT (in DSM->cur) with the
3748 corresponding entry in DSM->src. Multi-part variables are combined
3749 with variable_union, whereas onepart dvs are combined with
3750 intersection. */
3751
3752static int
a868678a 3753variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
9845d120 3754{
9845d120 3755 dataflow_set *dst = dsm->dst;
3756 void **dstslot;
9845d120 3757 variable s2var, dvar = NULL;
3758 decl_or_value dv = s1var->dv;
72fdb379 3759 onepart_enum_t onepart = s1var->onepart;
9845d120 3760 rtx val;
3761 hashval_t dvhash;
3762 location_chain node, *nodep;
3763
3764 /* If the incoming onepart variable has an empty location list, then
3765 the intersection will be just as empty. For other variables,
3766 it's always union. */
0ea2d350 3767 gcc_checking_assert (s1var->n_var_parts
3768 && s1var->var_part[0].loc_chain);
9845d120 3769
3770 if (!onepart)
a868678a 3771 return variable_union (s1var, dst);
9845d120 3772
72fdb379 3773 gcc_checking_assert (s1var->n_var_parts == 1);
9845d120 3774
3775 dvhash = dv_htab_hash (dv);
3776 if (dv_is_value_p (dv))
3777 val = dv_as_value (dv);
3778 else
3779 val = NULL;
3780
3781 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3782 if (!s2var)
3783 {
3784 dst_can_be_shared = false;
3785 return 1;
3786 }
3787
3788 dsm->src_onepart_cnt--;
a868678a 3789 gcc_assert (s2var->var_part[0].loc_chain
72fdb379 3790 && s2var->onepart == onepart
3791 && s2var->n_var_parts == 1);
9845d120 3792
3793 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3794 if (dstslot)
3795 {
3796 dvar = (variable)*dstslot;
a868678a 3797 gcc_assert (dvar->refcount == 1
72fdb379 3798 && dvar->onepart == onepart
3799 && dvar->n_var_parts == 1);
9845d120 3800 nodep = &dvar->var_part[0].loc_chain;
3801 }
3802 else
3803 {
3804 nodep = &node;
3805 node = NULL;
3806 }
3807
3808 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3809 {
3810 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3811 dvhash, INSERT);
3812 *dstslot = dvar = s2var;
3813 dvar->refcount++;
3814 }
3815 else
3816 {
3817 dst_can_be_shared = false;
3818
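 /* Compute the intersection of S1VAR's and S2VAR's location chains
    into *NODEP; a fresh destination variable is allocated below if
    one does not exist yet. */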
3819 intersect_loc_chains (val, nodep, dsm,
3820 s1var->var_part[0].loc_chain, s2var);
3821
3822 if (!dstslot)
3823 {
3824 if (node)
3825 {
72fdb379 3826 dvar = (variable) pool_alloc (onepart_pool (onepart));
9845d120 3827 dvar->dv = dv;
3828 dvar->refcount = 1;
3829 dvar->n_var_parts = 1;
72fdb379 3830 dvar->onepart = onepart;
bc95df68 3831 dvar->in_changed_variables = false;
9845d120 3832 dvar->var_part[0].loc_chain = node;
bc95df68 3833 dvar->var_part[0].cur_loc = NULL;
72fdb379 3834 if (onepart)
3835 VAR_LOC_1PAUX (dvar) = NULL;
3836 else
3837 VAR_PART_OFFSET (dvar, 0) = 0;
9845d120 3838
3839 dstslot
3840 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3841 INSERT);
3842 gcc_assert (!*dstslot);
3843 *dstslot = dvar;
3844 }
3845 else
3846 return 1;
3847 }
3848 }
3849
3850 nodep = &dvar->var_part[0].loc_chain;
3851 while ((node = *nodep))
3852 {
3853 location_chain *nextp = &node->next;
3854
3855 if (GET_CODE (node->loc) == REG)
3856 {
3857 attrs list;
3858
3859 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3860 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3861 && dv_is_value_p (list->dv))
3862 break;
3863
3864 if (!list)
3865 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3866 dv, 0, node->loc);
3867 /* If this value became canonical for another value that had
3868 this register, we want to leave it alone. */
3869 else if (dv_as_value (list->dv) != val)
3870 {
3871 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3872 dstslot, dv, 0,
3873 node->init, NULL_RTX);
3874 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3875
3876 /* Since nextp points into the removed node, we can't
3877 use it. The pointer to the next node moved to nodep.
3878 However, if the variable we're walking is unshared
3879 during our walk, we'll keep walking the location list
3880 of the previously-shared variable, in which case the
3881 node won't have been removed, and we'll want to skip
3882 it. That's why we test *nodep here. */
3883 if (*nodep != node)
3884 nextp = nodep;
3885 }
3886 }
3887 else
3888 /* Canonicalization puts registers first, so we don't have to
3889 walk it all. */
3890 break;
3891 nodep = nextp;
3892 }
3893
3894 if (dvar != (variable)*dstslot)
3895 dvar = (variable)*dstslot;
3896 nodep = &dvar->var_part[0].loc_chain;
3897
3898 if (val)
3899 {
3900 /* Mark all referenced nodes for canonicalization, and make sure
3901 we have mutual equivalence links. */
3902 VALUE_RECURSED_INTO (val) = true;
3903 for (node = *nodep; node; node = node->next)
3904 if (GET_CODE (node->loc) == VALUE)
3905 {
3906 VALUE_RECURSED_INTO (node->loc) = true;
3907 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3908 node->init, NULL, INSERT);
3909 }
3910
3911 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3912 gcc_assert (*dstslot == dvar);
3913 canonicalize_values_star (dstslot, dst);
1b4345f7 3914 gcc_checking_assert (dstslot
3915 == shared_hash_find_slot_noinsert_1 (dst->vars,
3916 dv, dvhash));
9845d120 3917 dvar = (variable)*dstslot;
3918 }
3919 else
3920 {
3921 bool has_value = false, has_other = false;
3922
3923 /* If we have one value and anything else, we're going to
3924 canonicalize this, so make sure all values have an entry in
3925 the table and are marked for canonicalization. */
3926 for (node = *nodep; node; node = node->next)
3927 {
3928 if (GET_CODE (node->loc) == VALUE)
3929 {
3930 /* If this was marked during register canonicalization,
3931 we know we have to canonicalize values. */
3932 if (has_value)
3933 has_other = true;
3934 has_value = true;
3935 if (has_other)
3936 break;
3937 }
3938 else
3939 {
3940 has_other = true;
3941 if (has_value)
3942 break;
3943 }
3944 }
3945
3946 if (has_value && has_other)
3947 {
3948 for (node = *nodep; node; node = node->next)
3949 {
3950 if (GET_CODE (node->loc) == VALUE)
3951 {
3952 decl_or_value dv = dv_from_value (node->loc);
3953 void **slot = NULL;
3954
3955 if (shared_hash_shared (dst->vars))
3956 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3957 if (!slot)
3958 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3959 INSERT);
3960 if (!*slot)
3961 {
72fdb379 3962 variable var = (variable) pool_alloc (onepart_pool
3963 (ONEPART_VALUE));
9845d120 3964 var->dv = dv;
3965 var->refcount = 1;
3966 var->n_var_parts = 1;
72fdb379 3967 var->onepart = ONEPART_VALUE;
bc95df68 3968 var->in_changed_variables = false;
9845d120 3969 var->var_part[0].loc_chain = NULL;
3970 var->var_part[0].cur_loc = NULL;
72fdb379 3971 VAR_LOC_1PAUX (var) = NULL;
9845d120 3972 *slot = var;
3973 }
3974
3975 VALUE_RECURSED_INTO (node->loc) = true;
3976 }
3977 }
3978
3979 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3980 gcc_assert (*dstslot == dvar);
3981 canonicalize_values_star (dstslot, dst);
1b4345f7 3982 gcc_checking_assert (dstslot
3983 == shared_hash_find_slot_noinsert_1 (dst->vars,
3984 dv, dvhash));
9845d120 3985 dvar = (variable)*dstslot;
3986 }
3987 }
3988
3989 if (!onepart_variable_different_p (dvar, s2var))
3990 {
3991 variable_htab_free (dvar);
3992 *dstslot = dvar = s2var;
3993 dvar->refcount++;
3994 }
3995 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3996 {
3997 variable_htab_free (dvar);
3998 *dstslot = dvar = s1var;
3999 dvar->refcount++;
4000 dst_can_be_shared = false;
4001 }
4002 else
bc95df68 4003 dst_can_be_shared = false;
9845d120 4004
4005 return 1;
4006}
4007
493a60c8 4008/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4009 multi-part variable. Unions of multi-part variables and
4010 intersections of one-part ones will be handled in
4011 variable_merge_over_cur(). */
9845d120 4012
4013static int
a868678a 4014variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
9845d120 4015{
9845d120 4016 dataflow_set *dst = dsm->dst;
9845d120 4017 decl_or_value dv = s2var->dv;
9845d120 4018
72fdb379 4019 if (!s2var->onepart)
9845d120 4020 {
4021 void **dstp = shared_hash_find_slot (dst->vars, dv);
4022 *dstp = s2var;
4023 s2var->refcount++;
bc95df68 4024 return 1;
9845d120 4025 }
4026
4027 dsm->src_onepart_cnt++;
4028 return 1;
4029}
4030
493a60c8 4031/* Combine dataflow set information from SRC2 into DST, using PDST
9845d120 4032 to carry over information across passes. */
4033
4034static void
493a60c8 4035dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
9845d120 4036{
493a60c8 4037 dataflow_set cur = *dst;
4038 dataflow_set *src1 = &cur;
9845d120 4039 struct dfset_merge dsm;
4040 int i;
493a60c8 4041 size_t src1_elems, src2_elems;
a868678a 4042 htab_iterator hi;
4043 variable var;
9845d120 4044
493a60c8 4045 src1_elems = htab_elements (shared_hash_htab (src1->vars));
4046 src2_elems = htab_elements (shared_hash_htab (src2->vars));
9845d120 4047 dataflow_set_init (dst);
493a60c8 4048 dst->stack_adjust = cur.stack_adjust;
9845d120 4049 shared_hash_destroy (dst->vars);
4050 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4051 dst->vars->refcount = 1;
4052 dst->vars->htab
493a60c8 4053 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
9845d120 4054 variable_htab_eq, variable_htab_free);
4055
4056 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
493a60c8 4057 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
9845d120 4058
4059 dsm.dst = dst;
493a60c8 4060 dsm.src = src2;
4061 dsm.cur = src1;
9845d120 4062 dsm.src_onepart_cnt = 0;
4063
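 /* First copy multi-part variables from SRC2 (counting its one-part
    variables along the way), then merge in each variable from CUR,
    the original contents of DST: multi-part variables are unioned,
    one-part ones are intersected. */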
a868678a 4064 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
4065 variable_merge_over_src (var, &dsm);
4066 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
4067 variable_merge_over_cur (var, &dsm);
9845d120 4068
4069 if (dsm.src_onepart_cnt)
4070 dst_can_be_shared = false;
4071
493a60c8 4072 dataflow_set_destroy (src1);
9845d120 4073}
4074
4075/* Mark register equivalences. */
4076
4077static void
4078dataflow_set_equiv_regs (dataflow_set *set)
4079{
4080 int i;
4081 attrs list, *listp;
4082
4083 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4084 {
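 /* For each machine mode, CANON will hold the most canonical VALUE
    (according to canon_value_cmp) bound to register I at offset 0,
    as determined by the first loop below. */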
4085 rtx canon[NUM_MACHINE_MODES];
4086
beca87f7 4087 /* If the list is empty or has a single entry, there is nothing to
9879e618 4088 canonicalize. */
4089 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
beca87f7 4090 continue;
4091
9845d120 4092 memset (canon, 0, sizeof (canon));
4093
4094 for (list = set->regs[i]; list; list = list->next)
4095 if (list->offset == 0 && dv_is_value_p (list->dv))
4096 {
4097 rtx val = dv_as_value (list->dv);
4098 rtx *cvalp = &canon[(int)GET_MODE (val)];
4099 rtx cval = *cvalp;
4100
4101 if (canon_value_cmp (val, cval))
4102 *cvalp = val;
4103 }
4104
4105 for (list = set->regs[i]; list; list = list->next)
4106 if (list->offset == 0 && dv_onepart_p (list->dv))
4107 {
4108 rtx cval = canon[(int)GET_MODE (list->loc)];
4109
4110 if (!cval)
4111 continue;
4112
4113 if (dv_is_value_p (list->dv))
4114 {
4115 rtx val = dv_as_value (list->dv);
4116
4117 if (val == cval)
4118 continue;
4119
4120 VALUE_RECURSED_INTO (val) = true;
4121 set_variable_part (set, val, dv_from_value (cval), 0,
4122 VAR_INIT_STATUS_INITIALIZED,
4123 NULL, NO_INSERT);
4124 }
4125
4126 VALUE_RECURSED_INTO (cval) = true;
4127 set_variable_part (set, cval, list->dv, 0,
4128 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4129 }
4130
4131 for (listp = &set->regs[i]; (list = *listp);
4132 listp = list ? &list->next : listp)
4133 if (list->offset == 0 && dv_onepart_p (list->dv))
4134 {
4135 rtx cval = canon[(int)GET_MODE (list->loc)];
4136 void **slot;
4137
4138 if (!cval)
4139 continue;
4140
4141 if (dv_is_value_p (list->dv))
4142 {
4143 rtx val = dv_as_value (list->dv);
4144 if (!VALUE_RECURSED_INTO (val))
4145 continue;
4146 }
4147
4148 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4149 canonicalize_values_star (slot, set);
4150 if (*listp != list)
4151 list = NULL;
4152 }
4153 }
4154}
4155
4156/* Remove any redundant values in the location list of VAR, which must
4157 be unshared and 1-part. */
4158
4159static void
4160remove_duplicate_values (variable var)
4161{
4162 location_chain node, *nodep;
4163
72fdb379 4164 gcc_assert (var->onepart);
9845d120 4165 gcc_assert (var->n_var_parts == 1);
4166 gcc_assert (var->refcount == 1);
4167
4168 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4169 {
4170 if (GET_CODE (node->loc) == VALUE)
4171 {
4172 if (VALUE_RECURSED_INTO (node->loc))
4173 {
4174 /* Remove duplicate value node. */
4175 *nodep = node->next;
4176 pool_free (loc_chain_pool, node);
4177 continue;
4178 }
4179 else
4180 VALUE_RECURSED_INTO (node->loc) = true;
4181 }
4182 nodep = &node->next;
4183 }
4184
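 /* Clear the VALUE_RECURSED_INTO marks set above, leaving the
    remaining VALUEs unmarked. */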
4185 for (node = var->var_part[0].loc_chain; node; node = node->next)
4186 if (GET_CODE (node->loc) == VALUE)
4187 {
4188 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4189 VALUE_RECURSED_INTO (node->loc) = false;
4190 }
4191}
4192
4193
4194 /* Hash table iteration argument passed to the variable_post_merge_* functions. */
4195struct dfset_post_merge
4196{
4197 /* The new input set for the current block. */
4198 dataflow_set *set;
4199 /* Pointer to the permanent input set for the current block, or
4200 NULL. */
4201 dataflow_set **permp;
4202};
4203
4204/* Create values for incoming expressions associated with one-part
4205 variables that don't have value numbers for them. */
4206
4207static int
4208variable_post_merge_new_vals (void **slot, void *info)
4209{
4210 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4211 dataflow_set *set = dfpm->set;
4212 variable var = (variable)*slot;
4213 location_chain node;
4214
72fdb379 4215 if (!var->onepart || !var->n_var_parts)
9845d120 4216 return 1;
4217
4218 gcc_assert (var->n_var_parts == 1);
4219
4220 if (dv_is_decl_p (var->dv))
4221 {
4222 bool check_dupes = false;
4223
4224 restart:
4225 for (node = var->var_part[0].loc_chain; node; node = node->next)
4226 {
4227 if (GET_CODE (node->loc) == VALUE)
4228 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4229 else if (GET_CODE (node->loc) == REG)
4230 {
4231 attrs att, *attp, *curp = NULL;
4232
4233 if (var->refcount != 1)
4234 {
4235 slot = unshare_variable (set, slot, var,
4236 VAR_INIT_STATUS_INITIALIZED);
4237 var = (variable)*slot;
4238 goto restart;
4239 }
4240
4241 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4242 attp = &att->next)
4243 if (att->offset == 0
4244 && GET_MODE (att->loc) == GET_MODE (node->loc))
4245 {
4246 if (dv_is_value_p (att->dv))
4247 {
4248 rtx cval = dv_as_value (att->dv);
4249 node->loc = cval;
4250 check_dupes = true;
4251 break;
4252 }
4253 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4254 curp = attp;
4255 }
4256
4257 if (!curp)
4258 {
4259 curp = attp;
4260 while (*curp)
4261 if ((*curp)->offset == 0
4262 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4263 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4264 break;
4265 else
4266 curp = &(*curp)->next;
4267 gcc_assert (*curp);
4268 }
4269
4270 if (!att)
4271 {
4272 decl_or_value cdv;
4273 rtx cval;
4274
4275 if (!*dfpm->permp)
4276 {
4277 *dfpm->permp = XNEW (dataflow_set);
4278 dataflow_set_init (*dfpm->permp);
4279 }
4280
4281 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4282 att; att = att->next)
4283 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4284 {
a868678a 4285 gcc_assert (att->offset == 0
4286 && dv_is_value_p (att->dv));
9845d120 4287 val_reset (set, att->dv);
4288 break;
4289 }
4290
4291 if (att)
4292 {
4293 cdv = att->dv;
4294 cval = dv_as_value (cdv);
4295 }
4296 else
4297 {
4298 /* Create a unique value to hold this register,
4299 that ought to be found and reused in
4300 subsequent rounds. */
4301 cselib_val *v;
4302 gcc_assert (!cselib_lookup (node->loc,
1f864115 4303 GET_MODE (node->loc), 0,
4304 VOIDmode));
4305 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4306 VOIDmode);
9845d120 4307 cselib_preserve_value (v);
4308 cselib_invalidate_rtx (node->loc);
4309 cval = v->val_rtx;
4310 cdv = dv_from_value (cval);
4311 if (dump_file)
4312 fprintf (dump_file,
01df1184 4313 "Created new value %u:%u for reg %i\n",
4314 v->uid, v->hash, REGNO (node->loc));
9845d120 4315 }
4316
4317 var_reg_decl_set (*dfpm->permp, node->loc,
4318 VAR_INIT_STATUS_INITIALIZED,
4319 cdv, 0, NULL, INSERT);
4320
4321 node->loc = cval;
4322 check_dupes = true;
4323 }
4324
4325 /* Remove the attribute referring to the decl, which now
4326 uses the value for the register, whether already existing
4327 or to be added when we bring the permanent set in. */
4328 att = *curp;
4329 *curp = att->next;
4330 pool_free (attrs_pool, att);
4331 }
4332 }
4333
4334 if (check_dupes)
4335 remove_duplicate_values (var);
4336 }
4337
4338 return 1;
4339}
4340
4341/* Reset values in the permanent set that are not associated with the
4342 chosen expression. */
4343
4344static int
4345variable_post_merge_perm_vals (void **pslot, void *info)
4346{
4347 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4348 dataflow_set *set = dfpm->set;
4349 variable pvar = (variable)*pslot, var;
4350 location_chain pnode;
4351 decl_or_value dv;
4352 attrs att;
4353
a868678a 4354 gcc_assert (dv_is_value_p (pvar->dv)
4355 && pvar->n_var_parts == 1);
9845d120 4356 pnode = pvar->var_part[0].loc_chain;
a868678a 4357 gcc_assert (pnode
4358 && !pnode->next
4359 && REG_P (pnode->loc));
9845d120 4360
4361 dv = pvar->dv;
4362
4363 var = shared_hash_find (set->vars, dv);
4364 if (var)
4365 {
ec87b3ce 4366 /* Although variable_post_merge_new_vals may have made decls
4367 non-star-canonical, values that pre-existed in canonical form
4368 remain canonical, and newly-created values reference a single
4369 REG, so they are canonical as well. Since VAR has the
4370 location list for a VALUE, using find_loc_in_1pdv for it is
4371 fine, since VALUEs don't map back to DECLs. */
9845d120 4372 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4373 return 1;
4374 val_reset (set, dv);
4375 }
4376
4377 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4378 if (att->offset == 0
4379 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4380 && dv_is_value_p (att->dv))
4381 break;
4382
4383 /* If there is a value associated with this register already, create
4384 an equivalence. */
4385 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4386 {
4387 rtx cval = dv_as_value (att->dv);
4388 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4389 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4390 NULL, INSERT);
4391 }
4392 else if (!att)
4393 {
4394 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4395 dv, 0, pnode->loc);
a868678a 4396 variable_union (pvar, set);
9845d120 4397 }
4398
4399 return 1;
4400}
4401
4402 /* Adjust SET after merging: create values for one-part variables,
4403 reconcile them with the permanent set *PERMP, and canonicalize. */
4404
4405static void
4406dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4407{
4408 struct dfset_post_merge dfpm;
4409
4410 dfpm.set = set;
4411 dfpm.permp = permp;
4412
4413 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4414 &dfpm);
4415 if (*permp)
4416 htab_traverse (shared_hash_htab ((*permp)->vars),
4417 variable_post_merge_perm_vals, &dfpm);
4418 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
ed5ab8eb 4419 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
9845d120 4420}
4421
4422/* Return a node whose loc is a MEM that refers to EXPR in the
4423 location list of a one-part variable or value VAR, or in that of
4424 any values recursively mentioned in the location lists. */
4425
4426static location_chain
4427find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4428{
4429 location_chain node;
4430 decl_or_value dv;
4431 variable var;
4432 location_chain where = NULL;
4433
4434 if (!val)
4435 return NULL;
4436
a868678a 4437 gcc_assert (GET_CODE (val) == VALUE
4438 && !VALUE_RECURSED_INTO (val));
9845d120 4439
4440 dv = dv_from_value (val);
4441 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4442
4443 if (!var)
4444 return NULL;
4445
72fdb379 4446 gcc_assert (var->onepart);
9845d120 4447
4448 if (!var->n_var_parts)
4449 return NULL;
4450
9845d120 4451 VALUE_RECURSED_INTO (val) = true;
4452
4453 for (node = var->var_part[0].loc_chain; node; node = node->next)
d0a639c4 4454 if (MEM_P (node->loc)
4455 && MEM_EXPR (node->loc) == expr
4456 && INT_MEM_OFFSET (node->loc) == 0)
9845d120 4457 {
4458 where = node;
4459 break;
4460 }
4461 else if (GET_CODE (node->loc) == VALUE
4462 && !VALUE_RECURSED_INTO (node->loc)
4463 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4464 break;
4465
4466 VALUE_RECURSED_INTO (val) = false;
4467
4468 return where;
4469}
4470
0358713f 4471/* Return TRUE if the value of MEM may vary across a call. */
4472
4473static bool
4474mem_dies_at_call (rtx mem)
4475{
4476 tree expr = MEM_EXPR (mem);
4477 tree decl;
4478
4479 if (!expr)
4480 return true;
4481
4482 decl = get_base_address (expr);
4483
4484 if (!decl)
4485 return true;
4486
4487 if (!DECL_P (decl))
4488 return true;
4489
4490 return (may_be_aliased (decl)
4491 || (!TREE_READONLY (decl) && is_global_var (decl)));
4492}
4493
9845d120 4494/* Remove all MEMs from the location list of a hash table entry for a
4495 one-part variable, except those whose MEM attributes map back to
0358713f 4496 the variable itself, directly or within a VALUE. */
9845d120 4497
4498static int
4499dataflow_set_preserve_mem_locs (void **slot, void *data)
4500{
4501 dataflow_set *set = (dataflow_set *) data;
4502 variable var = (variable) *slot;
4503
72fdb379 4504 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
9845d120 4505 {
4506 tree decl = dv_as_decl (var->dv);
4507 location_chain loc, *locp;
bc95df68 4508 bool changed = false;
9845d120 4509
4510 if (!var->n_var_parts)
4511 return 1;
4512
4513 gcc_assert (var->n_var_parts == 1);
4514
bc95df68 4515 if (shared_var_p (var, set->vars))
9845d120 4516 {
4517 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4518 {
d0a639c4 4519 /* We want to remove dying MEMs that don't refer to DECL. */
9845d120 4520 if (GET_CODE (loc->loc) == MEM
4521 && (MEM_EXPR (loc->loc) != decl
d0a639c4 4522 || INT_MEM_OFFSET (loc->loc) != 0)
0358713f 4523 && !mem_dies_at_call (loc->loc))
9845d120 4524 break;
0358713f 4525 /* We want to move here MEMs that do refer to DECL. */
9845d120 4526 else if (GET_CODE (loc->loc) == VALUE
4527 && find_mem_expr_in_1pdv (decl, loc->loc,
4528 shared_hash_htab (set->vars)))
0358713f 4529 break;
9845d120 4530 }
4531
4532 if (!loc)
4533 return 1;
4534
4535 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4536 var = (variable)*slot;
4537 gcc_assert (var->n_var_parts == 1);
4538 }
4539
4540 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4541 loc; loc = *locp)
4542 {
4543 rtx old_loc = loc->loc;
4544 if (GET_CODE (old_loc) == VALUE)
4545 {
4546 location_chain mem_node
4547 = find_mem_expr_in_1pdv (decl, loc->loc,
4548 shared_hash_htab (set->vars));
4549
4550 /* ??? This picks up only one out of multiple MEMs that
4551 refer to the same variable. Do we ever need to be
4552 concerned about dealing with more than one, or, given
4553 that they should all map to the same variable
4554 location, their addresses will have been merged and
4555 they will be regarded as equivalent? */
4556 if (mem_node)
4557 {
4558 loc->loc = mem_node->loc;
4559 loc->set_src = mem_node->set_src;
4560 loc->init = MIN (loc->init, mem_node->init);
4561 }
4562 }
4563
4564 if (GET_CODE (loc->loc) != MEM
4565 || (MEM_EXPR (loc->loc) == decl
d0a639c4 4566 && INT_MEM_OFFSET (loc->loc) == 0)
0358713f 4567 || !mem_dies_at_call (loc->loc))
9845d120 4568 {
4569 if (old_loc != loc->loc && emit_notes)
4570 {
bc95df68 4571 if (old_loc == var->var_part[0].cur_loc)
4572 {
4573 changed = true;
4574 var->var_part[0].cur_loc = NULL;
bc95df68 4575 }
9845d120 4576 }
4577 locp = &loc->next;
4578 continue;
4579 }
4580
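 /* LOC is a dying MEM that does not refer to DECL; remove it from
    the chain, noting whether it was the location last emitted. */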
4581 if (emit_notes)
bc95df68 4582 {
bc95df68 4583 if (old_loc == var->var_part[0].cur_loc)
4584 {
4585 changed = true;
4586 var->var_part[0].cur_loc = NULL;
bc95df68 4587 }
4588 }
9845d120 4589 *locp = loc->next;
4590 pool_free (loc_chain_pool, loc);
4591 }
4592
4593 if (!var->var_part[0].loc_chain)
4594 {
4595 var->n_var_parts--;
bc95df68 4596 changed = true;
9845d120 4597 }
bc95df68 4598 if (changed)
4599 variable_was_changed (var, set);
9845d120 4600 }
4601
4602 return 1;
4603}
4604
4605/* Remove all MEMs from the location list of a hash table entry for a
4606 value. */
4607
4608static int
4609dataflow_set_remove_mem_locs (void **slot, void *data)
4610{
4611 dataflow_set *set = (dataflow_set *) data;
4612 variable var = (variable) *slot;
4613
72fdb379 4614 if (var->onepart == ONEPART_VALUE)
9845d120 4615 {
4616 location_chain loc, *locp;
4617 bool changed = false;
72fdb379 4618 rtx cur_loc;
9845d120 4619
4620 gcc_assert (var->n_var_parts == 1);
4621
bc95df68 4622 if (shared_var_p (var, set->vars))
9845d120 4623 {
4624 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
0358713f 4625 if (GET_CODE (loc->loc) == MEM
4626 && mem_dies_at_call (loc->loc))
9845d120 4627 break;
4628
4629 if (!loc)
4630 return 1;
4631
4632 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4633 var = (variable)*slot;
4634 gcc_assert (var->n_var_parts == 1);
4635 }
4636
72fdb379 4637 if (VAR_LOC_1PAUX (var))
4638 cur_loc = VAR_LOC_FROM (var);
4639 else
4640 cur_loc = var->var_part[0].cur_loc;
4641
9845d120 4642 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4643 loc; loc = *locp)
4644 {
0358713f 4645 if (GET_CODE (loc->loc) != MEM
4646 || !mem_dies_at_call (loc->loc))
9845d120 4647 {
4648 locp = &loc->next;
4649 continue;
4650 }
4651
9845d120 4652 *locp = loc->next;
4653 /* If we have deleted the location which was last emitted,
4654 we have to emit a new location, so add the variable to the
4655 set of changed variables. */
72fdb379 4656 if (cur_loc == loc->loc)
bc95df68 4657 {
4658 changed = true;
4659 var->var_part[0].cur_loc = NULL;
72fdb379 4660 if (VAR_LOC_1PAUX (var))
4661 VAR_LOC_FROM (var) = NULL;
bc95df68 4662 }
9845d120 4663 pool_free (loc_chain_pool, loc);
4664 }
4665
4666 if (!var->var_part[0].loc_chain)
4667 {
4668 var->n_var_parts--;
bc95df68 4669 changed = true;
9845d120 4670 }
4671 if (changed)
bc95df68 4672 variable_was_changed (var, set);
9845d120 4673 }
4674
4675 return 1;
4676}
4677
4678/* Remove all variable-location information about call-clobbered
4679 registers, as well as associations between MEMs and VALUEs. */
4680
4681static void
4682dataflow_set_clear_at_call (dataflow_set *set)
4683{
4684 int r;
4685
4686 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
54f4c5d9 4687 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
9845d120 4688 var_regno_delete (set, r);
4689
4690 if (MAY_HAVE_DEBUG_INSNS)
4691 {
4692 set->traversed_vars = set->vars;
4693 htab_traverse (shared_hash_htab (set->vars),
4694 dataflow_set_preserve_mem_locs, set);
4695 set->traversed_vars = set->vars;
4696 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4697 set);
4698 set->traversed_vars = NULL;
4699 }
4700}
4701
9845d120 4702static bool
4703variable_part_different_p (variable_part *vp1, variable_part *vp2)
4704{
4705 location_chain lc1, lc2;
4706
4707 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4708 {
4709 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4710 {
4711 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4712 {
4713 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4714 break;
4715 }
4716 if (rtx_equal_p (lc1->loc, lc2->loc))
4717 break;
4718 }
4719 if (!lc2)
4720 return true;
4721 }
4722 return false;
4723}
4724
4725/* Return true if one-part variables VAR1 and VAR2 are different.
4726 They must be in canonical order. */
4727
4728static bool
4729onepart_variable_different_p (variable var1, variable var2)
4730{
4731 location_chain lc1, lc2;
4732
4733 if (var1 == var2)
4734 return false;
4735
a868678a 4736 gcc_assert (var1->n_var_parts == 1
4737 && var2->n_var_parts == 1);
9845d120 4738
4739 lc1 = var1->var_part[0].loc_chain;
4740 lc2 = var2->var_part[0].loc_chain;
4741
a868678a 4742 gcc_assert (lc1 && lc2);
9845d120 4743
4744 while (lc1 && lc2)
4745 {
4746 if (loc_cmp (lc1->loc, lc2->loc))
4747 return true;
4748 lc1 = lc1->next;
4749 lc2 = lc2->next;
4750 }
4751
4752 return lc1 != lc2;
4753}
4754
bc95df68 4755/* Return true if variables VAR1 and VAR2 are different. */
9845d120 4756
4757static bool
bc95df68 4758variable_different_p (variable var1, variable var2)
9845d120 4759{
4760 int i;
4761
4762 if (var1 == var2)
4763 return false;
4764
72fdb379 4765 if (var1->onepart != var2->onepart)
4766 return true;
4767
9845d120 4768 if (var1->n_var_parts != var2->n_var_parts)
4769 return true;
4770
72fdb379 4771 if (var1->onepart && var1->n_var_parts)
4772 {
4773 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4774 && var1->n_var_parts == 1);
4775 /* One-part values have locations in a canonical order. */
4776 return onepart_variable_different_p (var1, var2);
4777 }
4778
9845d120 4779 for (i = 0; i < var1->n_var_parts; i++)
4780 {
72fdb379 4781 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
9845d120 4782 return true;
9845d120 4783 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4784 return true;
4785 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4786 return true;
4787 }
4788 return false;
4789}
4790
9845d120 4791/* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4792
4793static bool
4794dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4795{
a868678a 4796 htab_iterator hi;
4797 variable var1;
4798
9845d120 4799 if (old_set->vars == new_set->vars)
4800 return false;
4801
4802 if (htab_elements (shared_hash_htab (old_set->vars))
4803 != htab_elements (shared_hash_htab (new_set->vars)))
4804 return true;
4805
a868678a 4806 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4807 {
4808 htab_t htab = shared_hash_htab (new_set->vars);
4809 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4810 dv_htab_hash (var1->dv));
4811 if (!var2)
4812 {
4813 if (dump_file && (dump_flags & TDF_DETAILS))
4814 {
4815 fprintf (dump_file, "dataflow difference found: removal of:\n");
4816 dump_var (var1);
4817 }
4818 return true;
4819 }
4820
4821 if (variable_different_p (var1, var2))
4822 {
4823 if (dump_file && (dump_flags & TDF_DETAILS))
4824 {
4825 fprintf (dump_file, "dataflow difference found: "
4826 "old and new follow:\n");
4827 dump_var (var1);
4828 dump_var (var2);
4829 }
4830 return true;
4831 }
4832 }
9845d120 4833
9845d120 4834 /* No need to traverse the second hashtab, if both have the same number
4835 of elements and the second one had all entries found in the first one,
4836 then it can't have any extra entries. */
a868678a 4837 return false;
9845d120 4838}
4839
4840/* Free the contents of dataflow set SET. */
4841
4842static void
4843dataflow_set_destroy (dataflow_set *set)
4844{
4845 int i;
4846
4847 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4848 attrs_list_clear (&set->regs[i]);
4849
4850 shared_hash_destroy (set->vars);
4851 set->vars = NULL;
4852}
4853
4854/* Return true if RTL X contains a SYMBOL_REF. */
4855
4856static bool
4857contains_symbol_ref (rtx x)
4858{
4859 const char *fmt;
4860 RTX_CODE code;
4861 int i;
4862
4863 if (!x)
4864 return false;
4865
4866 code = GET_CODE (x);
4867 if (code == SYMBOL_REF)
4868 return true;
4869
4870 fmt = GET_RTX_FORMAT (code);
4871 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4872 {
4873 if (fmt[i] == 'e')
4874 {
4875 if (contains_symbol_ref (XEXP (x, i)))
4876 return true;
4877 }
4878 else if (fmt[i] == 'E')
4879 {
4880 int j;
4881 for (j = 0; j < XVECLEN (x, i); j++)
4882 if (contains_symbol_ref (XVECEXP (x, i, j)))
4883 return true;
4884 }
4885 }
4886
4887 return false;
4888}
4889
4890/* Shall EXPR be tracked? */
4891
4892static bool
4893track_expr_p (tree expr, bool need_rtl)
4894{
4895 rtx decl_rtl;
4896 tree realdecl;
4897
688ff29b 4898 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4899 return DECL_RTL_SET_P (expr);
4900
9845d120 4901 /* If EXPR is not a parameter or a variable, do not track it. */
4902 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4903 return 0;
4904
4905 /* It also must have a name... */
60d11eb6 4906 if (!DECL_NAME (expr) && need_rtl)
9845d120 4907 return 0;
4908
4909 /* ... and a RTL assigned to it. */
4910 decl_rtl = DECL_RTL_IF_SET (expr);
4911 if (!decl_rtl && need_rtl)
4912 return 0;
48e1416a 4913
4914 /* If this expression is really a debug alias of some other declaration, we
9845d120 4915 don't need to track this expression if the ultimate declaration is
4916 ignored. */
4917 realdecl = expr;
4e9d80c4 4918 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
9845d120 4919 {
4920 realdecl = DECL_DEBUG_EXPR (realdecl);
4e9d80c4 4921 if (realdecl == NULL_TREE)
4922 realdecl = expr;
4e9d80c4 4923 else if (!DECL_P (realdecl))
5dee2817 4924 {
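 /* The debug expression is a component reference; track EXPR itself
    only when it refers to a small, constant-sized piece of a decl
    that is neither ignored nor static. */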
4925 if (handled_component_p (realdecl))
4926 {
4927 HOST_WIDE_INT bitsize, bitpos, maxsize;
4928 tree innerdecl
4929 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4930 &maxsize);
4931 if (!DECL_P (innerdecl)
4932 || DECL_IGNORED_P (innerdecl)
4933 || TREE_STATIC (innerdecl)
4934 || bitsize <= 0
4935 || bitpos + bitsize > 256
4936 || bitsize != maxsize)
4937 return 0;
4938 else
4939 realdecl = expr;
4940 }
4941 else
4942 return 0;
4943 }
9845d120 4944 }
4945
4946 /* Do not track EXPR if its REALDECL should be ignored for debugging
48e1416a 4947 purposes. */
9845d120 4948 if (DECL_IGNORED_P (realdecl))
4949 return 0;
4950
4951 /* Do not track global variables until we are able to emit correct location
4952 list for them. */
4953 if (TREE_STATIC (realdecl))
4954 return 0;
4955
4956 /* When EXPR is a DECL for an alias of some variable (see example),
4957 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
4958 DECL_RTL contains SYMBOL_REF.
4959
4960 Example:
4961 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4962 char **_dl_argv;
4963 */
4964 if (decl_rtl && MEM_P (decl_rtl)
4965 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4966 return 0;
4967
4968 /* If the RTX is a memory, it should not be very large (because it would
4969 be an array or a struct). */
4970 if (decl_rtl && MEM_P (decl_rtl))
4971 {
4972 /* Do not track structures and arrays. */
4973 if (GET_MODE (decl_rtl) == BLKmode
4974 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4975 return 0;
5b2a69fa 4976 if (MEM_SIZE_KNOWN_P (decl_rtl)
4977 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
9845d120 4978 return 0;
4979 }
4980
4981 DECL_CHANGED (expr) = 0;
4982 DECL_CHANGED (realdecl) = 0;
4983 return 1;
4984}
4985
4986/* Determine whether a given LOC refers to the same variable part as
4987 EXPR+OFFSET. */
4988
4989static bool
4990same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4991{
4992 tree expr2;
4993 HOST_WIDE_INT offset2;
4994
4995 if (! DECL_P (expr))
4996 return false;
4997
4998 if (REG_P (loc))
4999 {
5000 expr2 = REG_EXPR (loc);
5001 offset2 = REG_OFFSET (loc);
5002 }
5003 else if (MEM_P (loc))
5004 {
5005 expr2 = MEM_EXPR (loc);
5006 offset2 = INT_MEM_OFFSET (loc);
5007 }
5008 else
5009 return false;
5010
5011 if (! expr2 || ! DECL_P (expr2))
5012 return false;
5013
5014 expr = var_debug_decl (expr);
5015 expr2 = var_debug_decl (expr2);
5016
5017 return (expr == expr2 && offset == offset2);
5018}
5019
5020/* LOC is a REG or MEM that we would like to track if possible.
80c70e76 5021 If EXPR is null, we don't know what expression LOC refers to,
5022 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5023 LOC is an lvalue register.
e10d697d 5024
80c70e76 5025 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5026 is something we can track. When returning true, store the mode of
5027 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5028 from EXPR in *OFFSET_OUT (if nonnull). */
e10d697d 5029
80c70e76 5030static bool
5031track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5032 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
e10d697d 5033{
5034 enum machine_mode mode;
5035
9845d120 5036 if (expr == NULL || !track_expr_p (expr, true))
80c70e76 5037 return false;
5038
5039 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5040 whole subreg, but only the old inner part is really relevant. */
5041 mode = GET_MODE (loc);
5042 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
e10d697d 5043 {
5044 enum machine_mode pseudo_mode;
5045
80c70e76 5046 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
e10d697d 5047 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
80c70e76 5048 {
5049 offset += byte_lowpart_offset (pseudo_mode, mode);
5050 mode = pseudo_mode;
5051 }
5052 }
5053
5054 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5055 Do the same if we are storing to a register and EXPR occupies
5056 the whole of register LOC; in that case, the whole of EXPR is
5057 being changed. We exclude complex modes from the second case
5058 because the real and imaginary parts are represented as separate
5059 pseudo registers, even if the whole complex value fits into one
5060 hard register. */
5061 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5062 || (store_reg_p
5063 && !COMPLEX_MODE_P (DECL_MODE (expr))
5064 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5065 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5066 {
5067 mode = DECL_MODE (expr);
5068 offset = 0;
e10d697d 5069 }
80c70e76 5070
5071 if (offset < 0 || offset >= MAX_VAR_PARTS)
5072 return false;
5073
5074 if (mode_out)
5075 *mode_out = mode;
5076 if (offset_out)
5077 *offset_out = offset;
5078 return true;
e10d697d 5079}
5080
5081/* Return the MODE lowpart of LOC, or null if LOC is not something we
5082 want to track. When returning nonnull, make sure that the attributes
5083 on the returned value are updated. */
5084
5085static rtx
5086var_lowpart (enum machine_mode mode, rtx loc)
5087{
80c70e76 5088 unsigned int offset, reg_offset, regno;
e10d697d 5089
5090 if (!REG_P (loc) && !MEM_P (loc))
5091 return NULL;
5092
5093 if (GET_MODE (loc) == mode)
5094 return loc;
5095
9845d120 5096 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5097
5098 if (MEM_P (loc))
5099 return adjust_address_nv (loc, mode, offset);
5100
5101 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5102 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5103 reg_offset, mode);
5104 return gen_rtx_REG_offset (loc, mode, regno, offset);
5105}
5106
5107/* Carry information about uses and stores while walking rtx. */
5108
5109struct count_use_info
5110{
5111 /* The insn where the RTX is. */
5112 rtx insn;
5113
5114 /* The basic block where insn is. */
5115 basic_block bb;
5116
5117 /* The array of n_sets sets in the insn, as determined by cselib. */
5118 struct cselib_set *sets;
5119 int n_sets;
5120
5121 /* True if we're counting stores, false otherwise. */
5122 bool store_p;
5123};
5124
5125/* Find a VALUE corresponding to X. */
5126
5127static inline cselib_val *
5128find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5129{
5130 int i;
5131
5132 if (cui->sets)
5133 {
5134 /* This is called after uses are set up and before stores are
88948300 5135 processed by cselib, so it's safe to look up srcs, but not
9845d120 5136 dsts. So we look up expressions that appear in srcs or in
5137 dest expressions, but we search the sets array for dests of
5138 stores. */
5139 if (cui->store_p)
5140 {
88948300 5141 /* Some targets represent memset and memcpy patterns
5142 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5143 (set (mem:BLK ...) (const_int ...)) or
5144 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5145 in that case, otherwise we end up with mode mismatches. */
5146 if (mode == BLKmode && MEM_P (x))
5147 return NULL;
9845d120 5148 for (i = 0; i < cui->n_sets; i++)
5149 if (cui->sets[i].dest == x)
5150 return cui->sets[i].src_elt;
5151 }
5152 else
1f864115 5153 return cselib_lookup (x, mode, 0, VOIDmode);
9845d120 5154 }
5155
5156 return NULL;
5157}
5158
5159/* Replace all registers and addresses in an expression with VALUE
5160 expressions that map back to them, unless the expression is a
5161 register. If no mapping is or can be performed, returns NULL. */
5162
5163static rtx
5164replace_expr_with_values (rtx loc)
5165{
0ee5bf3c 5166 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
9845d120 5167 return NULL;
5168 else if (MEM_P (loc))
5169 {
35af0188 5170 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
1f864115 5171 get_address_mode (loc), 0,
5172 GET_MODE (loc));
9845d120 5173 if (addr)
5174 return replace_equiv_address_nv (loc, addr->val_rtx);
5175 else
5176 return NULL;
5177 }
5178 else
1f864115 5179 return cselib_subst_to_values (loc, VOIDmode);
9845d120 5180}
5181
d59975de 5182/* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5183 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5184 RTX. */
5185
5186static int
5187rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5188{
5189 rtx loc = *x;
5190
5191 return GET_CODE (loc) == DEBUG_EXPR;
5192}
5193
9845d120 5194/* Determine what kind of micro operation to choose for a USE. Return
5195 MO_CLOBBER if no micro operation is to be generated. */
5196
5197static enum micro_operation_type
67f2e0ef 5198use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
9845d120 5199{
5200 tree expr;
9845d120 5201
5202 if (cui && cui->sets)
5203 {
67f2e0ef 5204 if (GET_CODE (loc) == VAR_LOCATION)
9845d120 5205 {
67f2e0ef 5206 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
9845d120 5207 {
67f2e0ef 5208 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
35af0188 5209 if (! VAR_LOC_UNKNOWN_P (ploc))
5210 {
1f864115 5211 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5212 VOIDmode);
9845d120 5213
35af0188 5214 /* ??? flag_float_store and volatile mems are never
5215 given values, but we could in theory use them for
5216 locations. */
5217 gcc_assert (val || 1);
5218 }
9845d120 5219 return MO_VAL_LOC;
5220 }
5221 else
5222 return MO_CLOBBER;
5223 }
5224
3af97df7 5225 if (REG_P (loc) || MEM_P (loc))
9845d120 5226 {
5227 if (modep)
67f2e0ef 5228 *modep = GET_MODE (loc);
9845d120 5229 if (cui->store_p)
5230 {
67f2e0ef 5231 if (REG_P (loc)
3af97df7 5232 || (find_use_val (loc, GET_MODE (loc), cui)
35af0188 5233 && cselib_lookup (XEXP (loc, 0),
1f864115 5234 get_address_mode (loc), 0,
5235 GET_MODE (loc))))
9845d120 5236 return MO_VAL_SET;
5237 }
3af97df7 5238 else
5239 {
5240 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5241
5242 if (val && !cselib_preserved_value_p (val))
5243 return MO_VAL_USE;
5244 }
9845d120 5245 }
5246 }
5247
67f2e0ef 5248 if (REG_P (loc))
9845d120 5249 {
67f2e0ef 5250 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
9845d120 5251
35af0188 5252 if (loc == cfa_base_rtx)
5253 return MO_CLOBBER;
67f2e0ef 5254 expr = REG_EXPR (loc);
9845d120 5255
5256 if (!expr)
5257 return MO_USE_NO_VAR;
5258 else if (target_for_debug_bind (var_debug_decl (expr)))
5259 return MO_CLOBBER;
67f2e0ef 5260 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
9845d120 5261 false, modep, NULL))
5262 return MO_USE;
5263 else
5264 return MO_USE_NO_VAR;
5265 }
67f2e0ef 5266 else if (MEM_P (loc))
9845d120 5267 {
67f2e0ef 5268 expr = MEM_EXPR (loc);
9845d120 5269
5270 if (!expr)
5271 return MO_CLOBBER;
5272 else if (target_for_debug_bind (var_debug_decl (expr)))
5273 return MO_CLOBBER;
67f2e0ef 5274 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
d59975de 5275 false, modep, NULL)
5276 /* Multi-part variables shouldn't refer to one-part
5277 variable names such as VALUEs (never happens) or
5278 DEBUG_EXPRs (only happens in the presence of debug
5279 insns). */
5280 && (!MAY_HAVE_DEBUG_INSNS
5281 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
9845d120 5282 return MO_USE;
5283 else
5284 return MO_CLOBBER;
5285 }
5286
5287 return MO_CLOBBER;
5288}
e10d697d 5289
9845d120 5290/* Log to OUT information about micro-operation MOPT involving X in
5291 INSN of BB. */
e10d697d 5292
9845d120 5293static inline void
5294log_op_type (rtx x, basic_block bb, rtx insn,
5295 enum micro_operation_type mopt, FILE *out)
5296{
5297 fprintf (out, "bb %i op %i insn %i %s ",
c77c64d8 5298 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
9845d120 5299 INSN_UID (insn), micro_operation_type_name[mopt]);
5300 print_inline_rtx (out, x, 2);
5301 fputc ('\n', out);
e10d697d 5302}
96414f01 5303
9845d120 5304/* Tell whether the CONCAT used to holds a VALUE and its location
5305 needs value resolution, i.e., an attempt of mapping the location
5306 back to other incoming values. */
5307#define VAL_NEEDS_RESOLUTION(x) \
5308 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5309/* Whether the location in the CONCAT is a tracked expression, that
5310 should also be handled like a MO_USE. */
5311#define VAL_HOLDS_TRACK_EXPR(x) \
5312 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5313/* Whether the location in the CONCAT should be handled like a MO_COPY
5314 as well. */
5315#define VAL_EXPR_IS_COPIED(x) \
5316 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5317/* Whether the location in the CONCAT should be handled like a
5318 MO_CLOBBER as well. */
5319#define VAL_EXPR_IS_CLOBBERED(x) \
5320 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5321
ace62c8c 5322/* All preserved VALUEs. */
5323static VEC (rtx, heap) *preserved_values;
5324
c77c64d8 5325/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
ace62c8c 5326
5327static void
5328preserve_value (cselib_val *val)
5329{
5330 cselib_preserve_value (val);
5331 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5332}
5333
1197d3d7 5334/* Helper function for MO_VAL_LOC handling. Return non-zero if
5335 any rtxes not suitable for CONST use not replaced by VALUEs
5336 are discovered. */
5337
5338static int
5339non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5340{
5341 if (*x == NULL_RTX)
5342 return 0;
5343
5344 switch (GET_CODE (*x))
5345 {
5346 case REG:
5347 case DEBUG_EXPR:
5348 case PC:
5349 case SCRATCH:
5350 case CC0:
5351 case ASM_INPUT:
5352 case ASM_OPERANDS:
5353 return 1;
5354 case MEM:
5355 return !MEM_READONLY_P (*x);
5356 default:
5357 return 0;
5358 }
5359}
5360
5923a5e7 5361/* Add uses (register and memory references) LOC which will be tracked
5362 to VTI (bb)->mos. INSN is the instruction that LOC is part of. */
5363
5364static int
67f2e0ef 5365add_uses (rtx *ploc, void *data)
5923a5e7 5366{
67f2e0ef 5367 rtx loc = *ploc;
9845d120 5368 enum machine_mode mode = VOIDmode;
5369 struct count_use_info *cui = (struct count_use_info *)data;
5370 enum micro_operation_type type = use_type (loc, cui, &mode);
80c70e76 5371
9845d120 5372 if (type != MO_CLOBBER)
5923a5e7 5373 {
9845d120 5374 basic_block bb = cui->bb;
c77c64d8 5375 micro_operation mo;
5923a5e7 5376
c77c64d8 5377 mo.type = type;
5378 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5379 mo.insn = cui->insn;
9845d120 5380
5381 if (type == MO_VAL_LOC)
e10d697d 5382 {
67f2e0ef 5383 rtx oloc = loc;
9845d120 5384 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5385 cselib_val *val;
5386
5387 gcc_assert (cui->sets);
5388
5389 if (MEM_P (vloc)
35af0188 5390 && !REG_P (XEXP (vloc, 0))
72fdb379 5391 && !MEM_P (XEXP (vloc, 0)))
9845d120 5392 {
5393 rtx mloc = vloc;
35af0188 5394 enum machine_mode address_mode = get_address_mode (mloc);
98155838 5395 cselib_val *val
1f864115 5396 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5397 GET_MODE (mloc));
9845d120 5398
5399 if (val && !cselib_preserved_value_p (val))
8081d3a6 5400 preserve_value (val);
9845d120 5401 }
5402
1197d3d7 5403 if (CONSTANT_P (vloc)
5404 && (GET_CODE (vloc) != CONST
5405 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5406 /* For constants don't look up any value. */;
72fdb379 5407 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
1197d3d7 5408 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
9845d120 5409 {
5410 enum machine_mode mode2;
5411 enum micro_operation_type type2;
8081d3a6 5412 rtx nloc = NULL;
5413 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5414
5415 if (resolvable)
5416 nloc = replace_expr_with_values (vloc);
9845d120 5417
5418 if (nloc)
5419 {
5420 oloc = shallow_copy_rtx (oloc);
5421 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5422 }
5423
5424 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5425
67f2e0ef 5426 type2 = use_type (vloc, 0, &mode2);
9845d120 5427
5428 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5429 || type2 == MO_CLOBBER);
5430
5431 if (type2 == MO_CLOBBER
5432 && !cselib_preserved_value_p (val))
5433 {
8081d3a6 5434 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
ace62c8c 5435 preserve_value (val);
9845d120 5436 }
5437 }
5438 else if (!VAR_LOC_UNKNOWN_P (vloc))
5439 {
5440 oloc = shallow_copy_rtx (oloc);
5441 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5442 }
5443
c77c64d8 5444 mo.u.loc = oloc;
e10d697d 5445 }
9845d120 5446 else if (type == MO_VAL_USE)
e10d697d 5447 {
9845d120 5448 enum machine_mode mode2 = VOIDmode;
5449 enum micro_operation_type type2;
67f2e0ef 5450 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5451 rtx vloc, oloc = loc, nloc;
9845d120 5452
5453 gcc_assert (cui->sets);
5454
5455 if (MEM_P (oloc)
35af0188 5456 && !REG_P (XEXP (oloc, 0))
72fdb379 5457 && !MEM_P (XEXP (oloc, 0)))
9845d120 5458 {
5459 rtx mloc = oloc;
35af0188 5460 enum machine_mode address_mode = get_address_mode (mloc);
98155838 5461 cselib_val *val
1f864115 5462 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
72fdb379 5463 GET_MODE (mloc));
9845d120 5464
5465 if (val && !cselib_preserved_value_p (val))
8081d3a6 5466 preserve_value (val);
9845d120 5467 }
5468
5469 type2 = use_type (loc, 0, &mode2);
5470
5471 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5472 || type2 == MO_CLOBBER);
5473
5474 if (type2 == MO_USE)
67f2e0ef 5475 vloc = var_lowpart (mode2, loc);
9845d120 5476 else
5477 vloc = oloc;
5478
5479 /* The loc of a MO_VAL_USE may have two forms:
5480
5481 (concat val src): val is at src, a value-based
5482 representation.
5483
5484 (concat (concat val use) src): same as above, with use as
5485 the MO_USE tracked value, if it differs from src.
5486
5487 */
5488
8081d3a6 5489 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
67f2e0ef 5490 nloc = replace_expr_with_values (loc);
9845d120 5491 if (!nloc)
5492 nloc = oloc;
5493
5494 if (vloc != nloc)
5495 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5496 else
5497 oloc = val->val_rtx;
5498
c77c64d8 5499 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
9845d120 5500
5501 if (type2 == MO_USE)
c77c64d8 5502 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
9845d120 5503 if (!cselib_preserved_value_p (val))
5504 {
c77c64d8 5505 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
ace62c8c 5506 preserve_value (val);
9845d120 5507 }
e10d697d 5508 }
9845d120 5509 else
5510 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5923a5e7 5511
9845d120 5512 if (dump_file && (dump_flags & TDF_DETAILS))
c77c64d8 5513 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5514 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5923a5e7 5515 }
5516
5517 return 0;
5518}
5519
5520/* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5521
5522static void
9845d120 5523add_uses_1 (rtx *x, void *cui)
5923a5e7 5524{
9845d120 5525 for_each_rtx (x, add_uses, cui);
5923a5e7 5526}
5527
72fdb379 5528/* This is the value used during expansion of locations. We want it
5529 to be unbounded, so that variables expanded deep in a recursion
5530 nest are fully evaluated, so that their values are cached
5531 correctly. We avoid recursion cycles through other means, and we
5532 don't unshare RTL, so excess complexity is not a problem. */
5533#define EXPR_DEPTH (INT_MAX)
5534/* We use this to keep too-complex expressions from being emitted as
5535 location notes, and then to debug information. Users can trade
5536 compile time for ridiculously complex expressions, although they're
5537 seldom useful, and they may often have to be discarded as not
5538 representable anyway. */
5539#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
b5c80997 5540
8081d3a6 5541/* Attempt to reverse the EXPR operation in the debug info and record
5542 it in the cselib table. For example, for reg1 = reg2 + 6, even when
5543 reg2 is no longer live we can express its value as VAL - 6. */
a243dd11 5544
8081d3a6 5545static void
5546reverse_op (rtx val, const_rtx expr, rtx insn)
a243dd11 5547{
5548 rtx src, arg, ret;
5549 cselib_val *v;
0a35a733 5550 struct elt_loc_list *l;
a243dd11 5551 enum rtx_code code;
5552
5553 if (GET_CODE (expr) != SET)
8081d3a6 5554 return;
a243dd11 5555
5556 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
8081d3a6 5557 return;
a243dd11 5558
5559 src = SET_SRC (expr);
5560 switch (GET_CODE (src))
5561 {
5562 case PLUS:
5563 case MINUS:
5564 case XOR:
5565 case NOT:
5566 case NEG:
5cbc53e2 5567 if (!REG_P (XEXP (src, 0)))
8081d3a6 5568 return;
5cbc53e2 5569 break;
a243dd11 5570 case SIGN_EXTEND:
5571 case ZERO_EXTEND:
5cbc53e2 5572 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
8081d3a6 5573 return;
a243dd11 5574 break;
5575 default:
8081d3a6 5576 return;
a243dd11 5577 }
5578
5cbc53e2 5579 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
8081d3a6 5580 return;
a243dd11 5581
1f864115 5582 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
a243dd11 5583 if (!v || !cselib_preserved_value_p (v))
8081d3a6 5584 return;
a243dd11 5585
0be329ef 5586 /* Use canonical V to avoid creating multiple redundant expressions
5587 for different VALUES equivalent to V. */
5588 v = canonical_cselib_val (v);
5589
0a35a733 5590 /* Adding a reverse op isn't useful if V already has an always valid
 5591 location.  Ignore ENTRY_VALUE: although it is always constant, we should
5592 prefer non-ENTRY_VALUE locations whenever possible. */
5593 for (l = v->locs; l; l = l->next)
5594 if (CONSTANT_P (l->loc)
5595 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5596 return;
5597
a243dd11 5598 switch (GET_CODE (src))
5599 {
5600 case NOT:
5601 case NEG:
5602 if (GET_MODE (v->val_rtx) != GET_MODE (val))
8081d3a6 5603 return;
a243dd11 5604 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5605 break;
5606 case SIGN_EXTEND:
5607 case ZERO_EXTEND:
5608 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5609 break;
5610 case XOR:
5611 code = XOR;
5612 goto binary;
5613 case PLUS:
5614 code = MINUS;
5615 goto binary;
5616 case MINUS:
5617 code = PLUS;
5618 goto binary;
5619 binary:
5620 if (GET_MODE (v->val_rtx) != GET_MODE (val))
8081d3a6 5621 return;
a243dd11 5622 arg = XEXP (src, 1);
5623 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5624 {
66d42724 5625 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
a243dd11 5626 if (arg == NULL_RTX)
8081d3a6 5627 return;
a243dd11 5628 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
8081d3a6 5629 return;
a243dd11 5630 }
5631 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5632 if (ret == val)
5633 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5634 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5635 breaks a lot of routines during var-tracking. */
5636 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5637 break;
5638 default:
5639 gcc_unreachable ();
5640 }
5641
8081d3a6 5642 cselib_add_permanent_equiv (v, ret, insn);
a243dd11 5643}
5644
5923a5e7 5645/* Add stores (register and memory references) LOC which will be tracked
9845d120 5646 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
 5647 CUIP->insn is the instruction of which LOC is part. */
5923a5e7 5648
5649static void
9845d120 5650add_stores (rtx loc, const_rtx expr, void *cuip)
5923a5e7 5651{
9845d120 5652 enum machine_mode mode = VOIDmode, mode2;
5653 struct count_use_info *cui = (struct count_use_info *)cuip;
5654 basic_block bb = cui->bb;
c77c64d8 5655 micro_operation mo;
9845d120 5656 rtx oloc = loc, nloc, src = NULL;
67f2e0ef 5657 enum micro_operation_type type = use_type (loc, cui, &mode);
9845d120 5658 bool track_p = false;
5659 cselib_val *v;
5660 bool resolve, preserve;
5661
5662 if (type == MO_CLOBBER)
5663 return;
5664
5665 mode2 = mode;
80c70e76 5666
8ad4c111 5667 if (REG_P (loc))
5923a5e7 5668 {
35af0188 5669 gcc_assert (loc != cfa_base_rtx);
9845d120 5670 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
67f2e0ef 5671 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
9845d120 5672 || GET_CODE (expr) == CLOBBER)
e10d697d 5673 {
c77c64d8 5674 mo.type = MO_CLOBBER;
5675 mo.u.loc = loc;
4143d08b 5676 if (GET_CODE (expr) == SET
5677 && SET_DEST (expr) == loc
72fdb379 5678 && !unsuitable_loc (SET_SRC (expr))
5679 && find_use_val (loc, mode, cui))
4143d08b 5680 {
5681 gcc_checking_assert (type == MO_VAL_SET);
5682 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5683 }
e10d697d 5684 }
96414f01 5685 else
e10d697d 5686 {
4143d08b 5687 if (GET_CODE (expr) == SET
5688 && SET_DEST (expr) == loc
5689 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
35af0188 5690 src = var_lowpart (mode2, SET_SRC (expr));
9845d120 5691 loc = var_lowpart (mode2, loc);
e10d697d 5692
5693 if (src == NULL)
5694 {
c77c64d8 5695 mo.type = MO_SET;
5696 mo.u.loc = loc;
e10d697d 5697 }
5698 else
5699 {
35af0188 5700 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
e10d697d 5701 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
c77c64d8 5702 mo.type = MO_COPY;
e10d697d 5703 else
c77c64d8 5704 mo.type = MO_SET;
5705 mo.u.loc = xexpr;
e10d697d 5706 }
5707 }
c77c64d8 5708 mo.insn = cui->insn;
5923a5e7 5709 }
e16ceb8e 5710 else if (MEM_P (loc)
67f2e0ef 5711 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
9845d120 5712 || cui->sets))
5923a5e7 5713 {
9845d120 5714 if (MEM_P (loc) && type == MO_VAL_SET
35af0188 5715 && !REG_P (XEXP (loc, 0))
72fdb379 5716 && !MEM_P (XEXP (loc, 0)))
9845d120 5717 {
5718 rtx mloc = loc;
35af0188 5719 enum machine_mode address_mode = get_address_mode (mloc);
5720 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
1f864115 5721 address_mode, 0,
5722 GET_MODE (mloc));
9845d120 5723
5724 if (val && !cselib_preserved_value_p (val))
8081d3a6 5725 preserve_value (val);
9845d120 5726 }
5923a5e7 5727
9845d120 5728 if (GET_CODE (expr) == CLOBBER || !track_p)
e10d697d 5729 {
c77c64d8 5730 mo.type = MO_CLOBBER;
5731 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
e10d697d 5732 }
5733 else
5734 {
4143d08b 5735 if (GET_CODE (expr) == SET
5736 && SET_DEST (expr) == loc
5737 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
35af0188 5738 src = var_lowpart (mode2, SET_SRC (expr));
9845d120 5739 loc = var_lowpart (mode2, loc);
e10d697d 5740
5741 if (src == NULL)
5742 {
c77c64d8 5743 mo.type = MO_SET;
5744 mo.u.loc = loc;
e10d697d 5745 }
5746 else
5747 {
35af0188 5748 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
3e873167 5749 if (same_variable_part_p (SET_SRC (xexpr),
96414f01 5750 MEM_EXPR (loc),
eeb0ae23 5751 INT_MEM_OFFSET (loc)))
c77c64d8 5752 mo.type = MO_COPY;
e10d697d 5753 else
c77c64d8 5754 mo.type = MO_SET;
5755 mo.u.loc = xexpr;
e10d697d 5756 }
5757 }
c77c64d8 5758 mo.insn = cui->insn;
9845d120 5759 }
5760 else
5761 return;
5762
5763 if (type != MO_VAL_SET)
5764 goto log_and_return;
5765
5766 v = find_use_val (oloc, mode, cui);
5767
3af97df7 5768 if (!v)
5769 goto log_and_return;
5770
9845d120 5771 resolve = preserve = !cselib_preserved_value_p (v);
5772
5773 nloc = replace_expr_with_values (oloc);
5774 if (nloc)
5775 oloc = nloc;
5776
bb325882 5777 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5778 {
1f864115 5779 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
bb325882 5780
5781 gcc_assert (oval != v);
5782 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5783
d1a21c35 5784 if (oval && !cselib_preserved_value_p (oval))
bb325882 5785 {
c77c64d8 5786 micro_operation moa;
bb325882 5787
ace62c8c 5788 preserve_value (oval);
bb325882 5789
c77c64d8 5790 moa.type = MO_VAL_USE;
5791 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5792 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5793 moa.insn = cui->insn;
bb325882 5794
5795 if (dump_file && (dump_flags & TDF_DETAILS))
c77c64d8 5796 log_op_type (moa.u.loc, cui->bb, cui->insn,
5797 moa.type, dump_file);
5798 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
bb325882 5799 }
5800
5801 resolve = false;
5802 }
c77c64d8 5803 else if (resolve && GET_CODE (mo.u.loc) == SET)
9845d120 5804 {
8081d3a6 5805 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5806 nloc = replace_expr_with_values (SET_SRC (expr));
5807 else
5808 nloc = NULL_RTX;
3e873167 5809
5810 /* Avoid the mode mismatch between oexpr and expr. */
5811 if (!nloc && mode != mode2)
5812 {
35af0188 5813 nloc = SET_SRC (expr);
3e873167 5814 gcc_assert (oloc == SET_DEST (expr));
5815 }
9845d120 5816
8081d3a6 5817 if (nloc && nloc != SET_SRC (mo.u.loc))
c77c64d8 5818 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
9845d120 5819 else
5820 {
c77c64d8 5821 if (oloc == SET_DEST (mo.u.loc))
9845d120 5822 /* No point in duplicating. */
c77c64d8 5823 oloc = mo.u.loc;
5824 if (!REG_P (SET_SRC (mo.u.loc)))
9845d120 5825 resolve = false;
5826 }
5827 }
5828 else if (!resolve)
5829 {
c77c64d8 5830 if (GET_CODE (mo.u.loc) == SET
5831 && oloc == SET_DEST (mo.u.loc))
9845d120 5832 /* No point in duplicating. */
c77c64d8 5833 oloc = mo.u.loc;
9845d120 5834 }
5835 else
5836 resolve = false;
5837
5838 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5839
c77c64d8 5840 if (mo.u.loc != oloc)
5841 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
9845d120 5842
5843 /* The loc of a MO_VAL_SET may have various forms:
5844
5845 (concat val dst): dst now holds val
5846
5847 (concat val (set dst src)): dst now holds val, copied from src
5848
5849 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5850 after replacing mems and non-top-level regs with values.
5851
5852 (concat (concat val dstv) (set dst src)): dst now holds val,
5853 copied from src. dstv is a value-based representation of dst, if
3e873167 5854 it differs from dst. If resolution is needed, src is a REG, and
5855 its mode is the same as that of val.
9845d120 5856
5857 (concat (concat val (set dstv srcv)) (set dst src)): src
5858 copied to dst, holding val. dstv and srcv are value-based
5859 representations of dst and src, respectively.
5860
5861 */
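  /* These forms are decoded by peeling the CONCATs apart (see the
     MO_VAL_SET case in compute_bb_dataflow): XEXP (loc, 0) is the value,
     possibly itself a CONCAT carrying dstv, and XEXP (loc, 1) is dst or
     the SET.  */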
5862
a243dd11 5863 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
8081d3a6 5864 reverse_op (v->val_rtx, expr, cui->insn);
a243dd11 5865
c77c64d8 5866 mo.u.loc = loc;
9845d120 5867
5868 if (track_p)
5869 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5870 if (preserve)
5871 {
5872 VAL_NEEDS_RESOLUTION (loc) = resolve;
ace62c8c 5873 preserve_value (v);
9845d120 5874 }
c77c64d8 5875 if (mo.type == MO_CLOBBER)
9845d120 5876 VAL_EXPR_IS_CLOBBERED (loc) = 1;
c77c64d8 5877 if (mo.type == MO_COPY)
9845d120 5878 VAL_EXPR_IS_COPIED (loc) = 1;
5879
c77c64d8 5880 mo.type = MO_VAL_SET;
9845d120 5881
5882 log_and_return:
5883 if (dump_file && (dump_flags & TDF_DETAILS))
c77c64d8 5884 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5885 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
9845d120 5886}
5887
4143d08b 5888/* Arguments of the call currently being scanned: an EXPR_LIST of
   CONCATs describing the locations and tracked values of the arguments,
   later attached to the insn's MO_CALL micro operation.  */
5889static rtx call_arguments;
5890
 5891/* Compute call_arguments for call insn INSN in basic block BB. */
5892
5893static void
5894prepare_call_arguments (basic_block bb, rtx insn)
5895{
5896 rtx link, x;
5897 rtx prev, cur, next;
5898 rtx call = PATTERN (insn);
57999964 5899 rtx this_arg = NULL_RTX;
5900 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5901 tree obj_type_ref = NULL_TREE;
39cba157 5902 CUMULATIVE_ARGS args_so_far_v;
5903 cumulative_args_t args_so_far;
4143d08b 5904
39cba157 5905 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5906 args_so_far = pack_cumulative_args (&args_so_far_v);
4143d08b 5907 if (GET_CODE (call) == PARALLEL)
5908 call = XVECEXP (call, 0, 0);
5909 if (GET_CODE (call) == SET)
5910 call = SET_SRC (call);
57999964 5911 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
4143d08b 5912 {
57999964 5913 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5914 {
5915 rtx symbol = XEXP (XEXP (call, 0), 0);
5916 if (SYMBOL_REF_DECL (symbol))
5917 fndecl = SYMBOL_REF_DECL (symbol);
5918 }
5919 if (fndecl == NULL_TREE)
5920 fndecl = MEM_EXPR (XEXP (call, 0));
5921 if (fndecl
5922 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5923 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5924 fndecl = NULL_TREE;
5925 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5926 type = TREE_TYPE (fndecl);
5927 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5928 {
5929 if (TREE_CODE (fndecl) == INDIRECT_REF
5930 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5931 obj_type_ref = TREE_OPERAND (fndecl, 0);
5932 fndecl = NULL_TREE;
5933 }
5934 if (type)
4143d08b 5935 {
4143d08b 5936 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5937 t = TREE_CHAIN (t))
5938 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5939 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5940 break;
57999964 5941 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
4143d08b 5942 type = NULL;
5943 else
57999964 5944 {
c5af5828 5945 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
57999964 5946 link = CALL_INSN_FUNCTION_USAGE (insn);
5947#ifndef PCC_STATIC_STRUCT_RETURN
5948 if (aggregate_value_p (TREE_TYPE (type), type)
5949 && targetm.calls.struct_value_rtx (type, 0) == 0)
5950 {
5951 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5952 enum machine_mode mode = TYPE_MODE (struct_addr);
5953 rtx reg;
39cba157 5954 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
57999964 5955 nargs + 1);
39cba157 5956 reg = targetm.calls.function_arg (args_so_far, mode,
57999964 5957 struct_addr, true);
39cba157 5958 targetm.calls.function_arg_advance (args_so_far, mode,
57999964 5959 struct_addr, true);
5960 if (reg == NULL_RTX)
5961 {
5962 for (; link; link = XEXP (link, 1))
5963 if (GET_CODE (XEXP (link, 0)) == USE
5964 && MEM_P (XEXP (XEXP (link, 0), 0)))
5965 {
5966 link = XEXP (link, 1);
5967 break;
5968 }
5969 }
5970 }
57999964 5971 else
bc40ee90 5972#endif
39cba157 5973 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
57999964 5974 nargs);
5975 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5976 {
5977 enum machine_mode mode;
5978 t = TYPE_ARG_TYPES (type);
5979 mode = TYPE_MODE (TREE_VALUE (t));
39cba157 5980 this_arg = targetm.calls.function_arg (args_so_far, mode,
57999964 5981 TREE_VALUE (t), true);
5982 if (this_arg && !REG_P (this_arg))
5983 this_arg = NULL_RTX;
5984 else if (this_arg == NULL_RTX)
5985 {
5986 for (; link; link = XEXP (link, 1))
5987 if (GET_CODE (XEXP (link, 0)) == USE
5988 && MEM_P (XEXP (XEXP (link, 0), 0)))
5989 {
5990 this_arg = XEXP (XEXP (link, 0), 0);
5991 break;
5992 }
5993 }
5994 }
5995 }
4143d08b 5996 }
5997 }
5998 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5999
6000 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6001 if (GET_CODE (XEXP (link, 0)) == USE)
6002 {
6003 rtx item = NULL_RTX;
6004 x = XEXP (XEXP (link, 0), 0);
b4eeceb9 6005 if (GET_MODE (link) == VOIDmode
6006 || GET_MODE (link) == BLKmode
6007 || (GET_MODE (link) != GET_MODE (x)
6008 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6009 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6010 /* Can't do anything for these, if the original type mode
6011 isn't known or can't be converted. */;
6012 else if (REG_P (x))
4143d08b 6013 {
6014 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6015 if (val && cselib_preserved_value_p (val))
b4eeceb9 6016 item = val->val_rtx;
4143d08b 6017 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6018 {
6019 enum machine_mode mode = GET_MODE (x);
6020
6021 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6022 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6023 {
6024 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6025
6026 if (reg == NULL_RTX || !REG_P (reg))
6027 continue;
6028 val = cselib_lookup (reg, mode, 0, VOIDmode);
6029 if (val && cselib_preserved_value_p (val))
6030 {
b4eeceb9 6031 item = val->val_rtx;
4143d08b 6032 break;
6033 }
6034 }
6035 }
6036 }
6037 else if (MEM_P (x))
6038 {
6039 rtx mem = x;
6040 cselib_val *val;
6041
6042 if (!frame_pointer_needed)
6043 {
6044 struct adjust_mem_data amd;
6045 amd.mem_mode = VOIDmode;
6046 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6047 amd.side_effects = NULL_RTX;
6048 amd.store = true;
6049 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6050 &amd);
6051 gcc_assert (amd.side_effects == NULL_RTX);
6052 }
6053 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6054 if (val && cselib_preserved_value_p (val))
b4eeceb9 6055 item = val->val_rtx;
6aed0484 6056 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6057 {
 6058 /* For a non-integer stack argument, also check whether it was
 6059 initialized in an integer mode. */
6060 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6061 if (imode != GET_MODE (mem) && imode != BLKmode)
6062 {
6063 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6064 imode, 0, VOIDmode);
6065 if (val && cselib_preserved_value_p (val))
b4eeceb9 6066 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6067 imode);
6aed0484 6068 }
6069 }
4143d08b 6070 }
6071 if (item)
b4eeceb9 6072 {
6073 rtx x2 = x;
6074 if (GET_MODE (item) != GET_MODE (link))
6075 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6076 if (GET_MODE (x2) != GET_MODE (link))
6077 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6078 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6079 call_arguments
6080 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6081 }
4143d08b 6082 if (t && t != void_list_node)
6083 {
2f5e9511 6084 tree argtype = TREE_VALUE (t);
6085 enum machine_mode mode = TYPE_MODE (argtype);
6086 rtx reg;
39cba157 6087 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
2f5e9511 6088 {
6089 argtype = build_pointer_type (argtype);
6090 mode = TYPE_MODE (argtype);
6091 }
39cba157 6092 reg = targetm.calls.function_arg (args_so_far, mode,
2f5e9511 6093 argtype, true);
6094 if (TREE_CODE (argtype) == REFERENCE_TYPE
6095 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
4143d08b 6096 && reg
6097 && REG_P (reg)
6098 && GET_MODE (reg) == mode
6099 && GET_MODE_CLASS (mode) == MODE_INT
6100 && REG_P (x)
6101 && REGNO (x) == REGNO (reg)
6102 && GET_MODE (x) == mode
6103 && item)
6104 {
6105 enum machine_mode indmode
2f5e9511 6106 = TYPE_MODE (TREE_TYPE (argtype));
4143d08b 6107 rtx mem = gen_rtx_MEM (indmode, x);
6108 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6109 if (val && cselib_preserved_value_p (val))
6110 {
6111 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6112 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6113 call_arguments);
6114 }
6115 else
6116 {
6117 struct elt_loc_list *l;
6118 tree initial;
6119
 6120 /* Try harder: when passing the address of a constant
 6121 pool integer, it can easily be read back. */
4a55735d 6122 item = XEXP (item, 1);
6123 if (GET_CODE (item) == SUBREG)
6124 item = SUBREG_REG (item);
6125 gcc_assert (GET_CODE (item) == VALUE);
6126 val = CSELIB_VAL_PTR (item);
4143d08b 6127 for (l = val->locs; l; l = l->next)
6128 if (GET_CODE (l->loc) == SYMBOL_REF
6129 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6130 && SYMBOL_REF_DECL (l->loc)
6131 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6132 {
6133 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6134 if (host_integerp (initial, 0))
6135 {
6136 item = GEN_INT (tree_low_cst (initial, 0));
6137 item = gen_rtx_CONCAT (indmode, mem, item);
6138 call_arguments
6139 = gen_rtx_EXPR_LIST (VOIDmode, item,
6140 call_arguments);
6141 }
6142 break;
6143 }
6144 }
6145 }
39cba157 6146 targetm.calls.function_arg_advance (args_so_far, mode,
2f5e9511 6147 argtype, true);
4143d08b 6148 t = TREE_CHAIN (t);
6149 }
6150 }
6151
841424cc 6152 /* Add debug arguments. */
6153 if (fndecl
6154 && TREE_CODE (fndecl) == FUNCTION_DECL
6155 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6156 {
6157 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
6158 if (debug_args)
6159 {
6160 unsigned int ix;
6161 tree param;
6162 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
6163 {
6164 rtx item;
6165 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
6166 enum machine_mode mode = DECL_MODE (dtemp);
6167 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
72fdb379 6168 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
841424cc 6169 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6170 call_arguments);
6171 }
6172 }
6173 }
6174
4143d08b 6175 /* Reverse call_arguments chain. */
6176 prev = NULL_RTX;
6177 for (cur = call_arguments; cur; cur = next)
6178 {
6179 next = XEXP (cur, 1);
6180 XEXP (cur, 1) = prev;
6181 prev = cur;
6182 }
6183 call_arguments = prev;
6184
6185 x = PATTERN (insn);
6186 if (GET_CODE (x) == PARALLEL)
6187 x = XVECEXP (x, 0, 0);
6188 if (GET_CODE (x) == SET)
6189 x = SET_SRC (x);
6190 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
6191 {
6192 x = XEXP (XEXP (x, 0), 0);
e3a8f582 6193 if (GET_CODE (x) == SYMBOL_REF)
6194 /* Don't record anything. */;
6195 else if (CONSTANT_P (x))
6196 {
6197 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6198 pc_rtx, x);
6199 call_arguments
6200 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6201 }
6202 else
4143d08b 6203 {
6204 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6205 if (val && cselib_preserved_value_p (val))
6206 {
6207 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6208 call_arguments
6209 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6210 }
6211 }
6212 }
57999964 6213 if (this_arg)
6214 {
6215 enum machine_mode mode
6216 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6217 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6218 HOST_WIDE_INT token
6219 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
6220 if (token)
29c05e22 6221 clobbered = plus_constant (mode, clobbered,
6222 token * GET_MODE_SIZE (mode));
57999964 6223 clobbered = gen_rtx_MEM (mode, clobbered);
6224 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6225 call_arguments
6226 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6227 }
4143d08b 6228}
6229
9845d120 6230/* Callback for cselib_record_sets_hook, which records the uses and
 6231 stores in an insn as micro operations.  It is called after
 6232 cselib_record_sets has analyzed the sets in the insn, but before it
 6233 modifies the stored values in its internal tables.  If
 6234 cselib_record_sets doesn't call it directly (perhaps because we're not
 6235 doing cselib in the first place), SETS and N_SETS will be 0. */
6236
6237static void
6238add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6239{
6240 basic_block bb = BLOCK_FOR_INSN (insn);
6241 int n1, n2;
6242 struct count_use_info cui;
c77c64d8 6243 micro_operation *mos;
9845d120 6244
6245 cselib_hook_called = true;
6246
6247 cui.insn = insn;
6248 cui.bb = bb;
6249 cui.sets = sets;
6250 cui.n_sets = n_sets;
6251
c77c64d8 6252 n1 = VEC_length (micro_operation, VTI (bb)->mos);
9845d120 6253 cui.store_p = false;
6254 note_uses (&PATTERN (insn), add_uses_1, &cui);
c77c64d8 6255 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6256 mos = VEC_address (micro_operation, VTI (bb)->mos);
9845d120 6257
4c0840d0 6258 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6259 MO_VAL_LOC last. */
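  /* This is done with two in-place partition passes over the new
     entries: the first swaps MO_USEs toward the front, the second swaps
     MO_VAL_LOCs toward the back.  */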
9845d120 6260 while (n1 < n2)
6261 {
c77c64d8 6262 while (n1 < n2 && mos[n1].type == MO_USE)
9845d120 6263 n1++;
c77c64d8 6264 while (n1 < n2 && mos[n2].type != MO_USE)
9845d120 6265 n2--;
6266 if (n1 < n2)
6267 {
6268 micro_operation sw;
6269
c77c64d8 6270 sw = mos[n1];
6271 mos[n1] = mos[n2];
6272 mos[n2] = sw;
4c0840d0 6273 }
6274 }
6275
c77c64d8 6276 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
4c0840d0 6277 while (n1 < n2)
6278 {
c77c64d8 6279 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
4c0840d0 6280 n1++;
c77c64d8 6281 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
4c0840d0 6282 n2--;
6283 if (n1 < n2)
6284 {
6285 micro_operation sw;
6286
c77c64d8 6287 sw = mos[n1];
6288 mos[n1] = mos[n2];
6289 mos[n2] = sw;
9845d120 6290 }
6291 }
6292
6293 if (CALL_P (insn))
6294 {
c77c64d8 6295 micro_operation mo;
9845d120 6296
c77c64d8 6297 mo.type = MO_CALL;
6298 mo.insn = insn;
4143d08b 6299 mo.u.loc = call_arguments;
6300 call_arguments = NULL_RTX;
9845d120 6301
6302 if (dump_file && (dump_flags & TDF_DETAILS))
c77c64d8 6303 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6304 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
9845d120 6305 }
6306
c77c64d8 6307 n1 = VEC_length (micro_operation, VTI (bb)->mos);
9845d120 6308 /* This will record NEXT_INSN (insn), such that we can
6309 insert notes before it without worrying about any
6310 notes that MO_USEs might emit after the insn. */
6311 cui.store_p = true;
6312 note_stores (PATTERN (insn), add_stores, &cui);
c77c64d8 6313 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6314 mos = VEC_address (micro_operation, VTI (bb)->mos);
9845d120 6315
c77c64d8 6316 /* Order the MO_VAL_USEs first (note_stores does nothing
6317 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6318 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
9845d120 6319 while (n1 < n2)
6320 {
c77c64d8 6321 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
9845d120 6322 n1++;
c77c64d8 6323 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
9845d120 6324 n2--;
6325 if (n1 < n2)
6326 {
6327 micro_operation sw;
6328
c77c64d8 6329 sw = mos[n1];
6330 mos[n1] = mos[n2];
6331 mos[n2] = sw;
6332 }
6333 }
6334
6335 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6336 while (n1 < n2)
6337 {
6338 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6339 n1++;
6340 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6341 n2--;
6342 if (n1 < n2)
6343 {
6344 micro_operation sw;
6345
6346 sw = mos[n1];
6347 mos[n1] = mos[n2];
6348 mos[n2] = sw;
9845d120 6349 }
5923a5e7 6350 }
6351}
6352
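/* Return the initialization status recorded in dataflow set IN for the
   variable of which SRC (a REG or MEM) is a location.  */
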
d53bb226 6353static enum var_init_status
e10d697d 6354find_src_status (dataflow_set *in, rtx src)
d53bb226 6355{
d53bb226 6356 tree decl = NULL_TREE;
6357 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6358
6359 if (! flag_var_tracking_uninit)
6360 status = VAR_INIT_STATUS_INITIALIZED;
6361
22e3029e 6362 if (src && REG_P (src))
d53bb226 6363 decl = var_debug_decl (REG_EXPR (src));
22e3029e 6364 else if (src && MEM_P (src))
d53bb226 6365 decl = var_debug_decl (MEM_EXPR (src));
6366
6367 if (src && decl)
9845d120 6368 status = get_init_value (in, src, dv_from_decl (decl));
d53bb226 6369
6370 return status;
6371}
6372
e10d697d 6373/* SRC is the source of an assignment. Use SET to try to find what
 6374 was ultimately assigned to SRC.  Return that value if known,
 6375 otherwise return NULL_RTX. */
d53bb226 6376
6377static rtx
e10d697d 6378find_src_set_src (dataflow_set *set, rtx src)
d53bb226 6379{
6380 tree decl = NULL_TREE; /* The variable being copied around. */
d53bb226 6381 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
d53bb226 6382 variable var;
6383 location_chain nextp;
6384 int i;
6385 bool found;
6386
22e3029e 6387 if (src && REG_P (src))
d53bb226 6388 decl = var_debug_decl (REG_EXPR (src));
22e3029e 6389 else if (src && MEM_P (src))
d53bb226 6390 decl = var_debug_decl (MEM_EXPR (src));
6391
6392 if (src && decl)
6393 {
9845d120 6394 decl_or_value dv = dv_from_decl (decl);
6395
6396 var = shared_hash_find (set->vars, dv);
a8f6ad2b 6397 if (var)
d53bb226 6398 {
d53bb226 6399 found = false;
6400 for (i = 0; i < var->n_var_parts && !found; i++)
48e1416a 6401 for (nextp = var->var_part[i].loc_chain; nextp && !found;
d53bb226 6402 nextp = nextp->next)
6403 if (rtx_equal_p (nextp->loc, src))
6404 {
6405 set_src = nextp->set_src;
6406 found = true;
6407 }
48e1416a 6408
d53bb226 6409 }
6410 }
6411
6412 return set_src;
6413}
6414
9845d120 6415/* Compute the changes of variable locations in the basic block BB.
   Return true if the OUT set differs from what it was before. */
6416
6417static bool
6418compute_bb_dataflow (basic_block bb)
6419{
c77c64d8 6420 unsigned int i;
6421 micro_operation *mo;
9845d120 6422 bool changed;
6423 dataflow_set old_out;
6424 dataflow_set *in = &VTI (bb)->in;
6425 dataflow_set *out = &VTI (bb)->out;
6426
6427 dataflow_set_init (&old_out);
6428 dataflow_set_copy (&old_out, out);
6429 dataflow_set_copy (out, in);
6430
48148244 6431 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
9845d120 6432 {
c77c64d8 6433 rtx insn = mo->insn;
9845d120 6434
c77c64d8 6435 switch (mo->type)
9845d120 6436 {
6437 case MO_CALL:
6438 dataflow_set_clear_at_call (out);
6439 break;
6440
6441 case MO_USE:
6442 {
c77c64d8 6443 rtx loc = mo->u.loc;
9845d120 6444
6445 if (REG_P (loc))
6446 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6447 else if (MEM_P (loc))
6448 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6449 }
6450 break;
6451
6452 case MO_VAL_LOC:
6453 {
c77c64d8 6454 rtx loc = mo->u.loc;
9845d120 6455 rtx val, vloc;
6456 tree var;
6457
6458 if (GET_CODE (loc) == CONCAT)
6459 {
6460 val = XEXP (loc, 0);
6461 vloc = XEXP (loc, 1);
6462 }
6463 else
6464 {
6465 val = NULL_RTX;
6466 vloc = loc;
6467 }
6468
6469 var = PAT_VAR_LOCATION_DECL (vloc);
6470
6471 clobber_variable_part (out, NULL_RTX,
6472 dv_from_decl (var), 0, NULL_RTX);
6473 if (val)
6474 {
6475 if (VAL_NEEDS_RESOLUTION (loc))
6476 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6477 set_variable_part (out, val, dv_from_decl (var), 0,
6478 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6479 INSERT);
6480 }
1197d3d7 6481 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6482 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6483 dv_from_decl (var), 0,
6484 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6485 INSERT);
9845d120 6486 }
6487 break;
6488
6489 case MO_VAL_USE:
6490 {
c77c64d8 6491 rtx loc = mo->u.loc;
9845d120 6492 rtx val, vloc, uloc;
6493
6494 vloc = uloc = XEXP (loc, 1);
6495 val = XEXP (loc, 0);
6496
6497 if (GET_CODE (val) == CONCAT)
6498 {
6499 uloc = XEXP (val, 1);
6500 val = XEXP (val, 0);
6501 }
6502
6503 if (VAL_NEEDS_RESOLUTION (loc))
6504 val_resolve (out, val, vloc, insn);
bf262632 6505 else
6506 val_store (out, val, uloc, insn, false);
9845d120 6507
6508 if (VAL_HOLDS_TRACK_EXPR (loc))
6509 {
6510 if (GET_CODE (uloc) == REG)
6511 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6512 NULL);
6513 else if (GET_CODE (uloc) == MEM)
6514 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6515 NULL);
6516 }
6517 }
6518 break;
6519
6520 case MO_VAL_SET:
6521 {
c77c64d8 6522 rtx loc = mo->u.loc;
8081d3a6 6523 rtx val, vloc, uloc;
df226854 6524 rtx dstv, srcv;
9845d120 6525
a243dd11 6526 vloc = loc;
a243dd11 6527 uloc = XEXP (vloc, 1);
6528 val = XEXP (vloc, 0);
6529 vloc = uloc;
9845d120 6530
df226854 6531 if (GET_CODE (uloc) == SET)
6532 {
6533 dstv = SET_DEST (uloc);
6534 srcv = SET_SRC (uloc);
6535 }
6536 else
6537 {
6538 dstv = uloc;
6539 srcv = NULL;
6540 }
6541
9845d120 6542 if (GET_CODE (val) == CONCAT)
6543 {
df226854 6544 dstv = vloc = XEXP (val, 1);
9845d120 6545 val = XEXP (val, 0);
6546 }
6547
6548 if (GET_CODE (vloc) == SET)
6549 {
df226854 6550 srcv = SET_SRC (vloc);
9845d120 6551
df226854 6552 gcc_assert (val != srcv);
9845d120 6553 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6554
df226854 6555 dstv = vloc = SET_DEST (vloc);
9845d120 6556
6557 if (VAL_NEEDS_RESOLUTION (loc))
df226854 6558 val_resolve (out, val, srcv, insn);
9845d120 6559 }
6560 else if (VAL_NEEDS_RESOLUTION (loc))
6561 {
6562 gcc_assert (GET_CODE (uloc) == SET
6563 && GET_CODE (SET_SRC (uloc)) == REG);
6564 val_resolve (out, val, SET_SRC (uloc), insn);
6565 }
6566
6567 if (VAL_HOLDS_TRACK_EXPR (loc))
6568 {
6569 if (VAL_EXPR_IS_CLOBBERED (loc))
6570 {
6571 if (REG_P (uloc))
6572 var_reg_delete (out, uloc, true);
6573 else if (MEM_P (uloc))
df226854 6574 {
6575 gcc_assert (MEM_P (dstv));
6576 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6577 var_mem_delete (out, dstv, true);
6578 }
9845d120 6579 }
6580 else
6581 {
6582 bool copied_p = VAL_EXPR_IS_COPIED (loc);
df226854 6583 rtx src = NULL, dst = uloc;
9845d120 6584 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6585
6586 if (GET_CODE (uloc) == SET)
6587 {
df226854 6588 src = SET_SRC (uloc);
6589 dst = SET_DEST (uloc);
9845d120 6590 }
5923a5e7 6591
9845d120 6592 if (copied_p)
6593 {
6594 if (flag_var_tracking_uninit)
6595 {
df226854 6596 status = find_src_status (in, src);
5923a5e7 6597
9845d120 6598 if (status == VAR_INIT_STATUS_UNKNOWN)
df226854 6599 status = find_src_status (out, src);
9845d120 6600 }
5923a5e7 6601
df226854 6602 src = find_src_set_src (in, src);
9845d120 6603 }
5923a5e7 6604
df226854 6605 if (REG_P (dst))
6606 var_reg_delete_and_set (out, dst, !copied_p,
6607 status, srcv);
6608 else if (MEM_P (dst))
6609 {
6610 gcc_assert (MEM_P (dstv));
6611 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6612 var_mem_delete_and_set (out, dstv, !copied_p,
6613 status, srcv);
6614 }
9845d120 6615 }
6616 }
6617 else if (REG_P (uloc))
6618 var_regno_delete (out, REGNO (uloc));
c573c1b5 6619 else if (MEM_P (uloc))
6620 clobber_overlapping_mems (out, uloc);
9a8a3ff4 6621
df226854 6622 val_store (out, val, dstv, insn, true);
9a8a3ff4 6623 }
6624 break;
6625
5923a5e7 6626 case MO_SET:
6627 {
c77c64d8 6628 rtx loc = mo->u.loc;
e10d697d 6629 rtx set_src = NULL;
d53bb226 6630
e10d697d 6631 if (GET_CODE (loc) == SET)
d53bb226 6632 {
e10d697d 6633 set_src = SET_SRC (loc);
6634 loc = SET_DEST (loc);
d53bb226 6635 }
5923a5e7 6636
8ad4c111 6637 if (REG_P (loc))
d53bb226 6638 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6639 set_src);
96414f01 6640 else if (MEM_P (loc))
d53bb226 6641 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6642 set_src);
96414f01 6643 }
6644 break;
6645
6646 case MO_COPY:
6647 {
c77c64d8 6648 rtx loc = mo->u.loc;
d53bb226 6649 enum var_init_status src_status;
e10d697d 6650 rtx set_src = NULL;
6651
6652 if (GET_CODE (loc) == SET)
6653 {
6654 set_src = SET_SRC (loc);
6655 loc = SET_DEST (loc);
6656 }
d53bb226 6657
6658 if (! flag_var_tracking_uninit)
6659 src_status = VAR_INIT_STATUS_INITIALIZED;
6660 else
331cf53a 6661 {
6662 src_status = find_src_status (in, set_src);
d53bb226 6663
331cf53a 6664 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6665 src_status = find_src_status (out, set_src);
6666 }
d53bb226 6667
e10d697d 6668 set_src = find_src_set_src (in, set_src);
96414f01 6669
6670 if (REG_P (loc))
d53bb226 6671 var_reg_delete_and_set (out, loc, false, src_status, set_src);
e16ceb8e 6672 else if (MEM_P (loc))
d53bb226 6673 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5923a5e7 6674 }
6675 break;
6676
6677 case MO_USE_NO_VAR:
96414f01 6678 {
c77c64d8 6679 rtx loc = mo->u.loc;
96414f01 6680
6681 if (REG_P (loc))
6682 var_reg_delete (out, loc, false);
6683 else if (MEM_P (loc))
6684 var_mem_delete (out, loc, false);
6685 }
6686 break;
6687
5923a5e7 6688 case MO_CLOBBER:
6689 {
c77c64d8 6690 rtx loc = mo->u.loc;
5923a5e7 6691
8ad4c111 6692 if (REG_P (loc))
96414f01 6693 var_reg_delete (out, loc, true);
e16ceb8e 6694 else if (MEM_P (loc))
96414f01 6695 var_mem_delete (out, loc, true);
5923a5e7 6696 }
6697 break;
6698
6699 case MO_ADJUST:
c77c64d8 6700 out->stack_adjust += mo->u.adjust;
5923a5e7 6701 break;
6702 }
6703 }
6704
9845d120 6705 if (MAY_HAVE_DEBUG_INSNS)
6706 {
6707 dataflow_set_equiv_regs (out);
6708 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6709 out);
6710 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6711 out);
6712#if ENABLE_CHECKING
6713 htab_traverse (shared_hash_htab (out->vars),
6714 canonicalize_loc_order_check, out);
6715#endif
6716 }
5923a5e7 6717 changed = dataflow_set_different (&old_out, out);
6718 dataflow_set_destroy (&old_out);
6719 return changed;
6720}
6721
6722/* Find the locations of variables in the whole function. */
6723
76f4ab1c 6724static bool
5923a5e7 6725vt_find_locations (void)
6726{
6727 fibheap_t worklist, pending, fibheap_swap;
6728 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6729 basic_block bb;
6730 edge e;
6731 int *bb_order;
6732 int *rc_order;
6733 int i;
9845d120 6734 int htabsz = 0;
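  /* Give up when the total size of the per-block IN/OUT hash tables
     grows beyond the max-vartrack-size parameter (zero means no
     limit).  */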
76f4ab1c 6735 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6736 bool success = true;
5923a5e7 6737
e2050933 6738 timevar_push (TV_VAR_TRACKING_DATAFLOW);
5923a5e7 6739 /* Compute reverse completion order of depth first search of the CFG
6740 so that the data-flow runs faster. */
4c36ffe6 6741 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6742 bb_order = XNEWVEC (int, last_basic_block);
6180f28d 6743 pre_and_rev_post_order_compute (NULL, rc_order, false);
4d2e5d52 6744 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5923a5e7 6745 bb_order[rc_order[i]] = i;
6746 free (rc_order);
6747
6748 worklist = fibheap_new ();
6749 pending = fibheap_new ();
6750 visited = sbitmap_alloc (last_basic_block);
6751 in_worklist = sbitmap_alloc (last_basic_block);
6752 in_pending = sbitmap_alloc (last_basic_block);
6753 sbitmap_zero (in_worklist);
5923a5e7 6754
6755 FOR_EACH_BB (bb)
d86eb5b2 6756 fibheap_insert (pending, bb_order[bb->index], bb);
6757 sbitmap_ones (in_pending);
5923a5e7 6758
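  /* Iterate to a fixed point.  Blocks queued in PENDING form the next
     round and those in WORKLIST the current one; both heaps are keyed by
     the reverse completion order computed above, so predecessors tend to
     be processed before their successors.  */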
76f4ab1c 6759 while (success && !fibheap_empty (pending))
5923a5e7 6760 {
6761 fibheap_swap = pending;
6762 pending = worklist;
6763 worklist = fibheap_swap;
6764 sbitmap_swap = in_pending;
6765 in_pending = in_worklist;
6766 in_worklist = sbitmap_swap;
6767
6768 sbitmap_zero (visited);
6769
6770 while (!fibheap_empty (worklist))
6771 {
45ba1503 6772 bb = (basic_block) fibheap_extract_min (worklist);
5923a5e7 6773 RESET_BIT (in_worklist, bb->index);
e2050933 6774 gcc_assert (!TEST_BIT (visited, bb->index));
5923a5e7 6775 if (!TEST_BIT (visited, bb->index))
6776 {
6777 bool changed;
cd665a06 6778 edge_iterator ei;
9845d120 6779 int oldinsz, oldoutsz;
5923a5e7 6780
6781 SET_BIT (visited, bb->index);
6782
76f4ab1c 6783 if (VTI (bb)->in.vars)
9845d120 6784 {
6785 htabsz
76f4ab1c 6786 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6787 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
9845d120 6788 oldinsz
6789 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6790 oldoutsz
6791 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6792 }
6793 else
6794 oldinsz = oldoutsz = 0;
6795
6796 if (MAY_HAVE_DEBUG_INSNS)
6797 {
6798 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6799 bool first = true, adjust = false;
6800
6801 /* Calculate the IN set as the intersection of
6802 predecessor OUT sets. */
6803
6804 dataflow_set_clear (in);
6805 dst_can_be_shared = true;
6806
6807 FOR_EACH_EDGE (e, ei, bb->preds)
6808 if (!VTI (e->src)->flooded)
6809 gcc_assert (bb_order[bb->index]
6810 <= bb_order[e->src->index]);
6811 else if (first)
6812 {
6813 dataflow_set_copy (in, &VTI (e->src)->out);
6814 first_out = &VTI (e->src)->out;
6815 first = false;
6816 }
6817 else
6818 {
6819 dataflow_set_merge (in, &VTI (e->src)->out);
6820 adjust = true;
6821 }
6822
6823 if (adjust)
6824 {
6825 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6826#if ENABLE_CHECKING
6827 /* Merge and merge_adjust should keep entries in
6828 canonical order. */
6829 htab_traverse (shared_hash_htab (in->vars),
6830 canonicalize_loc_order_check,
6831 in);
6832#endif
6833 if (dst_can_be_shared)
6834 {
6835 shared_hash_destroy (in->vars);
6836 in->vars = shared_hash_copy (first_out->vars);
6837 }
6838 }
6839
6840 VTI (bb)->flooded = true;
6841 }
6842 else
5923a5e7 6843 {
9845d120 6844 /* Calculate the IN set as the union of predecessor OUT sets. */
6845 dataflow_set_clear (&VTI (bb)->in);
6846 FOR_EACH_EDGE (e, ei, bb->preds)
6847 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5923a5e7 6848 }
6849
6850 changed = compute_bb_dataflow (bb);
76f4ab1c 6851 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6852 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6853
6854 if (htabmax && htabsz > htabmax)
6855 {
6856 if (MAY_HAVE_DEBUG_INSNS)
6857 inform (DECL_SOURCE_LOCATION (cfun->decl),
6858 "variable tracking size limit exceeded with "
6859 "-fvar-tracking-assignments, retrying without");
6860 else
6861 inform (DECL_SOURCE_LOCATION (cfun->decl),
6862 "variable tracking size limit exceeded");
6863 success = false;
6864 break;
6865 }
9845d120 6866
5923a5e7 6867 if (changed)
6868 {
cd665a06 6869 FOR_EACH_EDGE (e, ei, bb->succs)
5923a5e7 6870 {
6871 if (e->dest == EXIT_BLOCK_PTR)
6872 continue;
6873
5923a5e7 6874 if (TEST_BIT (visited, e->dest->index))
6875 {
6876 if (!TEST_BIT (in_pending, e->dest->index))
6877 {
6878 /* Send E->DEST to next round. */
6879 SET_BIT (in_pending, e->dest->index);
6880 fibheap_insert (pending,
6881 bb_order[e->dest->index],
6882 e->dest);
6883 }
6884 }
6885 else if (!TEST_BIT (in_worklist, e->dest->index))
6886 {
6887 /* Add E->DEST to current round. */
6888 SET_BIT (in_worklist, e->dest->index);
6889 fibheap_insert (worklist, bb_order[e->dest->index],
6890 e->dest);
6891 }
6892 }
6893 }
9845d120 6894
6895 if (dump_file)
6896 fprintf (dump_file,
6897 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6898 bb->index,
6899 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6900 oldinsz,
6901 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6902 oldoutsz,
6903 (int)worklist->nodes, (int)pending->nodes, htabsz);
6904
6905 if (dump_file && (dump_flags & TDF_DETAILS))
6906 {
6907 fprintf (dump_file, "BB %i IN:\n", bb->index);
6908 dump_dataflow_set (&VTI (bb)->in);
6909 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6910 dump_dataflow_set (&VTI (bb)->out);
6911 }
5923a5e7 6912 }
6913 }
6914 }
6915
76f4ab1c 6916 if (success && MAY_HAVE_DEBUG_INSNS)
9845d120 6917 FOR_EACH_BB (bb)
6918 gcc_assert (VTI (bb)->flooded);
6919
5923a5e7 6920 free (bb_order);
6921 fibheap_delete (worklist);
6922 fibheap_delete (pending);
6923 sbitmap_free (visited);
6924 sbitmap_free (in_worklist);
6925 sbitmap_free (in_pending);
76f4ab1c 6926
e2050933 6927 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
76f4ab1c 6928 return success;
5923a5e7 6929}
6930
6931/* Print the content of the LIST to dump file. */
6932
6933static void
6934dump_attrs_list (attrs list)
6935{
6936 for (; list; list = list->next)
6937 {
9845d120 6938 if (dv_is_decl_p (list->dv))
6939 print_mem_expr (dump_file, dv_as_decl (list->dv));
6940 else
6941 print_rtl_single (dump_file, dv_as_value (list->dv));
12d886b8 6942 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5923a5e7 6943 }
450d042a 6944 fprintf (dump_file, "\n");
5923a5e7 6945}
6946
6947/* Print the information about variable *SLOT to dump file. */
6948
6949static int
0358713f 6950dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
9845d120 6951{
6952 variable var = (variable) *slot;
6953
0358713f 6954 dump_var (var);
9845d120 6955
6956 /* Continue traversing the hash table. */
6957 return 1;
6958}
6959
6960/* Print the information about variable VAR to dump file. */
6961
6962static void
0358713f 6963dump_var (variable var)
5923a5e7 6964{
5923a5e7 6965 int i;
6966 location_chain node;
6967
9845d120 6968 if (dv_is_decl_p (var->dv))
6969 {
6970 const_tree decl = dv_as_decl (var->dv);
6971
6972 if (DECL_NAME (decl))
b4d01854 6973 {
6974 fprintf (dump_file, " name: %s",
6975 IDENTIFIER_POINTER (DECL_NAME (decl)));
6976 if (dump_flags & TDF_UID)
6977 fprintf (dump_file, "D.%u", DECL_UID (decl));
6978 }
6979 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6980 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
9845d120 6981 else
6982 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
b4d01854 6983 fprintf (dump_file, "\n");
9845d120 6984 }
4236f54f 6985 else
9845d120 6986 {
6987 fputc (' ', dump_file);
6988 print_rtl_single (dump_file, dv_as_value (var->dv));
6989 }
4236f54f 6990
5923a5e7 6991 for (i = 0; i < var->n_var_parts; i++)
6992 {
450d042a 6993 fprintf (dump_file, " offset %ld\n",
72fdb379 6994 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
5923a5e7 6995 for (node = var->var_part[i].loc_chain; node; node = node->next)
6996 {
450d042a 6997 fprintf (dump_file, " ");
d53bb226 6998 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6999 fprintf (dump_file, "[uninit]");
450d042a 7000 print_rtl_single (dump_file, node->loc);
5923a5e7 7001 }
7002 }
5923a5e7 7003}
7004
7005/* Print the information about variables from hash table VARS to dump file. */
7006
7007static void
7008dump_vars (htab_t vars)
7009{
7010 if (htab_elements (vars) > 0)
7011 {
450d042a 7012 fprintf (dump_file, "Variables:\n");
0358713f 7013 htab_traverse (vars, dump_var_slot, NULL);
5923a5e7 7014 }
7015}
7016
7017/* Print the dataflow set SET to dump file. */
7018
7019static void
7020dump_dataflow_set (dataflow_set *set)
7021{
7022 int i;
7023
12d886b8 7024 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7025 set->stack_adjust);
2a964ec6 7026 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5923a5e7 7027 {
7028 if (set->regs[i])
7029 {
450d042a 7030 fprintf (dump_file, "Reg %d:", i);
5923a5e7 7031 dump_attrs_list (set->regs[i]);
7032 }
7033 }
a8f6ad2b 7034 dump_vars (shared_hash_htab (set->vars));
450d042a 7035 fprintf (dump_file, "\n");
5923a5e7 7036}
7037
7038/* Print the IN and OUT sets for each basic block to dump file. */
7039
7040static void
7041dump_dataflow_sets (void)
7042{
7043 basic_block bb;
7044
7045 FOR_EACH_BB (bb)
7046 {
450d042a 7047 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7048 fprintf (dump_file, "IN:\n");
5923a5e7 7049 dump_dataflow_set (&VTI (bb)->in);
450d042a 7050 fprintf (dump_file, "OUT:\n");
5923a5e7 7051 dump_dataflow_set (&VTI (bb)->out);
7052 }
7053}
7054
72fdb379 7055/* Return the variable for DV in dropped_values, inserting one if
7056 requested with INSERT. */
7057
7058static inline variable
7059variable_from_dropped (decl_or_value dv, enum insert_option insert)
7060{
7061 void **slot;
7062 variable empty_var;
7063 onepart_enum_t onepart;
7064
7065 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
7066 insert);
7067
7068 if (!slot)
7069 return NULL;
7070
7071 if (*slot)
7072 return (variable) *slot;
7073
7074 gcc_checking_assert (insert == INSERT);
7075
7076 onepart = dv_onepart_p (dv);
7077
7078 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7079
7080 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7081 empty_var->dv = dv;
7082 empty_var->refcount = 1;
7083 empty_var->n_var_parts = 0;
7084 empty_var->onepart = onepart;
7085 empty_var->in_changed_variables = false;
7086 empty_var->var_part[0].loc_chain = NULL;
7087 empty_var->var_part[0].cur_loc = NULL;
7088 VAR_LOC_1PAUX (empty_var) = NULL;
7089 set_dv_changed (dv, true);
7090
7091 *slot = empty_var;
7092
7093 return empty_var;
7094}
7095
7096/* Recover the one-part aux from dropped_values. */
7097
7098static struct onepart_aux *
7099recover_dropped_1paux (variable var)
7100{
7101 variable dvar;
7102
7103 gcc_checking_assert (var->onepart);
7104
7105 if (VAR_LOC_1PAUX (var))
7106 return VAR_LOC_1PAUX (var);
7107
7108 if (var->onepart == ONEPART_VDECL)
7109 return NULL;
7110
7111 dvar = variable_from_dropped (var->dv, NO_INSERT);
7112
7113 if (!dvar)
7114 return NULL;
7115
7116 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7117 VAR_LOC_1PAUX (dvar) = NULL;
7118
7119 return VAR_LOC_1PAUX (var);
7120}
7121
 7122/* Add variable VAR to the hash table of changed variables and,
a8f6ad2b 7123 if it has no locations, delete it from SET's hash table. */
5923a5e7 7124
7125static void
a8f6ad2b 7126variable_was_changed (variable var, dataflow_set *set)
5923a5e7 7127{
9845d120 7128 hashval_t hash = dv_htab_hash (var->dv);
5923a5e7 7129
7130 if (emit_notes)
7131 {
9845d120 7132 void **slot;
7133
7134 /* Remember this decl or VALUE has been added to changed_variables. */
7135 set_dv_changed (var->dv, true);
5923a5e7 7136
9845d120 7137 slot = htab_find_slot_with_hash (changed_variables,
7138 var->dv,
7139 hash, INSERT);
5923a5e7 7140
bc95df68 7141 if (*slot)
7142 {
7143 variable old_var = (variable) *slot;
7144 gcc_assert (old_var->in_changed_variables);
7145 old_var->in_changed_variables = false;
72fdb379 7146 if (var != old_var && var->onepart)
7147 {
7148 /* Restore the auxiliary info from an empty variable
7149 previously created for changed_variables, so it is
7150 not lost. */
7151 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7152 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7153 VAR_LOC_1PAUX (old_var) = NULL;
7154 }
bc95df68 7155 variable_htab_free (*slot);
7156 }
72fdb379 7157
a8f6ad2b 7158 if (set && var->n_var_parts == 0)
5923a5e7 7159 {
72fdb379 7160 onepart_enum_t onepart = var->onepart;
7161 variable empty_var = NULL;
7162 void **dslot = NULL;
5923a5e7 7163
72fdb379 7164 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7165 {
7166 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
7167 dv_htab_hash (var->dv),
7168 INSERT);
7169 empty_var = (variable) *dslot;
7170
7171 if (empty_var)
7172 {
7173 gcc_checking_assert (!empty_var->in_changed_variables);
7174 if (!VAR_LOC_1PAUX (var))
7175 {
7176 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7177 VAR_LOC_1PAUX (empty_var) = NULL;
7178 }
7179 else
7180 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7181 }
7182 }
7183
7184 if (!empty_var)
7185 {
7186 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7187 empty_var->dv = var->dv;
7188 empty_var->refcount = 1;
7189 empty_var->n_var_parts = 0;
7190 empty_var->onepart = onepart;
7191 if (dslot)
7192 {
7193 empty_var->refcount++;
7194 *dslot = empty_var;
7195 }
7196 }
7197 else
7198 empty_var->refcount++;
bc95df68 7199 empty_var->in_changed_variables = true;
5923a5e7 7200 *slot = empty_var;
72fdb379 7201 if (onepart)
7202 {
7203 empty_var->var_part[0].loc_chain = NULL;
7204 empty_var->var_part[0].cur_loc = NULL;
7205 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7206 VAR_LOC_1PAUX (var) = NULL;
7207 }
a8f6ad2b 7208 goto drop_var;
5923a5e7 7209 }
7210 else
7211 {
72fdb379 7212 if (var->onepart && !VAR_LOC_1PAUX (var))
7213 recover_dropped_1paux (var);
a8f6ad2b 7214 var->refcount++;
bc95df68 7215 var->in_changed_variables = true;
5923a5e7 7216 *slot = var;
7217 }
7218 }
7219 else
7220 {
a8f6ad2b 7221 gcc_assert (set);
5923a5e7 7222 if (var->n_var_parts == 0)
7223 {
a8f6ad2b 7224 void **slot;
7225
7226 drop_var:
9845d120 7227 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5923a5e7 7228 if (slot)
a8f6ad2b 7229 {
7230 if (shared_hash_shared (set->vars))
9845d120 7231 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
a8f6ad2b 7232 NO_INSERT);
7233 htab_clear_slot (shared_hash_htab (set->vars), slot);
7234 }
5923a5e7 7235 }
7236 }
7237}
7238
96414f01 7239/* Look for the index in VAR->var_part corresponding to OFFSET.
7240 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7241 referenced int will be set to the index that the part has or should
7242 have, if it should be inserted. */
7243
7244static inline int
7245find_variable_location_part (variable var, HOST_WIDE_INT offset,
7246 int *insertion_point)
7247{
7248 int pos, low, high;
7249
72fdb379 7250 if (var->onepart)
7251 {
7252 if (offset != 0)
7253 return -1;
7254
7255 if (insertion_point)
7256 *insertion_point = 0;
7257
7258 return var->n_var_parts - 1;
7259 }
7260
96414f01 7261 /* Find the location part. */
7262 low = 0;
7263 high = var->n_var_parts;
7264 while (low != high)
7265 {
7266 pos = (low + high) / 2;
72fdb379 7267 if (VAR_PART_OFFSET (var, pos) < offset)
96414f01 7268 low = pos + 1;
7269 else
7270 high = pos;
7271 }
7272 pos = low;
7273
7274 if (insertion_point)
7275 *insertion_point = pos;
7276
72fdb379 7277 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
96414f01 7278 return pos;
7279
7280 return -1;
7281}
7282
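/* Add a location LOC for the variable part specified by DV and OFFSET to
   the variable held in *SLOT, unsharing the variable first if it is
   shared.  INITIALIZED is the initialization status and SET_SRC the
   source rtx of the assignment being recorded.  Return the slot holding
   the (possibly unshared) variable.  */
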
9845d120 7283static void **
7284set_slot_part (dataflow_set *set, rtx loc, void **slot,
7285 decl_or_value dv, HOST_WIDE_INT offset,
7286 enum var_init_status initialized, rtx set_src)
5923a5e7 7287{
96414f01 7288 int pos;
e0473d22 7289 location_chain node, next;
7290 location_chain *nextp;
5923a5e7 7291 variable var;
72fdb379 7292 onepart_enum_t onepart;
9845d120 7293
7294 var = (variable) *slot;
a8f6ad2b 7295
72fdb379 7296 if (var)
7297 onepart = var->onepart;
7298 else
7299 onepart = dv_onepart_p (dv);
7300
7301 gcc_checking_assert (offset == 0 || !onepart);
7302 gcc_checking_assert (loc != dv_as_opaque (dv));
7303
331cf53a 7304 if (! flag_var_tracking_uninit)
7305 initialized = VAR_INIT_STATUS_INITIALIZED;
7306
9845d120 7307 if (!var)
5923a5e7 7308 {
7309 /* Create new variable information. */
72fdb379 7310 var = (variable) pool_alloc (onepart_pool (onepart));
9845d120 7311 var->dv = dv;
85bbdb3f 7312 var->refcount = 1;
5923a5e7 7313 var->n_var_parts = 1;
72fdb379 7314 var->onepart = onepart;
bc95df68 7315 var->in_changed_variables = false;
72fdb379 7316 if (var->onepart)
7317 VAR_LOC_1PAUX (var) = NULL;
7318 else
7319 VAR_PART_OFFSET (var, 0) = offset;
5923a5e7 7320 var->var_part[0].loc_chain = NULL;
7321 var->var_part[0].cur_loc = NULL;
7322 *slot = var;
7323 pos = 0;
9845d120 7324 nextp = &var->var_part[0].loc_chain;
9845d120 7325 }
7326 else if (onepart)
7327 {
7328 int r = -1, c = 0;
7329
7330 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7331
7332 pos = 0;
7333
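      /* Keep the location chain of a one-part variable in canonical
         order (cf. canonicalize_loc_order_check).  The loops below find
         the insertion point for LOC, counting in C the nodes that come
         before it; R is set to 0 if LOC is already in the chain.  */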
7334 if (GET_CODE (loc) == VALUE)
7335 {
7336 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7337 nextp = &node->next)
7338 if (GET_CODE (node->loc) == VALUE)
7339 {
7340 if (node->loc == loc)
7341 {
7342 r = 0;
7343 break;
7344 }
7345 if (canon_value_cmp (node->loc, loc))
7346 c++;
7347 else
7348 {
7349 r = 1;
7350 break;
7351 }
7352 }
7353 else if (REG_P (node->loc) || MEM_P (node->loc))
7354 c++;
7355 else
7356 {
7357 r = 1;
7358 break;
7359 }
7360 }
7361 else if (REG_P (loc))
7362 {
7363 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7364 nextp = &node->next)
7365 if (REG_P (node->loc))
7366 {
7367 if (REGNO (node->loc) < REGNO (loc))
7368 c++;
7369 else
7370 {
7371 if (REGNO (node->loc) == REGNO (loc))
7372 r = 0;
7373 else
7374 r = 1;
7375 break;
7376 }
7377 }
7378 else
7379 {
7380 r = 1;
7381 break;
7382 }
7383 }
7384 else if (MEM_P (loc))
7385 {
7386 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7387 nextp = &node->next)
7388 if (REG_P (node->loc))
7389 c++;
7390 else if (MEM_P (node->loc))
7391 {
7392 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7393 break;
7394 else
7395 c++;
7396 }
7397 else
7398 {
7399 r = 1;
7400 break;
7401 }
7402 }
7403 else
7404 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7405 nextp = &node->next)
7406 if ((r = loc_cmp (node->loc, loc)) >= 0)
7407 break;
7408 else
7409 c++;
7410
7411 if (r == 0)
7412 return slot;
7413
bc95df68 7414 if (shared_var_p (var, set->vars))
9845d120 7415 {
7416 slot = unshare_variable (set, slot, var, initialized);
7417 var = (variable)*slot;
7418 for (nextp = &var->var_part[0].loc_chain; c;
7419 nextp = &(*nextp)->next)
7420 c--;
7421 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7422 }
5923a5e7 7423 }
7424 else
7425 {
96414f01 7426 int inspos = 0;
7427
9845d120 7428 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
5923a5e7 7429
96414f01 7430 pos = find_variable_location_part (var, offset, &inspos);
5923a5e7 7431
96414f01 7432 if (pos >= 0)
5923a5e7 7433 {
85bbdb3f 7434 node = var->var_part[pos].loc_chain;
7435
7436 if (node
8ad4c111 7437 && ((REG_P (node->loc) && REG_P (loc)
85bbdb3f 7438 && REGNO (node->loc) == REGNO (loc))
7439 || rtx_equal_p (node->loc, loc)))
7440 {
 7441 /* LOC is at the beginning of the chain, so we have nothing
 7442 to do. */
d53bb226 7443 if (node->init < initialized)
7444 node->init = initialized;
7445 if (set_src != NULL)
7446 node->set_src = set_src;
7447
9845d120 7448 return slot;
85bbdb3f 7449 }
7450 else
7451 {
7452 /* We have to make a copy of a shared variable. */
bc95df68 7453 if (shared_var_p (var, set->vars))
9845d120 7454 {
7455 slot = unshare_variable (set, slot, var, initialized);
7456 var = (variable)*slot;
7457 }
85bbdb3f 7458 }
7459 }
7460 else
7461 {
 7462 /* We have not found the location part, so a new one will be created. */
7463
7464 /* We have to make a copy of the shared variable. */
bc95df68 7465 if (shared_var_p (var, set->vars))
9845d120 7466 {
7467 slot = unshare_variable (set, slot, var, initialized);
7468 var = (variable)*slot;
7469 }
5923a5e7 7470
5923a5e7 7471 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
 7472 thus there are at most MAX_VAR_PARTS different offsets. */
9845d120 7473 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
72fdb379 7474 && (!var->n_var_parts || !onepart));
5923a5e7 7475
96414f01 7476 /* We have to move the elements of the array starting at index
 7477 inspos to the next position. */
7478 for (pos = var->n_var_parts; pos > inspos; pos--)
7479 var->var_part[pos] = var->var_part[pos - 1];
5923a5e7 7480
7481 var->n_var_parts++;
72fdb379 7482 gcc_checking_assert (!onepart);
7483 VAR_PART_OFFSET (var, pos) = offset;
5923a5e7 7484 var->var_part[pos].loc_chain = NULL;
7485 var->var_part[pos].cur_loc = NULL;
7486 }
5923a5e7 7487
9845d120 7488 /* Delete the location from the list. */
7489 nextp = &var->var_part[pos].loc_chain;
7490 for (node = var->var_part[pos].loc_chain; node; node = next)
5923a5e7 7491 {
9845d120 7492 next = node->next;
7493 if ((REG_P (node->loc) && REG_P (loc)
7494 && REGNO (node->loc) == REGNO (loc))
7495 || rtx_equal_p (node->loc, loc))
7496 {
7497 /* Save these values, to assign to the new node, before
7498 deleting this one. */
7499 if (node->init > initialized)
7500 initialized = node->init;
7501 if (node->set_src != NULL && set_src == NULL)
7502 set_src = node->set_src;
bc95df68 7503 if (var->var_part[pos].cur_loc == node->loc)
72fdb379 7504 var->var_part[pos].cur_loc = NULL;
9845d120 7505 pool_free (loc_chain_pool, node);
7506 *nextp = next;
7507 break;
7508 }
7509 else
7510 nextp = &node->next;
5923a5e7 7511 }
9845d120 7512
7513 nextp = &var->var_part[pos].loc_chain;
5923a5e7 7514 }
7515
7516 /* Add the location to the beginning. */
45ba1503 7517 node = (location_chain) pool_alloc (loc_chain_pool);
5923a5e7 7518 node->loc = loc;
d53bb226 7519 node->init = initialized;
7520 node->set_src = set_src;
9845d120 7521 node->next = *nextp;
7522 *nextp = node;
7523
5923a5e7 7524  /* If no location was emitted for this variable part yet, do so now.  */
7525 if (var->var_part[pos].cur_loc == NULL)
bc95df68 7526 variable_was_changed (var, set);
9845d120 7527
7528 return slot;
5923a5e7 7529}
7530
9845d120 7531/* Set the part of variable's location in the dataflow set SET. The
7532 variable part is specified by variable's declaration in DV and
7533 offset OFFSET and the part's location by LOC. IOPT should be
7534 NO_INSERT if the variable is known to be in SET already and the
7535 variable hash table must not be resized, and INSERT otherwise. */
96414f01 7536
7537static void
9845d120 7538set_variable_part (dataflow_set *set, rtx loc,
7539 decl_or_value dv, HOST_WIDE_INT offset,
7540 enum var_init_status initialized, rtx set_src,
7541 enum insert_option iopt)
96414f01 7542{
9845d120 7543 void **slot;
96414f01 7544
9845d120 7545 if (iopt == NO_INSERT)
7546 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7547 else
7548 {
7549 slot = shared_hash_find_slot (set->vars, dv);
7550 if (!slot)
7551 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7552 }
8f9c21d9 7553 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
9845d120 7554}
96414f01 7555
9845d120 7556/* Remove all recorded register locations for the given variable part
 7557   from dataflow set SET, except for those that are identical to LOC.
 7558   The variable part is specified by its SET->vars slot SLOT and
 7559   offset OFFSET.  */
7560
7561static void **
7562clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7563 HOST_WIDE_INT offset, rtx set_src)
7564{
7565 variable var = (variable) *slot;
7566 int pos = find_variable_location_part (var, offset, NULL);
7567
7568 if (pos >= 0)
96414f01 7569 {
9845d120 7570 location_chain node, next;
96414f01 7571
9845d120 7572 /* Remove the register locations from the dataflow set. */
7573 next = var->var_part[pos].loc_chain;
7574 for (node = next; node; node = next)
96414f01 7575 {
9845d120 7576 next = node->next;
7577 if (node->loc != loc
7578 && (!flag_var_tracking_uninit
7579 || !set_src
7580 || MEM_P (set_src)
7581 || !rtx_equal_p (set_src, node->set_src)))
96414f01 7582 {
9845d120 7583 if (REG_P (node->loc))
2a964ec6 7584 {
9845d120 7585 attrs anode, anext;
7586 attrs *anextp;
7587
7588 /* Remove the variable part from the register's
7589 list, but preserve any other variable parts
7590 that might be regarded as live in that same
7591 register. */
7592 anextp = &set->regs[REGNO (node->loc)];
7593 for (anode = *anextp; anode; anode = anext)
2a964ec6 7594 {
9845d120 7595 anext = anode->next;
7596 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7597 && anode->offset == offset)
2a964ec6 7598 {
9845d120 7599 pool_free (attrs_pool, anode);
7600 *anextp = anext;
2a964ec6 7601 }
9845d120 7602 else
7603 anextp = &anode->next;
2a964ec6 7604 }
9845d120 7605 }
7606
7607 slot = delete_slot_part (set, node->loc, slot, offset);
7608 }
7609 }
7610 }
7611
7612 return slot;
7613}
7614
7615/* Remove all recorded register locations for the given variable part
 7616   from dataflow set SET, except for those that are identical to LOC.
7617 The variable part is specified by variable's declaration or value
7618 DV and offset OFFSET. */
7619
7620static void
7621clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7622 HOST_WIDE_INT offset, rtx set_src)
7623{
7624 void **slot;
7625
7626 if (!dv_as_opaque (dv)
7627 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7628 return;
7629
7630 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7631 if (!slot)
7632 return;
7633
8f9c21d9 7634 clobber_slot_part (set, loc, slot, offset, set_src);
9845d120 7635}
2a964ec6 7636
9845d120 7637/* Delete the part of variable's location from dataflow set SET. The
7638 variable part is specified by its SET->vars slot SLOT and offset
7639 OFFSET and the part's location by LOC. */
7640
7641static void **
7642delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7643 HOST_WIDE_INT offset)
7644{
7645 variable var = (variable) *slot;
7646 int pos = find_variable_location_part (var, offset, NULL);
7647
7648 if (pos >= 0)
7649 {
7650 location_chain node, next;
7651 location_chain *nextp;
7652 bool changed;
72fdb379 7653 rtx cur_loc;
9845d120 7654
bc95df68 7655 if (shared_var_p (var, set->vars))
9845d120 7656 {
7657 /* If the variable contains the location part we have to
7658 make a copy of the variable. */
7659 for (node = var->var_part[pos].loc_chain; node;
7660 node = node->next)
7661 {
7662 if ((REG_P (node->loc) && REG_P (loc)
7663 && REGNO (node->loc) == REGNO (loc))
7664 || rtx_equal_p (node->loc, loc))
7665 {
7666 slot = unshare_variable (set, slot, var,
7667 VAR_INIT_STATUS_UNKNOWN);
7668 var = (variable)*slot;
7669 break;
2a964ec6 7670 }
96414f01 7671 }
7672 }
9845d120 7673
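      /* For one-part variables the location the last note was expanded
	 from is recorded in the auxiliary data (VAR_LOC_FROM); use it in
	 place of cur_loc when checking which node is being deleted.  */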
72fdb379 7674 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7675 cur_loc = VAR_LOC_FROM (var);
7676 else
7677 cur_loc = var->var_part[pos].cur_loc;
7678
9845d120 7679 /* Delete the location part. */
bc95df68 7680 changed = false;
9845d120 7681 nextp = &var->var_part[pos].loc_chain;
7682 for (node = *nextp; node; node = next)
7683 {
7684 next = node->next;
7685 if ((REG_P (node->loc) && REG_P (loc)
7686 && REGNO (node->loc) == REGNO (loc))
7687 || rtx_equal_p (node->loc, loc))
7688 {
bc95df68 7689	      /* If we have deleted the location which was last emitted,
 7690		 we have to emit a new location, so add the variable to the
 7691		 set of changed variables.  */
72fdb379 7692 if (cur_loc == node->loc)
bc95df68 7693 {
7694 changed = true;
7695 var->var_part[pos].cur_loc = NULL;
72fdb379 7696 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7697 VAR_LOC_FROM (var) = NULL;
bc95df68 7698 }
9845d120 7699 pool_free (loc_chain_pool, node);
7700 *nextp = next;
7701 break;
7702 }
7703 else
7704 nextp = &node->next;
7705 }
7706
9845d120 7707 if (var->var_part[pos].loc_chain == NULL)
7708 {
bc95df68 7709 changed = true;
9845d120 7710 var->n_var_parts--;
9845d120 7711 while (pos < var->n_var_parts)
7712 {
7713 var->var_part[pos] = var->var_part[pos + 1];
7714 pos++;
7715 }
7716 }
7717 if (changed)
7718 variable_was_changed (var, set);
7719 }
7720
7721 return slot;
7722}
7723
7724/* Delete the part of variable's location from dataflow set SET. The
7725 variable part is specified by variable's declaration or value DV
7726 and offset OFFSET and the part's location by LOC. */
7727
7728static void
7729delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7730 HOST_WIDE_INT offset)
7731{
7732 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7733 if (!slot)
7734 return;
7735
8f9c21d9 7736 delete_slot_part (set, loc, slot, offset);
9845d120 7737}
7738
72fdb379 7739DEF_VEC_P (variable);
7740DEF_VEC_ALLOC_P (variable, heap);
7741
7742DEF_VEC_ALLOC_P_STACK (rtx);
7743#define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
7744
bc95df68 7745/* Structure for passing some other parameters to function
7746 vt_expand_loc_callback. */
7747struct expand_loc_callback_data
7748{
7749 /* The variables and values active at this point. */
7750 htab_t vars;
7751
72fdb379 7752 /* Stack of values and debug_exprs under expansion, and their
7753 children. */
7754 VEC (rtx, stack) *expanding;
7755
7756 /* Stack of values and debug_exprs whose expansion hit recursion
7757 cycles. They will have VALUE_RECURSED_INTO marked when added to
7758 this list. This flag will be cleared if any of its dependencies
7759 resolves to a valid location. So, if the flag remains set at the
7760 end of the search, we know no valid location for this one can
7761 possibly exist. */
7762 VEC (rtx, stack) *pending;
7763
7764 /* The maximum depth among the sub-expressions under expansion.
7765 Zero indicates no expansion so far. */
11643610 7766 expand_depth depth;
bc95df68 7767};
7768
72fdb379 7769/* Allocate the one-part auxiliary data structure for VAR, with enough
7770 room for COUNT dependencies. */
7771
7772static void
7773loc_exp_dep_alloc (variable var, int count)
7774{
7775 size_t allocsize;
7776
7777 gcc_checking_assert (var->onepart);
7778
7779 /* We can be called with COUNT == 0 to allocate the data structure
7780 without any dependencies, e.g. for the backlinks only. However,
7781 if we are specifying a COUNT, then the dependency list must have
7782 been emptied before. It would be possible to adjust pointers or
7783 force it empty here, but this is better done at an earlier point
7784 in the algorithm, so we instead leave an assertion to catch
7785 errors. */
7786 gcc_checking_assert (!count
7787 || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7788
7789 if (VAR_LOC_1PAUX (var)
7790 && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
7791 return;
7792
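  /* The dependency vector is embedded at the tail of struct onepart_aux,
     so a single allocation provides both the header and room for COUNT
     dependencies.  */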
7793 allocsize = offsetof (struct onepart_aux, deps)
7794 + VEC_embedded_size (loc_exp_dep, count);
7795
7796 if (VAR_LOC_1PAUX (var))
7797 {
7798 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7799 VAR_LOC_1PAUX (var), allocsize);
7800 /* If the reallocation moves the onepaux structure, the
7801 back-pointer to BACKLINKS in the first list member will still
7802 point to its old location. Adjust it. */
7803 if (VAR_LOC_DEP_LST (var))
7804 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7805 }
7806 else
7807 {
7808 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7809 *VAR_LOC_DEP_LSTP (var) = NULL;
7810 VAR_LOC_FROM (var) = NULL;
11643610 7811 VAR_LOC_DEPTH (var).complexity = 0;
7812 VAR_LOC_DEPTH (var).entryvals = 0;
72fdb379 7813 }
7814 VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
7815}
7816
7817/* Remove all entries from the vector of active dependencies of VAR,
7818 removing them from the back-links lists too. */
7819
7820static void
7821loc_exp_dep_clear (variable var)
7822{
7823 while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
7824 {
7825 loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7826 if (led->next)
7827 led->next->pprev = led->pprev;
7828 if (led->pprev)
7829 *led->pprev = led->next;
7830 VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7831 }
7832}
7833
7834/* Insert an active dependency from VAR on X to the vector of
7835 dependencies, and add the corresponding back-link to X's list of
7836 back-links in VARS. */
7837
7838static void
7839loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7840{
7841 decl_or_value dv;
7842 variable xvar;
7843 loc_exp_dep *led;
7844
7845 dv = dv_from_rtx (x);
7846
7847 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7848 an additional look up? */
7849 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7850
7851 if (!xvar)
7852 {
7853 xvar = variable_from_dropped (dv, NO_INSERT);
7854 gcc_checking_assert (xvar);
7855 }
7856
7857 /* No point in adding the same backlink more than once. This may
7858 arise if say the same value appears in two complex expressions in
7859 the same loc_list, or even more than once in a single
7860 expression. */
7861 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7862 return;
7863
df226854 7864 if (var->onepart == NOT_ONEPART)
7865 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
7866 else
7867 {
7868 VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
7869 led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7870 }
72fdb379 7871 led->dv = var->dv;
7872 led->value = x;
7873
7874 loc_exp_dep_alloc (xvar, 0);
7875 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7876 led->next = *led->pprev;
7877 if (led->next)
7878 led->next->pprev = &led->next;
7879 *led->pprev = led;
7880}
7881
7882/* Create active dependencies of VAR on COUNT values starting at
7883 VALUE, and corresponding back-links to the entries in VARS. Return
7884 true if we found any pending-recursion results. */
7885
7886static bool
7887loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7888{
7889 bool pending_recursion = false;
7890
7891 gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7892
7893 /* Set up all dependencies from last_child (as set up at the end of
7894 the loop above) to the end. */
7895 loc_exp_dep_alloc (var, count);
7896
7897 while (count--)
7898 {
7899 rtx x = *value++;
7900
7901 if (!pending_recursion)
7902 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7903
7904 loc_exp_insert_dep (var, x, vars);
7905 }
7906
7907 return pending_recursion;
7908}
7909
7910/* Notify the back-links of IVAR that are pending recursion that we
7911 have found a non-NIL value for it, so they are cleared for another
7912 attempt to compute a current location. */
7913
7914static void
7915notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7916{
7917 loc_exp_dep *led, *next;
7918
7919 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7920 {
7921 decl_or_value dv = led->dv;
7922 variable var;
7923
7924 next = led->next;
7925
7926 if (dv_is_value_p (dv))
7927 {
7928 rtx value = dv_as_value (dv);
7929
7930 /* If we have already resolved it, leave it alone. */
7931 if (!VALUE_RECURSED_INTO (value))
7932 continue;
7933
7934 /* Check that VALUE_RECURSED_INTO, true from the test above,
7935 implies NO_LOC_P. */
7936 gcc_checking_assert (NO_LOC_P (value));
7937
7938 /* We won't notify variables that are being expanded,
7939 because their dependency list is cleared before
7940 recursing. */
8081d3a6 7941 NO_LOC_P (value) = false;
72fdb379 7942 VALUE_RECURSED_INTO (value) = false;
7943
7944 gcc_checking_assert (dv_changed_p (dv));
7945 }
df226854 7946 else
7947 {
7948 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
7949 if (!dv_changed_p (dv))
7950 continue;
7951 }
72fdb379 7952
7953 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7954
7955 if (!var)
7956 var = variable_from_dropped (dv, NO_INSERT);
7957
7958 if (var)
7959 notify_dependents_of_resolved_value (var, vars);
7960
7961 if (next)
7962 next->pprev = led->pprev;
7963 if (led->pprev)
7964 *led->pprev = next;
7965 led->next = NULL;
7966 led->pprev = NULL;
7967 }
7968}
7969
7970static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7971 int max_depth, void *data);
7972
7973/* Return the combined depth, when one sub-expression evaluated to
7974 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7975
11643610 7976static inline expand_depth
7977update_depth (expand_depth saved_depth, expand_depth best_depth)
72fdb379 7978{
7979 /* If we didn't find anything, stick with what we had. */
11643610 7980 if (!best_depth.complexity)
72fdb379 7981 return saved_depth;
7982
 7983   /* If we hadn't found anything before, use the depth of the current
7984 expression. Do NOT add one extra level, we want to compute the
7985 maximum depth among sub-expressions. We'll increment it later,
7986 if appropriate. */
11643610 7987 if (!saved_depth.complexity)
72fdb379 7988 return best_depth;
7989
11643610 7990 /* Combine the entryval count so that regardless of which one we
7991 return, the entryval count is accurate. */
7992 best_depth.entryvals = saved_depth.entryvals
7993 = best_depth.entryvals + saved_depth.entryvals;
7994
7995 if (saved_depth.complexity < best_depth.complexity)
72fdb379 7996 return best_depth;
7997 else
7998 return saved_depth;
7999}
8000
8001/* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8002 DATA for cselib expand callback. If PENDRECP is given, indicate in
8003 it whether any sub-expression couldn't be fully evaluated because
8004 it is pending recursion resolution. */
8005
8006static inline rtx
8007vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8008{
8009 struct expand_loc_callback_data *elcd
8010 = (struct expand_loc_callback_data *) data;
8011 location_chain loc, next;
8012 rtx result = NULL;
8013 int first_child, result_first_child, last_child;
8014 bool pending_recursion;
8015 rtx loc_from = NULL;
8016 struct elt_loc_list *cloc = NULL;
11643610 8017 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8018 int wanted_entryvals, found_entryvals = 0;
72fdb379 8019
8020 /* Clear all backlinks pointing at this, so that we're not notified
8021 while we're active. */
8022 loc_exp_dep_clear (var);
8023
11643610 8024 retry:
72fdb379 8025 if (var->onepart == ONEPART_VALUE)
8026 {
8027 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8028
8029 gcc_checking_assert (cselib_preserved_value_p (val));
8030
8031 cloc = val->locs;
8032 }
8033
8034 first_child = result_first_child = last_child
8035 = VEC_length (rtx, elcd->expanding);
8036
11643610 8037 wanted_entryvals = found_entryvals;
8038
72fdb379 8039 /* Attempt to expand each available location in turn. */
8040 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8041 loc || cloc; loc = next)
8042 {
8043 result_first_child = last_child;
8044
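      /* Once the variable's own loc_chain is exhausted, continue with the
	 cselib location list (CLOC) recorded for the VALUE.  */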
11643610 8045 if (!loc)
72fdb379 8046 {
8047 loc_from = cloc->loc;
8048 next = loc;
8049 cloc = cloc->next;
8050 if (unsuitable_loc (loc_from))
8051 continue;
8052 }
8053 else
8054 {
8055 loc_from = loc->loc;
8056 next = loc->next;
8057 }
8058
8059 gcc_checking_assert (!unsuitable_loc (loc_from));
8060
11643610 8061 elcd->depth.complexity = elcd->depth.entryvals = 0;
72fdb379 8062 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8063 vt_expand_loc_callback, data);
8064 last_child = VEC_length (rtx, elcd->expanding);
8065
8066 if (result)
8067 {
8068 depth = elcd->depth;
8069
11643610 8070 gcc_checking_assert (depth.complexity
8071 || result_first_child == last_child);
72fdb379 8072
8073 if (last_child - result_first_child != 1)
11643610 8074 {
8075 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8076 depth.entryvals++;
8077 depth.complexity++;
8078 }
72fdb379 8079
11643610 8080 if (depth.complexity <= EXPR_USE_DEPTH)
8081 {
8082 if (depth.entryvals <= wanted_entryvals)
8083 break;
8084 else if (!found_entryvals || depth.entryvals < found_entryvals)
8085 found_entryvals = depth.entryvals;
8086 }
72fdb379 8087
8088 result = NULL;
8089 }
8090
8091 /* Set it up in case we leave the loop. */
11643610 8092 depth.complexity = depth.entryvals = 0;
72fdb379 8093 loc_from = NULL;
8094 result_first_child = first_child;
8095 }
8096
11643610 8097 if (!loc_from && wanted_entryvals < found_entryvals)
8098 {
8099 /* We found entries with ENTRY_VALUEs and skipped them. Since
8100 we could not find any expansions without ENTRY_VALUEs, but we
8101 found at least one with them, go back and get an entry with
8102 the minimum number ENTRY_VALUE count that we found. We could
 8103	 the minimum ENTRY_VALUE count that we found.  We could
8104 the re-expansion should be trivial. ??? Should we record all
8105 attempted locs as dependencies, so that we retry the
8106 expansion should any of them change, in the hope it can give
8107 us a new entry without an ENTRY_VALUE? */
8108 VEC_truncate (rtx, elcd->expanding, first_child);
8109 goto retry;
8110 }
8111
72fdb379 8112 /* Register all encountered dependencies as active. */
8113 pending_recursion = loc_exp_dep_set
8114 (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
8115 last_child - result_first_child, elcd->vars);
8116
8117 VEC_truncate (rtx, elcd->expanding, first_child);
8118
8119 /* Record where the expansion came from. */
8120 gcc_checking_assert (!result || !pending_recursion);
8121 VAR_LOC_FROM (var) = loc_from;
8122 VAR_LOC_DEPTH (var) = depth;
8123
11643610 8124 gcc_checking_assert (!depth.complexity == !result);
c206bb4e 8125
72fdb379 8126 elcd->depth = update_depth (saved_depth, depth);
8127
8128 /* Indicate whether any of the dependencies are pending recursion
8129 resolution. */
8130 if (pendrecp)
8131 *pendrecp = pending_recursion;
8132
8133 if (!pendrecp || !pending_recursion)
8134 var->var_part[0].cur_loc = result;
8135
8136 return result;
8137}
8138
9845d120 8139/* Callback for cselib_expand_value, that looks for expressions
3017bc06 8140 holding the value in the var-tracking hash tables. Return X for
8141 standard processing, anything else is to be used as-is. */
9845d120 8142
8143static rtx
72fdb379 8144vt_expand_loc_callback (rtx x, bitmap regs,
8145 int max_depth ATTRIBUTE_UNUSED,
8146 void *data)
9845d120 8147{
bc95df68 8148 struct expand_loc_callback_data *elcd
8149 = (struct expand_loc_callback_data *) data;
9845d120 8150 decl_or_value dv;
8151 variable var;
72fdb379 8152 rtx result, subreg;
8153 bool pending_recursion = false;
8154 bool from_empty = false;
9845d120 8155
688ff29b 8156 switch (GET_CODE (x))
3017bc06 8157 {
688ff29b 8158 case SUBREG:
3017bc06 8159 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
72fdb379 8160 EXPR_DEPTH,
3017bc06 8161 vt_expand_loc_callback, data);
8162
8163 if (!subreg)
8164 return NULL;
8165
8166 result = simplify_gen_subreg (GET_MODE (x), subreg,
8167 GET_MODE (SUBREG_REG (x)),
8168 SUBREG_BYTE (x));
8169
8170 /* Invalid SUBREGs are ok in debug info. ??? We could try
8171 alternate expansions for the VALUE as well. */
bc95df68 8172 if (!result)
3017bc06 8173 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8174
8175 return result;
3017bc06 8176
688ff29b 8177 case DEBUG_EXPR:
688ff29b 8178 case VALUE:
72fdb379 8179 dv = dv_from_rtx (x);
688ff29b 8180 break;
8181
8182 default:
8183 return x;
8184 }
9845d120 8185
72fdb379 8186 VEC_safe_push (rtx, stack, elcd->expanding, x);
8187
8188 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8189 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8190
8191 if (NO_LOC_P (x))
8081d3a6 8192 {
8193 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8194 return NULL;
8195 }
9845d120 8196
bc95df68 8197 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
9845d120 8198
8199 if (!var)
bc95df68 8200 {
72fdb379 8201 from_empty = true;
8202 var = variable_from_dropped (dv, INSERT);
bc95df68 8203 }
9845d120 8204
72fdb379 8205 gcc_checking_assert (var);
8206
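  /* If the value is not marked as changed, its previously computed
     cur_loc is still valid; reuse it and fold its recorded depth into
     the caller's.  */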
8207 if (!dv_changed_p (dv))
bc95df68 8208 {
72fdb379 8209 gcc_checking_assert (!NO_LOC_P (x));
8210 gcc_checking_assert (var->var_part[0].cur_loc);
8211 gcc_checking_assert (VAR_LOC_1PAUX (var));
11643610 8212 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
9845d120 8213
72fdb379 8214 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8215
8216 return var->var_part[0].cur_loc;
8217 }
9845d120 8218
8219 VALUE_RECURSED_INTO (x) = true;
72fdb379 8220 /* This is tentative, but it makes some tests simpler. */
8221 NO_LOC_P (x) = true;
9845d120 8222
72fdb379 8223 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8224
8225 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8226
8227 if (pending_recursion)
9845d120 8228 {
72fdb379 8229 gcc_checking_assert (!result);
8230 VEC_safe_push (rtx, stack, elcd->pending, x);
bc95df68 8231 }
4143d08b 8232 else
bc95df68 8233 {
72fdb379 8234 NO_LOC_P (x) = !result;
8235 VALUE_RECURSED_INTO (x) = false;
8236 set_dv_changed (dv, false);
8237
8238 if (result)
8239 notify_dependents_of_resolved_value (var, elcd->vars);
96414f01 8240 }
9845d120 8241
72fdb379 8242 return result;
96414f01 8243}
8244
72fdb379 8245/* While expanding variables, we may encounter recursion cycles
8246 because of mutual (possibly indirect) dependencies between two
8247 particular variables (or values), say A and B. If we're trying to
8248 expand A when we get to B, which in turn attempts to expand A, if
8249 we can't find any other expansion for B, we'll add B to this
8250 pending-recursion stack, and tentatively return NULL for its
8251 location. This tentative value will be used for any other
8252 occurrences of B, unless A gets some other location, in which case
8253 it will notify B that it is worth another try at computing a
8254 location for it, and it will use the location computed for A then.
8255 At the end of the expansion, the tentative NULL locations become
8256 final for all members of PENDING that didn't get a notification.
8257 This function performs this finalization of NULL locations. */
8258
8259static void
8260resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
8261{
8262 while (!VEC_empty (rtx, pending))
8263 {
8264 rtx x = VEC_pop (rtx, pending);
8265 decl_or_value dv;
8266
8267 if (!VALUE_RECURSED_INTO (x))
8268 continue;
8269
8270 gcc_checking_assert (NO_LOC_P (x));
8271 VALUE_RECURSED_INTO (x) = false;
8272 dv = dv_from_rtx (x);
8273 gcc_checking_assert (dv_changed_p (dv));
8274 set_dv_changed (dv, false);
8275 }
8276}
8277
8278/* Initialize expand_loc_callback_data D with variable hash table V.
8279 It must be a macro because of alloca (VEC stack). */
8280#define INIT_ELCD(d, v) \
8281 do \
8282 { \
8283 (d).vars = (v); \
8284 (d).expanding = VEC_alloc (rtx, stack, 4); \
8285 (d).pending = VEC_alloc (rtx, stack, 4); \
11643610 8286 (d).depth.complexity = (d).depth.entryvals = 0; \
72fdb379 8287 } \
8288 while (0)
8289/* Finalize expand_loc_callback_data D, resolved to location L. */
8290#define FINI_ELCD(d, l) \
8291 do \
8292 { \
8293 resolve_expansions_pending_recursion ((d).pending); \
8294 VEC_free (rtx, stack, (d).pending); \
8295 VEC_free (rtx, stack, (d).expanding); \
8296 \
8297 if ((l) && MEM_P (l)) \
8298 (l) = targetm.delegitimize_address (l); \
8299 } \
8300 while (0)
8301
8302/* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8303 equivalences in VARS, updating their CUR_LOCs in the process. */
5923a5e7 8304
9845d120 8305static rtx
72fdb379 8306vt_expand_loc (rtx loc, htab_t vars)
5923a5e7 8307{
bc95df68 8308 struct expand_loc_callback_data data;
72fdb379 8309 rtx result;
bc95df68 8310
9845d120 8311 if (!MAY_HAVE_DEBUG_INSNS)
8312 return loc;
85bbdb3f 8313
72fdb379 8314 INIT_ELCD (data, vars);
5923a5e7 8315
72fdb379 8316 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8317 vt_expand_loc_callback, &data);
8318
8319 FINI_ELCD (data, result);
8320
8321 return result;
5923a5e7 8322}
8323
72fdb379 8324/* Expand the one-part VARiable to a location, using the equivalences
8325 in VARS, updating their CUR_LOCs in the process. */
bc95df68 8326
72fdb379 8327static rtx
8328vt_expand_1pvar (variable var, htab_t vars)
bc95df68 8329{
8330 struct expand_loc_callback_data data;
72fdb379 8331 rtx loc;
8332
8333 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8334
8335 if (!dv_changed_p (var->dv))
8336 return var->var_part[0].cur_loc;
8337
8338 INIT_ELCD (data, vars);
8339
8340 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8341
8342 gcc_checking_assert (VEC_empty (rtx, data.expanding));
8343
8344 FINI_ELCD (data, loc);
bc95df68 8345
72fdb379 8346 return loc;
bc95df68 8347}
8348
5923a5e7 8349/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8350 additional parameters: WHERE specifies whether the note shall be emitted
9845d120 8351 before or after instruction INSN. */
5923a5e7 8352
8353static int
8354emit_note_insn_var_location (void **varp, void *data)
8355{
9845d120 8356 variable var = (variable) *varp;
5923a5e7 8357 rtx insn = ((emit_note_data *)data)->insn;
8358 enum emit_note_where where = ((emit_note_data *)data)->where;
9845d120 8359 htab_t vars = ((emit_note_data *)data)->vars;
bc95df68 8360 rtx note, note_vl;
4719779b 8361 int i, j, n_var_parts;
5923a5e7 8362 bool complete;
d53bb226 8363 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
5923a5e7 8364 HOST_WIDE_INT last_limit;
8365 tree type_size_unit;
4719779b 8366 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8367 rtx loc[MAX_VAR_PARTS];
9845d120 8368 tree decl;
bc95df68 8369 location_chain lc;
5923a5e7 8370
72fdb379 8371 gcc_checking_assert (var->onepart == NOT_ONEPART
8372 || var->onepart == ONEPART_VDECL);
9845d120 8373
8374 decl = dv_as_decl (var->dv);
8375
5923a5e7 8376 complete = true;
8377 last_limit = 0;
4719779b 8378 n_var_parts = 0;
72fdb379 8379 if (!var->onepart)
8380 for (i = 0; i < var->n_var_parts; i++)
8381 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8382 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
5923a5e7 8383 for (i = 0; i < var->n_var_parts; i++)
8384 {
4719779b 8385 enum machine_mode mode, wider_mode;
9845d120 8386 rtx loc2;
72fdb379 8387 HOST_WIDE_INT offset;
4719779b 8388
72fdb379 8389 if (i == 0 && var->onepart)
5923a5e7 8390 {
72fdb379 8391 gcc_checking_assert (var->n_var_parts == 1);
8392 offset = 0;
8393 initialized = VAR_INIT_STATUS_INITIALIZED;
8394 loc2 = vt_expand_1pvar (var, vars);
5923a5e7 8395 }
72fdb379 8396 else
bc95df68 8397 {
72fdb379 8398 if (last_limit < VAR_PART_OFFSET (var, i))
8399 {
8400 complete = false;
8401 break;
8402 }
8403 else if (last_limit > VAR_PART_OFFSET (var, i))
8404 continue;
8405 offset = VAR_PART_OFFSET (var, i);
df226854 8406 loc2 = var->var_part[i].cur_loc;
8407 if (loc2 && GET_CODE (loc2) == MEM
8408 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8409 {
8410 rtx depval = XEXP (loc2, 0);
8411
8412 loc2 = vt_expand_loc (loc2, vars);
8413
8414 if (loc2)
8415 loc_exp_insert_dep (var, depval, vars);
8416 }
8417 if (!loc2)
72fdb379 8418 {
8419 complete = false;
8420 continue;
8421 }
df226854 8422 gcc_checking_assert (GET_CODE (loc2) != VALUE);
72fdb379 8423 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8424 if (var->var_part[i].cur_loc == lc->loc)
8425 {
8426 initialized = lc->init;
8427 break;
8428 }
8429 gcc_assert (lc);
bc95df68 8430 }
72fdb379 8431
8432 offsets[n_var_parts] = offset;
9845d120 8433 if (!loc2)
8434 {
8435 complete = false;
8436 continue;
8437 }
8438 loc[n_var_parts] = loc2;
bc95df68 8439 mode = GET_MODE (var->var_part[i].cur_loc);
72fdb379 8440 if (mode == VOIDmode && var->onepart)
1197d3d7 8441 mode = DECL_MODE (decl);
4719779b 8442 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8443
8444 /* Attempt to merge adjacent registers or memory. */
8445 wider_mode = GET_MODE_WIDER_MODE (mode);
8446 for (j = i + 1; j < var->n_var_parts; j++)
72fdb379 8447 if (last_limit <= VAR_PART_OFFSET (var, j))
4719779b 8448 break;
8449 if (j < var->n_var_parts
8450 && wider_mode != VOIDmode
bc95df68 8451 && var->var_part[j].cur_loc
8452 && mode == GET_MODE (var->var_part[j].cur_loc)
1ce27e5a 8453 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
72fdb379 8454 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8455 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
bc95df68 8456 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
4719779b 8457 {
8458 rtx new_loc = NULL;
4719779b 8459
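	  /* Two consecutive hard registers that together span WIDER_MODE,
	     or two memory references to adjacent addresses, can be merged
	     into a single reference in WIDER_MODE.  */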
8460 if (REG_P (loc[n_var_parts])
8461 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8462 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
a2c6f0b7 8463 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
4719779b 8464 == REGNO (loc2))
8465 {
8466 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8467 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8468 mode, 0);
8469 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8470 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8471 if (new_loc)
8472 {
8473 if (!REG_P (new_loc)
8474 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8475 new_loc = NULL;
8476 else
8477 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8478 }
8479 }
8480 else if (MEM_P (loc[n_var_parts])
8481 && GET_CODE (XEXP (loc2, 0)) == PLUS
971ba038 8482 && REG_P (XEXP (XEXP (loc2, 0), 0))
8483 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
4719779b 8484 {
971ba038 8485 if ((REG_P (XEXP (loc[n_var_parts], 0))
4719779b 8486 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8487 XEXP (XEXP (loc2, 0), 0))
8488 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8489 == GET_MODE_SIZE (mode))
8490 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
971ba038 8491 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
4719779b 8492 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8493 XEXP (XEXP (loc2, 0), 0))
8494 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8495 + GET_MODE_SIZE (mode)
8496 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8497 new_loc = adjust_address_nv (loc[n_var_parts],
8498 wider_mode, 0);
8499 }
8500
8501 if (new_loc)
8502 {
8503 loc[n_var_parts] = new_loc;
8504 mode = wider_mode;
8505 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8506 i = j;
8507 }
8508 }
8509 ++n_var_parts;
5923a5e7 8510 }
9845d120 8511 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
5923a5e7 8512 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8513 complete = false;
8514
d53bb226 8515 if (! flag_var_tracking_uninit)
8516 initialized = VAR_INIT_STATUS_INITIALIZED;
8517
bc95df68 8518 note_vl = NULL_RTX;
5923a5e7 8519 if (!complete)
bc95df68 8520 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8521 (int) initialized);
4719779b 8522 else if (n_var_parts == 1)
5923a5e7 8523 {
de4798ba 8524 rtx expr_list;
8525
8526 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8527 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8528 else
8529 expr_list = loc[0];
5923a5e7 8530
bc95df68 8531 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8532 (int) initialized);
5923a5e7 8533 }
4719779b 8534 else if (n_var_parts)
5923a5e7 8535 {
5923a5e7 8536 rtx parallel;
8537
4719779b 8538 for (i = 0; i < n_var_parts; i++)
8539 loc[i]
8540 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8541
5923a5e7 8542 parallel = gen_rtx_PARALLEL (VOIDmode,
4719779b 8543 gen_rtvec_v (n_var_parts, loc));
bc95df68 8544 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8545 parallel, (int) initialized);
5923a5e7 8546 }
8547
bc95df68 8548 if (where != EMIT_NOTE_BEFORE_INSN)
8549 {
8550 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8551 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8552 NOTE_DURING_CALL_P (note) = true;
8553 }
8554 else
c0637e80 8555 {
 8556      /* Make sure that the call-related notes come first.  */
8557 while (NEXT_INSN (insn)
8558 && NOTE_P (insn)
d5d74c5f 8559 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8560 && NOTE_DURING_CALL_P (insn))
8561 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
c0637e80 8562 insn = NEXT_INSN (insn);
d5d74c5f 8563 if (NOTE_P (insn)
8564 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8565 && NOTE_DURING_CALL_P (insn))
8566 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
c0637e80 8567 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8568 else
8569 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8570 }
bc95df68 8571 NOTE_VAR_LOCATION (note) = note_vl;
8572
9845d120 8573 set_dv_changed (var->dv, false);
bc95df68 8574 gcc_assert (var->in_changed_variables);
8575 var->in_changed_variables = false;
5923a5e7 8576 htab_clear_slot (changed_variables, varp);
8577
5923a5e7 8578 /* Continue traversing the hash table. */
8579 return 1;
8580}
8581
72fdb379 8582/* While traversing changed_variables, push onto DATA (a stack of RTX
8583 values) entries that aren't user variables. */
9845d120 8584
72fdb379 8585static int
8586values_to_stack (void **slot, void *data)
8587{
8588 VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
8589 variable var = (variable) *slot;
ace62c8c 8590
72fdb379 8591 if (var->onepart == ONEPART_VALUE)
8592 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
8593 else if (var->onepart == ONEPART_DEXPR)
8594 VEC_safe_push (rtx, stack, *changed_values_stack,
8595 DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
ace62c8c 8596
72fdb379 8597 return 1;
8598}
ace62c8c 8599
72fdb379 8600/* Remove from changed_variables the entry whose DV corresponds to
8601 value or debug_expr VAL. */
ace62c8c 8602static void
72fdb379 8603remove_value_from_changed_variables (rtx val)
ace62c8c 8604{
72fdb379 8605 decl_or_value dv = dv_from_rtx (val);
8606 void **slot;
8607 variable var;
ace62c8c 8608
72fdb379 8609 slot = htab_find_slot_with_hash (changed_variables,
8610 dv, dv_htab_hash (dv), NO_INSERT);
8611 var = (variable) *slot;
8612 var->in_changed_variables = false;
8613 htab_clear_slot (changed_variables, slot);
ace62c8c 8614}
8615
72fdb379 8616/* If VAL (a value or debug_expr) has backlinks to variables actively
8617 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8618 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8619 have dependencies of their own to notify. */
9845d120 8620
72fdb379 8621static void
8622notify_dependents_of_changed_value (rtx val, htab_t htab,
8623 VEC (rtx, stack) **changed_values_stack)
9845d120 8624{
72fdb379 8625 void **slot;
8626 variable var;
8627 loc_exp_dep *led;
8628 decl_or_value dv = dv_from_rtx (val);
9845d120 8629
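  /* The variable for VAL may be found among the changed variables, in
     the current hash table, or among the dropped values; try each in
     turn.  */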
72fdb379 8630 slot = htab_find_slot_with_hash (changed_variables,
8631 dv, dv_htab_hash (dv), NO_INSERT);
8632 if (!slot)
8633 slot = htab_find_slot_with_hash (htab,
8634 dv, dv_htab_hash (dv), NO_INSERT);
8635 if (!slot)
8636 slot = htab_find_slot_with_hash (dropped_values,
8637 dv, dv_htab_hash (dv), NO_INSERT);
8638 var = (variable) *slot;
8639
8640 while ((led = VAR_LOC_DEP_LST (var)))
8641 {
8642 decl_or_value ldv = led->dv;
72fdb379 8643 variable ivar;
9845d120 8644
72fdb379 8645      /* Deactivate and remove the backlink, as it was "used up".  It
8646 makes no sense to attempt to notify the same entity again:
8647 either it will be recomputed and re-register an active
8648 dependency, or it will still have the changed mark. */
8649 if (led->next)
8650 led->next->pprev = led->pprev;
8651 if (led->pprev)
8652 *led->pprev = led->next;
8653 led->next = NULL;
8654 led->pprev = NULL;
9845d120 8655
72fdb379 8656 if (dv_changed_p (ldv))
8657 continue;
8658
8659 switch (dv_onepart_p (ldv))
8660 {
8661 case ONEPART_VALUE:
8662 case ONEPART_DEXPR:
8663 set_dv_changed (ldv, true);
8664 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
8665 break;
8666
df226854 8667 case ONEPART_VDECL:
8668 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
72fdb379 8669 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8670 variable_was_changed (ivar, NULL);
8671 break;
df226854 8672
8673 case NOT_ONEPART:
8674 pool_free (loc_exp_dep_pool, led);
8675 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8676 if (ivar)
8677 {
8678 int i = ivar->n_var_parts;
8679 while (i--)
8680 {
8681 rtx loc = ivar->var_part[i].cur_loc;
8682
8683 if (loc && GET_CODE (loc) == MEM
8684 && XEXP (loc, 0) == val)
8685 {
8686 variable_was_changed (ivar, NULL);
8687 break;
8688 }
8689 }
8690 }
8691 break;
8692
8693 default:
8694 gcc_unreachable ();
72fdb379 8695 }
8696 }
9845d120 8697}
8698
72fdb379 8699/* Take out of changed_variables any entries that don't refer to user
8700 variables. Back-propagate change notifications from values and
8701 debug_exprs to their active dependencies in HTAB or in
8702 CHANGED_VARIABLES. */
bc95df68 8703
72fdb379 8704static void
8705process_changed_values (htab_t htab)
bc95df68 8706{
72fdb379 8707 int i, n;
8708 rtx val;
8709 VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
bc95df68 8710
72fdb379 8711 /* Move values from changed_variables to changed_values_stack. */
8712 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
bc95df68 8713
72fdb379 8714 /* Back-propagate change notifications in values while popping
8715 them from the stack. */
8716 for (n = i = VEC_length (rtx, changed_values_stack);
8717 i > 0; i = VEC_length (rtx, changed_values_stack))
bc95df68 8718 {
72fdb379 8719 val = VEC_pop (rtx, changed_values_stack);
8720 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8721
8722 /* This condition will hold when visiting each of the entries
8723 originally in changed_variables. We can't remove them
8724 earlier because this could drop the backlinks before we got a
8725 chance to use them. */
8726 if (i == n)
bc95df68 8727 {
72fdb379 8728 remove_value_from_changed_variables (val);
8729 n--;
bc95df68 8730 }
bc95df68 8731 }
72fdb379 8732
8733 VEC_free (rtx, stack, changed_values_stack);
bc95df68 8734}
8735
5923a5e7 8736/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
72fdb379 8737 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
 8738   the notes shall be emitted before or after instruction INSN.  */
5923a5e7 8739
8740static void
9845d120 8741emit_notes_for_changes (rtx insn, enum emit_note_where where,
8742 shared_hash vars)
5923a5e7 8743{
8744 emit_note_data data;
9845d120 8745 htab_t htab = shared_hash_htab (vars);
8746
8747 if (!htab_elements (changed_variables))
8748 return;
8749
8750 if (MAY_HAVE_DEBUG_INSNS)
72fdb379 8751 process_changed_values (htab);
5923a5e7 8752
8753 data.insn = insn;
8754 data.where = where;
9845d120 8755 data.vars = htab;
8756
5923a5e7 8757 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8758}
8759
8760/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8761 same variable in hash table DATA or is not there at all. */
8762
8763static int
8764emit_notes_for_differences_1 (void **slot, void *data)
8765{
8766 htab_t new_vars = (htab_t) data;
8767 variable old_var, new_var;
8768
9845d120 8769 old_var = (variable) *slot;
8770 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8771 dv_htab_hash (old_var->dv));
5923a5e7 8772
8773 if (!new_var)
8774 {
8775 /* Variable has disappeared. */
72fdb379 8776 variable empty_var = NULL;
9845d120 8777
72fdb379 8778 if (old_var->onepart == ONEPART_VALUE
8779 || old_var->onepart == ONEPART_DEXPR)
9845d120 8780 {
72fdb379 8781 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8782 if (empty_var)
9845d120 8783 {
72fdb379 8784 gcc_checking_assert (!empty_var->in_changed_variables);
8785 if (!VAR_LOC_1PAUX (old_var))
8786 {
8787 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8788 VAR_LOC_1PAUX (empty_var) = NULL;
8789 }
8790 else
8791 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9845d120 8792 }
9845d120 8793 }
72fdb379 8794
8795 if (!empty_var)
bc95df68 8796 {
72fdb379 8797 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8798 empty_var->dv = old_var->dv;
8799 empty_var->refcount = 0;
8800 empty_var->n_var_parts = 0;
8801 empty_var->onepart = old_var->onepart;
8802 empty_var->in_changed_variables = false;
8803 }
bc95df68 8804
72fdb379 8805 if (empty_var->onepart)
8806 {
8807 /* Propagate the auxiliary data to (ultimately)
8808 changed_variables. */
8809 empty_var->var_part[0].loc_chain = NULL;
8810 empty_var->var_part[0].cur_loc = NULL;
8811 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8812 VAR_LOC_1PAUX (old_var) = NULL;
bc95df68 8813 }
72fdb379 8814 variable_was_changed (empty_var, NULL);
8815 /* Continue traversing the hash table. */
8816 return 1;
8817 }
8818 /* Update cur_loc and one-part auxiliary data, before new_var goes
8819 through variable_was_changed. */
8820 if (old_var != new_var && new_var->onepart)
8821 {
8822 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8823 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8824 VAR_LOC_1PAUX (old_var) = NULL;
8825 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
bc95df68 8826 }
72fdb379 8827 if (variable_different_p (old_var, new_var))
8828 variable_was_changed (new_var, NULL);
5923a5e7 8829
8830 /* Continue traversing the hash table. */
8831 return 1;
8832}
8833
8834/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8835 table DATA. */
8836
8837static int
8838emit_notes_for_differences_2 (void **slot, void *data)
8839{
8840 htab_t old_vars = (htab_t) data;
8841 variable old_var, new_var;
8842
9845d120 8843 new_var = (variable) *slot;
8844 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8845 dv_htab_hash (new_var->dv));
5923a5e7 8846 if (!old_var)
8847 {
bc95df68 8848 int i;
bc95df68 8849 for (i = 0; i < new_var->n_var_parts; i++)
8850 new_var->var_part[i].cur_loc = NULL;
5923a5e7 8851 variable_was_changed (new_var, NULL);
8852 }
8853
8854 /* Continue traversing the hash table. */
8855 return 1;
8856}
8857
8858/* Emit notes before INSN for differences between dataflow sets OLD_SET and
8859 NEW_SET. */
8860
8861static void
8862emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8863 dataflow_set *new_set)
8864{
a8f6ad2b 8865 htab_traverse (shared_hash_htab (old_set->vars),
8866 emit_notes_for_differences_1,
8867 shared_hash_htab (new_set->vars));
8868 htab_traverse (shared_hash_htab (new_set->vars),
8869 emit_notes_for_differences_2,
8870 shared_hash_htab (old_set->vars));
9845d120 8871 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
5923a5e7 8872}
8873
8ee59e4e 8874/* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8875
8876static rtx
8877next_non_note_insn_var_location (rtx insn)
8878{
8879 while (insn)
8880 {
8881 insn = NEXT_INSN (insn);
8882 if (insn == 0
8883 || !NOTE_P (insn)
8884 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8885 break;
8886 }
8887
8888 return insn;
8889}
8890
5923a5e7 8891/* Emit the notes for changes of location parts in the basic block BB. */
8892
8893static void
9845d120 8894emit_notes_in_bb (basic_block bb, dataflow_set *set)
5923a5e7 8895{
c77c64d8 8896 unsigned int i;
8897 micro_operation *mo;
5923a5e7 8898
9845d120 8899 dataflow_set_clear (set);
8900 dataflow_set_copy (set, &VTI (bb)->in);
5923a5e7 8901
48148244 8902 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
5923a5e7 8903 {
c77c64d8 8904 rtx insn = mo->insn;
8ee59e4e 8905 rtx next_insn = next_non_note_insn_var_location (insn);
5923a5e7 8906
c77c64d8 8907 switch (mo->type)
5923a5e7 8908 {
8909 case MO_CALL:
9845d120 8910 dataflow_set_clear_at_call (set);
8911 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
4143d08b 8912 {
8913 rtx arguments = mo->u.loc, *p = &arguments, note;
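	    /* Expand each recorded argument location; entries that cannot
	       be expanded to a valid location are dropped from the list.  */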
8914 while (*p)
8915 {
8916 XEXP (XEXP (*p, 0), 1)
8917 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
72fdb379 8918 shared_hash_htab (set->vars));
4143d08b 8919 /* If expansion is successful, keep it in the list. */
8920 if (XEXP (XEXP (*p, 0), 1))
8921 p = &XEXP (*p, 1);
8922 /* Otherwise, if the following item is data_value for it,
 8923		       drop it too.  */
8924 else if (XEXP (*p, 1)
8925 && REG_P (XEXP (XEXP (*p, 0), 0))
8926 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8927 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8928 0))
8929 && REGNO (XEXP (XEXP (*p, 0), 0))
8930 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8931 0), 0)))
8932 *p = XEXP (XEXP (*p, 1), 1);
8933 /* Just drop this item. */
8934 else
8935 *p = XEXP (*p, 1);
8936 }
8937 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8938 NOTE_VAR_LOCATION (note) = arguments;
8939 }
9845d120 8940 break;
8941
8942 case MO_USE:
5923a5e7 8943 {
c77c64d8 8944 rtx loc = mo->u.loc;
5923a5e7 8945
9845d120 8946 if (REG_P (loc))
8947 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8948 else
8949 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8950
8951 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
5923a5e7 8952 }
8953 break;
8954
9845d120 8955 case MO_VAL_LOC:
9a8a3ff4 8956 {
c77c64d8 8957 rtx loc = mo->u.loc;
9845d120 8958 rtx val, vloc;
8959 tree var;
331cf53a 8960
9845d120 8961 if (GET_CODE (loc) == CONCAT)
8962 {
8963 val = XEXP (loc, 0);
8964 vloc = XEXP (loc, 1);
8965 }
9a8a3ff4 8966 else
9845d120 8967 {
8968 val = NULL_RTX;
8969 vloc = loc;
8970 }
8971
8972 var = PAT_VAR_LOCATION_DECL (vloc);
8973
8974 clobber_variable_part (set, NULL_RTX,
8975 dv_from_decl (var), 0, NULL_RTX);
8976 if (val)
8977 {
8978 if (VAL_NEEDS_RESOLUTION (loc))
8979 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8980 set_variable_part (set, val, dv_from_decl (var), 0,
8981 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8982 INSERT);
8983 }
1197d3d7 8984 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8985 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8986 dv_from_decl (var), 0,
8987 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8988 INSERT);
9845d120 8989
8990 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8991 }
8992 break;
8993
8994 case MO_VAL_USE:
8995 {
c77c64d8 8996 rtx loc = mo->u.loc;
9845d120 8997 rtx val, vloc, uloc;
8998
8999 vloc = uloc = XEXP (loc, 1);
9000 val = XEXP (loc, 0);
9001
9002 if (GET_CODE (val) == CONCAT)
9003 {
9004 uloc = XEXP (val, 1);
9005 val = XEXP (val, 0);
9006 }
9007
9008 if (VAL_NEEDS_RESOLUTION (loc))
9009 val_resolve (set, val, vloc, insn);
bf262632 9010 else
9011 val_store (set, val, uloc, insn, false);
9845d120 9012
9013 if (VAL_HOLDS_TRACK_EXPR (loc))
9014 {
9015 if (GET_CODE (uloc) == REG)
9016 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9017 NULL);
9018 else if (GET_CODE (uloc) == MEM)
9019 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9020 NULL);
9021 }
9022
9023 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9024 }
9025 break;
9026
9027 case MO_VAL_SET:
9028 {
c77c64d8 9029 rtx loc = mo->u.loc;
8081d3a6 9030 rtx val, vloc, uloc;
df226854 9031 rtx dstv, srcv;
9a8a3ff4 9032
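	    /* The recorded rtx pairs the VALUE (operand 0) with the
	       underlying location or SET (operand 1); unpack it into VAL,
	       the tracked location ULOC/VLOC and the store's destination
	       and source DSTV/SRCV.  */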
a243dd11 9033 vloc = loc;
a243dd11 9034 uloc = XEXP (vloc, 1);
9035 val = XEXP (vloc, 0);
9036 vloc = uloc;
9845d120 9037
df226854 9038 if (GET_CODE (uloc) == SET)
9039 {
9040 dstv = SET_DEST (uloc);
9041 srcv = SET_SRC (uloc);
9042 }
9043 else
9044 {
9045 dstv = uloc;
9046 srcv = NULL;
9047 }
9048
9845d120 9049 if (GET_CODE (val) == CONCAT)
9050 {
df226854 9051 dstv = vloc = XEXP (val, 1);
9845d120 9052 val = XEXP (val, 0);
9053 }
9054
9055 if (GET_CODE (vloc) == SET)
9056 {
df226854 9057 srcv = SET_SRC (vloc);
9845d120 9058
df226854 9059 gcc_assert (val != srcv);
9845d120 9060 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9061
df226854 9062 dstv = vloc = SET_DEST (vloc);
9845d120 9063
9064 if (VAL_NEEDS_RESOLUTION (loc))
df226854 9065 val_resolve (set, val, srcv, insn);
9845d120 9066 }
9067 else if (VAL_NEEDS_RESOLUTION (loc))
9068 {
9069 gcc_assert (GET_CODE (uloc) == SET
9070 && GET_CODE (SET_SRC (uloc)) == REG);
9071 val_resolve (set, val, SET_SRC (uloc), insn);
9072 }
9073
9074 if (VAL_HOLDS_TRACK_EXPR (loc))
9075 {
9076 if (VAL_EXPR_IS_CLOBBERED (loc))
9077 {
9078 if (REG_P (uloc))
9079 var_reg_delete (set, uloc, true);
9080 else if (MEM_P (uloc))
df226854 9081 {
9082 gcc_assert (MEM_P (dstv));
9083 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9084 var_mem_delete (set, dstv, true);
9085 }
9845d120 9086 }
9087 else
9088 {
9089 bool copied_p = VAL_EXPR_IS_COPIED (loc);
df226854 9090 rtx src = NULL, dst = uloc;
9845d120 9091 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9092
9093 if (GET_CODE (uloc) == SET)
9094 {
df226854 9095 src = SET_SRC (uloc);
9096 dst = SET_DEST (uloc);
9845d120 9097 }
9098
9099 if (copied_p)
9100 {
df226854 9101 status = find_src_status (set, src);
9845d120 9102
df226854 9103 src = find_src_set_src (set, src);
9845d120 9104 }
9105
df226854 9106 if (REG_P (dst))
9107 var_reg_delete_and_set (set, dst, !copied_p,
9108 status, srcv);
9109 else if (MEM_P (dst))
9110 {
9111 gcc_assert (MEM_P (dstv));
9112 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9113 var_mem_delete_and_set (set, dstv, !copied_p,
9114 status, srcv);
9115 }
9845d120 9116 }
9117 }
9118 else if (REG_P (uloc))
9119 var_regno_delete (set, REGNO (uloc));
c573c1b5 9120 else if (MEM_P (uloc))
9121 clobber_overlapping_mems (set, uloc);
9845d120 9122
df226854 9123 val_store (set, val, dstv, insn, true);
9845d120 9124
8ee59e4e 9125 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9845d120 9126 set->vars);
9a8a3ff4 9127 }
9128 break;
9129
5923a5e7 9130 case MO_SET:
9131 {
c77c64d8 9132 rtx loc = mo->u.loc;
e10d697d 9133 rtx set_src = NULL;
d53bb226 9134
e10d697d 9135 if (GET_CODE (loc) == SET)
d53bb226 9136 {
e10d697d 9137 set_src = SET_SRC (loc);
9138 loc = SET_DEST (loc);
d53bb226 9139 }
5923a5e7 9140
8ad4c111 9141 if (REG_P (loc))
9845d120 9142 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
d53bb226 9143 set_src);
5923a5e7 9144 else
9845d120 9145 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
d53bb226 9146 set_src);
96414f01 9147
8ee59e4e 9148 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9845d120 9149 set->vars);
96414f01 9150 }
9151 break;
9152
9153 case MO_COPY:
9154 {
c77c64d8 9155 rtx loc = mo->u.loc;
d53bb226 9156 enum var_init_status src_status;
e10d697d 9157 rtx set_src = NULL;
9158
9159 if (GET_CODE (loc) == SET)
9160 {
9161 set_src = SET_SRC (loc);
9162 loc = SET_DEST (loc);
9163 }
d53bb226 9164
9845d120 9165 src_status = find_src_status (set, set_src);
9166 set_src = find_src_set_src (set, set_src);
96414f01 9167
9168 if (REG_P (loc))
9845d120 9169 var_reg_delete_and_set (set, loc, false, src_status, set_src);
96414f01 9170 else
9845d120 9171 var_mem_delete_and_set (set, loc, false, src_status, set_src);
5923a5e7 9172
8ee59e4e 9173 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9845d120 9174 set->vars);
5923a5e7 9175 }
9176 break;
9177
9178 case MO_USE_NO_VAR:
5923a5e7 9179 {
c77c64d8 9180 rtx loc = mo->u.loc;
5923a5e7 9181
8ad4c111 9182 if (REG_P (loc))
9845d120 9183 var_reg_delete (set, loc, false);
5923a5e7 9184 else
9845d120 9185 var_mem_delete (set, loc, false);
96414f01 9186
9845d120 9187 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
96414f01 9188 }
9189 break;
5923a5e7 9190
96414f01 9191 case MO_CLOBBER:
9192 {
c77c64d8 9193 rtx loc = mo->u.loc;
96414f01 9194
9195 if (REG_P (loc))
9845d120 9196 var_reg_delete (set, loc, true);
9a8a3ff4 9197 else
9845d120 9198 var_mem_delete (set, loc, true);
96414f01 9199
8ee59e4e 9200 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9845d120 9201 set->vars);
5923a5e7 9202 }
9203 break;
9204
9205 case MO_ADJUST:
c77c64d8 9206 set->stack_adjust += mo->u.adjust;
5923a5e7 9207 break;
9208 }
9209 }
5923a5e7 9210}
9211
9212/* Emit notes for the whole function. */
9213
9214static void
9215vt_emit_notes (void)
9216{
9217 basic_block bb;
9845d120 9218 dataflow_set cur;
5923a5e7 9219
22167fd5 9220 gcc_assert (!htab_elements (changed_variables));
5923a5e7 9221
9845d120 9222 /* Free memory occupied by the out hash tables, as they aren't used
9223 anymore. */
9224 FOR_EACH_BB (bb)
9225 dataflow_set_clear (&VTI (bb)->out);
9226
5923a5e7 9227 /* Enable emitting notes by functions (mainly by set_variable_part and
9228 delete_variable_part). */
9229 emit_notes = true;
9230
9845d120 9231 if (MAY_HAVE_DEBUG_INSNS)
df226854 9232 {
9233 dropped_values = htab_create (cselib_get_next_uid () * 2,
9234 variable_htab_hash, variable_htab_eq,
9235 variable_htab_free);
9236 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9237 sizeof (loc_exp_dep), 64);
9238 }
9845d120 9239
9240 dataflow_set_init (&cur);
5923a5e7 9241
9242 FOR_EACH_BB (bb)
9243 {
9244 /* Emit the notes for changes of variable locations between two
9245 subsequent basic blocks. */
9845d120 9246 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
5923a5e7 9247
9248 /* Emit the notes for the changes in the basic block itself. */
9845d120 9249 emit_notes_in_bb (bb, &cur);
5923a5e7 9250
9845d120 9251      /* Free memory occupied by the in hash table; we won't need it
9252 again. */
9253 dataflow_set_clear (&VTI (bb)->in);
5923a5e7 9254 }
9845d120 9255#ifdef ENABLE_CHECKING
9256 htab_traverse (shared_hash_htab (cur.vars),
9257 emit_notes_for_differences_1,
9258 shared_hash_htab (empty_shared_hash));
9845d120 9259#endif
9260 dataflow_set_destroy (&cur);
9261
9262 if (MAY_HAVE_DEBUG_INSNS)
b7517b82 9263 htab_delete (dropped_values);
9845d120 9264
5923a5e7 9265 emit_notes = false;
9266}
9267
 9268/* If there is a declaration and offset associated with register/memory RTL,
 9269   assign the declaration to *DECLP and the offset to *OFFSETP, and return true.  */
9270
9271static bool
9272vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9273{
8ad4c111 9274 if (REG_P (rtl))
5923a5e7 9275 {
9276 if (REG_ATTRS (rtl))
9277 {
9278 *declp = REG_EXPR (rtl);
9279 *offsetp = REG_OFFSET (rtl);
9280 return true;
9281 }
9282 }
e16ceb8e 9283 else if (MEM_P (rtl))
5923a5e7 9284 {
9285 if (MEM_ATTRS (rtl))
9286 {
9287 *declp = MEM_EXPR (rtl);
eeb0ae23 9288 *offsetp = INT_MEM_OFFSET (rtl);
5923a5e7 9289 return true;
9290 }
9291 }
9292 return false;
9293}
9294
8081d3a6 9295/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9296 of VAL. */
236e4c9e 9297
9298static void
8081d3a6 9299record_entry_value (cselib_val *val, rtx rtl)
72fdb379 9300{
9301 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
72fdb379 9302
9303 ENTRY_VALUE_EXP (ev) = rtl;
9304
8081d3a6 9305 cselib_add_permanent_equiv (val, ev, get_insns ());
236e4c9e 9306}
9307
4d5b4e6a 9308/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
5923a5e7 9309
9310static void
4d5b4e6a 9311vt_add_function_parameter (tree parm)
5923a5e7 9312{
4d5b4e6a 9313 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9314 rtx incoming = DECL_INCOMING_RTL (parm);
9315 tree decl;
9316 enum machine_mode mode;
9317 HOST_WIDE_INT offset;
9318 dataflow_set *out;
9319 decl_or_value dv;
5923a5e7 9320
4d5b4e6a 9321 if (TREE_CODE (parm) != PARM_DECL)
9322 return;
5923a5e7 9323
4d5b4e6a 9324 if (!decl_rtl || !incoming)
9325 return;
5923a5e7 9326
4d5b4e6a 9327 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9328 return;
5923a5e7 9329
89952baf 9330 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9331 rewrite the incoming location of parameters passed on the stack
9332 into MEMs based on the argument pointer, so that incoming doesn't
9333 depend on a pseudo. */
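  /* For example, an incoming location such as
	(mem:SI (plus (reg internal_arg_pointer) (const_int 8)))
     would be rewritten below into
	(mem:SI (plus (reg argp) (const_int 8 - FIRST_PARM_OFFSET)))
     so that it no longer mentions the pseudo.  (Illustrative form; the
     exact offset is computed below.)  */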
04c9ad62 9334 if (MEM_P (incoming)
04c9ad62 9335 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9336 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9337 && XEXP (XEXP (incoming, 0), 0)
9338 == crtl->args.internal_arg_pointer
9339 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9340 {
9341 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9342 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9343 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9344 incoming
9345 = replace_equiv_address_nv (incoming,
29c05e22 9346 plus_constant (Pmode,
9347 arg_pointer_rtx, off));
04c9ad62 9348 }
9349
8ee59e4e 9350#ifdef HAVE_window_save
9351 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9352 If the target machine has an explicit window save instruction, the
9353 actual entry value is the corresponding OUTGOING_REGNO instead. */
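  /* For example, on a SPARC-like target with register windows, a parameter
     whose DECL_INCOMING_RTL is %i0 was actually passed by the caller in
     %o0 (= OUTGOING_REGNO (%i0)), which is where its value lives at
     function entry, before the window shift.  (Illustrative; the mapping
     is target-dependent.)  */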
9354 if (REG_P (incoming)
9355 && HARD_REGISTER_P (incoming)
9356 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9357 {
9358 parm_reg_t *p
9359 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9360 p->incoming = incoming;
9361 incoming
9362 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9363 OUTGOING_REGNO (REGNO (incoming)), 0);
9364 p->outgoing = incoming;
9365 }
9366 else if (MEM_P (incoming)
9367 && REG_P (XEXP (incoming, 0))
9368 && HARD_REGISTER_P (XEXP (incoming, 0)))
9369 {
9370 rtx reg = XEXP (incoming, 0);
9371 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9372 {
9373 parm_reg_t *p
9374 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9375 p->incoming = reg;
9376 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9377 p->outgoing = reg;
9378 incoming = replace_equiv_address_nv (incoming, reg);
9379 }
9380 }
9381#endif
9382
4d5b4e6a 9383 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9384 {
9385 if (REG_P (incoming) || MEM_P (incoming))
80c70e76 9386 {
4d5b4e6a 9387 /* This means the argument is passed by invisible reference. */
9388 offset = 0;
9389 decl = parm;
9390 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
80c70e76 9391 }
4d5b4e6a 9392 else
58029e61 9393 {
4d5b4e6a 9394 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9395 return;
9396 offset += byte_lowpart_offset (GET_MODE (incoming),
9397 GET_MODE (decl_rtl));
58029e61 9398 }
4d5b4e6a 9399 }
5923a5e7 9400
4d5b4e6a 9401 if (!decl)
9402 return;
9403
9404 if (parm != decl)
9405 {
9406 /* Assume that DECL_RTL was a pseudo that got spilled to
9407 memory. The spill slot sharing code will force the
9408 memory to reference spill_slot_decl (%sfp), so we don't
9409 match above. That's OK; the pseudo must have referenced
9410 the entire parameter, so just reset OFFSET. */
9411 gcc_assert (decl == get_spill_slot_decl (false));
9412 offset = 0;
9413 }
80c70e76 9414
4d5b4e6a 9415 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9416 return;
5923a5e7 9417
4d5b4e6a 9418 out = &VTI (ENTRY_BLOCK_PTR)->out;
9845d120 9419
4d5b4e6a 9420 dv = dv_from_decl (parm);
9845d120 9421
4d5b4e6a 9422 if (target_for_debug_bind (parm)
9423 /* We can't deal with these right now, because this kind of
9424 variable is single-part. ??? We could handle parallels
9425 that describe multiple locations for the same single
9426 value, but ATM we don't. */
9427 && GET_CODE (incoming) != PARALLEL)
9428 {
9429 cselib_val *val;
9845d120 9430
4d5b4e6a 9431 /* ??? We shouldn't ever hit this, but it may happen because
9432 arguments passed by invisible reference aren't dealt with
9433 above: incoming-rtl will have Pmode rather than the
9434 expected mode for the type. */
9435 if (offset)
9436 return;
9845d120 9437
4143d08b 9438 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
9439 VOIDmode, get_insns ());
9845d120 9440
4d5b4e6a 9441 /* ??? Float-typed values in memory are not handled by
9442 cselib. */
9443 if (val)
5923a5e7 9444 {
4d5b4e6a 9445 preserve_value (val);
9446 set_variable_part (out, val->val_rtx, dv, offset,
9845d120 9447 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
4d5b4e6a 9448 dv = dv_from_value (val->val_rtx);
80c70e76 9449 }
7882e02e 9450
9451 if (MEM_P (incoming))
9452 {
9453 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9454 VOIDmode, get_insns ());
9455 if (val)
9456 {
9457 preserve_value (val);
9458 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9459 }
9460 }
5923a5e7 9461 }
9845d120 9462
4d5b4e6a 9463 if (REG_P (incoming))
9464 {
9465 incoming = var_lowpart (mode, incoming);
9466 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9467 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9468 incoming);
9469 set_variable_part (out, incoming, dv, offset,
9470 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
4143d08b 9471 if (dv_is_value_p (dv))
9472 {
8081d3a6 9473 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
4143d08b 9474 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9475 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9476 {
9477 enum machine_mode indmode
9478 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9479 rtx mem = gen_rtx_MEM (indmode, incoming);
72fdb379 9480 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9481 VOIDmode,
9482 get_insns ());
4143d08b 9483 if (val)
9484 {
9485 preserve_value (val);
8081d3a6 9486 record_entry_value (val, mem);
72fdb379 9487 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9488 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
4143d08b 9489 }
9490 }
9491 }
4d5b4e6a 9492 }
9493 else if (MEM_P (incoming))
9494 {
9495 incoming = var_lowpart (mode, incoming);
9496 set_variable_part (out, incoming, dv, offset,
9497 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9498 }
9499}
9500
9501/* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9502
9503static void
9504vt_add_function_parameters (void)
9505{
9506 tree parm;
9507
9508 for (parm = DECL_ARGUMENTS (current_function_decl);
9509 parm; parm = DECL_CHAIN (parm))
9510 vt_add_function_parameter (parm);
9511
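  /* Typically when the function returns its value in memory, DECL_RESULT
     has a DECL_VALUE_EXPR of the form *<retval-pointer>, where the pointer
     is an artificial, nameless PARM_DECL; track that hidden parameter too,
     as checked below.  */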
9512 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9513 {
9514 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9515
9516 if (TREE_CODE (vexpr) == INDIRECT_REF)
9517 vexpr = TREE_OPERAND (vexpr, 0);
9518
9519 if (TREE_CODE (vexpr) == PARM_DECL
9520 && DECL_ARTIFICIAL (vexpr)
9521 && !DECL_IGNORED_P (vexpr)
9522 && DECL_NAMELESS (vexpr))
9523 vt_add_function_parameter (vexpr);
9524 }
5923a5e7 9525}
9526
35af0188 9527/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
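/* The canonical case is a prologue insn whose pattern (or whose
   REG_FRAME_RELATED_EXPR note) contains
	(set (reg hard_frame_pointer) (reg stack_pointer))
   e.g. the "movl %esp, %ebp" of a classic x86 prologue.  (Illustrative;
   the exact form is target-dependent.)  */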
9528
9529static bool
9530fp_setter (rtx insn)
9531{
9532 rtx pat = PATTERN (insn);
9533 if (RTX_FRAME_RELATED_P (insn))
9534 {
9535 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9536 if (expr)
9537 pat = XEXP (expr, 0);
9538 }
9539 if (GET_CODE (pat) == SET)
9540 return SET_DEST (pat) == hard_frame_pointer_rtx;
9541 else if (GET_CODE (pat) == PARALLEL)
9542 {
9543 int i;
9544 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9545 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9546 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9547 return true;
9548 }
9549 return false;
9550}
9551
9552/* Initialize cfa_base_rtx, create a preserved VALUE for it and
9553 ensure it isn't flushed during cselib_reset_table.
9554 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9555 has been eliminated. */
9556
9557static void
9558vt_init_cfa_base (void)
9559{
9560 cselib_val *val;
9561
9562#ifdef FRAME_POINTER_CFA_OFFSET
9563 cfa_base_rtx = frame_pointer_rtx;
4afc3056 9564 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
35af0188 9565#else
9566 cfa_base_rtx = arg_pointer_rtx;
4afc3056 9567 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
35af0188 9568#endif
dc9d10b1 9569 if (cfa_base_rtx == hard_frame_pointer_rtx
9570 || !fixed_regs[REGNO (cfa_base_rtx)])
9571 {
9572 cfa_base_rtx = NULL_RTX;
9573 return;
9574 }
35af0188 9575 if (!MAY_HAVE_DEBUG_INSNS)
9576 return;
9577
1837e966 9578 /* Tell alias analysis that cfa_base_rtx should share
9579 find_base_term value with stack pointer or hard frame pointer. */
04c9ad62 9580 if (!frame_pointer_needed)
9581 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9582 else if (!crtl->stack_realign_tried)
9583 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9584
c2becbaf 9585 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
1f864115 9586 VOIDmode, get_insns ());
35af0188 9587 preserve_value (val);
4573d576 9588 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
35af0188 9589}
9590
5923a5e7 9591/* Allocate and initialize the data structures for variable tracking
9592 and parse the RTL to get the micro operations. */
9593
35af0188 9594static bool
5923a5e7 9595vt_initialize (void)
9596{
c69a7a6b 9597 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
35af0188 9598 HOST_WIDE_INT fp_cfa_offset = -1;
5923a5e7 9599
9600 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9601
35af0188 9602 attrs_pool = create_alloc_pool ("attrs_def pool",
9603 sizeof (struct attrs_def), 1024);
9604 var_pool = create_alloc_pool ("variable_def pool",
9605 sizeof (struct variable_def)
9606 + (MAX_VAR_PARTS - 1)
9607 * sizeof (((variable)NULL)->var_part[0]), 64);
9608 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9609 sizeof (struct location_chain_def),
9610 1024);
9611 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9612 sizeof (struct shared_hash_def), 256);
9613 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9614 empty_shared_hash->refcount = 1;
9615 empty_shared_hash->htab
9616 = htab_create (1, variable_htab_hash, variable_htab_eq,
9617 variable_htab_free);
9618 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9619 variable_htab_free);
35af0188 9620
9621 /* Init the IN and OUT sets. */
9622 FOR_ALL_BB (bb)
9623 {
9624 VTI (bb)->visited = false;
9625 VTI (bb)->flooded = false;
9626 dataflow_set_init (&VTI (bb)->in);
9627 dataflow_set_init (&VTI (bb)->out);
9628 VTI (bb)->permp = NULL;
9629 }
9630
9631 if (MAY_HAVE_DEBUG_INSNS)
9632 {
9633 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9845d120 9634 scratch_regs = BITMAP_ALLOC (NULL);
9635 valvar_pool = create_alloc_pool ("small variable_def pool",
9636 sizeof (struct variable_def), 256);
ace62c8c 9637 preserved_values = VEC_alloc (rtx, heap, 256);
9845d120 9638 }
9639 else
9640 {
9641 scratch_regs = NULL;
9642 valvar_pool = NULL;
9643 }
9644
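  /* Pre-seed cselib with permanent equivalences between the CFA base
     register (frame_pointer_rtx or arg_pointer_rtx) and the incoming
     stack pointer, so that addresses expressed in terms of either one
     can be unified throughout the function.  */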
e9c7d461 9645 if (MAY_HAVE_DEBUG_INSNS)
9646 {
9647 rtx reg, expr;
9648 int ofst;
9649 cselib_val *val;
9650
9651#ifdef FRAME_POINTER_CFA_OFFSET
9652 reg = frame_pointer_rtx;
9653 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9654#else
9655 reg = arg_pointer_rtx;
9656 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9657#endif
9658
9659 ofst -= INCOMING_FRAME_SP_OFFSET;
9660
9661 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9662 VOIDmode, get_insns ());
9663 preserve_value (val);
9664 cselib_preserve_cfa_base_value (val, REGNO (reg));
9665 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9666 stack_pointer_rtx, -ofst);
9667 cselib_add_permanent_equiv (val, expr, get_insns ());
9668
9669 if (ofst)
9670 {
9671 val = cselib_lookup_from_insn (stack_pointer_rtx,
9672 GET_MODE (stack_pointer_rtx), 1,
9673 VOIDmode, get_insns ());
9674 preserve_value (val);
9675 expr = plus_constant (GET_MODE (reg), reg, ofst);
9676 cselib_add_permanent_equiv (val, expr, get_insns ());
9677 }
9678 }
9679
c69a7a6b 9680 /* In order to factor out the adjustments made to the stack pointer or to
9681 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9682 instead of individual location lists, we're going to rewrite MEMs based
9683 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9684 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9685 resp. arg_pointer_rtx. We can do this either when there is no frame
9686 pointer in the function and stack adjustments are consistent for all
9687 basic blocks or when there is a frame pointer and no stack realignment.
9688 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9689 has been eliminated. */
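  /* E.g. with no frame pointer, a stack slot accessed as
	(mem:SI (plus (reg sp) (const_int 12)))
     gets rewritten in terms of the virtual CFA pointer, folding in the
     stack adjustment known at that point, so that a single
     DW_OP_fbreg-based location can describe it for the whole function.
     (Illustrative; the actual rewriting happens while the insns are
     scanned and adjusted.)  */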
35af0188 9690 if (!frame_pointer_needed)
9691 {
9692 rtx reg, elim;
9693
9694 if (!vt_stack_adjustments ())
9695 return false;
9696
9697#ifdef FRAME_POINTER_CFA_OFFSET
9698 reg = frame_pointer_rtx;
9699#else
9700 reg = arg_pointer_rtx;
9701#endif
9702 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9703 if (elim != reg)
9704 {
9705 if (GET_CODE (elim) == PLUS)
9706 elim = XEXP (elim, 0);
9707 if (elim == stack_pointer_rtx)
9708 vt_init_cfa_base ();
9709 }
9710 }
9711 else if (!crtl->stack_realign_tried)
9712 {
9713 rtx reg, elim;
9714
9715#ifdef FRAME_POINTER_CFA_OFFSET
9716 reg = frame_pointer_rtx;
9717 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9718#else
9719 reg = arg_pointer_rtx;
9720 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9721#endif
9722 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9723 if (elim != reg)
9724 {
9725 if (GET_CODE (elim) == PLUS)
9726 {
9727 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9728 elim = XEXP (elim, 0);
9729 }
9730 if (elim != hard_frame_pointer_rtx)
9731 fp_cfa_offset = -1;
35af0188 9732 }
c69a7a6b 9733 else
9734 fp_cfa_offset = -1;
35af0188 9735 }
c69a7a6b 9736
04c9ad62 9737 /* If the stack is realigned and a DRAP register is used, we're going to
9738 rewrite MEMs based on it representing incoming locations of parameters
9739 passed on the stack into MEMs based on the argument pointer. Although
9740 we aren't going to rewrite other MEMs, we still need to initialize the
9741 virtual CFA pointer in order to ensure that the argument pointer will
9742 be seen as a constant throughout the function.
9743
9744 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9745 else if (stack_realign_drap)
9746 {
9747 rtx reg, elim;
9748
9749#ifdef FRAME_POINTER_CFA_OFFSET
9750 reg = frame_pointer_rtx;
9751#else
9752 reg = arg_pointer_rtx;
9753#endif
9754 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9755 if (elim != reg)
9756 {
9757 if (GET_CODE (elim) == PLUS)
9758 elim = XEXP (elim, 0);
9759 if (elim == hard_frame_pointer_rtx)
9760 vt_init_cfa_base ();
9761 }
9762 }
9763
35af0188 9764 hard_frame_pointer_adjustment = -1;
9765
4143d08b 9766 vt_add_function_parameters ();
9767
5923a5e7 9768 FOR_EACH_BB (bb)
9769 {
9770 rtx insn;
f5ceea15 9771 HOST_WIDE_INT pre, post = 0;
348893f5 9772 basic_block first_bb, last_bb;
9845d120 9773
9774 if (MAY_HAVE_DEBUG_INSNS)
9775 {
c77c64d8 9776 cselib_record_sets_hook = add_with_sets;
9845d120 9777 if (dump_file && (dump_flags & TDF_DETAILS))
9778 fprintf (dump_file, "first value: %i\n",
01df1184 9779 cselib_get_next_uid ());
9845d120 9780 }
5923a5e7 9781
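      /* Process a maximal group of consecutive basic blocks connected by
	 fallthru edges as one unit, so that cselib VALUEs can be shared
	 across the whole group before its table is reset below.  */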
348893f5 9782 first_bb = bb;
9783 for (;;)
9784 {
9785 edge e;
9786 if (bb->next_bb == EXIT_BLOCK_PTR
9787 || ! single_pred_p (bb->next_bb))
9788 break;
9789 e = find_edge (bb, bb->next_bb);
9790 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9791 break;
9792 bb = bb->next_bb;
9793 }
9794 last_bb = bb;
9795
c77c64d8 9796 /* Add the micro-operations to the vector. */
348893f5 9797 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
5923a5e7 9798 {
35af0188 9799 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9800 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
348893f5 9801 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9802 insn = NEXT_INSN (insn))
5923a5e7 9803 {
348893f5 9804 if (INSN_P (insn))
5923a5e7 9805 {
348893f5 9806 if (!frame_pointer_needed)
9845d120 9807 {
348893f5 9808 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9809 if (pre)
9810 {
c77c64d8 9811 micro_operation mo;
9812 mo.type = MO_ADJUST;
9813 mo.u.adjust = pre;
9814 mo.insn = insn;
348893f5 9815 if (dump_file && (dump_flags & TDF_DETAILS))
9816 log_op_type (PATTERN (insn), bb, insn,
9817 MO_ADJUST, dump_file);
35af0188 9818 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9819 &mo);
9820 VTI (bb)->out.stack_adjust += pre;
348893f5 9821 }
5923a5e7 9822 }
5923a5e7 9823
348893f5 9824 cselib_hook_called = false;
35af0188 9825 adjust_insn (bb, insn);
348893f5 9826 if (MAY_HAVE_DEBUG_INSNS)
5923a5e7 9827 {
4143d08b 9828 if (CALL_P (insn))
9829 prepare_call_arguments (bb, insn);
348893f5 9830 cselib_process_insn (insn);
9831 if (dump_file && (dump_flags & TDF_DETAILS))
9832 {
9833 print_rtl_single (dump_file, insn);
9834 dump_cselib_table (dump_file);
9835 }
5923a5e7 9836 }
348893f5 9837 if (!cselib_hook_called)
9838 add_with_sets (insn, 0, 0);
35af0188 9839 cancel_changes (0);
5923a5e7 9840
348893f5 9841 if (!frame_pointer_needed && post)
9842 {
c77c64d8 9843 micro_operation mo;
9844 mo.type = MO_ADJUST;
9845 mo.u.adjust = post;
9846 mo.insn = insn;
348893f5 9847 if (dump_file && (dump_flags & TDF_DETAILS))
9848 log_op_type (PATTERN (insn), bb, insn,
9849 MO_ADJUST, dump_file);
35af0188 9850 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9851 &mo);
9852 VTI (bb)->out.stack_adjust += post;
9853 }
9854
9855 if (bb == prologue_bb
c69a7a6b 9856 && fp_cfa_offset != -1
35af0188 9857 && hard_frame_pointer_adjustment == -1
9858 && RTX_FRAME_RELATED_P (insn)
9859 && fp_setter (insn))
9860 {
9861 vt_init_cfa_base ();
9862 hard_frame_pointer_adjustment = fp_cfa_offset;
348893f5 9863 }
5923a5e7 9864 }
9865 }
35af0188 9866 gcc_assert (offset == VTI (bb)->out.stack_adjust);
5923a5e7 9867 }
348893f5 9868
9869 bb = last_bb;
9870
9845d120 9871 if (MAY_HAVE_DEBUG_INSNS)
9872 {
c77c64d8 9873 cselib_preserve_only_values ();
9874 cselib_reset_table (cselib_get_next_uid ());
9845d120 9875 cselib_record_sets_hook = NULL;
9876 }
5923a5e7 9877 }
9878
35af0188 9879 hard_frame_pointer_adjustment = -1;
9845d120 9880 VTI (ENTRY_BLOCK_PTR)->flooded = true;
35af0188 9881 cfa_base_rtx = NULL_RTX;
9882 return true;
5923a5e7 9883}
9884
63f5ad44 9885/* This is *not* reset after each function. It gives each
9886 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
9887 a unique label number. */
9888
9889static int debug_label_num = 1;
9890
9845d120 9891/* Get rid of all debug insns from the insn stream. */
9892
9893static void
9894delete_debug_insns (void)
9895{
9896 basic_block bb;
9897 rtx insn, next;
9898
9899 if (!MAY_HAVE_DEBUG_INSNS)
9900 return;
9901
9902 FOR_EACH_BB (bb)
9903 {
9904 FOR_BB_INSNS_SAFE (bb, insn, next)
9905 if (DEBUG_INSN_P (insn))
63f5ad44 9906 {
9907 tree decl = INSN_VAR_LOCATION_DECL (insn);
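	  /* A debug bind of a named label is kept as a
	     NOTE_INSN_DELETED_DEBUG_LABEL so the label can still appear
	     in the debug info; any other debug insn is simply removed.  */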
9908 if (TREE_CODE (decl) == LABEL_DECL
9909 && DECL_NAME (decl)
9910 && !DECL_RTL_SET_P (decl))
9911 {
9912 PUT_CODE (insn, NOTE);
9913 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9914 NOTE_DELETED_LABEL_NAME (insn)
9915 = IDENTIFIER_POINTER (DECL_NAME (decl));
9916 SET_DECL_RTL (decl, insn);
9917 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9918 }
9919 else
9920 delete_insn (insn);
9921 }
9845d120 9922 }
9923}
9924
9925/* Run a fast, BB-local-only version of var tracking, to take care of
9926 information that we don't do global analysis on, so that not all
9927 information is lost. If SKIPPED holds, we're skipping the global
9928 pass entirely, so we should try to use information it would have
9929 handled as well. */
9930
9931static void
9932vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9933{
9934 /* ??? Just skip it all for now. */
9935 delete_debug_insns ();
9936}
9937
5923a5e7 9938/* Free the data structures needed for variable tracking. */
9939
9940static void
9941vt_finalize (void)
9942{
9943 basic_block bb;
9944
9945 FOR_EACH_BB (bb)
9946 {
c77c64d8 9947 VEC_free (micro_operation, heap, VTI (bb)->mos);
5923a5e7 9948 }
9949
9950 FOR_ALL_BB (bb)
9951 {
9952 dataflow_set_destroy (&VTI (bb)->in);
9953 dataflow_set_destroy (&VTI (bb)->out);
9845d120 9954 if (VTI (bb)->permp)
9955 {
9956 dataflow_set_destroy (VTI (bb)->permp);
9957 XDELETE (VTI (bb)->permp);
9958 }
5923a5e7 9959 }
9960 free_aux_for_blocks ();
a8f6ad2b 9961 htab_delete (empty_shared_hash->htab);
9962 htab_delete (changed_variables);
5923a5e7 9963 free_alloc_pool (attrs_pool);
9964 free_alloc_pool (var_pool);
9965 free_alloc_pool (loc_chain_pool);
a8f6ad2b 9966 free_alloc_pool (shared_hash_pool);
9845d120 9967
9968 if (MAY_HAVE_DEBUG_INSNS)
9969 {
b7517b82 9970 if (loc_exp_dep_pool)
9971 free_alloc_pool (loc_exp_dep_pool);
9972 loc_exp_dep_pool = NULL;
9845d120 9973 free_alloc_pool (valvar_pool);
ace62c8c 9974 VEC_free (rtx, heap, preserved_values);
9845d120 9975 cselib_finish ();
9976 BITMAP_FREE (scratch_regs);
9977 scratch_regs = NULL;
9978 }
9979
72fdb379 9980#ifdef HAVE_window_save
267001d7 9981 VEC_free (parm_reg_t, gc, windowed_parm_regs);
72fdb379 9982#endif
267001d7 9983
331cf53a 9984 if (vui_vec)
9845d120 9985 XDELETEVEC (vui_vec);
331cf53a 9986 vui_vec = NULL;
9987 vui_allocated = 0;
5923a5e7 9988}
9989
9990/* The entry point to the variable tracking pass. */
9991
76f4ab1c 9992static inline unsigned int
9993variable_tracking_main_1 (void)
5923a5e7 9994{
76f4ab1c 9995 bool success;
9996
9845d120 9997 if (flag_var_tracking_assignments < 0)
9998 {
9999 delete_debug_insns ();
10000 return 0;
10001 }
10002
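  /* Give up on the global analysis for very large, dense CFGs; the
     thresholds below are heuristic compile-time limits.  */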
5923a5e7 10003 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9845d120 10004 {
10005 vt_debug_insns_local (true);
10006 return 0;
10007 }
5923a5e7 10008
10009 mark_dfs_back_edges ();
35af0188 10010 if (!vt_initialize ())
5923a5e7 10011 {
35af0188 10012 vt_finalize ();
10013 vt_debug_insns_local (true);
10014 return 0;
5923a5e7 10015 }
10016
76f4ab1c 10017 success = vt_find_locations ();
10018
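  /* If the analysis failed while debug insns (variable tracking
     assignments) were enabled, drop them and retry the cheaper plain
     variable tracking once.  */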
10019 if (!success && flag_var_tracking_assignments > 0)
10020 {
10021 vt_finalize ();
10022
10023 delete_debug_insns ();
10024
10025 /* This is later restored by our caller. */
10026 flag_var_tracking_assignments = 0;
10027
35af0188 10028 success = vt_initialize ();
10029 gcc_assert (success);
76f4ab1c 10030
10031 success = vt_find_locations ();
10032 }
10033
10034 if (!success)
10035 {
10036 vt_finalize ();
10037 vt_debug_insns_local (false);
10038 return 0;
10039 }
5923a5e7 10040
562d71e8 10041 if (dump_file && (dump_flags & TDF_DETAILS))
5923a5e7 10042 {
10043 dump_dataflow_sets ();
4a020a8c 10044 dump_reg_info (dump_file);
562d71e8 10045 dump_flow_info (dump_file, dump_flags);
5923a5e7 10046 }
10047
e2050933 10048 timevar_push (TV_VAR_TRACKING_EMIT);
9845d120 10049 vt_emit_notes ();
e2050933 10050 timevar_pop (TV_VAR_TRACKING_EMIT);
9845d120 10051
5923a5e7 10052 vt_finalize ();
9845d120 10053 vt_debug_insns_local (false);
2a1990e9 10054 return 0;
5923a5e7 10055}
76f4ab1c 10056
10057unsigned int
10058variable_tracking_main (void)
10059{
10060 unsigned int ret;
10061 int save = flag_var_tracking_assignments;
10062
10063 ret = variable_tracking_main_1 ();
10064
10065 flag_var_tracking_assignments = save;
10066
10067 return ret;
10068}
77fce4cd 10069\f
10070static bool
10071gate_handle_var_tracking (void)
10072{
8a42230a 10073 return (flag_var_tracking && !targetm.delay_vartrack);
77fce4cd 10074}
10075
10076
10077
20099e35 10078struct rtl_opt_pass pass_variable_tracking =
77fce4cd 10079{
20099e35 10080 {
10081 RTL_PASS,
77fce4cd 10082 "vartrack", /* name */
10083 gate_handle_var_tracking, /* gate */
10084 variable_tracking_main, /* execute */
10085 NULL, /* sub */
10086 NULL, /* next */
10087 0, /* static_pass_number */
10088 TV_VAR_TRACKING, /* tv_id */
10089 0, /* properties_required */
10090 0, /* properties_provided */
10091 0, /* properties_destroyed */
10092 0, /* todo_flags_start */
771e2890 10093 TODO_verify_rtl_sharing /* todo_flags_finish */
20099e35 10094 }
77fce4cd 10095};