1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the variable tracking pass. It computes where
21    variables are located (in which registers or where in memory) at each
22    position in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
27
28 How does the variable tracking pass work?
29
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35    pre-modifying stack adjustment < use < use with no var < call insn
36    < clobber < set < post-modifying stack adjustment
37
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41    The IN set for basic block BB is computed as the union of the OUT sets of
42    BB's predecessors; the OUT set for BB is copied from the IN set for BB and
43    then updated according to the micro operations in BB.
44
45    The IN and OUT sets for basic blocks consist of a current stack adjustment
46    (used for adjusting the offset of variables addressed via the stack
47    pointer), the table of structures describing the locations of parts of a
48    variable, and a linked list for each physical register.
49    The linked list is a list of variable parts stored in the register,
50    i.e. it is a list of triplets (reg, decl, offset) where decl is
51    REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
52    efficiently deleting the appropriate variable parts when we set or clobber
53    the register.
54
55 There may be more than one variable part in a register. The linked lists
56    should be pretty short, so a list is a good data structure here.
57    For example, in the following code the register allocator may assign the
58    same register to variables A and B, and both of them are stored in that
59    register in CODE:
60
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
70
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72    are emitted at appropriate positions in the RTL code.  Each such note
73    describes the location of one variable at the point in the instruction
74    stream where the note is.  There is no need to emit a note for each variable
75    before each instruction; we only emit these notes where the location of a
76    variable changes (this means that we also emit notes for changes between the
77    OUT set of the previous block and the IN set of the current block).
78
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83       or a parallel of register/memory references (for large variables
84       which consist of several parts, for example long long).
85
86 */
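
/* A hedged illustration (not part of the pass, kept out of the build):
   the fixed point described above can be pictured as a naive worklist
   loop whose confluence operator is set union and whose transfer
   function replays a block's micro operations.  The real driver is
   vt_find_locations below, which uses dataflow_set_union and
   compute_bb_dataflow.  */
#if 0
  bool changed = true;
  while (changed)
    {
      basic_block bb;
      changed = false;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  edge e;
	  edge_iterator ei;
	  /* IN[bb] = union of OUT[pred] over all predecessors.  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
	  /* OUT[bb] = IN[bb] transformed by BB's micro operations;
	     compute_bb_dataflow returns whether OUT changed.  */
	  changed |= compute_bb_dataflow (bb);
	}
    }
#endif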
87
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "varasm.h"
95 #include "stor-layout.h"
96 #include "hash-map.h"
97 #include "hash-table.h"
98 #include "basic-block.h"
99 #include "tm_p.h"
100 #include "hard-reg-set.h"
101 #include "flags.h"
102 #include "insn-config.h"
103 #include "reload.h"
104 #include "sbitmap.h"
105 #include "alloc-pool.h"
106 #include "fibheap.h"
107 #include "regs.h"
108 #include "expr.h"
109 #include "tree-pass.h"
110 #include "bitmap.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "target.h"
115 #include "params.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "recog.h"
120 #include "alias.h"
121
122 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
123 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
124 Currently the value is the same as IDENTIFIER_NODE, which has such
125 a property. If this compile time assertion ever fails, make sure that
126 the new tree code that equals (int) VALUE has the same property. */
127 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
128
129 /* Type of micro operation. */
130 enum micro_operation_type
131 {
132 MO_USE, /* Use location (REG or MEM). */
133   MO_USE_NO_VAR, /* Use location which is not associated with a variable
134 or the variable is not trackable. */
135 MO_VAL_USE, /* Use location which is associated with a value. */
136 MO_VAL_LOC, /* Use location which appears in a debug insn. */
137 MO_VAL_SET, /* Set location associated with a value. */
138 MO_SET, /* Set location. */
139 MO_COPY, /* Copy the same portion of a variable from one
140 location to another. */
141 MO_CLOBBER, /* Clobber location. */
142 MO_CALL, /* Call insn. */
143 MO_ADJUST /* Adjust stack pointer. */
144
145 };
146
147 static const char * const ATTRIBUTE_UNUSED
148 micro_operation_type_name[] = {
149 "MO_USE",
150 "MO_USE_NO_VAR",
151 "MO_VAL_USE",
152 "MO_VAL_LOC",
153 "MO_VAL_SET",
154 "MO_SET",
155 "MO_COPY",
156 "MO_CLOBBER",
157 "MO_CALL",
158 "MO_ADJUST"
159 };
160
161 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
162 Notes emitted as AFTER_CALL are to take effect during the call,
163 rather than after the call. */
164 enum emit_note_where
165 {
166 EMIT_NOTE_BEFORE_INSN,
167 EMIT_NOTE_AFTER_INSN,
168 EMIT_NOTE_AFTER_CALL_INSN
169 };
170
171 /* Structure holding information about a micro operation.  */
172 typedef struct micro_operation_def
173 {
174 /* Type of micro operation. */
175 enum micro_operation_type type;
176
177 /* The instruction which the micro operation is in, for MO_USE,
178 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
179 instruction or note in the original flow (before any var-tracking
180 notes are inserted, to simplify emission of notes), for MO_SET
181 and MO_CLOBBER. */
182 rtx_insn *insn;
183
184 union {
185 /* Location. For MO_SET and MO_COPY, this is the SET that
186 performs the assignment, if known, otherwise it is the target
187 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
188 CONCAT of the VALUE and the LOC associated with it. For
189 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
190 associated with it. */
191 rtx loc;
192
193 /* Stack adjustment. */
194 HOST_WIDE_INT adjust;
195 } u;
196 } micro_operation;
197
198
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
202
203 /* Return true if a decl_or_value DV is a DECL or NULL. */
204 static inline bool
205 dv_is_decl_p (decl_or_value dv)
206 {
207 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
208 }
209
210 /* Return true if a decl_or_value is a VALUE rtl. */
211 static inline bool
212 dv_is_value_p (decl_or_value dv)
213 {
214 return dv && !dv_is_decl_p (dv);
215 }
216
217 /* Return the decl in the decl_or_value. */
218 static inline tree
219 dv_as_decl (decl_or_value dv)
220 {
221 gcc_checking_assert (dv_is_decl_p (dv));
222 return (tree) dv;
223 }
224
225 /* Return the value in the decl_or_value. */
226 static inline rtx
227 dv_as_value (decl_or_value dv)
228 {
229 gcc_checking_assert (dv_is_value_p (dv));
230   return (rtx) dv;
231 }
232
233 /* Return the opaque pointer in the decl_or_value. */
234 static inline void *
235 dv_as_opaque (decl_or_value dv)
236 {
237 return dv;
238 }
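
/* A minimal usage sketch of the accessors above (an illustrative
   helper only; nothing in the pass calls it): classify a
   decl_or_value with the predicates defined above.  */
static inline const char *
dv_kind_name (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return "value";	/* A cselib VALUE rtx.  */
  else if (dv)
    return "decl";	/* A tree declaration.  */
  else
    return "null";
}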
239
240
241 /* Description of location of a part of a variable. The content of a physical
242 register is described by a chain of these structures.
243    The chains are pretty short (usually 1 or 2 elements) and thus a
244    chain is the best data structure here.  */
245 typedef struct attrs_def
246 {
247 /* Pointer to next member of the list. */
248 struct attrs_def *next;
249
250 /* The rtx of register. */
251 rtx loc;
252
253 /* The declaration corresponding to LOC. */
254 decl_or_value dv;
255
256 /* Offset from start of DECL. */
257 HOST_WIDE_INT offset;
258 } *attrs;
259
260 /* Structure for chaining the locations. */
261 typedef struct location_chain_def
262 {
263 /* Next element in the chain. */
264 struct location_chain_def *next;
265
266 /* The location (REG, MEM or VALUE). */
267 rtx loc;
268
269 /* The "value" stored in this location. */
270 rtx set_src;
271
272 /* Initialized? */
273 enum var_init_status init;
274 } *location_chain;
275
276 /* A vector of loc_exp_dep holds the active dependencies of a one-part
277 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
278    location of DV.  Each entry is also part of the VALUE's linked list of
279    backlinks back to DV.  */
280 typedef struct loc_exp_dep_s
281 {
282 /* The dependent DV. */
283 decl_or_value dv;
284 /* The dependency VALUE or DECL_DEBUG. */
285 rtx value;
286 /* The next entry in VALUE's backlinks list. */
287 struct loc_exp_dep_s *next;
288 /* A pointer to the pointer to this entry (head or prev's next) in
289 the doubly-linked list. */
290 struct loc_exp_dep_s **pprev;
291 } loc_exp_dep;
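
/* A minimal sketch of the PPREV idiom above (illustrative only; the
   pass performs this unlinking inline in its own routines): an entry
   can be removed from the doubly-linked backlinks list in O(1),
   without knowing the list head.  */
static inline void
loc_exp_dep_unlink_sketch (loc_exp_dep *led)
{
  if (led->next)
    led->next->pprev = led->pprev;
  *led->pprev = led->next;
}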
292
293
294 /* This data structure holds information about the depth of a variable
295 expansion. */
296 typedef struct expand_depth_struct
297 {
298 /* This measures the complexity of the expanded expression. It
299 grows by one for each level of expansion that adds more than one
300 operand. */
301 int complexity;
302 /* This counts the number of ENTRY_VALUE expressions in an
303 expansion. We want to minimize their use. */
304 int entryvals;
305 } expand_depth;
306
307 /* This data structure is allocated for one-part variables at the time
308 of emitting notes. */
309 struct onepart_aux
310 {
311 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
312 computation used the expansion of this variable, and that ought
313 to be notified should this variable change. If the DV's cur_loc
314 expanded to NULL, all components of the loc list are regarded as
315 active, so that any changes in them give us a chance to get a
316 location. Otherwise, only components of the loc that expanded to
317 non-NULL are regarded as active dependencies. */
318 loc_exp_dep *backlinks;
319 /* This holds the LOC that was expanded into cur_loc. We need only
320 mark a one-part variable as changed if the FROM loc is removed,
321 or if it has no known location and a loc is added, or if it gets
322 a change notification from any of its active dependencies. */
323 rtx from;
324 /* The depth of the cur_loc expression. */
325 expand_depth depth;
326   /* Dependencies actively used when expanding FROM into cur_loc.  */
327 vec<loc_exp_dep, va_heap, vl_embed> deps;
328 };
329
330 /* Structure describing one part of a variable.  */
331 typedef struct variable_part_def
332 {
333 /* Chain of locations of the part. */
334 location_chain loc_chain;
335
336 /* Location which was last emitted to location list. */
337 rtx cur_loc;
338
339 union variable_aux
340 {
341 /* The offset in the variable, if !var->onepart. */
342 HOST_WIDE_INT offset;
343
344 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
345 struct onepart_aux *onepaux;
346 } aux;
347 } variable_part;
348
349 /* Maximum number of location parts. */
350 #define MAX_VAR_PARTS 16
351
352 /* Enumeration type used to discriminate various types of one-part
353 variables. */
354 typedef enum onepart_enum
355 {
356 /* Not a one-part variable. */
357 NOT_ONEPART = 0,
358 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
359 ONEPART_VDECL = 1,
360 /* A DEBUG_EXPR_DECL. */
361 ONEPART_DEXPR = 2,
362 /* A VALUE. */
363 ONEPART_VALUE = 3
364 } onepart_enum_t;
365
366 /* Structure describing where the variable is located. */
367 typedef struct variable_def
368 {
369 /* The declaration of the variable, or an RTL value being handled
370 like a declaration. */
371 decl_or_value dv;
372
373 /* Reference count. */
374 int refcount;
375
376 /* Number of variable parts. */
377 char n_var_parts;
378
379 /* What type of DV this is, according to enum onepart_enum. */
380 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
381
382 /* True if this variable_def struct is currently in the
383 changed_variables hash table. */
384 bool in_changed_variables;
385
386 /* The variable parts. */
387 variable_part var_part[1];
388 } *variable;
389 typedef const struct variable_def *const_variable;
390
391 /* Pointer to the BB's information specific to the variable tracking pass.  */
392 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
393
394 /* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
395 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
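
/* Illustrative caution, not a case from this file: because the macro
   above expands MEM twice, its argument must be a side-effect-free
   expression, e.g. a plain rtx variable.  */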
396
397 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
398
399 /* Access VAR's Ith part's offset, checking that it's not a one-part
400 variable. */
401 #define VAR_PART_OFFSET(var, i) __extension__ \
402 (*({ variable const __v = (var); \
403 gcc_checking_assert (!__v->onepart); \
404 &__v->var_part[(i)].aux.offset; }))
405
406 /* Access VAR's one-part auxiliary data, checking that it is a
407 one-part variable. */
408 #define VAR_LOC_1PAUX(var) __extension__ \
409 (*({ variable const __v = (var); \
410 gcc_checking_assert (__v->onepart); \
411 &__v->var_part[0].aux.onepaux; }))
412
413 #else
414 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
415 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
416 #endif
417
418 /* These are accessor macros for the one-part auxiliary data. When
419 convenient for users, they're guarded by tests that the data was
420 allocated. */
421 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
422 ? VAR_LOC_1PAUX (var)->backlinks \
423 : NULL)
424 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
425 ? &VAR_LOC_1PAUX (var)->backlinks \
426 : NULL)
427 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
428 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
429 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
430 ? &VAR_LOC_1PAUX (var)->deps \
431 : NULL)
432
433
434
435 typedef unsigned int dvuid;
436
437 /* Return the uid of DV. */
438
439 static inline dvuid
440 dv_uid (decl_or_value dv)
441 {
442 if (dv_is_value_p (dv))
443 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
444 else
445 return DECL_UID (dv_as_decl (dv));
446 }
447
448 /* Compute the hash from the uid. */
449
450 static inline hashval_t
451 dv_uid2hash (dvuid uid)
452 {
453 return uid;
454 }
455
456 /* The hash function for a decl_or_value, used by the variable hash tables.  */
457
458 static inline hashval_t
459 dv_htab_hash (decl_or_value dv)
460 {
461 return dv_uid2hash (dv_uid (dv));
462 }
463
464 static void variable_htab_free (void *);
465
466 /* Variable hashtable helpers. */
467
468 struct variable_hasher
469 {
470 typedef variable_def value_type;
471 typedef void compare_type;
472 static inline hashval_t hash (const value_type *);
473 static inline bool equal (const value_type *, const compare_type *);
474 static inline void remove (value_type *);
475 };
476
477 /* The hash function for variable_htab, computes the hash value
478    from the declaration or VALUE of variable V.  */
479
480 inline hashval_t
481 variable_hasher::hash (const value_type *v)
482 {
483 return dv_htab_hash (v->dv);
484 }
485
486 /* Compare the declaration of variable V with declaration Y.  */
487
488 inline bool
489 variable_hasher::equal (const value_type *v, const compare_type *y)
490 {
491 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
492
493 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
494 }
495
496 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
497
498 inline void
499 variable_hasher::remove (value_type *var)
500 {
501 variable_htab_free (var);
502 }
503
504 typedef hash_table<variable_hasher> variable_table_type;
505 typedef variable_table_type::iterator variable_iterator_type;
506
507 /* Structure for passing some other parameters to function
508 emit_note_insn_var_location. */
509 typedef struct emit_note_data_def
510 {
511 /* The instruction which the note will be emitted before/after. */
512 rtx_insn *insn;
513
514   /* Where will the note be emitted (before/after INSN)?  */
515 enum emit_note_where where;
516
517 /* The variables and values active at this point. */
518 variable_table_type *vars;
519 } emit_note_data;
520
521 /* Structure holding a refcounted hash table.  If refcount > 1,
522    it must first be unshared before being modified.  */
523 typedef struct shared_hash_def
524 {
525 /* Reference count. */
526 int refcount;
527
528 /* Actual hash table. */
529 variable_table_type *htab;
530 } *shared_hash;
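
/* A hedged sketch of the copy-on-write discipline implied above; the
   primitives used (shared_hash_copy, shared_hash_shared,
   shared_hash_unshare) are defined further below, so this fragment is
   illustrative only and kept out of the build.  */
#if 0
  shared_hash vars2 = shared_hash_copy (vars1);	/* Refcount becomes 2.  */
  /* Before modifying through VARS2, make it private again so that
     VARS1 keeps its own, unchanged view.  */
  if (shared_hash_shared (vars2))
    vars2 = shared_hash_unshare (vars2);	/* Deep copy, refcount 1.  */
#endif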
531
532 /* Structure holding the IN or OUT set for a basic block. */
533 typedef struct dataflow_set_def
534 {
535 /* Adjustment of stack offset. */
536 HOST_WIDE_INT stack_adjust;
537
538 /* Attributes for registers (lists of attrs). */
539 attrs regs[FIRST_PSEUDO_REGISTER];
540
541 /* Variable locations. */
542 shared_hash vars;
543
544   /* Vars that are currently being traversed.  */
545 shared_hash traversed_vars;
546 } dataflow_set;
547
548 /* The structure (one for each basic block) containing the information
549 needed for variable tracking. */
550 typedef struct variable_tracking_info_def
551 {
552 /* The vector of micro operations. */
553 vec<micro_operation> mos;
554
555 /* The IN and OUT set for dataflow analysis. */
556 dataflow_set in;
557 dataflow_set out;
558
559 /* The permanent-in dataflow set for this block. This is used to
560 hold values for which we had to compute entry values. ??? This
561 should probably be dynamically allocated, to avoid using more
562 memory in non-debug builds. */
563 dataflow_set *permp;
564
565 /* Has the block been visited in DFS? */
566 bool visited;
567
568 /* Has the block been flooded in VTA? */
569 bool flooded;
570
571 } *variable_tracking_info;
572
573 /* Alloc pool for struct attrs_def. */
574 static alloc_pool attrs_pool;
575
576 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
577 static alloc_pool var_pool;
578
579 /* Alloc pool for struct variable_def with a single var_part entry. */
580 static alloc_pool valvar_pool;
581
582 /* Alloc pool for struct location_chain_def. */
583 static alloc_pool loc_chain_pool;
584
585 /* Alloc pool for struct shared_hash_def. */
586 static alloc_pool shared_hash_pool;
587
588 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
589 static alloc_pool loc_exp_dep_pool;
590
591 /* Changed variables, notes will be emitted for them. */
592 static variable_table_type *changed_variables;
593
594 /* Shall notes be emitted? */
595 static bool emit_notes;
596
597 /* Values whose dynamic location lists have gone empty, but whose
598 cselib location lists are still usable. Use this to hold the
599    current location, the backlinks, etc., during emit_notes.  */
600 static variable_table_type *dropped_values;
601
602 /* Empty shared hashtable. */
603 static shared_hash empty_shared_hash;
604
605 /* Scratch register bitmap used by cselib_expand_value_rtx. */
606 static bitmap scratch_regs = NULL;
607
608 #ifdef HAVE_window_save
609 typedef struct GTY(()) parm_reg {
610 rtx outgoing;
611 rtx incoming;
612 } parm_reg_t;
613
614
615 /* Vector of windowed parameter registers, if any. */
616 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
617 #endif
618
619 /* Variable used to tell whether cselib_process_insn called our hook. */
620 static bool cselib_hook_called;
621
622 /* Local function prototypes. */
623 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
624 HOST_WIDE_INT *);
625 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
626 HOST_WIDE_INT *);
627 static bool vt_stack_adjustments (void);
628
629 static void init_attrs_list_set (attrs *);
630 static void attrs_list_clear (attrs *);
631 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
632 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
633 static void attrs_list_copy (attrs *, attrs);
634 static void attrs_list_union (attrs *, attrs);
635
636 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
637 variable var, enum var_init_status);
638 static void vars_copy (variable_table_type *, variable_table_type *);
639 static tree var_debug_decl (tree);
640 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
641 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
642 enum var_init_status, rtx);
643 static void var_reg_delete (dataflow_set *, rtx, bool);
644 static void var_regno_delete (dataflow_set *, int);
645 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
646 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
647 enum var_init_status, rtx);
648 static void var_mem_delete (dataflow_set *, rtx, bool);
649
650 static void dataflow_set_init (dataflow_set *);
651 static void dataflow_set_clear (dataflow_set *);
652 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
653 static int variable_union_info_cmp_pos (const void *, const void *);
654 static void dataflow_set_union (dataflow_set *, dataflow_set *);
655 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
656 static bool canon_value_cmp (rtx, rtx);
657 static int loc_cmp (rtx, rtx);
658 static bool variable_part_different_p (variable_part *, variable_part *);
659 static bool onepart_variable_different_p (variable, variable);
660 static bool variable_different_p (variable, variable);
661 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
662 static void dataflow_set_destroy (dataflow_set *);
663
664 static bool contains_symbol_ref (rtx);
665 static bool track_expr_p (tree, bool);
666 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
667 static int add_uses (rtx *, void *);
668 static void add_uses_1 (rtx *, void *);
669 static void add_stores (rtx, const_rtx, void *);
670 static bool compute_bb_dataflow (basic_block);
671 static bool vt_find_locations (void);
672
673 static void dump_attrs_list (attrs);
674 static void dump_var (variable);
675 static void dump_vars (variable_table_type *);
676 static void dump_dataflow_set (dataflow_set *);
677 static void dump_dataflow_sets (void);
678
679 static void set_dv_changed (decl_or_value, bool);
680 static void variable_was_changed (variable, dataflow_set *);
681 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
682 decl_or_value, HOST_WIDE_INT,
683 enum var_init_status, rtx);
684 static void set_variable_part (dataflow_set *, rtx,
685 decl_or_value, HOST_WIDE_INT,
686 enum var_init_status, rtx, enum insert_option);
687 static variable_def **clobber_slot_part (dataflow_set *, rtx,
688 variable_def **, HOST_WIDE_INT, rtx);
689 static void clobber_variable_part (dataflow_set *, rtx,
690 decl_or_value, HOST_WIDE_INT, rtx);
691 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
692 HOST_WIDE_INT);
693 static void delete_variable_part (dataflow_set *, rtx,
694 decl_or_value, HOST_WIDE_INT);
695 static void emit_notes_in_bb (basic_block, dataflow_set *);
696 static void vt_emit_notes (void);
697
698 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
699 static void vt_add_function_parameters (void);
700 static bool vt_initialize (void);
701 static void vt_finalize (void);
702
703 /* Given a SET, calculate the amount of stack adjustment it contains,
704    both PRE- and POST-modifying the stack pointer.
705    This function is similar to stack_adjust_offset.  */
706
707 static void
708 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
709 HOST_WIDE_INT *post)
710 {
711 rtx src = SET_SRC (pattern);
712 rtx dest = SET_DEST (pattern);
713 enum rtx_code code;
714
715 if (dest == stack_pointer_rtx)
716 {
717 /* (set (reg sp) (plus (reg sp) (const_int))) */
718 code = GET_CODE (src);
719 if (! (code == PLUS || code == MINUS)
720 || XEXP (src, 0) != stack_pointer_rtx
721 || !CONST_INT_P (XEXP (src, 1)))
722 return;
723
724 if (code == MINUS)
725 *post += INTVAL (XEXP (src, 1));
726 else
727 *post -= INTVAL (XEXP (src, 1));
728 }
729 else if (MEM_P (dest))
730 {
731 /* (set (mem (pre_dec (reg sp))) (foo)) */
732 src = XEXP (dest, 0);
733 code = GET_CODE (src);
734
735 switch (code)
736 {
737 case PRE_MODIFY:
738 case POST_MODIFY:
739 if (XEXP (src, 0) == stack_pointer_rtx)
740 {
741 rtx val = XEXP (XEXP (src, 1), 1);
742 /* We handle only adjustments by constant amount. */
743 	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
744 			  && CONST_INT_P (val));
745
746 if (code == PRE_MODIFY)
747 *pre -= INTVAL (val);
748 else
749 *post -= INTVAL (val);
750 break;
751 }
752 return;
753
754 case PRE_DEC:
755 if (XEXP (src, 0) == stack_pointer_rtx)
756 {
757 *pre += GET_MODE_SIZE (GET_MODE (dest));
758 break;
759 }
760 return;
761
762 case POST_DEC:
763 if (XEXP (src, 0) == stack_pointer_rtx)
764 {
765 *post += GET_MODE_SIZE (GET_MODE (dest));
766 break;
767 }
768 return;
769
770 case PRE_INC:
771 if (XEXP (src, 0) == stack_pointer_rtx)
772 {
773 *pre -= GET_MODE_SIZE (GET_MODE (dest));
774 break;
775 }
776 return;
777
778 case POST_INC:
779 if (XEXP (src, 0) == stack_pointer_rtx)
780 {
781 *post -= GET_MODE_SIZE (GET_MODE (dest));
782 break;
783 }
784 return;
785
786 default:
787 return;
788 }
789 }
790 }
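
/* As a concrete illustration of the cases above (assuming a 4-byte
   SImode access; register names are illustrative): the push
     (set (mem:SI (pre_dec (reg sp))) (reg r0))
   adds 4 to *PRE, while the explicit adjustment
     (set (reg sp) (plus (reg sp) (const_int -16)))
   adds 16 to *POST.  */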
791
792 /* Given an INSN, calculate the amount of stack adjustment it contains,
793    both PRE- and POST-modifying the stack pointer.  */
794
795 static void
796 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
797 HOST_WIDE_INT *post)
798 {
799 rtx pattern;
800
801 *pre = 0;
802 *post = 0;
803
804 pattern = PATTERN (insn);
805 if (RTX_FRAME_RELATED_P (insn))
806 {
807 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
808 if (expr)
809 pattern = XEXP (expr, 0);
810 }
811
812 if (GET_CODE (pattern) == SET)
813 stack_adjust_offset_pre_post (pattern, pre, post);
814 else if (GET_CODE (pattern) == PARALLEL
815 || GET_CODE (pattern) == SEQUENCE)
816 {
817 int i;
818
819 /* There may be stack adjustments inside compound insns. Search
820 for them. */
821 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
822 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
823 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
824 }
825 }
826
827 /* Compute stack adjustments for all blocks by traversing DFS tree.
828 Return true when the adjustments on all incoming edges are consistent.
829 Heavily borrowed from pre_and_rev_post_order_compute. */
830
831 static bool
832 vt_stack_adjustments (void)
833 {
834 edge_iterator *stack;
835 int sp;
836
837 /* Initialize entry block. */
838 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
839 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust =
840 INCOMING_FRAME_SP_OFFSET;
841 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust =
842 INCOMING_FRAME_SP_OFFSET;
843
844 /* Allocate stack for back-tracking up CFG. */
845 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
846 sp = 0;
847
848 /* Push the first edge on to the stack. */
849 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
850
851 while (sp)
852 {
853 edge_iterator ei;
854 basic_block src;
855 basic_block dest;
856
857 /* Look at the edge on the top of the stack. */
858 ei = stack[sp - 1];
859 src = ei_edge (ei)->src;
860 dest = ei_edge (ei)->dest;
861
862 /* Check if the edge destination has been visited yet. */
863 if (!VTI (dest)->visited)
864 {
865 rtx_insn *insn;
866 HOST_WIDE_INT pre, post, offset;
867 VTI (dest)->visited = true;
868 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
869
870 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
871 for (insn = BB_HEAD (dest);
872 insn != NEXT_INSN (BB_END (dest));
873 insn = NEXT_INSN (insn))
874 if (INSN_P (insn))
875 {
876 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
877 offset += pre + post;
878 }
879
880 VTI (dest)->out.stack_adjust = offset;
881
882 if (EDGE_COUNT (dest->succs) > 0)
883 /* Since the DEST node has been visited for the first
884 time, check its successors. */
885 stack[sp++] = ei_start (dest->succs);
886 }
887 else
888 {
889 /* We can end up with different stack adjustments for the exit block
890 of a shrink-wrapped function if stack_adjust_offset_pre_post
891 doesn't understand the rtx pattern used to restore the stack
892 pointer in the epilogue. For example, on s390(x), the stack
893 pointer is often restored via a load-multiple instruction
894 and so no stack_adjust offset is recorded for it. This means
895 	     that the stack offset at the end of the epilogue block is
896 	     the same as the offset before the epilogue, whereas other paths
897 to the exit block will have the correct stack_adjust.
898
899 It is safe to ignore these differences because (a) we never
900 use the stack_adjust for the exit block in this pass and
901 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
902 function are correct.
903
904 We must check whether the adjustments on other edges are
905 the same though. */
906 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
907 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
908 {
909 free (stack);
910 return false;
911 }
912
913 if (! ei_one_before_end_p (ei))
914 /* Go to the next edge. */
915 ei_next (&stack[sp - 1]);
916 else
917 /* Return to previous level if there are no more edges. */
918 sp--;
919 }
920 }
921
922 free (stack);
923 return true;
924 }
925
926 /* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx or
927    hard_frame_pointer_rtx is being mapped to it, and the offset to apply.  */
928 static rtx cfa_base_rtx;
929 static HOST_WIDE_INT cfa_base_offset;
930
931 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
932 or hard_frame_pointer_rtx. */
933
934 static inline rtx
935 compute_cfa_pointer (HOST_WIDE_INT adjustment)
936 {
937 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
938 }
939
940 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
941 or -1 if the replacement shouldn't be done. */
942 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
943
944 /* Data for adjust_mems callback. */
945
946 struct adjust_mem_data
947 {
948 bool store;
949 enum machine_mode mem_mode;
950 HOST_WIDE_INT stack_adjust;
951 rtx_expr_list *side_effects;
952 };
953
954 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
955    transformation of wider-mode arithmetic to a narrower mode,
956 -1 if it is suitable and subexpressions shouldn't be
957 traversed and 0 if it is suitable and subexpressions should
958 be traversed. Called through for_each_rtx. */
959
960 static int
961 use_narrower_mode_test (rtx *loc, void *data)
962 {
963 rtx subreg = (rtx) data;
964
965 if (CONSTANT_P (*loc))
966 return -1;
967 switch (GET_CODE (*loc))
968 {
969 case REG:
970 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
971 return 1;
972 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
973 *loc, subreg_lowpart_offset (GET_MODE (subreg),
974 GET_MODE (*loc))))
975 return 1;
976 return -1;
977 case PLUS:
978 case MINUS:
979 case MULT:
980 return 0;
981 case ASHIFT:
982 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
983 return 1;
984 else
985 return -1;
986 default:
987 return 1;
988 }
989 }
990
991 /* Transform X into narrower mode MODE from wider mode WMODE. */
992
993 static rtx
994 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
995 {
996 rtx op0, op1;
997 if (CONSTANT_P (x))
998 return lowpart_subreg (mode, x, wmode);
999 switch (GET_CODE (x))
1000 {
1001 case REG:
1002 return lowpart_subreg (mode, x, wmode);
1003 case PLUS:
1004 case MINUS:
1005 case MULT:
1006 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1007 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1008 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1009 case ASHIFT:
1010 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1011 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
1012 default:
1013 gcc_unreachable ();
1014 }
1015 }
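
/* For example (little-endian target, illustrative only): narrowing
   (plus:DI (reg:DI r1) (const_int 1)) to SImode yields
     (plus:SI (subreg:SI (reg:DI r1) 0) (const_int 1)).  */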
1016
1017 /* Helper function for adjusting used MEMs. */
1018
1019 static rtx
1020 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1021 {
1022 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1023 rtx mem, addr = loc, tem;
1024 enum machine_mode mem_mode_save;
1025 bool store_save;
1026 switch (GET_CODE (loc))
1027 {
1028 case REG:
1029 /* Don't do any sp or fp replacements outside of MEM addresses
1030 on the LHS. */
1031 if (amd->mem_mode == VOIDmode && amd->store)
1032 return loc;
1033 if (loc == stack_pointer_rtx
1034 && !frame_pointer_needed
1035 && cfa_base_rtx)
1036 return compute_cfa_pointer (amd->stack_adjust);
1037 else if (loc == hard_frame_pointer_rtx
1038 && frame_pointer_needed
1039 && hard_frame_pointer_adjustment != -1
1040 && cfa_base_rtx)
1041 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1042 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1043 return loc;
1044 case MEM:
1045 mem = loc;
1046 if (!amd->store)
1047 {
1048 mem = targetm.delegitimize_address (mem);
1049 if (mem != loc && !MEM_P (mem))
1050 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1051 }
1052
1053 addr = XEXP (mem, 0);
1054 mem_mode_save = amd->mem_mode;
1055 amd->mem_mode = GET_MODE (mem);
1056 store_save = amd->store;
1057 amd->store = false;
1058 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1059 amd->store = store_save;
1060 amd->mem_mode = mem_mode_save;
1061 if (mem == loc)
1062 addr = targetm.delegitimize_address (addr);
1063 if (addr != XEXP (mem, 0))
1064 mem = replace_equiv_address_nv (mem, addr);
1065 if (!amd->store)
1066 mem = avoid_constant_pool_reference (mem);
1067 return mem;
1068 case PRE_INC:
1069 case PRE_DEC:
1070 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1071 gen_int_mode (GET_CODE (loc) == PRE_INC
1072 ? GET_MODE_SIZE (amd->mem_mode)
1073 : -GET_MODE_SIZE (amd->mem_mode),
1074 GET_MODE (loc)));
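      /* FALLTHRU */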
1075 case POST_INC:
1076 case POST_DEC:
1077 if (addr == loc)
1078 addr = XEXP (loc, 0);
1079 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1080 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1081 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1082 gen_int_mode ((GET_CODE (loc) == PRE_INC
1083 || GET_CODE (loc) == POST_INC)
1084 ? GET_MODE_SIZE (amd->mem_mode)
1085 : -GET_MODE_SIZE (amd->mem_mode),
1086 GET_MODE (loc)));
1087 store_save = amd->store;
1088 amd->store = false;
1089 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1090 amd->store = store_save;
1091 amd->side_effects = alloc_EXPR_LIST (0,
1092 gen_rtx_SET (VOIDmode,
1093 XEXP (loc, 0), tem),
1094 amd->side_effects);
1095 return addr;
1096 case PRE_MODIFY:
1097 addr = XEXP (loc, 1);
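      /* FALLTHRU */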
1098 case POST_MODIFY:
1099 if (addr == loc)
1100 addr = XEXP (loc, 0);
1101 gcc_assert (amd->mem_mode != VOIDmode);
1102 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1103 store_save = amd->store;
1104 amd->store = false;
1105 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1106 adjust_mems, data);
1107 amd->store = store_save;
1108 amd->side_effects = alloc_EXPR_LIST (0,
1109 gen_rtx_SET (VOIDmode,
1110 XEXP (loc, 0), tem),
1111 amd->side_effects);
1112 return addr;
1113 case SUBREG:
1114 /* First try without delegitimization of whole MEMs and
1115 avoid_constant_pool_reference, which is more likely to succeed. */
1116 store_save = amd->store;
1117 amd->store = true;
1118 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1119 data);
1120 amd->store = store_save;
1121 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1122 if (mem == SUBREG_REG (loc))
1123 {
1124 tem = loc;
1125 goto finish_subreg;
1126 }
1127 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1128 GET_MODE (SUBREG_REG (loc)),
1129 SUBREG_BYTE (loc));
1130 if (tem)
1131 goto finish_subreg;
1132 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1133 GET_MODE (SUBREG_REG (loc)),
1134 SUBREG_BYTE (loc));
1135 if (tem == NULL_RTX)
1136 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1137 finish_subreg:
1138 if (MAY_HAVE_DEBUG_INSNS
1139 && GET_CODE (tem) == SUBREG
1140 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1141 || GET_CODE (SUBREG_REG (tem)) == MINUS
1142 || GET_CODE (SUBREG_REG (tem)) == MULT
1143 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1144 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1145 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1146 && GET_MODE_SIZE (GET_MODE (tem))
1147 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1148 && subreg_lowpart_p (tem)
1149 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1150 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1151 GET_MODE (SUBREG_REG (tem)));
1152 return tem;
1153 case ASM_OPERANDS:
1154       /* Don't do any replacements in the second and following
1155 	 ASM_OPERANDS of an inline-asm with multiple sets.
1156 	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1157 	 and ASM_OPERANDS_LABEL_VEC need to be equal between
1158 	 all the ASM_OPERANDS in the insn and adjust_insn will
1159 	 fix this up.  */
1160 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1161 return loc;
1162 break;
1163 default:
1164 break;
1165 }
1166 return NULL_RTX;
1167 }
1168
1169 /* Helper function for replacement of uses. */
1170
1171 static void
1172 adjust_mem_uses (rtx *x, void *data)
1173 {
1174 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1175 if (new_x != *x)
1176 validate_change (NULL_RTX, x, new_x, true);
1177 }
1178
1179 /* Helper function for replacement of stores. */
1180
1181 static void
1182 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1183 {
1184 if (MEM_P (loc))
1185 {
1186 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1187 adjust_mems, data);
1188 if (new_dest != SET_DEST (expr))
1189 {
1190 rtx xexpr = CONST_CAST_RTX (expr);
1191 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1192 }
1193 }
1194 }
1195
1196 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1197 replace them with their value in the insn and add the side-effects
1198 as other sets to the insn. */
1199
1200 static void
1201 adjust_insn (basic_block bb, rtx_insn *insn)
1202 {
1203 struct adjust_mem_data amd;
1204 rtx set;
1205
1206 #ifdef HAVE_window_save
1207 /* If the target machine has an explicit window save instruction, the
1208 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1209 if (RTX_FRAME_RELATED_P (insn)
1210 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1211 {
1212 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1213 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1214 parm_reg_t *p;
1215
1216 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1217 {
1218 XVECEXP (rtl, 0, i * 2)
1219 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1220 /* Do not clobber the attached DECL, but only the REG. */
1221 XVECEXP (rtl, 0, i * 2 + 1)
1222 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1223 gen_raw_REG (GET_MODE (p->outgoing),
1224 REGNO (p->outgoing)));
1225 }
1226
1227 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1228 return;
1229 }
1230 #endif
1231
1232 amd.mem_mode = VOIDmode;
1233 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1234 amd.side_effects = NULL;
1235
1236 amd.store = true;
1237 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1238
1239 amd.store = false;
1240 if (GET_CODE (PATTERN (insn)) == PARALLEL
1241 && asm_noperands (PATTERN (insn)) > 0
1242 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1243 {
1244 rtx body, set0;
1245 int i;
1246
1247       /* An inline-asm with multiple sets is a tiny bit more complicated,
1248 	 because the 3 vectors in ASM_OPERANDS need to be shared between
1249 	 all ASM_OPERANDS in the instruction.  adjust_mems will
1250 	 not touch ASM_OPERANDS other than the first one; the asm_noperands
1251 	 test above needs to be done before that (otherwise it would fail)
1252 	 and afterwards this code fixes things up.  */
1253 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1254 body = PATTERN (insn);
1255 set0 = XVECEXP (body, 0, 0);
1256 gcc_checking_assert (GET_CODE (set0) == SET
1257 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1258 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1259 for (i = 1; i < XVECLEN (body, 0); i++)
1260 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1261 break;
1262 else
1263 {
1264 set = XVECEXP (body, 0, i);
1265 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1266 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1267 == i);
1268 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1269 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1270 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1271 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1272 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1273 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1274 {
1275 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1276 ASM_OPERANDS_INPUT_VEC (newsrc)
1277 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1278 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1279 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1280 ASM_OPERANDS_LABEL_VEC (newsrc)
1281 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1282 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1283 }
1284 }
1285 }
1286 else
1287 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1288
1289 /* For read-only MEMs containing some constant, prefer those
1290 constants. */
1291 set = single_set (insn);
1292 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1293 {
1294 rtx note = find_reg_equal_equiv_note (insn);
1295
1296 if (note && CONSTANT_P (XEXP (note, 0)))
1297 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1298 }
1299
1300 if (amd.side_effects)
1301 {
1302 rtx *pat, new_pat, s;
1303 int i, oldn, newn;
1304
1305 pat = &PATTERN (insn);
1306 if (GET_CODE (*pat) == COND_EXEC)
1307 pat = &COND_EXEC_CODE (*pat);
1308 if (GET_CODE (*pat) == PARALLEL)
1309 oldn = XVECLEN (*pat, 0);
1310 else
1311 oldn = 1;
1312 for (s = amd.side_effects, newn = 0; s; newn++)
1313 s = XEXP (s, 1);
1314 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1315 if (GET_CODE (*pat) == PARALLEL)
1316 for (i = 0; i < oldn; i++)
1317 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1318 else
1319 XVECEXP (new_pat, 0, 0) = *pat;
1320 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1321 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1322 free_EXPR_LIST_list (&amd.side_effects);
1323 validate_change (NULL_RTX, pat, new_pat, true);
1324 }
1325 }
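
/* As an illustration of the rewrite performed above (4-byte access,
   illustrative register numbers): a store through a post-increment
   pointer such as
     (set (mem:SI (post_inc (reg r2))) (reg r1))
   ends up reading the plain address (reg r2) in the MEM, while the
   side effect is appended to the pattern as an extra set in a
   PARALLEL:
     (set (reg r2) (plus (reg r2) (const_int 4))).  */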
1326
1327 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1328 static inline rtx
1329 dv_as_rtx (decl_or_value dv)
1330 {
1331 tree decl;
1332
1333 if (dv_is_value_p (dv))
1334 return dv_as_value (dv);
1335
1336 decl = dv_as_decl (dv);
1337
1338 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1339 return DECL_RTL_KNOWN_SET (decl);
1340 }
1341
1342 /* Return nonzero if a decl_or_value must not have more than one
1343 variable part. The returned value discriminates among various
1344    kinds of one-part DVs according to enum onepart_enum.  */
1345 static inline onepart_enum_t
1346 dv_onepart_p (decl_or_value dv)
1347 {
1348 tree decl;
1349
1350 if (!MAY_HAVE_DEBUG_INSNS)
1351 return NOT_ONEPART;
1352
1353 if (dv_is_value_p (dv))
1354 return ONEPART_VALUE;
1355
1356 decl = dv_as_decl (dv);
1357
1358 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1359 return ONEPART_DEXPR;
1360
1361 if (target_for_debug_bind (decl) != NULL_TREE)
1362 return ONEPART_VDECL;
1363
1364 return NOT_ONEPART;
1365 }
1366
1367 /* Return the variable pool to be used for a dv of type ONEPART. */
1368 static inline alloc_pool
1369 onepart_pool (onepart_enum_t onepart)
1370 {
1371 return onepart ? valvar_pool : var_pool;
1372 }
1373
1374 /* Build a decl_or_value out of a decl. */
1375 static inline decl_or_value
1376 dv_from_decl (tree decl)
1377 {
1378 decl_or_value dv;
1379 dv = decl;
1380 gcc_checking_assert (dv_is_decl_p (dv));
1381 return dv;
1382 }
1383
1384 /* Build a decl_or_value out of a value. */
1385 static inline decl_or_value
1386 dv_from_value (rtx value)
1387 {
1388 decl_or_value dv;
1389 dv = value;
1390 gcc_checking_assert (dv_is_value_p (dv));
1391 return dv;
1392 }
1393
1394 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1395 static inline decl_or_value
1396 dv_from_rtx (rtx x)
1397 {
1398 decl_or_value dv;
1399
1400 switch (GET_CODE (x))
1401 {
1402 case DEBUG_EXPR:
1403 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1404 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1405 break;
1406
1407 case VALUE:
1408 dv = dv_from_value (x);
1409 break;
1410
1411 default:
1412 gcc_unreachable ();
1413 }
1414
1415 return dv;
1416 }
1417
1418 extern void debug_dv (decl_or_value dv);
1419
1420 DEBUG_FUNCTION void
1421 debug_dv (decl_or_value dv)
1422 {
1423 if (dv_is_value_p (dv))
1424 debug_rtx (dv_as_value (dv));
1425 else
1426 debug_generic_stmt (dv_as_decl (dv));
1427 }
1428
1429 static void loc_exp_dep_clear (variable var);
1430
1431 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1432
1433 static void
1434 variable_htab_free (void *elem)
1435 {
1436 int i;
1437 variable var = (variable) elem;
1438 location_chain node, next;
1439
1440 gcc_checking_assert (var->refcount > 0);
1441
1442 var->refcount--;
1443 if (var->refcount > 0)
1444 return;
1445
1446 for (i = 0; i < var->n_var_parts; i++)
1447 {
1448 for (node = var->var_part[i].loc_chain; node; node = next)
1449 {
1450 next = node->next;
1451 pool_free (loc_chain_pool, node);
1452 }
1453 var->var_part[i].loc_chain = NULL;
1454 }
1455 if (var->onepart && VAR_LOC_1PAUX (var))
1456 {
1457 loc_exp_dep_clear (var);
1458 if (VAR_LOC_DEP_LST (var))
1459 VAR_LOC_DEP_LST (var)->pprev = NULL;
1460 XDELETE (VAR_LOC_1PAUX (var));
1461 /* These may be reused across functions, so reset
1462 e.g. NO_LOC_P. */
1463 if (var->onepart == ONEPART_DEXPR)
1464 set_dv_changed (var->dv, true);
1465 }
1466 pool_free (onepart_pool (var->onepart), var);
1467 }
1468
1469 /* Initialize the set (array) SET of attrs to empty lists. */
1470
1471 static void
1472 init_attrs_list_set (attrs *set)
1473 {
1474 int i;
1475
1476 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1477 set[i] = NULL;
1478 }
1479
1480 /* Make the list *LISTP empty. */
1481
1482 static void
1483 attrs_list_clear (attrs *listp)
1484 {
1485 attrs list, next;
1486
1487 for (list = *listp; list; list = next)
1488 {
1489 next = list->next;
1490 pool_free (attrs_pool, list);
1491 }
1492 *listp = NULL;
1493 }
1494
1495 /* Return the LIST node matching the pair DV, OFFSET, or NULL if none.  */
1496
1497 static attrs
1498 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1499 {
1500 for (; list; list = list->next)
1501 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1502 return list;
1503 return NULL;
1504 }
1505
1506 /* Insert the triplet DV, OFFSET, LOC at the head of the list *LISTP.  */
1507
1508 static void
1509 attrs_list_insert (attrs *listp, decl_or_value dv,
1510 HOST_WIDE_INT offset, rtx loc)
1511 {
1512 attrs list;
1513
1514 list = (attrs) pool_alloc (attrs_pool);
1515 list->loc = loc;
1516 list->dv = dv;
1517 list->offset = offset;
1518 list->next = *listp;
1519 *listp = list;
1520 }
1521
1522 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1523
1524 static void
1525 attrs_list_copy (attrs *dstp, attrs src)
1526 {
1527 attrs n;
1528
1529 attrs_list_clear (dstp);
1530 for (; src; src = src->next)
1531 {
1532 n = (attrs) pool_alloc (attrs_pool);
1533 n->loc = src->loc;
1534 n->dv = src->dv;
1535 n->offset = src->offset;
1536 n->next = *dstp;
1537 *dstp = n;
1538 }
1539 }
1540
1541 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1542
1543 static void
1544 attrs_list_union (attrs *dstp, attrs src)
1545 {
1546 for (; src; src = src->next)
1547 {
1548 if (!attrs_list_member (*dstp, src->dv, src->offset))
1549 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1550 }
1551 }
1552
1553 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1554 *DSTP. */
1555
1556 static void
1557 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1558 {
1559 gcc_assert (!*dstp);
1560 for (; src; src = src->next)
1561 {
1562 if (!dv_onepart_p (src->dv))
1563 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1564 }
1565 for (src = src2; src; src = src->next)
1566 {
1567 if (!dv_onepart_p (src->dv)
1568 && !attrs_list_member (*dstp, src->dv, src->offset))
1569 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1570 }
1571 }
1572
1573 /* Shared hashtable support. */
1574
1575 /* Return true if VARS is shared. */
1576
1577 static inline bool
1578 shared_hash_shared (shared_hash vars)
1579 {
1580 return vars->refcount > 1;
1581 }
1582
1583 /* Return the hash table for VARS. */
1584
1585 static inline variable_table_type *
1586 shared_hash_htab (shared_hash vars)
1587 {
1588 return vars->htab;
1589 }
1590
1591 /* Return true if VAR is shared, possibly because VARS itself is shared.  */
1592
1593 static inline bool
1594 shared_var_p (variable var, shared_hash vars)
1595 {
1596 /* Don't count an entry in the changed_variables table as a duplicate. */
1597 return ((var->refcount > 1 + (int) var->in_changed_variables)
1598 || shared_hash_shared (vars));
1599 }
1600
1601 /* Copy variables into a new hash table. */
1602
1603 static shared_hash
1604 shared_hash_unshare (shared_hash vars)
1605 {
1606 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1607 gcc_assert (vars->refcount > 1);
1608 new_vars->refcount = 1;
1609 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1610 vars_copy (new_vars->htab, vars->htab);
1611 vars->refcount--;
1612 return new_vars;
1613 }
1614
1615 /* Increment reference counter on VARS and return it. */
1616
1617 static inline shared_hash
1618 shared_hash_copy (shared_hash vars)
1619 {
1620 vars->refcount++;
1621 return vars;
1622 }
1623
1624 /* Decrement reference counter and destroy hash table if not shared
1625 anymore. */
1626
1627 static void
1628 shared_hash_destroy (shared_hash vars)
1629 {
1630 gcc_checking_assert (vars->refcount > 0);
1631 if (--vars->refcount == 0)
1632 {
1633 delete vars->htab;
1634 pool_free (shared_hash_pool, vars);
1635 }
1636 }
1637
1638 /* Unshare *PVARS if shared and return slot for DV. If INS is
1639 INSERT, insert it if not already present. */
1640
1641 static inline variable_def **
1642 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1643 hashval_t dvhash, enum insert_option ins)
1644 {
1645 if (shared_hash_shared (*pvars))
1646 *pvars = shared_hash_unshare (*pvars);
1647 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1648 }
1649
1650 static inline variable_def **
1651 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1652 enum insert_option ins)
1653 {
1654 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1655 }
1656
1657 /* Return slot for DV, if it is already present in the hash table.
1658    If it is not present, insert it only if VARS is not shared, otherwise
1659 return NULL. */
1660
1661 static inline variable_def **
1662 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1663 {
1664 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1665 shared_hash_shared (vars)
1666 ? NO_INSERT : INSERT);
1667 }
1668
1669 static inline variable_def **
1670 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1671 {
1672 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1673 }
1674
1675 /* Return slot for DV only if it is already present in the hash table. */
1676
1677 static inline variable_def **
1678 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1679 hashval_t dvhash)
1680 {
1681 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1682 }
1683
1684 static inline variable_def **
1685 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1686 {
1687 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1688 }
1689
1690 /* Return variable for DV or NULL if not already present in the hash
1691 table. */
1692
1693 static inline variable
1694 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1695 {
1696 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1697 }
1698
1699 static inline variable
1700 shared_hash_find (shared_hash vars, decl_or_value dv)
1701 {
1702 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1703 }
1704
1705 /* Return true if TVAL is better than CVAL as a canonical value.  We
1706 choose lowest-numbered VALUEs, using the RTX address as a
1707 tie-breaker. The idea is to arrange them into a star topology,
1708 such that all of them are at most one step away from the canonical
1709 value, and the canonical value has backlinks to all of them, in
1710 addition to all the actual locations. We don't enforce this
1711    topology throughout the entire dataflow analysis, though.  */
1713
1714 static inline bool
1715 canon_value_cmp (rtx tval, rtx cval)
1716 {
1717 return !cval
1718 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1719 }
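
/* For example, among VALUEs with uids 3, 7 and 9, the one with uid 3
   wins every comparison above and so becomes the canonical value at
   the center of the star (a worked illustration, not a case from
   this file).  */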
1720
1721 static bool dst_can_be_shared;
1722
1723 /* Return a copy of a variable VAR and insert it into dataflow set SET.  */
1724
1725 static variable_def **
1726 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1727 enum var_init_status initialized)
1728 {
1729 variable new_var;
1730 int i;
1731
1732 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1733 new_var->dv = var->dv;
1734 new_var->refcount = 1;
1735 var->refcount--;
1736 new_var->n_var_parts = var->n_var_parts;
1737 new_var->onepart = var->onepart;
1738 new_var->in_changed_variables = false;
1739
1740 if (! flag_var_tracking_uninit)
1741 initialized = VAR_INIT_STATUS_INITIALIZED;
1742
1743 for (i = 0; i < var->n_var_parts; i++)
1744 {
1745 location_chain node;
1746 location_chain *nextp;
1747
1748 if (i == 0 && var->onepart)
1749 {
1750 /* One-part auxiliary data is only used while emitting
1751 notes, so propagate it to the new variable in the active
1752 dataflow set. If we're not emitting notes, this will be
1753 a no-op. */
1754 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1755 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1756 VAR_LOC_1PAUX (var) = NULL;
1757 }
1758 else
1759 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1760 nextp = &new_var->var_part[i].loc_chain;
1761 for (node = var->var_part[i].loc_chain; node; node = node->next)
1762 {
1763 location_chain new_lc;
1764
1765 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1766 new_lc->next = NULL;
1767 if (node->init > initialized)
1768 new_lc->init = node->init;
1769 else
1770 new_lc->init = initialized;
1771 if (node->set_src && !(MEM_P (node->set_src)))
1772 new_lc->set_src = node->set_src;
1773 else
1774 new_lc->set_src = NULL;
1775 new_lc->loc = node->loc;
1776
1777 *nextp = new_lc;
1778 nextp = &new_lc->next;
1779 }
1780
1781 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1782 }
1783
1784 dst_can_be_shared = false;
1785 if (shared_hash_shared (set->vars))
1786 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1787 else if (set->traversed_vars && set->vars != set->traversed_vars)
1788 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1789 *slot = new_var;
1790 if (var->in_changed_variables)
1791 {
1792 variable_def **cslot
1793 = changed_variables->find_slot_with_hash (var->dv,
1794 dv_htab_hash (var->dv),
1795 NO_INSERT);
1796 gcc_assert (*cslot == (void *) var);
1797 var->in_changed_variables = false;
1798 variable_htab_free (var);
1799 *cslot = new_var;
1800 new_var->in_changed_variables = true;
1801 }
1802 return slot;
1803 }
1804
1805 /* Copy all variables from hash table SRC to hash table DST. */
1806
1807 static void
1808 vars_copy (variable_table_type *dst, variable_table_type *src)
1809 {
1810 variable_iterator_type hi;
1811 variable var;
1812
1813 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1814 {
1815 variable_def **dstp;
1816 var->refcount++;
1817 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1818 INSERT);
1819 *dstp = var;
1820 }
1821 }
1822
1823 /* Map a decl to its main debug decl. */
1824
1825 static inline tree
1826 var_debug_decl (tree decl)
1827 {
1828 if (decl && TREE_CODE (decl) == VAR_DECL
1829 && DECL_HAS_DEBUG_EXPR_P (decl))
1830 {
1831 tree debugdecl = DECL_DEBUG_EXPR (decl);
1832 if (DECL_P (debugdecl))
1833 decl = debugdecl;
1834 }
1835
1836 return decl;
1837 }
1838
1839 /* Set the register LOC to contain DV, OFFSET. */
1840
1841 static void
1842 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1843 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1844 enum insert_option iopt)
1845 {
1846 attrs node;
1847 bool decl_p = dv_is_decl_p (dv);
1848
1849 if (decl_p)
1850 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1851
1852 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1853 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1854 && node->offset == offset)
1855 break;
1856 if (!node)
1857 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1858 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1859 }
1860
1861 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1862
1863 static void
1864 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1865 rtx set_src)
1866 {
1867 tree decl = REG_EXPR (loc);
1868 HOST_WIDE_INT offset = REG_OFFSET (loc);
1869
1870 var_reg_decl_set (set, loc, initialized,
1871 dv_from_decl (decl), offset, set_src, INSERT);
1872 }
1873
1874 static enum var_init_status
1875 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1876 {
1877 variable var;
1878 int i;
1879 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1880
1881 if (! flag_var_tracking_uninit)
1882 return VAR_INIT_STATUS_INITIALIZED;
1883
1884 var = shared_hash_find (set->vars, dv);
1885 if (var)
1886 {
1887 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1888 {
1889 location_chain nextp;
1890 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1891 if (rtx_equal_p (nextp->loc, loc))
1892 {
1893 ret_val = nextp->init;
1894 break;
1895 }
1896 }
1897 }
1898
1899 return ret_val;
1900 }
1901
1902 /* Delete current content of register LOC in dataflow set SET and set
1903 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1904 MODIFY is true, any other live copies of the same variable part are
1905 also deleted from the dataflow set, otherwise the variable part is
1906 assumed to be copied from another location holding the same
1907 part. */
1908
1909 static void
1910 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1911 enum var_init_status initialized, rtx set_src)
1912 {
1913 tree decl = REG_EXPR (loc);
1914 HOST_WIDE_INT offset = REG_OFFSET (loc);
1915 attrs node, next;
1916 attrs *nextp;
1917
1918 decl = var_debug_decl (decl);
1919
1920 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1921 initialized = get_init_value (set, loc, dv_from_decl (decl));
1922
1923 nextp = &set->regs[REGNO (loc)];
1924 for (node = *nextp; node; node = next)
1925 {
1926 next = node->next;
1927 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1928 {
1929 delete_variable_part (set, node->loc, node->dv, node->offset);
1930 pool_free (attrs_pool, node);
1931 *nextp = next;
1932 }
1933 else
1934 {
1935 node->loc = loc;
1936 nextp = &node->next;
1937 }
1938 }
1939 if (modify)
1940 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1941 var_reg_set (set, loc, initialized, set_src);
1942 }
1943
1944 /* Delete the association of register LOC in dataflow set SET with any
1945 variables that aren't onepart. If CLOBBER is true, also delete any
1946 other live copies of the same variable part, and delete the
1947 association with onepart dvs too. */
1948
1949 static void
1950 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1951 {
1952 attrs *nextp = &set->regs[REGNO (loc)];
1953 attrs node, next;
1954
1955 if (clobber)
1956 {
1957 tree decl = REG_EXPR (loc);
1958 HOST_WIDE_INT offset = REG_OFFSET (loc);
1959
1960 decl = var_debug_decl (decl);
1961
1962 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1963 }
1964
1965 for (node = *nextp; node; node = next)
1966 {
1967 next = node->next;
1968 if (clobber || !dv_onepart_p (node->dv))
1969 {
1970 delete_variable_part (set, node->loc, node->dv, node->offset);
1971 pool_free (attrs_pool, node);
1972 *nextp = next;
1973 }
1974 else
1975 nextp = &node->next;
1976 }
1977 }
1978
1979 /* Delete content of register with number REGNO in dataflow set SET. */
1980
1981 static void
1982 var_regno_delete (dataflow_set *set, int regno)
1983 {
1984 attrs *reg = &set->regs[regno];
1985 attrs node, next;
1986
1987 for (node = *reg; node; node = next)
1988 {
1989 next = node->next;
1990 delete_variable_part (set, node->loc, node->dv, node->offset);
1991 pool_free (attrs_pool, node);
1992 }
1993 *reg = NULL;
1994 }
1995
1996 /* Return true if I is the negated value of a power of two. */
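/* For instance, I == -8 gives X == 8 and X == (X & -X), so the result
   is true; I == -6 gives X == 6 but (X & -X) == 2, so it is false.
   Alignment masks such as (const_int -8) have this shape. */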
1997 static bool
1998 negative_power_of_two_p (HOST_WIDE_INT i)
1999 {
2000 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2001 return x == (x & -x);
2002 }
2003
2004 /* Strip constant offsets and alignments off of LOC. Return the base
2005 expression. */
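/* For example, (and (plus (value V) (const_int 16)) (const_int -8))
   strips down to (value V): the AND mask is a negated power of two
   and the PLUS offset is a constant, so both wrappers are peeled off. */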
2006
2007 static rtx
2008 vt_get_canonicalize_base (rtx loc)
2009 {
2010 while ((GET_CODE (loc) == PLUS
2011 || GET_CODE (loc) == AND)
2012 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2013 && (GET_CODE (loc) != AND
2014 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2015 loc = XEXP (loc, 0);
2016
2017 return loc;
2018 }
2019
2020 /* This caches canonicalized addresses for VALUEs, computed using
2021 information in the global cselib table. */
2022 static hash_map<rtx, rtx> *global_get_addr_cache;
2023
2024 /* This caches canonicalized addresses for VALUEs, computed using
2025 information from the global cache and information pertaining to a
2026 basic block being analyzed. */
2027 static hash_map<rtx, rtx> *local_get_addr_cache;
2028
2029 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2030
2031 /* Return the canonical address for LOC, which must be a VALUE, using a
2032 cached global equivalence or computing it and storing it in the
2033 global cache. */
2034
2035 static rtx
2036 get_addr_from_global_cache (rtx const loc)
2037 {
2038 rtx x;
2039
2040 gcc_checking_assert (GET_CODE (loc) == VALUE);
2041
2042 bool existed;
2043 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2044 if (existed)
2045 return *slot;
2046
2047 x = canon_rtx (get_addr (loc));
2048
2049 /* Tentative, avoiding infinite recursion. */
2050 *slot = x;
2051
2052 if (x != loc)
2053 {
2054 rtx nx = vt_canonicalize_addr (NULL, x);
2055 if (nx != x)
2056 {
2057 /* The table may have moved during recursion, recompute
2058 SLOT. */
2059 *global_get_addr_cache->get (loc) = x = nx;
2060 }
2061 }
2062
2063 return x;
2064 }
2065
2066 /* Return the canonical address for LOC, which must be a VALUE, using a
2067 cached local equivalence or computing it and storing it in the
2068 local cache. */
2069
2070 static rtx
2071 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2072 {
2073 rtx x;
2074 decl_or_value dv;
2075 variable var;
2076 location_chain l;
2077
2078 gcc_checking_assert (GET_CODE (loc) == VALUE);
2079
2080 bool existed;
2081 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2082 if (existed)
2083 return *slot;
2084
2085 x = get_addr_from_global_cache (loc);
2086
2087 /* Tentative, avoiding infinite recursion. */
2088 *slot = x;
2089
2090 /* Recurse to cache the local expansion of X, or to search for a
2091 VALUE within that expansion. */
2092 if (x != loc)
2093 {
2094 rtx nx = vt_canonicalize_addr (set, x);
2095 if (nx != x)
2096 {
2097 slot = local_get_addr_cache->get (loc);
2098 *slot = x = nx;
2099 }
2100 return x;
2101 }
2102
2103 dv = dv_from_rtx (x);
2104 var = shared_hash_find (set->vars, dv);
2105 if (!var)
2106 return x;
2107
2108 /* Look for an improved equivalent expression. */
2109 for (l = var->var_part[0].loc_chain; l; l = l->next)
2110 {
2111 rtx base = vt_get_canonicalize_base (l->loc);
2112 if (GET_CODE (base) == VALUE
2113 && canon_value_cmp (base, loc))
2114 {
2115 rtx nx = vt_canonicalize_addr (set, l->loc);
2116 if (x != nx)
2117 {
2118 slot = local_get_addr_cache->get (loc);
2119 *slot = x = nx;
2120 }
2121 break;
2122 }
2123 }
2124
2125 return x;
2126 }
2127
2128 /* Canonicalize LOC using equivalences from SET in addition to those
2129 in the cselib static table. It expects a VALUE-based expression,
2130 and it will only substitute VALUEs with other VALUEs or
2131 function-global equivalences, so that, if two addresses have base
2132 VALUEs that are locally or globally related in ways that
2133 memrefs_conflict_p cares about, they will both canonicalize to
2134 expressions that have the same base VALUE.
2135
2136 The use of VALUEs as canonical base addresses enables the canonical
2137 RTXs to remain unchanged globally, if they resolve to a constant,
2138 or throughout a basic block otherwise, so that they can be cached
2139 and the cache need not be invalidated when REGs, MEMs or such
2140 change. */
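/* An illustrative sketch: if VALUE V1's canonical address is cached
   as (plus (value V0) (const_int 4)), then canonicalizing
   (plus (value V1) (const_int 8)) accumulates the constant offsets
   and resolves V1, yielding (plus (value V0) (const_int 12)). */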
2141
2142 static rtx
2143 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2144 {
2145 HOST_WIDE_INT ofst = 0;
2146 enum machine_mode mode = GET_MODE (oloc);
2147 rtx loc = oloc;
2148 rtx x;
2149 bool retry = true;
2150
2151 while (retry)
2152 {
2153 while (GET_CODE (loc) == PLUS
2154 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2155 {
2156 ofst += INTVAL (XEXP (loc, 1));
2157 loc = XEXP (loc, 0);
2158 }
2159
2160 /* Alignment operations can't normally be combined, so just
2161 canonicalize the base and we're done. We'll normally have
2162 only one stack alignment anyway. */
2163 if (GET_CODE (loc) == AND
2164 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2165 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2166 {
2167 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2168 if (x != XEXP (loc, 0))
2169 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2170 retry = false;
2171 }
2172
2173 if (GET_CODE (loc) == VALUE)
2174 {
2175 if (set)
2176 loc = get_addr_from_local_cache (set, loc);
2177 else
2178 loc = get_addr_from_global_cache (loc);
2179
2180 /* Consolidate plus_constants. */
2181 while (ofst && GET_CODE (loc) == PLUS
2182 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2183 {
2184 ofst += INTVAL (XEXP (loc, 1));
2185 loc = XEXP (loc, 0);
2186 }
2187
2188 retry = false;
2189 }
2190 else
2191 {
2192 x = canon_rtx (loc);
2193 if (retry)
2194 retry = (x != loc);
2195 loc = x;
2196 }
2197 }
2198
2199 /* Add OFST back in. */
2200 if (ofst)
2201 {
2202 /* Don't build new RTL if we can help it. */
2203 if (GET_CODE (oloc) == PLUS
2204 && XEXP (oloc, 0) == loc
2205 && INTVAL (XEXP (oloc, 1)) == ofst)
2206 return oloc;
2207
2208 loc = plus_constant (mode, loc, ofst);
2209 }
2210
2211 return loc;
2212 }
2213
2214 /* Return true iff there's a true dependence between MLOC and LOC.
2215 MADDR must be a canonicalized version of MLOC's address. */
2216
2217 static inline bool
2218 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2219 {
2220 if (GET_CODE (loc) != MEM)
2221 return false;
2222
2223 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2224 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2225 return false;
2226
2227 return true;
2228 }
2229
2230 /* Hold parameters for the hashtab traversal function
2231 drop_overlapping_mem_locs, see below. */
2232
2233 struct overlapping_mems
2234 {
2235 dataflow_set *set;
2236 rtx loc, addr;
2237 };
2238
2239 /* Remove all MEMs that overlap with COMS->LOC from the location list
2240 of a hash table entry for a value. COMS->ADDR must be a
2241 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2242 canonicalized itself. */
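/* The return value of 1 tells the hash table traversal to continue
   over the remaining entries, as with the other traversal callbacks
   in this file. */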
2243
2244 int
2245 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2246 {
2247 dataflow_set *set = coms->set;
2248 rtx mloc = coms->loc, addr = coms->addr;
2249 variable var = *slot;
2250
2251 if (var->onepart == ONEPART_VALUE)
2252 {
2253 location_chain loc, *locp;
2254 bool changed = false;
2255 rtx cur_loc;
2256
2257 gcc_assert (var->n_var_parts == 1);
2258
2259 if (shared_var_p (var, set->vars))
2260 {
2261 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2262 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2263 break;
2264
2265 if (!loc)
2266 return 1;
2267
2268 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2269 var = *slot;
2270 gcc_assert (var->n_var_parts == 1);
2271 }
2272
2273 if (VAR_LOC_1PAUX (var))
2274 cur_loc = VAR_LOC_FROM (var);
2275 else
2276 cur_loc = var->var_part[0].cur_loc;
2277
2278 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2279 loc; loc = *locp)
2280 {
2281 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2282 {
2283 locp = &loc->next;
2284 continue;
2285 }
2286
2287 *locp = loc->next;
2288 /* If we have deleted the location which was last emitted,
2289 we have to emit a new location, so add the variable to the
2290 set of changed variables. */
2291 if (cur_loc == loc->loc)
2292 {
2293 changed = true;
2294 var->var_part[0].cur_loc = NULL;
2295 if (VAR_LOC_1PAUX (var))
2296 VAR_LOC_FROM (var) = NULL;
2297 }
2298 pool_free (loc_chain_pool, loc);
2299 }
2300
2301 if (!var->var_part[0].loc_chain)
2302 {
2303 var->n_var_parts--;
2304 changed = true;
2305 }
2306 if (changed)
2307 variable_was_changed (var, set);
2308 }
2309
2310 return 1;
2311 }
2312
2313 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2314
2315 static void
2316 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2317 {
2318 struct overlapping_mems coms;
2319
2320 gcc_checking_assert (GET_CODE (loc) == MEM);
2321
2322 coms.set = set;
2323 coms.loc = canon_rtx (loc);
2324 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2325
2326 set->traversed_vars = set->vars;
2327 shared_hash_htab (set->vars)
2328 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2329 set->traversed_vars = NULL;
2330 }
2331
2332 /* Set the location of DV, OFFSET as the MEM LOC. */
2333
2334 static void
2335 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2336 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2337 enum insert_option iopt)
2338 {
2339 if (dv_is_decl_p (dv))
2340 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2341
2342 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2343 }
2344
2345 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2346 SET to LOC.
2347 Adjust the address first if it is stack pointer based. */
2348
2349 static void
2350 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2351 rtx set_src)
2352 {
2353 tree decl = MEM_EXPR (loc);
2354 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2355
2356 var_mem_decl_set (set, loc, initialized,
2357 dv_from_decl (decl), offset, set_src, INSERT);
2358 }
2359
2360 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2361 dataflow set SET to LOC. If MODIFY is true, any other live copies
2362 of the same variable part are also deleted from the dataflow set,
2363 otherwise the variable part is assumed to be copied from another
2364 location holding the same part.
2365 Adjust the address first if it is stack pointer based. */
2366
2367 static void
2368 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2369 enum var_init_status initialized, rtx set_src)
2370 {
2371 tree decl = MEM_EXPR (loc);
2372 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2373
2374 clobber_overlapping_mems (set, loc);
2375 decl = var_debug_decl (decl);
2376
2377 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2378 initialized = get_init_value (set, loc, dv_from_decl (decl));
2379
2380 if (modify)
2381 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2382 var_mem_set (set, loc, initialized, set_src);
2383 }
2384
2385 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2386 true, also delete any other live copies of the same variable part.
2387 Adjust the address first if it is stack pointer based. */
2388
2389 static void
2390 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2391 {
2392 tree decl = MEM_EXPR (loc);
2393 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2394
2395 clobber_overlapping_mems (set, loc);
2396 decl = var_debug_decl (decl);
2397 if (clobber)
2398 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2399 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2400 }
2401
2402 /* Return true if LOC should not be expanded for location expressions,
2403 or used in them. */
2404
2405 static inline bool
2406 unsuitable_loc (rtx loc)
2407 {
2408 switch (GET_CODE (loc))
2409 {
2410 case PC:
2411 case SCRATCH:
2412 case CC0:
2413 case ASM_INPUT:
2414 case ASM_OPERANDS:
2415 return true;
2416
2417 default:
2418 return false;
2419 }
2420 }
2421
2422 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2423 bound to it. */
2424
2425 static inline void
2426 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2427 {
2428 if (REG_P (loc))
2429 {
2430 if (modified)
2431 var_regno_delete (set, REGNO (loc));
2432 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2433 dv_from_value (val), 0, NULL_RTX, INSERT);
2434 }
2435 else if (MEM_P (loc))
2436 {
2437 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2438
2439 if (modified)
2440 clobber_overlapping_mems (set, loc);
2441
2442 if (l && GET_CODE (l->loc) == VALUE)
2443 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2444
2445 /* If this MEM is a global constant, we don't need it in the
2446 dynamic tables. ??? We should test this before emitting the
2447 micro-op in the first place. */
2448 while (l)
2449 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2450 break;
2451 else
2452 l = l->next;
2453
2454 if (!l)
2455 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2456 dv_from_value (val), 0, NULL_RTX, INSERT);
2457 }
2458 else
2459 {
2460 /* Other kinds of equivalences are necessarily static, at least
2461 so long as we do not perform substitutions while merging
2462 expressions. */
2463 gcc_unreachable ();
2464 set_variable_part (set, loc, dv_from_value (val), 0,
2465 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2466 }
2467 }
2468
2469 /* Bind a value to a location it was just stored in. If MODIFIED
2470 holds, assume the location was modified, detaching it from any
2471 values bound to it. */
2472
2473 static void
2474 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2475 bool modified)
2476 {
2477 cselib_val *v = CSELIB_VAL_PTR (val);
2478
2479 gcc_assert (cselib_preserved_value_p (v));
2480
2481 if (dump_file)
2482 {
2483 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2484 print_inline_rtx (dump_file, loc, 0);
2485 fprintf (dump_file, " evaluates to ");
2486 print_inline_rtx (dump_file, val, 0);
2487 if (v->locs)
2488 {
2489 struct elt_loc_list *l;
2490 for (l = v->locs; l; l = l->next)
2491 {
2492 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2493 print_inline_rtx (dump_file, l->loc, 0);
2494 }
2495 }
2496 fprintf (dump_file, "\n");
2497 }
2498
2499 gcc_checking_assert (!unsuitable_loc (loc));
2500
2501 val_bind (set, val, loc, modified);
2502 }
2503
2504 /* Clear (canonical address) slots that reference X. */
2505
2506 bool
2507 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2508 {
2509 if (vt_get_canonicalize_base (*slot) == x)
2510 *slot = NULL;
2511 return true;
2512 }
2513
2514 /* Reset this node, detaching all its equivalences; the remaining
2515 equivalent values are re-linked through the most canonical of them. */
2516
2517 static void
2518 val_reset (dataflow_set *set, decl_or_value dv)
2519 {
2520 variable var = shared_hash_find (set->vars, dv);
2521 location_chain node;
2522 rtx cval;
2523
2524 if (!var || !var->n_var_parts)
2525 return;
2526
2527 gcc_assert (var->n_var_parts == 1);
2528
2529 if (var->onepart == ONEPART_VALUE)
2530 {
2531 rtx x = dv_as_value (dv);
2532
2533 /* Relationships in the global cache don't change, so reset the
2534 local cache entry only. */
2535 rtx *slot = local_get_addr_cache->get (x);
2536 if (slot)
2537 {
2538 /* If the value resolved back to itself, odds are that other
2539 values may have cached it too. These entries now refer
2540 to the old X, so detach them too. Entries that used the
2541 old X but resolved to something else remain ok as long as
2542 that something else isn't also reset. */
2543 if (*slot == x)
2544 local_get_addr_cache
2545 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2546 *slot = NULL;
2547 }
2548 }
2549
2550 cval = NULL;
2551 for (node = var->var_part[0].loc_chain; node; node = node->next)
2552 if (GET_CODE (node->loc) == VALUE
2553 && canon_value_cmp (node->loc, cval))
2554 cval = node->loc;
2555
2556 for (node = var->var_part[0].loc_chain; node; node = node->next)
2557 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2558 {
2559 /* Redirect the equivalence link to the new canonical
2560 value, or simply remove it if it would point at
2561 itself. */
2562 if (cval)
2563 set_variable_part (set, cval, dv_from_value (node->loc),
2564 0, node->init, node->set_src, NO_INSERT);
2565 delete_variable_part (set, dv_as_value (dv),
2566 dv_from_value (node->loc), 0);
2567 }
2568
2569 if (cval)
2570 {
2571 decl_or_value cdv = dv_from_value (cval);
2572
2573 /* Keep the remaining values connected, accumulating links
2574 in the canonical value. */
2575 for (node = var->var_part[0].loc_chain; node; node = node->next)
2576 {
2577 if (node->loc == cval)
2578 continue;
2579 else if (GET_CODE (node->loc) == REG)
2580 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2581 node->set_src, NO_INSERT);
2582 else if (GET_CODE (node->loc) == MEM)
2583 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2584 node->set_src, NO_INSERT);
2585 else
2586 set_variable_part (set, node->loc, cdv, 0,
2587 node->init, node->set_src, NO_INSERT);
2588 }
2589 }
2590
2591 /* We remove this last, to make sure that the canonical value is not
2592 removed to the point of requiring reinsertion. */
2593 if (cval)
2594 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2595
2596 clobber_variable_part (set, NULL, dv, 0, NULL);
2597 }
2598
2599 /* Find the values in a given location and map VAL to another
2600 value, if it is unique, or else add the location as one holding
2601 the value. */
2602
2603 static void
2604 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2605 {
2606 decl_or_value dv = dv_from_value (val);
2607
2608 if (dump_file && (dump_flags & TDF_DETAILS))
2609 {
2610 if (insn)
2611 fprintf (dump_file, "%i: ", INSN_UID (insn));
2612 else
2613 fprintf (dump_file, "head: ");
2614 print_inline_rtx (dump_file, val, 0);
2615 fputs (" is at ", dump_file);
2616 print_inline_rtx (dump_file, loc, 0);
2617 fputc ('\n', dump_file);
2618 }
2619
2620 val_reset (set, dv);
2621
2622 gcc_checking_assert (!unsuitable_loc (loc));
2623
2624 if (REG_P (loc))
2625 {
2626 attrs node, found = NULL;
2627
2628 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2629 if (dv_is_value_p (node->dv)
2630 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2631 {
2632 found = node;
2633
2634 /* Map incoming equivalences. ??? Wouldn't it be nice if
2635 we just started sharing the location lists? Maybe a
2636 circular list ending at the value itself or some
2637 such. */
2638 set_variable_part (set, dv_as_value (node->dv),
2639 dv_from_value (val), node->offset,
2640 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2641 set_variable_part (set, val, node->dv, node->offset,
2642 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2643 }
2644
2645 /* If we didn't find any equivalence, we need to remember that
2646 this value is held in the named register. */
2647 if (found)
2648 return;
2649 }
2650 /* ??? Attempt to find and merge equivalent MEMs or other
2651 expressions too. */
2652
2653 val_bind (set, val, loc, false);
2654 }
2655
2656 /* Initialize dataflow set SET to be empty; its VARS table starts
2657 out sharing EMPTY_SHARED_HASH. */
2658
2659 static void
2660 dataflow_set_init (dataflow_set *set)
2661 {
2662 init_attrs_list_set (set->regs);
2663 set->vars = shared_hash_copy (empty_shared_hash);
2664 set->stack_adjust = 0;
2665 set->traversed_vars = NULL;
2666 }
2667
2668 /* Delete the contents of dataflow set SET. */
2669
2670 static void
2671 dataflow_set_clear (dataflow_set *set)
2672 {
2673 int i;
2674
2675 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2676 attrs_list_clear (&set->regs[i]);
2677
2678 shared_hash_destroy (set->vars);
2679 set->vars = shared_hash_copy (empty_shared_hash);
2680 }
2681
2682 /* Copy the contents of dataflow set SRC to DST. */
2683
2684 static void
2685 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2686 {
2687 int i;
2688
2689 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2690 attrs_list_copy (&dst->regs[i], src->regs[i]);
2691
2692 shared_hash_destroy (dst->vars);
2693 dst->vars = shared_hash_copy (src->vars);
2694 dst->stack_adjust = src->stack_adjust;
2695 }
2696
2697 /* Information for merging lists of locations for a given offset
2698 of a variable. */
2699 struct variable_union_info
2700 {
2701 /* Node of the location chain. */
2702 location_chain lc;
2703
2704 /* The sum of positions in the input chains. */
2705 int pos;
2706
2707 /* The position in the chain of DST dataflow set. */
2708 int pos_dst;
2709 };
2710
2711 /* Buffer for location list sorting and its allocated size. */
2712 static struct variable_union_info *vui_vec;
2713 static int vui_allocated;
2714
2715 /* Comparison function for qsort; order the structures by their POS element. */
2716
2717 static int
2718 variable_union_info_cmp_pos (const void *n1, const void *n2)
2719 {
2720 const struct variable_union_info *const i1 =
2721 (const struct variable_union_info *) n1;
2722 const struct variable_union_info *const i2 =
2723 ( const struct variable_union_info *) n2;
2724
2725 if (i1->pos != i2->pos)
2726 return i1->pos - i2->pos;
2727
2728 return (i1->pos_dst - i2->pos_dst);
2729 }
2730
2731 /* Compute the union of the location parts of variable SRC and of the
2732 same variable in dataflow set SET. Compute a "sorted" union of the location chains
2733 for common offsets, i.e. the locations of a variable part are sorted by
2734 a priority where the priority is the sum of the positions in the 2 chains
2735 (if a location is only in one list the position in the second list is
2736 defined to be larger than the length of the chains).
2737 When we are updating the location parts the newest location is in the
2738 beginning of the chain, so when we do the described "sorted" union
2739 we keep the newest locations in the beginning. */
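/* A small worked illustration of the priority: with DST chain (X, Y)
   and SRC chain (Y, Z), src_l == dst_l == 2. Y is common, at
   position 1 in DST and 0 in SRC, so pos (Y) == 1; X is only in DST,
   so pos (X) == 0 + src_l + dst_l == 4; Z is only in SRC, so
   pos (Z) == 1 + src_l + dst_l == 5. Sorting by pos yields
   (Y, X, Z), the common location first. */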
2740
2741 static int
2742 variable_union (variable src, dataflow_set *set)
2743 {
2744 variable dst;
2745 variable_def **dstp;
2746 int i, j, k;
2747
2748 dstp = shared_hash_find_slot (set->vars, src->dv);
2749 if (!dstp || !*dstp)
2750 {
2751 src->refcount++;
2752
2753 dst_can_be_shared = false;
2754 if (!dstp)
2755 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2756
2757 *dstp = src;
2758
2759 /* Continue traversing the hash table. */
2760 return 1;
2761 }
2762 else
2763 dst = *dstp;
2764
2765 gcc_assert (src->n_var_parts);
2766 gcc_checking_assert (src->onepart == dst->onepart);
2767
2768 /* We can combine one-part variables very efficiently, because their
2769 entries are in canonical order. */
2770 if (src->onepart)
2771 {
2772 location_chain *nodep, dnode, snode;
2773
2774 gcc_assert (src->n_var_parts == 1
2775 && dst->n_var_parts == 1);
2776
2777 snode = src->var_part[0].loc_chain;
2778 gcc_assert (snode);
2779
2780 restart_onepart_unshared:
2781 nodep = &dst->var_part[0].loc_chain;
2782 dnode = *nodep;
2783 gcc_assert (dnode);
2784
2785 while (snode)
2786 {
2787 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2788
2789 if (r > 0)
2790 {
2791 location_chain nnode;
2792
2793 if (shared_var_p (dst, set->vars))
2794 {
2795 dstp = unshare_variable (set, dstp, dst,
2796 VAR_INIT_STATUS_INITIALIZED);
2797 dst = *dstp;
2798 goto restart_onepart_unshared;
2799 }
2800
2801 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2802 nnode->loc = snode->loc;
2803 nnode->init = snode->init;
2804 if (!snode->set_src || MEM_P (snode->set_src))
2805 nnode->set_src = NULL;
2806 else
2807 nnode->set_src = snode->set_src;
2808 nnode->next = dnode;
2809 dnode = nnode;
2810 }
2811 else if (r == 0)
2812 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2813
2814 if (r >= 0)
2815 snode = snode->next;
2816
2817 nodep = &dnode->next;
2818 dnode = *nodep;
2819 }
2820
2821 return 1;
2822 }
2823
2824 gcc_checking_assert (!src->onepart);
2825
2826 /* Count the number of location parts, result is K. */
2827 for (i = 0, j = 0, k = 0;
2828 i < src->n_var_parts && j < dst->n_var_parts; k++)
2829 {
2830 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2831 {
2832 i++;
2833 j++;
2834 }
2835 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2836 i++;
2837 else
2838 j++;
2839 }
2840 k += src->n_var_parts - i;
2841 k += dst->n_var_parts - j;
2842
2843 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2844 so there are at most MAX_VAR_PARTS different offsets. */
2845 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2846
2847 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2848 {
2849 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2850 dst = *dstp;
2851 }
2852
2853 i = src->n_var_parts - 1;
2854 j = dst->n_var_parts - 1;
2855 dst->n_var_parts = k;
2856
2857 for (k--; k >= 0; k--)
2858 {
2859 location_chain node, node2;
2860
2861 if (i >= 0 && j >= 0
2862 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2863 {
2864 /* Compute the "sorted" union of the chains, i.e. the locations
2865 which are in both chains go first; they are sorted by the sum
2866 of their positions in the chains. */
2867 int dst_l, src_l;
2868 int ii, jj, n;
2869 struct variable_union_info *vui;
2870
2871 /* If DST is shared, compare the location chains.
2872 If they differ, we will very likely modify the chain in DST,
2873 so make a copy of DST first. */
2874 if (shared_var_p (dst, set->vars))
2875 {
2876 for (node = src->var_part[i].loc_chain,
2877 node2 = dst->var_part[j].loc_chain; node && node2;
2878 node = node->next, node2 = node2->next)
2879 {
2880 if (!((REG_P (node2->loc)
2881 && REG_P (node->loc)
2882 && REGNO (node2->loc) == REGNO (node->loc))
2883 || rtx_equal_p (node2->loc, node->loc)))
2884 {
2885 if (node2->init < node->init)
2886 node2->init = node->init;
2887 break;
2888 }
2889 }
2890 if (node || node2)
2891 {
2892 dstp = unshare_variable (set, dstp, dst,
2893 VAR_INIT_STATUS_UNKNOWN);
2894 dst = (variable)*dstp;
2895 }
2896 }
2897
2898 src_l = 0;
2899 for (node = src->var_part[i].loc_chain; node; node = node->next)
2900 src_l++;
2901 dst_l = 0;
2902 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2903 dst_l++;
2904
2905 if (dst_l == 1)
2906 {
2907 /* The most common case, and much simpler: no qsort is needed. */
2908 location_chain dstnode = dst->var_part[j].loc_chain;
2909 dst->var_part[k].loc_chain = dstnode;
2910 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2911 node2 = dstnode;
2912 for (node = src->var_part[i].loc_chain; node; node = node->next)
2913 if (!((REG_P (dstnode->loc)
2914 && REG_P (node->loc)
2915 && REGNO (dstnode->loc) == REGNO (node->loc))
2916 || rtx_equal_p (dstnode->loc, node->loc)))
2917 {
2918 location_chain new_node;
2919
2920 /* Copy the location from SRC. */
2921 new_node = (location_chain) pool_alloc (loc_chain_pool);
2922 new_node->loc = node->loc;
2923 new_node->init = node->init;
2924 if (!node->set_src || MEM_P (node->set_src))
2925 new_node->set_src = NULL;
2926 else
2927 new_node->set_src = node->set_src;
2928 node2->next = new_node;
2929 node2 = new_node;
2930 }
2931 node2->next = NULL;
2932 }
2933 else
2934 {
2935 if (src_l + dst_l > vui_allocated)
2936 {
2937 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2938 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2939 vui_allocated);
2940 }
2941 vui = vui_vec;
2942
2943 /* Fill in the locations from DST. */
2944 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2945 node = node->next, jj++)
2946 {
2947 vui[jj].lc = node;
2948 vui[jj].pos_dst = jj;
2949
2950 /* Start POS at a value larger than the sum of any two valid positions. */
2951 vui[jj].pos = jj + src_l + dst_l;
2952 }
2953
2954 /* Fill in the locations from SRC. */
2955 n = dst_l;
2956 for (node = src->var_part[i].loc_chain, ii = 0; node;
2957 node = node->next, ii++)
2958 {
2959 /* Find location from NODE. */
2960 for (jj = 0; jj < dst_l; jj++)
2961 {
2962 if ((REG_P (vui[jj].lc->loc)
2963 && REG_P (node->loc)
2964 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2965 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2966 {
2967 vui[jj].pos = jj + ii;
2968 break;
2969 }
2970 }
2971 if (jj >= dst_l) /* The location has not been found. */
2972 {
2973 location_chain new_node;
2974
2975 /* Copy the location from SRC. */
2976 new_node = (location_chain) pool_alloc (loc_chain_pool);
2977 new_node->loc = node->loc;
2978 new_node->init = node->init;
2979 if (!node->set_src || MEM_P (node->set_src))
2980 new_node->set_src = NULL;
2981 else
2982 new_node->set_src = node->set_src;
2983 vui[n].lc = new_node;
2984 vui[n].pos_dst = src_l + dst_l;
2985 vui[n].pos = ii + src_l + dst_l;
2986 n++;
2987 }
2988 }
2989
2990 if (dst_l == 2)
2991 {
2992 /* A special case that is still very common. For dst_l == 2,
2993 all entries dst_l ... n-1 are already sorted, since for i >= dst_l
2994 we have vui[i].pos == i + src_l + dst_l. */
2995 if (vui[0].pos > vui[1].pos)
2996 {
2997 /* Order should be 1, 0, 2... */
2998 dst->var_part[k].loc_chain = vui[1].lc;
2999 vui[1].lc->next = vui[0].lc;
3000 if (n >= 3)
3001 {
3002 vui[0].lc->next = vui[2].lc;
3003 vui[n - 1].lc->next = NULL;
3004 }
3005 else
3006 vui[0].lc->next = NULL;
3007 ii = 3;
3008 }
3009 else
3010 {
3011 dst->var_part[k].loc_chain = vui[0].lc;
3012 if (n >= 3 && vui[2].pos < vui[1].pos)
3013 {
3014 /* Order should be 0, 2, 1, 3... */
3015 vui[0].lc->next = vui[2].lc;
3016 vui[2].lc->next = vui[1].lc;
3017 if (n >= 4)
3018 {
3019 vui[1].lc->next = vui[3].lc;
3020 vui[n - 1].lc->next = NULL;
3021 }
3022 else
3023 vui[1].lc->next = NULL;
3024 ii = 4;
3025 }
3026 else
3027 {
3028 /* Order should be 0, 1, 2... */
3029 ii = 1;
3030 vui[n - 1].lc->next = NULL;
3031 }
3032 }
3033 for (; ii < n; ii++)
3034 vui[ii - 1].lc->next = vui[ii].lc;
3035 }
3036 else
3037 {
3038 qsort (vui, n, sizeof (struct variable_union_info),
3039 variable_union_info_cmp_pos);
3040
3041 /* Reconnect the nodes in sorted order. */
3042 for (ii = 1; ii < n; ii++)
3043 vui[ii - 1].lc->next = vui[ii].lc;
3044 vui[n - 1].lc->next = NULL;
3045 dst->var_part[k].loc_chain = vui[0].lc;
3046 }
3047
3048 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3049 }
3050 i--;
3051 j--;
3052 }
3053 else if ((i >= 0 && j >= 0
3054 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3055 || i < 0)
3056 {
3057 dst->var_part[k] = dst->var_part[j];
3058 j--;
3059 }
3060 else if ((i >= 0 && j >= 0
3061 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3062 || j < 0)
3063 {
3064 location_chain *nextp;
3065
3066 /* Copy the chain from SRC. */
3067 nextp = &dst->var_part[k].loc_chain;
3068 for (node = src->var_part[i].loc_chain; node; node = node->next)
3069 {
3070 location_chain new_lc;
3071
3072 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3073 new_lc->next = NULL;
3074 new_lc->init = node->init;
3075 if (!node->set_src || MEM_P (node->set_src))
3076 new_lc->set_src = NULL;
3077 else
3078 new_lc->set_src = node->set_src;
3079 new_lc->loc = node->loc;
3080
3081 *nextp = new_lc;
3082 nextp = &new_lc->next;
3083 }
3084
3085 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3086 i--;
3087 }
3088 dst->var_part[k].cur_loc = NULL;
3089 }
3090
3091 if (flag_var_tracking_uninit)
3092 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3093 {
3094 location_chain node, node2;
3095 for (node = src->var_part[i].loc_chain; node; node = node->next)
3096 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3097 if (rtx_equal_p (node->loc, node2->loc))
3098 {
3099 if (node->init > node2->init)
3100 node2->init = node->init;
3101 }
3102 }
3103
3104 /* Continue traversing the hash table. */
3105 return 1;
3106 }
3107
3108 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3109
3110 static void
3111 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3112 {
3113 int i;
3114
3115 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3116 attrs_list_union (&dst->regs[i], src->regs[i]);
3117
3118 if (dst->vars == empty_shared_hash)
3119 {
3120 shared_hash_destroy (dst->vars);
3121 dst->vars = shared_hash_copy (src->vars);
3122 }
3123 else
3124 {
3125 variable_iterator_type hi;
3126 variable var;
3127
3128 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3129 var, variable, hi)
3130 variable_union (var, dst);
3131 }
3132 }
3133
3134 /* Whether the value is currently being expanded. */
3135 #define VALUE_RECURSED_INTO(x) \
3136 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3137
3138 /* Whether no expansion was found, saving useless lookups.
3139 It must only be set when VALUE_CHANGED is clear. */
3140 #define NO_LOC_P(x) \
3141 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3142
3143 /* Whether cur_loc in the value needs to be (re)computed. */
3144 #define VALUE_CHANGED(x) \
3145 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3146 /* Whether cur_loc in the decl needs to be (re)computed. */
3147 #define DECL_CHANGED(x) TREE_VISITED (x)
3148
3149 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3150 user DECLs, this means they're in changed_variables. Values and
3151 debug exprs may be left with this flag set if no user variable
3152 requires them to be evaluated. */
3153
3154 static inline void
3155 set_dv_changed (decl_or_value dv, bool newv)
3156 {
3157 switch (dv_onepart_p (dv))
3158 {
3159 case ONEPART_VALUE:
3160 if (newv)
3161 NO_LOC_P (dv_as_value (dv)) = false;
3162 VALUE_CHANGED (dv_as_value (dv)) = newv;
3163 break;
3164
3165 case ONEPART_DEXPR:
3166 if (newv)
3167 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3168 /* Fall through... */
3169
3170 default:
3171 DECL_CHANGED (dv_as_decl (dv)) = newv;
3172 break;
3173 }
3174 }
3175
3176 /* Return true if DV needs to have its cur_loc recomputed. */
3177
3178 static inline bool
3179 dv_changed_p (decl_or_value dv)
3180 {
3181 return (dv_is_value_p (dv)
3182 ? VALUE_CHANGED (dv_as_value (dv))
3183 : DECL_CHANGED (dv_as_decl (dv)));
3184 }
3185
3186 /* Return a location list node whose loc is rtx_equal to LOC, in the
3187 location list of a one-part variable or value VAR, or in that of
3188 any values recursively mentioned in the location lists. VARS must
3189 be in star-canonical form. */
3190
3191 static location_chain
3192 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3193 {
3194 location_chain node;
3195 enum rtx_code loc_code;
3196
3197 if (!var)
3198 return NULL;
3199
3200 gcc_checking_assert (var->onepart);
3201
3202 if (!var->n_var_parts)
3203 return NULL;
3204
3205 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3206
3207 loc_code = GET_CODE (loc);
3208 for (node = var->var_part[0].loc_chain; node; node = node->next)
3209 {
3210 decl_or_value dv;
3211 variable rvar;
3212
3213 if (GET_CODE (node->loc) != loc_code)
3214 {
3215 if (GET_CODE (node->loc) != VALUE)
3216 continue;
3217 }
3218 else if (loc == node->loc)
3219 return node;
3220 else if (loc_code != VALUE)
3221 {
3222 if (rtx_equal_p (loc, node->loc))
3223 return node;
3224 continue;
3225 }
3226
3227 /* Since we're in star-canonical form, we don't need to visit
3228 non-canonical nodes: one-part variables and non-canonical
3229 values would only point back to the canonical node. */
3230 if (dv_is_value_p (var->dv)
3231 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3232 {
3233 /* Skip all subsequent VALUEs. */
3234 while (node->next && GET_CODE (node->next->loc) == VALUE)
3235 {
3236 node = node->next;
3237 gcc_checking_assert (!canon_value_cmp (node->loc,
3238 dv_as_value (var->dv)));
3239 if (loc == node->loc)
3240 return node;
3241 }
3242 continue;
3243 }
3244
3245 gcc_checking_assert (node == var->var_part[0].loc_chain);
3246 gcc_checking_assert (!node->next);
3247
3248 dv = dv_from_value (node->loc);
3249 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3250 return find_loc_in_1pdv (loc, rvar, vars);
3251 }
3252
3253 /* ??? Gotta look in cselib_val locations too. */
3254
3255 return NULL;
3256 }
3257
3258 /* Hash table iteration argument passed to variable_merge. */
3259 struct dfset_merge
3260 {
3261 /* The set in which the merge is to be inserted. */
3262 dataflow_set *dst;
3263 /* The set that we're iterating in. */
3264 dataflow_set *cur;
3265 /* The set that may contain the other dv we are to merge with. */
3266 dataflow_set *src;
3267 /* Number of onepart dvs in src. */
3268 int src_onepart_cnt;
3269 };
3270
3271 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3272 loc_cmp order, and it is maintained as such. */
3273
3274 static void
3275 insert_into_intersection (location_chain *nodep, rtx loc,
3276 enum var_init_status status)
3277 {
3278 location_chain node;
3279 int r;
3280
3281 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3282 if ((r = loc_cmp (node->loc, loc)) == 0)
3283 {
3284 node->init = MIN (node->init, status);
3285 return;
3286 }
3287 else if (r > 0)
3288 break;
3289
3290 node = (location_chain) pool_alloc (loc_chain_pool);
3291
3292 node->loc = loc;
3293 node->set_src = NULL;
3294 node->init = status;
3295 node->next = *nodep;
3296 *nodep = node;
3297 }
3298
3299 /* Insert in DEST the intersection of the locations present in both
3300 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3301 variable in DSM->cur, whereas S2VAR is from DSM->src. DEST
3302 accumulates locations for a variable in DSM->dst. */
3303
3304 static void
3305 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3306 location_chain s1node, variable s2var)
3307 {
3308 dataflow_set *s1set = dsm->cur;
3309 dataflow_set *s2set = dsm->src;
3310 location_chain found;
3311
3312 if (s2var)
3313 {
3314 location_chain s2node;
3315
3316 gcc_checking_assert (s2var->onepart);
3317
3318 if (s2var->n_var_parts)
3319 {
3320 s2node = s2var->var_part[0].loc_chain;
3321
3322 for (; s1node && s2node;
3323 s1node = s1node->next, s2node = s2node->next)
3324 if (s1node->loc != s2node->loc)
3325 break;
3326 else if (s1node->loc == val)
3327 continue;
3328 else
3329 insert_into_intersection (dest, s1node->loc,
3330 MIN (s1node->init, s2node->init));
3331 }
3332 }
3333
3334 for (; s1node; s1node = s1node->next)
3335 {
3336 if (s1node->loc == val)
3337 continue;
3338
3339 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3340 shared_hash_htab (s2set->vars))))
3341 {
3342 insert_into_intersection (dest, s1node->loc,
3343 MIN (s1node->init, found->init));
3344 continue;
3345 }
3346
3347 if (GET_CODE (s1node->loc) == VALUE
3348 && !VALUE_RECURSED_INTO (s1node->loc))
3349 {
3350 decl_or_value dv = dv_from_value (s1node->loc);
3351 variable svar = shared_hash_find (s1set->vars, dv);
3352 if (svar)
3353 {
3354 if (svar->n_var_parts == 1)
3355 {
3356 VALUE_RECURSED_INTO (s1node->loc) = true;
3357 intersect_loc_chains (val, dest, dsm,
3358 svar->var_part[0].loc_chain,
3359 s2var);
3360 VALUE_RECURSED_INTO (s1node->loc) = false;
3361 }
3362 }
3363 }
3364
3365 /* ??? Gotta look in cselib_val locations too. */
3366
3367 /* ??? if the location is equivalent to any location in src,
3368 searched recursively
3369
3370 add to dst the values needed to represent the equivalence
3371
3372 telling whether locations S is equivalent to another dv's
3373 location list:
3374
3375 for each location D in the list
3376
3377 if S and D satisfy rtx_equal_p, then it is present
3378
3379 else if D is a value, recurse without cycles
3380
3381 else if S and D have the same CODE and MODE
3382
3383 for each operand oS and the corresponding oD
3384
3385 if oS and oD are not equivalent, then S and D are not equivalent
3386
3387 else if they are RTX vectors
3388
3389 if any vector oS element is not equivalent to its respective oD,
3390 then S and D are not equivalent
3391
3392 */
3393
3394
3395 }
3396 }
3397
3398 /* Return -1 if X should be before Y in a location list for a 1-part
3399 variable, 1 if Y should be before X, and 0 if they're equivalent
3400 and should not appear in the list. */
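/* In the resulting total order, REGs come first (ordered by REGNO),
   then MEMs (ordered by address), then VALUEs (more canonical
   first); ENTRY_VALUEs sort after every other remaining code, and
   anything else is ordered by GET_CODE and then operand by
   operand. */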
3401
3402 static int
3403 loc_cmp (rtx x, rtx y)
3404 {
3405 int i, j, r;
3406 RTX_CODE code = GET_CODE (x);
3407 const char *fmt;
3408
3409 if (x == y)
3410 return 0;
3411
3412 if (REG_P (x))
3413 {
3414 if (!REG_P (y))
3415 return -1;
3416 gcc_assert (GET_MODE (x) == GET_MODE (y));
3417 if (REGNO (x) == REGNO (y))
3418 return 0;
3419 else if (REGNO (x) < REGNO (y))
3420 return -1;
3421 else
3422 return 1;
3423 }
3424
3425 if (REG_P (y))
3426 return 1;
3427
3428 if (MEM_P (x))
3429 {
3430 if (!MEM_P (y))
3431 return -1;
3432 gcc_assert (GET_MODE (x) == GET_MODE (y));
3433 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3434 }
3435
3436 if (MEM_P (y))
3437 return 1;
3438
3439 if (GET_CODE (x) == VALUE)
3440 {
3441 if (GET_CODE (y) != VALUE)
3442 return -1;
3443 /* Don't assert the modes are the same, that is true only
3444 when not recursing. (subreg:QI (value:SI 1:1) 0)
3445 and (subreg:QI (value:DI 2:2) 0) can be compared,
3446 even when the modes are different. */
3447 if (canon_value_cmp (x, y))
3448 return -1;
3449 else
3450 return 1;
3451 }
3452
3453 if (GET_CODE (y) == VALUE)
3454 return 1;
3455
3456 /* Entry value is the least preferable kind of expression. */
3457 if (GET_CODE (x) == ENTRY_VALUE)
3458 {
3459 if (GET_CODE (y) != ENTRY_VALUE)
3460 return 1;
3461 gcc_assert (GET_MODE (x) == GET_MODE (y));
3462 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3463 }
3464
3465 if (GET_CODE (y) == ENTRY_VALUE)
3466 return -1;
3467
3468 if (GET_CODE (x) == GET_CODE (y))
3469 /* Compare operands below. */;
3470 else if (GET_CODE (x) < GET_CODE (y))
3471 return -1;
3472 else
3473 return 1;
3474
3475 gcc_assert (GET_MODE (x) == GET_MODE (y));
3476
3477 if (GET_CODE (x) == DEBUG_EXPR)
3478 {
3479 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3480 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3481 return -1;
3482 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3483 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3484 return 1;
3485 }
3486
3487 fmt = GET_RTX_FORMAT (code);
3488 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3489 switch (fmt[i])
3490 {
3491 case 'w':
3492 if (XWINT (x, i) == XWINT (y, i))
3493 break;
3494 else if (XWINT (x, i) < XWINT (y, i))
3495 return -1;
3496 else
3497 return 1;
3498
3499 case 'n':
3500 case 'i':
3501 if (XINT (x, i) == XINT (y, i))
3502 break;
3503 else if (XINT (x, i) < XINT (y, i))
3504 return -1;
3505 else
3506 return 1;
3507
3508 case 'V':
3509 case 'E':
3510 /* Compare the vector length first. */
3511 if (XVECLEN (x, i) == XVECLEN (y, i))
3512 /* Compare the vector's elements. */;
3513 else if (XVECLEN (x, i) < XVECLEN (y, i))
3514 return -1;
3515 else
3516 return 1;
3517
3518 for (j = 0; j < XVECLEN (x, i); j++)
3519 if ((r = loc_cmp (XVECEXP (x, i, j),
3520 XVECEXP (y, i, j))))
3521 return r;
3522 break;
3523
3524 case 'e':
3525 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3526 return r;
3527 break;
3528
3529 case 'S':
3530 case 's':
3531 if (XSTR (x, i) == XSTR (y, i))
3532 break;
3533 if (!XSTR (x, i))
3534 return -1;
3535 if (!XSTR (y, i))
3536 return 1;
3537 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3538 break;
3539 else if (r < 0)
3540 return -1;
3541 else
3542 return 1;
3543
3544 case 'u':
3545 /* These are just backpointers, so they don't matter. */
3546 break;
3547
3548 case '0':
3549 case 't':
3550 break;
3551
3552 /* It is believed that rtx's at this level will never
3553 contain anything but integers and other rtx's,
3554 except for within LABEL_REFs and SYMBOL_REFs. */
3555 default:
3556 gcc_unreachable ();
3557 }
3558 if (CONST_WIDE_INT_P (x))
3559 {
3560 /* Compare the vector length first. */
3561 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3562 return 1;
3563 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3564 return -1;
3565
3566 /* Compare the vector's elements. */
3567 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3568 {
3569 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3570 return -1;
3571 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3572 return 1;
3573 }
3574 }
3575
3576 return 0;
3577 }
3578
3579 #if ENABLE_CHECKING
3580 /* Check the order of entries in one-part variables. */
3581
3582 int
3583 canonicalize_loc_order_check (variable_def **slot,
3584 dataflow_set *data ATTRIBUTE_UNUSED)
3585 {
3586 variable var = *slot;
3587 location_chain node, next;
3588
3589 #ifdef ENABLE_RTL_CHECKING
3590 int i;
3591 for (i = 0; i < var->n_var_parts; i++)
3592 gcc_assert (var->var_part[i].cur_loc == NULL);
3593 gcc_assert (!var->in_changed_variables);
3594 #endif
3595
3596 if (!var->onepart)
3597 return 1;
3598
3599 gcc_assert (var->n_var_parts == 1);
3600 node = var->var_part[0].loc_chain;
3601 gcc_assert (node);
3602
3603 while ((next = node->next))
3604 {
3605 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3606 node = next;
3607 }
3608
3609 return 1;
3610 }
3611 #endif
3612
3613 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3614 more likely to be chosen as canonical for an equivalence set.
3615 Ensure less likely values can reach more likely neighbors, making
3616 the connections bidirectional. */
3617
3618 int
3619 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3620 {
3621 variable var = *slot;
3622 decl_or_value dv = var->dv;
3623 rtx val;
3624 location_chain node;
3625
3626 if (!dv_is_value_p (dv))
3627 return 1;
3628
3629 gcc_checking_assert (var->n_var_parts == 1);
3630
3631 val = dv_as_value (dv);
3632
3633 for (node = var->var_part[0].loc_chain; node; node = node->next)
3634 if (GET_CODE (node->loc) == VALUE)
3635 {
3636 if (canon_value_cmp (node->loc, val))
3637 VALUE_RECURSED_INTO (val) = true;
3638 else
3639 {
3640 decl_or_value odv = dv_from_value (node->loc);
3641 variable_def **oslot;
3642 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3643
3644 set_slot_part (set, val, oslot, odv, 0,
3645 node->init, NULL_RTX);
3646
3647 VALUE_RECURSED_INTO (node->loc) = true;
3648 }
3649 }
3650
3651 return 1;
3652 }
3653
3654 /* Remove redundant entries from equivalence lists in onepart
3655 variables, canonicalizing equivalence sets into star shapes. */
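/* For example, if V5's list links to V3 and V3's list links to V1,
   with V1 the most canonical of the three, everything is pushed to
   V1: its list ends up with backlinks to V3 and V5, while V3 and V5
   each keep a single link to V1. */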
3656
3657 int
3658 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3659 {
3660 variable var = *slot;
3661 decl_or_value dv = var->dv;
3662 location_chain node;
3663 decl_or_value cdv;
3664 rtx val, cval;
3665 variable_def **cslot;
3666 bool has_value;
3667 bool has_marks;
3668
3669 if (!var->onepart)
3670 return 1;
3671
3672 gcc_checking_assert (var->n_var_parts == 1);
3673
3674 if (dv_is_value_p (dv))
3675 {
3676 cval = dv_as_value (dv);
3677 if (!VALUE_RECURSED_INTO (cval))
3678 return 1;
3679 VALUE_RECURSED_INTO (cval) = false;
3680 }
3681 else
3682 cval = NULL_RTX;
3683
3684 restart:
3685 val = cval;
3686 has_value = false;
3687 has_marks = false;
3688
3689 gcc_assert (var->n_var_parts == 1);
3690
3691 for (node = var->var_part[0].loc_chain; node; node = node->next)
3692 if (GET_CODE (node->loc) == VALUE)
3693 {
3694 has_value = true;
3695 if (VALUE_RECURSED_INTO (node->loc))
3696 has_marks = true;
3697 if (canon_value_cmp (node->loc, cval))
3698 cval = node->loc;
3699 }
3700
3701 if (!has_value)
3702 return 1;
3703
3704 if (cval == val)
3705 {
3706 if (!has_marks || dv_is_decl_p (dv))
3707 return 1;
3708
3709 /* Keep it marked so that we revisit it, either after visiting a
3710 child node, or after visiting a new parent that might be
3711 discovered later. */
3712 VALUE_RECURSED_INTO (val) = true;
3713
3714 for (node = var->var_part[0].loc_chain; node; node = node->next)
3715 if (GET_CODE (node->loc) == VALUE
3716 && VALUE_RECURSED_INTO (node->loc))
3717 {
3718 cval = node->loc;
3719 restart_with_cval:
3720 VALUE_RECURSED_INTO (cval) = false;
3721 dv = dv_from_value (cval);
3722 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3723 if (!slot)
3724 {
3725 gcc_assert (dv_is_decl_p (var->dv));
3726 /* The canonical value was reset and dropped.
3727 Remove it. */
3728 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3729 return 1;
3730 }
3731 var = *slot;
3732 gcc_assert (dv_is_value_p (var->dv));
3733 if (var->n_var_parts == 0)
3734 return 1;
3735 gcc_assert (var->n_var_parts == 1);
3736 goto restart;
3737 }
3738
3739 VALUE_RECURSED_INTO (val) = false;
3740
3741 return 1;
3742 }
3743
3744 /* Push values to the canonical one. */
3745 cdv = dv_from_value (cval);
3746 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3747
3748 for (node = var->var_part[0].loc_chain; node; node = node->next)
3749 if (node->loc != cval)
3750 {
3751 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3752 node->init, NULL_RTX);
3753 if (GET_CODE (node->loc) == VALUE)
3754 {
3755 decl_or_value ndv = dv_from_value (node->loc);
3756
3757 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3758 NO_INSERT);
3759
3760 if (canon_value_cmp (node->loc, val))
3761 {
3762 /* If it could have been a local minimum, it's not any more,
3763 since it's now a neighbor of cval, so it may have to push
3764 to it. Conversely, if it wouldn't have prevailed over
3765 val, then whatever mark it has is fine: if it was to
3766 push, it will now push to a more canonical node, but if
3767 it wasn't, then it has already pushed any values it might
3768 have to. */
3769 VALUE_RECURSED_INTO (node->loc) = true;
3770 /* Make sure we visit node->loc by ensuring that cval is
3771 visited too. */
3772 VALUE_RECURSED_INTO (cval) = true;
3773 }
3774 else if (!VALUE_RECURSED_INTO (node->loc))
3775 /* If we have no need to "recurse" into this node, it's
3776 already "canonicalized", so drop the link to the old
3777 parent. */
3778 clobber_variable_part (set, cval, ndv, 0, NULL);
3779 }
3780 else if (GET_CODE (node->loc) == REG)
3781 {
3782 attrs list = set->regs[REGNO (node->loc)], *listp;
3783
3784 /* Change an existing attribute referring to dv so that it
3785 refers to cdv, removing any duplicate this might
3786 introduce, and checking that no previous duplicates
3787 existed, all in a single pass. */
3788
3789 while (list)
3790 {
3791 if (list->offset == 0
3792 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3793 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3794 break;
3795
3796 list = list->next;
3797 }
3798
3799 gcc_assert (list);
3800 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3801 {
3802 list->dv = cdv;
3803 for (listp = &list->next; (list = *listp); listp = &list->next)
3804 {
3805 if (list->offset)
3806 continue;
3807
3808 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3809 {
3810 *listp = list->next;
3811 pool_free (attrs_pool, list);
3812 list = *listp;
3813 break;
3814 }
3815
3816 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3817 }
3818 }
3819 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3820 {
3821 for (listp = &list->next; (list = *listp); listp = &list->next)
3822 {
3823 if (list->offset)
3824 continue;
3825
3826 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3827 {
3828 *listp = list->next;
3829 pool_free (attrs_pool, list);
3830 list = *listp;
3831 break;
3832 }
3833
3834 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3835 }
3836 }
3837 else
3838 gcc_unreachable ();
3839
3840 #if ENABLE_CHECKING
3841 while (list)
3842 {
3843 if (list->offset == 0
3844 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3845 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3846 gcc_unreachable ();
3847
3848 list = list->next;
3849 }
3850 #endif
3851 }
3852 }
3853
3854 if (val)
3855 set_slot_part (set, val, cslot, cdv, 0,
3856 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3857
3858 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3859
3860 /* Variable may have been unshared. */
3861 var = *slot;
3862 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3863 && var->var_part[0].loc_chain->next == NULL);
3864
3865 if (VALUE_RECURSED_INTO (cval))
3866 goto restart_with_cval;
3867
3868 return 1;
3869 }
3870
3871 /* Bind one-part variables to the canonical value in an equivalence
3872 set. Not doing this causes dataflow convergence failure in rare
3873 circumstances, see PR42873. Unfortunately we can't do this
3874 efficiently as part of canonicalize_values_star, since we may not
3875 have determined or even seen the canonical value of a set when we
3876 get to a variable that references another member of the set. */
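/* Illustrative example (the VALUE names below are made up, not part
   of the pass): suppose VALUEs V1 and V2 are equivalent and
   canon_value_cmp prefers V1.  A one-part variable bound as D -> V2
   is rebound below to D -> V1, so that two sets which agree up to
   the equivalence also compare equal and the dataflow iteration can
   converge.  */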
3877
3878 int
3879 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3880 {
3881 variable var = *slot;
3882 decl_or_value dv = var->dv;
3883 location_chain node;
3884 rtx cval;
3885 decl_or_value cdv;
3886 variable_def **cslot;
3887 variable cvar;
3888 location_chain cnode;
3889
3890 if (!var->onepart || var->onepart == ONEPART_VALUE)
3891 return 1;
3892
3893 gcc_assert (var->n_var_parts == 1);
3894
3895 node = var->var_part[0].loc_chain;
3896
3897 if (GET_CODE (node->loc) != VALUE)
3898 return 1;
3899
3900 gcc_assert (!node->next);
3901 cval = node->loc;
3902
3903 /* Push values to the canonical one. */
3904 cdv = dv_from_value (cval);
3905 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3906 if (!cslot)
3907 return 1;
3908 cvar = *cslot;
3909 gcc_assert (cvar->n_var_parts == 1);
3910
3911 cnode = cvar->var_part[0].loc_chain;
3912
3913 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3914 that are not "more canonical" than it. */
3915 if (GET_CODE (cnode->loc) != VALUE
3916 || !canon_value_cmp (cnode->loc, cval))
3917 return 1;
3918
3919 /* CVAL was found to be non-canonical. Change the variable to point
3920 to the canonical VALUE. */
3921 gcc_assert (!cnode->next);
3922 cval = cnode->loc;
3923
3924 slot = set_slot_part (set, cval, slot, dv, 0,
3925 node->init, node->set_src);
3926 clobber_slot_part (set, cval, slot, 0, node->set_src);
3927
3928 return 1;
3929 }
3930
3931 /* Combine the variable or value S1VAR (in DSM->cur) with the
3932 corresponding entry in DSM->src. Multi-part variables are combined
3933 with variable_union, whereas onepart dvs are combined with
3934 intersection. */
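/* A rough example of the two rules: a multi-part variable located in
   {r1} in DSM->cur and in {mem} in DSM->src ends up with the union
   {r1, mem}, while a one-part dv with chains {V1, r1} and {V1, r2}
   keeps only the intersection {V1}, since a location valid on just
   one incoming path must not be reported after the join.  */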
3935
3936 static int
3937 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3938 {
3939 dataflow_set *dst = dsm->dst;
3940 variable_def **dstslot;
3941 variable s2var, dvar = NULL;
3942 decl_or_value dv = s1var->dv;
3943 onepart_enum_t onepart = s1var->onepart;
3944 rtx val;
3945 hashval_t dvhash;
3946 location_chain node, *nodep;
3947
3948 /* If the incoming onepart variable has an empty location list, then
3949 the intersection will be just as empty. For other variables,
3950 it's always union. */
3951 gcc_checking_assert (s1var->n_var_parts
3952 && s1var->var_part[0].loc_chain);
3953
3954 if (!onepart)
3955 return variable_union (s1var, dst);
3956
3957 gcc_checking_assert (s1var->n_var_parts == 1);
3958
3959 dvhash = dv_htab_hash (dv);
3960 if (dv_is_value_p (dv))
3961 val = dv_as_value (dv);
3962 else
3963 val = NULL;
3964
3965 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3966 if (!s2var)
3967 {
3968 dst_can_be_shared = false;
3969 return 1;
3970 }
3971
3972 dsm->src_onepart_cnt--;
3973 gcc_assert (s2var->var_part[0].loc_chain
3974 && s2var->onepart == onepart
3975 && s2var->n_var_parts == 1);
3976
3977 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3978 if (dstslot)
3979 {
3980 dvar = *dstslot;
3981 gcc_assert (dvar->refcount == 1
3982 && dvar->onepart == onepart
3983 && dvar->n_var_parts == 1);
3984 nodep = &dvar->var_part[0].loc_chain;
3985 }
3986 else
3987 {
3988 nodep = &node;
3989 node = NULL;
3990 }
3991
3992 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3993 {
3994 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3995 dvhash, INSERT);
3996 *dstslot = dvar = s2var;
3997 dvar->refcount++;
3998 }
3999 else
4000 {
4001 dst_can_be_shared = false;
4002
4003 intersect_loc_chains (val, nodep, dsm,
4004 s1var->var_part[0].loc_chain, s2var);
4005
4006 if (!dstslot)
4007 {
4008 if (node)
4009 {
4010 dvar = (variable) pool_alloc (onepart_pool (onepart));
4011 dvar->dv = dv;
4012 dvar->refcount = 1;
4013 dvar->n_var_parts = 1;
4014 dvar->onepart = onepart;
4015 dvar->in_changed_variables = false;
4016 dvar->var_part[0].loc_chain = node;
4017 dvar->var_part[0].cur_loc = NULL;
4018 if (onepart)
4019 VAR_LOC_1PAUX (dvar) = NULL;
4020 else
4021 VAR_PART_OFFSET (dvar, 0) = 0;
4022
4023 dstslot
4024 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4025 INSERT);
4026 gcc_assert (!*dstslot);
4027 *dstslot = dvar;
4028 }
4029 else
4030 return 1;
4031 }
4032 }
4033
4034 nodep = &dvar->var_part[0].loc_chain;
4035 while ((node = *nodep))
4036 {
4037 location_chain *nextp = &node->next;
4038
4039 if (GET_CODE (node->loc) == REG)
4040 {
4041 attrs list;
4042
4043 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4044 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4045 && dv_is_value_p (list->dv))
4046 break;
4047
4048 if (!list)
4049 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4050 dv, 0, node->loc);
4051 /* If this value became canonical for another value that had
4052 this register, we want to leave it alone. */
4053 else if (dv_as_value (list->dv) != val)
4054 {
4055 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4056 dstslot, dv, 0,
4057 node->init, NULL_RTX);
4058 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4059
4060 /* Since nextp points into the removed node, we can't
4061 use it. The pointer to the next node moved to nodep.
4062 However, if the variable we're walking is unshared
4063 during our walk, we'll keep walking the location list
4064 of the previously-shared variable, in which case the
4065 node won't have been removed, and we'll want to skip
4066 it. That's why we test *nodep here. */
4067 if (*nodep != node)
4068 nextp = nodep;
4069 }
4070 }
4071 else
4072 /* Canonicalization puts registers first, so we don't have to
4073 walk the whole list. */
4074 break;
4075 nodep = nextp;
4076 }
4077
4078 if (dvar != *dstslot)
4079 dvar = *dstslot;
4080 nodep = &dvar->var_part[0].loc_chain;
4081
4082 if (val)
4083 {
4084 /* Mark all referenced nodes for canonicalization, and make sure
4085 we have mutual equivalence links. */
4086 VALUE_RECURSED_INTO (val) = true;
4087 for (node = *nodep; node; node = node->next)
4088 if (GET_CODE (node->loc) == VALUE)
4089 {
4090 VALUE_RECURSED_INTO (node->loc) = true;
4091 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4092 node->init, NULL, INSERT);
4093 }
4094
4095 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4096 gcc_assert (*dstslot == dvar);
4097 canonicalize_values_star (dstslot, dst);
4098 gcc_checking_assert (dstslot
4099 == shared_hash_find_slot_noinsert_1 (dst->vars,
4100 dv, dvhash));
4101 dvar = *dstslot;
4102 }
4103 else
4104 {
4105 bool has_value = false, has_other = false;
4106
4107 /* If we have one value and anything else, we're going to
4108 canonicalize this, so make sure all values have an entry in
4109 the table and are marked for canonicalization. */
4110 for (node = *nodep; node; node = node->next)
4111 {
4112 if (GET_CODE (node->loc) == VALUE)
4113 {
4114 /* If this was marked during register canonicalization,
4115 we know we have to canonicalize values. */
4116 if (has_value)
4117 has_other = true;
4118 has_value = true;
4119 if (has_other)
4120 break;
4121 }
4122 else
4123 {
4124 has_other = true;
4125 if (has_value)
4126 break;
4127 }
4128 }
4129
4130 if (has_value && has_other)
4131 {
4132 for (node = *nodep; node; node = node->next)
4133 {
4134 if (GET_CODE (node->loc) == VALUE)
4135 {
4136 decl_or_value dv = dv_from_value (node->loc);
4137 variable_def **slot = NULL;
4138
4139 if (shared_hash_shared (dst->vars))
4140 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4141 if (!slot)
4142 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4143 INSERT);
4144 if (!*slot)
4145 {
4146 variable var = (variable) pool_alloc (onepart_pool
4147 (ONEPART_VALUE));
4148 var->dv = dv;
4149 var->refcount = 1;
4150 var->n_var_parts = 1;
4151 var->onepart = ONEPART_VALUE;
4152 var->in_changed_variables = false;
4153 var->var_part[0].loc_chain = NULL;
4154 var->var_part[0].cur_loc = NULL;
4155 VAR_LOC_1PAUX (var) = NULL;
4156 *slot = var;
4157 }
4158
4159 VALUE_RECURSED_INTO (node->loc) = true;
4160 }
4161 }
4162
4163 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4164 gcc_assert (*dstslot == dvar);
4165 canonicalize_values_star (dstslot, dst);
4166 gcc_checking_assert (dstslot
4167 == shared_hash_find_slot_noinsert_1 (dst->vars,
4168 dv, dvhash));
4169 dvar = *dstslot;
4170 }
4171 }
4172
4173 if (!onepart_variable_different_p (dvar, s2var))
4174 {
4175 variable_htab_free (dvar);
4176 *dstslot = dvar = s2var;
4177 dvar->refcount++;
4178 }
4179 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4180 {
4181 variable_htab_free (dvar);
4182 *dstslot = dvar = s1var;
4183 dvar->refcount++;
4184 dst_can_be_shared = false;
4185 }
4186 else
4187 dst_can_be_shared = false;
4188
4189 return 1;
4190 }
4191
4192 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4193 multi-part variable. Unions of multi-part variables and
4194 intersections of one-part ones will be handled in
4195 variable_merge_over_cur(). */
4196
4197 static int
4198 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4199 {
4200 dataflow_set *dst = dsm->dst;
4201 decl_or_value dv = s2var->dv;
4202
4203 if (!s2var->onepart)
4204 {
4205 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4206 *dstp = s2var;
4207 s2var->refcount++;
4208 return 1;
4209 }
4210
4211 dsm->src_onepart_cnt++;
4212 return 1;
4213 }
4214
4215 /* Combine dataflow set information from SRC2 into DST; the old
4216 contents of DST serve as the second source set. */
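/* Rough flow of the merge below: DST is rebuilt from scratch with
   the per-register attribute lists unioned; multi-part variables are
   copied over from SRC2 by variable_merge_over_src; then every
   variable of the old DST ("cur") is combined with SRC2 by
   variable_merge_over_cur, which performs the one-part
   intersections.  */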
4217
4218 static void
4219 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4220 {
4221 dataflow_set cur = *dst;
4222 dataflow_set *src1 = &cur;
4223 struct dfset_merge dsm;
4224 int i;
4225 size_t src1_elems, src2_elems;
4226 variable_iterator_type hi;
4227 variable var;
4228
4229 src1_elems = shared_hash_htab (src1->vars)->elements ();
4230 src2_elems = shared_hash_htab (src2->vars)->elements ();
4231 dataflow_set_init (dst);
4232 dst->stack_adjust = cur.stack_adjust;
4233 shared_hash_destroy (dst->vars);
4234 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4235 dst->vars->refcount = 1;
4236 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4237
4238 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4239 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4240
4241 dsm.dst = dst;
4242 dsm.src = src2;
4243 dsm.cur = src1;
4244 dsm.src_onepart_cnt = 0;
4245
4246 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4247 var, variable, hi)
4248 variable_merge_over_src (var, &dsm);
4249 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4250 var, variable, hi)
4251 variable_merge_over_cur (var, &dsm);
4252
4253 if (dsm.src_onepart_cnt)
4254 dst_can_be_shared = false;
4255
4256 dataflow_set_destroy (src1);
4257 }
4258
4259 /* Mark register equivalences. */
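/* Illustrative example (register and VALUE numbers made up): if r3
   holds both V5 and V9 in SImode and canon_value_cmp (V5, V9), then
   V5 is chosen as that mode's canonical value, mutual equivalence
   links between V9 and V5 are recorded, and canonicalize_values_star
   then collapses the location chains.  */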
4260
4261 static void
4262 dataflow_set_equiv_regs (dataflow_set *set)
4263 {
4264 int i;
4265 attrs list, *listp;
4266
4267 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4268 {
4269 rtx canon[NUM_MACHINE_MODES];
4270
4271 /* If the list is empty or has just one entry, there is nothing
4272 to canonicalize. */
4273 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4274 continue;
4275
4276 memset (canon, 0, sizeof (canon));
4277
4278 for (list = set->regs[i]; list; list = list->next)
4279 if (list->offset == 0 && dv_is_value_p (list->dv))
4280 {
4281 rtx val = dv_as_value (list->dv);
4282 rtx *cvalp = &canon[(int)GET_MODE (val)];
4283 rtx cval = *cvalp;
4284
4285 if (canon_value_cmp (val, cval))
4286 *cvalp = val;
4287 }
4288
4289 for (list = set->regs[i]; list; list = list->next)
4290 if (list->offset == 0 && dv_onepart_p (list->dv))
4291 {
4292 rtx cval = canon[(int)GET_MODE (list->loc)];
4293
4294 if (!cval)
4295 continue;
4296
4297 if (dv_is_value_p (list->dv))
4298 {
4299 rtx val = dv_as_value (list->dv);
4300
4301 if (val == cval)
4302 continue;
4303
4304 VALUE_RECURSED_INTO (val) = true;
4305 set_variable_part (set, val, dv_from_value (cval), 0,
4306 VAR_INIT_STATUS_INITIALIZED,
4307 NULL, NO_INSERT);
4308 }
4309
4310 VALUE_RECURSED_INTO (cval) = true;
4311 set_variable_part (set, cval, list->dv, 0,
4312 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4313 }
4314
4315 for (listp = &set->regs[i]; (list = *listp);
4316 listp = list ? &list->next : listp)
4317 if (list->offset == 0 && dv_onepart_p (list->dv))
4318 {
4319 rtx cval = canon[(int)GET_MODE (list->loc)];
4320 variable_def **slot;
4321
4322 if (!cval)
4323 continue;
4324
4325 if (dv_is_value_p (list->dv))
4326 {
4327 rtx val = dv_as_value (list->dv);
4328 if (!VALUE_RECURSED_INTO (val))
4329 continue;
4330 }
4331
4332 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4333 canonicalize_values_star (slot, set);
4334 if (*listp != list)
4335 list = NULL;
4336 }
4337 }
4338 }
4339
4340 /* Remove any redundant values in the location list of VAR, which must
4341 be unshared and 1-part. */
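/* The first loop below uses VALUE_RECURSED_INTO as a visited bit:
   the first occurrence of each VALUE sets it and later occurrences
   are freed, so e.g. the chain (V1, r2, V1) becomes (V1, r2); the
   second loop clears the bits again.  */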
4342
4343 static void
4344 remove_duplicate_values (variable var)
4345 {
4346 location_chain node, *nodep;
4347
4348 gcc_assert (var->onepart);
4349 gcc_assert (var->n_var_parts == 1);
4350 gcc_assert (var->refcount == 1);
4351
4352 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4353 {
4354 if (GET_CODE (node->loc) == VALUE)
4355 {
4356 if (VALUE_RECURSED_INTO (node->loc))
4357 {
4358 /* Remove duplicate value node. */
4359 *nodep = node->next;
4360 pool_free (loc_chain_pool, node);
4361 continue;
4362 }
4363 else
4364 VALUE_RECURSED_INTO (node->loc) = true;
4365 }
4366 nodep = &node->next;
4367 }
4368
4369 for (node = var->var_part[0].loc_chain; node; node = node->next)
4370 if (GET_CODE (node->loc) == VALUE)
4371 {
4372 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4373 VALUE_RECURSED_INTO (node->loc) = false;
4374 }
4375 }
4376
4377
4378 /* Hash table iteration argument passed to variable_post_merge. */
4379 struct dfset_post_merge
4380 {
4381 /* The new input set for the current block. */
4382 dataflow_set *set;
4383 /* Pointer to the permanent input set for the current block, or
4384 NULL. */
4385 dataflow_set **permp;
4386 };
4387
4388 /* Create values for incoming expressions associated with one-part
4389 variables that don't have value numbers for them. */
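/* Sketch of the effect: if decl D is only known to be in register
   r4 at the merged block entry, the loop below finds or creates a
   VALUE standing for r4's incoming contents (kept in the permanent
   set) and rewrites D's location to that VALUE, so the binding
   survives later clobbers of r4.  */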
4390
4391 int
4392 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4393 {
4394 dataflow_set *set = dfpm->set;
4395 variable var = *slot;
4396 location_chain node;
4397
4398 if (!var->onepart || !var->n_var_parts)
4399 return 1;
4400
4401 gcc_assert (var->n_var_parts == 1);
4402
4403 if (dv_is_decl_p (var->dv))
4404 {
4405 bool check_dupes = false;
4406
4407 restart:
4408 for (node = var->var_part[0].loc_chain; node; node = node->next)
4409 {
4410 if (GET_CODE (node->loc) == VALUE)
4411 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4412 else if (GET_CODE (node->loc) == REG)
4413 {
4414 attrs att, *attp, *curp = NULL;
4415
4416 if (var->refcount != 1)
4417 {
4418 slot = unshare_variable (set, slot, var,
4419 VAR_INIT_STATUS_INITIALIZED);
4420 var = *slot;
4421 goto restart;
4422 }
4423
4424 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4425 attp = &att->next)
4426 if (att->offset == 0
4427 && GET_MODE (att->loc) == GET_MODE (node->loc))
4428 {
4429 if (dv_is_value_p (att->dv))
4430 {
4431 rtx cval = dv_as_value (att->dv);
4432 node->loc = cval;
4433 check_dupes = true;
4434 break;
4435 }
4436 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4437 curp = attp;
4438 }
4439
4440 if (!curp)
4441 {
4442 curp = attp;
4443 while (*curp)
4444 if ((*curp)->offset == 0
4445 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4446 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4447 break;
4448 else
4449 curp = &(*curp)->next;
4450 gcc_assert (*curp);
4451 }
4452
4453 if (!att)
4454 {
4455 decl_or_value cdv;
4456 rtx cval;
4457
4458 if (!*dfpm->permp)
4459 {
4460 *dfpm->permp = XNEW (dataflow_set);
4461 dataflow_set_init (*dfpm->permp);
4462 }
4463
4464 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4465 att; att = att->next)
4466 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4467 {
4468 gcc_assert (att->offset == 0
4469 && dv_is_value_p (att->dv));
4470 val_reset (set, att->dv);
4471 break;
4472 }
4473
4474 if (att)
4475 {
4476 cdv = att->dv;
4477 cval = dv_as_value (cdv);
4478 }
4479 else
4480 {
4481 /* Create a unique value to hold this register;
4482 it ought to be found and reused in
4483 subsequent rounds. */
4484 cselib_val *v;
4485 gcc_assert (!cselib_lookup (node->loc,
4486 GET_MODE (node->loc), 0,
4487 VOIDmode));
4488 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4489 VOIDmode);
4490 cselib_preserve_value (v);
4491 cselib_invalidate_rtx (node->loc);
4492 cval = v->val_rtx;
4493 cdv = dv_from_value (cval);
4494 if (dump_file)
4495 fprintf (dump_file,
4496 "Created new value %u:%u for reg %i\n",
4497 v->uid, v->hash, REGNO (node->loc));
4498 }
4499
4500 var_reg_decl_set (*dfpm->permp, node->loc,
4501 VAR_INIT_STATUS_INITIALIZED,
4502 cdv, 0, NULL, INSERT);
4503
4504 node->loc = cval;
4505 check_dupes = true;
4506 }
4507
4508 /* Remove the attribute referring to the decl, which now
4509 uses the value for the register; that value either already
4510 exists or will be added when we bring the permanent set in. */
4511 att = *curp;
4512 *curp = att->next;
4513 pool_free (attrs_pool, att);
4514 }
4515 }
4516
4517 if (check_dupes)
4518 remove_duplicate_values (var);
4519 }
4520
4521 return 1;
4522 }
4523
4524 /* Reset values in the permanent set that are not associated with the
4525 chosen expression. */
4526
4527 int
4528 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4529 {
4530 dataflow_set *set = dfpm->set;
4531 variable pvar = *pslot, var;
4532 location_chain pnode;
4533 decl_or_value dv;
4534 attrs att;
4535
4536 gcc_assert (dv_is_value_p (pvar->dv)
4537 && pvar->n_var_parts == 1);
4538 pnode = pvar->var_part[0].loc_chain;
4539 gcc_assert (pnode
4540 && !pnode->next
4541 && REG_P (pnode->loc));
4542
4543 dv = pvar->dv;
4544
4545 var = shared_hash_find (set->vars, dv);
4546 if (var)
4547 {
4548 /* Although variable_post_merge_new_vals may have made decls
4549 non-star-canonical, values that pre-existed in canonical form
4550 remain canonical, and newly-created values reference a single
4551 REG, so they are canonical as well. Since VAR has the
4552 location list for a VALUE, using find_loc_in_1pdv for it is
4553 fine, since VALUEs don't map back to DECLs. */
4554 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4555 return 1;
4556 val_reset (set, dv);
4557 }
4558
4559 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4560 if (att->offset == 0
4561 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4562 && dv_is_value_p (att->dv))
4563 break;
4564
4565 /* If there is a value associated with this register already, create
4566 an equivalence. */
4567 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4568 {
4569 rtx cval = dv_as_value (att->dv);
4570 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4571 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4572 NULL, INSERT);
4573 }
4574 else if (!att)
4575 {
4576 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4577 dv, 0, pnode->loc);
4578 variable_union (pvar, set);
4579 }
4580
4581 return 1;
4582 }
4583
4584 /* Adjust SET after a merge: create values for incoming expressions,
4585 reconcile the permanent set, and canonicalize values and variables. */
4586
4587 static void
4588 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4589 {
4590 struct dfset_post_merge dfpm;
4591
4592 dfpm.set = set;
4593 dfpm.permp = permp;
4594
4595 shared_hash_htab (set->vars)
4596 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4597 if (*permp)
4598 shared_hash_htab ((*permp)->vars)
4599 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4600 shared_hash_htab (set->vars)
4601 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4602 shared_hash_htab (set->vars)
4603 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4604 }
4605
4606 /* Return a node whose loc is a MEM that refers to EXPR in the
4607 location list of a one-part variable or value VAR, or in that of
4608 any values recursively mentioned in the location lists. */
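/* VALUE_RECURSED_INTO serves as the visited bit for this walk, so
   recursion through cycles of mutually-referencing VALUEs
   terminates; it is set around the scan of VAL's own chain and
   cleared on the way out.  */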
4609
4610 static location_chain
4611 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4612 {
4613 location_chain node;
4614 decl_or_value dv;
4615 variable var;
4616 location_chain where = NULL;
4617
4618 if (!val)
4619 return NULL;
4620
4621 gcc_assert (GET_CODE (val) == VALUE
4622 && !VALUE_RECURSED_INTO (val));
4623
4624 dv = dv_from_value (val);
4625 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4626
4627 if (!var)
4628 return NULL;
4629
4630 gcc_assert (var->onepart);
4631
4632 if (!var->n_var_parts)
4633 return NULL;
4634
4635 VALUE_RECURSED_INTO (val) = true;
4636
4637 for (node = var->var_part[0].loc_chain; node; node = node->next)
4638 if (MEM_P (node->loc)
4639 && MEM_EXPR (node->loc) == expr
4640 && INT_MEM_OFFSET (node->loc) == 0)
4641 {
4642 where = node;
4643 break;
4644 }
4645 else if (GET_CODE (node->loc) == VALUE
4646 && !VALUE_RECURSED_INTO (node->loc)
4647 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4648 break;
4649
4650 VALUE_RECURSED_INTO (val) = false;
4651
4652 return where;
4653 }
4654
4655 /* Return TRUE if the value of MEM may vary across a call. */
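/* For example, a MEM whose base decl may be aliased, or is a
   writable global, could be stored to by the callee and so "dies";
   a MEM with no usable MEM_EXPR or base decl is conservatively
   assumed to die as well.  */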
4656
4657 static bool
4658 mem_dies_at_call (rtx mem)
4659 {
4660 tree expr = MEM_EXPR (mem);
4661 tree decl;
4662
4663 if (!expr)
4664 return true;
4665
4666 decl = get_base_address (expr);
4667
4668 if (!decl)
4669 return true;
4670
4671 if (!DECL_P (decl))
4672 return true;
4673
4674 return (may_be_aliased (decl)
4675 || (!TREE_READONLY (decl) && is_global_var (decl)));
4676 }
4677
4678 /* Remove all MEMs from the location list of a hash table entry for a
4679 one-part variable, except those whose MEM attributes map back to
4680 the variable itself, directly or within a VALUE. */
4681
4682 int
4683 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4684 {
4685 variable var = *slot;
4686
4687 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4688 {
4689 tree decl = dv_as_decl (var->dv);
4690 location_chain loc, *locp;
4691 bool changed = false;
4692
4693 if (!var->n_var_parts)
4694 return 1;
4695
4696 gcc_assert (var->n_var_parts == 1);
4697
4698 if (shared_var_p (var, set->vars))
4699 {
4700 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4701 {
4702 /* We want to remove dying MEMs that don't refer to DECL. */
4703 if (GET_CODE (loc->loc) == MEM
4704 && (MEM_EXPR (loc->loc) != decl
4705 || INT_MEM_OFFSET (loc->loc) != 0)
4706 && !mem_dies_at_call (loc->loc))
4707 break;
4708 /* We want to move MEMs that do refer to DECL here. */
4709 else if (GET_CODE (loc->loc) == VALUE
4710 && find_mem_expr_in_1pdv (decl, loc->loc,
4711 shared_hash_htab (set->vars)))
4712 break;
4713 }
4714
4715 if (!loc)
4716 return 1;
4717
4718 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4719 var = *slot;
4720 gcc_assert (var->n_var_parts == 1);
4721 }
4722
4723 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4724 loc; loc = *locp)
4725 {
4726 rtx old_loc = loc->loc;
4727 if (GET_CODE (old_loc) == VALUE)
4728 {
4729 location_chain mem_node
4730 = find_mem_expr_in_1pdv (decl, loc->loc,
4731 shared_hash_htab (set->vars));
4732
4733 /* ??? This picks up only one out of multiple MEMs that
4734 refer to the same variable. Do we ever need to be
4735 concerned about dealing with more than one, or, given
4736 that they should all map to the same variable
4737 location, their addresses will have been merged and
4738 they will be regarded as equivalent? */
4739 if (mem_node)
4740 {
4741 loc->loc = mem_node->loc;
4742 loc->set_src = mem_node->set_src;
4743 loc->init = MIN (loc->init, mem_node->init);
4744 }
4745 }
4746
4747 if (GET_CODE (loc->loc) != MEM
4748 || (MEM_EXPR (loc->loc) == decl
4749 && INT_MEM_OFFSET (loc->loc) == 0)
4750 || !mem_dies_at_call (loc->loc))
4751 {
4752 if (old_loc != loc->loc && emit_notes)
4753 {
4754 if (old_loc == var->var_part[0].cur_loc)
4755 {
4756 changed = true;
4757 var->var_part[0].cur_loc = NULL;
4758 }
4759 }
4760 locp = &loc->next;
4761 continue;
4762 }
4763
4764 if (emit_notes)
4765 {
4766 if (old_loc == var->var_part[0].cur_loc)
4767 {
4768 changed = true;
4769 var->var_part[0].cur_loc = NULL;
4770 }
4771 }
4772 *locp = loc->next;
4773 pool_free (loc_chain_pool, loc);
4774 }
4775
4776 if (!var->var_part[0].loc_chain)
4777 {
4778 var->n_var_parts--;
4779 changed = true;
4780 }
4781 if (changed)
4782 variable_was_changed (var, set);
4783 }
4784
4785 return 1;
4786 }
4787
4788 /* Remove all MEMs from the location list of a hash table entry for a
4789 value. */
4790
4791 int
4792 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4793 {
4794 variable var = *slot;
4795
4796 if (var->onepart == ONEPART_VALUE)
4797 {
4798 location_chain loc, *locp;
4799 bool changed = false;
4800 rtx cur_loc;
4801
4802 gcc_assert (var->n_var_parts == 1);
4803
4804 if (shared_var_p (var, set->vars))
4805 {
4806 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4807 if (GET_CODE (loc->loc) == MEM
4808 && mem_dies_at_call (loc->loc))
4809 break;
4810
4811 if (!loc)
4812 return 1;
4813
4814 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4815 var = *slot;
4816 gcc_assert (var->n_var_parts == 1);
4817 }
4818
4819 if (VAR_LOC_1PAUX (var))
4820 cur_loc = VAR_LOC_FROM (var);
4821 else
4822 cur_loc = var->var_part[0].cur_loc;
4823
4824 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4825 loc; loc = *locp)
4826 {
4827 if (GET_CODE (loc->loc) != MEM
4828 || !mem_dies_at_call (loc->loc))
4829 {
4830 locp = &loc->next;
4831 continue;
4832 }
4833
4834 *locp = loc->next;
4835 /* If we have deleted the location that was last emitted,
4836 we have to emit a new location, so add the variable to the
4837 set of changed variables. */
4838 if (cur_loc == loc->loc)
4839 {
4840 changed = true;
4841 var->var_part[0].cur_loc = NULL;
4842 if (VAR_LOC_1PAUX (var))
4843 VAR_LOC_FROM (var) = NULL;
4844 }
4845 pool_free (loc_chain_pool, loc);
4846 }
4847
4848 if (!var->var_part[0].loc_chain)
4849 {
4850 var->n_var_parts--;
4851 changed = true;
4852 }
4853 if (changed)
4854 variable_was_changed (var, set);
4855 }
4856
4857 return 1;
4858 }
4859
4860 /* Remove all variable-location information about call-clobbered
4861 registers, as well as associations between MEMs and VALUEs. */
4862
4863 static void
4864 dataflow_set_clear_at_call (dataflow_set *set)
4865 {
4866 unsigned int r;
4867 hard_reg_set_iterator hrsi;
4868
4869 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4870 var_regno_delete (set, r);
4871
4872 if (MAY_HAVE_DEBUG_INSNS)
4873 {
4874 set->traversed_vars = set->vars;
4875 shared_hash_htab (set->vars)
4876 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4877 set->traversed_vars = set->vars;
4878 shared_hash_htab (set->vars)
4879 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4880 set->traversed_vars = NULL;
4881 }
4882 }
4883
4884 static bool
4885 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4886 {
4887 location_chain lc1, lc2;
4888
4889 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4890 {
4891 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4892 {
4893 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4894 {
4895 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4896 break;
4897 }
4898 if (rtx_equal_p (lc1->loc, lc2->loc))
4899 break;
4900 }
4901 if (!lc2)
4902 return true;
4903 }
4904 return false;
4905 }
4906
4907 /* Return true if one-part variables VAR1 and VAR2 are different.
4908 They must be in canonical order. */
4909
4910 static bool
4911 onepart_variable_different_p (variable var1, variable var2)
4912 {
4913 location_chain lc1, lc2;
4914
4915 if (var1 == var2)
4916 return false;
4917
4918 gcc_assert (var1->n_var_parts == 1
4919 && var2->n_var_parts == 1);
4920
4921 lc1 = var1->var_part[0].loc_chain;
4922 lc2 = var2->var_part[0].loc_chain;
4923
4924 gcc_assert (lc1 && lc2);
4925
4926 while (lc1 && lc2)
4927 {
4928 if (loc_cmp (lc1->loc, lc2->loc))
4929 return true;
4930 lc1 = lc1->next;
4931 lc2 = lc2->next;
4932 }
4933
4934 return lc1 != lc2;
4935 }
4936
4937 /* Return true if variables VAR1 and VAR2 are different. */
4938
4939 static bool
4940 variable_different_p (variable var1, variable var2)
4941 {
4942 int i;
4943
4944 if (var1 == var2)
4945 return false;
4946
4947 if (var1->onepart != var2->onepart)
4948 return true;
4949
4950 if (var1->n_var_parts != var2->n_var_parts)
4951 return true;
4952
4953 if (var1->onepart && var1->n_var_parts)
4954 {
4955 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4956 && var1->n_var_parts == 1);
4957 /* One-part values have locations in a canonical order. */
4958 return onepart_variable_different_p (var1, var2);
4959 }
4960
4961 for (i = 0; i < var1->n_var_parts; i++)
4962 {
4963 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4964 return true;
4965 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4966 return true;
4967 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4968 return true;
4969 }
4970 return false;
4971 }
4972
4973 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4974
4975 static bool
4976 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4977 {
4978 variable_iterator_type hi;
4979 variable var1;
4980
4981 if (old_set->vars == new_set->vars)
4982 return false;
4983
4984 if (shared_hash_htab (old_set->vars)->elements ()
4985 != shared_hash_htab (new_set->vars)->elements ())
4986 return true;
4987
4988 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
4989 var1, variable, hi)
4990 {
4991 variable_table_type *htab = shared_hash_htab (new_set->vars);
4992 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4993 if (!var2)
4994 {
4995 if (dump_file && (dump_flags & TDF_DETAILS))
4996 {
4997 fprintf (dump_file, "dataflow difference found: removal of:\n");
4998 dump_var (var1);
4999 }
5000 return true;
5001 }
5002
5003 if (variable_different_p (var1, var2))
5004 {
5005 if (dump_file && (dump_flags & TDF_DETAILS))
5006 {
5007 fprintf (dump_file, "dataflow difference found: "
5008 "old and new follow:\n");
5009 dump_var (var1);
5010 dump_var (var2);
5011 }
5012 return true;
5013 }
5014 }
5015
5016 /* No need to traverse the second hashtab: if both have the same number
5017 of elements and every entry of the first was found in the second,
5018 the second can't have any extra entries. */
5019 return false;
5020 }
5021
5022 /* Free the contents of dataflow set SET. */
5023
5024 static void
5025 dataflow_set_destroy (dataflow_set *set)
5026 {
5027 int i;
5028
5029 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5030 attrs_list_clear (&set->regs[i]);
5031
5032 shared_hash_destroy (set->vars);
5033 set->vars = NULL;
5034 }
5035
5036 /* Return true if RTL X contains a SYMBOL_REF. */
5037
5038 static bool
5039 contains_symbol_ref (rtx x)
5040 {
5041 const char *fmt;
5042 RTX_CODE code;
5043 int i;
5044
5045 if (!x)
5046 return false;
5047
5048 code = GET_CODE (x);
5049 if (code == SYMBOL_REF)
5050 return true;
5051
5052 fmt = GET_RTX_FORMAT (code);
5053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5054 {
5055 if (fmt[i] == 'e')
5056 {
5057 if (contains_symbol_ref (XEXP (x, i)))
5058 return true;
5059 }
5060 else if (fmt[i] == 'E')
5061 {
5062 int j;
5063 for (j = 0; j < XVECLEN (x, i); j++)
5064 if (contains_symbol_ref (XVECEXP (x, i, j)))
5065 return true;
5066 }
5067 }
5068
5069 return false;
5070 }
5071
5072 /* Shall EXPR be tracked? */
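/* In short, summarizing the checks below: track DEBUG_EXPR_DECLs
   with RTL, and VAR_DECLs and PARM_DECLs that are not ignored for
   debugging, not static, whose DECL_RTL contains no SYMBOL_REF, and
   whose memory, if any, is a scalar of at most MAX_VAR_PARTS
   bytes.  */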
5073
5074 static bool
5075 track_expr_p (tree expr, bool need_rtl)
5076 {
5077 rtx decl_rtl;
5078 tree realdecl;
5079
5080 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5081 return DECL_RTL_SET_P (expr);
5082
5083 /* If EXPR is not a parameter or a variable do not track it. */
5084 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5085 return 0;
5086
5087 /* It also must have a name... */
5088 if (!DECL_NAME (expr) && need_rtl)
5089 return 0;
5090
5091 /* ... and an RTL assigned to it. */
5092 decl_rtl = DECL_RTL_IF_SET (expr);
5093 if (!decl_rtl && need_rtl)
5094 return 0;
5095
5096 /* If this expression is really a debug alias of some other declaration, we
5097 don't need to track this expression if the ultimate declaration is
5098 ignored. */
5099 realdecl = expr;
5100 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5101 {
5102 realdecl = DECL_DEBUG_EXPR (realdecl);
5103 if (!DECL_P (realdecl))
5104 {
5105 if (handled_component_p (realdecl)
5106 || (TREE_CODE (realdecl) == MEM_REF
5107 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5108 {
5109 HOST_WIDE_INT bitsize, bitpos, maxsize;
5110 tree innerdecl
5111 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5112 &maxsize);
5113 if (!DECL_P (innerdecl)
5114 || DECL_IGNORED_P (innerdecl)
5115 /* Do not track declarations for parts of tracked parameters
5116 since we want to track them as a whole instead. */
5117 || (TREE_CODE (innerdecl) == PARM_DECL
5118 && DECL_MODE (innerdecl) != BLKmode
5119 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5120 || TREE_STATIC (innerdecl)
5121 || bitsize <= 0
5122 || bitpos + bitsize > 256
5123 || bitsize != maxsize)
5124 return 0;
5125 else
5126 realdecl = expr;
5127 }
5128 else
5129 return 0;
5130 }
5131 }
5132
5133 /* Do not track EXPR if REALDECL should be ignored for debugging
5134 purposes. */
5135 if (DECL_IGNORED_P (realdecl))
5136 return 0;
5137
5138 /* Do not track global variables until we are able to emit correct location
5139 list for them. */
5140 if (TREE_STATIC (realdecl))
5141 return 0;
5142
5143 /* When the EXPR is a DECL for an alias of some variable (see example)
5144 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5145 DECL_RTL contains SYMBOL_REF.
5146
5147 Example:
5148 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5149 char **_dl_argv;
5150 */
5151 if (decl_rtl && MEM_P (decl_rtl)
5152 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5153 return 0;
5154
5155 /* If the RTL is a memory, it should not be very large (because that
5156 would be an array or a struct). */
5157 if (decl_rtl && MEM_P (decl_rtl))
5158 {
5159 /* Do not track structures and arrays. */
5160 if (GET_MODE (decl_rtl) == BLKmode
5161 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5162 return 0;
5163 if (MEM_SIZE_KNOWN_P (decl_rtl)
5164 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5165 return 0;
5166 }
5167
5168 DECL_CHANGED (expr) = 0;
5169 DECL_CHANGED (realdecl) = 0;
5170 return 1;
5171 }
5172
5173 /* Determine whether a given LOC refers to the same variable part as
5174 EXPR+OFFSET. */
5175
5176 static bool
5177 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5178 {
5179 tree expr2;
5180 HOST_WIDE_INT offset2;
5181
5182 if (! DECL_P (expr))
5183 return false;
5184
5185 if (REG_P (loc))
5186 {
5187 expr2 = REG_EXPR (loc);
5188 offset2 = REG_OFFSET (loc);
5189 }
5190 else if (MEM_P (loc))
5191 {
5192 expr2 = MEM_EXPR (loc);
5193 offset2 = INT_MEM_OFFSET (loc);
5194 }
5195 else
5196 return false;
5197
5198 if (! expr2 || ! DECL_P (expr2))
5199 return false;
5200
5201 expr = var_debug_decl (expr);
5202 expr2 = var_debug_decl (expr2);
5203
5204 return (expr == expr2 && offset == offset2);
5205 }
5206
5207 /* LOC is a REG or MEM that we would like to track if possible.
5208 If EXPR is null, we don't know what expression LOC refers to,
5209 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5210 LOC is an lvalue register.
5211
5212 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5213 is something we can track. When returning true, store the mode of
5214 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5215 from EXPR in *OFFSET_OUT (if nonnull). */
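/* A typical use, as in use_type/add_uses below, assuming LOC is a
   register:

     if (track_loc_p (loc, REG_EXPR (loc), REG_OFFSET (loc),
                      false, &mode, NULL))
       loc = var_lowpart (mode, loc);

   i.e. narrow LOC to the trackable lowpart in the mode reported
   here.  */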
5216
5217 static bool
5218 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5219 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5220 {
5221 enum machine_mode mode;
5222
5223 if (expr == NULL || !track_expr_p (expr, true))
5224 return false;
5225
5226 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5227 whole subreg, but only the old inner part is really relevant. */
5228 mode = GET_MODE (loc);
5229 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5230 {
5231 enum machine_mode pseudo_mode;
5232
5233 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5234 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5235 {
5236 offset += byte_lowpart_offset (pseudo_mode, mode);
5237 mode = pseudo_mode;
5238 }
5239 }
5240
5241 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5242 Do the same if we are storing to a register and EXPR occupies
5243 the whole of register LOC; in that case, the whole of EXPR is
5244 being changed. We exclude complex modes from the second case
5245 because the real and imaginary parts are represented as separate
5246 pseudo registers, even if the whole complex value fits into one
5247 hard register. */
5248 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5249 || (store_reg_p
5250 && !COMPLEX_MODE_P (DECL_MODE (expr))
5251 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5252 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5253 {
5254 mode = DECL_MODE (expr);
5255 offset = 0;
5256 }
5257
5258 if (offset < 0 || offset >= MAX_VAR_PARTS)
5259 return false;
5260
5261 if (mode_out)
5262 *mode_out = mode;
5263 if (offset_out)
5264 *offset_out = offset;
5265 return true;
5266 }
5267
5268 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5269 want to track. When returning nonnull, make sure that the attributes
5270 on the returned value are updated. */
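/* E.g. the SImode lowpart of a DImode REG is a REG for the
   appropriate word (endian-dependent) with REG_ATTRS updated via
   gen_rtx_REG_offset, while for a MEM it is a plain
   adjust_address_nv; LOC is returned unchanged if it is already in
   MODE, and anything other than a REG or MEM yields NULL.  */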
5271
5272 static rtx
5273 var_lowpart (enum machine_mode mode, rtx loc)
5274 {
5275 unsigned int offset, reg_offset, regno;
5276
5277 if (GET_MODE (loc) == mode)
5278 return loc;
5279
5280 if (!REG_P (loc) && !MEM_P (loc))
5281 return NULL;
5282
5283 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5284
5285 if (MEM_P (loc))
5286 return adjust_address_nv (loc, mode, offset);
5287
5288 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5289 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5290 reg_offset, mode);
5291 return gen_rtx_REG_offset (loc, mode, regno, offset);
5292 }
5293
5294 /* Carry information about uses and stores while walking rtx. */
5295
5296 struct count_use_info
5297 {
5298 /* The insn where the RTX is. */
5299 rtx_insn *insn;
5300
5301 /* The basic block where insn is. */
5302 basic_block bb;
5303
5304 /* The array of n_sets sets in the insn, as determined by cselib. */
5305 struct cselib_set *sets;
5306 int n_sets;
5307
5308 /* True if we're counting stores, false otherwise. */
5309 bool store_p;
5310 };
5311
5312 /* Find a VALUE corresponding to X. */
5313
5314 static inline cselib_val *
5315 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5316 {
5317 int i;
5318
5319 if (cui->sets)
5320 {
5321 /* This is called after uses are set up and before stores are
5322 processed by cselib, so it's safe to look up srcs, but not
5323 dsts. So we look up expressions that appear in srcs or in
5324 dest expressions, but we search the sets array for dests of
5325 stores. */
5326 if (cui->store_p)
5327 {
5328 /* Some targets represent memset and memcpy patterns
5329 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5330 (set (mem:BLK ...) (const_int ...)) or
5331 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5332 in that case, otherwise we end up with mode mismatches. */
5333 if (mode == BLKmode && MEM_P (x))
5334 return NULL;
5335 for (i = 0; i < cui->n_sets; i++)
5336 if (cui->sets[i].dest == x)
5337 return cui->sets[i].src_elt;
5338 }
5339 else
5340 return cselib_lookup (x, mode, 0, VOIDmode);
5341 }
5342
5343 return NULL;
5344 }
5345
5346 /* Replace all registers and addresses in an expression with VALUE
5347 expressions that map back to them, unless the expression is a
5348 register. If no mapping is or can be performed, returns NULL. */
5349
5350 static rtx
5351 replace_expr_with_values (rtx loc)
5352 {
5353 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5354 return NULL;
5355 else if (MEM_P (loc))
5356 {
5357 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5358 get_address_mode (loc), 0,
5359 GET_MODE (loc));
5360 if (addr)
5361 return replace_equiv_address_nv (loc, addr->val_rtx);
5362 else
5363 return NULL;
5364 }
5365 else
5366 return cselib_subst_to_values (loc, VOIDmode);
5367 }
5368
5369 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5370 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5371 RTX. */
5372
5373 static int
5374 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5375 {
5376 rtx loc = *x;
5377
5378 return GET_CODE (loc) == DEBUG_EXPR;
5379 }
5380
5381 /* Determine what kind of micro operation to choose for a USE. Return
5382 MO_CLOBBER if no micro operation is to be generated. */
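/* Roughly: MO_USE for a tracked REG or MEM, MO_USE_NO_VAR for a
   register without a trackable decl, MO_VAL_LOC for a VAR_LOCATION
   pattern, MO_VAL_USE or MO_VAL_SET when cselib values are in play,
   and MO_CLOBBER for everything else.  */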
5383
5384 static enum micro_operation_type
5385 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5386 {
5387 tree expr;
5388
5389 if (cui && cui->sets)
5390 {
5391 if (GET_CODE (loc) == VAR_LOCATION)
5392 {
5393 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5394 {
5395 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5396 if (! VAR_LOC_UNKNOWN_P (ploc))
5397 {
5398 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5399 VOIDmode);
5400
5401 /* ??? flag_float_store and volatile mems are never
5402 given values, but we could in theory use them for
5403 locations. */
5404 gcc_assert (val || 1);
5405 }
5406 return MO_VAL_LOC;
5407 }
5408 else
5409 return MO_CLOBBER;
5410 }
5411
5412 if (REG_P (loc) || MEM_P (loc))
5413 {
5414 if (modep)
5415 *modep = GET_MODE (loc);
5416 if (cui->store_p)
5417 {
5418 if (REG_P (loc)
5419 || (find_use_val (loc, GET_MODE (loc), cui)
5420 && cselib_lookup (XEXP (loc, 0),
5421 get_address_mode (loc), 0,
5422 GET_MODE (loc))))
5423 return MO_VAL_SET;
5424 }
5425 else
5426 {
5427 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5428
5429 if (val && !cselib_preserved_value_p (val))
5430 return MO_VAL_USE;
5431 }
5432 }
5433 }
5434
5435 if (REG_P (loc))
5436 {
5437 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5438
5439 if (loc == cfa_base_rtx)
5440 return MO_CLOBBER;
5441 expr = REG_EXPR (loc);
5442
5443 if (!expr)
5444 return MO_USE_NO_VAR;
5445 else if (target_for_debug_bind (var_debug_decl (expr)))
5446 return MO_CLOBBER;
5447 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5448 false, modep, NULL))
5449 return MO_USE;
5450 else
5451 return MO_USE_NO_VAR;
5452 }
5453 else if (MEM_P (loc))
5454 {
5455 expr = MEM_EXPR (loc);
5456
5457 if (!expr)
5458 return MO_CLOBBER;
5459 else if (target_for_debug_bind (var_debug_decl (expr)))
5460 return MO_CLOBBER;
5461 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5462 false, modep, NULL)
5463 /* Multi-part variables shouldn't refer to one-part
5464 variable names such as VALUEs (never happens) or
5465 DEBUG_EXPRs (only happens in the presence of debug
5466 insns). */
5467 && (!MAY_HAVE_DEBUG_INSNS
5468 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5469 return MO_USE;
5470 else
5471 return MO_CLOBBER;
5472 }
5473
5474 return MO_CLOBBER;
5475 }
5476
5477 /* Log to OUT information about micro-operation MOPT involving X in
5478 INSN of BB. */
5479
5480 static inline void
5481 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5482 enum micro_operation_type mopt, FILE *out)
5483 {
5484 fprintf (out, "bb %i op %i insn %i %s ",
5485 bb->index, VTI (bb)->mos.length (),
5486 INSN_UID (insn), micro_operation_type_name[mopt]);
5487 print_inline_rtx (out, x, 2);
5488 fputc ('\n', out);
5489 }
5490
5491 /* Tell whether the CONCAT used to hold a VALUE and its location
5492 needs value resolution, i.e., an attempt at mapping the location
5493 back to other incoming values. */
5494 #define VAL_NEEDS_RESOLUTION(x) \
5495 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5496 /* Whether the location in the CONCAT is a tracked expression that
5497 should also be handled like a MO_USE. */
5498 #define VAL_HOLDS_TRACK_EXPR(x) \
5499 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5500 /* Whether the location in the CONCAT should be handled like a MO_COPY
5501 as well. */
5502 #define VAL_EXPR_IS_COPIED(x) \
5503 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5504 /* Whether the location in the CONCAT should be handled like a
5505 MO_CLOBBER as well. */
5506 #define VAL_EXPR_IS_CLOBBERED(x) \
5507 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5508
5509 /* All preserved VALUEs. */
5510 static vec<rtx> preserved_values;
5511
5512 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5513
5514 static void
5515 preserve_value (cselib_val *val)
5516 {
5517 cselib_preserve_value (val);
5518 preserved_values.safe_push (val->val_rtx);
5519 }
5520
5521 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5522 any rtxes not suitable for CONST use not replaced by VALUEs
5523 are discovered. */
5524
5525 static int
5526 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5527 {
5528 if (*x == NULL_RTX)
5529 return 0;
5530
5531 switch (GET_CODE (*x))
5532 {
5533 case REG:
5534 case DEBUG_EXPR:
5535 case PC:
5536 case SCRATCH:
5537 case CC0:
5538 case ASM_INPUT:
5539 case ASM_OPERANDS:
5540 return 1;
5541 case MEM:
5542 return !MEM_READONLY_P (*x);
5543 default:
5544 return 0;
5545 }
5546 }
5547
5548 /* Add uses (register and memory references) LOC which will be tracked
5549 to VTI (bb)->mos. INSN is the instruction of which LOC is a part. */
5550
5551 static int
5552 add_uses (rtx *ploc, void *data)
5553 {
5554 rtx loc = *ploc;
5555 enum machine_mode mode = VOIDmode;
5556 struct count_use_info *cui = (struct count_use_info *)data;
5557 enum micro_operation_type type = use_type (loc, cui, &mode);
5558
5559 if (type != MO_CLOBBER)
5560 {
5561 basic_block bb = cui->bb;
5562 micro_operation mo;
5563
5564 mo.type = type;
5565 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5566 mo.insn = cui->insn;
5567
5568 if (type == MO_VAL_LOC)
5569 {
5570 rtx oloc = loc;
5571 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5572 cselib_val *val;
5573
5574 gcc_assert (cui->sets);
5575
5576 if (MEM_P (vloc)
5577 && !REG_P (XEXP (vloc, 0))
5578 && !MEM_P (XEXP (vloc, 0)))
5579 {
5580 rtx mloc = vloc;
5581 enum machine_mode address_mode = get_address_mode (mloc);
5582 cselib_val *val
5583 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5584 GET_MODE (mloc));
5585
5586 if (val && !cselib_preserved_value_p (val))
5587 preserve_value (val);
5588 }
5589
5590 if (CONSTANT_P (vloc)
5591 && (GET_CODE (vloc) != CONST
5592 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5593 /* For constants don't look up any value. */;
5594 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5595 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5596 {
5597 enum machine_mode mode2;
5598 enum micro_operation_type type2;
5599 rtx nloc = NULL;
5600 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5601
5602 if (resolvable)
5603 nloc = replace_expr_with_values (vloc);
5604
5605 if (nloc)
5606 {
5607 oloc = shallow_copy_rtx (oloc);
5608 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5609 }
5610
5611 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5612
5613 type2 = use_type (vloc, 0, &mode2);
5614
5615 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5616 || type2 == MO_CLOBBER);
5617
5618 if (type2 == MO_CLOBBER
5619 && !cselib_preserved_value_p (val))
5620 {
5621 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5622 preserve_value (val);
5623 }
5624 }
5625 else if (!VAR_LOC_UNKNOWN_P (vloc))
5626 {
5627 oloc = shallow_copy_rtx (oloc);
5628 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5629 }
5630
5631 mo.u.loc = oloc;
5632 }
5633 else if (type == MO_VAL_USE)
5634 {
5635 enum machine_mode mode2 = VOIDmode;
5636 enum micro_operation_type type2;
5637 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5638 rtx vloc, oloc = loc, nloc;
5639
5640 gcc_assert (cui->sets);
5641
5642 if (MEM_P (oloc)
5643 && !REG_P (XEXP (oloc, 0))
5644 && !MEM_P (XEXP (oloc, 0)))
5645 {
5646 rtx mloc = oloc;
5647 enum machine_mode address_mode = get_address_mode (mloc);
5648 cselib_val *val
5649 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5650 GET_MODE (mloc));
5651
5652 if (val && !cselib_preserved_value_p (val))
5653 preserve_value (val);
5654 }
5655
5656 type2 = use_type (loc, 0, &mode2);
5657
5658 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5659 || type2 == MO_CLOBBER);
5660
5661 if (type2 == MO_USE)
5662 vloc = var_lowpart (mode2, loc);
5663 else
5664 vloc = oloc;
5665
5666 /* The loc of a MO_VAL_USE may have two forms:
5667
5668 (concat val src): val is at src, a value-based
5669 representation.
5670
5671 (concat (concat val use) src): same as above, with use as
5672 the MO_USE tracked value, if it differs from src.
5673
5674 */
5675
5676 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5677 nloc = replace_expr_with_values (loc);
5678 if (!nloc)
5679 nloc = oloc;
5680
5681 if (vloc != nloc)
5682 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5683 else
5684 oloc = val->val_rtx;
5685
5686 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5687
5688 if (type2 == MO_USE)
5689 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5690 if (!cselib_preserved_value_p (val))
5691 {
5692 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5693 preserve_value (val);
5694 }
5695 }
5696 else
5697 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5698
5699 if (dump_file && (dump_flags & TDF_DETAILS))
5700 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5701 VTI (bb)->mos.safe_push (mo);
5702 }
5703
5704 return 0;
5705 }
5706
5707 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5708
5709 static void
5710 add_uses_1 (rtx *x, void *cui)
5711 {
5712 for_each_rtx (x, add_uses, cui);
5713 }
5714
5715 /* This is the value used during expansion of locations. We want it
5716 to be unbounded, so that variables expanded deep in a recursion
5717 nest are fully evaluated, so that their values are cached
5718 correctly. We avoid recursion cycles through other means, and we
5719 don't unshare RTL, so excess complexity is not a problem. */
5720 #define EXPR_DEPTH (INT_MAX)
5721 /* We use this to keep too-complex expressions from being emitted as
5722 location notes and then into debug information. Users can trade
5723 compile time for ridiculously complex expressions, although they're
5724 seldom useful, and they may often have to be discarded as not
5725 representable anyway. */
5726 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5727
5728 /* Attempt to reverse the EXPR operation in the debug info and record
5729 it in the cselib table. Say, for reg1 = reg2 + 6, even when reg2 is
5730 no longer live we can express its value as VAL - 6. */
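/* Concretely, for (set (reg:SI r1) (plus:SI (reg:SI r2)
   (const_int 6))) where r1's VALUE is VAL, the equivalence recorded
   for r2's VALUE is (minus:SI VAL (const_int 6)); PLUS and MINUS
   swap roles, XOR, NOT and NEG are their own inverses, and
   extensions reverse to lowpart SUBREGs.  */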
5731
5732 static void
5733 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5734 {
5735 rtx src, arg, ret;
5736 cselib_val *v;
5737 struct elt_loc_list *l;
5738 enum rtx_code code;
5739 int count;
5740
5741 if (GET_CODE (expr) != SET)
5742 return;
5743
5744 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5745 return;
5746
5747 src = SET_SRC (expr);
5748 switch (GET_CODE (src))
5749 {
5750 case PLUS:
5751 case MINUS:
5752 case XOR:
5753 case NOT:
5754 case NEG:
5755 if (!REG_P (XEXP (src, 0)))
5756 return;
5757 break;
5758 case SIGN_EXTEND:
5759 case ZERO_EXTEND:
5760 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5761 return;
5762 break;
5763 default:
5764 return;
5765 }
5766
5767 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5768 return;
5769
5770 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5771 if (!v || !cselib_preserved_value_p (v))
5772 return;
5773
5774 /* Use canonical V to avoid creating multiple redundant expressions
5775 for different VALUES equivalent to V. */
5776 v = canonical_cselib_val (v);
5777
5778 /* Adding a reverse op isn't useful if V already has an always valid
5779 location. Ignore ENTRY_VALUE, while it is always constant, we should
5780 prefer non-ENTRY_VALUE locations whenever possible. */
5781 for (l = v->locs, count = 0; l; l = l->next, count++)
5782 if (CONSTANT_P (l->loc)
5783 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5784 return;
5785 /* Avoid creating too large locs lists. */
5786 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5787 return;
5788
5789 switch (GET_CODE (src))
5790 {
5791 case NOT:
5792 case NEG:
5793 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5794 return;
5795 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5796 break;
5797 case SIGN_EXTEND:
5798 case ZERO_EXTEND:
5799 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5800 break;
5801 case XOR:
5802 code = XOR;
5803 goto binary;
5804 case PLUS:
5805 code = MINUS;
5806 goto binary;
5807 case MINUS:
5808 code = PLUS;
5809 goto binary;
5810 binary:
5811 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5812 return;
5813 arg = XEXP (src, 1);
5814 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5815 {
5816 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5817 if (arg == NULL_RTX)
5818 return;
5819 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5820 return;
5821 }
5822 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5823 if (ret == val)
5824 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5825 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5826 breaks a lot of routines during var-tracking. */
5827 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5828 break;
5829 default:
5830 gcc_unreachable ();
5831 }
5832
5833 cselib_add_permanent_equiv (v, ret, insn);
5834 }
5835
5836 /* Add stores (register and memory references) LOC which will be tracked
5837 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5838 CUIP->insn is the instruction of which LOC is a part. */
5839
5840 static void
5841 add_stores (rtx loc, const_rtx expr, void *cuip)
5842 {
5843 enum machine_mode mode = VOIDmode, mode2;
5844 struct count_use_info *cui = (struct count_use_info *)cuip;
5845 basic_block bb = cui->bb;
5846 micro_operation mo;
5847 rtx oloc = loc, nloc, src = NULL;
5848 enum micro_operation_type type = use_type (loc, cui, &mode);
5849 bool track_p = false;
5850 cselib_val *v;
5851 bool resolve, preserve;
5852
5853 if (type == MO_CLOBBER)
5854 return;
5855
5856 mode2 = mode;
5857
5858 if (REG_P (loc))
5859 {
5860 gcc_assert (loc != cfa_base_rtx);
5861 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5862 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5863 || GET_CODE (expr) == CLOBBER)
5864 {
5865 mo.type = MO_CLOBBER;
5866 mo.u.loc = loc;
5867 if (GET_CODE (expr) == SET
5868 && SET_DEST (expr) == loc
5869 && !unsuitable_loc (SET_SRC (expr))
5870 && find_use_val (loc, mode, cui))
5871 {
5872 gcc_checking_assert (type == MO_VAL_SET);
5873 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5874 }
5875 }
5876 else
5877 {
5878 if (GET_CODE (expr) == SET
5879 && SET_DEST (expr) == loc
5880 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5881 src = var_lowpart (mode2, SET_SRC (expr));
5882 loc = var_lowpart (mode2, loc);
5883
5884 if (src == NULL)
5885 {
5886 mo.type = MO_SET;
5887 mo.u.loc = loc;
5888 }
5889 else
5890 {
5891 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5892 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5893 {
5894 /* If this is an instruction copying (part of) a parameter
5895 passed by invisible reference to its register location,
5896 pretend it's a SET so that the initial memory location
5897 is discarded, as the parameter register can be reused
5898 for other purposes and we do not track locations based
5899 on generic registers. */
5900 if (MEM_P (src)
5901 && REG_EXPR (loc)
5902 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5903 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5904 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5905 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5906 != arg_pointer_rtx)
5907 mo.type = MO_SET;
5908 else
5909 mo.type = MO_COPY;
5910 }
5911 else
5912 mo.type = MO_SET;
5913 mo.u.loc = xexpr;
5914 }
5915 }
5916 mo.insn = cui->insn;
5917 }
5918 else if (MEM_P (loc)
5919 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5920 || cui->sets))
5921 {
5922 if (MEM_P (loc) && type == MO_VAL_SET
5923 && !REG_P (XEXP (loc, 0))
5924 && !MEM_P (XEXP (loc, 0)))
5925 {
5926 rtx mloc = loc;
5927 enum machine_mode address_mode = get_address_mode (mloc);
5928 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5929 address_mode, 0,
5930 GET_MODE (mloc));
5931
5932 if (val && !cselib_preserved_value_p (val))
5933 preserve_value (val);
5934 }
5935
5936 if (GET_CODE (expr) == CLOBBER || !track_p)
5937 {
5938 mo.type = MO_CLOBBER;
5939 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5940 }
5941 else
5942 {
5943 if (GET_CODE (expr) == SET
5944 && SET_DEST (expr) == loc
5945 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5946 src = var_lowpart (mode2, SET_SRC (expr));
5947 loc = var_lowpart (mode2, loc);
5948
5949 if (src == NULL)
5950 {
5951 mo.type = MO_SET;
5952 mo.u.loc = loc;
5953 }
5954 else
5955 {
5956 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5957 if (same_variable_part_p (SET_SRC (xexpr),
5958 MEM_EXPR (loc),
5959 INT_MEM_OFFSET (loc)))
5960 mo.type = MO_COPY;
5961 else
5962 mo.type = MO_SET;
5963 mo.u.loc = xexpr;
5964 }
5965 }
5966 mo.insn = cui->insn;
5967 }
5968 else
5969 return;
5970
5971 if (type != MO_VAL_SET)
5972 goto log_and_return;
5973
5974 v = find_use_val (oloc, mode, cui);
5975
5976 if (!v)
5977 goto log_and_return;
5978
5979 resolve = preserve = !cselib_preserved_value_p (v);
5980
5981 /* We cannot track values for multiple-part variables, so we track only
5982 locations for tracked parameters passed either by invisible reference
5983 or directly in multiple locations. */
5984 if (track_p
5985 && REG_P (loc)
5986 && REG_EXPR (loc)
5987 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5988 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5989 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5990 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5991 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5992 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5993 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5994 {
5995 /* Although we don't use the value here, it could be used later by the
5996 mere virtue of its existence as the operand of the reverse operation
5997 that gave rise to it (typically extension/truncation). Make sure it
5998 is preserved as required by vt_expand_var_loc_chain. */
5999 if (preserve)
6000 preserve_value (v);
6001 goto log_and_return;
6002 }
6003
6004 if (loc == stack_pointer_rtx
6005 && hard_frame_pointer_adjustment != -1
6006 && preserve)
6007 cselib_set_value_sp_based (v);
6008
6009 nloc = replace_expr_with_values (oloc);
6010 if (nloc)
6011 oloc = nloc;
6012
6013 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6014 {
6015 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6016
6017 if (oval == v)
6018 return;
6019 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6020
6021 if (oval && !cselib_preserved_value_p (oval))
6022 {
6023 micro_operation moa;
6024
6025 preserve_value (oval);
6026
6027 moa.type = MO_VAL_USE;
6028 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6029 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6030 moa.insn = cui->insn;
6031
6032 if (dump_file && (dump_flags & TDF_DETAILS))
6033 log_op_type (moa.u.loc, cui->bb, cui->insn,
6034 moa.type, dump_file);
6035 VTI (bb)->mos.safe_push (moa);
6036 }
6037
6038 resolve = false;
6039 }
6040 else if (resolve && GET_CODE (mo.u.loc) == SET)
6041 {
6042 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6043 nloc = replace_expr_with_values (SET_SRC (expr));
6044 else
6045 nloc = NULL_RTX;
6046
6047 /* Avoid the mode mismatch between oexpr and expr. */
6048 if (!nloc && mode != mode2)
6049 {
6050 nloc = SET_SRC (expr);
6051 gcc_assert (oloc == SET_DEST (expr));
6052 }
6053
6054 if (nloc && nloc != SET_SRC (mo.u.loc))
6055 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6056 else
6057 {
6058 if (oloc == SET_DEST (mo.u.loc))
6059 /* No point in duplicating. */
6060 oloc = mo.u.loc;
6061 if (!REG_P (SET_SRC (mo.u.loc)))
6062 resolve = false;
6063 }
6064 }
6065 else if (!resolve)
6066 {
6067 if (GET_CODE (mo.u.loc) == SET
6068 && oloc == SET_DEST (mo.u.loc))
6069 /* No point in duplicating. */
6070 oloc = mo.u.loc;
6071 }
6072 else
6073 resolve = false;
6074
6075 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6076
6077 if (mo.u.loc != oloc)
6078 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6079
6080 /* The loc of a MO_VAL_SET may have various forms:
6081
6082 (concat val dst): dst now holds val
6083
6084 (concat val (set dst src)): dst now holds val, copied from src
6085
6086 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6087 after replacing mems and non-top-level regs with values.
6088
6089 (concat (concat val dstv) (set dst src)): dst now holds val,
6090 copied from src. dstv is a value-based representation of dst, if
6091 it differs from dst. If resolution is needed, src is a REG, and
6092 its mode is the same as that of val.
6093
6094 (concat (concat val (set dstv srcv)) (set dst src)): src
6095 copied to dst, holding val. dstv and srcv are value-based
6096 representations of dst and src, respectively.
6097
6098 */
6099
6100 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6101 reverse_op (v->val_rtx, expr, cui->insn);
6102
6103 mo.u.loc = loc;
6104
6105 if (track_p)
6106 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6107 if (preserve)
6108 {
6109 VAL_NEEDS_RESOLUTION (loc) = resolve;
6110 preserve_value (v);
6111 }
6112 if (mo.type == MO_CLOBBER)
6113 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6114 if (mo.type == MO_COPY)
6115 VAL_EXPR_IS_COPIED (loc) = 1;
6116
6117 mo.type = MO_VAL_SET;
6118
6119 log_and_return:
6120 if (dump_file && (dump_flags & TDF_DETAILS))
6121 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6122 VTI (bb)->mos.safe_push (mo);
6123 }
6124
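/* A rough example of the above (register numbers are illustrative):
   for an insn

     (set (reg:SI 0 ax) (mem:SI (reg:SI 7 sp)))

   whose destination is tracked and for which cselib provides a VALUE
   V, add_stores pushes a single MO_VAL_SET whose loc takes one of the
   CONCAT forms documented above, e.g.

     (concat V (set (reg:SI 0 ax) (mem:SI (reg:SI 7 sp)))).  */
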
6125 /* Arguments to the call. */
6126 static rtx call_arguments;
6127
6128 /* Compute call_arguments. */
6129
6130 static void
6131 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6132 {
6133 rtx link, x, call;
6134 rtx prev, cur, next;
6135 rtx this_arg = NULL_RTX;
6136 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6137 tree obj_type_ref = NULL_TREE;
6138 CUMULATIVE_ARGS args_so_far_v;
6139 cumulative_args_t args_so_far;
6140
6141 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6142 args_so_far = pack_cumulative_args (&args_so_far_v);
6143 call = get_call_rtx_from (insn);
6144 if (call)
6145 {
6146 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6147 {
6148 rtx symbol = XEXP (XEXP (call, 0), 0);
6149 if (SYMBOL_REF_DECL (symbol))
6150 fndecl = SYMBOL_REF_DECL (symbol);
6151 }
6152 if (fndecl == NULL_TREE)
6153 fndecl = MEM_EXPR (XEXP (call, 0));
6154 if (fndecl
6155 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6156 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6157 fndecl = NULL_TREE;
6158 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6159 type = TREE_TYPE (fndecl);
6160 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6161 {
6162 if (TREE_CODE (fndecl) == INDIRECT_REF
6163 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6164 obj_type_ref = TREE_OPERAND (fndecl, 0);
6165 fndecl = NULL_TREE;
6166 }
6167 if (type)
6168 {
6169 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6170 t = TREE_CHAIN (t))
6171 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6172 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6173 break;
6174 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6175 type = NULL;
6176 else
6177 {
6178 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6179 link = CALL_INSN_FUNCTION_USAGE (insn);
6180 #ifndef PCC_STATIC_STRUCT_RETURN
6181 if (aggregate_value_p (TREE_TYPE (type), type)
6182 && targetm.calls.struct_value_rtx (type, 0) == 0)
6183 {
6184 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6185 enum machine_mode mode = TYPE_MODE (struct_addr);
6186 rtx reg;
6187 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6188 nargs + 1);
6189 reg = targetm.calls.function_arg (args_so_far, mode,
6190 struct_addr, true);
6191 targetm.calls.function_arg_advance (args_so_far, mode,
6192 struct_addr, true);
6193 if (reg == NULL_RTX)
6194 {
6195 for (; link; link = XEXP (link, 1))
6196 if (GET_CODE (XEXP (link, 0)) == USE
6197 && MEM_P (XEXP (XEXP (link, 0), 0)))
6198 {
6199 link = XEXP (link, 1);
6200 break;
6201 }
6202 }
6203 }
6204 else
6205 #endif
6206 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6207 nargs);
6208 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6209 {
6210 enum machine_mode mode;
6211 t = TYPE_ARG_TYPES (type);
6212 mode = TYPE_MODE (TREE_VALUE (t));
6213 this_arg = targetm.calls.function_arg (args_so_far, mode,
6214 TREE_VALUE (t), true);
6215 if (this_arg && !REG_P (this_arg))
6216 this_arg = NULL_RTX;
6217 else if (this_arg == NULL_RTX)
6218 {
6219 for (; link; link = XEXP (link, 1))
6220 if (GET_CODE (XEXP (link, 0)) == USE
6221 && MEM_P (XEXP (XEXP (link, 0), 0)))
6222 {
6223 this_arg = XEXP (XEXP (link, 0), 0);
6224 break;
6225 }
6226 }
6227 }
6228 }
6229 }
6230 }
6231 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6232
6233 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6234 if (GET_CODE (XEXP (link, 0)) == USE)
6235 {
6236 rtx item = NULL_RTX;
6237 x = XEXP (XEXP (link, 0), 0);
6238 if (GET_MODE (link) == VOIDmode
6239 || GET_MODE (link) == BLKmode
6240 || (GET_MODE (link) != GET_MODE (x)
6241 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6242 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6243 /* Can't do anything for these, if the original type mode
6244 isn't known or can't be converted. */;
6245 else if (REG_P (x))
6246 {
6247 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6248 if (val && cselib_preserved_value_p (val))
6249 item = val->val_rtx;
6250 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6251 {
6252 enum machine_mode mode = GET_MODE (x);
6253
6254 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6255 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6256 {
6257 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6258
6259 if (reg == NULL_RTX || !REG_P (reg))
6260 continue;
6261 val = cselib_lookup (reg, mode, 0, VOIDmode);
6262 if (val && cselib_preserved_value_p (val))
6263 {
6264 item = val->val_rtx;
6265 break;
6266 }
6267 }
6268 }
6269 }
6270 else if (MEM_P (x))
6271 {
6272 rtx mem = x;
6273 cselib_val *val;
6274
6275 if (!frame_pointer_needed)
6276 {
6277 struct adjust_mem_data amd;
6278 amd.mem_mode = VOIDmode;
6279 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6280 amd.side_effects = NULL;
6281 amd.store = true;
6282 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6283 &amd);
6284 gcc_assert (amd.side_effects == NULL_RTX);
6285 }
6286 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6287 if (val && cselib_preserved_value_p (val))
6288 item = val->val_rtx;
6289 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6290 {
6291 /* For a non-integer stack argument, also check whether it wasn't
6292 initialized with an integer. */
6293 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6294 if (imode != GET_MODE (mem) && imode != BLKmode)
6295 {
6296 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6297 imode, 0, VOIDmode);
6298 if (val && cselib_preserved_value_p (val))
6299 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6300 imode);
6301 }
6302 }
6303 }
6304 if (item)
6305 {
6306 rtx x2 = x;
6307 if (GET_MODE (item) != GET_MODE (link))
6308 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6309 if (GET_MODE (x2) != GET_MODE (link))
6310 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6311 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6312 call_arguments
6313 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6314 }
6315 if (t && t != void_list_node)
6316 {
6317 tree argtype = TREE_VALUE (t);
6318 enum machine_mode mode = TYPE_MODE (argtype);
6319 rtx reg;
6320 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6321 {
6322 argtype = build_pointer_type (argtype);
6323 mode = TYPE_MODE (argtype);
6324 }
6325 reg = targetm.calls.function_arg (args_so_far, mode,
6326 argtype, true);
6327 if (TREE_CODE (argtype) == REFERENCE_TYPE
6328 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6329 && reg
6330 && REG_P (reg)
6331 && GET_MODE (reg) == mode
6332 && GET_MODE_CLASS (mode) == MODE_INT
6333 && REG_P (x)
6334 && REGNO (x) == REGNO (reg)
6335 && GET_MODE (x) == mode
6336 && item)
6337 {
6338 enum machine_mode indmode
6339 = TYPE_MODE (TREE_TYPE (argtype));
6340 rtx mem = gen_rtx_MEM (indmode, x);
6341 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6342 if (val && cselib_preserved_value_p (val))
6343 {
6344 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6345 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6346 call_arguments);
6347 }
6348 else
6349 {
6350 struct elt_loc_list *l;
6351 tree initial;
6352
6353 /* Try harder: when passing the address of a constant
6354 pool integer, it can easily be read back. */
6355 item = XEXP (item, 1);
6356 if (GET_CODE (item) == SUBREG)
6357 item = SUBREG_REG (item);
6358 gcc_assert (GET_CODE (item) == VALUE);
6359 val = CSELIB_VAL_PTR (item);
6360 for (l = val->locs; l; l = l->next)
6361 if (GET_CODE (l->loc) == SYMBOL_REF
6362 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6363 && SYMBOL_REF_DECL (l->loc)
6364 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6365 {
6366 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6367 if (tree_fits_shwi_p (initial))
6368 {
6369 item = GEN_INT (tree_to_shwi (initial));
6370 item = gen_rtx_CONCAT (indmode, mem, item);
6371 call_arguments
6372 = gen_rtx_EXPR_LIST (VOIDmode, item,
6373 call_arguments);
6374 }
6375 break;
6376 }
6377 }
6378 }
6379 targetm.calls.function_arg_advance (args_so_far, mode,
6380 argtype, true);
6381 t = TREE_CHAIN (t);
6382 }
6383 }
6384
6385 /* Add debug arguments. */
6386 if (fndecl
6387 && TREE_CODE (fndecl) == FUNCTION_DECL
6388 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6389 {
6390 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6391 if (debug_args)
6392 {
6393 unsigned int ix;
6394 tree param;
6395 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6396 {
6397 rtx item;
6398 tree dtemp = (**debug_args)[ix + 1];
6399 enum machine_mode mode = DECL_MODE (dtemp);
6400 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6401 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6402 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6403 call_arguments);
6404 }
6405 }
6406 }
6407
6408 /* Reverse call_arguments chain. */
6409 prev = NULL_RTX;
6410 for (cur = call_arguments; cur; cur = next)
6411 {
6412 next = XEXP (cur, 1);
6413 XEXP (cur, 1) = prev;
6414 prev = cur;
6415 }
6416 call_arguments = prev;
6417
6418 x = get_call_rtx_from (insn);
6419 if (x)
6420 {
6421 x = XEXP (XEXP (x, 0), 0);
6422 if (GET_CODE (x) == SYMBOL_REF)
6423 /* Don't record anything. */;
6424 else if (CONSTANT_P (x))
6425 {
6426 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6427 pc_rtx, x);
6428 call_arguments
6429 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6430 }
6431 else
6432 {
6433 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6434 if (val && cselib_preserved_value_p (val))
6435 {
6436 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6437 call_arguments
6438 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6439 }
6440 }
6441 }
6442 if (this_arg)
6443 {
6444 enum machine_mode mode
6445 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6446 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6447 HOST_WIDE_INT token
6448 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6449 if (token)
6450 clobbered = plus_constant (mode, clobbered,
6451 token * GET_MODE_SIZE (mode));
6452 clobbered = gen_rtx_MEM (mode, clobbered);
6453 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6454 call_arguments
6455 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6456 }
6457 }
6458
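/* The call_arguments list built above is an EXPR_LIST of CONCATs,
   each pairing an argument location with the VALUE or constant it
   held at the call; for indirect calls a (concat pc <callee>)
   element is added, and for virtual calls a (concat (clobber pc)
   <vtable slot>) element.  A sketch of a possible result:

     (expr_list (concat (reg:SI 5 di) (value:SI ...))
       (expr_list (concat pc (value:DI ...))
         (nil)))

   (the exact shapes are illustrative only).  */
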
6459 /* Callback for cselib_record_sets_hook, recording the uses and
6460 stores in an insn as micro operations after cselib_record_sets has
6461 analyzed the sets in the insn, but before it modifies the stored
6462 values in its internal tables. If it is not called via
6463 cselib_record_sets (perhaps because we're not doing cselib in the
6464 first place), SETS and N_SETS will be 0. */
6465
6466 static void
6467 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6468 {
6469 basic_block bb = BLOCK_FOR_INSN (insn);
6470 int n1, n2;
6471 struct count_use_info cui;
6472 micro_operation *mos;
6473
6474 cselib_hook_called = true;
6475
6476 cui.insn = insn;
6477 cui.bb = bb;
6478 cui.sets = sets;
6479 cui.n_sets = n_sets;
6480
6481 n1 = VTI (bb)->mos.length ();
6482 cui.store_p = false;
6483 note_uses (&PATTERN (insn), add_uses_1, &cui);
6484 n2 = VTI (bb)->mos.length () - 1;
6485 mos = VTI (bb)->mos.address ();
6486
6487 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6488 MO_VAL_LOC last. */
6489 while (n1 < n2)
6490 {
6491 while (n1 < n2 && mos[n1].type == MO_USE)
6492 n1++;
6493 while (n1 < n2 && mos[n2].type != MO_USE)
6494 n2--;
6495 if (n1 < n2)
6496 {
6497 micro_operation sw;
6498
6499 sw = mos[n1];
6500 mos[n1] = mos[n2];
6501 mos[n2] = sw;
6502 }
6503 }
6504
6505 n2 = VTI (bb)->mos.length () - 1;
6506 while (n1 < n2)
6507 {
6508 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6509 n1++;
6510 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6511 n2--;
6512 if (n1 < n2)
6513 {
6514 micro_operation sw;
6515
6516 sw = mos[n1];
6517 mos[n1] = mos[n2];
6518 mos[n2] = sw;
6519 }
6520 }
6521
6522 if (CALL_P (insn))
6523 {
6524 micro_operation mo;
6525
6526 mo.type = MO_CALL;
6527 mo.insn = insn;
6528 mo.u.loc = call_arguments;
6529 call_arguments = NULL_RTX;
6530
6531 if (dump_file && (dump_flags & TDF_DETAILS))
6532 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6533 VTI (bb)->mos.safe_push (mo);
6534 }
6535
6536 n1 = VTI (bb)->mos.length ();
6537 /* This will record NEXT_INSN (insn), such that we can
6538 insert notes before it without worrying about any
6539 notes that MO_USEs might emit after the insn. */
6540 cui.store_p = true;
6541 note_stores (PATTERN (insn), add_stores, &cui);
6542 n2 = VTI (bb)->mos.length () - 1;
6543 mos = VTI (bb)->mos.address ();
6544
6545 /* Order the MO_VAL_USEs first (note_stores does nothing
6546 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6547 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6548 while (n1 < n2)
6549 {
6550 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6551 n1++;
6552 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6553 n2--;
6554 if (n1 < n2)
6555 {
6556 micro_operation sw;
6557
6558 sw = mos[n1];
6559 mos[n1] = mos[n2];
6560 mos[n2] = sw;
6561 }
6562 }
6563
6564 n2 = VTI (bb)->mos.length () - 1;
6565 while (n1 < n2)
6566 {
6567 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6568 n1++;
6569 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6570 n2--;
6571 if (n1 < n2)
6572 {
6573 micro_operation sw;
6574
6575 sw = mos[n1];
6576 mos[n1] = mos[n2];
6577 mos[n2] = sw;
6578 }
6579 }
6580 }
6581
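/* Each pair of swap loops above is an in-place two-way partition,
   akin to a single quicksort partitioning step: e.g. the first pair
   turns [USE_NO_VAR, USE, VAL_LOC, USE] into [USE, USE, VAL_LOC,
   USE_NO_VAR], and the second pair then moves the MO_VAL_LOCs to the
   end.  The relative order within each class is not preserved; only
   the ordering between the classes is relied upon.  */

/* Return the initialization status recorded in dataflow set IN for
   SRC as a location of the variable it belongs to; without
   -fvar-tracking-uninit this is always VAR_INIT_STATUS_INITIALIZED.  */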
6582 static enum var_init_status
6583 find_src_status (dataflow_set *in, rtx src)
6584 {
6585 tree decl = NULL_TREE;
6586 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6587
6588 if (! flag_var_tracking_uninit)
6589 status = VAR_INIT_STATUS_INITIALIZED;
6590
6591 if (src && REG_P (src))
6592 decl = var_debug_decl (REG_EXPR (src));
6593 else if (src && MEM_P (src))
6594 decl = var_debug_decl (MEM_EXPR (src));
6595
6596 if (src && decl)
6597 status = get_init_value (in, src, dv_from_decl (decl));
6598
6599 return status;
6600 }
6601
6602 /* SRC is the source of an assignment. Use SET to try to find what
6603 was ultimately assigned to SRC. Return that value if known,
6604 otherwise return NULL_RTX. */
6605
6606 static rtx
6607 find_src_set_src (dataflow_set *set, rtx src)
6608 {
6609 tree decl = NULL_TREE; /* The variable being copied around. */
6610 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6611 variable var;
6612 location_chain nextp;
6613 int i;
6614 bool found;
6615
6616 if (src && REG_P (src))
6617 decl = var_debug_decl (REG_EXPR (src));
6618 else if (src && MEM_P (src))
6619 decl = var_debug_decl (MEM_EXPR (src));
6620
6621 if (src && decl)
6622 {
6623 decl_or_value dv = dv_from_decl (decl);
6624
6625 var = shared_hash_find (set->vars, dv);
6626 if (var)
6627 {
6628 found = false;
6629 for (i = 0; i < var->n_var_parts && !found; i++)
6630 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6631 nextp = nextp->next)
6632 if (rtx_equal_p (nextp->loc, src))
6633 {
6634 set_src = nextp->set_src;
6635 found = true;
6636 }
6637
6638 }
6639 }
6640
6641 return set_src;
6642 }
6643
6644 /* Compute the changes of variable locations in the basic block BB. */
6645
6646 static bool
6647 compute_bb_dataflow (basic_block bb)
6648 {
6649 unsigned int i;
6650 micro_operation *mo;
6651 bool changed;
6652 dataflow_set old_out;
6653 dataflow_set *in = &VTI (bb)->in;
6654 dataflow_set *out = &VTI (bb)->out;
6655
6656 dataflow_set_init (&old_out);
6657 dataflow_set_copy (&old_out, out);
6658 dataflow_set_copy (out, in);
6659
6660 if (MAY_HAVE_DEBUG_INSNS)
6661 local_get_addr_cache = new hash_map<rtx, rtx>;
6662
6663 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6664 {
6665 rtx_insn *insn = mo->insn;
6666
6667 switch (mo->type)
6668 {
6669 case MO_CALL:
6670 dataflow_set_clear_at_call (out);
6671 break;
6672
6673 case MO_USE:
6674 {
6675 rtx loc = mo->u.loc;
6676
6677 if (REG_P (loc))
6678 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6679 else if (MEM_P (loc))
6680 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6681 }
6682 break;
6683
6684 case MO_VAL_LOC:
6685 {
6686 rtx loc = mo->u.loc;
6687 rtx val, vloc;
6688 tree var;
6689
6690 if (GET_CODE (loc) == CONCAT)
6691 {
6692 val = XEXP (loc, 0);
6693 vloc = XEXP (loc, 1);
6694 }
6695 else
6696 {
6697 val = NULL_RTX;
6698 vloc = loc;
6699 }
6700
6701 var = PAT_VAR_LOCATION_DECL (vloc);
6702
6703 clobber_variable_part (out, NULL_RTX,
6704 dv_from_decl (var), 0, NULL_RTX);
6705 if (val)
6706 {
6707 if (VAL_NEEDS_RESOLUTION (loc))
6708 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6709 set_variable_part (out, val, dv_from_decl (var), 0,
6710 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6711 INSERT);
6712 }
6713 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6714 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6715 dv_from_decl (var), 0,
6716 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6717 INSERT);
6718 }
6719 break;
6720
6721 case MO_VAL_USE:
6722 {
6723 rtx loc = mo->u.loc;
6724 rtx val, vloc, uloc;
6725
6726 vloc = uloc = XEXP (loc, 1);
6727 val = XEXP (loc, 0);
6728
6729 if (GET_CODE (val) == CONCAT)
6730 {
6731 uloc = XEXP (val, 1);
6732 val = XEXP (val, 0);
6733 }
6734
6735 if (VAL_NEEDS_RESOLUTION (loc))
6736 val_resolve (out, val, vloc, insn);
6737 else
6738 val_store (out, val, uloc, insn, false);
6739
6740 if (VAL_HOLDS_TRACK_EXPR (loc))
6741 {
6742 if (GET_CODE (uloc) == REG)
6743 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6744 NULL);
6745 else if (GET_CODE (uloc) == MEM)
6746 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6747 NULL);
6748 }
6749 }
6750 break;
6751
6752 case MO_VAL_SET:
6753 {
6754 rtx loc = mo->u.loc;
6755 rtx val, vloc, uloc;
6756 rtx dstv, srcv;
6757
6758 vloc = loc;
6759 uloc = XEXP (vloc, 1);
6760 val = XEXP (vloc, 0);
6761 vloc = uloc;
6762
6763 if (GET_CODE (uloc) == SET)
6764 {
6765 dstv = SET_DEST (uloc);
6766 srcv = SET_SRC (uloc);
6767 }
6768 else
6769 {
6770 dstv = uloc;
6771 srcv = NULL;
6772 }
6773
6774 if (GET_CODE (val) == CONCAT)
6775 {
6776 dstv = vloc = XEXP (val, 1);
6777 val = XEXP (val, 0);
6778 }
6779
6780 if (GET_CODE (vloc) == SET)
6781 {
6782 srcv = SET_SRC (vloc);
6783
6784 gcc_assert (val != srcv);
6785 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6786
6787 dstv = vloc = SET_DEST (vloc);
6788
6789 if (VAL_NEEDS_RESOLUTION (loc))
6790 val_resolve (out, val, srcv, insn);
6791 }
6792 else if (VAL_NEEDS_RESOLUTION (loc))
6793 {
6794 gcc_assert (GET_CODE (uloc) == SET
6795 && GET_CODE (SET_SRC (uloc)) == REG);
6796 val_resolve (out, val, SET_SRC (uloc), insn);
6797 }
6798
6799 if (VAL_HOLDS_TRACK_EXPR (loc))
6800 {
6801 if (VAL_EXPR_IS_CLOBBERED (loc))
6802 {
6803 if (REG_P (uloc))
6804 var_reg_delete (out, uloc, true);
6805 else if (MEM_P (uloc))
6806 {
6807 gcc_assert (MEM_P (dstv));
6808 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6809 var_mem_delete (out, dstv, true);
6810 }
6811 }
6812 else
6813 {
6814 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6815 rtx src = NULL, dst = uloc;
6816 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6817
6818 if (GET_CODE (uloc) == SET)
6819 {
6820 src = SET_SRC (uloc);
6821 dst = SET_DEST (uloc);
6822 }
6823
6824 if (copied_p)
6825 {
6826 if (flag_var_tracking_uninit)
6827 {
6828 status = find_src_status (in, src);
6829
6830 if (status == VAR_INIT_STATUS_UNKNOWN)
6831 status = find_src_status (out, src);
6832 }
6833
6834 src = find_src_set_src (in, src);
6835 }
6836
6837 if (REG_P (dst))
6838 var_reg_delete_and_set (out, dst, !copied_p,
6839 status, srcv);
6840 else if (MEM_P (dst))
6841 {
6842 gcc_assert (MEM_P (dstv));
6843 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6844 var_mem_delete_and_set (out, dstv, !copied_p,
6845 status, srcv);
6846 }
6847 }
6848 }
6849 else if (REG_P (uloc))
6850 var_regno_delete (out, REGNO (uloc));
6851 else if (MEM_P (uloc))
6852 {
6853 gcc_checking_assert (GET_CODE (vloc) == MEM);
6854 gcc_checking_assert (dstv == vloc);
6855 if (dstv != vloc)
6856 clobber_overlapping_mems (out, vloc);
6857 }
6858
6859 val_store (out, val, dstv, insn, true);
6860 }
6861 break;
6862
6863 case MO_SET:
6864 {
6865 rtx loc = mo->u.loc;
6866 rtx set_src = NULL;
6867
6868 if (GET_CODE (loc) == SET)
6869 {
6870 set_src = SET_SRC (loc);
6871 loc = SET_DEST (loc);
6872 }
6873
6874 if (REG_P (loc))
6875 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6876 set_src);
6877 else if (MEM_P (loc))
6878 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6879 set_src);
6880 }
6881 break;
6882
6883 case MO_COPY:
6884 {
6885 rtx loc = mo->u.loc;
6886 enum var_init_status src_status;
6887 rtx set_src = NULL;
6888
6889 if (GET_CODE (loc) == SET)
6890 {
6891 set_src = SET_SRC (loc);
6892 loc = SET_DEST (loc);
6893 }
6894
6895 if (! flag_var_tracking_uninit)
6896 src_status = VAR_INIT_STATUS_INITIALIZED;
6897 else
6898 {
6899 src_status = find_src_status (in, set_src);
6900
6901 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6902 src_status = find_src_status (out, set_src);
6903 }
6904
6905 set_src = find_src_set_src (in, set_src);
6906
6907 if (REG_P (loc))
6908 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6909 else if (MEM_P (loc))
6910 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6911 }
6912 break;
6913
6914 case MO_USE_NO_VAR:
6915 {
6916 rtx loc = mo->u.loc;
6917
6918 if (REG_P (loc))
6919 var_reg_delete (out, loc, false);
6920 else if (MEM_P (loc))
6921 var_mem_delete (out, loc, false);
6922 }
6923 break;
6924
6925 case MO_CLOBBER:
6926 {
6927 rtx loc = mo->u.loc;
6928
6929 if (REG_P (loc))
6930 var_reg_delete (out, loc, true);
6931 else if (MEM_P (loc))
6932 var_mem_delete (out, loc, true);
6933 }
6934 break;
6935
6936 case MO_ADJUST:
6937 out->stack_adjust += mo->u.adjust;
6938 break;
6939 }
6940 }
6941
6942 if (MAY_HAVE_DEBUG_INSNS)
6943 {
6944 delete local_get_addr_cache;
6945 local_get_addr_cache = NULL;
6946
6947 dataflow_set_equiv_regs (out);
6948 shared_hash_htab (out->vars)
6949 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6950 shared_hash_htab (out->vars)
6951 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6952 #if ENABLE_CHECKING
6953 shared_hash_htab (out->vars)
6954 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6955 #endif
6956 }
6957 changed = dataflow_set_different (&old_out, out);
6958 dataflow_set_destroy (&old_out);
6959 return changed;
6960 }
6961
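/* Note the difference between the MO_SET and MO_COPY cases above:
   both record a new location, but MO_SET passes modify == true to
   var_reg_delete_and_set / var_mem_delete_and_set, discarding the
   variable's other known locations, while MO_COPY passes false and
   keeps them, since a copy leaves the source location valid too.  */
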
6962 /* Find the locations of variables in the whole function. */
6963
6964 static bool
6965 vt_find_locations (void)
6966 {
6967 fibheap_t worklist, pending, fibheap_swap;
6968 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6969 basic_block bb;
6970 edge e;
6971 int *bb_order;
6972 int *rc_order;
6973 int i;
6974 int htabsz = 0;
6975 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6976 bool success = true;
6977
6978 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6979 /* Compute the reverse completion order of a depth-first search of the
6980 CFG so that the data-flow iteration runs faster. */
6981 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6982 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6983 pre_and_rev_post_order_compute (NULL, rc_order, false);
6984 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6985 bb_order[rc_order[i]] = i;
6986 free (rc_order);
6987
6988 worklist = fibheap_new ();
6989 pending = fibheap_new ();
6990 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
6991 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
6992 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
6993 bitmap_clear (in_worklist);
6994
6995 FOR_EACH_BB_FN (bb, cfun)
6996 fibheap_insert (pending, bb_order[bb->index], bb);
6997 bitmap_ones (in_pending);
6998
6999 while (success && !fibheap_empty (pending))
7000 {
7001 fibheap_swap = pending;
7002 pending = worklist;
7003 worklist = fibheap_swap;
7004 sbitmap_swap = in_pending;
7005 in_pending = in_worklist;
7006 in_worklist = sbitmap_swap;
7007
7008 bitmap_clear (visited);
7009
7010 while (!fibheap_empty (worklist))
7011 {
7012 bb = (basic_block) fibheap_extract_min (worklist);
7013 bitmap_clear_bit (in_worklist, bb->index);
7014 gcc_assert (!bitmap_bit_p (visited, bb->index));
7015 if (!bitmap_bit_p (visited, bb->index))
7016 {
7017 bool changed;
7018 edge_iterator ei;
7019 int oldinsz, oldoutsz;
7020
7021 bitmap_set_bit (visited, bb->index);
7022
7023 if (VTI (bb)->in.vars)
7024 {
7025 htabsz
7026 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7027 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7028 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7029 oldoutsz
7030 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7031 }
7032 else
7033 oldinsz = oldoutsz = 0;
7034
7035 if (MAY_HAVE_DEBUG_INSNS)
7036 {
7037 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7038 bool first = true, adjust = false;
7039
7040 /* Calculate the IN set as the intersection of
7041 predecessor OUT sets. */
7042
7043 dataflow_set_clear (in);
7044 dst_can_be_shared = true;
7045
7046 FOR_EACH_EDGE (e, ei, bb->preds)
7047 if (!VTI (e->src)->flooded)
7048 gcc_assert (bb_order[bb->index]
7049 <= bb_order[e->src->index]);
7050 else if (first)
7051 {
7052 dataflow_set_copy (in, &VTI (e->src)->out);
7053 first_out = &VTI (e->src)->out;
7054 first = false;
7055 }
7056 else
7057 {
7058 dataflow_set_merge (in, &VTI (e->src)->out);
7059 adjust = true;
7060 }
7061
7062 if (adjust)
7063 {
7064 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7065 #if ENABLE_CHECKING
7066 /* Merge and merge_adjust should keep entries in
7067 canonical order. */
7068 shared_hash_htab (in->vars)
7069 ->traverse <dataflow_set *,
7070 canonicalize_loc_order_check> (in);
7071 #endif
7072 if (dst_can_be_shared)
7073 {
7074 shared_hash_destroy (in->vars);
7075 in->vars = shared_hash_copy (first_out->vars);
7076 }
7077 }
7078
7079 VTI (bb)->flooded = true;
7080 }
7081 else
7082 {
7083 /* Calculate the IN set as the union of predecessor OUT sets. */
7084 dataflow_set_clear (&VTI (bb)->in);
7085 FOR_EACH_EDGE (e, ei, bb->preds)
7086 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7087 }
7088
7089 changed = compute_bb_dataflow (bb);
7090 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7091 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7092
7093 if (htabmax && htabsz > htabmax)
7094 {
7095 if (MAY_HAVE_DEBUG_INSNS)
7096 inform (DECL_SOURCE_LOCATION (cfun->decl),
7097 "variable tracking size limit exceeded with "
7098 "-fvar-tracking-assignments, retrying without");
7099 else
7100 inform (DECL_SOURCE_LOCATION (cfun->decl),
7101 "variable tracking size limit exceeded");
7102 success = false;
7103 break;
7104 }
7105
7106 if (changed)
7107 {
7108 FOR_EACH_EDGE (e, ei, bb->succs)
7109 {
7110 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7111 continue;
7112
7113 if (bitmap_bit_p (visited, e->dest->index))
7114 {
7115 if (!bitmap_bit_p (in_pending, e->dest->index))
7116 {
7117 /* Send E->DEST to next round. */
7118 bitmap_set_bit (in_pending, e->dest->index);
7119 fibheap_insert (pending,
7120 bb_order[e->dest->index],
7121 e->dest);
7122 }
7123 }
7124 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7125 {
7126 /* Add E->DEST to current round. */
7127 bitmap_set_bit (in_worklist, e->dest->index);
7128 fibheap_insert (worklist, bb_order[e->dest->index],
7129 e->dest);
7130 }
7131 }
7132 }
7133
7134 if (dump_file)
7135 fprintf (dump_file,
7136 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7137 bb->index,
7138 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7139 oldinsz,
7140 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7141 oldoutsz,
7142 (int)worklist->nodes, (int)pending->nodes, htabsz);
7143
7144 if (dump_file && (dump_flags & TDF_DETAILS))
7145 {
7146 fprintf (dump_file, "BB %i IN:\n", bb->index);
7147 dump_dataflow_set (&VTI (bb)->in);
7148 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7149 dump_dataflow_set (&VTI (bb)->out);
7150 }
7151 }
7152 }
7153 }
7154
7155 if (success && MAY_HAVE_DEBUG_INSNS)
7156 FOR_EACH_BB_FN (bb, cfun)
7157 gcc_assert (VTI (bb)->flooded);
7158
7159 free (bb_order);
7160 fibheap_delete (worklist);
7161 fibheap_delete (pending);
7162 sbitmap_free (visited);
7163 sbitmap_free (in_worklist);
7164 sbitmap_free (in_pending);
7165
7166 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7167 return success;
7168 }
7169
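/* The iteration above uses two fibonacci heaps keyed by reverse
   completion order: WORKLIST holds the blocks of the current round,
   PENDING those deferred to the next one.  A changed successor that
   was already visited in the current round goes to PENDING, otherwise
   to WORKLIST, so each round processes every block at most once.  In
   rough pseudo-code:

     pending = all blocks;
     while (pending nonempty)
       {
         swap (worklist, pending);
         while (worklist nonempty)
           process min-order block, requeueing changed successors;
       }
 */
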
7170 /* Print the contents of LIST to the dump file. */
7171
7172 static void
7173 dump_attrs_list (attrs list)
7174 {
7175 for (; list; list = list->next)
7176 {
7177 if (dv_is_decl_p (list->dv))
7178 print_mem_expr (dump_file, dv_as_decl (list->dv));
7179 else
7180 print_rtl_single (dump_file, dv_as_value (list->dv));
7181 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7182 }
7183 fprintf (dump_file, "\n");
7184 }
7185
7186 /* Print the information about variable *SLOT to dump file. */
7187
7188 int
7189 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7190 {
7191 variable var = *slot;
7192
7193 dump_var (var);
7194
7195 /* Continue traversing the hash table. */
7196 return 1;
7197 }
7198
7199 /* Print the information about variable VAR to dump file. */
7200
7201 static void
7202 dump_var (variable var)
7203 {
7204 int i;
7205 location_chain node;
7206
7207 if (dv_is_decl_p (var->dv))
7208 {
7209 const_tree decl = dv_as_decl (var->dv);
7210
7211 if (DECL_NAME (decl))
7212 {
7213 fprintf (dump_file, " name: %s",
7214 IDENTIFIER_POINTER (DECL_NAME (decl)));
7215 if (dump_flags & TDF_UID)
7216 fprintf (dump_file, "D.%u", DECL_UID (decl));
7217 }
7218 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7219 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7220 else
7221 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7222 fprintf (dump_file, "\n");
7223 }
7224 else
7225 {
7226 fputc (' ', dump_file);
7227 print_rtl_single (dump_file, dv_as_value (var->dv));
7228 }
7229
7230 for (i = 0; i < var->n_var_parts; i++)
7231 {
7232 fprintf (dump_file, " offset %ld\n",
7233 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7234 for (node = var->var_part[i].loc_chain; node; node = node->next)
7235 {
7236 fprintf (dump_file, " ");
7237 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7238 fprintf (dump_file, "[uninit]");
7239 print_rtl_single (dump_file, node->loc);
7240 }
7241 }
7242 }
7243
7244 /* Print the information about variables from hash table VARS to dump file. */
7245
7246 static void
7247 dump_vars (variable_table_type *vars)
7248 {
7249 if (vars->elements () > 0)
7250 {
7251 fprintf (dump_file, "Variables:\n");
7252 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7253 }
7254 }
7255
7256 /* Print the dataflow set SET to dump file. */
7257
7258 static void
7259 dump_dataflow_set (dataflow_set *set)
7260 {
7261 int i;
7262
7263 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7264 set->stack_adjust);
7265 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7266 {
7267 if (set->regs[i])
7268 {
7269 fprintf (dump_file, "Reg %d:", i);
7270 dump_attrs_list (set->regs[i]);
7271 }
7272 }
7273 dump_vars (shared_hash_htab (set->vars));
7274 fprintf (dump_file, "\n");
7275 }
7276
7277 /* Print the IN and OUT sets for each basic block to dump file. */
7278
7279 static void
7280 dump_dataflow_sets (void)
7281 {
7282 basic_block bb;
7283
7284 FOR_EACH_BB_FN (bb, cfun)
7285 {
7286 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7287 fprintf (dump_file, "IN:\n");
7288 dump_dataflow_set (&VTI (bb)->in);
7289 fprintf (dump_file, "OUT:\n");
7290 dump_dataflow_set (&VTI (bb)->out);
7291 }
7292 }
7293
7294 /* Return the variable for DV in dropped_values, inserting one if
7295 requested with INSERT. */
7296
7297 static inline variable
7298 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7299 {
7300 variable_def **slot;
7301 variable empty_var;
7302 onepart_enum_t onepart;
7303
7304 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7305
7306 if (!slot)
7307 return NULL;
7308
7309 if (*slot)
7310 return *slot;
7311
7312 gcc_checking_assert (insert == INSERT);
7313
7314 onepart = dv_onepart_p (dv);
7315
7316 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7317
7318 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7319 empty_var->dv = dv;
7320 empty_var->refcount = 1;
7321 empty_var->n_var_parts = 0;
7322 empty_var->onepart = onepart;
7323 empty_var->in_changed_variables = false;
7324 empty_var->var_part[0].loc_chain = NULL;
7325 empty_var->var_part[0].cur_loc = NULL;
7326 VAR_LOC_1PAUX (empty_var) = NULL;
7327 set_dv_changed (dv, true);
7328
7329 *slot = empty_var;
7330
7331 return empty_var;
7332 }
7333
7334 /* Recover the one-part aux from dropped_values. */
7335
7336 static struct onepart_aux *
7337 recover_dropped_1paux (variable var)
7338 {
7339 variable dvar;
7340
7341 gcc_checking_assert (var->onepart);
7342
7343 if (VAR_LOC_1PAUX (var))
7344 return VAR_LOC_1PAUX (var);
7345
7346 if (var->onepart == ONEPART_VDECL)
7347 return NULL;
7348
7349 dvar = variable_from_dropped (var->dv, NO_INSERT);
7350
7351 if (!dvar)
7352 return NULL;
7353
7354 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7355 VAR_LOC_1PAUX (dvar) = NULL;
7356
7357 return VAR_LOC_1PAUX (var);
7358 }
7359
7360 /* Add variable VAR to the hash table of changed variables and,
7361 if it has no locations, delete it from SET's hash table. */
7362
7363 static void
7364 variable_was_changed (variable var, dataflow_set *set)
7365 {
7366 hashval_t hash = dv_htab_hash (var->dv);
7367
7368 if (emit_notes)
7369 {
7370 variable_def **slot;
7371
7372 /* Remember this decl or VALUE has been added to changed_variables. */
7373 set_dv_changed (var->dv, true);
7374
7375 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7376
7377 if (*slot)
7378 {
7379 variable old_var = *slot;
7380 gcc_assert (old_var->in_changed_variables);
7381 old_var->in_changed_variables = false;
7382 if (var != old_var && var->onepart)
7383 {
7384 /* Restore the auxiliary info from an empty variable
7385 previously created for changed_variables, so it is
7386 not lost. */
7387 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7388 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7389 VAR_LOC_1PAUX (old_var) = NULL;
7390 }
7391 variable_htab_free (*slot);
7392 }
7393
7394 if (set && var->n_var_parts == 0)
7395 {
7396 onepart_enum_t onepart = var->onepart;
7397 variable empty_var = NULL;
7398 variable_def **dslot = NULL;
7399
7400 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7401 {
7402 dslot = dropped_values->find_slot_with_hash (var->dv,
7403 dv_htab_hash (var->dv),
7404 INSERT);
7405 empty_var = *dslot;
7406
7407 if (empty_var)
7408 {
7409 gcc_checking_assert (!empty_var->in_changed_variables);
7410 if (!VAR_LOC_1PAUX (var))
7411 {
7412 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7413 VAR_LOC_1PAUX (empty_var) = NULL;
7414 }
7415 else
7416 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7417 }
7418 }
7419
7420 if (!empty_var)
7421 {
7422 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7423 empty_var->dv = var->dv;
7424 empty_var->refcount = 1;
7425 empty_var->n_var_parts = 0;
7426 empty_var->onepart = onepart;
7427 if (dslot)
7428 {
7429 empty_var->refcount++;
7430 *dslot = empty_var;
7431 }
7432 }
7433 else
7434 empty_var->refcount++;
7435 empty_var->in_changed_variables = true;
7436 *slot = empty_var;
7437 if (onepart)
7438 {
7439 empty_var->var_part[0].loc_chain = NULL;
7440 empty_var->var_part[0].cur_loc = NULL;
7441 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7442 VAR_LOC_1PAUX (var) = NULL;
7443 }
7444 goto drop_var;
7445 }
7446 else
7447 {
7448 if (var->onepart && !VAR_LOC_1PAUX (var))
7449 recover_dropped_1paux (var);
7450 var->refcount++;
7451 var->in_changed_variables = true;
7452 *slot = var;
7453 }
7454 }
7455 else
7456 {
7457 gcc_assert (set);
7458 if (var->n_var_parts == 0)
7459 {
7460 variable_def **slot;
7461
7462 drop_var:
7463 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7464 if (slot)
7465 {
7466 if (shared_hash_shared (set->vars))
7467 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7468 NO_INSERT);
7469 shared_hash_htab (set->vars)->clear_slot (slot);
7470 }
7471 }
7472 }
7473 }
7474
7475 /* Look for the index in VAR->var_part corresponding to OFFSET.
7476 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7477 referenced int will be set to the index that the part has or should
7478 have, if it should be inserted. */
7479
7480 static inline int
7481 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7482 int *insertion_point)
7483 {
7484 int pos, low, high;
7485
7486 if (var->onepart)
7487 {
7488 if (offset != 0)
7489 return -1;
7490
7491 if (insertion_point)
7492 *insertion_point = 0;
7493
7494 return var->n_var_parts - 1;
7495 }
7496
7497 /* Find the location part. */
7498 low = 0;
7499 high = var->n_var_parts;
7500 while (low != high)
7501 {
7502 pos = (low + high) / 2;
7503 if (VAR_PART_OFFSET (var, pos) < offset)
7504 low = pos + 1;
7505 else
7506 high = pos;
7507 }
7508 pos = low;
7509
7510 if (insertion_point)
7511 *insertion_point = pos;
7512
7513 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7514 return pos;
7515
7516 return -1;
7517 }
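
/* E.g. with VAR_PART_OFFSETs {0, 4, 8}, looking up offset 4 above
   returns position 1, while looking up offset 6 returns -1 and sets
   *INSERTION_POINT to 2, the index at which a part with that offset
   would have to be inserted to keep the array sorted.  */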
7518
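/* Set the part of variable's location for DV at OFFSET to LOC in the
   SET->vars slot SLOT, recording INITIALIZED and SET_SRC on the new
   location chain node and unsharing the variable first if it is
   shared.  For one-part variables the chain is kept in the canonical
   order defined by loc_cmp: registers (by register number), then
   MEMs, then VALUEs (by canon_value_cmp).  Return the possibly
   unshared slot.  */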
7519 static variable_def **
7520 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7521 decl_or_value dv, HOST_WIDE_INT offset,
7522 enum var_init_status initialized, rtx set_src)
7523 {
7524 int pos;
7525 location_chain node, next;
7526 location_chain *nextp;
7527 variable var;
7528 onepart_enum_t onepart;
7529
7530 var = *slot;
7531
7532 if (var)
7533 onepart = var->onepart;
7534 else
7535 onepart = dv_onepart_p (dv);
7536
7537 gcc_checking_assert (offset == 0 || !onepart);
7538 gcc_checking_assert (loc != dv_as_opaque (dv));
7539
7540 if (! flag_var_tracking_uninit)
7541 initialized = VAR_INIT_STATUS_INITIALIZED;
7542
7543 if (!var)
7544 {
7545 /* Create new variable information. */
7546 var = (variable) pool_alloc (onepart_pool (onepart));
7547 var->dv = dv;
7548 var->refcount = 1;
7549 var->n_var_parts = 1;
7550 var->onepart = onepart;
7551 var->in_changed_variables = false;
7552 if (var->onepart)
7553 VAR_LOC_1PAUX (var) = NULL;
7554 else
7555 VAR_PART_OFFSET (var, 0) = offset;
7556 var->var_part[0].loc_chain = NULL;
7557 var->var_part[0].cur_loc = NULL;
7558 *slot = var;
7559 pos = 0;
7560 nextp = &var->var_part[0].loc_chain;
7561 }
7562 else if (onepart)
7563 {
7564 int r = -1, c = 0;
7565
7566 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7567
7568 pos = 0;
7569
7570 if (GET_CODE (loc) == VALUE)
7571 {
7572 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7573 nextp = &node->next)
7574 if (GET_CODE (node->loc) == VALUE)
7575 {
7576 if (node->loc == loc)
7577 {
7578 r = 0;
7579 break;
7580 }
7581 if (canon_value_cmp (node->loc, loc))
7582 c++;
7583 else
7584 {
7585 r = 1;
7586 break;
7587 }
7588 }
7589 else if (REG_P (node->loc) || MEM_P (node->loc))
7590 c++;
7591 else
7592 {
7593 r = 1;
7594 break;
7595 }
7596 }
7597 else if (REG_P (loc))
7598 {
7599 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7600 nextp = &node->next)
7601 if (REG_P (node->loc))
7602 {
7603 if (REGNO (node->loc) < REGNO (loc))
7604 c++;
7605 else
7606 {
7607 if (REGNO (node->loc) == REGNO (loc))
7608 r = 0;
7609 else
7610 r = 1;
7611 break;
7612 }
7613 }
7614 else
7615 {
7616 r = 1;
7617 break;
7618 }
7619 }
7620 else if (MEM_P (loc))
7621 {
7622 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7623 nextp = &node->next)
7624 if (REG_P (node->loc))
7625 c++;
7626 else if (MEM_P (node->loc))
7627 {
7628 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7629 break;
7630 else
7631 c++;
7632 }
7633 else
7634 {
7635 r = 1;
7636 break;
7637 }
7638 }
7639 else
7640 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7641 nextp = &node->next)
7642 if ((r = loc_cmp (node->loc, loc)) >= 0)
7643 break;
7644 else
7645 c++;
7646
7647 if (r == 0)
7648 return slot;
7649
7650 if (shared_var_p (var, set->vars))
7651 {
7652 slot = unshare_variable (set, slot, var, initialized);
7653 var = *slot;
7654 for (nextp = &var->var_part[0].loc_chain; c;
7655 nextp = &(*nextp)->next)
7656 c--;
7657 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7658 }
7659 }
7660 else
7661 {
7662 int inspos = 0;
7663
7664 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7665
7666 pos = find_variable_location_part (var, offset, &inspos);
7667
7668 if (pos >= 0)
7669 {
7670 node = var->var_part[pos].loc_chain;
7671
7672 if (node
7673 && ((REG_P (node->loc) && REG_P (loc)
7674 && REGNO (node->loc) == REGNO (loc))
7675 || rtx_equal_p (node->loc, loc)))
7676 {
7677 /* LOC is at the beginning of the chain, so we have nothing
7678 to do. */
7679 if (node->init < initialized)
7680 node->init = initialized;
7681 if (set_src != NULL)
7682 node->set_src = set_src;
7683
7684 return slot;
7685 }
7686 else
7687 {
7688 /* We have to make a copy of a shared variable. */
7689 if (shared_var_p (var, set->vars))
7690 {
7691 slot = unshare_variable (set, slot, var, initialized);
7692 var = *slot;
7693 }
7694 }
7695 }
7696 else
7697 {
7698 /* We have not found the location part, so a new one will be created. */
7699
7700 /* We have to make a copy of the shared variable. */
7701 if (shared_var_p (var, set->vars))
7702 {
7703 slot = unshare_variable (set, slot, var, initialized);
7704 var = *slot;
7705 }
7706
7707 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7708 thus there are at most MAX_VAR_PARTS different offsets. */
7709 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7710 && (!var->n_var_parts || !onepart));
7711
7712 /* We have to move the elements of the array starting at index
7713 inspos to the next position. */
7714 for (pos = var->n_var_parts; pos > inspos; pos--)
7715 var->var_part[pos] = var->var_part[pos - 1];
7716
7717 var->n_var_parts++;
7718 gcc_checking_assert (!onepart);
7719 VAR_PART_OFFSET (var, pos) = offset;
7720 var->var_part[pos].loc_chain = NULL;
7721 var->var_part[pos].cur_loc = NULL;
7722 }
7723
7724 /* Delete the location from the list. */
7725 nextp = &var->var_part[pos].loc_chain;
7726 for (node = var->var_part[pos].loc_chain; node; node = next)
7727 {
7728 next = node->next;
7729 if ((REG_P (node->loc) && REG_P (loc)
7730 && REGNO (node->loc) == REGNO (loc))
7731 || rtx_equal_p (node->loc, loc))
7732 {
7733 /* Save these values, to assign to the new node, before
7734 deleting this one. */
7735 if (node->init > initialized)
7736 initialized = node->init;
7737 if (node->set_src != NULL && set_src == NULL)
7738 set_src = node->set_src;
7739 if (var->var_part[pos].cur_loc == node->loc)
7740 var->var_part[pos].cur_loc = NULL;
7741 pool_free (loc_chain_pool, node);
7742 *nextp = next;
7743 break;
7744 }
7745 else
7746 nextp = &node->next;
7747 }
7748
7749 nextp = &var->var_part[pos].loc_chain;
7750 }
7751
7752 /* Add the location to the beginning. */
7753 node = (location_chain) pool_alloc (loc_chain_pool);
7754 node->loc = loc;
7755 node->init = initialized;
7756 node->set_src = set_src;
7757 node->next = *nextp;
7758 *nextp = node;
7759
7760 /* If no location for this part was emitted yet, arrange for one to be. */
7761 if (var->var_part[pos].cur_loc == NULL)
7762 variable_was_changed (var, set);
7763
7764 return slot;
7765 }
7766
7767 /* Set the part of variable's location in the dataflow set SET. The
7768 variable part is specified by variable's declaration in DV and
7769 offset OFFSET and the part's location by LOC. IOPT should be
7770 NO_INSERT if the variable is known to be in SET already and the
7771 variable hash table must not be resized, and INSERT otherwise. */
7772
7773 static void
7774 set_variable_part (dataflow_set *set, rtx loc,
7775 decl_or_value dv, HOST_WIDE_INT offset,
7776 enum var_init_status initialized, rtx set_src,
7777 enum insert_option iopt)
7778 {
7779 variable_def **slot;
7780
7781 if (iopt == NO_INSERT)
7782 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7783 else
7784 {
7785 slot = shared_hash_find_slot (set->vars, dv);
7786 if (!slot)
7787 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7788 }
7789 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7790 }
7791
7792 /* Remove all recorded register locations for the given variable part
7793 from dataflow set SET, except for those that are identical to LOC.
7794 The variable part is specified by its SET->vars slot SLOT and
7795 offset OFFSET. */
7796
7797 static variable_def **
7798 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7799 HOST_WIDE_INT offset, rtx set_src)
7800 {
7801 variable var = *slot;
7802 int pos = find_variable_location_part (var, offset, NULL);
7803
7804 if (pos >= 0)
7805 {
7806 location_chain node, next;
7807
7808 /* Remove the register locations from the dataflow set. */
7809 next = var->var_part[pos].loc_chain;
7810 for (node = next; node; node = next)
7811 {
7812 next = node->next;
7813 if (node->loc != loc
7814 && (!flag_var_tracking_uninit
7815 || !set_src
7816 || MEM_P (set_src)
7817 || !rtx_equal_p (set_src, node->set_src)))
7818 {
7819 if (REG_P (node->loc))
7820 {
7821 attrs anode, anext;
7822 attrs *anextp;
7823
7824 /* Remove the variable part from the register's
7825 list, but preserve any other variable parts
7826 that might be regarded as live in that same
7827 register. */
7828 anextp = &set->regs[REGNO (node->loc)];
7829 for (anode = *anextp; anode; anode = anext)
7830 {
7831 anext = anode->next;
7832 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7833 && anode->offset == offset)
7834 {
7835 pool_free (attrs_pool, anode);
7836 *anextp = anext;
7837 }
7838 else
7839 anextp = &anode->next;
7840 }
7841 }
7842
7843 slot = delete_slot_part (set, node->loc, slot, offset);
7844 }
7845 }
7846 }
7847
7848 return slot;
7849 }
7850
7851 /* Remove all recorded register locations for the given variable part
7852 from dataflow set SET, except for those that are identical to LOC.
7853 The variable part is specified by variable's declaration or value
7854 DV and offset OFFSET. */
7855
7856 static void
7857 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7858 HOST_WIDE_INT offset, rtx set_src)
7859 {
7860 variable_def **slot;
7861
7862 if (!dv_as_opaque (dv)
7863 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7864 return;
7865
7866 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7867 if (!slot)
7868 return;
7869
7870 clobber_slot_part (set, loc, slot, offset, set_src);
7871 }
7872
7873 /* Delete the part of variable's location from dataflow set SET. The
7874 variable part is specified by its SET->vars slot SLOT and offset
7875 OFFSET and the part's location by LOC. */
7876
7877 static variable_def **
7878 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7879 HOST_WIDE_INT offset)
7880 {
7881 variable var = *slot;
7882 int pos = find_variable_location_part (var, offset, NULL);
7883
7884 if (pos >= 0)
7885 {
7886 location_chain node, next;
7887 location_chain *nextp;
7888 bool changed;
7889 rtx cur_loc;
7890
7891 if (shared_var_p (var, set->vars))
7892 {
7893 /* If the variable contains the location part, we have to
7894 make a copy of the variable. */
7895 for (node = var->var_part[pos].loc_chain; node;
7896 node = node->next)
7897 {
7898 if ((REG_P (node->loc) && REG_P (loc)
7899 && REGNO (node->loc) == REGNO (loc))
7900 || rtx_equal_p (node->loc, loc))
7901 {
7902 slot = unshare_variable (set, slot, var,
7903 VAR_INIT_STATUS_UNKNOWN);
7904 var = *slot;
7905 break;
7906 }
7907 }
7908 }
7909
7910 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7911 cur_loc = VAR_LOC_FROM (var);
7912 else
7913 cur_loc = var->var_part[pos].cur_loc;
7914
7915 /* Delete the location part. */
7916 changed = false;
7917 nextp = &var->var_part[pos].loc_chain;
7918 for (node = *nextp; node; node = next)
7919 {
7920 next = node->next;
7921 if ((REG_P (node->loc) && REG_P (loc)
7922 && REGNO (node->loc) == REGNO (loc))
7923 || rtx_equal_p (node->loc, loc))
7924 {
7925 /* If we have deleted the location which was last emitted,
7926 we have to emit a new location, so add the variable to the
7927 set of changed variables. */
7928 if (cur_loc == node->loc)
7929 {
7930 changed = true;
7931 var->var_part[pos].cur_loc = NULL;
7932 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7933 VAR_LOC_FROM (var) = NULL;
7934 }
7935 pool_free (loc_chain_pool, node);
7936 *nextp = next;
7937 break;
7938 }
7939 else
7940 nextp = &node->next;
7941 }
7942
7943 if (var->var_part[pos].loc_chain == NULL)
7944 {
7945 changed = true;
7946 var->n_var_parts--;
7947 while (pos < var->n_var_parts)
7948 {
7949 var->var_part[pos] = var->var_part[pos + 1];
7950 pos++;
7951 }
7952 }
7953 if (changed)
7954 variable_was_changed (var, set);
7955 }
7956
7957 return slot;
7958 }
7959
7960 /* Delete the part of variable's location from dataflow set SET. The
7961 variable part is specified by variable's declaration or value DV
7962 and offset OFFSET and the part's location by LOC. */
7963
7964 static void
7965 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7966 HOST_WIDE_INT offset)
7967 {
7968 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7969 if (!slot)
7970 return;
7971
7972 delete_slot_part (set, loc, slot, offset);
7973 }
7974
7975
7976 /* Structure for passing some other parameters to function
7977 vt_expand_loc_callback. */
7978 struct expand_loc_callback_data
7979 {
7980 /* The variables and values active at this point. */
7981 variable_table_type *vars;
7982
7983 /* Stack of values and debug_exprs under expansion, and their
7984 children. */
7985 auto_vec<rtx, 4> expanding;
7986
7987 /* Stack of values and debug_exprs whose expansion hit recursion
7988 cycles. They will have VALUE_RECURSED_INTO marked when added to
7989 this list. This flag will be cleared if any of its dependencies
7990 resolves to a valid location. So, if the flag remains set at the
7991 end of the search, we know no valid location for this one can
7992 possibly exist. */
7993 auto_vec<rtx, 4> pending;
7994
7995 /* The maximum depth among the sub-expressions under expansion.
7996 Zero indicates no expansion so far. */
7997 expand_depth depth;
7998 };
7999
8000 /* Allocate the one-part auxiliary data structure for VAR, with enough
8001 room for COUNT dependencies. */
8002
8003 static void
8004 loc_exp_dep_alloc (variable var, int count)
8005 {
8006 size_t allocsize;
8007
8008 gcc_checking_assert (var->onepart);
8009
8010 /* We can be called with COUNT == 0 to allocate the data structure
8011 without any dependencies, e.g. for the backlinks only. However,
8012 if we are specifying a COUNT, then the dependency list must have
8013 been emptied before. It would be possible to adjust pointers or
8014 force it empty here, but this is better done at an earlier point
8015 in the algorithm, so we instead leave an assertion to catch
8016 errors. */
8017 gcc_checking_assert (!count
8018 || VAR_LOC_DEP_VEC (var) == NULL
8019 || VAR_LOC_DEP_VEC (var)->is_empty ());
8020
8021 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8022 return;
8023
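/* The dependency vector lives at the tail of struct onepart_aux, so
   the single allocation computed below covers the header plus room
   for COUNT embedded vec elements (a flexible-array-member style
   layout).  */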
8024 allocsize = offsetof (struct onepart_aux, deps)
8025 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8026
8027 if (VAR_LOC_1PAUX (var))
8028 {
8029 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8030 VAR_LOC_1PAUX (var), allocsize);
8031 /* If the reallocation moves the onepaux structure, the
8032 back-pointer to BACKLINKS in the first list member will still
8033 point to its old location. Adjust it. */
8034 if (VAR_LOC_DEP_LST (var))
8035 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8036 }
8037 else
8038 {
8039 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8040 *VAR_LOC_DEP_LSTP (var) = NULL;
8041 VAR_LOC_FROM (var) = NULL;
8042 VAR_LOC_DEPTH (var).complexity = 0;
8043 VAR_LOC_DEPTH (var).entryvals = 0;
8044 }
8045 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8046 }
8047
8048 /* Remove all entries from the vector of active dependencies of VAR,
8049 removing them from the back-links lists too. */
8050
8051 static void
8052 loc_exp_dep_clear (variable var)
8053 {
8054 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8055 {
8056 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8057 if (led->next)
8058 led->next->pprev = led->pprev;
8059 if (led->pprev)
8060 *led->pprev = led->next;
8061 VAR_LOC_DEP_VEC (var)->pop ();
8062 }
8063 }
8064
8065 /* Insert an active dependency from VAR on X to the vector of
8066 dependencies, and add the corresponding back-link to X's list of
8067 back-links in VARS. */
8068
8069 static void
8070 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8071 {
8072 decl_or_value dv;
8073 variable xvar;
8074 loc_exp_dep *led;
8075
8076 dv = dv_from_rtx (x);
8077
8078 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8079 an additional look up? */
8080 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8081
8082 if (!xvar)
8083 {
8084 xvar = variable_from_dropped (dv, NO_INSERT);
8085 gcc_checking_assert (xvar);
8086 }
8087
8088 /* No point in adding the same backlink more than once. This may
8089 arise if, say, the same value appears in two complex expressions in
8090 the same loc_list, or even more than once in a single
8091 expression. */
8092 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8093 return;
8094
8095 if (var->onepart == NOT_ONEPART)
8096 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8097 else
8098 {
8099 loc_exp_dep empty;
8100 memset (&empty, 0, sizeof (empty));
8101 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8102 led = &VAR_LOC_DEP_VEC (var)->last ();
8103 }
8104 led->dv = var->dv;
8105 led->value = x;
8106
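/* Ensure XVAR has its auxiliary structure (possibly with zero
   dependencies, for the back-links alone), then splice LED at the
   head of XVAR's back-link list.  PPREV always points at the field
   that points at LED (the list head, or the previous node's NEXT),
   so a node can later be unlinked without walking the list from its
   head.  */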
8107 loc_exp_dep_alloc (xvar, 0);
8108 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8109 led->next = *led->pprev;
8110 if (led->next)
8111 led->next->pprev = &led->next;
8112 *led->pprev = led;
8113 }
8114
8115 /* Create active dependencies of VAR on COUNT values starting at
8116 VALUE, and corresponding back-links to the entries in VARS. Return
8117 true if we found any pending-recursion results. */
8118
8119 static bool
8120 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8121 variable_table_type *vars)
8122 {
8123 bool pending_recursion = false;
8124
8125 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8126 || VAR_LOC_DEP_VEC (var)->is_empty ());
8127
8128 /* Allocate the dependency vector up front, then record a
8129 dependency on each of the COUNT values in turn. */
8130 loc_exp_dep_alloc (var, count);
8131
8132 while (count--)
8133 {
8134 rtx x = *value++;
8135
8136 if (!pending_recursion)
8137 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8138
8139 loc_exp_insert_dep (var, x, vars);
8140 }
8141
8142 return pending_recursion;
8143 }
8144
8145 /* Notify the back-links of IVAR that are pending recursion that we
8146 have found a non-NIL value for it, so they are cleared for another
8147 attempt to compute a current location. */
8148
8149 static void
8150 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8151 {
8152 loc_exp_dep *led, *next;
8153
8154 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8155 {
8156 decl_or_value dv = led->dv;
8157 variable var;
8158
8159 next = led->next;
8160
8161 if (dv_is_value_p (dv))
8162 {
8163 rtx value = dv_as_value (dv);
8164
8165 /* If we have already resolved it, leave it alone. */
8166 if (!VALUE_RECURSED_INTO (value))
8167 continue;
8168
8169 /* Check that VALUE_RECURSED_INTO, true from the test above,
8170 implies NO_LOC_P. */
8171 gcc_checking_assert (NO_LOC_P (value));
8172
8173 /* We won't notify variables that are being expanded,
8174 because their dependency list is cleared before
8175 recursing. */
8176 NO_LOC_P (value) = false;
8177 VALUE_RECURSED_INTO (value) = false;
8178
8179 gcc_checking_assert (dv_changed_p (dv));
8180 }
8181 else
8182 {
8183 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8184 if (!dv_changed_p (dv))
8185 continue;
8186 }
8187
8188 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8189
8190 if (!var)
8191 var = variable_from_dropped (dv, NO_INSERT);
8192
8193 if (var)
8194 notify_dependents_of_resolved_value (var, vars);
8195
8196 if (next)
8197 next->pprev = led->pprev;
8198 if (led->pprev)
8199 *led->pprev = next;
8200 led->next = NULL;
8201 led->pprev = NULL;
8202 }
8203 }
8204
8205 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8206 int max_depth, void *data);
8207
8208 /* Return the combined depth, when one sub-expression evaluated to
8209 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8210
8211 static inline expand_depth
8212 update_depth (expand_depth saved_depth, expand_depth best_depth)
8213 {
8214 /* If we didn't find anything, stick with what we had. */
8215 if (!best_depth.complexity)
8216 return saved_depth;
8217
8218 /* If we hadn't found anything before, use the depth of the current
8219 expression. Do NOT add one extra level, we want to compute the
8220 maximum depth among sub-expressions. We'll increment it later,
8221 if appropriate. */
8222 if (!saved_depth.complexity)
8223 return best_depth;
8224
8225 /* Combine the entryval count so that regardless of which one we
8226 return, the entryval count is accurate. */
8227 best_depth.entryvals = saved_depth.entryvals
8228 = best_depth.entryvals + saved_depth.entryvals;
8229
8230 if (saved_depth.complexity < best_depth.complexity)
8231 return best_depth;
8232 else
8233 return saved_depth;
8234 }
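/* A worked example (pairs are {complexity, entryvals}, hypothetical
   values for illustration): combining SAVED_DEPTH = {2, 1} with
   BEST_DEPTH = {3, 0} yields {3, 1} -- the larger complexity wins,
   while the entryval counts are summed first so that the total stays
   accurate whichever operand is returned.  */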
8235
8236 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8237 DATA for cselib expand callback. If PENDRECP is given, indicate in
8238 it whether any sub-expression couldn't be fully evaluated because
8239 it is pending recursion resolution. */
8240
8241 static inline rtx
8242 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8243 {
8244 struct expand_loc_callback_data *elcd
8245 = (struct expand_loc_callback_data *) data;
8246 location_chain loc, next;
8247 rtx result = NULL;
8248 int first_child, result_first_child, last_child;
8249 bool pending_recursion;
8250 rtx loc_from = NULL;
8251 struct elt_loc_list *cloc = NULL;
8252 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8253 int wanted_entryvals, found_entryvals = 0;
8254
8255 /* Clear all backlinks pointing at this, so that we're not notified
8256 while we're active. */
8257 loc_exp_dep_clear (var);
8258
8259 retry:
8260 if (var->onepart == ONEPART_VALUE)
8261 {
8262 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8263
8264 gcc_checking_assert (cselib_preserved_value_p (val));
8265
8266 cloc = val->locs;
8267 }
8268
8269 first_child = result_first_child = last_child
8270 = elcd->expanding.length ();
8271
8272 wanted_entryvals = found_entryvals;
8273
8274 /* Attempt to expand each available location in turn. */
8275 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8276 loc || cloc; loc = next)
8277 {
8278 result_first_child = last_child;
8279
8280 if (!loc)
8281 {
8282 loc_from = cloc->loc;
8283 next = loc;
8284 cloc = cloc->next;
8285 if (unsuitable_loc (loc_from))
8286 continue;
8287 }
8288 else
8289 {
8290 loc_from = loc->loc;
8291 next = loc->next;
8292 }
8293
8294 gcc_checking_assert (!unsuitable_loc (loc_from));
8295
8296 elcd->depth.complexity = elcd->depth.entryvals = 0;
8297 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8298 vt_expand_loc_callback, data);
8299 last_child = elcd->expanding.length ();
8300
8301 if (result)
8302 {
8303 depth = elcd->depth;
8304
8305 gcc_checking_assert (depth.complexity
8306 || result_first_child == last_child);
8307
8308 if (last_child - result_first_child != 1)
8309 {
8310 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8311 depth.entryvals++;
8312 depth.complexity++;
8313 }
8314
8315 if (depth.complexity <= EXPR_USE_DEPTH)
8316 {
8317 if (depth.entryvals <= wanted_entryvals)
8318 break;
8319 else if (!found_entryvals || depth.entryvals < found_entryvals)
8320 found_entryvals = depth.entryvals;
8321 }
8322
8323 result = NULL;
8324 }
8325
8326 /* Set it up in case we leave the loop. */
8327 depth.complexity = depth.entryvals = 0;
8328 loc_from = NULL;
8329 result_first_child = first_child;
8330 }
8331
8332 if (!loc_from && wanted_entryvals < found_entryvals)
8333 {
8334 /* We found entries with ENTRY_VALUEs and skipped them. Since
8335 we could not find any expansions without ENTRY_VALUEs, but we
8336 found at least one with them, go back and get an entry with
8337 the minimum ENTRY_VALUE count that we found. We could
8338 avoid looping, but since each sub-loc is already resolved,
8339 the re-expansion should be trivial. ??? Should we record all
8340 attempted locs as dependencies, so that we retry the
8341 expansion should any of them change, in the hope it can give
8342 us a new entry without an ENTRY_VALUE? */
8343 elcd->expanding.truncate (first_child);
8344 goto retry;
8345 }
8346
8347 /* Register all encountered dependencies as active. */
8348 pending_recursion = loc_exp_dep_set
8349 (var, result, elcd->expanding.address () + result_first_child,
8350 last_child - result_first_child, elcd->vars);
8351
8352 elcd->expanding.truncate (first_child);
8353
8354 /* Record where the expansion came from. */
8355 gcc_checking_assert (!result || !pending_recursion);
8356 VAR_LOC_FROM (var) = loc_from;
8357 VAR_LOC_DEPTH (var) = depth;
8358
8359 gcc_checking_assert (!depth.complexity == !result);
8360
8361 elcd->depth = update_depth (saved_depth, depth);
8362
8363 /* Indicate whether any of the dependencies are pending recursion
8364 resolution. */
8365 if (pendrecp)
8366 *pendrecp = pending_recursion;
8367
8368 if (!pendrecp || !pending_recursion)
8369 var->var_part[0].cur_loc = result;
8370
8371 return result;
8372 }
8373
8374 /* Callback for cselib_expand_value that looks for expressions
8375 holding the value in the var-tracking hash tables. Return X for
8376 standard processing; anything else is to be used as-is. */
8377
8378 static rtx
8379 vt_expand_loc_callback (rtx x, bitmap regs,
8380 int max_depth ATTRIBUTE_UNUSED,
8381 void *data)
8382 {
8383 struct expand_loc_callback_data *elcd
8384 = (struct expand_loc_callback_data *) data;
8385 decl_or_value dv;
8386 variable var;
8387 rtx result, subreg;
8388 bool pending_recursion = false;
8389 bool from_empty = false;
8390
8391 switch (GET_CODE (x))
8392 {
8393 case SUBREG:
8394 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8395 EXPR_DEPTH,
8396 vt_expand_loc_callback, data);
8397
8398 if (!subreg)
8399 return NULL;
8400
8401 result = simplify_gen_subreg (GET_MODE (x), subreg,
8402 GET_MODE (SUBREG_REG (x)),
8403 SUBREG_BYTE (x));
8404
8405 /* Invalid SUBREGs are ok in debug info. ??? We could try
8406 alternate expansions for the VALUE as well. */
8407 if (!result)
8408 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8409
8410 return result;
8411
8412 case DEBUG_EXPR:
8413 case VALUE:
8414 dv = dv_from_rtx (x);
8415 break;
8416
8417 default:
8418 return x;
8419 }
8420
8421 elcd->expanding.safe_push (x);
8422
8423 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8424 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8425
8426 if (NO_LOC_P (x))
8427 {
8428 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8429 return NULL;
8430 }
8431
8432 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8433
8434 if (!var)
8435 {
8436 from_empty = true;
8437 var = variable_from_dropped (dv, INSERT);
8438 }
8439
8440 gcc_checking_assert (var);
8441
8442 if (!dv_changed_p (dv))
8443 {
8444 gcc_checking_assert (!NO_LOC_P (x));
8445 gcc_checking_assert (var->var_part[0].cur_loc);
8446 gcc_checking_assert (VAR_LOC_1PAUX (var));
8447 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8448
8449 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8450
8451 return var->var_part[0].cur_loc;
8452 }
8453
8454 VALUE_RECURSED_INTO (x) = true;
8455 /* This is tentative, but it makes some tests simpler. */
8456 NO_LOC_P (x) = true;
8457
8458 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8459
8460 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8461
8462 if (pending_recursion)
8463 {
8464 gcc_checking_assert (!result);
8465 elcd->pending.safe_push (x);
8466 }
8467 else
8468 {
8469 NO_LOC_P (x) = !result;
8470 VALUE_RECURSED_INTO (x) = false;
8471 set_dv_changed (dv, false);
8472
8473 if (result)
8474 notify_dependents_of_resolved_value (var, elcd->vars);
8475 }
8476
8477 return result;
8478 }
8479
8480 /* While expanding variables, we may encounter recursion cycles
8481 because of mutual (possibly indirect) dependencies between two
8482 particular variables (or values), say A and B. If, while trying
8483 to expand A, we get to B, which in turn attempts to expand A, and
8484 we can't find any other expansion for B, we'll add B to this
8485 pending-recursion stack and tentatively return NULL for its
8486 location. This tentative value will be used for any other
8487 occurrences of B, unless A gets some other location, in which case
8488 it will notify B that it is worth another try at computing a
8489 location for it, and it will use the location computed for A then.
8490 At the end of the expansion, the tentative NULL locations become
8491 final for all members of PENDING that didn't get a notification.
8492 This function performs this finalization of NULL locations. */
8493
8494 static void
8495 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8496 {
8497 while (!pending->is_empty ())
8498 {
8499 rtx x = pending->pop ();
8500 decl_or_value dv;
8501
8502 if (!VALUE_RECURSED_INTO (x))
8503 continue;
8504
8505 gcc_checking_assert (NO_LOC_P (x));
8506 VALUE_RECURSED_INTO (x) = false;
8507 dv = dv_from_rtx (x);
8508 gcc_checking_assert (dv_changed_p (dv));
8509 set_dv_changed (dv, false);
8510 }
8511 }
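/* Concretely, assuming a hypothetical dataflow state for
   illustration:

     VALUE A has locations { B, (plus B (const_int 4)) }
     VALUE B has locations { A }

   expanding A recurses into B, which recurses back into A; B is
   pushed on PENDING and tentatively expands to NULL.  If A later
   resolves through some other location, B is notified and retried;
   otherwise the pop loop above makes B's NULL location final.  */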
8512
8513 /* Initialize expand_loc_callback_data D with variable hash table V.
8514 It must be a macro because of alloca (vec stack). */
8515 #define INIT_ELCD(d, v) \
8516 do \
8517 { \
8518 (d).vars = (v); \
8519 (d).depth.complexity = (d).depth.entryvals = 0; \
8520 } \
8521 while (0)
8522 /* Finalize expand_loc_callback_data D, resolved to location L. */
8523 #define FINI_ELCD(d, l) \
8524 do \
8525 { \
8526 resolve_expansions_pending_recursion (&(d).pending); \
8527 (d).pending.release (); \
8528 (d).expanding.release (); \
8529 \
8530 if ((l) && MEM_P (l)) \
8531 (l) = targetm.delegitimize_address (l); \
8532 } \
8533 while (0)
8534
8535 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8536 equivalences in VARS, updating their CUR_LOCs in the process. */
8537
8538 static rtx
8539 vt_expand_loc (rtx loc, variable_table_type *vars)
8540 {
8541 struct expand_loc_callback_data data;
8542 rtx result;
8543
8544 if (!MAY_HAVE_DEBUG_INSNS)
8545 return loc;
8546
8547 INIT_ELCD (data, vars);
8548
8549 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8550 vt_expand_loc_callback, &data);
8551
8552 FINI_ELCD (data, result);
8553
8554 return result;
8555 }
8556
8557 /* Expand the one-part VARiable to a location, using the equivalences
8558 in VARS, updating their CUR_LOCs in the process. */
8559
8560 static rtx
8561 vt_expand_1pvar (variable var, variable_table_type *vars)
8562 {
8563 struct expand_loc_callback_data data;
8564 rtx loc;
8565
8566 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8567
8568 if (!dv_changed_p (var->dv))
8569 return var->var_part[0].cur_loc;
8570
8571 INIT_ELCD (data, vars);
8572
8573 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8574
8575 gcc_checking_assert (data.expanding.is_empty ());
8576
8577 FINI_ELCD (data, loc);
8578
8579 return loc;
8580 }
8581
8582 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8583 additional parameters: WHERE specifies whether the note shall be emitted
8584 before or after instruction INSN. */
8585
8586 int
8587 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8588 {
8589 variable var = *varp;
8590 rtx_insn *insn = data->insn;
8591 enum emit_note_where where = data->where;
8592 variable_table_type *vars = data->vars;
8593 rtx_note *note;
8594 rtx note_vl;
8595 int i, j, n_var_parts;
8596 bool complete;
8597 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8598 HOST_WIDE_INT last_limit;
8599 tree type_size_unit;
8600 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8601 rtx loc[MAX_VAR_PARTS];
8602 tree decl;
8603 location_chain lc;
8604
8605 gcc_checking_assert (var->onepart == NOT_ONEPART
8606 || var->onepart == ONEPART_VDECL);
8607
8608 decl = dv_as_decl (var->dv);
8609
8610 complete = true;
8611 last_limit = 0;
8612 n_var_parts = 0;
8613 if (!var->onepart)
8614 for (i = 0; i < var->n_var_parts; i++)
8615 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8616 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8617 for (i = 0; i < var->n_var_parts; i++)
8618 {
8619 enum machine_mode mode, wider_mode;
8620 rtx loc2;
8621 HOST_WIDE_INT offset;
8622
8623 if (i == 0 && var->onepart)
8624 {
8625 gcc_checking_assert (var->n_var_parts == 1);
8626 offset = 0;
8627 initialized = VAR_INIT_STATUS_INITIALIZED;
8628 loc2 = vt_expand_1pvar (var, vars);
8629 }
8630 else
8631 {
8632 if (last_limit < VAR_PART_OFFSET (var, i))
8633 {
8634 complete = false;
8635 break;
8636 }
8637 else if (last_limit > VAR_PART_OFFSET (var, i))
8638 continue;
8639 offset = VAR_PART_OFFSET (var, i);
8640 loc2 = var->var_part[i].cur_loc;
8641 if (loc2 && GET_CODE (loc2) == MEM
8642 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8643 {
8644 rtx depval = XEXP (loc2, 0);
8645
8646 loc2 = vt_expand_loc (loc2, vars);
8647
8648 if (loc2)
8649 loc_exp_insert_dep (var, depval, vars);
8650 }
8651 if (!loc2)
8652 {
8653 complete = false;
8654 continue;
8655 }
8656 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8657 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8658 if (var->var_part[i].cur_loc == lc->loc)
8659 {
8660 initialized = lc->init;
8661 break;
8662 }
8663 gcc_assert (lc);
8664 }
8665
8666 offsets[n_var_parts] = offset;
8667 if (!loc2)
8668 {
8669 complete = false;
8670 continue;
8671 }
8672 loc[n_var_parts] = loc2;
8673 mode = GET_MODE (var->var_part[i].cur_loc);
8674 if (mode == VOIDmode && var->onepart)
8675 mode = DECL_MODE (decl);
8676 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8677
8678 /* Attempt to merge adjacent registers or memory. */
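/* For example (assuming a little-endian target, for illustration):
   a 64-bit variable tracked as (reg:SI 0) at offset 0 and (reg:SI 1)
   at offset 4 can be described by a single (reg:DI 0), and adjacent
   stack slots (mem:SI (plus sp 8)) and (mem:SI (plus sp 12)) by one
   (mem:DI (plus sp 8)).  */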
8679 wider_mode = GET_MODE_WIDER_MODE (mode);
8680 for (j = i + 1; j < var->n_var_parts; j++)
8681 if (last_limit <= VAR_PART_OFFSET (var, j))
8682 break;
8683 if (j < var->n_var_parts
8684 && wider_mode != VOIDmode
8685 && var->var_part[j].cur_loc
8686 && mode == GET_MODE (var->var_part[j].cur_loc)
8687 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8688 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8689 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8690 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8691 {
8692 rtx new_loc = NULL;
8693
8694 if (REG_P (loc[n_var_parts])
8695 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8696 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8697 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8698 == REGNO (loc2))
8699 {
8700 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8701 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8702 mode, 0);
8703 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8704 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8705 if (new_loc)
8706 {
8707 if (!REG_P (new_loc)
8708 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8709 new_loc = NULL;
8710 else
8711 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8712 }
8713 }
8714 else if (MEM_P (loc[n_var_parts])
8715 && GET_CODE (XEXP (loc2, 0)) == PLUS
8716 && REG_P (XEXP (XEXP (loc2, 0), 0))
8717 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8718 {
8719 if ((REG_P (XEXP (loc[n_var_parts], 0))
8720 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8721 XEXP (XEXP (loc2, 0), 0))
8722 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8723 == GET_MODE_SIZE (mode))
8724 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8725 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8726 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8727 XEXP (XEXP (loc2, 0), 0))
8728 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8729 + GET_MODE_SIZE (mode)
8730 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8731 new_loc = adjust_address_nv (loc[n_var_parts],
8732 wider_mode, 0);
8733 }
8734
8735 if (new_loc)
8736 {
8737 loc[n_var_parts] = new_loc;
8738 mode = wider_mode;
8739 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8740 i = j;
8741 }
8742 }
8743 ++n_var_parts;
8744 }
8745 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8746 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8747 complete = false;
8748
8749 if (! flag_var_tracking_uninit)
8750 initialized = VAR_INIT_STATUS_INITIALIZED;
8751
8752 note_vl = NULL_RTX;
8753 if (!complete)
8754 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8755 else if (n_var_parts == 1)
8756 {
8757 rtx expr_list;
8758
8759 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8760 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8761 else
8762 expr_list = loc[0];
8763
8764 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8765 }
8766 else if (n_var_parts)
8767 {
8768 rtx parallel;
8769
8770 for (i = 0; i < n_var_parts; i++)
8771 loc[i]
8772 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8773
8774 parallel = gen_rtx_PARALLEL (VOIDmode,
8775 gen_rtvec_v (n_var_parts, loc));
8776 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8777 parallel, initialized);
8778 }
8779
8780 if (where != EMIT_NOTE_BEFORE_INSN)
8781 {
8782 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8783 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8784 NOTE_DURING_CALL_P (note) = true;
8785 }
8786 else
8787 {
8788 /* Make sure that the call-related notes come first. */
8789 while (NEXT_INSN (insn)
8790 && NOTE_P (insn)
8791 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8792 && NOTE_DURING_CALL_P (insn))
8793 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8794 insn = NEXT_INSN (insn);
8795 if (NOTE_P (insn)
8796 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8797 && NOTE_DURING_CALL_P (insn))
8798 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8799 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8800 else
8801 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8802 }
8803 NOTE_VAR_LOCATION (note) = note_vl;
8804
8805 set_dv_changed (var->dv, false);
8806 gcc_assert (var->in_changed_variables);
8807 var->in_changed_variables = false;
8808 changed_variables->clear_slot (varp);
8809
8810 /* Continue traversing the hash table. */
8811 return 1;
8812 }
8813
8814 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8815 (a stack of RTX values) entries that aren't user variables. */
8816
8817 int
8818 var_track_values_to_stack (variable_def **slot,
8819 vec<rtx, va_heap> *changed_values_stack)
8820 {
8821 variable var = *slot;
8822
8823 if (var->onepart == ONEPART_VALUE)
8824 changed_values_stack->safe_push (dv_as_value (var->dv));
8825 else if (var->onepart == ONEPART_DEXPR)
8826 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8827
8828 return 1;
8829 }
8830
8831 /* Remove from changed_variables the entry whose DV corresponds to
8832 value or debug_expr VAL. */
8833 static void
8834 remove_value_from_changed_variables (rtx val)
8835 {
8836 decl_or_value dv = dv_from_rtx (val);
8837 variable_def **slot;
8838 variable var;
8839
8840 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8841 NO_INSERT);
8842 var = *slot;
8843 var->in_changed_variables = false;
8844 changed_variables->clear_slot (slot);
8845 }
8846
8847 /* If VAL (a value or debug_expr) has backlinks to variables actively
8848 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8849 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8850 have dependencies of their own to notify. */
8851
8852 static void
8853 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8854 vec<rtx, va_heap> *changed_values_stack)
8855 {
8856 variable_def **slot;
8857 variable var;
8858 loc_exp_dep *led;
8859 decl_or_value dv = dv_from_rtx (val);
8860
8861 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8862 NO_INSERT);
8863 if (!slot)
8864 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8865 if (!slot)
8866 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8867 NO_INSERT);
8868 var = *slot;
8869
8870 while ((led = VAR_LOC_DEP_LST (var)))
8871 {
8872 decl_or_value ldv = led->dv;
8873 variable ivar;
8874
8875 /* Deactivate and remove the backlink, as it was "used up". It
8876 makes no sense to attempt to notify the same entity again:
8877 either it will be recomputed and re-register an active
8878 dependency, or it will still have the changed mark. */
8879 if (led->next)
8880 led->next->pprev = led->pprev;
8881 if (led->pprev)
8882 *led->pprev = led->next;
8883 led->next = NULL;
8884 led->pprev = NULL;
8885
8886 if (dv_changed_p (ldv))
8887 continue;
8888
8889 switch (dv_onepart_p (ldv))
8890 {
8891 case ONEPART_VALUE:
8892 case ONEPART_DEXPR:
8893 set_dv_changed (ldv, true);
8894 changed_values_stack->safe_push (dv_as_rtx (ldv));
8895 break;
8896
8897 case ONEPART_VDECL:
8898 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8899 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8900 variable_was_changed (ivar, NULL);
8901 break;
8902
8903 case NOT_ONEPART:
8904 pool_free (loc_exp_dep_pool, led);
8905 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8906 if (ivar)
8907 {
8908 int i = ivar->n_var_parts;
8909 while (i--)
8910 {
8911 rtx loc = ivar->var_part[i].cur_loc;
8912
8913 if (loc && GET_CODE (loc) == MEM
8914 && XEXP (loc, 0) == val)
8915 {
8916 variable_was_changed (ivar, NULL);
8917 break;
8918 }
8919 }
8920 }
8921 break;
8922
8923 default:
8924 gcc_unreachable ();
8925 }
8926 }
8927 }
8928
8929 /* Take out of changed_variables any entries that don't refer to user
8930 variables. Back-propagate change notifications from values and
8931 debug_exprs to their active dependencies in HTAB or in
8932 CHANGED_VARIABLES. */
8933
8934 static void
8935 process_changed_values (variable_table_type *htab)
8936 {
8937 int i, n;
8938 rtx val;
8939 auto_vec<rtx, 20> changed_values_stack;
8940
8941 /* Move values from changed_variables to changed_values_stack. */
8942 changed_variables
8943 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8944 (&changed_values_stack);
8945
8946 /* Back-propagate change notifications in values while popping
8947 them from the stack. */
8948 for (n = i = changed_values_stack.length ();
8949 i > 0; i = changed_values_stack.length ())
8950 {
8951 val = changed_values_stack.pop ();
8952 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8953
8954 /* This condition will hold when visiting each of the entries
8955 originally in changed_variables. We can't remove them
8956 earlier because this could drop the backlinks before we got a
8957 chance to use them. */
8958 if (i == n)
8959 {
8960 remove_value_from_changed_variables (val);
8961 n--;
8962 }
8963 }
8964 }
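/* E.g. with original entries {V1, V2} (N == 2): V2 is popped with
   I == N, so it is removed from changed_variables and N drops to 1;
   a dependent value V3 pushed while notifying V2 is popped next with
   I == 2 != N and is only notified; finally V1 is popped with
   I == N == 1 and removed.  (Hypothetical values, for illustration.)  */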
8965
8966 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
8967 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8968 the notes shall be emitted before or after instruction INSN. */
8969
8970 static void
8971 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8972 shared_hash vars)
8973 {
8974 emit_note_data data;
8975 variable_table_type *htab = shared_hash_htab (vars);
8976
8977 if (!changed_variables->elements ())
8978 return;
8979
8980 if (MAY_HAVE_DEBUG_INSNS)
8981 process_changed_values (htab);
8982
8983 data.insn = insn;
8984 data.where = where;
8985 data.vars = htab;
8986
8987 changed_variables
8988 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8989 }
8990
8991 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from
8992 the same variable in hash table NEW_VARS or is not there at all. */
8993
8994 int
8995 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
8996 {
8997 variable old_var, new_var;
8998
8999 old_var = *slot;
9000 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9001
9002 if (!new_var)
9003 {
9004 /* Variable has disappeared. */
9005 variable empty_var = NULL;
9006
9007 if (old_var->onepart == ONEPART_VALUE
9008 || old_var->onepart == ONEPART_DEXPR)
9009 {
9010 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9011 if (empty_var)
9012 {
9013 gcc_checking_assert (!empty_var->in_changed_variables);
9014 if (!VAR_LOC_1PAUX (old_var))
9015 {
9016 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9017 VAR_LOC_1PAUX (empty_var) = NULL;
9018 }
9019 else
9020 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9021 }
9022 }
9023
9024 if (!empty_var)
9025 {
9026 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9027 empty_var->dv = old_var->dv;
9028 empty_var->refcount = 0;
9029 empty_var->n_var_parts = 0;
9030 empty_var->onepart = old_var->onepart;
9031 empty_var->in_changed_variables = false;
9032 }
9033
9034 if (empty_var->onepart)
9035 {
9036 /* Propagate the auxiliary data to (ultimately)
9037 changed_variables. */
9038 empty_var->var_part[0].loc_chain = NULL;
9039 empty_var->var_part[0].cur_loc = NULL;
9040 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9041 VAR_LOC_1PAUX (old_var) = NULL;
9042 }
9043 variable_was_changed (empty_var, NULL);
9044 /* Continue traversing the hash table. */
9045 return 1;
9046 }
9047 /* Update cur_loc and one-part auxiliary data, before new_var goes
9048 through variable_was_changed. */
9049 if (old_var != new_var && new_var->onepart)
9050 {
9051 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9052 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9053 VAR_LOC_1PAUX (old_var) = NULL;
9054 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9055 }
9056 if (variable_different_p (old_var, new_var))
9057 variable_was_changed (new_var, NULL);
9058
9059 /* Continue traversing the hash table. */
9060 return 1;
9061 }
9062
9063 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9064 table OLD_VARS. */
9065
9066 int
9067 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9068 {
9069 variable old_var, new_var;
9070
9071 new_var = *slot;
9072 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9073 if (!old_var)
9074 {
9075 int i;
9076 for (i = 0; i < new_var->n_var_parts; i++)
9077 new_var->var_part[i].cur_loc = NULL;
9078 variable_was_changed (new_var, NULL);
9079 }
9080
9081 /* Continue traversing the hash table. */
9082 return 1;
9083 }
9084
9085 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9086 NEW_SET. */
9087
9088 static void
9089 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9090 dataflow_set *new_set)
9091 {
9092 shared_hash_htab (old_set->vars)
9093 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9094 (shared_hash_htab (new_set->vars));
9095 shared_hash_htab (new_set->vars)
9096 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9097 (shared_hash_htab (old_set->vars));
9098 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9099 }
9100
9101 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9102
9103 static rtx_insn *
9104 next_non_note_insn_var_location (rtx_insn *insn)
9105 {
9106 while (insn)
9107 {
9108 insn = NEXT_INSN (insn);
9109 if (insn == 0
9110 || !NOTE_P (insn)
9111 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9112 break;
9113 }
9114
9115 return insn;
9116 }
9117
9118 /* Emit the notes for changes of location parts in the basic block BB. */
9119
9120 static void
9121 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9122 {
9123 unsigned int i;
9124 micro_operation *mo;
9125
9126 dataflow_set_clear (set);
9127 dataflow_set_copy (set, &VTI (bb)->in);
9128
9129 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9130 {
9131 rtx_insn *insn = mo->insn;
9132 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9133
9134 switch (mo->type)
9135 {
9136 case MO_CALL:
9137 dataflow_set_clear_at_call (set);
9138 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9139 {
9140 rtx arguments = mo->u.loc, *p = &arguments;
9141 rtx_note *note;
9142 while (*p)
9143 {
9144 XEXP (XEXP (*p, 0), 1)
9145 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9146 shared_hash_htab (set->vars));
9147 /* If expansion is successful, keep it in the list. */
9148 if (XEXP (XEXP (*p, 0), 1))
9149 p = &XEXP (*p, 1);
9150 /* Otherwise, if the following item is the data_value for it,
9151 drop it too. */
9152 else if (XEXP (*p, 1)
9153 && REG_P (XEXP (XEXP (*p, 0), 0))
9154 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9155 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9156 0))
9157 && REGNO (XEXP (XEXP (*p, 0), 0))
9158 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9159 0), 0)))
9160 *p = XEXP (XEXP (*p, 1), 1);
9161 /* Just drop this item. */
9162 else
9163 *p = XEXP (*p, 1);
9164 }
9165 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9166 NOTE_VAR_LOCATION (note) = arguments;
9167 }
9168 break;
9169
9170 case MO_USE:
9171 {
9172 rtx loc = mo->u.loc;
9173
9174 if (REG_P (loc))
9175 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9176 else
9177 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9178
9179 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9180 }
9181 break;
9182
9183 case MO_VAL_LOC:
9184 {
9185 rtx loc = mo->u.loc;
9186 rtx val, vloc;
9187 tree var;
9188
9189 if (GET_CODE (loc) == CONCAT)
9190 {
9191 val = XEXP (loc, 0);
9192 vloc = XEXP (loc, 1);
9193 }
9194 else
9195 {
9196 val = NULL_RTX;
9197 vloc = loc;
9198 }
9199
9200 var = PAT_VAR_LOCATION_DECL (vloc);
9201
9202 clobber_variable_part (set, NULL_RTX,
9203 dv_from_decl (var), 0, NULL_RTX);
9204 if (val)
9205 {
9206 if (VAL_NEEDS_RESOLUTION (loc))
9207 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9208 set_variable_part (set, val, dv_from_decl (var), 0,
9209 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9210 INSERT);
9211 }
9212 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9213 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9214 dv_from_decl (var), 0,
9215 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9216 INSERT);
9217
9218 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9219 }
9220 break;
9221
9222 case MO_VAL_USE:
9223 {
9224 rtx loc = mo->u.loc;
9225 rtx val, vloc, uloc;
9226
9227 vloc = uloc = XEXP (loc, 1);
9228 val = XEXP (loc, 0);
9229
9230 if (GET_CODE (val) == CONCAT)
9231 {
9232 uloc = XEXP (val, 1);
9233 val = XEXP (val, 0);
9234 }
9235
9236 if (VAL_NEEDS_RESOLUTION (loc))
9237 val_resolve (set, val, vloc, insn);
9238 else
9239 val_store (set, val, uloc, insn, false);
9240
9241 if (VAL_HOLDS_TRACK_EXPR (loc))
9242 {
9243 if (GET_CODE (uloc) == REG)
9244 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9245 NULL);
9246 else if (GET_CODE (uloc) == MEM)
9247 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9248 NULL);
9249 }
9250
9251 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9252 }
9253 break;
9254
9255 case MO_VAL_SET:
9256 {
9257 rtx loc = mo->u.loc;
9258 rtx val, vloc, uloc;
9259 rtx dstv, srcv;
9260
9261 vloc = loc;
9262 uloc = XEXP (vloc, 1);
9263 val = XEXP (vloc, 0);
9264 vloc = uloc;
9265
9266 if (GET_CODE (uloc) == SET)
9267 {
9268 dstv = SET_DEST (uloc);
9269 srcv = SET_SRC (uloc);
9270 }
9271 else
9272 {
9273 dstv = uloc;
9274 srcv = NULL;
9275 }
9276
9277 if (GET_CODE (val) == CONCAT)
9278 {
9279 dstv = vloc = XEXP (val, 1);
9280 val = XEXP (val, 0);
9281 }
9282
9283 if (GET_CODE (vloc) == SET)
9284 {
9285 srcv = SET_SRC (vloc);
9286
9287 gcc_assert (val != srcv);
9288 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9289
9290 dstv = vloc = SET_DEST (vloc);
9291
9292 if (VAL_NEEDS_RESOLUTION (loc))
9293 val_resolve (set, val, srcv, insn);
9294 }
9295 else if (VAL_NEEDS_RESOLUTION (loc))
9296 {
9297 gcc_assert (GET_CODE (uloc) == SET
9298 && GET_CODE (SET_SRC (uloc)) == REG);
9299 val_resolve (set, val, SET_SRC (uloc), insn);
9300 }
9301
9302 if (VAL_HOLDS_TRACK_EXPR (loc))
9303 {
9304 if (VAL_EXPR_IS_CLOBBERED (loc))
9305 {
9306 if (REG_P (uloc))
9307 var_reg_delete (set, uloc, true);
9308 else if (MEM_P (uloc))
9309 {
9310 gcc_assert (MEM_P (dstv));
9311 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9312 var_mem_delete (set, dstv, true);
9313 }
9314 }
9315 else
9316 {
9317 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9318 rtx src = NULL, dst = uloc;
9319 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9320
9321 if (GET_CODE (uloc) == SET)
9322 {
9323 src = SET_SRC (uloc);
9324 dst = SET_DEST (uloc);
9325 }
9326
9327 if (copied_p)
9328 {
9329 status = find_src_status (set, src);
9330
9331 src = find_src_set_src (set, src);
9332 }
9333
9334 if (REG_P (dst))
9335 var_reg_delete_and_set (set, dst, !copied_p,
9336 status, srcv);
9337 else if (MEM_P (dst))
9338 {
9339 gcc_assert (MEM_P (dstv));
9340 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9341 var_mem_delete_and_set (set, dstv, !copied_p,
9342 status, srcv);
9343 }
9344 }
9345 }
9346 else if (REG_P (uloc))
9347 var_regno_delete (set, REGNO (uloc));
9348 else if (MEM_P (uloc))
9349 {
9350 gcc_checking_assert (GET_CODE (vloc) == MEM);
9351 gcc_checking_assert (vloc == dstv);
9352 if (vloc != dstv)
9353 clobber_overlapping_mems (set, vloc);
9354 }
9355
9356 val_store (set, val, dstv, insn, true);
9357
9358 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9359 set->vars);
9360 }
9361 break;
9362
9363 case MO_SET:
9364 {
9365 rtx loc = mo->u.loc;
9366 rtx set_src = NULL;
9367
9368 if (GET_CODE (loc) == SET)
9369 {
9370 set_src = SET_SRC (loc);
9371 loc = SET_DEST (loc);
9372 }
9373
9374 if (REG_P (loc))
9375 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9376 set_src);
9377 else
9378 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9379 set_src);
9380
9381 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9382 set->vars);
9383 }
9384 break;
9385
9386 case MO_COPY:
9387 {
9388 rtx loc = mo->u.loc;
9389 enum var_init_status src_status;
9390 rtx set_src = NULL;
9391
9392 if (GET_CODE (loc) == SET)
9393 {
9394 set_src = SET_SRC (loc);
9395 loc = SET_DEST (loc);
9396 }
9397
9398 src_status = find_src_status (set, set_src);
9399 set_src = find_src_set_src (set, set_src);
9400
9401 if (REG_P (loc))
9402 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9403 else
9404 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9405
9406 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9407 set->vars);
9408 }
9409 break;
9410
9411 case MO_USE_NO_VAR:
9412 {
9413 rtx loc = mo->u.loc;
9414
9415 if (REG_P (loc))
9416 var_reg_delete (set, loc, false);
9417 else
9418 var_mem_delete (set, loc, false);
9419
9420 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9421 }
9422 break;
9423
9424 case MO_CLOBBER:
9425 {
9426 rtx loc = mo->u.loc;
9427
9428 if (REG_P (loc))
9429 var_reg_delete (set, loc, true);
9430 else
9431 var_mem_delete (set, loc, true);
9432
9433 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9434 set->vars);
9435 }
9436 break;
9437
9438 case MO_ADJUST:
9439 set->stack_adjust += mo->u.adjust;
9440 break;
9441 }
9442 }
9443 }
9444
9445 /* Emit notes for the whole function. */
9446
9447 static void
9448 vt_emit_notes (void)
9449 {
9450 basic_block bb;
9451 dataflow_set cur;
9452
9453 gcc_assert (!changed_variables->elements ());
9454
9455 /* Free memory occupied by the out hash tables, as they aren't used
9456 anymore. */
9457 FOR_EACH_BB_FN (bb, cfun)
9458 dataflow_set_clear (&VTI (bb)->out);
9459
9460 /* Enable emitting notes by functions (mainly by set_variable_part and
9461 delete_variable_part). */
9462 emit_notes = true;
9463
9464 if (MAY_HAVE_DEBUG_INSNS)
9465 {
9466 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9467 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9468 sizeof (loc_exp_dep), 64);
9469 }
9470
9471 dataflow_set_init (&cur);
9472
9473 FOR_EACH_BB_FN (bb, cfun)
9474 {
9475 /* Emit the notes for changes of variable locations between two
9476 consecutive basic blocks. */
9477 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9478
9479 if (MAY_HAVE_DEBUG_INSNS)
9480 local_get_addr_cache = new hash_map<rtx, rtx>;
9481
9482 /* Emit the notes for the changes in the basic block itself. */
9483 emit_notes_in_bb (bb, &cur);
9484
9485 if (MAY_HAVE_DEBUG_INSNS)
9486 delete local_get_addr_cache;
9487 local_get_addr_cache = NULL;
9488
9489 /* Free memory occupied by the in hash table; we won't need it
9490 again. */
9491 dataflow_set_clear (&VTI (bb)->in);
9492 }
9493 #ifdef ENABLE_CHECKING
9494 shared_hash_htab (cur.vars)
9495 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9496 (shared_hash_htab (empty_shared_hash));
9497 #endif
9498 dataflow_set_destroy (&cur);
9499
9500 if (MAY_HAVE_DEBUG_INSNS)
9501 delete dropped_values;
9502 dropped_values = NULL;
9503
9504 emit_notes = false;
9505 }
9506
9507 /* If there is a declaration and offset associated with register/memory RTL,
9508 assign the declaration to *DECLP, the offset to *OFFSETP, and return true. */
9509
9510 static bool
9511 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9512 {
9513 if (REG_P (rtl))
9514 {
9515 if (REG_ATTRS (rtl))
9516 {
9517 *declp = REG_EXPR (rtl);
9518 *offsetp = REG_OFFSET (rtl);
9519 return true;
9520 }
9521 }
9522 else if (GET_CODE (rtl) == PARALLEL)
9523 {
9524 tree decl = NULL_TREE;
9525 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9526 int len = XVECLEN (rtl, 0), i;
9527
9528 for (i = 0; i < len; i++)
9529 {
9530 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9531 if (!REG_P (reg) || !REG_ATTRS (reg))
9532 break;
9533 if (!decl)
9534 decl = REG_EXPR (reg);
9535 if (REG_EXPR (reg) != decl)
9536 break;
9537 if (REG_OFFSET (reg) < offset)
9538 offset = REG_OFFSET (reg);
9539 }
9540
9541 if (i == len)
9542 {
9543 *declp = decl;
9544 *offsetp = offset;
9545 return true;
9546 }
9547 }
9548 else if (MEM_P (rtl))
9549 {
9550 if (MEM_ATTRS (rtl))
9551 {
9552 *declp = MEM_EXPR (rtl);
9553 *offsetp = INT_MEM_OFFSET (rtl);
9554 return true;
9555 }
9556 }
9557 return false;
9558 }
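/* E.g. for a parameter split across two registers, both having
   REG_EXPR pointing at the same PARM_DECL and REG_OFFSETs 0 and 8,
   the PARALLEL arm above returns that decl with offset 0, the
   minimum of the two (hypothetical values, for illustration).  */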
9559
9560 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9561 of VAL. */
9562
9563 static void
9564 record_entry_value (cselib_val *val, rtx rtl)
9565 {
9566 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9567
9568 ENTRY_VALUE_EXP (ev) = rtl;
9569
9570 cselib_add_permanent_equiv (val, ev, get_insns ());
9571 }
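/* For instance, for a parameter arriving in (reg:DI 5) (a
   hypothetical register, for illustration), this records
   (entry_value:DI (reg:DI 5)) as a permanent equivalence of the
   parameter's VALUE; dwarf2out can later express it with a
   DW_OP_GNU_entry_value operation.  */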
9572
9573 /* Insert function parameter PARM into the IN and OUT sets of ENTRY_BLOCK. */
9574
9575 static void
9576 vt_add_function_parameter (tree parm)
9577 {
9578 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9579 rtx incoming = DECL_INCOMING_RTL (parm);
9580 tree decl;
9581 enum machine_mode mode;
9582 HOST_WIDE_INT offset;
9583 dataflow_set *out;
9584 decl_or_value dv;
9585
9586 if (TREE_CODE (parm) != PARM_DECL)
9587 return;
9588
9589 if (!decl_rtl || !incoming)
9590 return;
9591
9592 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9593 return;
9594
9595 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9596 rewrite the incoming location of parameters passed on the stack
9597 into MEMs based on the argument pointer, so that incoming doesn't
9598 depend on a pseudo. */
9599 if (MEM_P (incoming)
9600 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9601 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9602 && XEXP (XEXP (incoming, 0), 0)
9603 == crtl->args.internal_arg_pointer
9604 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9605 {
9606 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9607 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9608 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9609 incoming
9610 = replace_equiv_address_nv (incoming,
9611 plus_constant (Pmode,
9612 arg_pointer_rtx, off));
9613 }
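/* E.g. an incoming location of (mem:SI (plus internal_arg_pointer
   (const_int 8))) is rewritten above as (mem:SI (plus arg_pointer_rtx
   (const_int N))) with N == 8 - FIRST_PARM_OFFSET -- hypothetical
   offsets, for illustration.  */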
9614
9615 #ifdef HAVE_window_save
9616 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9617 If the target machine has an explicit window save instruction, the
9618 actual entry value is the corresponding OUTGOING_REGNO instead. */
9619 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9620 {
9621 if (REG_P (incoming)
9622 && HARD_REGISTER_P (incoming)
9623 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9624 {
9625 parm_reg_t p;
9626 p.incoming = incoming;
9627 incoming
9628 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9629 OUTGOING_REGNO (REGNO (incoming)), 0);
9630 p.outgoing = incoming;
9631 vec_safe_push (windowed_parm_regs, p);
9632 }
9633 else if (GET_CODE (incoming) == PARALLEL)
9634 {
9635 rtx outgoing
9636 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9637 int i;
9638
9639 for (i = 0; i < XVECLEN (incoming, 0); i++)
9640 {
9641 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9642 parm_reg_t p;
9643 p.incoming = reg;
9644 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9645 OUTGOING_REGNO (REGNO (reg)), 0);
9646 p.outgoing = reg;
9647 XVECEXP (outgoing, 0, i)
9648 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9649 XEXP (XVECEXP (incoming, 0, i), 1));
9650 vec_safe_push (windowed_parm_regs, p);
9651 }
9652
9653 incoming = outgoing;
9654 }
9655 else if (MEM_P (incoming)
9656 && REG_P (XEXP (incoming, 0))
9657 && HARD_REGISTER_P (XEXP (incoming, 0)))
9658 {
9659 rtx reg = XEXP (incoming, 0);
9660 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9661 {
9662 parm_reg_t p;
9663 p.incoming = reg;
9664 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9665 p.outgoing = reg;
9666 vec_safe_push (windowed_parm_regs, p);
9667 incoming = replace_equiv_address_nv (incoming, reg);
9668 }
9669 }
9670 }
9671 #endif
9672
9673 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9674 {
9675 if (MEM_P (incoming))
9676 {
9677 /* This means the argument is passed by invisible reference. */
9678 offset = 0;
9679 decl = parm;
9680 }
9681 else
9682 {
9683 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9684 return;
9685 offset += byte_lowpart_offset (GET_MODE (incoming),
9686 GET_MODE (decl_rtl));
9687 }
9688 }
9689
9690 if (!decl)
9691 return;
9692
9693 if (parm != decl)
9694 {
9695 /* If that DECL_RTL wasn't a pseudo that got spilled to
9696 memory, bail out. Otherwise, the spill slot sharing code
9697 will force the memory to reference spill_slot_decl (%sfp),
9698 so we don't match above. That's ok, the pseudo must have
9699 referenced the entire parameter, so just reset OFFSET. */
9700 if (decl != get_spill_slot_decl (false))
9701 return;
9702 offset = 0;
9703 }
9704
9705 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9706 return;
9707
9708 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9709
9710 dv = dv_from_decl (parm);
9711
9712 if (target_for_debug_bind (parm)
9713 /* We can't deal with these right now, because this kind of
9714 variable is single-part. ??? We could handle parallels
9715 that describe multiple locations for the same single
9716 value, but ATM we don't. */
9717 && GET_CODE (incoming) != PARALLEL)
9718 {
9719 cselib_val *val;
9720 rtx lowpart;
9721
9722 /* ??? We shouldn't ever hit this, but it may happen because
9723 arguments passed by invisible reference aren't dealt with
9724 above: incoming-rtl will have Pmode rather than the
9725 expected mode for the type. */
9726 if (offset)
9727 return;
9728
9729 lowpart = var_lowpart (mode, incoming);
9730 if (!lowpart)
9731 return;
9732
9733 val = cselib_lookup_from_insn (lowpart, mode, true,
9734 VOIDmode, get_insns ());
9735
9736 /* ??? Float-typed values in memory are not handled by
9737 cselib. */
9738 if (val)
9739 {
9740 preserve_value (val);
9741 set_variable_part (out, val->val_rtx, dv, offset,
9742 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9743 dv = dv_from_value (val->val_rtx);
9744 }
9745
9746 if (MEM_P (incoming))
9747 {
9748 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9749 VOIDmode, get_insns ());
9750 if (val)
9751 {
9752 preserve_value (val);
9753 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9754 }
9755 }
9756 }
9757
9758 if (REG_P (incoming))
9759 {
9760 incoming = var_lowpart (mode, incoming);
9761 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9762 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9763 incoming);
9764 set_variable_part (out, incoming, dv, offset,
9765 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9766 if (dv_is_value_p (dv))
9767 {
9768 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9769 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9770 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9771 {
9772 enum machine_mode indmode
9773 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9774 rtx mem = gen_rtx_MEM (indmode, incoming);
9775 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9776 VOIDmode,
9777 get_insns ());
9778 if (val)
9779 {
9780 preserve_value (val);
9781 record_entry_value (val, mem);
9782 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9783 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9784 }
9785 }
9786 }
9787 }
9788 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9789 {
9790 int i;
9791
9792 for (i = 0; i < XVECLEN (incoming, 0); i++)
9793 {
9794 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9795 offset = REG_OFFSET (reg);
9796 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9797 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9798 set_variable_part (out, reg, dv, offset,
9799 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9800 }
9801 }
9802 else if (MEM_P (incoming))
9803 {
9804 incoming = var_lowpart (mode, incoming);
9805 set_variable_part (out, incoming, dv, offset,
9806 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9807 }
9808 }
9809
9810 /* Insert the function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9811
9812 static void
9813 vt_add_function_parameters (void)
9814 {
9815 tree parm;
9816
9817 for (parm = DECL_ARGUMENTS (current_function_decl);
9818 parm; parm = DECL_CHAIN (parm))
9819 vt_add_function_parameter (parm);
9820
9821 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9822 {
9823 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9824
9825 if (TREE_CODE (vexpr) == INDIRECT_REF)
9826 vexpr = TREE_OPERAND (vexpr, 0);
9827
9828 if (TREE_CODE (vexpr) == PARM_DECL
9829 && DECL_ARTIFICIAL (vexpr)
9830 && !DECL_IGNORED_P (vexpr)
9831 && DECL_NAMELESS (vexpr))
9832 vt_add_function_parameter (vexpr);
9833 }
9834 }
9835
9836 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9837 ensure it isn't flushed during cselib_reset_table.
9838 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9839 has been eliminated. */
9840
9841 static void
9842 vt_init_cfa_base (void)
9843 {
9844 cselib_val *val;
9845
9846 #ifdef FRAME_POINTER_CFA_OFFSET
9847 cfa_base_rtx = frame_pointer_rtx;
9848 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9849 #else
9850 cfa_base_rtx = arg_pointer_rtx;
9851 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9852 #endif
9853 if (cfa_base_rtx == hard_frame_pointer_rtx
9854 || !fixed_regs[REGNO (cfa_base_rtx)])
9855 {
9856 cfa_base_rtx = NULL_RTX;
9857 return;
9858 }
9859 if (!MAY_HAVE_DEBUG_INSNS)
9860 return;
9861
9862 /* Tell alias analysis that cfa_base_rtx should share
9863 find_base_term value with stack pointer or hard frame pointer. */
9864 if (!frame_pointer_needed)
9865 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9866 else if (!crtl->stack_realign_tried)
9867 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9868
9869 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9870 VOIDmode, get_insns ());
9871 preserve_value (val);
9872 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9873 }
9874
9875 /* Allocate and initialize the data structures for variable tracking
9876 and parse the RTL to get the micro operations. */
9877
9878 static bool
9879 vt_initialize (void)
9880 {
9881 basic_block bb;
9882 HOST_WIDE_INT fp_cfa_offset = -1;
9883
9884 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9885
9886 attrs_pool = create_alloc_pool ("attrs_def pool",
9887 sizeof (struct attrs_def), 1024);
9888 var_pool = create_alloc_pool ("variable_def pool",
9889 sizeof (struct variable_def)
9890 + (MAX_VAR_PARTS - 1)
9891 * sizeof (((variable)NULL)->var_part[0]), 64);
9892 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9893 sizeof (struct location_chain_def),
9894 1024);
9895 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9896 sizeof (struct shared_hash_def), 256);
9897 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9898 empty_shared_hash->refcount = 1;
9899 empty_shared_hash->htab = new variable_table_type (1);
9900 changed_variables = new variable_table_type (10);
9901
9902 /* Init the IN and OUT sets. */
9903 FOR_ALL_BB_FN (bb, cfun)
9904 {
9905 VTI (bb)->visited = false;
9906 VTI (bb)->flooded = false;
9907 dataflow_set_init (&VTI (bb)->in);
9908 dataflow_set_init (&VTI (bb)->out);
9909 VTI (bb)->permp = NULL;
9910 }
9911
9912 if (MAY_HAVE_DEBUG_INSNS)
9913 {
9914 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9915 scratch_regs = BITMAP_ALLOC (NULL);
9916 valvar_pool = create_alloc_pool ("small variable_def pool",
9917 sizeof (struct variable_def), 256);
9918 preserved_values.create (256);
9919 global_get_addr_cache = new hash_map<rtx, rtx>;
9920 }
9921 else
9922 {
9923 scratch_regs = NULL;
9924 valvar_pool = NULL;
9925 global_get_addr_cache = NULL;
9926 }
9927
9928 if (MAY_HAVE_DEBUG_INSNS)
9929 {
9930 rtx reg, expr;
9931 int ofst;
9932 cselib_val *val;
9933
9934 #ifdef FRAME_POINTER_CFA_OFFSET
9935 reg = frame_pointer_rtx;
9936 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9937 #else
9938 reg = arg_pointer_rtx;
9939 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9940 #endif
9941
9942 ofst -= INCOMING_FRAME_SP_OFFSET;
9943
9944 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9945 VOIDmode, get_insns ());
9946 preserve_value (val);
9947 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9948 cselib_preserve_cfa_base_value (val, REGNO (reg));
9949 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9950 stack_pointer_rtx, -ofst);
9951 cselib_add_permanent_equiv (val, expr, get_insns ());
9952
9953 if (ofst)
9954 {
9955 val = cselib_lookup_from_insn (stack_pointer_rtx,
9956 GET_MODE (stack_pointer_rtx), 1,
9957 VOIDmode, get_insns ());
9958 preserve_value (val);
9959 expr = plus_constant (GET_MODE (reg), reg, ofst);
9960 cselib_add_permanent_equiv (val, expr, get_insns ());
9961 }
9962 }
9963
9964   /* In order to factor out the adjustments made to the stack pointer or to
9965      the hard frame pointer and thus be able to use DW_OP_fbreg operations
9966      instead of individual location lists, we're going to rewrite MEMs based
9967      on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9968      or hard_frame_pointer_rtx back to the virtual CFA pointer, i.e. to
9969      frame_pointer_rtx or arg_pointer_rtx respectively.  We can do this
9970      either when there is no frame pointer in the function and stack
9971      adjustments are consistent for all basic blocks, or when there is a
9972      frame pointer and no stack realignment.  But we first have to check
9973      that frame_pointer_rtx or arg_pointer_rtx has actually been eliminated.  */
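  /* Illustration (values are made up, not from any particular target):
     with a constant CFA of sp + 16, the slot CFA - 8 is
     (mem (plus sp 8)) before a 16-byte push and (mem (plus sp 24))
     after it; rewriting both into a MEM based on the CFA pointer lets
     a single DW_OP_fbreg expression describe the slot throughout.  */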
9974 if (!frame_pointer_needed)
9975 {
9976 rtx reg, elim;
9977
9978 if (!vt_stack_adjustments ())
9979 return false;
9980
9981 #ifdef FRAME_POINTER_CFA_OFFSET
9982 reg = frame_pointer_rtx;
9983 #else
9984 reg = arg_pointer_rtx;
9985 #endif
9986 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9987 if (elim != reg)
9988 {
9989 if (GET_CODE (elim) == PLUS)
9990 elim = XEXP (elim, 0);
9991 if (elim == stack_pointer_rtx)
9992 vt_init_cfa_base ();
9993 }
9994 }
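  /* Otherwise, with a frame pointer and no attempted stack
     realignment, remember at which offset from the hard frame pointer
     the CFA sits; fp_cfa_offset stays -1 whenever the rewriting is
     not possible.  */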
9995 else if (!crtl->stack_realign_tried)
9996 {
9997 rtx reg, elim;
9998
9999 #ifdef FRAME_POINTER_CFA_OFFSET
10000 reg = frame_pointer_rtx;
10001 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10002 #else
10003 reg = arg_pointer_rtx;
10004 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10005 #endif
10006 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10007 if (elim != reg)
10008 {
10009 if (GET_CODE (elim) == PLUS)
10010 {
10011 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10012 elim = XEXP (elim, 0);
10013 }
10014 if (elim != hard_frame_pointer_rtx)
10015 fp_cfa_offset = -1;
10016 }
10017 else
10018 fp_cfa_offset = -1;
10019 }
10020
10021 /* If the stack is realigned and a DRAP register is used, we're going to
10022 rewrite MEMs based on it representing incoming locations of parameters
10023 passed on the stack into MEMs based on the argument pointer. Although
10024 we aren't going to rewrite other MEMs, we still need to initialize the
10025 virtual CFA pointer in order to ensure that the argument pointer will
10026 be seen as a constant throughout the function.
10027
10028 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10029 else if (stack_realign_drap)
10030 {
10031 rtx reg, elim;
10032
10033 #ifdef FRAME_POINTER_CFA_OFFSET
10034 reg = frame_pointer_rtx;
10035 #else
10036 reg = arg_pointer_rtx;
10037 #endif
10038 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10039 if (elim != reg)
10040 {
10041 if (GET_CODE (elim) == PLUS)
10042 elim = XEXP (elim, 0);
10043 if (elim == hard_frame_pointer_rtx)
10044 vt_init_cfa_base ();
10045 }
10046 }
10047
10048 hard_frame_pointer_adjustment = -1;
10049
10050 vt_add_function_parameters ();
10051
10052 FOR_EACH_BB_FN (bb, cfun)
10053 {
10054 rtx_insn *insn;
10055 HOST_WIDE_INT pre, post = 0;
10056 basic_block first_bb, last_bb;
10057
10058 if (MAY_HAVE_DEBUG_INSNS)
10059 {
10060 cselib_record_sets_hook = add_with_sets;
10061 if (dump_file && (dump_flags & TDF_DETAILS))
10062 fprintf (dump_file, "first value: %i\n",
10063 cselib_get_next_uid ());
10064 }
10065
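      /* Scan several consecutive basic blocks as one unit: chase
	 fall-through edges into successors that have no other
	 predecessor, so the cselib state carries across the whole
	 extended chain.  */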
10066 first_bb = bb;
10067 for (;;)
10068 {
10069 edge e;
10070 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10071 || ! single_pred_p (bb->next_bb))
10072 break;
10073 e = find_edge (bb, bb->next_bb);
10074 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10075 break;
10076 bb = bb->next_bb;
10077 }
10078 last_bb = bb;
10079
10080 /* Add the micro-operations to the vector. */
10081 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10082 {
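	  /* Remember the final stack adjustment computed for this
	     block and recompute it from the micro operations below as
	     a consistency check (see the gcc_assert at the end of
	     this loop).  */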
10083 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10084 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10085 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10086 insn = NEXT_INSN (insn))
10087 {
10088 if (INSN_P (insn))
10089 {
10090 if (!frame_pointer_needed)
10091 {
10092 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10093 if (pre)
10094 {
10095 micro_operation mo;
10096 mo.type = MO_ADJUST;
10097 mo.u.adjust = pre;
10098 mo.insn = insn;
10099 if (dump_file && (dump_flags & TDF_DETAILS))
10100 log_op_type (PATTERN (insn), bb, insn,
10101 MO_ADJUST, dump_file);
10102 VTI (bb)->mos.safe_push (mo);
10103 VTI (bb)->out.stack_adjust += pre;
10104 }
10105 }
10106
10107 cselib_hook_called = false;
10108 adjust_insn (bb, insn);
10109 if (MAY_HAVE_DEBUG_INSNS)
10110 {
10111 if (CALL_P (insn))
10112 prepare_call_arguments (bb, insn);
10113 cselib_process_insn (insn);
10114 if (dump_file && (dump_flags & TDF_DETAILS))
10115 {
10116 print_rtl_single (dump_file, insn);
10117 dump_cselib_table (dump_file);
10118 }
10119 }
10120 if (!cselib_hook_called)
10121 add_with_sets (insn, 0, 0);
10122 cancel_changes (0);
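	      /* adjust_insn queued its modifications as a tentative
		 change group; now that the insn has been processed in
		 its adjusted form, undo them.  */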
10123
10124 if (!frame_pointer_needed && post)
10125 {
10126 micro_operation mo;
10127 mo.type = MO_ADJUST;
10128 mo.u.adjust = post;
10129 mo.insn = insn;
10130 if (dump_file && (dump_flags & TDF_DETAILS))
10131 log_op_type (PATTERN (insn), bb, insn,
10132 MO_ADJUST, dump_file);
10133 VTI (bb)->mos.safe_push (mo);
10134 VTI (bb)->out.stack_adjust += post;
10135 }
10136
10137 if (fp_cfa_offset != -1
10138 && hard_frame_pointer_adjustment == -1
10139 && fp_setter_insn (insn))
10140 {
10141 vt_init_cfa_base ();
10142 hard_frame_pointer_adjustment = fp_cfa_offset;
10143 /* Disassociate sp from fp now. */
10144 if (MAY_HAVE_DEBUG_INSNS)
10145 {
10146 cselib_val *v;
10147 cselib_invalidate_rtx (stack_pointer_rtx);
10148 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10149 VOIDmode);
10150 if (v && !cselib_preserved_value_p (v))
10151 {
10152 cselib_set_value_sp_based (v);
10153 preserve_value (v);
10154 }
10155 }
10156 }
10157 }
10158 }
10159 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10160 }
10161
10162 bb = last_bb;
10163
10164 if (MAY_HAVE_DEBUG_INSNS)
10165 {
10166 cselib_preserve_only_values ();
10167 cselib_reset_table (cselib_get_next_uid ());
10168 cselib_record_sets_hook = NULL;
10169 }
10170 }
10171
10172 hard_frame_pointer_adjustment = -1;
10173 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10174 cfa_base_rtx = NULL_RTX;
10175 return true;
10176 }
10177
10178 /* This is *not* reset after each function. It gives each
10179 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10180 a unique label number. */
10181
10182 static int debug_label_num = 1;
10183
10184 /* Get rid of all debug insns from the insn stream. */
10185
10186 static void
10187 delete_debug_insns (void)
10188 {
10189 basic_block bb;
10190 rtx_insn *insn, *next;
10191
10192 if (!MAY_HAVE_DEBUG_INSNS)
10193 return;
10194
10195 FOR_EACH_BB_FN (bb, cfun)
10196 {
10197 FOR_BB_INSNS_SAFE (bb, insn, next)
10198 if (DEBUG_INSN_P (insn))
10199 {
10200 tree decl = INSN_VAR_LOCATION_DECL (insn);
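	  /* Debug binds for named labels that never got RTL are turned
	     into NOTE_INSN_DELETED_DEBUG_LABEL notes so their names
	     survive into the debug info; every other debug insn is
	     simply deleted.  */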
10201 if (TREE_CODE (decl) == LABEL_DECL
10202 && DECL_NAME (decl)
10203 && !DECL_RTL_SET_P (decl))
10204 {
10205 PUT_CODE (insn, NOTE);
10206 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10207 NOTE_DELETED_LABEL_NAME (insn)
10208 = IDENTIFIER_POINTER (DECL_NAME (decl));
10209 SET_DECL_RTL (decl, insn);
10210 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10211 }
10212 else
10213 delete_insn (insn);
10214 }
10215 }
10216 }
10217
10218 /* Run a fast, BB-local-only version of var tracking, to take care of
10219    information that we don't do global analysis on, so that not all of
10220    that information is lost.  If SKIPPED holds, we're skipping the
10221    global pass entirely, so we should try to use the information it
10222    would have handled as well.  */
10223
10224 static void
10225 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10226 {
10227 /* ??? Just skip it all for now. */
10228 delete_debug_insns ();
10229 }
10230
10231 /* Free the data structures needed for variable tracking. */
10232
10233 static void
10234 vt_finalize (void)
10235 {
10236 basic_block bb;
10237
10238 FOR_EACH_BB_FN (bb, cfun)
10239 {
10240 VTI (bb)->mos.release ();
10241 }
10242
10243 FOR_ALL_BB_FN (bb, cfun)
10244 {
10245 dataflow_set_destroy (&VTI (bb)->in);
10246 dataflow_set_destroy (&VTI (bb)->out);
10247 if (VTI (bb)->permp)
10248 {
10249 dataflow_set_destroy (VTI (bb)->permp);
10250 XDELETE (VTI (bb)->permp);
10251 }
10252 }
10253 free_aux_for_blocks ();
10254 delete empty_shared_hash->htab;
10255 empty_shared_hash->htab = NULL;
10256 delete changed_variables;
10257 changed_variables = NULL;
10258 free_alloc_pool (attrs_pool);
10259 free_alloc_pool (var_pool);
10260 free_alloc_pool (loc_chain_pool);
10261 free_alloc_pool (shared_hash_pool);
10262
10263 if (MAY_HAVE_DEBUG_INSNS)
10264 {
10265       /* delete of a null pointer is a no-op, so no check is needed.  */
10266       delete global_get_addr_cache;
10267       global_get_addr_cache = NULL;
10268 if (loc_exp_dep_pool)
10269 free_alloc_pool (loc_exp_dep_pool);
10270 loc_exp_dep_pool = NULL;
10271 free_alloc_pool (valvar_pool);
10272 preserved_values.release ();
10273 cselib_finish ();
10274 BITMAP_FREE (scratch_regs);
10275 scratch_regs = NULL;
10276 }
10277
10278 #ifdef HAVE_window_save
10279 vec_free (windowed_parm_regs);
10280 #endif
10281
10282 if (vui_vec)
10283 XDELETEVEC (vui_vec);
10284 vui_vec = NULL;
10285 vui_allocated = 0;
10286 }
10287
10288 /* The entry point to variable tracking pass. */
10289
10290 static inline unsigned int
10291 variable_tracking_main_1 (void)
10292 {
10293 bool success;
10294
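  /* A negative flag_var_tracking_assignments means only the debug
     insns need deleting here; the pass itself is not run.  */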
10295 if (flag_var_tracking_assignments < 0)
10296 {
10297 delete_debug_insns ();
10298 return 0;
10299 }
10300
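  /* Heuristically punt up front on huge, densely connected CFGs,
     presumably to bound compile time; the BB-local fallback runs
     instead.  */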
10301   if (n_basic_blocks_for_fn (cfun) > 500
10302       && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10303 {
10304 vt_debug_insns_local (true);
10305 return 0;
10306 }
10307
10308 mark_dfs_back_edges ();
10309 if (!vt_initialize ())
10310 {
10311 vt_finalize ();
10312 vt_debug_insns_local (true);
10313 return 0;
10314 }
10315
10316 success = vt_find_locations ();
10317
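  /* If tracking the debug-insn assignments made vt_find_locations
     fail (e.g. by blowing up the size of the tracked sets), retry
     once without assignment tracking, with the debug insns deleted.  */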
10318 if (!success && flag_var_tracking_assignments > 0)
10319 {
10320 vt_finalize ();
10321
10322 delete_debug_insns ();
10323
10324 /* This is later restored by our caller. */
10325 flag_var_tracking_assignments = 0;
10326
10327 success = vt_initialize ();
10328 gcc_assert (success);
10329
10330 success = vt_find_locations ();
10331 }
10332
10333 if (!success)
10334 {
10335 vt_finalize ();
10336 vt_debug_insns_local (false);
10337 return 0;
10338 }
10339
10340 if (dump_file && (dump_flags & TDF_DETAILS))
10341 {
10342 dump_dataflow_sets ();
10343 dump_reg_info (dump_file);
10344 dump_flow_info (dump_file, dump_flags);
10345 }
10346
10347 timevar_push (TV_VAR_TRACKING_EMIT);
10348 vt_emit_notes ();
10349 timevar_pop (TV_VAR_TRACKING_EMIT);
10350
10351 vt_finalize ();
10352 vt_debug_insns_local (false);
10353 return 0;
10354 }
10355
10356 unsigned int
10357 variable_tracking_main (void)
10358 {
10359 unsigned int ret;
10360 int save = flag_var_tracking_assignments;
10361
10362 ret = variable_tracking_main_1 ();
10363
10364 flag_var_tracking_assignments = save;
10365
10366 return ret;
10367 }
10368 \f
10369 namespace {
10370
10371 const pass_data pass_data_variable_tracking =
10372 {
10373 RTL_PASS, /* type */
10374 "vartrack", /* name */
10375 OPTGROUP_NONE, /* optinfo_flags */
10376 TV_VAR_TRACKING, /* tv_id */
10377 0, /* properties_required */
10378 0, /* properties_provided */
10379 0, /* properties_destroyed */
10380 0, /* todo_flags_start */
10381 0, /* todo_flags_finish */
10382 };
10383
10384 class pass_variable_tracking : public rtl_opt_pass
10385 {
10386 public:
10387 pass_variable_tracking (gcc::context *ctxt)
10388 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10389 {}
10390
10391 /* opt_pass methods: */
10392 virtual bool gate (function *)
10393 {
10394 return (flag_var_tracking && !targetm.delay_vartrack);
10395 }
10396
10397 virtual unsigned int execute (function *)
10398 {
10399 return variable_tracking_main ();
10400 }
10401
10402 }; // class pass_variable_tracking
10403
10404 } // anon namespace
10405
10406 rtl_opt_pass *
10407 make_pass_variable_tracking (gcc::context *ctxt)
10408 {
10409 return new pass_variable_tracking (ctxt);
10410 }