/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (in which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is eventually
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors; the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offsets of variables addressed using the stack
   pointer), the table of structures describing the locations of parts of a
   variable and, for each physical register, a linked list of the variable
   parts stored in that register, i.e. a list of triplets (reg, decl, offset)
   where decl is REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked
   list is used for efficiently deleting the appropriate variable parts when
   we set or clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable
   locations are emitted at appropriate positions in the RTL code.  Each such
   note describes the location of one variable at the point in the
   instruction stream where the note is.  There is no need to emit a note for
   each variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).

*/
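
/* A minimal sketch of the forward dataflow iteration described above
   (illustrative only; the real implementation in vt_find_locations below
   uses a worklist ordered by a fibonacci heap rather than a plain loop):

     basic_block bb;
     edge e;
     edge_iterator ei;
     bool changed = true;
     while (changed)		// iterate to a fixed point
       {
	 changed = false;
	 FOR_EACH_BB_FN (bb, cfun)
	   {
	     // IN (bb) = union of the OUT sets of bb's predecessors.
	     FOR_EACH_EDGE (e, ei, bb->preds)
	       dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
	     // OUT (bb) = IN (bb) transformed by bb's micro operations;
	     // compute_bb_dataflow returns whether OUT changed.
	     changed |= compute_bb_dataflow (bb);
	   }
       }
*/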

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic.h"
#include "varasm.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "reload.h"
#include "calls.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "params.h"
#include "tree-pretty-print.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"

typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile-time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];

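/* An illustrative note on the assertion trick above: if the two codes ever
   stopped being equal, the conditional would select an array size of -1,
   e.g.

     extern char check_value_val[0 ? 1 : -1];	// error: negative size

   so any violation is diagnosed at compile time at no runtime cost.  */
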
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */

};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about a micro operation.  */
struct micro_operation
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
};


/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}

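/* An illustrative sketch of the type-erased tagging (hypothetical snippet,
   not called anywhere in this file):

     decl_or_value dv1 = dv_from_decl (decl);	// a DECL tree; see below
     decl_or_value dv2 = dv_from_value (val);	// a VALUE rtx; see below
     gcc_checking_assert (dv_is_decl_p (dv1) && dv_is_value_p (dv2));

   The discrimination works only because no decl's tree code can equal
   (int) VALUE, which is exactly what check_value_val above asserts.  */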

/* Description of location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus a
   chain is the best data structure here.  */
struct attrs
{
  /* Pointer to next member of the list.  */
  attrs *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
};

/* Structure for chaining the locations.  */
struct location_chain
{
  /* Next element in the chain.  */
  location_chain *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
};

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked list of
   backlinks back to DV.  */
struct loc_exp_dep
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep **pprev;
};


/* This data structure holds information about the depth of a variable
   expansion.  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};

/* Structure describing one part of a variable.  */
struct variable_part
{
  /* Chain of locations of the part.  */
  location_chain *loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
};

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};

/* Structure describing where the variable is located.  */
struct variable
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
};

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info *) (BB)->aux)

/* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't.  */

static inline HOST_WIDE_INT
int_mem_offset (const_rtx mem)
{
  if (MEM_OFFSET_KNOWN_P (mem))
    return MEM_OFFSET (mem);
  return 0;
}

#if CHECKING_P && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)  (VAR_LOC_1PAUX (var)		  \
			       ? VAR_LOC_1PAUX (var)->backlinks	  \
			       : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var)     (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)    (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)  (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->deps	  \
			       : NULL)


typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable *);
  static inline bool equal (const variable *, const void *);
  static inline void remove (variable *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const variable *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const variable *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable).  */

inline void
variable_hasher::remove (variable *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
struct emit_note_data
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
};

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
struct shared_hash
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
};

/* Structure holding the IN or OUT set for a basic block.  */
struct dataflow_set
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs *regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash *vars;

  /* Vars that are being traversed.  */
  shared_hash *traversed_vars;
};

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
struct variable_tracking_info
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

};

/* Alloc pool for struct attrs.  */
object_allocator<attrs> attrs_pool ("attrs pool");

/* Alloc pool for struct variable with MAX_VAR_PARTS entries.  */

static pool_allocator var_pool
  ("variable_def pool", sizeof (variable) +
   (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));

/* Alloc pool for struct variable with a single var_part entry.  */
static pool_allocator valvar_pool
  ("small variable_def pool", sizeof (variable));

/* Alloc pool for struct location_chain.  */
static object_allocator<location_chain> location_chain_pool
  ("location_chain pool");

/* Alloc pool for struct shared_hash.  */
static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");

/* Alloc pool for struct loc_exp_dep for NOT_ONEPART variables.  */
object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash *empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs **);
static void attrs_list_clear (attrs **);
static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs **, attrs *);
static void attrs_list_union (attrs **, attrs *);

static variable **unshare_variable (dataflow_set *set, variable **slot,
				    variable *var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain *find_loc_in_1pdv (rtx, variable *,
					 variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable *, variable *);
static bool variable_different_p (variable *, variable *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool track_expr_p (tree, bool);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs *);
static void dump_var (variable *);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable *, dataflow_set *);
static variable **set_slot_part (dataflow_set *, rtx, variable **,
				 decl_or_value, HOST_WIDE_INT,
				 enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable **clobber_slot_part (dataflow_set *, rtx,
				     variable **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable **delete_slot_part (dataflow_set *, rtx, variable **,
				    HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}

/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
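
/* Some illustrative examples of the conventions above (a sketch, not
   exhaustive): assuming a 4-byte SImode access,

     (set (reg sp) (plus (reg sp) (const_int -16)))  ->  *post += 16
     (mem:SI (pre_dec (reg sp)))                     ->  *pre  += 4
     (mem:SI (post_inc (reg sp)))                    ->  *post -= 4

   i.e. the accumulated values are positive for adjustments that grow
   the (downward-growing) stack, applied before resp. after the insn.  */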

/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* The register (arg_pointer_rtx or frame_pointer_rtx) that
   stack_pointer_rtx or hard_frame_pointer_rtx is being mapped to,
   together with the offset to apply to it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}
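
/* For example (an illustrative sketch): if cfa_base_rtx is arg_pointer_rtx
   and cfa_base_offset is -16, then compute_cfa_pointer (8) returns
   (plus:P (reg argp) (const_int -8)), i.e. a stack-pointer-relative
   address rewritten in terms of the CFA base register.  */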

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  auto_vec<rtx> side_effects;
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transforming wider-mode arithmetic to a narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
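
/* An illustrative example (sketch): narrowing
   (plus:DI (reg:DI r) (const_int 1)) to SImode yields
   (plus:SI (subreg:SI (reg:DI r) 0) (const_int 1)) on a little-endian
   target.  This is only done after use_narrower_mode_test has verified
   that every register operand may validly be accessed through a lowpart
   subreg.  */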

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     &tem_subreg_mode)
	  && (GET_MODE_PRECISION (tem_mode)
	      < GET_MODE_PRECISION (tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
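
/* An illustrative example (sketch): for a store to
   (mem:SI (pre_dec (reg sp))), adjust_mems returns the plain address
   (plus (reg sp) (const_int -4)) for use inside the MEM and queues the
   side effect (set (reg sp) (plus (reg sp) (const_int -4))); adjust_insn
   below appends the queued sets to the insn pattern, so no auto-inc or
   auto-modify codes survive in the adjusted insn.  */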

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be done before that (otherwise
	 it would fail), and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (!amd.side_effects.is_empty ())
    {
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator &
onepart_pool (onepart_enum onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Allocate a variable from the corresponding variable pool.  */
static inline variable *
onepart_pool_allocate (onepart_enum onepart)
{
  return (variable *) onepart_pool (onepart).allocate ();
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

static void loc_exp_dep_clear (variable *var);

/* Free the element of VARIABLE_HTAB (its type is struct variable).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs **set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs **listp)
{
  attrs *list, *next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}

/* Return the node of LIST whose DV and OFFSET match, or NULL if the
   pair is not a member of the list.  */

static attrs *
attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs **listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs *list = new attrs;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs **dstp, attrs *src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs *n = new attrs;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs **dstp, attrs *src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash *vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash *vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable *var, shared_hash *vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash *
shared_hash_unshare (shared_hash *vars)
{
  shared_hash *new_vars = new shared_hash;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash *
shared_hash_copy (shared_hash *vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash *vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}
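
/* A sketch of the copy-on-write discipline (illustrative only):

     dst->vars = shared_hash_copy (src->vars);	// O(1), bumps refcount
     ...
     shared_hash_find_slot_unshare (&dst->vars, dv, INSERT); // clones table

   i.e. copying a dataflow set is cheap, and the hash table is only really
   duplicated by the first modification made through one of the _unshare
   entry points below.  */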

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable **
shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable **
shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable **
shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable **
shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable **
shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable **
shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable *
shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable *
shared_hash_find (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
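
/* For example (sketch): among equivalent VALUEs with uids 7, 3 and 12,
   repeated canon_value_cmp comparisons settle on uid 3 as the canonical
   value; the other two then each carry a single link to it, giving the
   star topology described above.  */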

static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it into dataflow set SET.  */

static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  new_var = onepart_pool_allocate (var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
1795
1796 /* Copy all variables from hash table SRC to hash table DST. */
1797
1798 static void
1799 vars_copy (variable_table_type *dst, variable_table_type *src)
1800 {
1801 variable_iterator_type hi;
1802 variable *var;
1803
1804 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1805 {
1806 variable **dstp;
1807 var->refcount++;
1808 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1809 INSERT);
1810 *dstp = var;
1811 }
1812 }
1813
1814 /* Map a decl to its main debug decl. */
1815
1816 static inline tree
1817 var_debug_decl (tree decl)
1818 {
1819 if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
1820 {
1821 tree debugdecl = DECL_DEBUG_EXPR (decl);
1822 if (DECL_P (debugdecl))
1823 decl = debugdecl;
1824 }
1825
1826 return decl;
1827 }
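
/* For instance, a temporary introduced by SRA carries a
DECL_DEBUG_EXPR naming the user-visible variable or expression it
stands for; when that debug expression is itself a decl, that is the
decl whose locations we want to track.  */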
1828
1829 /* Set the register LOC to contain DV, OFFSET. */
1830
1831 static void
1832 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1833 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1834 enum insert_option iopt)
1835 {
1836 attrs *node;
1837 bool decl_p = dv_is_decl_p (dv);
1838
1839 if (decl_p)
1840 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1841
1842 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1843 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1844 && node->offset == offset)
1845 break;
1846 if (!node)
1847 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1848 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1849 }
1850
1851 /* Return true if we should track a location that is OFFSET bytes from
1852 a variable. Store the constant offset in *OFFSET_OUT if so. */
1853
1854 static bool
1855 track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
1856 {
1857 HOST_WIDE_INT const_offset;
1858 if (!offset.is_constant (&const_offset)
1859 || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
1860 return false;
1861 *offset_out = const_offset;
1862 return true;
1863 }
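
/* E.g. with MAX_VAR_PARTS defined as 16, an offset of 8 is accepted
and stored in *OFFSET_OUT, whereas -4, 16, or any offset that is not
a compile-time constant is rejected.  */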
1864
1865 /* Return the offset of a register that track_offset_p says we
1866 should track. */
1867
1868 static HOST_WIDE_INT
1869 get_tracked_reg_offset (rtx loc)
1870 {
1871 HOST_WIDE_INT offset;
1872 if (!track_offset_p (REG_OFFSET (loc), &offset))
1873 gcc_unreachable ();
1874 return offset;
1875 }
1876
1877 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1878
1879 static void
1880 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1881 rtx set_src)
1882 {
1883 tree decl = REG_EXPR (loc);
1884 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1885
1886 var_reg_decl_set (set, loc, initialized,
1887 dv_from_decl (decl), offset, set_src, INSERT);
1888 }
1889
1890 static enum var_init_status
1891 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1892 {
1893 variable *var;
1894 int i;
1895 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1896
1897 if (! flag_var_tracking_uninit)
1898 return VAR_INIT_STATUS_INITIALIZED;
1899
1900 var = shared_hash_find (set->vars, dv);
1901 if (var)
1902 {
1903 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1904 {
1905 location_chain *nextp;
1906 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1907 if (rtx_equal_p (nextp->loc, loc))
1908 {
1909 ret_val = nextp->init;
1910 break;
1911 }
1912 }
1913 }
1914
1915 return ret_val;
1916 }
1917
1918 /* Delete current content of register LOC in dataflow set SET and set
1919 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1920 MODIFY is true, any other live copies of the same variable part are
1921 also deleted from the dataflow set, otherwise the variable part is
1922 assumed to be copied from another location holding the same
1923 part. */
1924
1925 static void
1926 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1927 enum var_init_status initialized, rtx set_src)
1928 {
1929 tree decl = REG_EXPR (loc);
1930 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1931 attrs *node, *next;
1932 attrs **nextp;
1933
1934 decl = var_debug_decl (decl);
1935
1936 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1937 initialized = get_init_value (set, loc, dv_from_decl (decl));
1938
1939 nextp = &set->regs[REGNO (loc)];
1940 for (node = *nextp; node; node = next)
1941 {
1942 next = node->next;
1943 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1944 {
1945 delete_variable_part (set, node->loc, node->dv, node->offset);
1946 delete node;
1947 *nextp = next;
1948 }
1949 else
1950 {
1951 node->loc = loc;
1952 nextp = &node->next;
1953 }
1954 }
1955 if (modify)
1956 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1957 var_reg_set (set, loc, initialized, set_src);
1958 }
1959
1960 /* Delete the association of register LOC in dataflow set SET with any
1961 variables that aren't onepart. If CLOBBER is true, also delete any
1962 other live copies of the same variable part, and delete the
1963 association with onepart dvs too. */
1964
1965 static void
1966 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1967 {
1968 attrs **nextp = &set->regs[REGNO (loc)];
1969 attrs *node, *next;
1970
1971 HOST_WIDE_INT offset;
1972 if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
1973 {
1974 tree decl = REG_EXPR (loc);
1975
1976 decl = var_debug_decl (decl);
1977
1978 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1979 }
1980
1981 for (node = *nextp; node; node = next)
1982 {
1983 next = node->next;
1984 if (clobber || !dv_onepart_p (node->dv))
1985 {
1986 delete_variable_part (set, node->loc, node->dv, node->offset);
1987 delete node;
1988 *nextp = next;
1989 }
1990 else
1991 nextp = &node->next;
1992 }
1993 }
1994
1995 /* Delete content of register with number REGNO in dataflow set SET. */
1996
1997 static void
1998 var_regno_delete (dataflow_set *set, int regno)
1999 {
2000 attrs **reg = &set->regs[regno];
2001 attrs *node, *next;
2002
2003 for (node = *reg; node; node = next)
2004 {
2005 next = node->next;
2006 delete_variable_part (set, node->loc, node->dv, node->offset);
2007 delete node;
2008 }
2009 *reg = NULL;
2010 }
2011
2012 /* Return true if I is the negated value of a power of two. */
2013 static bool
2014 negative_power_of_two_p (HOST_WIDE_INT i)
2015 {
2016 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2017 return pow2_or_zerop (x);
2018 }
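
/* E.g. -16 qualifies, since 16 is a power of two; these are the
masks used by stack-alignment ANDs such as
(and:SI (reg:SI sp) (const_int -16)).  */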
2019
2020 /* Strip constant offsets and alignments off of LOC. Return the base
2021 expression. */
2022
2023 static rtx
2024 vt_get_canonicalize_base (rtx loc)
2025 {
2026 while ((GET_CODE (loc) == PLUS
2027 || GET_CODE (loc) == AND)
2028 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2029 && (GET_CODE (loc) != AND
2030 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2031 loc = XEXP (loc, 0);
2032
2033 return loc;
2034 }
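
/* For instance, (plus (value V) (const_int 8)) and
(and (plus (value V) (const_int 4)) (const_int -16)) both have base
(value V).  */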
2035
2036 /* This caches canonicalized addresses for VALUEs, computed using
2037 information in the global cselib table. */
2038 static hash_map<rtx, rtx> *global_get_addr_cache;
2039
2040 /* This caches canonicalized addresses for VALUEs, computed using
2041 information from the global cache and information pertaining to a
2042 basic block being analyzed. */
2043 static hash_map<rtx, rtx> *local_get_addr_cache;
2044
2045 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2046
2047 /* Return the canonical address for LOC, which must be a VALUE, using a
2048 cached global equivalence or computing it and storing it in the
2049 global cache. */
2050
2051 static rtx
2052 get_addr_from_global_cache (rtx const loc)
2053 {
2054 rtx x;
2055
2056 gcc_checking_assert (GET_CODE (loc) == VALUE);
2057
2058 bool existed;
2059 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2060 if (existed)
2061 return *slot;
2062
2063 x = canon_rtx (get_addr (loc));
2064
2065 /* Tentative, avoiding infinite recursion. */
2066 *slot = x;
2067
2068 if (x != loc)
2069 {
2070 rtx nx = vt_canonicalize_addr (NULL, x);
2071 if (nx != x)
2072 {
2073 /* The table may have moved during recursion, recompute
2074 SLOT. */
2075 *global_get_addr_cache->get (loc) = x = nx;
2076 }
2077 }
2078
2079 return x;
2080 }
2081
2082 /* Return the canonical address for LOC, which must be a VALUE, using a
2083 cached local equivalence or computing it and storing it in the
2084 local cache. */
2085
2086 static rtx
2087 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2088 {
2089 rtx x;
2090 decl_or_value dv;
2091 variable *var;
2092 location_chain *l;
2093
2094 gcc_checking_assert (GET_CODE (loc) == VALUE);
2095
2096 bool existed;
2097 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2098 if (existed)
2099 return *slot;
2100
2101 x = get_addr_from_global_cache (loc);
2102
2103 /* Tentative, avoiding infinite recursion. */
2104 *slot = x;
2105
2106 /* Recurse to cache the local expansion of X, or to search for a
2107 VALUE in the expansion.  */
2108 if (x != loc)
2109 {
2110 rtx nx = vt_canonicalize_addr (set, x);
2111 if (nx != x)
2112 {
2113 slot = local_get_addr_cache->get (loc);
2114 *slot = x = nx;
2115 }
2116 return x;
2117 }
2118
2119 dv = dv_from_rtx (x);
2120 var = shared_hash_find (set->vars, dv);
2121 if (!var)
2122 return x;
2123
2124 /* Look for an improved equivalent expression. */
2125 for (l = var->var_part[0].loc_chain; l; l = l->next)
2126 {
2127 rtx base = vt_get_canonicalize_base (l->loc);
2128 if (GET_CODE (base) == VALUE
2129 && canon_value_cmp (base, loc))
2130 {
2131 rtx nx = vt_canonicalize_addr (set, l->loc);
2132 if (x != nx)
2133 {
2134 slot = local_get_addr_cache->get (loc);
2135 *slot = x = nx;
2136 }
2137 break;
2138 }
2139 }
2140
2141 return x;
2142 }
2143
2144 /* Canonicalize LOC using equivalences from SET in addition to those
2145 in the cselib static table. It expects a VALUE-based expression,
2146 and it will only substitute VALUEs with other VALUEs or
2147 function-global equivalences, so that, if two addresses have base
2148 VALUEs that are locally or globally related in ways that
2149 memrefs_conflict_p cares about, they will both canonicalize to
2150 expressions that have the same base VALUE.
2151
2152 The use of VALUEs as canonical base addresses enables the canonical
2153 RTXs to remain unchanged globally, if they resolve to a constant,
2154 or throughout a basic block otherwise, so that they can be cached
2155 and the cache need not be invalidated when REGs, MEMs or such
2156 change. */
2157
2158 static rtx
2159 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2160 {
2161 HOST_WIDE_INT ofst = 0;
2162 machine_mode mode = GET_MODE (oloc);
2163 rtx loc = oloc;
2164 rtx x;
2165 bool retry = true;
2166
2167 while (retry)
2168 {
2169 while (GET_CODE (loc) == PLUS
2170 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2171 {
2172 ofst += INTVAL (XEXP (loc, 1));
2173 loc = XEXP (loc, 0);
2174 }
2175
2176 /* Alignment operations can't normally be combined, so just
2177 canonicalize the base and we're done. We'll normally have
2178 only one stack alignment anyway. */
2179 if (GET_CODE (loc) == AND
2180 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2181 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2182 {
2183 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2184 if (x != XEXP (loc, 0))
2185 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2186 retry = false;
2187 }
2188
2189 if (GET_CODE (loc) == VALUE)
2190 {
2191 if (set)
2192 loc = get_addr_from_local_cache (set, loc);
2193 else
2194 loc = get_addr_from_global_cache (loc);
2195
2196 /* Consolidate plus_constants. */
2197 while (ofst && GET_CODE (loc) == PLUS
2198 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2199 {
2200 ofst += INTVAL (XEXP (loc, 1));
2201 loc = XEXP (loc, 0);
2202 }
2203
2204 retry = false;
2205 }
2206 else
2207 {
2208 x = canon_rtx (loc);
2209 if (retry)
2210 retry = (x != loc);
2211 loc = x;
2212 }
2213 }
2214
2215 /* Add OFST back in. */
2216 if (ofst)
2217 {
2218 /* Don't build new RTL if we can help it. */
2219 if (GET_CODE (oloc) == PLUS
2220 && XEXP (oloc, 0) == loc
2221 && INTVAL (XEXP (oloc, 1)) == ofst)
2222 return oloc;
2223
2224 loc = plus_constant (mode, loc, ofst);
2225 }
2226
2227 return loc;
2228 }
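
/* A worked example: given a local equivalence that resolves
(value V1) to (plus (value V2) (const_int 8)), the address
(plus (value V1) (const_int 4)) canonicalizes to
(plus (value V2) (const_int 12)); the constant offsets accumulate in
OFST while the base is resolved through the caches, and the total is
added back at the end.  */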
2229
2230 /* Return true iff there's a true dependence between MLOC and LOC.
2231 MADDR must be a canonicalized version of MLOC's address. */
2232
2233 static inline bool
2234 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2235 {
2236 if (GET_CODE (loc) != MEM)
2237 return false;
2238
2239 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2240 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2241 return false;
2242
2243 return true;
2244 }
2245
2246 /* Hold parameters for the hashtab traversal function
2247 drop_overlapping_mem_locs, see below. */
2248
2249 struct overlapping_mems
2250 {
2251 dataflow_set *set;
2252 rtx loc, addr;
2253 };
2254
2255 /* Remove all MEMs that overlap with COMS->LOC from the location list
2256 of a hash table entry for a onepart variable. COMS->ADDR must be a
2257 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2258 canonicalized itself. */
2259
2260 int
2261 drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
2262 {
2263 dataflow_set *set = coms->set;
2264 rtx mloc = coms->loc, addr = coms->addr;
2265 variable *var = *slot;
2266
2267 if (var->onepart != NOT_ONEPART)
2268 {
2269 location_chain *loc, **locp;
2270 bool changed = false;
2271 rtx cur_loc;
2272
2273 gcc_assert (var->n_var_parts == 1);
2274
2275 if (shared_var_p (var, set->vars))
2276 {
2277 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2278 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2279 break;
2280
2281 if (!loc)
2282 return 1;
2283
2284 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2285 var = *slot;
2286 gcc_assert (var->n_var_parts == 1);
2287 }
2288
2289 if (VAR_LOC_1PAUX (var))
2290 cur_loc = VAR_LOC_FROM (var);
2291 else
2292 cur_loc = var->var_part[0].cur_loc;
2293
2294 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2295 loc; loc = *locp)
2296 {
2297 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2298 {
2299 locp = &loc->next;
2300 continue;
2301 }
2302
2303 *locp = loc->next;
2304 /* If we have deleted the location that was last emitted,
2305 we have to emit a new location, so add the variable to the
2306 set of changed variables.  */
2307 if (cur_loc == loc->loc)
2308 {
2309 changed = true;
2310 var->var_part[0].cur_loc = NULL;
2311 if (VAR_LOC_1PAUX (var))
2312 VAR_LOC_FROM (var) = NULL;
2313 }
2314 delete loc;
2315 }
2316
2317 if (!var->var_part[0].loc_chain)
2318 {
2319 var->n_var_parts--;
2320 changed = true;
2321 }
2322 if (changed)
2323 variable_was_changed (var, set);
2324 }
2325
2326 return 1;
2327 }
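
/* For example, after a store through (mem:SI (value A)), a onepart
binding to (mem:SI (plus (value A) (const_int 2))), or to any other
MEM for which canon_true_dependence reports a possible overlap, is
dropped from the corresponding location list.  */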
2328
2329 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2330
2331 static void
2332 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2333 {
2334 struct overlapping_mems coms;
2335
2336 gcc_checking_assert (GET_CODE (loc) == MEM);
2337
2338 coms.set = set;
2339 coms.loc = canon_rtx (loc);
2340 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2341
2342 set->traversed_vars = set->vars;
2343 shared_hash_htab (set->vars)
2344 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2345 set->traversed_vars = NULL;
2346 }
2347
2348 /* Set the location of DV, OFFSET as the MEM LOC. */
2349
2350 static void
2351 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2352 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2353 enum insert_option iopt)
2354 {
2355 if (dv_is_decl_p (dv))
2356 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2357
2358 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2359 }
2360
2361 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2362 SET to LOC.
2363 Adjust the address first if it is stack pointer based. */
2364
2365 static void
2366 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2367 rtx set_src)
2368 {
2369 tree decl = MEM_EXPR (loc);
2370 HOST_WIDE_INT offset = int_mem_offset (loc);
2371
2372 var_mem_decl_set (set, loc, initialized,
2373 dv_from_decl (decl), offset, set_src, INSERT);
2374 }
2375
2376 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2377 dataflow set SET to LOC. If MODIFY is true, any other live copies
2378 of the same variable part are also deleted from the dataflow set,
2379 otherwise the variable part is assumed to be copied from another
2380 location holding the same part.
2381 Adjust the address first if it is stack pointer based. */
2382
2383 static void
2384 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2385 enum var_init_status initialized, rtx set_src)
2386 {
2387 tree decl = MEM_EXPR (loc);
2388 HOST_WIDE_INT offset = int_mem_offset (loc);
2389
2390 clobber_overlapping_mems (set, loc);
2391 decl = var_debug_decl (decl);
2392
2393 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2394 initialized = get_init_value (set, loc, dv_from_decl (decl));
2395
2396 if (modify)
2397 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2398 var_mem_set (set, loc, initialized, set_src);
2399 }
2400
2401 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2402 true, also delete any other live copies of the same variable part.
2403 Adjust the address first if it is stack pointer based. */
2404
2405 static void
2406 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2407 {
2408 tree decl = MEM_EXPR (loc);
2409 HOST_WIDE_INT offset = int_mem_offset (loc);
2410
2411 clobber_overlapping_mems (set, loc);
2412 decl = var_debug_decl (decl);
2413 if (clobber)
2414 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2415 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2416 }
2417
2418 /* Return true if LOC should not be expanded for location expressions,
2419 or used in them. */
2420
2421 static inline bool
2422 unsuitable_loc (rtx loc)
2423 {
2424 switch (GET_CODE (loc))
2425 {
2426 case PC:
2427 case SCRATCH:
2428 case CC0:
2429 case ASM_INPUT:
2430 case ASM_OPERANDS:
2431 return true;
2432
2433 default:
2434 return false;
2435 }
2436 }
2437
2438 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2439 bound to it. */
2440
2441 static inline void
2442 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2443 {
2444 if (REG_P (loc))
2445 {
2446 if (modified)
2447 var_regno_delete (set, REGNO (loc));
2448 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2449 dv_from_value (val), 0, NULL_RTX, INSERT);
2450 }
2451 else if (MEM_P (loc))
2452 {
2453 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2454
2455 if (modified)
2456 clobber_overlapping_mems (set, loc);
2457
2458 if (l && GET_CODE (l->loc) == VALUE)
2459 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2460
2461 /* If this MEM is a global constant, we don't need it in the
2462 dynamic tables. ??? We should test this before emitting the
2463 micro-op in the first place. */
2464 while (l)
2465 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2466 break;
2467 else
2468 l = l->next;
2469
2470 if (!l)
2471 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2472 dv_from_value (val), 0, NULL_RTX, INSERT);
2473 }
2474 else
2475 {
2476 /* Other kinds of equivalences are necessarily static, at least
2477 so long as we do not perform substitutions while merging
2478 expressions. */
2479 gcc_unreachable ();
2480 set_variable_part (set, loc, dv_from_value (val), 0,
2481 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2482 }
2483 }
2484
2485 /* Bind a value to a location it was just stored in. If MODIFIED
2486 holds, assume the location was modified, detaching it from any
2487 values bound to it. */
2488
2489 static void
2490 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2491 bool modified)
2492 {
2493 cselib_val *v = CSELIB_VAL_PTR (val);
2494
2495 gcc_assert (cselib_preserved_value_p (v));
2496
2497 if (dump_file)
2498 {
2499 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2500 print_inline_rtx (dump_file, loc, 0);
2501 fprintf (dump_file, " evaluates to ");
2502 print_inline_rtx (dump_file, val, 0);
2503 if (v->locs)
2504 {
2505 struct elt_loc_list *l;
2506 for (l = v->locs; l; l = l->next)
2507 {
2508 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2509 print_inline_rtx (dump_file, l->loc, 0);
2510 }
2511 }
2512 fprintf (dump_file, "\n");
2513 }
2514
2515 gcc_checking_assert (!unsuitable_loc (loc));
2516
2517 val_bind (set, val, loc, modified);
2518 }
2519
2520 /* Clear (canonical address) slots that reference X. */
2521
2522 bool
2523 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2524 {
2525 if (vt_get_canonicalize_base (*slot) == x)
2526 *slot = NULL;
2527 return true;
2528 }
2529
2530 /* Reset this node, detaching all its equivalences and redirecting
2531 them to the canonical value, if any.  */
2532
2533 static void
2534 val_reset (dataflow_set *set, decl_or_value dv)
2535 {
2536 variable *var = shared_hash_find (set->vars, dv);
2537 location_chain *node;
2538 rtx cval;
2539
2540 if (!var || !var->n_var_parts)
2541 return;
2542
2543 gcc_assert (var->n_var_parts == 1);
2544
2545 if (var->onepart == ONEPART_VALUE)
2546 {
2547 rtx x = dv_as_value (dv);
2548
2549 /* Relationships in the global cache don't change, so reset the
2550 local cache entry only. */
2551 rtx *slot = local_get_addr_cache->get (x);
2552 if (slot)
2553 {
2554 /* If the value resolved back to itself, odds are that other
2555 values may have cached it too. These entries now refer
2556 to the old X, so detach them too. Entries that used the
2557 old X but resolved to something else remain ok as long as
2558 that something else isn't also reset. */
2559 if (*slot == x)
2560 local_get_addr_cache
2561 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2562 *slot = NULL;
2563 }
2564 }
2565
2566 cval = NULL;
2567 for (node = var->var_part[0].loc_chain; node; node = node->next)
2568 if (GET_CODE (node->loc) == VALUE
2569 && canon_value_cmp (node->loc, cval))
2570 cval = node->loc;
2571
2572 for (node = var->var_part[0].loc_chain; node; node = node->next)
2573 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2574 {
2575 /* Redirect the equivalence link to the new canonical
2576 value, or simply remove it if it would point at
2577 itself. */
2578 if (cval)
2579 set_variable_part (set, cval, dv_from_value (node->loc),
2580 0, node->init, node->set_src, NO_INSERT);
2581 delete_variable_part (set, dv_as_value (dv),
2582 dv_from_value (node->loc), 0);
2583 }
2584
2585 if (cval)
2586 {
2587 decl_or_value cdv = dv_from_value (cval);
2588
2589 /* Keep the remaining values connected, accumulating links
2590 in the canonical value. */
2591 for (node = var->var_part[0].loc_chain; node; node = node->next)
2592 {
2593 if (node->loc == cval)
2594 continue;
2595 else if (GET_CODE (node->loc) == REG)
2596 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2597 node->set_src, NO_INSERT);
2598 else if (GET_CODE (node->loc) == MEM)
2599 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2600 node->set_src, NO_INSERT);
2601 else
2602 set_variable_part (set, node->loc, cdv, 0,
2603 node->init, node->set_src, NO_INSERT);
2604 }
2605 }
2606
2607 /* We remove this last, to make sure that the canonical value is not
2608 removed to the point of requiring reinsertion. */
2609 if (cval)
2610 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2611
2612 clobber_variable_part (set, NULL, dv, 0, NULL);
2613 }
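
/* For example, if the variable for (value V) held the list
[(value V1), (value V2), (reg R)] with V1 the most canonical,
val_reset redirects V2's equivalence to V1, records R as a location
of V1, and finally removes V's own link to V1, leaving V with no
equivalences.  */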
2614
2615 /* Find the values in a given location LOC and map VAL to another
2616 value, if it is unique, or else record LOC as one of the locations
2617 holding VAL.  */
2618
2619 static void
2620 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2621 {
2622 decl_or_value dv = dv_from_value (val);
2623
2624 if (dump_file && (dump_flags & TDF_DETAILS))
2625 {
2626 if (insn)
2627 fprintf (dump_file, "%i: ", INSN_UID (insn));
2628 else
2629 fprintf (dump_file, "head: ");
2630 print_inline_rtx (dump_file, val, 0);
2631 fputs (" is at ", dump_file);
2632 print_inline_rtx (dump_file, loc, 0);
2633 fputc ('\n', dump_file);
2634 }
2635
2636 val_reset (set, dv);
2637
2638 gcc_checking_assert (!unsuitable_loc (loc));
2639
2640 if (REG_P (loc))
2641 {
2642 attrs *node, *found = NULL;
2643
2644 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2645 if (dv_is_value_p (node->dv)
2646 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2647 {
2648 found = node;
2649
2650 /* Map incoming equivalences. ??? Wouldn't it be nice if
2651 we just started sharing the location lists? Maybe a
2652 circular list ending at the value itself or some
2653 such. */
2654 set_variable_part (set, dv_as_value (node->dv),
2655 dv_from_value (val), node->offset,
2656 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2657 set_variable_part (set, val, node->dv, node->offset,
2658 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2659 }
2660
2661 /* If we didn't find any equivalence, we need to remember that
2662 this value is held in the named register. */
2663 if (found)
2664 return;
2665 }
2666 /* ??? Attempt to find and merge equivalent MEMs or other
2667 expressions too. */
2668
2669 val_bind (set, val, loc, false);
2670 }
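
/* For instance, when (value V) is computed into a register that
already holds (value W) of the same mode, val_resolve records V and W
as equivalent instead of merely adding the register as another
independent location of V.  */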
2671
2672 /* Initialize dataflow set SET to be empty.  */
2674
2675 static void
2676 dataflow_set_init (dataflow_set *set)
2677 {
2678 init_attrs_list_set (set->regs);
2679 set->vars = shared_hash_copy (empty_shared_hash);
2680 set->stack_adjust = 0;
2681 set->traversed_vars = NULL;
2682 }
2683
2684 /* Delete the contents of dataflow set SET. */
2685
2686 static void
2687 dataflow_set_clear (dataflow_set *set)
2688 {
2689 int i;
2690
2691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2692 attrs_list_clear (&set->regs[i]);
2693
2694 shared_hash_destroy (set->vars);
2695 set->vars = shared_hash_copy (empty_shared_hash);
2696 }
2697
2698 /* Copy the contents of dataflow set SRC to DST. */
2699
2700 static void
2701 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2702 {
2703 int i;
2704
2705 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2706 attrs_list_copy (&dst->regs[i], src->regs[i]);
2707
2708 shared_hash_destroy (dst->vars);
2709 dst->vars = shared_hash_copy (src->vars);
2710 dst->stack_adjust = src->stack_adjust;
2711 }
2712
2713 /* Information used when merging lists of locations for a given
2714 offset of a variable.  */
2715 struct variable_union_info
2716 {
2717 /* Node of the location chain. */
2718 location_chain *lc;
2719
2720 /* The sum of positions in the input chains. */
2721 int pos;
2722
2723 /* The position in the chain of DST dataflow set. */
2724 int pos_dst;
2725 };
2726
2727 /* Buffer for location list sorting and its allocated size. */
2728 static struct variable_union_info *vui_vec;
2729 static int vui_allocated;
2730
2731 /* Compare function for qsort, order the structures by POS element. */
2732
2733 static int
2734 variable_union_info_cmp_pos (const void *n1, const void *n2)
2735 {
2736 const struct variable_union_info *const i1 =
2737 (const struct variable_union_info *) n1;
2738 const struct variable_union_info *const i2 =
2739 (const struct variable_union_info *) n2;
2740
2741 if (i1->pos != i2->pos)
2742 return i1->pos - i2->pos;
2743
2744 return (i1->pos_dst - i2->pos_dst);
2745 }
2746
2747 /* Compute the union of the location parts of variable SRC and the same
2748 variable from dataflow set SET.  Compute the "sorted" union of the location chains
2749 for common offsets, i.e. the locations of a variable part are sorted by
2750 a priority where the priority is the sum of the positions in the 2 chains
2751 (if a location is only in one list the position in the second list is
2752 defined to be larger than the length of the chains).
2753 When we are updating the location parts the newest location is in the
2754 beginning of the chain, so when we do the described "sorted" union
2755 we keep the newest locations in the beginning. */
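
/* For example, unioning the SRC chain [A, B] with the DST chain
[B, C] (src_l = dst_l = 2) assigns priorities B: 0 + 1 = 1,
A: 0 + 4 = 4 and C: 1 + 4 = 5, so the merged chain is [B, A, C];
the common location B, near the front of both chains, stays first.  */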
2756
2757 static int
2758 variable_union (variable *src, dataflow_set *set)
2759 {
2760 variable *dst;
2761 variable **dstp;
2762 int i, j, k;
2763
2764 dstp = shared_hash_find_slot (set->vars, src->dv);
2765 if (!dstp || !*dstp)
2766 {
2767 src->refcount++;
2768
2769 dst_can_be_shared = false;
2770 if (!dstp)
2771 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2772
2773 *dstp = src;
2774
2775 /* Continue traversing the hash table. */
2776 return 1;
2777 }
2778 else
2779 dst = *dstp;
2780
2781 gcc_assert (src->n_var_parts);
2782 gcc_checking_assert (src->onepart == dst->onepart);
2783
2784 /* We can combine one-part variables very efficiently, because their
2785 entries are in canonical order. */
2786 if (src->onepart)
2787 {
2788 location_chain **nodep, *dnode, *snode;
2789
2790 gcc_assert (src->n_var_parts == 1
2791 && dst->n_var_parts == 1);
2792
2793 snode = src->var_part[0].loc_chain;
2794 gcc_assert (snode);
2795
2796 restart_onepart_unshared:
2797 nodep = &dst->var_part[0].loc_chain;
2798 dnode = *nodep;
2799 gcc_assert (dnode);
2800
2801 while (snode)
2802 {
2803 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2804
2805 if (r > 0)
2806 {
2807 location_chain *nnode;
2808
2809 if (shared_var_p (dst, set->vars))
2810 {
2811 dstp = unshare_variable (set, dstp, dst,
2812 VAR_INIT_STATUS_INITIALIZED);
2813 dst = *dstp;
2814 goto restart_onepart_unshared;
2815 }
2816
2817 *nodep = nnode = new location_chain;
2818 nnode->loc = snode->loc;
2819 nnode->init = snode->init;
2820 if (!snode->set_src || MEM_P (snode->set_src))
2821 nnode->set_src = NULL;
2822 else
2823 nnode->set_src = snode->set_src;
2824 nnode->next = dnode;
2825 dnode = nnode;
2826 }
2827 else if (r == 0)
2828 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2829
2830 if (r >= 0)
2831 snode = snode->next;
2832
2833 nodep = &dnode->next;
2834 dnode = *nodep;
2835 }
2836
2837 return 1;
2838 }
2839
2840 gcc_checking_assert (!src->onepart);
2841
2842 /* Count the number of location parts, result is K. */
2843 for (i = 0, j = 0, k = 0;
2844 i < src->n_var_parts && j < dst->n_var_parts; k++)
2845 {
2846 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2847 {
2848 i++;
2849 j++;
2850 }
2851 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2852 i++;
2853 else
2854 j++;
2855 }
2856 k += src->n_var_parts - i;
2857 k += dst->n_var_parts - j;
2858
2859 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2860 thus there are at most MAX_VAR_PARTS different offsets. */
2861 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2862
2863 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2864 {
2865 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2866 dst = *dstp;
2867 }
2868
2869 i = src->n_var_parts - 1;
2870 j = dst->n_var_parts - 1;
2871 dst->n_var_parts = k;
2872
2873 for (k--; k >= 0; k--)
2874 {
2875 location_chain *node, *node2;
2876
2877 if (i >= 0 && j >= 0
2878 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2879 {
2880 /* Compute the "sorted" union of the chains, i.e. the locations which
2881 are in both chains go first, they are sorted by the sum of
2882 positions in the chains. */
2883 int dst_l, src_l;
2884 int ii, jj, n;
2885 struct variable_union_info *vui;
2886
2887 /* If DST is shared, compare the location chains.
2888 If they are different, we will modify the chain in DST with
2889 high probability, so make a copy of DST.  */
2890 if (shared_var_p (dst, set->vars))
2891 {
2892 for (node = src->var_part[i].loc_chain,
2893 node2 = dst->var_part[j].loc_chain; node && node2;
2894 node = node->next, node2 = node2->next)
2895 {
2896 if (!((REG_P (node2->loc)
2897 && REG_P (node->loc)
2898 && REGNO (node2->loc) == REGNO (node->loc))
2899 || rtx_equal_p (node2->loc, node->loc)))
2900 {
2901 if (node2->init < node->init)
2902 node2->init = node->init;
2903 break;
2904 }
2905 }
2906 if (node || node2)
2907 {
2908 dstp = unshare_variable (set, dstp, dst,
2909 VAR_INIT_STATUS_UNKNOWN);
2910 dst = (variable *)*dstp;
2911 }
2912 }
2913
2914 src_l = 0;
2915 for (node = src->var_part[i].loc_chain; node; node = node->next)
2916 src_l++;
2917 dst_l = 0;
2918 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2919 dst_l++;
2920
2921 if (dst_l == 1)
2922 {
2923 /* The most common case, much simpler, no qsort is needed. */
2924 location_chain *dstnode = dst->var_part[j].loc_chain;
2925 dst->var_part[k].loc_chain = dstnode;
2926 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2927 node2 = dstnode;
2928 for (node = src->var_part[i].loc_chain; node; node = node->next)
2929 if (!((REG_P (dstnode->loc)
2930 && REG_P (node->loc)
2931 && REGNO (dstnode->loc) == REGNO (node->loc))
2932 || rtx_equal_p (dstnode->loc, node->loc)))
2933 {
2934 location_chain *new_node;
2935
2936 /* Copy the location from SRC. */
2937 new_node = new location_chain;
2938 new_node->loc = node->loc;
2939 new_node->init = node->init;
2940 if (!node->set_src || MEM_P (node->set_src))
2941 new_node->set_src = NULL;
2942 else
2943 new_node->set_src = node->set_src;
2944 node2->next = new_node;
2945 node2 = new_node;
2946 }
2947 node2->next = NULL;
2948 }
2949 else
2950 {
2951 if (src_l + dst_l > vui_allocated)
2952 {
2953 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2954 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2955 vui_allocated);
2956 }
2957 vui = vui_vec;
2958
2959 /* Fill in the locations from DST. */
2960 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2961 node = node->next, jj++)
2962 {
2963 vui[jj].lc = node;
2964 vui[jj].pos_dst = jj;
2965
2966 /* Position biased by a value larger than any sum of two valid positions.  */
2967 vui[jj].pos = jj + src_l + dst_l;
2968 }
2969
2970 /* Fill in the locations from SRC. */
2971 n = dst_l;
2972 for (node = src->var_part[i].loc_chain, ii = 0; node;
2973 node = node->next, ii++)
2974 {
2975 /* Find location from NODE. */
2976 for (jj = 0; jj < dst_l; jj++)
2977 {
2978 if ((REG_P (vui[jj].lc->loc)
2979 && REG_P (node->loc)
2980 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2981 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2982 {
2983 vui[jj].pos = jj + ii;
2984 break;
2985 }
2986 }
2987 if (jj >= dst_l) /* The location has not been found. */
2988 {
2989 location_chain *new_node;
2990
2991 /* Copy the location from SRC. */
2992 new_node = new location_chain;
2993 new_node->loc = node->loc;
2994 new_node->init = node->init;
2995 if (!node->set_src || MEM_P (node->set_src))
2996 new_node->set_src = NULL;
2997 else
2998 new_node->set_src = node->set_src;
2999 vui[n].lc = new_node;
3000 vui[n].pos_dst = src_l + dst_l;
3001 vui[n].pos = ii + src_l + dst_l;
3002 n++;
3003 }
3004 }
3005
3006 if (dst_l == 2)
3007 {
3008 /* A special case that is still very common.  For dst_l == 2,
3009 all entries dst_l ... n-1 are already sorted, since for i >= dst_l
3010 we have vui[i].pos == i + src_l + dst_l.  */
3011 if (vui[0].pos > vui[1].pos)
3012 {
3013 /* Order should be 1, 0, 2... */
3014 dst->var_part[k].loc_chain = vui[1].lc;
3015 vui[1].lc->next = vui[0].lc;
3016 if (n >= 3)
3017 {
3018 vui[0].lc->next = vui[2].lc;
3019 vui[n - 1].lc->next = NULL;
3020 }
3021 else
3022 vui[0].lc->next = NULL;
3023 ii = 3;
3024 }
3025 else
3026 {
3027 dst->var_part[k].loc_chain = vui[0].lc;
3028 if (n >= 3 && vui[2].pos < vui[1].pos)
3029 {
3030 /* Order should be 0, 2, 1, 3... */
3031 vui[0].lc->next = vui[2].lc;
3032 vui[2].lc->next = vui[1].lc;
3033 if (n >= 4)
3034 {
3035 vui[1].lc->next = vui[3].lc;
3036 vui[n - 1].lc->next = NULL;
3037 }
3038 else
3039 vui[1].lc->next = NULL;
3040 ii = 4;
3041 }
3042 else
3043 {
3044 /* Order should be 0, 1, 2... */
3045 ii = 1;
3046 vui[n - 1].lc->next = NULL;
3047 }
3048 }
3049 for (; ii < n; ii++)
3050 vui[ii - 1].lc->next = vui[ii].lc;
3051 }
3052 else
3053 {
3054 qsort (vui, n, sizeof (struct variable_union_info),
3055 variable_union_info_cmp_pos);
3056
3057 /* Reconnect the nodes in sorted order. */
3058 for (ii = 1; ii < n; ii++)
3059 vui[ii - 1].lc->next = vui[ii].lc;
3060 vui[n - 1].lc->next = NULL;
3061 dst->var_part[k].loc_chain = vui[0].lc;
3062 }
3063
3064 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3065 }
3066 i--;
3067 j--;
3068 }
3069 else if ((i >= 0 && j >= 0
3070 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3071 || i < 0)
3072 {
3073 dst->var_part[k] = dst->var_part[j];
3074 j--;
3075 }
3076 else if ((i >= 0 && j >= 0
3077 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3078 || j < 0)
3079 {
3080 location_chain **nextp;
3081
3082 /* Copy the chain from SRC. */
3083 nextp = &dst->var_part[k].loc_chain;
3084 for (node = src->var_part[i].loc_chain; node; node = node->next)
3085 {
3086 location_chain *new_lc;
3087
3088 new_lc = new location_chain;
3089 new_lc->next = NULL;
3090 new_lc->init = node->init;
3091 if (!node->set_src || MEM_P (node->set_src))
3092 new_lc->set_src = NULL;
3093 else
3094 new_lc->set_src = node->set_src;
3095 new_lc->loc = node->loc;
3096
3097 *nextp = new_lc;
3098 nextp = &new_lc->next;
3099 }
3100
3101 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3102 i--;
3103 }
3104 dst->var_part[k].cur_loc = NULL;
3105 }
3106
3107 if (flag_var_tracking_uninit)
3108 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3109 {
3110 location_chain *node, *node2;
3111 for (node = src->var_part[i].loc_chain; node; node = node->next)
3112 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3113 if (rtx_equal_p (node->loc, node2->loc))
3114 {
3115 if (node->init > node2->init)
3116 node2->init = node->init;
3117 }
3118 }
3119
3120 /* Continue traversing the hash table. */
3121 return 1;
3122 }
3123
3124 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3125
3126 static void
3127 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3128 {
3129 int i;
3130
3131 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3132 attrs_list_union (&dst->regs[i], src->regs[i]);
3133
3134 if (dst->vars == empty_shared_hash)
3135 {
3136 shared_hash_destroy (dst->vars);
3137 dst->vars = shared_hash_copy (src->vars);
3138 }
3139 else
3140 {
3141 variable_iterator_type hi;
3142 variable *var;
3143
3144 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3145 var, variable, hi)
3146 variable_union (var, dst);
3147 }
3148 }
3149
3150 /* Whether the value is currently being expanded. */
3151 #define VALUE_RECURSED_INTO(x) \
3152 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3153
3154 /* Whether no expansion was found, saving useless lookups.
3155 It must only be set when VALUE_CHANGED is clear. */
3156 #define NO_LOC_P(x) \
3157 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3158
3159 /* Whether cur_loc in the value needs to be (re)computed. */
3160 #define VALUE_CHANGED(x) \
3161 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3162 /* Whether cur_loc in the decl needs to be (re)computed. */
3163 #define DECL_CHANGED(x) TREE_VISITED (x)
3164
3165 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3166 user DECLs, this means they're in changed_variables. Values and
3167 debug exprs may be left with this flag set if no user variable
3168 requires them to be evaluated. */
3169
3170 static inline void
3171 set_dv_changed (decl_or_value dv, bool newv)
3172 {
3173 switch (dv_onepart_p (dv))
3174 {
3175 case ONEPART_VALUE:
3176 if (newv)
3177 NO_LOC_P (dv_as_value (dv)) = false;
3178 VALUE_CHANGED (dv_as_value (dv)) = newv;
3179 break;
3180
3181 case ONEPART_DEXPR:
3182 if (newv)
3183 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3184 /* Fall through. */
3185
3186 default:
3187 DECL_CHANGED (dv_as_decl (dv)) = newv;
3188 break;
3189 }
3190 }
3191
3192 /* Return true if DV needs to have its cur_loc recomputed. */
3193
3194 static inline bool
3195 dv_changed_p (decl_or_value dv)
3196 {
3197 return (dv_is_value_p (dv)
3198 ? VALUE_CHANGED (dv_as_value (dv))
3199 : DECL_CHANGED (dv_as_decl (dv)));
3200 }
3201
3202 /* Return a location list node whose loc is rtx_equal to LOC, in the
3203 location list of a one-part variable or value VAR, or in that of
3204 any values recursively mentioned in the location lists. VARS must
3205 be in star-canonical form. */
3206
3207 static location_chain *
3208 find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
3209 {
3210 location_chain *node;
3211 enum rtx_code loc_code;
3212
3213 if (!var)
3214 return NULL;
3215
3216 gcc_checking_assert (var->onepart);
3217
3218 if (!var->n_var_parts)
3219 return NULL;
3220
3221 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3222
3223 loc_code = GET_CODE (loc);
3224 for (node = var->var_part[0].loc_chain; node; node = node->next)
3225 {
3226 decl_or_value dv;
3227 variable *rvar;
3228
3229 if (GET_CODE (node->loc) != loc_code)
3230 {
3231 if (GET_CODE (node->loc) != VALUE)
3232 continue;
3233 }
3234 else if (loc == node->loc)
3235 return node;
3236 else if (loc_code != VALUE)
3237 {
3238 if (rtx_equal_p (loc, node->loc))
3239 return node;
3240 continue;
3241 }
3242
3243 /* Since we're in star-canonical form, we don't need to visit
3244 non-canonical nodes: one-part variables and non-canonical
3245 values would only point back to the canonical node. */
3246 if (dv_is_value_p (var->dv)
3247 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3248 {
3249 /* Skip all subsequent VALUEs. */
3250 while (node->next && GET_CODE (node->next->loc) == VALUE)
3251 {
3252 node = node->next;
3253 gcc_checking_assert (!canon_value_cmp (node->loc,
3254 dv_as_value (var->dv)));
3255 if (loc == node->loc)
3256 return node;
3257 }
3258 continue;
3259 }
3260
3261 gcc_checking_assert (node == var->var_part[0].loc_chain);
3262 gcc_checking_assert (!node->next);
3263
3264 dv = dv_from_value (node->loc);
3265 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3266 return find_loc_in_1pdv (loc, rvar, vars);
3267 }
3268
3269 /* ??? Gotta look in cselib_val locations too. */
3270
3271 return NULL;
3272 }
3273
3274 /* Hash table iteration argument passed to the variable_merge_over_* functions.  */
3275 struct dfset_merge
3276 {
3277 /* The set in which the merge is to be inserted. */
3278 dataflow_set *dst;
3279 /* The set that we're iterating in. */
3280 dataflow_set *cur;
3281 /* The set that may contain the other dv we are to merge with. */
3282 dataflow_set *src;
3283 /* Number of onepart dvs in src. */
3284 int src_onepart_cnt;
3285 };
3286
3287 /* Insert LOC in *NODEP, if it's not there yet.  The list must be in
3288 loc_cmp order, and it is maintained as such. */
3289
3290 static void
3291 insert_into_intersection (location_chain **nodep, rtx loc,
3292 enum var_init_status status)
3293 {
3294 location_chain *node;
3295 int r;
3296
3297 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3298 if ((r = loc_cmp (node->loc, loc)) == 0)
3299 {
3300 node->init = MIN (node->init, status);
3301 return;
3302 }
3303 else if (r > 0)
3304 break;
3305
3306 node = new location_chain;
3307
3308 node->loc = loc;
3309 node->set_src = NULL;
3310 node->init = status;
3311 node->next = *nodep;
3312 *nodep = node;
3313 }
3314
3315 /* Insert in DEST the intersection of the locations present in both
3316 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3317 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3318 DSM->dst. */
3319
3320 static void
3321 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
3322 location_chain *s1node, variable *s2var)
3323 {
3324 dataflow_set *s1set = dsm->cur;
3325 dataflow_set *s2set = dsm->src;
3326 location_chain *found;
3327
3328 if (s2var)
3329 {
3330 location_chain *s2node;
3331
3332 gcc_checking_assert (s2var->onepart);
3333
3334 if (s2var->n_var_parts)
3335 {
3336 s2node = s2var->var_part[0].loc_chain;
3337
3338 for (; s1node && s2node;
3339 s1node = s1node->next, s2node = s2node->next)
3340 if (s1node->loc != s2node->loc)
3341 break;
3342 else if (s1node->loc == val)
3343 continue;
3344 else
3345 insert_into_intersection (dest, s1node->loc,
3346 MIN (s1node->init, s2node->init));
3347 }
3348 }
3349
3350 for (; s1node; s1node = s1node->next)
3351 {
3352 if (s1node->loc == val)
3353 continue;
3354
3355 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3356 shared_hash_htab (s2set->vars))))
3357 {
3358 insert_into_intersection (dest, s1node->loc,
3359 MIN (s1node->init, found->init));
3360 continue;
3361 }
3362
3363 if (GET_CODE (s1node->loc) == VALUE
3364 && !VALUE_RECURSED_INTO (s1node->loc))
3365 {
3366 decl_or_value dv = dv_from_value (s1node->loc);
3367 variable *svar = shared_hash_find (s1set->vars, dv);
3368 if (svar)
3369 {
3370 if (svar->n_var_parts == 1)
3371 {
3372 VALUE_RECURSED_INTO (s1node->loc) = true;
3373 intersect_loc_chains (val, dest, dsm,
3374 svar->var_part[0].loc_chain,
3375 s2var);
3376 VALUE_RECURSED_INTO (s1node->loc) = false;
3377 }
3378 }
3379 }
3380
3381 /* ??? gotta look in cselib_val locations too. */
3382
3383 /* ??? if the location is equivalent to any location in src,
3384 searched recursively
3385
3386 add to dst the values needed to represent the equivalence
3387
3388 telling whether location S is equivalent to another dv's
3389 location list:
3390
3391 for each location D in the list
3392
3393 if S and D satisfy rtx_equal_p, then it is present
3394
3395 else if D is a value, recurse without cycles
3396
3397 else if S and D have the same CODE and MODE
3398
3399 for each operand oS and the corresponding oD
3400
3401 if oS and oD are not equivalent, then S and D are not equivalent
3402
3403 else if they are RTX vectors
3404
3405 if any vector oS element is not equivalent to its respective oD,
3406 then S and D are not equivalent
3407
3408 */
3409
3410
3411 }
3412 }
3413
3414 /* Return -1 if X should be before Y in a location list for a 1-part
3415 variable, 1 if Y should be before X, and 0 if they're equivalent
3416 and should not appear in the list. */
3417
3418 static int
3419 loc_cmp (rtx x, rtx y)
3420 {
3421 int i, j, r;
3422 RTX_CODE code = GET_CODE (x);
3423 const char *fmt;
3424
3425 if (x == y)
3426 return 0;
3427
3428 if (REG_P (x))
3429 {
3430 if (!REG_P (y))
3431 return -1;
3432 gcc_assert (GET_MODE (x) == GET_MODE (y));
3433 if (REGNO (x) == REGNO (y))
3434 return 0;
3435 else if (REGNO (x) < REGNO (y))
3436 return -1;
3437 else
3438 return 1;
3439 }
3440
3441 if (REG_P (y))
3442 return 1;
3443
3444 if (MEM_P (x))
3445 {
3446 if (!MEM_P (y))
3447 return -1;
3448 gcc_assert (GET_MODE (x) == GET_MODE (y));
3449 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3450 }
3451
3452 if (MEM_P (y))
3453 return 1;
3454
3455 if (GET_CODE (x) == VALUE)
3456 {
3457 if (GET_CODE (y) != VALUE)
3458 return -1;
3459 /* Don't assert the modes are the same, that is true only
3460 when not recursing. (subreg:QI (value:SI 1:1) 0)
3461 and (subreg:QI (value:DI 2:2) 0) can be compared,
3462 even when the modes are different. */
3463 if (canon_value_cmp (x, y))
3464 return -1;
3465 else
3466 return 1;
3467 }
3468
3469 if (GET_CODE (y) == VALUE)
3470 return 1;
3471
3472 /* Entry value is the least preferable kind of expression. */
3473 if (GET_CODE (x) == ENTRY_VALUE)
3474 {
3475 if (GET_CODE (y) != ENTRY_VALUE)
3476 return 1;
3477 gcc_assert (GET_MODE (x) == GET_MODE (y));
3478 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3479 }
3480
3481 if (GET_CODE (y) == ENTRY_VALUE)
3482 return -1;
3483
3484 if (GET_CODE (x) == GET_CODE (y))
3485 /* Compare operands below. */;
3486 else if (GET_CODE (x) < GET_CODE (y))
3487 return -1;
3488 else
3489 return 1;
3490
3491 gcc_assert (GET_MODE (x) == GET_MODE (y));
3492
3493 if (GET_CODE (x) == DEBUG_EXPR)
3494 {
3495 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3496 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3497 return -1;
3498 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3499 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3500 return 1;
3501 }
3502
3503 fmt = GET_RTX_FORMAT (code);
3504 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3505 switch (fmt[i])
3506 {
3507 case 'w':
3508 if (XWINT (x, i) == XWINT (y, i))
3509 break;
3510 else if (XWINT (x, i) < XWINT (y, i))
3511 return -1;
3512 else
3513 return 1;
3514
3515 case 'n':
3516 case 'i':
3517 if (XINT (x, i) == XINT (y, i))
3518 break;
3519 else if (XINT (x, i) < XINT (y, i))
3520 return -1;
3521 else
3522 return 1;
3523
3524 case 'V':
3525 case 'E':
3526 /* Compare the vector length first. */
3527 if (XVECLEN (x, i) == XVECLEN (y, i))
3528 /* Compare the vector's elements.  */;
3529 else if (XVECLEN (x, i) < XVECLEN (y, i))
3530 return -1;
3531 else
3532 return 1;
3533
3534 for (j = 0; j < XVECLEN (x, i); j++)
3535 if ((r = loc_cmp (XVECEXP (x, i, j),
3536 XVECEXP (y, i, j))))
3537 return r;
3538 break;
3539
3540 case 'e':
3541 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3542 return r;
3543 break;
3544
3545 case 'S':
3546 case 's':
3547 if (XSTR (x, i) == XSTR (y, i))
3548 break;
3549 if (!XSTR (x, i))
3550 return -1;
3551 if (!XSTR (y, i))
3552 return 1;
3553 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3554 break;
3555 else if (r < 0)
3556 return -1;
3557 else
3558 return 1;
3559
3560 case 'u':
3561 /* These are just backpointers, so they don't matter. */
3562 break;
3563
3564 case '0':
3565 case 't':
3566 break;
3567
3568 /* It is believed that rtx's at this level will never
3569 contain anything but integers and other rtx's,
3570 except for within LABEL_REFs and SYMBOL_REFs. */
3571 default:
3572 gcc_unreachable ();
3573 }
3574 if (CONST_WIDE_INT_P (x))
3575 {
3576 /* Compare the vector length first. */
3577 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3578 return 1;
3579 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3580 return -1;
3581
3582 /* Compare the vector's elements.  */
3583 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3584 {
3585 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3586 return -1;
3587 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3588 return 1;
3589 }
3590 }
3591
3592 return 0;
3593 }
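
/* So, for instance, any REG sorts before any MEM, any MEM before any
bare VALUE, VALUEs before other kinds of expression, and ENTRY_VALUEs
come last among those remaining kinds.  */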
3594
3595 /* Check the order of entries in one-part variables. */
3596
3597 int
3598 canonicalize_loc_order_check (variable **slot,
3599 dataflow_set *data ATTRIBUTE_UNUSED)
3600 {
3601 variable *var = *slot;
3602 location_chain *node, *next;
3603
3604 #ifdef ENABLE_RTL_CHECKING
3605 int i;
3606 for (i = 0; i < var->n_var_parts; i++)
3607 gcc_assert (var->var_part[i].cur_loc == NULL);
3608 gcc_assert (!var->in_changed_variables);
3609 #endif
3610
3611 if (!var->onepart)
3612 return 1;
3613
3614 gcc_assert (var->n_var_parts == 1);
3615 node = var->var_part[0].loc_chain;
3616 gcc_assert (node);
3617
3618 while ((next = node->next))
3619 {
3620 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3621 node = next;
3622 }
3623
3624 return 1;
3625 }
3626
3627 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3628 more likely to be chosen as canonical for an equivalence set.
3629 Ensure less likely values can reach more likely neighbors, making
3630 the connections bidirectional. */
3631
3632 int
3633 canonicalize_values_mark (variable **slot, dataflow_set *set)
3634 {
3635 variable *var = *slot;
3636 decl_or_value dv = var->dv;
3637 rtx val;
3638 location_chain *node;
3639
3640 if (!dv_is_value_p (dv))
3641 return 1;
3642
3643 gcc_checking_assert (var->n_var_parts == 1);
3644
3645 val = dv_as_value (dv);
3646
3647 for (node = var->var_part[0].loc_chain; node; node = node->next)
3648 if (GET_CODE (node->loc) == VALUE)
3649 {
3650 if (canon_value_cmp (node->loc, val))
3651 VALUE_RECURSED_INTO (val) = true;
3652 else
3653 {
3654 decl_or_value odv = dv_from_value (node->loc);
3655 variable **oslot;
3656 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3657
3658 set_slot_part (set, val, oslot, odv, 0,
3659 node->init, NULL_RTX);
3660
3661 VALUE_RECURSED_INTO (node->loc) = true;
3662 }
3663 }
3664
3665 return 1;
3666 }
3667
3668 /* Remove redundant entries from equivalence lists in onepart
3669 variables, canonicalizing equivalence sets into star shapes. */
3670
3671 int
3672 canonicalize_values_star (variable **slot, dataflow_set *set)
3673 {
3674 variable *var = *slot;
3675 decl_or_value dv = var->dv;
3676 location_chain *node;
3677 decl_or_value cdv;
3678 rtx val, cval;
3679 variable **cslot;
3680 bool has_value;
3681 bool has_marks;
3682
3683 if (!var->onepart)
3684 return 1;
3685
3686 gcc_checking_assert (var->n_var_parts == 1);
3687
3688 if (dv_is_value_p (dv))
3689 {
3690 cval = dv_as_value (dv);
3691 if (!VALUE_RECURSED_INTO (cval))
3692 return 1;
3693 VALUE_RECURSED_INTO (cval) = false;
3694 }
3695 else
3696 cval = NULL_RTX;
3697
3698 restart:
3699 val = cval;
3700 has_value = false;
3701 has_marks = false;
3702
3703 gcc_assert (var->n_var_parts == 1);
3704
3705 for (node = var->var_part[0].loc_chain; node; node = node->next)
3706 if (GET_CODE (node->loc) == VALUE)
3707 {
3708 has_value = true;
3709 if (VALUE_RECURSED_INTO (node->loc))
3710 has_marks = true;
3711 if (canon_value_cmp (node->loc, cval))
3712 cval = node->loc;
3713 }
3714
3715 if (!has_value)
3716 return 1;
3717
3718 if (cval == val)
3719 {
3720 if (!has_marks || dv_is_decl_p (dv))
3721 return 1;
3722
3723 /* Keep it marked so that we revisit it, either after visiting a
3724 child node, or after visiting a new parent that might be
3725 found out. */
3726 VALUE_RECURSED_INTO (val) = true;
3727
3728 for (node = var->var_part[0].loc_chain; node; node = node->next)
3729 if (GET_CODE (node->loc) == VALUE
3730 && VALUE_RECURSED_INTO (node->loc))
3731 {
3732 cval = node->loc;
3733 restart_with_cval:
3734 VALUE_RECURSED_INTO (cval) = false;
3735 dv = dv_from_value (cval);
3736 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3737 if (!slot)
3738 {
3739 gcc_assert (dv_is_decl_p (var->dv));
3740 /* The canonical value was reset and dropped.
3741 Remove it. */
3742 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3743 return 1;
3744 }
3745 var = *slot;
3746 gcc_assert (dv_is_value_p (var->dv));
3747 if (var->n_var_parts == 0)
3748 return 1;
3749 gcc_assert (var->n_var_parts == 1);
3750 goto restart;
3751 }
3752
3753 VALUE_RECURSED_INTO (val) = false;
3754
3755 return 1;
3756 }
3757
3758 /* Push values to the canonical one. */
3759 cdv = dv_from_value (cval);
3760 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3761
3762 for (node = var->var_part[0].loc_chain; node; node = node->next)
3763 if (node->loc != cval)
3764 {
3765 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3766 node->init, NULL_RTX);
3767 if (GET_CODE (node->loc) == VALUE)
3768 {
3769 decl_or_value ndv = dv_from_value (node->loc);
3770
3771 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3772 NO_INSERT);
3773
3774 if (canon_value_cmp (node->loc, val))
3775 {
3776 /* If it could have been a local minimum, it's not any more,
3777 since it's now a neighbor of cval, so it may have to push
3778 to it. Conversely, if it wouldn't have prevailed over
3779 val, then whatever mark it has is fine: if it was to
3780 push, it will now push to a more canonical node, but if
3781 it wasn't, then it has already pushed any values it might
3782 have to. */
3783 VALUE_RECURSED_INTO (node->loc) = true;
3784 /* Make sure we visit node->loc by ensuring that cval is
3785 visited too.  */
3786 VALUE_RECURSED_INTO (cval) = true;
3787 }
3788 else if (!VALUE_RECURSED_INTO (node->loc))
3789 /* If we have no need to "recurse" into this node, it's
3790 already "canonicalized", so drop the link to the old
3791 parent. */
3792 clobber_variable_part (set, cval, ndv, 0, NULL);
3793 }
3794 else if (GET_CODE (node->loc) == REG)
3795 {
3796 attrs *list = set->regs[REGNO (node->loc)], **listp;
3797
3798 /* Change an existing attribute referring to dv so that it
3799 refers to cdv, removing any duplicate this might
3800 introduce, and checking that no previous duplicates
3801 existed, all in a single pass. */
3802
3803 while (list)
3804 {
3805 if (list->offset == 0
3806 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3807 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3808 break;
3809
3810 list = list->next;
3811 }
3812
3813 gcc_assert (list);
3814 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3815 {
3816 list->dv = cdv;
3817 for (listp = &list->next; (list = *listp); listp = &list->next)
3818 {
3819 if (list->offset)
3820 continue;
3821
3822 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3823 {
3824 *listp = list->next;
3825 delete list;
3826 list = *listp;
3827 break;
3828 }
3829
3830 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3831 }
3832 }
3833 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3834 {
3835 for (listp = &list->next; (list = *listp); listp = &list->next)
3836 {
3837 if (list->offset)
3838 continue;
3839
3840 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3841 {
3842 *listp = list->next;
3843 delete list;
3844 list = *listp;
3845 break;
3846 }
3847
3848 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3849 }
3850 }
3851 else
3852 gcc_unreachable ();
3853
3854 if (flag_checking)
3855 while (list)
3856 {
3857 if (list->offset == 0
3858 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3859 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3860 gcc_unreachable ();
3861
3862 list = list->next;
3863 }
3864 }
3865 }
3866
3867 if (val)
3868 set_slot_part (set, val, cslot, cdv, 0,
3869 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3870
3871 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3872
3873 /* Variable may have been unshared. */
3874 var = *slot;
3875 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3876 && var->var_part[0].loc_chain->next == NULL);
3877
3878 if (VALUE_RECURSED_INTO (cval))
3879 goto restart_with_cval;
3880
3881 return 1;
3882 }
3883
3884 /* Bind one-part variables to the canonical value in an equivalence
3885 set. Not doing this causes dataflow convergence failure in rare
3886 circumstances, see PR42873. Unfortunately we can't do this
3887 efficiently as part of canonicalize_values_star, since we may not
3888 have determined or even seen the canonical value of a set when we
3889 get to a variable that references another member of the set. */
3890
3891 int
3892 canonicalize_vars_star (variable **slot, dataflow_set *set)
3893 {
3894 variable *var = *slot;
3895 decl_or_value dv = var->dv;
3896 location_chain *node;
3897 rtx cval;
3898 decl_or_value cdv;
3899 variable **cslot;
3900 variable *cvar;
3901 location_chain *cnode;
3902
3903 if (!var->onepart || var->onepart == ONEPART_VALUE)
3904 return 1;
3905
3906 gcc_assert (var->n_var_parts == 1);
3907
3908 node = var->var_part[0].loc_chain;
3909
3910 if (GET_CODE (node->loc) != VALUE)
3911 return 1;
3912
3913 gcc_assert (!node->next);
3914 cval = node->loc;
3915
3916 /* Push values to the canonical one. */
3917 cdv = dv_from_value (cval);
3918 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3919 if (!cslot)
3920 return 1;
3921 cvar = *cslot;
3922 gcc_assert (cvar->n_var_parts == 1);
3923
3924 cnode = cvar->var_part[0].loc_chain;
3925
3926 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3927 that are not more canonical than it. */
3928 if (GET_CODE (cnode->loc) != VALUE
3929 || !canon_value_cmp (cnode->loc, cval))
3930 return 1;
3931
3932 /* CVAL was found to be non-canonical. Change the variable to point
3933 to the canonical VALUE. */
3934 gcc_assert (!cnode->next);
3935 cval = cnode->loc;
3936
3937 slot = set_slot_part (set, cval, slot, dv, 0,
3938 node->init, node->set_src);
3939 clobber_slot_part (set, cval, slot, 0, node->set_src);
3940
3941 return 1;
3942 }
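
/* For illustration (a sketch; the VALUE names are invented, not from
   a real dump): suppose V1, V2 and V3 are equivalent and
   canon_value_cmp ranks V1 highest.  canonicalize_values_star gathers
   the equivalences so that V2 and V3 each list V1 and V1 collects the
   other locations, and canonicalize_vars_star then rebinds a decl
   whose single location was V3 to point at V1 instead, so equivalent
   one-part variables converge on the same canonical VALUE.  */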
3943
3944 /* Combine the variable or value S1VAR (in DSM->cur) with the
3945 corresponding entry in DSM->src. Multi-part variables are combined
3946 with variable_union, whereas onepart dvs are combined with
3947 intersection. */
3948
3949 static int
3950 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3951 {
3952 dataflow_set *dst = dsm->dst;
3953 variable **dstslot;
3954 variable *s2var, *dvar = NULL;
3955 decl_or_value dv = s1var->dv;
3956 onepart_enum onepart = s1var->onepart;
3957 rtx val;
3958 hashval_t dvhash;
3959 location_chain *node, **nodep;
3960
3961 /* If the incoming onepart variable has an empty location list, then
3962 the intersection will be just as empty. For other variables,
3963 it's always union. */
3964 gcc_checking_assert (s1var->n_var_parts
3965 && s1var->var_part[0].loc_chain);
3966
3967 if (!onepart)
3968 return variable_union (s1var, dst);
3969
3970 gcc_checking_assert (s1var->n_var_parts == 1);
3971
3972 dvhash = dv_htab_hash (dv);
3973 if (dv_is_value_p (dv))
3974 val = dv_as_value (dv);
3975 else
3976 val = NULL;
3977
3978 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3979 if (!s2var)
3980 {
3981 dst_can_be_shared = false;
3982 return 1;
3983 }
3984
3985 dsm->src_onepart_cnt--;
3986 gcc_assert (s2var->var_part[0].loc_chain
3987 && s2var->onepart == onepart
3988 && s2var->n_var_parts == 1);
3989
3990 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3991 if (dstslot)
3992 {
3993 dvar = *dstslot;
3994 gcc_assert (dvar->refcount == 1
3995 && dvar->onepart == onepart
3996 && dvar->n_var_parts == 1);
3997 nodep = &dvar->var_part[0].loc_chain;
3998 }
3999 else
4000 {
4001 nodep = &node;
4002 node = NULL;
4003 }
4004
4005 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4006 {
4007 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4008 dvhash, INSERT);
4009 *dstslot = dvar = s2var;
4010 dvar->refcount++;
4011 }
4012 else
4013 {
4014 dst_can_be_shared = false;
4015
4016 intersect_loc_chains (val, nodep, dsm,
4017 s1var->var_part[0].loc_chain, s2var);
4018
4019 if (!dstslot)
4020 {
4021 if (node)
4022 {
4023 dvar = onepart_pool_allocate (onepart);
4024 dvar->dv = dv;
4025 dvar->refcount = 1;
4026 dvar->n_var_parts = 1;
4027 dvar->onepart = onepart;
4028 dvar->in_changed_variables = false;
4029 dvar->var_part[0].loc_chain = node;
4030 dvar->var_part[0].cur_loc = NULL;
4031 if (onepart)
4032 VAR_LOC_1PAUX (dvar) = NULL;
4033 else
4034 VAR_PART_OFFSET (dvar, 0) = 0;
4035
4036 dstslot
4037 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4038 INSERT);
4039 gcc_assert (!*dstslot);
4040 *dstslot = dvar;
4041 }
4042 else
4043 return 1;
4044 }
4045 }
4046
4047 nodep = &dvar->var_part[0].loc_chain;
4048 while ((node = *nodep))
4049 {
4050 location_chain **nextp = &node->next;
4051
4052 if (GET_CODE (node->loc) == REG)
4053 {
4054 attrs *list;
4055
4056 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4057 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4058 && dv_is_value_p (list->dv))
4059 break;
4060
4061 if (!list)
4062 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4063 dv, 0, node->loc);
4064 /* If this value became canonical for another value that had
4065 this register, we want to leave it alone. */
4066 else if (dv_as_value (list->dv) != val)
4067 {
4068 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4069 dstslot, dv, 0,
4070 node->init, NULL_RTX);
4071 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4072
4073 /* Since nextp points into the removed node, we can't
4074 use it. The pointer to the next node moved to nodep.
4075 However, if the variable we're walking is unshared
4076 during our walk, we'll keep walking the location list
4077 of the previously-shared variable, in which case the
4078 node won't have been removed, and we'll want to skip
4079 it. That's why we test *nodep here. */
4080 if (*nodep != node)
4081 nextp = nodep;
4082 }
4083 }
4084 else
4085 /* Canonicalization puts registers first, so we don't have to
4086 walk it all. */
4087 break;
4088 nodep = nextp;
4089 }
4090
4091 if (dvar != *dstslot)
4092 dvar = *dstslot;
4093 nodep = &dvar->var_part[0].loc_chain;
4094
4095 if (val)
4096 {
4097 /* Mark all referenced nodes for canonicalization, and make sure
4098 we have mutual equivalence links. */
4099 VALUE_RECURSED_INTO (val) = true;
4100 for (node = *nodep; node; node = node->next)
4101 if (GET_CODE (node->loc) == VALUE)
4102 {
4103 VALUE_RECURSED_INTO (node->loc) = true;
4104 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4105 node->init, NULL, INSERT);
4106 }
4107
4108 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4109 gcc_assert (*dstslot == dvar);
4110 canonicalize_values_star (dstslot, dst);
4111 gcc_checking_assert (dstslot
4112 == shared_hash_find_slot_noinsert_1 (dst->vars,
4113 dv, dvhash));
4114 dvar = *dstslot;
4115 }
4116 else
4117 {
4118 bool has_value = false, has_other = false;
4119
4120 /* If we have one value and anything else, we're going to
4121 canonicalize this, so make sure all values have an entry in
4122 the table and are marked for canonicalization. */
4123 for (node = *nodep; node; node = node->next)
4124 {
4125 if (GET_CODE (node->loc) == VALUE)
4126 {
4127 /* If this was marked during register canonicalization,
4128 we know we have to canonicalize values. */
4129 if (has_value)
4130 has_other = true;
4131 has_value = true;
4132 if (has_other)
4133 break;
4134 }
4135 else
4136 {
4137 has_other = true;
4138 if (has_value)
4139 break;
4140 }
4141 }
4142
4143 if (has_value && has_other)
4144 {
4145 for (node = *nodep; node; node = node->next)
4146 {
4147 if (GET_CODE (node->loc) == VALUE)
4148 {
4149 decl_or_value dv = dv_from_value (node->loc);
4150 variable **slot = NULL;
4151
4152 if (shared_hash_shared (dst->vars))
4153 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4154 if (!slot)
4155 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4156 INSERT);
4157 if (!*slot)
4158 {
4159 variable *var = onepart_pool_allocate (ONEPART_VALUE);
4160 var->dv = dv;
4161 var->refcount = 1;
4162 var->n_var_parts = 1;
4163 var->onepart = ONEPART_VALUE;
4164 var->in_changed_variables = false;
4165 var->var_part[0].loc_chain = NULL;
4166 var->var_part[0].cur_loc = NULL;
4167 VAR_LOC_1PAUX (var) = NULL;
4168 *slot = var;
4169 }
4170
4171 VALUE_RECURSED_INTO (node->loc) = true;
4172 }
4173 }
4174
4175 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4176 gcc_assert (*dstslot == dvar);
4177 canonicalize_values_star (dstslot, dst);
4178 gcc_checking_assert (dstslot
4179 == shared_hash_find_slot_noinsert_1 (dst->vars,
4180 dv, dvhash));
4181 dvar = *dstslot;
4182 }
4183 }
4184
4185 if (!onepart_variable_different_p (dvar, s2var))
4186 {
4187 variable_htab_free (dvar);
4188 *dstslot = dvar = s2var;
4189 dvar->refcount++;
4190 }
4191 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4192 {
4193 variable_htab_free (dvar);
4194 *dstslot = dvar = s1var;
4195 dvar->refcount++;
4196 dst_can_be_shared = false;
4197 }
4198 else
4199 dst_can_be_shared = false;
4200
4201 return 1;
4202 }
4203
4204 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4205 multi-part variable. Unions of multi-part variables and
4206 intersections of one-part ones will be handled in
4207 variable_merge_over_cur(). */
4208
4209 static int
4210 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4211 {
4212 dataflow_set *dst = dsm->dst;
4213 decl_or_value dv = s2var->dv;
4214
4215 if (!s2var->onepart)
4216 {
4217 variable **dstp = shared_hash_find_slot (dst->vars, dv);
4218 *dstp = s2var;
4219 s2var->refcount++;
4220 return 1;
4221 }
4222
4223 dsm->src_onepart_cnt++;
4224 return 1;
4225 }
4226
4227 /* Combine dataflow set information from SRC2 into DST; the merged
4228 result replaces the previous contents of DST. */
4229
4230 static void
4231 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4232 {
4233 dataflow_set cur = *dst;
4234 dataflow_set *src1 = &cur;
4235 struct dfset_merge dsm;
4236 int i;
4237 size_t src1_elems, src2_elems;
4238 variable_iterator_type hi;
4239 variable *var;
4240
4241 src1_elems = shared_hash_htab (src1->vars)->elements ();
4242 src2_elems = shared_hash_htab (src2->vars)->elements ();
4243 dataflow_set_init (dst);
4244 dst->stack_adjust = cur.stack_adjust;
4245 shared_hash_destroy (dst->vars);
4246 dst->vars = new shared_hash;
4247 dst->vars->refcount = 1;
4248 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4249
4250 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4251 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4252
4253 dsm.dst = dst;
4254 dsm.src = src2;
4255 dsm.cur = src1;
4256 dsm.src_onepart_cnt = 0;
4257
4258 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4259 var, variable, hi)
4260 variable_merge_over_src (var, &dsm);
4261 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4262 var, variable, hi)
4263 variable_merge_over_cur (var, &dsm);
4264
4265 if (dsm.src_onepart_cnt)
4266 dst_can_be_shared = false;
4267
4268 dataflow_set_destroy (src1);
4269 }
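
/* For illustration (a sketch, not from a real dump): if one
   predecessor's OUT set binds decl A to (reg:SI 1) and VALUE V1,
   while the other binds A to (reg:SI 1) and VALUE V2, then after the
   merge the one-part variable A keeps only the intersection,
   (reg:SI 1), whereas a multi-part variable would have kept the union
   of both location lists via variable_union.  */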
4270
4271 /* Mark register equivalences. */
4272
4273 static void
4274 dataflow_set_equiv_regs (dataflow_set *set)
4275 {
4276 int i;
4277 attrs *list, **listp;
4278
4279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4280 {
4281 rtx canon[NUM_MACHINE_MODES];
4282
4283 /* If the list is empty or one entry, no need to canonicalize
4284 anything. */
4285 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4286 continue;
4287
4288 memset (canon, 0, sizeof (canon));
4289
4290 for (list = set->regs[i]; list; list = list->next)
4291 if (list->offset == 0 && dv_is_value_p (list->dv))
4292 {
4293 rtx val = dv_as_value (list->dv);
4294 rtx *cvalp = &canon[(int)GET_MODE (val)];
4295 rtx cval = *cvalp;
4296
4297 if (canon_value_cmp (val, cval))
4298 *cvalp = val;
4299 }
4300
4301 for (list = set->regs[i]; list; list = list->next)
4302 if (list->offset == 0 && dv_onepart_p (list->dv))
4303 {
4304 rtx cval = canon[(int)GET_MODE (list->loc)];
4305
4306 if (!cval)
4307 continue;
4308
4309 if (dv_is_value_p (list->dv))
4310 {
4311 rtx val = dv_as_value (list->dv);
4312
4313 if (val == cval)
4314 continue;
4315
4316 VALUE_RECURSED_INTO (val) = true;
4317 set_variable_part (set, val, dv_from_value (cval), 0,
4318 VAR_INIT_STATUS_INITIALIZED,
4319 NULL, NO_INSERT);
4320 }
4321
4322 VALUE_RECURSED_INTO (cval) = true;
4323 set_variable_part (set, cval, list->dv, 0,
4324 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4325 }
4326
4327 for (listp = &set->regs[i]; (list = *listp);
4328 listp = list ? &list->next : listp)
4329 if (list->offset == 0 && dv_onepart_p (list->dv))
4330 {
4331 rtx cval = canon[(int)GET_MODE (list->loc)];
4332 variable **slot;
4333
4334 if (!cval)
4335 continue;
4336
4337 if (dv_is_value_p (list->dv))
4338 {
4339 rtx val = dv_as_value (list->dv);
4340 if (!VALUE_RECURSED_INTO (val))
4341 continue;
4342 }
4343
4344 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4345 canonicalize_values_star (slot, set);
4346 if (*listp != list)
4347 list = NULL;
4348 }
4349 }
4350 }
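
/* For illustration (a sketch with invented VALUEs): if both V7 and V9
   are recorded at offset 0 of (reg:SI 1) in the same mode, the loops
   above pick the canon_value_cmp winner, say V7, record V9 = V7 as a
   mutual equivalence, and then run canonicalize_values_star so that
   variables mentioning V9 end up reaching V7.  */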
4351
4352 /* Remove any redundant values in the location list of VAR, which must
4353 be unshared and one-part. */
4354
4355 static void
4356 remove_duplicate_values (variable *var)
4357 {
4358 location_chain *node, **nodep;
4359
4360 gcc_assert (var->onepart);
4361 gcc_assert (var->n_var_parts == 1);
4362 gcc_assert (var->refcount == 1);
4363
4364 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4365 {
4366 if (GET_CODE (node->loc) == VALUE)
4367 {
4368 if (VALUE_RECURSED_INTO (node->loc))
4369 {
4370 /* Remove duplicate value node. */
4371 *nodep = node->next;
4372 delete node;
4373 continue;
4374 }
4375 else
4376 VALUE_RECURSED_INTO (node->loc) = true;
4377 }
4378 nodep = &node->next;
4379 }
4380
4381 for (node = var->var_part[0].loc_chain; node; node = node->next)
4382 if (GET_CODE (node->loc) == VALUE)
4383 {
4384 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4385 VALUE_RECURSED_INTO (node->loc) = false;
4386 }
4387 }
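
/* Note that VALUE_RECURSED_INTO doubles as the "visited" bit here:
   the first loop above marks each VALUE when first seen and deletes
   later occurrences, so e.g. a chain (V1, V2, V1) becomes (V1, V2);
   the second loop clears the marks again so the flag can keep its
   usual meaning elsewhere.  */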
4388
4389
4390 /* Hash table iteration argument passed to variable_post_merge. */
4391 struct dfset_post_merge
4392 {
4393 /* The new input set for the current block. */
4394 dataflow_set *set;
4395 /* Pointer to the permanent input set for the current block, or
4396 NULL. */
4397 dataflow_set **permp;
4398 };
4399
4400 /* Create values for incoming expressions associated with one-part
4401 variables that don't have value numbers for them. */
4402
4403 int
4404 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4405 {
4406 dataflow_set *set = dfpm->set;
4407 variable *var = *slot;
4408 location_chain *node;
4409
4410 if (!var->onepart || !var->n_var_parts)
4411 return 1;
4412
4413 gcc_assert (var->n_var_parts == 1);
4414
4415 if (dv_is_decl_p (var->dv))
4416 {
4417 bool check_dupes = false;
4418
4419 restart:
4420 for (node = var->var_part[0].loc_chain; node; node = node->next)
4421 {
4422 if (GET_CODE (node->loc) == VALUE)
4423 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4424 else if (GET_CODE (node->loc) == REG)
4425 {
4426 attrs *att, **attp, **curp = NULL;
4427
4428 if (var->refcount != 1)
4429 {
4430 slot = unshare_variable (set, slot, var,
4431 VAR_INIT_STATUS_INITIALIZED);
4432 var = *slot;
4433 goto restart;
4434 }
4435
4436 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4437 attp = &att->next)
4438 if (att->offset == 0
4439 && GET_MODE (att->loc) == GET_MODE (node->loc))
4440 {
4441 if (dv_is_value_p (att->dv))
4442 {
4443 rtx cval = dv_as_value (att->dv);
4444 node->loc = cval;
4445 check_dupes = true;
4446 break;
4447 }
4448 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4449 curp = attp;
4450 }
4451
4452 if (!curp)
4453 {
4454 curp = attp;
4455 while (*curp)
4456 if ((*curp)->offset == 0
4457 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4458 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4459 break;
4460 else
4461 curp = &(*curp)->next;
4462 gcc_assert (*curp);
4463 }
4464
4465 if (!att)
4466 {
4467 decl_or_value cdv;
4468 rtx cval;
4469
4470 if (!*dfpm->permp)
4471 {
4472 *dfpm->permp = XNEW (dataflow_set);
4473 dataflow_set_init (*dfpm->permp);
4474 }
4475
4476 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4477 att; att = att->next)
4478 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4479 {
4480 gcc_assert (att->offset == 0
4481 && dv_is_value_p (att->dv));
4482 val_reset (set, att->dv);
4483 break;
4484 }
4485
4486 if (att)
4487 {
4488 cdv = att->dv;
4489 cval = dv_as_value (cdv);
4490 }
4491 else
4492 {
4493 /* Create a unique value to hold this register,
4494 which ought to be found and reused in
4495 subsequent rounds. */
4496 cselib_val *v;
4497 gcc_assert (!cselib_lookup (node->loc,
4498 GET_MODE (node->loc), 0,
4499 VOIDmode));
4500 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4501 VOIDmode);
4502 cselib_preserve_value (v);
4503 cselib_invalidate_rtx (node->loc);
4504 cval = v->val_rtx;
4505 cdv = dv_from_value (cval);
4506 if (dump_file)
4507 fprintf (dump_file,
4508 "Created new value %u:%u for reg %i\n",
4509 v->uid, v->hash, REGNO (node->loc));
4510 }
4511
4512 var_reg_decl_set (*dfpm->permp, node->loc,
4513 VAR_INIT_STATUS_INITIALIZED,
4514 cdv, 0, NULL, INSERT);
4515
4516 node->loc = cval;
4517 check_dupes = true;
4518 }
4519
4520 /* Remove the attribute referring to the decl, which now
4521 uses the value for the register, whether that value already
4522 exists or is to be added when we bring PERM in. */
4523 att = *curp;
4524 *curp = att->next;
4525 delete att;
4526 }
4527 }
4528
4529 if (check_dupes)
4530 remove_duplicate_values (var);
4531 }
4532
4533 return 1;
4534 }
4535
4536 /* Reset values in the permanent set that are not associated with the
4537 chosen expression. */
4538
4539 int
4540 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4541 {
4542 dataflow_set *set = dfpm->set;
4543 variable *pvar = *pslot, *var;
4544 location_chain *pnode;
4545 decl_or_value dv;
4546 attrs *att;
4547
4548 gcc_assert (dv_is_value_p (pvar->dv)
4549 && pvar->n_var_parts == 1);
4550 pnode = pvar->var_part[0].loc_chain;
4551 gcc_assert (pnode
4552 && !pnode->next
4553 && REG_P (pnode->loc));
4554
4555 dv = pvar->dv;
4556
4557 var = shared_hash_find (set->vars, dv);
4558 if (var)
4559 {
4560 /* Although variable_post_merge_new_vals may have made decls
4561 non-star-canonical, values that pre-existed in canonical form
4562 remain canonical, and newly-created values reference a single
4563 REG, so they are canonical as well. Since VAR has the
4564 location list for a VALUE, using find_loc_in_1pdv for it is
4565 fine, since VALUEs don't map back to DECLs. */
4566 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4567 return 1;
4568 val_reset (set, dv);
4569 }
4570
4571 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4572 if (att->offset == 0
4573 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4574 && dv_is_value_p (att->dv))
4575 break;
4576
4577 /* If there is a value associated with this register already, create
4578 an equivalence. */
4579 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4580 {
4581 rtx cval = dv_as_value (att->dv);
4582 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4583 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4584 NULL, INSERT);
4585 }
4586 else if (!att)
4587 {
4588 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4589 dv, 0, pnode->loc);
4590 variable_union (pvar, set);
4591 }
4592
4593 return 1;
4594 }
4595
4596 /* Adjust the merged set: create values for new one-part expressions,
4597 bring in equivalences from the permanent set, and canonicalize. */
4598
4599 static void
4600 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4601 {
4602 struct dfset_post_merge dfpm;
4603
4604 dfpm.set = set;
4605 dfpm.permp = permp;
4606
4607 shared_hash_htab (set->vars)
4608 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4609 if (*permp)
4610 shared_hash_htab ((*permp)->vars)
4611 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4612 shared_hash_htab (set->vars)
4613 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4614 shared_hash_htab (set->vars)
4615 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4616 }
4617
4618 /* Return a node whose loc is a MEM that refers to EXPR in the
4619 location list of the value VAL, or in that of any values
4620 recursively mentioned in those location lists. */
4621
4622 static location_chain *
4623 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4624 {
4625 location_chain *node;
4626 decl_or_value dv;
4627 variable *var;
4628 location_chain *where = NULL;
4629
4630 if (!val)
4631 return NULL;
4632
4633 gcc_assert (GET_CODE (val) == VALUE
4634 && !VALUE_RECURSED_INTO (val));
4635
4636 dv = dv_from_value (val);
4637 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4638
4639 if (!var)
4640 return NULL;
4641
4642 gcc_assert (var->onepart);
4643
4644 if (!var->n_var_parts)
4645 return NULL;
4646
4647 VALUE_RECURSED_INTO (val) = true;
4648
4649 for (node = var->var_part[0].loc_chain; node; node = node->next)
4650 if (MEM_P (node->loc)
4651 && MEM_EXPR (node->loc) == expr
4652 && int_mem_offset (node->loc) == 0)
4653 {
4654 where = node;
4655 break;
4656 }
4657 else if (GET_CODE (node->loc) == VALUE
4658 && !VALUE_RECURSED_INTO (node->loc)
4659 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4660 break;
4661
4662 VALUE_RECURSED_INTO (val) = false;
4663
4664 return where;
4665 }
4666
4667 /* Return TRUE if the value of MEM may vary across a call. */
4668
4669 static bool
4670 mem_dies_at_call (rtx mem)
4671 {
4672 tree expr = MEM_EXPR (mem);
4673 tree decl;
4674
4675 if (!expr)
4676 return true;
4677
4678 decl = get_base_address (expr);
4679
4680 if (!decl)
4681 return true;
4682
4683 if (!DECL_P (decl))
4684 return true;
4685
4686 return (may_be_aliased (decl)
4687 || (!TREE_READONLY (decl) && is_global_var (decl)));
4688 }
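
/* For illustration (a sketch; the decls are invented): given

     int global_counter;
     void f (void) { int local; int exposed; use (&exposed); ... }

   a MEM whose MEM_EXPR is `global_counter' or `exposed' is assumed to
   die at calls, since the callee may write to global or aliased
   memory, whereas a MEM for `local', whose address is never taken,
   survives them.  */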
4689
4690 /* Remove all MEMs from the location list of a hash table entry for a
4691 one-part variable, except those whose MEM attributes map back to
4692 the variable itself, directly or within a VALUE. */
4693
4694 int
4695 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4696 {
4697 variable *var = *slot;
4698
4699 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4700 {
4701 tree decl = dv_as_decl (var->dv);
4702 location_chain *loc, **locp;
4703 bool changed = false;
4704
4705 if (!var->n_var_parts)
4706 return 1;
4707
4708 gcc_assert (var->n_var_parts == 1);
4709
4710 if (shared_var_p (var, set->vars))
4711 {
4712 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4713 {
4714 /* We want to remove dying MEMs that don't refer to DECL. */
4715 if (GET_CODE (loc->loc) == MEM
4716 && (MEM_EXPR (loc->loc) != decl
4717 || int_mem_offset (loc->loc) != 0)
4718 && mem_dies_at_call (loc->loc))
4719 break;
4720 /* We want to move MEMs that do refer to DECL here. */
4721 else if (GET_CODE (loc->loc) == VALUE
4722 && find_mem_expr_in_1pdv (decl, loc->loc,
4723 shared_hash_htab (set->vars)))
4724 break;
4725 }
4726
4727 if (!loc)
4728 return 1;
4729
4730 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4731 var = *slot;
4732 gcc_assert (var->n_var_parts == 1);
4733 }
4734
4735 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4736 loc; loc = *locp)
4737 {
4738 rtx old_loc = loc->loc;
4739 if (GET_CODE (old_loc) == VALUE)
4740 {
4741 location_chain *mem_node
4742 = find_mem_expr_in_1pdv (decl, loc->loc,
4743 shared_hash_htab (set->vars));
4744
4745 /* ??? This picks up only one out of multiple MEMs that
4746 refer to the same variable. Do we ever need to be
4747 concerned about dealing with more than one, or, given
4748 that they should all map to the same variable
4749 location, their addresses will have been merged and
4750 they will be regarded as equivalent? */
4751 if (mem_node)
4752 {
4753 loc->loc = mem_node->loc;
4754 loc->set_src = mem_node->set_src;
4755 loc->init = MIN (loc->init, mem_node->init);
4756 }
4757 }
4758
4759 if (GET_CODE (loc->loc) != MEM
4760 || (MEM_EXPR (loc->loc) == decl
4761 && int_mem_offset (loc->loc) == 0)
4762 || !mem_dies_at_call (loc->loc))
4763 {
4764 if (old_loc != loc->loc && emit_notes)
4765 {
4766 if (old_loc == var->var_part[0].cur_loc)
4767 {
4768 changed = true;
4769 var->var_part[0].cur_loc = NULL;
4770 }
4771 }
4772 locp = &loc->next;
4773 continue;
4774 }
4775
4776 if (emit_notes)
4777 {
4778 if (old_loc == var->var_part[0].cur_loc)
4779 {
4780 changed = true;
4781 var->var_part[0].cur_loc = NULL;
4782 }
4783 }
4784 *locp = loc->next;
4785 delete loc;
4786 }
4787
4788 if (!var->var_part[0].loc_chain)
4789 {
4790 var->n_var_parts--;
4791 changed = true;
4792 }
4793 if (changed)
4794 variable_was_changed (var, set);
4795 }
4796
4797 return 1;
4798 }
4799
4800 /* Remove all MEMs from the location list of a hash table entry for a
4801 onepart variable. */
4802
4803 int
4804 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4805 {
4806 variable *var = *slot;
4807
4808 if (var->onepart != NOT_ONEPART)
4809 {
4810 location_chain *loc, **locp;
4811 bool changed = false;
4812 rtx cur_loc;
4813
4814 gcc_assert (var->n_var_parts == 1);
4815
4816 if (shared_var_p (var, set->vars))
4817 {
4818 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4819 if (GET_CODE (loc->loc) == MEM
4820 && mem_dies_at_call (loc->loc))
4821 break;
4822
4823 if (!loc)
4824 return 1;
4825
4826 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4827 var = *slot;
4828 gcc_assert (var->n_var_parts == 1);
4829 }
4830
4831 if (VAR_LOC_1PAUX (var))
4832 cur_loc = VAR_LOC_FROM (var);
4833 else
4834 cur_loc = var->var_part[0].cur_loc;
4835
4836 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4837 loc; loc = *locp)
4838 {
4839 if (GET_CODE (loc->loc) != MEM
4840 || !mem_dies_at_call (loc->loc))
4841 {
4842 locp = &loc->next;
4843 continue;
4844 }
4845
4846 *locp = loc->next;
4847 /* If we have deleted the location that was last emitted,
4848 we have to emit a new location, so add the variable to the
4849 set of changed variables. */
4850 if (cur_loc == loc->loc)
4851 {
4852 changed = true;
4853 var->var_part[0].cur_loc = NULL;
4854 if (VAR_LOC_1PAUX (var))
4855 VAR_LOC_FROM (var) = NULL;
4856 }
4857 delete loc;
4858 }
4859
4860 if (!var->var_part[0].loc_chain)
4861 {
4862 var->n_var_parts--;
4863 changed = true;
4864 }
4865 if (changed)
4866 variable_was_changed (var, set);
4867 }
4868
4869 return 1;
4870 }
4871
4872 /* Remove all variable-location information about call-clobbered
4873 registers, as well as associations between MEMs and VALUEs. */
4874
4875 static void
4876 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
4877 {
4878 unsigned int r;
4879 hard_reg_set_iterator hrsi;
4880 HARD_REG_SET invalidated_regs;
4881
4882 get_call_reg_set_usage (call_insn, &invalidated_regs,
4883 regs_invalidated_by_call);
4884
4885 EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs, 0, r, hrsi)
4886 var_regno_delete (set, r);
4887
4888 if (MAY_HAVE_DEBUG_BIND_INSNS)
4889 {
4890 set->traversed_vars = set->vars;
4891 shared_hash_htab (set->vars)
4892 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4893 set->traversed_vars = set->vars;
4894 shared_hash_htab (set->vars)
4895 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4896 set->traversed_vars = NULL;
4897 }
4898 }
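
/* For illustration (a sketch): at a call that clobbers, say, r0-r3,
   var_regno_delete drops any variable parts held in those registers,
   and the two traversals above first rescue MEM locations that
   provably refer to the variable itself and then drop the remaining
   MEMs that mem_dies_at_call considers clobbered.  */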
4899
4900 static bool
4901 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4902 {
4903 location_chain *lc1, *lc2;
4904
4905 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4906 {
4907 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4908 {
4909 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4910 {
4911 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4912 break;
4913 }
4914 if (rtx_equal_p (lc1->loc, lc2->loc))
4915 break;
4916 }
4917 if (!lc2)
4918 return true;
4919 }
4920 return false;
4921 }
4922
4923 /* Return true if one-part variables VAR1 and VAR2 are different.
4924 They must be in canonical order. */
4925
4926 static bool
4927 onepart_variable_different_p (variable *var1, variable *var2)
4928 {
4929 location_chain *lc1, *lc2;
4930
4931 if (var1 == var2)
4932 return false;
4933
4934 gcc_assert (var1->n_var_parts == 1
4935 && var2->n_var_parts == 1);
4936
4937 lc1 = var1->var_part[0].loc_chain;
4938 lc2 = var2->var_part[0].loc_chain;
4939
4940 gcc_assert (lc1 && lc2);
4941
4942 while (lc1 && lc2)
4943 {
4944 if (loc_cmp (lc1->loc, lc2->loc))
4945 return true;
4946 lc1 = lc1->next;
4947 lc2 = lc2->next;
4948 }
4949
4950 return lc1 != lc2;
4951 }
4952
4953 /* Dump the differences between the location lists of one-part
4954 variables VAR1 and VAR2. They must be in canonical order. */
4955
4956 static void
4957 dump_onepart_variable_differences (variable *var1, variable *var2)
4958 {
4959 location_chain *lc1, *lc2;
4960
4961 gcc_assert (var1 != var2);
4962 gcc_assert (dump_file);
4963 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4964 gcc_assert (var1->n_var_parts == 1
4965 && var2->n_var_parts == 1);
4966
4967 lc1 = var1->var_part[0].loc_chain;
4968 lc2 = var2->var_part[0].loc_chain;
4969
4970 gcc_assert (lc1 && lc2);
4971
4972 while (lc1 && lc2)
4973 {
4974 switch (loc_cmp (lc1->loc, lc2->loc))
4975 {
4976 case -1:
4977 fprintf (dump_file, "removed: ");
4978 print_rtl_single (dump_file, lc1->loc);
4979 lc1 = lc1->next;
4980 continue;
4981 case 0:
4982 break;
4983 case 1:
4984 fprintf (dump_file, "added: ");
4985 print_rtl_single (dump_file, lc2->loc);
4986 lc2 = lc2->next;
4987 continue;
4988 default:
4989 gcc_unreachable ();
4990 }
4991 lc1 = lc1->next;
4992 lc2 = lc2->next;
4993 }
4994
4995 while (lc1)
4996 {
4997 fprintf (dump_file, "removed: ");
4998 print_rtl_single (dump_file, lc1->loc);
4999 lc1 = lc1->next;
5000 }
5001
5002 while (lc2)
5003 {
5004 fprintf (dump_file, "added: ");
5005 print_rtl_single (dump_file, lc2->loc);
5006 lc2 = lc2->next;
5007 }
5008 }
5009
5010 /* Return true if variables VAR1 and VAR2 are different. */
5011
5012 static bool
5013 variable_different_p (variable *var1, variable *var2)
5014 {
5015 int i;
5016
5017 if (var1 == var2)
5018 return false;
5019
5020 if (var1->onepart != var2->onepart)
5021 return true;
5022
5023 if (var1->n_var_parts != var2->n_var_parts)
5024 return true;
5025
5026 if (var1->onepart && var1->n_var_parts)
5027 {
5028 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5029 && var1->n_var_parts == 1);
5030 /* One-part values have locations in a canonical order. */
5031 return onepart_variable_different_p (var1, var2);
5032 }
5033
5034 for (i = 0; i < var1->n_var_parts; i++)
5035 {
5036 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5037 return true;
5038 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5039 return true;
5040 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5041 return true;
5042 }
5043 return false;
5044 }
5045
5046 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5047
5048 static bool
5049 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5050 {
5051 variable_iterator_type hi;
5052 variable *var1;
5053 bool diffound = false;
5054 bool details = (dump_file && (dump_flags & TDF_DETAILS));
5055
5056 #define RETRUE \
5057 do \
5058 { \
5059 if (!details) \
5060 return true; \
5061 else \
5062 diffound = true; \
5063 } \
5064 while (0)
5065
5066 if (old_set->vars == new_set->vars)
5067 return false;
5068
5069 if (shared_hash_htab (old_set->vars)->elements ()
5070 != shared_hash_htab (new_set->vars)->elements ())
5071 RETRUE;
5072
5073 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5074 var1, variable, hi)
5075 {
5076 variable_table_type *htab = shared_hash_htab (new_set->vars);
5077 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5078
5079 if (!var2)
5080 {
5081 if (dump_file && (dump_flags & TDF_DETAILS))
5082 {
5083 fprintf (dump_file, "dataflow difference found: removal of:\n");
5084 dump_var (var1);
5085 }
5086 RETRUE;
5087 }
5088 else if (variable_different_p (var1, var2))
5089 {
5090 if (details)
5091 {
5092 fprintf (dump_file, "dataflow difference found: "
5093 "old and new follow:\n");
5094 dump_var (var1);
5095 if (dv_onepart_p (var1->dv))
5096 dump_onepart_variable_differences (var1, var2);
5097 dump_var (var2);
5098 }
5099 RETRUE;
5100 }
5101 }
5102
5103 /* There's no need to traverse the second hashtab unless we want to
5104 print the details. If both have the same number of elements and
5105 every entry found in the first one was also found in the second,
5106 then the second can't have any extra entries. */
5107 if (!details)
5108 return diffound;
5109
5110 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
5111 var1, variable, hi)
5112 {
5113 variable_table_type *htab = shared_hash_htab (old_set->vars);
5114 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5115 if (!var2)
5116 {
5117 if (details)
5118 {
5119 fprintf (dump_file, "dataflow difference found: addition of:\n");
5120 dump_var (var1);
5121 }
5122 RETRUE;
5123 }
5124 }
5125
5126 #undef RETRUE
5127
5128 return diffound;
5129 }
5130
5131 /* Free the contents of dataflow set SET. */
5132
5133 static void
5134 dataflow_set_destroy (dataflow_set *set)
5135 {
5136 int i;
5137
5138 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5139 attrs_list_clear (&set->regs[i]);
5140
5141 shared_hash_destroy (set->vars);
5142 set->vars = NULL;
5143 }
5144
5145 /* Return true if T is a tracked parameter with non-degenerate record type. */
5146
5147 static bool
5148 tracked_record_parameter_p (tree t)
5149 {
5150 if (TREE_CODE (t) != PARM_DECL)
5151 return false;
5152
5153 if (DECL_MODE (t) == BLKmode)
5154 return false;
5155
5156 tree type = TREE_TYPE (t);
5157 if (TREE_CODE (type) != RECORD_TYPE)
5158 return false;
5159
5160 if (TYPE_FIELDS (type) == NULL_TREE
5161 || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5162 return false;
5163
5164 return true;
5165 }
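
/* For illustration (a sketch; the types are invented): a PARM_DECL of
   type `struct { int x; int y; }' whose mode is not BLKmode satisfies
   the test above, while `struct { int x; }' fails it: with fewer than
   two fields the record is degenerate, so there is nothing to gain
   from tracking it field by field.  */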
5166
5167 /* Should EXPR be tracked? */
5168
5169 static bool
5170 track_expr_p (tree expr, bool need_rtl)
5171 {
5172 rtx decl_rtl;
5173 tree realdecl;
5174
5175 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5176 return DECL_RTL_SET_P (expr);
5177
5178 /* If EXPR is not a parameter or a variable do not track it. */
5179 if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
5180 return 0;
5181
5182 /* It also must have a name... */
5183 if (!DECL_NAME (expr) && need_rtl)
5184 return 0;
5185
5186 /* ... and an RTL assigned to it. */
5187 decl_rtl = DECL_RTL_IF_SET (expr);
5188 if (!decl_rtl && need_rtl)
5189 return 0;
5190
5191 /* If this expression is really a debug alias of some other declaration, we
5192 don't need to track this expression if the ultimate declaration is
5193 ignored. */
5194 realdecl = expr;
5195 if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
5196 {
5197 realdecl = DECL_DEBUG_EXPR (realdecl);
5198 if (!DECL_P (realdecl))
5199 {
5200 if (handled_component_p (realdecl)
5201 || (TREE_CODE (realdecl) == MEM_REF
5202 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5203 {
5204 HOST_WIDE_INT bitsize, bitpos, maxsize;
5205 bool reverse;
5206 tree innerdecl
5207 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5208 &maxsize, &reverse);
5209 if (!DECL_P (innerdecl)
5210 || DECL_IGNORED_P (innerdecl)
5211 /* Do not track declarations for parts of tracked record
5212 parameters since we want to track them as a whole. */
5213 || tracked_record_parameter_p (innerdecl)
5214 || TREE_STATIC (innerdecl)
5215 || bitsize <= 0
5216 || bitpos + bitsize > 256
5217 || bitsize != maxsize)
5218 return 0;
5219 else
5220 realdecl = expr;
5221 }
5222 else
5223 return 0;
5224 }
5225 }
5226
5227 /* Do not track EXPR if REALDECL should be ignored for debugging
5228 purposes. */
5229 if (DECL_IGNORED_P (realdecl))
5230 return 0;
5231
5232 /* Do not track global variables until we are able to emit correct location
5233 list for them. */
5234 if (TREE_STATIC (realdecl))
5235 return 0;
5236
5237 /* When EXPR is a DECL for an alias of some variable (see the example
5238 below), the TREE_STATIC flag is not used. Disable tracking of all
5239 DECLs whose DECL_RTL contains a SYMBOL_REF.
5240
5241 Example:
5242 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5243 char **_dl_argv;
5244 */
5245 if (decl_rtl && MEM_P (decl_rtl)
5246 && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
5247 return 0;
5248
5249 /* If the RTL is a memory, it should not be very large (because that
5250 would indicate an array or struct). */
5251 if (decl_rtl && MEM_P (decl_rtl))
5252 {
5253 /* Do not track structures and arrays. */
5254 if ((GET_MODE (decl_rtl) == BLKmode
5255 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5256 && !tracked_record_parameter_p (realdecl))
5257 return 0;
5258 if (MEM_SIZE_KNOWN_P (decl_rtl)
5259 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5260 return 0;
5261 }
5262
5263 DECL_CHANGED (expr) = 0;
5264 DECL_CHANGED (realdecl) = 0;
5265 return 1;
5266 }
5267
5268 /* Determine whether a given LOC refers to the same variable part as
5269 EXPR+OFFSET. */
5270
5271 static bool
5272 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5273 {
5274 tree expr2;
5275 poly_int64 offset2;
5276
5277 if (! DECL_P (expr))
5278 return false;
5279
5280 if (REG_P (loc))
5281 {
5282 expr2 = REG_EXPR (loc);
5283 offset2 = REG_OFFSET (loc);
5284 }
5285 else if (MEM_P (loc))
5286 {
5287 expr2 = MEM_EXPR (loc);
5288 offset2 = int_mem_offset (loc);
5289 }
5290 else
5291 return false;
5292
5293 if (! expr2 || ! DECL_P (expr2))
5294 return false;
5295
5296 expr = var_debug_decl (expr);
5297 expr2 = var_debug_decl (expr2);
5298
5299 return (expr == expr2 && known_eq (offset, offset2));
5300 }
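
/* For illustration (a sketch): if (reg:SI 1) has REG_EXPR `x' and
   REG_OFFSET 0, then same_variable_part_p ((reg:SI 1), x, 0) holds;
   a MEM whose MEM_EXPR is the DECL_DEBUG_EXPR alias of `x' compares
   equal as well, because both sides are mapped through
   var_debug_decl before the comparison.  */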
5301
5302 /* LOC is a REG or MEM that we would like to track if possible.
5303 If EXPR is null, we don't know what expression LOC refers to,
5304 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5305 LOC is an lvalue register.
5306
5307 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5308 is something we can track. When returning true, store the mode of
5309 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5310 from EXPR in *OFFSET_OUT (if nonnull). */
5311
5312 static bool
5313 track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
5314 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5315 {
5316 machine_mode mode;
5317
5318 if (expr == NULL || !track_expr_p (expr, true))
5319 return false;
5320
5321 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5322 whole subreg, but only the old inner part is really relevant. */
5323 mode = GET_MODE (loc);
5324 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5325 {
5326 machine_mode pseudo_mode;
5327
5328 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5329 if (paradoxical_subreg_p (mode, pseudo_mode))
5330 {
5331 offset += byte_lowpart_offset (pseudo_mode, mode);
5332 mode = pseudo_mode;
5333 }
5334 }
5335
5336 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5337 Do the same if we are storing to a register and EXPR occupies
5338 the whole of register LOC; in that case, the whole of EXPR is
5339 being changed. We exclude complex modes from the second case
5340 because the real and imaginary parts are represented as separate
5341 pseudo registers, even if the whole complex value fits into one
5342 hard register. */
5343 if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
5344 || (store_reg_p
5345 && !COMPLEX_MODE_P (DECL_MODE (expr))
5346 && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
5347 && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
5348 {
5349 mode = DECL_MODE (expr);
5350 offset = 0;
5351 }
5352
5353 HOST_WIDE_INT const_offset;
5354 if (!track_offset_p (offset, &const_offset))
5355 return false;
5356
5357 if (mode_out)
5358 *mode_out = mode;
5359 if (offset_out)
5360 *offset_out = const_offset;
5361 return true;
5362 }
5363
5364 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5365 want to track. When returning nonnull, make sure that the attributes
5366 on the returned value are updated. */
5367
5368 static rtx
5369 var_lowpart (machine_mode mode, rtx loc)
5370 {
5371 unsigned int offset, reg_offset, regno;
5372
5373 if (GET_MODE (loc) == mode)
5374 return loc;
5375
5376 if (!REG_P (loc) && !MEM_P (loc))
5377 return NULL;
5378
5379 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5380
5381 if (MEM_P (loc))
5382 return adjust_address_nv (loc, mode, offset);
5383
5384 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5385 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5386 reg_offset, mode);
5387 return gen_rtx_REG_offset (loc, mode, regno, offset);
5388 }
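
/* For illustration (a sketch; assumes a little-endian target): asking
   for var_lowpart (SImode, (reg:DI 3)) yields an SImode reg whose
   REG_ATTRS still point at the same decl, with the byte offset of the
   lowpart folded in; for a MEM the same effect is achieved simply by
   adjust_address_nv on the narrower mode.  */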
5389
5390 /* Carry information about uses and stores while walking rtx. */
5391
5392 struct count_use_info
5393 {
5394 /* The insn where the RTX is. */
5395 rtx_insn *insn;
5396
5397 /* The basic block where insn is. */
5398 basic_block bb;
5399
5400 /* The array of n_sets sets in the insn, as determined by cselib. */
5401 struct cselib_set *sets;
5402 int n_sets;
5403
5404 /* True if we're counting stores, false otherwise. */
5405 bool store_p;
5406 };
5407
5408 /* Find a VALUE corresponding to X. */
5409
5410 static inline cselib_val *
5411 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5412 {
5413 int i;
5414
5415 if (cui->sets)
5416 {
5417 /* This is called after uses are set up and before stores are
5418 processed by cselib, so it's safe to look up srcs, but not
5419 dsts. So we look up expressions that appear in srcs or in
5420 dest expressions, but we search the sets array for dests of
5421 stores. */
5422 if (cui->store_p)
5423 {
5424 /* Some targets represent memset and memcpy patterns
5425 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5426 (set (mem:BLK ...) (const_int ...)) or
5427 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5428 in that case, otherwise we end up with mode mismatches. */
5429 if (mode == BLKmode && MEM_P (x))
5430 return NULL;
5431 for (i = 0; i < cui->n_sets; i++)
5432 if (cui->sets[i].dest == x)
5433 return cui->sets[i].src_elt;
5434 }
5435 else
5436 return cselib_lookup (x, mode, 0, VOIDmode);
5437 }
5438
5439 return NULL;
5440 }
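
/* For illustration (a sketch): while scanning the stores of
   (set (mem:SI (reg:P sp)) (reg:SI 2)), find_use_val does not ask
   cselib about the destination MEM directly; it matches it against
   cui->sets[i].dest and returns the corresponding src_elt, since
   cselib has not yet processed the stores of this insn.  */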
5441
5442 /* Replace all registers and addresses in an expression with VALUE
5443 expressions that map back to them, unless the expression is a
5444 register. If no mapping is or can be performed, returns NULL. */
5445
5446 static rtx
5447 replace_expr_with_values (rtx loc)
5448 {
5449 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5450 return NULL;
5451 else if (MEM_P (loc))
5452 {
5453 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5454 get_address_mode (loc), 0,
5455 GET_MODE (loc));
5456 if (addr)
5457 return replace_equiv_address_nv (loc, addr->val_rtx);
5458 else
5459 return NULL;
5460 }
5461 else
5462 return cselib_subst_to_values (loc, VOIDmode);
5463 }
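
/* For illustration (a sketch; V17 is an invented VALUE): given
   (mem:SI (plus:P (reg:P sp) (const_int 8))), if cselib knows the
   address as V17, the function returns (mem:SI (value:P V17)), so
   the location stays meaningful even after the stack pointer is
   adjusted; for a bare REG it returns NULL and leaves the register
   to the caller.  */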
5464
5465 /* Return true if X contains a DEBUG_EXPR. */
5466
5467 static bool
5468 rtx_debug_expr_p (const_rtx x)
5469 {
5470 subrtx_iterator::array_type array;
5471 FOR_EACH_SUBRTX (iter, array, x, ALL)
5472 if (GET_CODE (*iter) == DEBUG_EXPR)
5473 return true;
5474 return false;
5475 }
5476
5477 /* Determine what kind of micro operation to choose for a USE. Return
5478 MO_CLOBBER if no micro operation is to be generated. */
5479
5480 static enum micro_operation_type
5481 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5482 {
5483 tree expr;
5484
5485 if (cui && cui->sets)
5486 {
5487 if (GET_CODE (loc) == VAR_LOCATION)
5488 {
5489 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5490 {
5491 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5492 if (! VAR_LOC_UNKNOWN_P (ploc))
5493 {
5494 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5495 VOIDmode);
5496
5497 /* ??? flag_float_store and volatile mems are never
5498 given values, but we could in theory use them for
5499 locations. */
5500 gcc_assert (val || 1);
5501 }
5502 return MO_VAL_LOC;
5503 }
5504 else
5505 return MO_CLOBBER;
5506 }
5507
5508 if (REG_P (loc) || MEM_P (loc))
5509 {
5510 if (modep)
5511 *modep = GET_MODE (loc);
5512 if (cui->store_p)
5513 {
5514 if (REG_P (loc)
5515 || (find_use_val (loc, GET_MODE (loc), cui)
5516 && cselib_lookup (XEXP (loc, 0),
5517 get_address_mode (loc), 0,
5518 GET_MODE (loc))))
5519 return MO_VAL_SET;
5520 }
5521 else
5522 {
5523 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5524
5525 if (val && !cselib_preserved_value_p (val))
5526 return MO_VAL_USE;
5527 }
5528 }
5529 }
5530
5531 if (REG_P (loc))
5532 {
5533 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5534
5535 if (loc == cfa_base_rtx)
5536 return MO_CLOBBER;
5537 expr = REG_EXPR (loc);
5538
5539 if (!expr)
5540 return MO_USE_NO_VAR;
5541 else if (target_for_debug_bind (var_debug_decl (expr)))
5542 return MO_CLOBBER;
5543 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5544 false, modep, NULL))
5545 return MO_USE;
5546 else
5547 return MO_USE_NO_VAR;
5548 }
5549 else if (MEM_P (loc))
5550 {
5551 expr = MEM_EXPR (loc);
5552
5553 if (!expr)
5554 return MO_CLOBBER;
5555 else if (target_for_debug_bind (var_debug_decl (expr)))
5556 return MO_CLOBBER;
5557 else if (track_loc_p (loc, expr, int_mem_offset (loc),
5558 false, modep, NULL)
5559 /* Multi-part variables shouldn't refer to one-part
5560 variable names such as VALUEs (never happens) or
5561 DEBUG_EXPRs (only happens in the presence of debug
5562 insns). */
5563 && (!MAY_HAVE_DEBUG_BIND_INSNS
5564 || !rtx_debug_expr_p (XEXP (loc, 0))))
5565 return MO_USE;
5566 else
5567 return MO_CLOBBER;
5568 }
5569
5570 return MO_CLOBBER;
5571 }
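
/* To summarize the cases above informally: with cselib sets
   available, a VAR_LOCATION of a tracked decl yields MO_VAL_LOC, a
   store to a resolvable REG or MEM yields MO_VAL_SET, and a use of a
   not-yet-preserved value yields MO_VAL_USE; otherwise a plain REG or
   MEM becomes MO_USE, MO_USE_NO_VAR or MO_CLOBBER, depending on
   whether it maps back to a tracked expression.  */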
5572
5573 /* Log to OUT information about micro-operation MOPT involving X in
5574 INSN of BB. */
5575
5576 static inline void
5577 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5578 enum micro_operation_type mopt, FILE *out)
5579 {
5580 fprintf (out, "bb %i op %i insn %i %s ",
5581 bb->index, VTI (bb)->mos.length (),
5582 INSN_UID (insn), micro_operation_type_name[mopt]);
5583 print_inline_rtx (out, x, 2);
5584 fputc ('\n', out);
5585 }
5586
5587 /* Tell whether the CONCAT used to hold a VALUE and its location
5588 needs value resolution, i.e., an attempt of mapping the location
5589 back to other incoming values. */
5590 #define VAL_NEEDS_RESOLUTION(x) \
5591 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5592 /* Whether the location in the CONCAT is a tracked expression, that
5593 should also be handled like a MO_USE. */
5594 #define VAL_HOLDS_TRACK_EXPR(x) \
5595 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5596 /* Whether the location in the CONCAT should be handled like a MO_COPY
5597 as well. */
5598 #define VAL_EXPR_IS_COPIED(x) \
5599 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5600 /* Whether the location in the CONCAT should be handled like a
5601 MO_CLOBBER as well. */
5602 #define VAL_EXPR_IS_CLOBBERED(x) \
5603 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5604
5605 /* All preserved VALUEs. */
5606 static vec<rtx> preserved_values;
5607
5608 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5609
5610 static void
5611 preserve_value (cselib_val *val)
5612 {
5613 cselib_preserve_value (val);
5614 preserved_values.safe_push (val->val_rtx);
5615 }
5616
5617 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5618 any rtxes that are unsuitable for CONST use and are not replaced
5619 by VALUEs are discovered. */
5620
5621 static bool
5622 non_suitable_const (const_rtx x)
5623 {
5624 subrtx_iterator::array_type array;
5625 FOR_EACH_SUBRTX (iter, array, x, ALL)
5626 {
5627 const_rtx x = *iter;
5628 switch (GET_CODE (x))
5629 {
5630 case REG:
5631 case DEBUG_EXPR:
5632 case PC:
5633 case SCRATCH:
5634 case CC0:
5635 case ASM_INPUT:
5636 case ASM_OPERANDS:
5637 return true;
5638 case MEM:
5639 if (!MEM_READONLY_P (x))
5640 return true;
5641 break;
5642 default:
5643 break;
5644 }
5645 }
5646 return false;
5647 }
5648
5649 /* Add the use of LOC (a register or memory reference), if it is
5650 to be tracked, to VTI (bb)->mos. */
5651
5652 static void
5653 add_uses (rtx loc, struct count_use_info *cui)
5654 {
5655 machine_mode mode = VOIDmode;
5656 enum micro_operation_type type = use_type (loc, cui, &mode);
5657
5658 if (type != MO_CLOBBER)
5659 {
5660 basic_block bb = cui->bb;
5661 micro_operation mo;
5662
5663 mo.type = type;
5664 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5665 mo.insn = cui->insn;
5666
5667 if (type == MO_VAL_LOC)
5668 {
5669 rtx oloc = loc;
5670 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5671 cselib_val *val;
5672
5673 gcc_assert (cui->sets);
5674
5675 if (MEM_P (vloc)
5676 && !REG_P (XEXP (vloc, 0))
5677 && !MEM_P (XEXP (vloc, 0)))
5678 {
5679 rtx mloc = vloc;
5680 machine_mode address_mode = get_address_mode (mloc);
5681 cselib_val *val
5682 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5683 GET_MODE (mloc));
5684
5685 if (val && !cselib_preserved_value_p (val))
5686 preserve_value (val);
5687 }
5688
5689 if (CONSTANT_P (vloc)
5690 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5691 /* For constants don't look up any value. */;
5692 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5693 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5694 {
5695 machine_mode mode2;
5696 enum micro_operation_type type2;
5697 rtx nloc = NULL;
5698 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5699
5700 if (resolvable)
5701 nloc = replace_expr_with_values (vloc);
5702
5703 if (nloc)
5704 {
5705 oloc = shallow_copy_rtx (oloc);
5706 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5707 }
5708
5709 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5710
5711 type2 = use_type (vloc, 0, &mode2);
5712
5713 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5714 || type2 == MO_CLOBBER);
5715
5716 if (type2 == MO_CLOBBER
5717 && !cselib_preserved_value_p (val))
5718 {
5719 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5720 preserve_value (val);
5721 }
5722 }
5723 else if (!VAR_LOC_UNKNOWN_P (vloc))
5724 {
5725 oloc = shallow_copy_rtx (oloc);
5726 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5727 }
5728
5729 mo.u.loc = oloc;
5730 }
5731 else if (type == MO_VAL_USE)
5732 {
5733 machine_mode mode2 = VOIDmode;
5734 enum micro_operation_type type2;
5735 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5736 rtx vloc, oloc = loc, nloc;
5737
5738 gcc_assert (cui->sets);
5739
5740 if (MEM_P (oloc)
5741 && !REG_P (XEXP (oloc, 0))
5742 && !MEM_P (XEXP (oloc, 0)))
5743 {
5744 rtx mloc = oloc;
5745 machine_mode address_mode = get_address_mode (mloc);
5746 cselib_val *val
5747 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5748 GET_MODE (mloc));
5749
5750 if (val && !cselib_preserved_value_p (val))
5751 preserve_value (val);
5752 }
5753
5754 type2 = use_type (loc, 0, &mode2);
5755
5756 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5757 || type2 == MO_CLOBBER);
5758
5759 if (type2 == MO_USE)
5760 vloc = var_lowpart (mode2, loc);
5761 else
5762 vloc = oloc;
5763
5764 /* The loc of a MO_VAL_USE may have two forms:
5765
5766 (concat val src): val is at src, a value-based
5767 representation.
5768
5769 (concat (concat val use) src): same as above, with use as
5770 the MO_USE tracked value, if it differs from src.
5771
5772 */
5773
5774 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5775 nloc = replace_expr_with_values (loc);
5776 if (!nloc)
5777 nloc = oloc;
5778
5779 if (vloc != nloc)
5780 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5781 else
5782 oloc = val->val_rtx;
5783
5784 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5785
5786 if (type2 == MO_USE)
5787 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5788 if (!cselib_preserved_value_p (val))
5789 {
5790 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5791 preserve_value (val);
5792 }
5793 }
5794 else
5795 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5796
5797 if (dump_file && (dump_flags & TDF_DETAILS))
5798 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5799 VTI (bb)->mos.safe_push (mo);
5800 }
5801 }
5802
5803 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5804
5805 static void
5806 add_uses_1 (rtx *x, void *cui)
5807 {
5808 subrtx_var_iterator::array_type array;
5809 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5810 add_uses (*iter, (struct count_use_info *) cui);
5811 }
5812
5813 /* This is the value used during expansion of locations. We want it
5814 to be unbounded, so that variables expanded deep in a recursion
5815 nest are fully evaluated and their values are cached
5816 correctly. We avoid recursion cycles through other means, and we
5817 don't unshare RTL, so excess complexity is not a problem. */
5818 #define EXPR_DEPTH (INT_MAX)
5819 /* We use this to keep too-complex expressions from being emitted as
5820 location notes and then into debug information. Users can trade
5821 compile time for ridiculously complex expressions, although they're
5822 seldom useful, and they may often have to be discarded as not
5823 representable anyway. */
5824 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5825
5826 /* Attempt to reverse the EXPR operation in the debug info and record
5827 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5828 no longer live we can express its value as VAL - 6. */
5829
5830 static void
5831 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5832 {
5833 rtx src, arg, ret;
5834 cselib_val *v;
5835 struct elt_loc_list *l;
5836 enum rtx_code code;
5837 int count;
5838
5839 if (GET_CODE (expr) != SET)
5840 return;
5841
5842 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5843 return;
5844
5845 src = SET_SRC (expr);
5846 switch (GET_CODE (src))
5847 {
5848 case PLUS:
5849 case MINUS:
5850 case XOR:
5851 case NOT:
5852 case NEG:
5853 if (!REG_P (XEXP (src, 0)))
5854 return;
5855 break;
5856 case SIGN_EXTEND:
5857 case ZERO_EXTEND:
5858 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5859 return;
5860 break;
5861 default:
5862 return;
5863 }
5864
5865 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5866 return;
5867
5868 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5869 if (!v || !cselib_preserved_value_p (v))
5870 return;
5871
5872 /* Use canonical V to avoid creating multiple redundant expressions
5873 for different VALUES equivalent to V. */
5874 v = canonical_cselib_val (v);
5875
5876 /* Adding a reverse op isn't useful if V already has an always valid
5877 location. Ignore ENTRY_VALUE: while it is always constant, we should
5878 prefer non-ENTRY_VALUE locations whenever possible. */
5879 for (l = v->locs, count = 0; l; l = l->next, count++)
5880 if (CONSTANT_P (l->loc)
5881 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5882 return;
5883 /* Avoid creating too large locs lists. */
5884 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5885 return;
5886
5887 switch (GET_CODE (src))
5888 {
5889 case NOT:
5890 case NEG:
5891 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5892 return;
5893 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5894 break;
5895 case SIGN_EXTEND:
5896 case ZERO_EXTEND:
5897 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5898 break;
5899 case XOR:
5900 code = XOR;
5901 goto binary;
5902 case PLUS:
5903 code = MINUS;
5904 goto binary;
5905 case MINUS:
5906 code = PLUS;
5907 goto binary;
5908 binary:
5909 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5910 return;
5911 arg = XEXP (src, 1);
5912 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5913 {
5914 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5915 if (arg == NULL_RTX)
5916 return;
5917 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5918 return;
5919 }
5920 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5921 break;
5922 default:
5923 gcc_unreachable ();
5924 }
5925
5926 cselib_add_permanent_equiv (v, ret, insn);
5927 }
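
/* For illustration (a sketch; V1 and V2 are invented VALUEs): for
   (set (reg:DI 1) (zero_extend:DI (reg:SI 2))) the code above records
   the reverse equivalence V2 = a lowpart SUBREG of V1, and for
   (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 6))) it records
   V2 = (plus:SI V1 (const_int -6)), so V2 can still be expressed
   after reg 2 is overwritten.  */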
5928
5929 /* Add the store (register or memory reference) LOC, which will be
5930 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing
5931 the store. CUIP->insn is the instruction which LOC is part of. */
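/* (Editor's summary of the cases below: depending on EXPR and on
   whether cselib tracks a value for LOC, the store is recorded as a
   plain MO_CLOBBER, MO_SET or MO_COPY, or wrapped together with its
   VALUE into an MO_VAL_SET whose possible shapes are documented
   further down in this function.)  */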
5932
5933 static void
5934 add_stores (rtx loc, const_rtx expr, void *cuip)
5935 {
5936 machine_mode mode = VOIDmode, mode2;
5937 struct count_use_info *cui = (struct count_use_info *)cuip;
5938 basic_block bb = cui->bb;
5939 micro_operation mo;
5940 rtx oloc = loc, nloc, src = NULL;
5941 enum micro_operation_type type = use_type (loc, cui, &mode);
5942 bool track_p = false;
5943 cselib_val *v;
5944 bool resolve, preserve;
5945
5946 if (type == MO_CLOBBER)
5947 return;
5948
5949 mode2 = mode;
5950
5951 if (REG_P (loc))
5952 {
5953 gcc_assert (loc != cfa_base_rtx);
5954 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5955 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5956 || GET_CODE (expr) == CLOBBER)
5957 {
5958 mo.type = MO_CLOBBER;
5959 mo.u.loc = loc;
5960 if (GET_CODE (expr) == SET
5961 && SET_DEST (expr) == loc
5962 && !unsuitable_loc (SET_SRC (expr))
5963 && find_use_val (loc, mode, cui))
5964 {
5965 gcc_checking_assert (type == MO_VAL_SET);
5966 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5967 }
5968 }
5969 else
5970 {
5971 if (GET_CODE (expr) == SET
5972 && SET_DEST (expr) == loc
5973 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5974 src = var_lowpart (mode2, SET_SRC (expr));
5975 loc = var_lowpart (mode2, loc);
5976
5977 if (src == NULL)
5978 {
5979 mo.type = MO_SET;
5980 mo.u.loc = loc;
5981 }
5982 else
5983 {
5984 rtx xexpr = gen_rtx_SET (loc, src);
5985 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5986 {
5987 /* If this is an instruction copying (part of) a parameter
5988 passed by invisible reference to its register location,
5989 pretend it's a SET so that the initial memory location
5990 is discarded, as the parameter register can be reused
5991 for other purposes and we do not track locations based
5992 on generic registers. */
5993 if (MEM_P (src)
5994 && REG_EXPR (loc)
5995 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5996 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5997 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5998 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5999 != arg_pointer_rtx)
6000 mo.type = MO_SET;
6001 else
6002 mo.type = MO_COPY;
6003 }
6004 else
6005 mo.type = MO_SET;
6006 mo.u.loc = xexpr;
6007 }
6008 }
6009 mo.insn = cui->insn;
6010 }
6011 else if (MEM_P (loc)
6012 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
6013 || cui->sets))
6014 {
6015 if (MEM_P (loc) && type == MO_VAL_SET
6016 && !REG_P (XEXP (loc, 0))
6017 && !MEM_P (XEXP (loc, 0)))
6018 {
6019 rtx mloc = loc;
6020 machine_mode address_mode = get_address_mode (mloc);
6021 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6022 address_mode, 0,
6023 GET_MODE (mloc));
6024
6025 if (val && !cselib_preserved_value_p (val))
6026 preserve_value (val);
6027 }
6028
6029 if (GET_CODE (expr) == CLOBBER || !track_p)
6030 {
6031 mo.type = MO_CLOBBER;
6032 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6033 }
6034 else
6035 {
6036 if (GET_CODE (expr) == SET
6037 && SET_DEST (expr) == loc
6038 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6039 src = var_lowpart (mode2, SET_SRC (expr));
6040 loc = var_lowpart (mode2, loc);
6041
6042 if (src == NULL)
6043 {
6044 mo.type = MO_SET;
6045 mo.u.loc = loc;
6046 }
6047 else
6048 {
6049 rtx xexpr = gen_rtx_SET (loc, src);
6050 if (same_variable_part_p (SET_SRC (xexpr),
6051 MEM_EXPR (loc),
6052 int_mem_offset (loc)))
6053 mo.type = MO_COPY;
6054 else
6055 mo.type = MO_SET;
6056 mo.u.loc = xexpr;
6057 }
6058 }
6059 mo.insn = cui->insn;
6060 }
6061 else
6062 return;
6063
6064 if (type != MO_VAL_SET)
6065 goto log_and_return;
6066
6067 v = find_use_val (oloc, mode, cui);
6068
6069 if (!v)
6070 goto log_and_return;
6071
6072 resolve = preserve = !cselib_preserved_value_p (v);
6073
6074 /* We cannot track values for multiple-part variables, so we track only
6075 locations for tracked record parameters. */
6076 if (track_p
6077 && REG_P (loc)
6078 && REG_EXPR (loc)
6079 && tracked_record_parameter_p (REG_EXPR (loc)))
6080 {
6081 /* Although we don't use the value here, it could be used later by the
6082 mere virtue of its existence as the operand of the reverse operation
6083 that gave rise to it (typically extension/truncation). Make sure it
6084 is preserved as required by vt_expand_var_loc_chain. */
6085 if (preserve)
6086 preserve_value (v);
6087 goto log_and_return;
6088 }
6089
6090 if (loc == stack_pointer_rtx
6091 && hard_frame_pointer_adjustment != -1
6092 && preserve)
6093 cselib_set_value_sp_based (v);
6094
6095 nloc = replace_expr_with_values (oloc);
6096 if (nloc)
6097 oloc = nloc;
6098
6099 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6100 {
6101 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6102
6103 if (oval == v)
6104 return;
6105 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6106
6107 if (oval && !cselib_preserved_value_p (oval))
6108 {
6109 micro_operation moa;
6110
6111 preserve_value (oval);
6112
6113 moa.type = MO_VAL_USE;
6114 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6115 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6116 moa.insn = cui->insn;
6117
6118 if (dump_file && (dump_flags & TDF_DETAILS))
6119 log_op_type (moa.u.loc, cui->bb, cui->insn,
6120 moa.type, dump_file);
6121 VTI (bb)->mos.safe_push (moa);
6122 }
6123
6124 resolve = false;
6125 }
6126 else if (resolve && GET_CODE (mo.u.loc) == SET)
6127 {
6128 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6129 nloc = replace_expr_with_values (SET_SRC (expr));
6130 else
6131 nloc = NULL_RTX;
6132
6133 /* Avoid the mode mismatch between oexpr and expr. */
6134 if (!nloc && mode != mode2)
6135 {
6136 nloc = SET_SRC (expr);
6137 gcc_assert (oloc == SET_DEST (expr));
6138 }
6139
6140 if (nloc && nloc != SET_SRC (mo.u.loc))
6141 oloc = gen_rtx_SET (oloc, nloc);
6142 else
6143 {
6144 if (oloc == SET_DEST (mo.u.loc))
6145 /* No point in duplicating. */
6146 oloc = mo.u.loc;
6147 if (!REG_P (SET_SRC (mo.u.loc)))
6148 resolve = false;
6149 }
6150 }
6151 else if (!resolve)
6152 {
6153 if (GET_CODE (mo.u.loc) == SET
6154 && oloc == SET_DEST (mo.u.loc))
6155 /* No point in duplicating. */
6156 oloc = mo.u.loc;
6157 }
6158 else
6159 resolve = false;
6160
6161 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6162
6163 if (mo.u.loc != oloc)
6164 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6165
6166 /* The loc of a MO_VAL_SET may have various forms:
6167
6168 (concat val dst): dst now holds val
6169
6170 (concat val (set dst src)): dst now holds val, copied from src
6171
6172 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6173 after replacing mems and non-top-level regs with values.
6174
6175 (concat (concat val dstv) (set dst src)): dst now holds val,
6176 copied from src. dstv is a value-based representation of dst, if
6177 it differs from dst. If resolution is needed, src is a REG, and
6178 its mode is the same as that of val.
6179
6180 (concat (concat val (set dstv srcv)) (set dst src)): src
6181 copied to dst, holding val. dstv and srcv are value-based
6182 representations of dst and src, respectively.
6183
6184 */
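/* (Editor's example of the second form above: a register copy
   r1 = r2 whose destination holds tracked value V would be logged
   roughly as

     (concat (value V) (set (reg r1) (reg r2)))

   with VAL_HOLDS_TRACK_EXPR set on the CONCAT when r1 corresponds to
   a tracked variable part; register names are placeholders.)  */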
6185
6186 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6187 reverse_op (v->val_rtx, expr, cui->insn);
6188
6189 mo.u.loc = loc;
6190
6191 if (track_p)
6192 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6193 if (preserve)
6194 {
6195 VAL_NEEDS_RESOLUTION (loc) = resolve;
6196 preserve_value (v);
6197 }
6198 if (mo.type == MO_CLOBBER)
6199 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6200 if (mo.type == MO_COPY)
6201 VAL_EXPR_IS_COPIED (loc) = 1;
6202
6203 mo.type = MO_VAL_SET;
6204
6205 log_and_return:
6206 if (dump_file && (dump_flags & TDF_DETAILS))
6207 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6208 VTI (bb)->mos.safe_push (mo);
6209 }
6210
6211 /* Arguments to the call. */
6212 static rtx call_arguments;
6213
6214 /* Compute call_arguments. */
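/* (Editor's note on the resulting shape: call_arguments is built up
   as an EXPR_LIST whose elements are CONCATs pairing an argument
   location with its tracked value or constant, roughly

     (expr_list (concat (reg:DI di) (value V)) ...)

   where the register is target-dependent; an entry keyed on pc_rtx
   may additionally describe an indirect call target.)  */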
6215
6216 static void
6217 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6218 {
6219 rtx link, x, call;
6220 rtx prev, cur, next;
6221 rtx this_arg = NULL_RTX;
6222 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6223 tree obj_type_ref = NULL_TREE;
6224 CUMULATIVE_ARGS args_so_far_v;
6225 cumulative_args_t args_so_far;
6226
6227 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6228 args_so_far = pack_cumulative_args (&args_so_far_v);
6229 call = get_call_rtx_from (insn);
6230 if (call)
6231 {
6232 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6233 {
6234 rtx symbol = XEXP (XEXP (call, 0), 0);
6235 if (SYMBOL_REF_DECL (symbol))
6236 fndecl = SYMBOL_REF_DECL (symbol);
6237 }
6238 if (fndecl == NULL_TREE)
6239 fndecl = MEM_EXPR (XEXP (call, 0));
6240 if (fndecl
6241 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6242 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6243 fndecl = NULL_TREE;
6244 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6245 type = TREE_TYPE (fndecl);
6246 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6247 {
6248 if (TREE_CODE (fndecl) == INDIRECT_REF
6249 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6250 obj_type_ref = TREE_OPERAND (fndecl, 0);
6251 fndecl = NULL_TREE;
6252 }
6253 if (type)
6254 {
6255 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6256 t = TREE_CHAIN (t))
6257 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6258 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6259 break;
6260 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6261 type = NULL;
6262 else
6263 {
6264 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6265 link = CALL_INSN_FUNCTION_USAGE (insn);
6266 #ifndef PCC_STATIC_STRUCT_RETURN
6267 if (aggregate_value_p (TREE_TYPE (type), type)
6268 && targetm.calls.struct_value_rtx (type, 0) == 0)
6269 {
6270 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6271 machine_mode mode = TYPE_MODE (struct_addr);
6272 rtx reg;
6273 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6274 nargs + 1);
6275 reg = targetm.calls.function_arg (args_so_far, mode,
6276 struct_addr, true);
6277 targetm.calls.function_arg_advance (args_so_far, mode,
6278 struct_addr, true);
6279 if (reg == NULL_RTX)
6280 {
6281 for (; link; link = XEXP (link, 1))
6282 if (GET_CODE (XEXP (link, 0)) == USE
6283 && MEM_P (XEXP (XEXP (link, 0), 0)))
6284 {
6285 link = XEXP (link, 1);
6286 break;
6287 }
6288 }
6289 }
6290 else
6291 #endif
6292 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6293 nargs);
6294 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6295 {
6296 machine_mode mode;
6297 t = TYPE_ARG_TYPES (type);
6298 mode = TYPE_MODE (TREE_VALUE (t));
6299 this_arg = targetm.calls.function_arg (args_so_far, mode,
6300 TREE_VALUE (t), true);
6301 if (this_arg && !REG_P (this_arg))
6302 this_arg = NULL_RTX;
6303 else if (this_arg == NULL_RTX)
6304 {
6305 for (; link; link = XEXP (link, 1))
6306 if (GET_CODE (XEXP (link, 0)) == USE
6307 && MEM_P (XEXP (XEXP (link, 0), 0)))
6308 {
6309 this_arg = XEXP (XEXP (link, 0), 0);
6310 break;
6311 }
6312 }
6313 }
6314 }
6315 }
6316 }
6317 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6318
6319 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6320 if (GET_CODE (XEXP (link, 0)) == USE)
6321 {
6322 rtx item = NULL_RTX;
6323 x = XEXP (XEXP (link, 0), 0);
6324 if (GET_MODE (link) == VOIDmode
6325 || GET_MODE (link) == BLKmode
6326 || (GET_MODE (link) != GET_MODE (x)
6327 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6328 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6329 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6330 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6331 /* Can't do anything for these if the original type mode
6332 isn't known or can't be converted. */;
6333 else if (REG_P (x))
6334 {
6335 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6336 scalar_int_mode mode;
6337 if (val && cselib_preserved_value_p (val))
6338 item = val->val_rtx;
6339 else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
6340 {
6341 opt_scalar_int_mode mode_iter;
6342 FOR_EACH_WIDER_MODE (mode_iter, mode)
6343 {
6344 mode = mode_iter.require ();
6345 if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
6346 break;
6347
6348 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6349 if (reg == NULL_RTX || !REG_P (reg))
6350 continue;
6351 val = cselib_lookup (reg, mode, 0, VOIDmode);
6352 if (val && cselib_preserved_value_p (val))
6353 {
6354 item = val->val_rtx;
6355 break;
6356 }
6357 }
6358 }
6359 }
6360 else if (MEM_P (x))
6361 {
6362 rtx mem = x;
6363 cselib_val *val;
6364
6365 if (!frame_pointer_needed)
6366 {
6367 struct adjust_mem_data amd;
6368 amd.mem_mode = VOIDmode;
6369 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6370 amd.store = true;
6371 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6372 &amd);
6373 gcc_assert (amd.side_effects.is_empty ());
6374 }
6375 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6376 if (val && cselib_preserved_value_p (val))
6377 item = val->val_rtx;
6378 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6379 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6380 {
6381 /* For a non-integer stack argument, also check whether it was
6382 initialized in an integer mode. */
6383 scalar_int_mode imode;
6384 if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
6385 && imode != GET_MODE (mem))
6386 {
6387 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6388 imode, 0, VOIDmode);
6389 if (val && cselib_preserved_value_p (val))
6390 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6391 imode);
6392 }
6393 }
6394 }
6395 if (item)
6396 {
6397 rtx x2 = x;
6398 if (GET_MODE (item) != GET_MODE (link))
6399 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6400 if (GET_MODE (x2) != GET_MODE (link))
6401 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6402 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6403 call_arguments
6404 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6405 }
6406 if (t && t != void_list_node)
6407 {
6408 tree argtype = TREE_VALUE (t);
6409 machine_mode mode = TYPE_MODE (argtype);
6410 rtx reg;
6411 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6412 {
6413 argtype = build_pointer_type (argtype);
6414 mode = TYPE_MODE (argtype);
6415 }
6416 reg = targetm.calls.function_arg (args_so_far, mode,
6417 argtype, true);
6418 if (TREE_CODE (argtype) == REFERENCE_TYPE
6419 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6420 && reg
6421 && REG_P (reg)
6422 && GET_MODE (reg) == mode
6423 && (GET_MODE_CLASS (mode) == MODE_INT
6424 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6425 && REG_P (x)
6426 && REGNO (x) == REGNO (reg)
6427 && GET_MODE (x) == mode
6428 && item)
6429 {
6430 machine_mode indmode
6431 = TYPE_MODE (TREE_TYPE (argtype));
6432 rtx mem = gen_rtx_MEM (indmode, x);
6433 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6434 if (val && cselib_preserved_value_p (val))
6435 {
6436 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6437 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6438 call_arguments);
6439 }
6440 else
6441 {
6442 struct elt_loc_list *l;
6443 tree initial;
6444
6445 /* Try harder: when passing the address of a constant
6446 pool integer, it can easily be read back. */
6447 item = XEXP (item, 1);
6448 if (GET_CODE (item) == SUBREG)
6449 item = SUBREG_REG (item);
6450 gcc_assert (GET_CODE (item) == VALUE);
6451 val = CSELIB_VAL_PTR (item);
6452 for (l = val->locs; l; l = l->next)
6453 if (GET_CODE (l->loc) == SYMBOL_REF
6454 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6455 && SYMBOL_REF_DECL (l->loc)
6456 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6457 {
6458 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6459 if (tree_fits_shwi_p (initial))
6460 {
6461 item = GEN_INT (tree_to_shwi (initial));
6462 item = gen_rtx_CONCAT (indmode, mem, item);
6463 call_arguments
6464 = gen_rtx_EXPR_LIST (VOIDmode, item,
6465 call_arguments);
6466 }
6467 break;
6468 }
6469 }
6470 }
6471 targetm.calls.function_arg_advance (args_so_far, mode,
6472 argtype, true);
6473 t = TREE_CHAIN (t);
6474 }
6475 }
6476
6477 /* Add debug arguments. */
6478 if (fndecl
6479 && TREE_CODE (fndecl) == FUNCTION_DECL
6480 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6481 {
6482 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6483 if (debug_args)
6484 {
6485 unsigned int ix;
6486 tree param;
6487 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6488 {
6489 rtx item;
6490 tree dtemp = (**debug_args)[ix + 1];
6491 machine_mode mode = DECL_MODE (dtemp);
6492 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6493 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6494 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6495 call_arguments);
6496 }
6497 }
6498 }
6499
6500 /* Reverse the call_arguments chain. */
6501 prev = NULL_RTX;
6502 for (cur = call_arguments; cur; cur = next)
6503 {
6504 next = XEXP (cur, 1);
6505 XEXP (cur, 1) = prev;
6506 prev = cur;
6507 }
6508 call_arguments = prev;
6509
6510 x = get_call_rtx_from (insn);
6511 if (x)
6512 {
6513 x = XEXP (XEXP (x, 0), 0);
6514 if (GET_CODE (x) == SYMBOL_REF)
6515 /* Don't record anything. */;
6516 else if (CONSTANT_P (x))
6517 {
6518 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6519 pc_rtx, x);
6520 call_arguments
6521 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6522 }
6523 else
6524 {
6525 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6526 if (val && cselib_preserved_value_p (val))
6527 {
6528 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6529 call_arguments
6530 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6531 }
6532 }
6533 }
6534 if (this_arg)
6535 {
6536 machine_mode mode
6537 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6538 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6539 HOST_WIDE_INT token
6540 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6541 if (token)
6542 clobbered = plus_constant (mode, clobbered,
6543 token * GET_MODE_SIZE (mode));
6544 clobbered = gen_rtx_MEM (mode, clobbered);
6545 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6546 call_arguments
6547 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6548 }
6549 }
6550
6551 /* Callback for cselib_record_sets_hook, which records the uses and
6552 stores in an insn as micro operations, after cselib_record_sets has
6553 analyzed the sets in the insn but before it modifies the stored
6554 values in its internal tables. It may also be called without going
6555 through cselib_record_sets (when we're not doing cselib in the
6556 first place), in which case SETS and N_SETS will be 0. */
6557
6558 static void
6559 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6560 {
6561 basic_block bb = BLOCK_FOR_INSN (insn);
6562 int n1, n2;
6563 struct count_use_info cui;
6564 micro_operation *mos;
6565
6566 cselib_hook_called = true;
6567
6568 cui.insn = insn;
6569 cui.bb = bb;
6570 cui.sets = sets;
6571 cui.n_sets = n_sets;
6572
6573 n1 = VTI (bb)->mos.length ();
6574 cui.store_p = false;
6575 note_uses (&PATTERN (insn), add_uses_1, &cui);
6576 n2 = VTI (bb)->mos.length () - 1;
6577 mos = VTI (bb)->mos.address ();
6578
6579 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6580 MO_VAL_LOC last. */
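/* (Editor's note: each loop below is an in-place two-pointer
   partition rather than a stable sort; only the relative order of
   the micro-operation classes appears to matter here.)  */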
6581 while (n1 < n2)
6582 {
6583 while (n1 < n2 && mos[n1].type == MO_USE)
6584 n1++;
6585 while (n1 < n2 && mos[n2].type != MO_USE)
6586 n2--;
6587 if (n1 < n2)
6588 std::swap (mos[n1], mos[n2]);
6589 }
6590
6591 n2 = VTI (bb)->mos.length () - 1;
6592 while (n1 < n2)
6593 {
6594 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6595 n1++;
6596 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6597 n2--;
6598 if (n1 < n2)
6599 std::swap (mos[n1], mos[n2]);
6600 }
6601
6602 if (CALL_P (insn))
6603 {
6604 micro_operation mo;
6605
6606 mo.type = MO_CALL;
6607 mo.insn = insn;
6608 mo.u.loc = call_arguments;
6609 call_arguments = NULL_RTX;
6610
6611 if (dump_file && (dump_flags & TDF_DETAILS))
6612 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6613 VTI (bb)->mos.safe_push (mo);
6614 }
6615
6616 n1 = VTI (bb)->mos.length ();
6617 /* This will record NEXT_INSN (insn), such that we can
6618 insert notes before it without worrying about any
6619 notes that MO_USEs might emit after the insn. */
6620 cui.store_p = true;
6621 note_stores (PATTERN (insn), add_stores, &cui);
6622 n2 = VTI (bb)->mos.length () - 1;
6623 mos = VTI (bb)->mos.address ();
6624
6625 /* Order the MO_VAL_USEs first (note_stores does nothing
6626 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6627 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6628 while (n1 < n2)
6629 {
6630 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6631 n1++;
6632 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6633 n2--;
6634 if (n1 < n2)
6635 std::swap (mos[n1], mos[n2]);
6636 }
6637
6638 n2 = VTI (bb)->mos.length () - 1;
6639 while (n1 < n2)
6640 {
6641 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6642 n1++;
6643 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6644 n2--;
6645 if (n1 < n2)
6646 std::swap (mos[n1], mos[n2]);
6647 }
6648 }
6649
6650 static enum var_init_status
6651 find_src_status (dataflow_set *in, rtx src)
6652 {
6653 tree decl = NULL_TREE;
6654 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6655
6656 if (! flag_var_tracking_uninit)
6657 status = VAR_INIT_STATUS_INITIALIZED;
6658
6659 if (src && REG_P (src))
6660 decl = var_debug_decl (REG_EXPR (src));
6661 else if (src && MEM_P (src))
6662 decl = var_debug_decl (MEM_EXPR (src));
6663
6664 if (src && decl)
6665 status = get_init_value (in, src, dv_from_decl (decl));
6666
6667 return status;
6668 }
6669
6670 /* SRC is the source of an assignment. Use SET to try to find what
6671 was ultimately assigned to SRC. Return that value if known,
6672 otherwise return NULL_RTX. */
6673
6674 static rtx
6675 find_src_set_src (dataflow_set *set, rtx src)
6676 {
6677 tree decl = NULL_TREE; /* The variable being copied around. */
6678 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6679 variable *var;
6680 location_chain *nextp;
6681 int i;
6682 bool found;
6683
6684 if (src && REG_P (src))
6685 decl = var_debug_decl (REG_EXPR (src));
6686 else if (src && MEM_P (src))
6687 decl = var_debug_decl (MEM_EXPR (src));
6688
6689 if (src && decl)
6690 {
6691 decl_or_value dv = dv_from_decl (decl);
6692
6693 var = shared_hash_find (set->vars, dv);
6694 if (var)
6695 {
6696 found = false;
6697 for (i = 0; i < var->n_var_parts && !found; i++)
6698 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6699 nextp = nextp->next)
6700 if (rtx_equal_p (nextp->loc, src))
6701 {
6702 set_src = nextp->set_src;
6703 found = true;
6704 }
6705
6706 }
6707 }
6708
6709 return set_src;
6710 }
6711
6712 /* Compute the changes of variable locations in the basic block BB. */
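/* (Editor's note: this is the transfer function of the dataflow
   problem; OUT is recomputed from IN by replaying the block's micro
   operations, and the boolean result reports whether OUT changed,
   which drives the worklist iteration in vt_find_locations below.)  */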
6713
6714 static bool
6715 compute_bb_dataflow (basic_block bb)
6716 {
6717 unsigned int i;
6718 micro_operation *mo;
6719 bool changed;
6720 dataflow_set old_out;
6721 dataflow_set *in = &VTI (bb)->in;
6722 dataflow_set *out = &VTI (bb)->out;
6723
6724 dataflow_set_init (&old_out);
6725 dataflow_set_copy (&old_out, out);
6726 dataflow_set_copy (out, in);
6727
6728 if (MAY_HAVE_DEBUG_BIND_INSNS)
6729 local_get_addr_cache = new hash_map<rtx, rtx>;
6730
6731 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6732 {
6733 rtx_insn *insn = mo->insn;
6734
6735 switch (mo->type)
6736 {
6737 case MO_CALL:
6738 dataflow_set_clear_at_call (out, insn);
6739 break;
6740
6741 case MO_USE:
6742 {
6743 rtx loc = mo->u.loc;
6744
6745 if (REG_P (loc))
6746 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6747 else if (MEM_P (loc))
6748 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6749 }
6750 break;
6751
6752 case MO_VAL_LOC:
6753 {
6754 rtx loc = mo->u.loc;
6755 rtx val, vloc;
6756 tree var;
6757
6758 if (GET_CODE (loc) == CONCAT)
6759 {
6760 val = XEXP (loc, 0);
6761 vloc = XEXP (loc, 1);
6762 }
6763 else
6764 {
6765 val = NULL_RTX;
6766 vloc = loc;
6767 }
6768
6769 var = PAT_VAR_LOCATION_DECL (vloc);
6770
6771 clobber_variable_part (out, NULL_RTX,
6772 dv_from_decl (var), 0, NULL_RTX);
6773 if (val)
6774 {
6775 if (VAL_NEEDS_RESOLUTION (loc))
6776 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6777 set_variable_part (out, val, dv_from_decl (var), 0,
6778 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6779 INSERT);
6780 }
6781 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6782 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6783 dv_from_decl (var), 0,
6784 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6785 INSERT);
6786 }
6787 break;
6788
6789 case MO_VAL_USE:
6790 {
6791 rtx loc = mo->u.loc;
6792 rtx val, vloc, uloc;
6793
6794 vloc = uloc = XEXP (loc, 1);
6795 val = XEXP (loc, 0);
6796
6797 if (GET_CODE (val) == CONCAT)
6798 {
6799 uloc = XEXP (val, 1);
6800 val = XEXP (val, 0);
6801 }
6802
6803 if (VAL_NEEDS_RESOLUTION (loc))
6804 val_resolve (out, val, vloc, insn);
6805 else
6806 val_store (out, val, uloc, insn, false);
6807
6808 if (VAL_HOLDS_TRACK_EXPR (loc))
6809 {
6810 if (GET_CODE (uloc) == REG)
6811 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6812 NULL);
6813 else if (GET_CODE (uloc) == MEM)
6814 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6815 NULL);
6816 }
6817 }
6818 break;
6819
6820 case MO_VAL_SET:
6821 {
6822 rtx loc = mo->u.loc;
6823 rtx val, vloc, uloc;
6824 rtx dstv, srcv;
6825
6826 vloc = loc;
6827 uloc = XEXP (vloc, 1);
6828 val = XEXP (vloc, 0);
6829 vloc = uloc;
6830
6831 if (GET_CODE (uloc) == SET)
6832 {
6833 dstv = SET_DEST (uloc);
6834 srcv = SET_SRC (uloc);
6835 }
6836 else
6837 {
6838 dstv = uloc;
6839 srcv = NULL;
6840 }
6841
6842 if (GET_CODE (val) == CONCAT)
6843 {
6844 dstv = vloc = XEXP (val, 1);
6845 val = XEXP (val, 0);
6846 }
6847
6848 if (GET_CODE (vloc) == SET)
6849 {
6850 srcv = SET_SRC (vloc);
6851
6852 gcc_assert (val != srcv);
6853 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6854
6855 dstv = vloc = SET_DEST (vloc);
6856
6857 if (VAL_NEEDS_RESOLUTION (loc))
6858 val_resolve (out, val, srcv, insn);
6859 }
6860 else if (VAL_NEEDS_RESOLUTION (loc))
6861 {
6862 gcc_assert (GET_CODE (uloc) == SET
6863 && GET_CODE (SET_SRC (uloc)) == REG);
6864 val_resolve (out, val, SET_SRC (uloc), insn);
6865 }
6866
6867 if (VAL_HOLDS_TRACK_EXPR (loc))
6868 {
6869 if (VAL_EXPR_IS_CLOBBERED (loc))
6870 {
6871 if (REG_P (uloc))
6872 var_reg_delete (out, uloc, true);
6873 else if (MEM_P (uloc))
6874 {
6875 gcc_assert (MEM_P (dstv));
6876 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6877 var_mem_delete (out, dstv, true);
6878 }
6879 }
6880 else
6881 {
6882 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6883 rtx src = NULL, dst = uloc;
6884 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6885
6886 if (GET_CODE (uloc) == SET)
6887 {
6888 src = SET_SRC (uloc);
6889 dst = SET_DEST (uloc);
6890 }
6891
6892 if (copied_p)
6893 {
6894 if (flag_var_tracking_uninit)
6895 {
6896 status = find_src_status (in, src);
6897
6898 if (status == VAR_INIT_STATUS_UNKNOWN)
6899 status = find_src_status (out, src);
6900 }
6901
6902 src = find_src_set_src (in, src);
6903 }
6904
6905 if (REG_P (dst))
6906 var_reg_delete_and_set (out, dst, !copied_p,
6907 status, srcv);
6908 else if (MEM_P (dst))
6909 {
6910 gcc_assert (MEM_P (dstv));
6911 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6912 var_mem_delete_and_set (out, dstv, !copied_p,
6913 status, srcv);
6914 }
6915 }
6916 }
6917 else if (REG_P (uloc))
6918 var_regno_delete (out, REGNO (uloc));
6919 else if (MEM_P (uloc))
6920 {
6921 gcc_checking_assert (GET_CODE (vloc) == MEM);
6922 gcc_checking_assert (dstv == vloc);
6923 if (dstv != vloc)
6924 clobber_overlapping_mems (out, vloc);
6925 }
6926
6927 val_store (out, val, dstv, insn, true);
6928 }
6929 break;
6930
6931 case MO_SET:
6932 {
6933 rtx loc = mo->u.loc;
6934 rtx set_src = NULL;
6935
6936 if (GET_CODE (loc) == SET)
6937 {
6938 set_src = SET_SRC (loc);
6939 loc = SET_DEST (loc);
6940 }
6941
6942 if (REG_P (loc))
6943 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6944 set_src);
6945 else if (MEM_P (loc))
6946 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6947 set_src);
6948 }
6949 break;
6950
6951 case MO_COPY:
6952 {
6953 rtx loc = mo->u.loc;
6954 enum var_init_status src_status;
6955 rtx set_src = NULL;
6956
6957 if (GET_CODE (loc) == SET)
6958 {
6959 set_src = SET_SRC (loc);
6960 loc = SET_DEST (loc);
6961 }
6962
6963 if (! flag_var_tracking_uninit)
6964 src_status = VAR_INIT_STATUS_INITIALIZED;
6965 else
6966 {
6967 src_status = find_src_status (in, set_src);
6968
6969 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6970 src_status = find_src_status (out, set_src);
6971 }
6972
6973 set_src = find_src_set_src (in, set_src);
6974
6975 if (REG_P (loc))
6976 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6977 else if (MEM_P (loc))
6978 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6979 }
6980 break;
6981
6982 case MO_USE_NO_VAR:
6983 {
6984 rtx loc = mo->u.loc;
6985
6986 if (REG_P (loc))
6987 var_reg_delete (out, loc, false);
6988 else if (MEM_P (loc))
6989 var_mem_delete (out, loc, false);
6990 }
6991 break;
6992
6993 case MO_CLOBBER:
6994 {
6995 rtx loc = mo->u.loc;
6996
6997 if (REG_P (loc))
6998 var_reg_delete (out, loc, true);
6999 else if (MEM_P (loc))
7000 var_mem_delete (out, loc, true);
7001 }
7002 break;
7003
7004 case MO_ADJUST:
7005 out->stack_adjust += mo->u.adjust;
7006 break;
7007 }
7008 }
7009
7010 if (MAY_HAVE_DEBUG_BIND_INSNS)
7011 {
7012 delete local_get_addr_cache;
7013 local_get_addr_cache = NULL;
7014
7015 dataflow_set_equiv_regs (out);
7016 shared_hash_htab (out->vars)
7017 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
7018 shared_hash_htab (out->vars)
7019 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7020 if (flag_checking)
7021 shared_hash_htab (out->vars)
7022 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7023 }
7024 changed = dataflow_set_different (&old_out, out);
7025 dataflow_set_destroy (&old_out);
7026 return changed;
7027 }
7028
7029 /* Find the locations of variables in the whole function. */
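/* (Editor's sketch of the scheme below: blocks are kept in two
   priority queues ordered by reverse completion order; when a
   block's OUT set changes, each successor is re-queued into the
   current round if it has not yet been visited in this round,
   otherwise into the next round, until a fixed point is reached or
   the size limit is exceeded.)  */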
7030
7031 static bool
7032 vt_find_locations (void)
7033 {
7034 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7035 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7036 sbitmap in_worklist, in_pending;
7037 basic_block bb;
7038 edge e;
7039 int *bb_order;
7040 int *rc_order;
7041 int i;
7042 int htabsz = 0;
7043 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7044 bool success = true;
7045
7046 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7047 /* Compute the reverse completion order of a depth-first search of
7048 the CFG so that the dataflow analysis runs faster. */
7049 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7050 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7051 pre_and_rev_post_order_compute (NULL, rc_order, false);
7052 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7053 bb_order[rc_order[i]] = i;
7054 free (rc_order);
7055
7056 auto_sbitmap visited (last_basic_block_for_fn (cfun));
7057 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7058 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7059 bitmap_clear (in_worklist);
7060
7061 FOR_EACH_BB_FN (bb, cfun)
7062 pending->insert (bb_order[bb->index], bb);
7063 bitmap_ones (in_pending);
7064
7065 while (success && !pending->empty ())
7066 {
7067 std::swap (worklist, pending);
7068 std::swap (in_worklist, in_pending);
7069
7070 bitmap_clear (visited);
7071
7072 while (!worklist->empty ())
7073 {
7074 bb = worklist->extract_min ();
7075 bitmap_clear_bit (in_worklist, bb->index);
7076 gcc_assert (!bitmap_bit_p (visited, bb->index));
7077 if (!bitmap_bit_p (visited, bb->index))
7078 {
7079 bool changed;
7080 edge_iterator ei;
7081 int oldinsz, oldoutsz;
7082
7083 bitmap_set_bit (visited, bb->index);
7084
7085 if (VTI (bb)->in.vars)
7086 {
7087 htabsz
7088 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7089 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7090 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7091 oldoutsz
7092 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7093 }
7094 else
7095 oldinsz = oldoutsz = 0;
7096
7097 if (MAY_HAVE_DEBUG_BIND_INSNS)
7098 {
7099 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7100 bool first = true, adjust = false;
7101
7102 /* Calculate the IN set as the intersection of
7103 predecessor OUT sets. */
7104
7105 dataflow_set_clear (in);
7106 dst_can_be_shared = true;
7107
7108 FOR_EACH_EDGE (e, ei, bb->preds)
7109 if (!VTI (e->src)->flooded)
7110 gcc_assert (bb_order[bb->index]
7111 <= bb_order[e->src->index]);
7112 else if (first)
7113 {
7114 dataflow_set_copy (in, &VTI (e->src)->out);
7115 first_out = &VTI (e->src)->out;
7116 first = false;
7117 }
7118 else
7119 {
7120 dataflow_set_merge (in, &VTI (e->src)->out);
7121 adjust = true;
7122 }
7123
7124 if (adjust)
7125 {
7126 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7127
7128 if (flag_checking)
7129 /* Merge and merge_adjust should keep entries in
7130 canonical order. */
7131 shared_hash_htab (in->vars)
7132 ->traverse <dataflow_set *,
7133 canonicalize_loc_order_check> (in);
7134
7135 if (dst_can_be_shared)
7136 {
7137 shared_hash_destroy (in->vars);
7138 in->vars = shared_hash_copy (first_out->vars);
7139 }
7140 }
7141
7142 VTI (bb)->flooded = true;
7143 }
7144 else
7145 {
7146 /* Calculate the IN set as the union of predecessor OUT sets. */
7147 dataflow_set_clear (&VTI (bb)->in);
7148 FOR_EACH_EDGE (e, ei, bb->preds)
7149 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7150 }
7151
7152 changed = compute_bb_dataflow (bb);
7153 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7154 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7155
7156 if (htabmax && htabsz > htabmax)
7157 {
7158 if (MAY_HAVE_DEBUG_BIND_INSNS)
7159 inform (DECL_SOURCE_LOCATION (cfun->decl),
7160 "variable tracking size limit exceeded with "
7161 "-fvar-tracking-assignments, retrying without");
7162 else
7163 inform (DECL_SOURCE_LOCATION (cfun->decl),
7164 "variable tracking size limit exceeded");
7165 success = false;
7166 break;
7167 }
7168
7169 if (changed)
7170 {
7171 FOR_EACH_EDGE (e, ei, bb->succs)
7172 {
7173 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7174 continue;
7175
7176 if (bitmap_bit_p (visited, e->dest->index))
7177 {
7178 if (!bitmap_bit_p (in_pending, e->dest->index))
7179 {
7180 /* Send E->DEST to next round. */
7181 bitmap_set_bit (in_pending, e->dest->index);
7182 pending->insert (bb_order[e->dest->index],
7183 e->dest);
7184 }
7185 }
7186 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7187 {
7188 /* Add E->DEST to current round. */
7189 bitmap_set_bit (in_worklist, e->dest->index);
7190 worklist->insert (bb_order[e->dest->index],
7191 e->dest);
7192 }
7193 }
7194 }
7195
7196 if (dump_file)
7197 fprintf (dump_file,
7198 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7199 bb->index,
7200 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7201 oldinsz,
7202 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7203 oldoutsz,
7204 (int)worklist->nodes (), (int)pending->nodes (),
7205 htabsz);
7206
7207 if (dump_file && (dump_flags & TDF_DETAILS))
7208 {
7209 fprintf (dump_file, "BB %i IN:\n", bb->index);
7210 dump_dataflow_set (&VTI (bb)->in);
7211 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7212 dump_dataflow_set (&VTI (bb)->out);
7213 }
7214 }
7215 }
7216 }
7217
7218 if (success && MAY_HAVE_DEBUG_BIND_INSNS)
7219 FOR_EACH_BB_FN (bb, cfun)
7220 gcc_assert (VTI (bb)->flooded);
7221
7222 free (bb_order);
7223 delete worklist;
7224 delete pending;
7225 sbitmap_free (in_worklist);
7226 sbitmap_free (in_pending);
7227
7228 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7229 return success;
7230 }
7231
7232 /* Print the contents of LIST to the dump file. */
7233
7234 static void
7235 dump_attrs_list (attrs *list)
7236 {
7237 for (; list; list = list->next)
7238 {
7239 if (dv_is_decl_p (list->dv))
7240 print_mem_expr (dump_file, dv_as_decl (list->dv));
7241 else
7242 print_rtl_single (dump_file, dv_as_value (list->dv));
7243 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7244 }
7245 fprintf (dump_file, "\n");
7246 }
7247
7248 /* Print the information about variable *SLOT to the dump file. */
7249
7250 int
7251 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7252 {
7253 variable *var = *slot;
7254
7255 dump_var (var);
7256
7257 /* Continue traversing the hash table. */
7258 return 1;
7259 }
7260
7261 /* Print the information about variable VAR to the dump file. */
7262
7263 static void
7264 dump_var (variable *var)
7265 {
7266 int i;
7267 location_chain *node;
7268
7269 if (dv_is_decl_p (var->dv))
7270 {
7271 const_tree decl = dv_as_decl (var->dv);
7272
7273 if (DECL_NAME (decl))
7274 {
7275 fprintf (dump_file, " name: %s",
7276 IDENTIFIER_POINTER (DECL_NAME (decl)));
7277 if (dump_flags & TDF_UID)
7278 fprintf (dump_file, "D.%u", DECL_UID (decl));
7279 }
7280 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7281 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7282 else
7283 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7284 fprintf (dump_file, "\n");
7285 }
7286 else
7287 {
7288 fputc (' ', dump_file);
7289 print_rtl_single (dump_file, dv_as_value (var->dv));
7290 }
7291
7292 for (i = 0; i < var->n_var_parts; i++)
7293 {
7294 fprintf (dump_file, " offset %ld\n",
7295 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7296 for (node = var->var_part[i].loc_chain; node; node = node->next)
7297 {
7298 fprintf (dump_file, " ");
7299 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7300 fprintf (dump_file, "[uninit]");
7301 print_rtl_single (dump_file, node->loc);
7302 }
7303 }
7304 }
7305
7306 /* Print the information about variables from hash table VARS to the dump file. */
7307
7308 static void
7309 dump_vars (variable_table_type *vars)
7310 {
7311 if (vars->elements () > 0)
7312 {
7313 fprintf (dump_file, "Variables:\n");
7314 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7315 }
7316 }
7317
7318 /* Print the dataflow set SET to the dump file. */
7319
7320 static void
7321 dump_dataflow_set (dataflow_set *set)
7322 {
7323 int i;
7324
7325 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7326 set->stack_adjust);
7327 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7328 {
7329 if (set->regs[i])
7330 {
7331 fprintf (dump_file, "Reg %d:", i);
7332 dump_attrs_list (set->regs[i]);
7333 }
7334 }
7335 dump_vars (shared_hash_htab (set->vars));
7336 fprintf (dump_file, "\n");
7337 }
7338
7339 /* Print the IN and OUT sets for each basic block to the dump file. */
7340
7341 static void
7342 dump_dataflow_sets (void)
7343 {
7344 basic_block bb;
7345
7346 FOR_EACH_BB_FN (bb, cfun)
7347 {
7348 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7349 fprintf (dump_file, "IN:\n");
7350 dump_dataflow_set (&VTI (bb)->in);
7351 fprintf (dump_file, "OUT:\n");
7352 dump_dataflow_set (&VTI (bb)->out);
7353 }
7354 }
7355
7356 /* Return the variable for DV in dropped_values, inserting one if
7357 requested with INSERT. */
7358
7359 static inline variable *
7360 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7361 {
7362 variable **slot;
7363 variable *empty_var;
7364 onepart_enum onepart;
7365
7366 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7367
7368 if (!slot)
7369 return NULL;
7370
7371 if (*slot)
7372 return *slot;
7373
7374 gcc_checking_assert (insert == INSERT);
7375
7376 onepart = dv_onepart_p (dv);
7377
7378 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7379
7380 empty_var = onepart_pool_allocate (onepart);
7381 empty_var->dv = dv;
7382 empty_var->refcount = 1;
7383 empty_var->n_var_parts = 0;
7384 empty_var->onepart = onepart;
7385 empty_var->in_changed_variables = false;
7386 empty_var->var_part[0].loc_chain = NULL;
7387 empty_var->var_part[0].cur_loc = NULL;
7388 VAR_LOC_1PAUX (empty_var) = NULL;
7389 set_dv_changed (dv, true);
7390
7391 *slot = empty_var;
7392
7393 return empty_var;
7394 }
7395
7396 /* Recover the one-part aux from dropped_values. */
7397
7398 static struct onepart_aux *
7399 recover_dropped_1paux (variable *var)
7400 {
7401 variable *dvar;
7402
7403 gcc_checking_assert (var->onepart);
7404
7405 if (VAR_LOC_1PAUX (var))
7406 return VAR_LOC_1PAUX (var);
7407
7408 if (var->onepart == ONEPART_VDECL)
7409 return NULL;
7410
7411 dvar = variable_from_dropped (var->dv, NO_INSERT);
7412
7413 if (!dvar)
7414 return NULL;
7415
7416 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7417 VAR_LOC_1PAUX (dvar) = NULL;
7418
7419 return VAR_LOC_1PAUX (var);
7420 }
7421
7422 /* Add variable VAR to the hash table of changed variables and,
7423 if it has no locations, delete it from SET's hash table. */
7424
7425 static void
7426 variable_was_changed (variable *var, dataflow_set *set)
7427 {
7428 hashval_t hash = dv_htab_hash (var->dv);
7429
7430 if (emit_notes)
7431 {
7432 variable **slot;
7433
7434 /* Remember this decl or VALUE has been added to changed_variables. */
7435 set_dv_changed (var->dv, true);
7436
7437 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7438
7439 if (*slot)
7440 {
7441 variable *old_var = *slot;
7442 gcc_assert (old_var->in_changed_variables);
7443 old_var->in_changed_variables = false;
7444 if (var != old_var && var->onepart)
7445 {
7446 /* Restore the auxiliary info from an empty variable
7447 previously created for changed_variables, so it is
7448 not lost. */
7449 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7450 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7451 VAR_LOC_1PAUX (old_var) = NULL;
7452 }
7453 variable_htab_free (*slot);
7454 }
7455
7456 if (set && var->n_var_parts == 0)
7457 {
7458 onepart_enum onepart = var->onepart;
7459 variable *empty_var = NULL;
7460 variable **dslot = NULL;
7461
7462 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7463 {
7464 dslot = dropped_values->find_slot_with_hash (var->dv,
7465 dv_htab_hash (var->dv),
7466 INSERT);
7467 empty_var = *dslot;
7468
7469 if (empty_var)
7470 {
7471 gcc_checking_assert (!empty_var->in_changed_variables);
7472 if (!VAR_LOC_1PAUX (var))
7473 {
7474 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7475 VAR_LOC_1PAUX (empty_var) = NULL;
7476 }
7477 else
7478 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7479 }
7480 }
7481
7482 if (!empty_var)
7483 {
7484 empty_var = onepart_pool_allocate (onepart);
7485 empty_var->dv = var->dv;
7486 empty_var->refcount = 1;
7487 empty_var->n_var_parts = 0;
7488 empty_var->onepart = onepart;
7489 if (dslot)
7490 {
7491 empty_var->refcount++;
7492 *dslot = empty_var;
7493 }
7494 }
7495 else
7496 empty_var->refcount++;
7497 empty_var->in_changed_variables = true;
7498 *slot = empty_var;
7499 if (onepart)
7500 {
7501 empty_var->var_part[0].loc_chain = NULL;
7502 empty_var->var_part[0].cur_loc = NULL;
7503 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7504 VAR_LOC_1PAUX (var) = NULL;
7505 }
7506 goto drop_var;
7507 }
7508 else
7509 {
7510 if (var->onepart && !VAR_LOC_1PAUX (var))
7511 recover_dropped_1paux (var);
7512 var->refcount++;
7513 var->in_changed_variables = true;
7514 *slot = var;
7515 }
7516 }
7517 else
7518 {
7519 gcc_assert (set);
7520 if (var->n_var_parts == 0)
7521 {
7522 variable **slot;
7523
7524 drop_var:
7525 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7526 if (slot)
7527 {
7528 if (shared_hash_shared (set->vars))
7529 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7530 NO_INSERT);
7531 shared_hash_htab (set->vars)->clear_slot (slot);
7532 }
7533 }
7534 }
7535 }
7536
7537 /* Look for the index in VAR->var_part corresponding to OFFSET.
7538 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7539 referenced int will be set to the index the part has, or to the
7540 index at which it would have to be inserted. */
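/* (Editor's example: for a non-onepart variable with parts at
   offsets {0, 4, 12}, looking up offset 8 returns -1 and sets
   *INSERTION_POINT to 2, the index at which a part with offset 8
   would have to be inserted.)  */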
7541
7542 static inline int
7543 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7544 int *insertion_point)
7545 {
7546 int pos, low, high;
7547
7548 if (var->onepart)
7549 {
7550 if (offset != 0)
7551 return -1;
7552
7553 if (insertion_point)
7554 *insertion_point = 0;
7555
7556 return var->n_var_parts - 1;
7557 }
7558
7559 /* Find the location part. */
7560 low = 0;
7561 high = var->n_var_parts;
7562 while (low != high)
7563 {
7564 pos = (low + high) / 2;
7565 if (VAR_PART_OFFSET (var, pos) < offset)
7566 low = pos + 1;
7567 else
7568 high = pos;
7569 }
7570 pos = low;
7571
7572 if (insertion_point)
7573 *insertion_point = pos;
7574
7575 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7576 return pos;
7577
7578 return -1;
7579 }
7580
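/* (Editor's summary, since this helper is otherwise undocumented:
   add location LOC for DV at OFFSET to the variable in *SLOT of
   SET's hash table, unsharing the variable first if it is shared,
   keeping onepart location chains in canonical order, and marking
   the variable as changed when its current location had not been
   emitted yet.  Returns the possibly relocated slot.)  */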
7581 static variable **
7582 set_slot_part (dataflow_set *set, rtx loc, variable **slot,
7583 decl_or_value dv, HOST_WIDE_INT offset,
7584 enum var_init_status initialized, rtx set_src)
7585 {
7586 int pos;
7587 location_chain *node, *next;
7588 location_chain **nextp;
7589 variable *var;
7590 onepart_enum onepart;
7591
7592 var = *slot;
7593
7594 if (var)
7595 onepart = var->onepart;
7596 else
7597 onepart = dv_onepart_p (dv);
7598
7599 gcc_checking_assert (offset == 0 || !onepart);
7600 gcc_checking_assert (loc != dv_as_opaque (dv));
7601
7602 if (! flag_var_tracking_uninit)
7603 initialized = VAR_INIT_STATUS_INITIALIZED;
7604
7605 if (!var)
7606 {
7607 /* Create new variable information. */
7608 var = onepart_pool_allocate (onepart);
7609 var->dv = dv;
7610 var->refcount = 1;
7611 var->n_var_parts = 1;
7612 var->onepart = onepart;
7613 var->in_changed_variables = false;
7614 if (var->onepart)
7615 VAR_LOC_1PAUX (var) = NULL;
7616 else
7617 VAR_PART_OFFSET (var, 0) = offset;
7618 var->var_part[0].loc_chain = NULL;
7619 var->var_part[0].cur_loc = NULL;
7620 *slot = var;
7621 pos = 0;
7622 nextp = &var->var_part[0].loc_chain;
7623 }
7624 else if (onepart)
7625 {
7626 int r = -1, c = 0;
7627
7628 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7629
7630 pos = 0;
7631
7632 if (GET_CODE (loc) == VALUE)
7633 {
7634 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7635 nextp = &node->next)
7636 if (GET_CODE (node->loc) == VALUE)
7637 {
7638 if (node->loc == loc)
7639 {
7640 r = 0;
7641 break;
7642 }
7643 if (canon_value_cmp (node->loc, loc))
7644 c++;
7645 else
7646 {
7647 r = 1;
7648 break;
7649 }
7650 }
7651 else if (REG_P (node->loc) || MEM_P (node->loc))
7652 c++;
7653 else
7654 {
7655 r = 1;
7656 break;
7657 }
7658 }
7659 else if (REG_P (loc))
7660 {
7661 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7662 nextp = &node->next)
7663 if (REG_P (node->loc))
7664 {
7665 if (REGNO (node->loc) < REGNO (loc))
7666 c++;
7667 else
7668 {
7669 if (REGNO (node->loc) == REGNO (loc))
7670 r = 0;
7671 else
7672 r = 1;
7673 break;
7674 }
7675 }
7676 else
7677 {
7678 r = 1;
7679 break;
7680 }
7681 }
7682 else if (MEM_P (loc))
7683 {
7684 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7685 nextp = &node->next)
7686 if (REG_P (node->loc))
7687 c++;
7688 else if (MEM_P (node->loc))
7689 {
7690 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7691 break;
7692 else
7693 c++;
7694 }
7695 else
7696 {
7697 r = 1;
7698 break;
7699 }
7700 }
7701 else
7702 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7703 nextp = &node->next)
7704 if ((r = loc_cmp (node->loc, loc)) >= 0)
7705 break;
7706 else
7707 c++;
7708
7709 if (r == 0)
7710 return slot;
7711
7712 if (shared_var_p (var, set->vars))
7713 {
7714 slot = unshare_variable (set, slot, var, initialized);
7715 var = *slot;
7716 for (nextp = &var->var_part[0].loc_chain; c;
7717 nextp = &(*nextp)->next)
7718 c--;
7719 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7720 }
7721 }
7722 else
7723 {
7724 int inspos = 0;
7725
7726 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7727
7728 pos = find_variable_location_part (var, offset, &inspos);
7729
7730 if (pos >= 0)
7731 {
7732 node = var->var_part[pos].loc_chain;
7733
7734 if (node
7735 && ((REG_P (node->loc) && REG_P (loc)
7736 && REGNO (node->loc) == REGNO (loc))
7737 || rtx_equal_p (node->loc, loc)))
7738 {
7739 /* LOC is at the beginning of the chain, so we have nothing
7740 to do. */
7741 if (node->init < initialized)
7742 node->init = initialized;
7743 if (set_src != NULL)
7744 node->set_src = set_src;
7745
7746 return slot;
7747 }
7748 else
7749 {
7750 /* We have to make a copy of a shared variable. */
7751 if (shared_var_p (var, set->vars))
7752 {
7753 slot = unshare_variable (set, slot, var, initialized);
7754 var = *slot;
7755 }
7756 }
7757 }
7758 else
7759 {
7760 /* We have not found the location part; a new one will be created. */
7761
7762 /* We have to make a copy of the shared variable. */
7763 if (shared_var_p (var, set->vars))
7764 {
7765 slot = unshare_variable (set, slot, var, initialized);
7766 var = *slot;
7767 }
7768
7769 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7770 thus there are at most MAX_VAR_PARTS different offsets. */
7771 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7772 && (!var->n_var_parts || !onepart));
7773
7774 /* We have to move the elements of the array starting at index
7775 INSPOS to the next position. */
7776 for (pos = var->n_var_parts; pos > inspos; pos--)
7777 var->var_part[pos] = var->var_part[pos - 1];
7778
7779 var->n_var_parts++;
7780 gcc_checking_assert (!onepart);
7781 VAR_PART_OFFSET (var, pos) = offset;
7782 var->var_part[pos].loc_chain = NULL;
7783 var->var_part[pos].cur_loc = NULL;
7784 }
7785
7786 /* Delete the location from the list. */
7787 nextp = &var->var_part[pos].loc_chain;
7788 for (node = var->var_part[pos].loc_chain; node; node = next)
7789 {
7790 next = node->next;
7791 if ((REG_P (node->loc) && REG_P (loc)
7792 && REGNO (node->loc) == REGNO (loc))
7793 || rtx_equal_p (node->loc, loc))
7794 {
7795 /* Save these values, to assign to the new node, before
7796 deleting this one. */
7797 if (node->init > initialized)
7798 initialized = node->init;
7799 if (node->set_src != NULL && set_src == NULL)
7800 set_src = node->set_src;
7801 if (var->var_part[pos].cur_loc == node->loc)
7802 var->var_part[pos].cur_loc = NULL;
7803 delete node;
7804 *nextp = next;
7805 break;
7806 }
7807 else
7808 nextp = &node->next;
7809 }
7810
7811 nextp = &var->var_part[pos].loc_chain;
7812 }
7813
7814 /* Add the location to the beginning. */
7815 node = new location_chain;
7816 node->loc = loc;
7817 node->init = initialized;
7818 node->set_src = set_src;
7819 node->next = *nextp;
7820 *nextp = node;
7821
7822 /* If no location was emitted for this part yet, note the change so one will be. */
7823 if (var->var_part[pos].cur_loc == NULL)
7824 variable_was_changed (var, set);
7825
7826 return slot;
7827 }
7828
7829 /* Set the part of variable's location in the dataflow set SET. The
7830 variable part is specified by variable's declaration in DV and
7831 offset OFFSET and the part's location by LOC. IOPT should be
7832 NO_INSERT if the variable is known to be in SET already and the
7833 variable hash table must not be resized, and INSERT otherwise. */
7834
7835 static void
7836 set_variable_part (dataflow_set *set, rtx loc,
7837 decl_or_value dv, HOST_WIDE_INT offset,
7838 enum var_init_status initialized, rtx set_src,
7839 enum insert_option iopt)
7840 {
7841 variable **slot;
7842
7843 if (iopt == NO_INSERT)
7844 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7845 else
7846 {
7847 slot = shared_hash_find_slot (set->vars, dv);
7848 if (!slot)
7849 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7850 }
7851 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7852 }
7853
7854 /* Remove all recorded register locations for the given variable part
7855 from dataflow set SET, except for those that are identical to LOC.
7856 The variable part is specified by variable's declaration or value
7857 DV and offset OFFSET. */
7858
7859 static variable **
7860 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
7861 HOST_WIDE_INT offset, rtx set_src)
7862 {
7863 variable *var = *slot;
7864 int pos = find_variable_location_part (var, offset, NULL);
7865
7866 if (pos >= 0)
7867 {
7868 location_chain *node, *next;
7869
7870 /* Remove the register locations from the dataflow set. */
7871 next = var->var_part[pos].loc_chain;
7872 for (node = next; node; node = next)
7873 {
7874 next = node->next;
7875 if (node->loc != loc
7876 && (!flag_var_tracking_uninit
7877 || !set_src
7878 || MEM_P (set_src)
7879 || !rtx_equal_p (set_src, node->set_src)))
7880 {
7881 if (REG_P (node->loc))
7882 {
7883 attrs *anode, *anext;
7884 attrs **anextp;
7885
7886 /* Remove the variable part from the register's
7887 list, but preserve any other variable parts
7888 that might be regarded as live in that same
7889 register. */
7890 anextp = &set->regs[REGNO (node->loc)];
7891 for (anode = *anextp; anode; anode = anext)
7892 {
7893 anext = anode->next;
7894 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7895 && anode->offset == offset)
7896 {
7897 delete anode;
7898 *anextp = anext;
7899 }
7900 else
7901 anextp = &anode->next;
7902 }
7903 }
7904
7905 slot = delete_slot_part (set, node->loc, slot, offset);
7906 }
7907 }
7908 }
7909
7910 return slot;
7911 }
7912
7913 /* Remove all recorded register locations for the given variable part
7914 from dataflow set SET, except for those that are identical to LOC.
7915 The variable part is specified by variable's declaration or value
7916 DV and offset OFFSET. */
7917
7918 static void
7919 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7920 HOST_WIDE_INT offset, rtx set_src)
7921 {
7922 variable **slot;
7923
7924 if (!dv_as_opaque (dv)
7925 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7926 return;
7927
7928 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7929 if (!slot)
7930 return;
7931
7932 clobber_slot_part (set, loc, slot, offset, set_src);
7933 }
7934
7935 /* Delete the part of variable's location from dataflow set SET. The
7936 variable part is specified by its SET->vars slot SLOT and offset
7937 OFFSET and the part's location by LOC. */
7938
7939 static variable **
7940 delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
7941 HOST_WIDE_INT offset)
7942 {
7943 variable *var = *slot;
7944 int pos = find_variable_location_part (var, offset, NULL);
7945
7946 if (pos >= 0)
7947 {
7948 location_chain *node, *next;
7949 location_chain **nextp;
7950 bool changed;
7951 rtx cur_loc;
7952
7953 if (shared_var_p (var, set->vars))
7954 {
7955 /* If the variable contains the location to be deleted, we
7956 have to make a copy of the variable first. */
7957 for (node = var->var_part[pos].loc_chain; node;
7958 node = node->next)
7959 {
7960 if ((REG_P (node->loc) && REG_P (loc)
7961 && REGNO (node->loc) == REGNO (loc))
7962 || rtx_equal_p (node->loc, loc))
7963 {
7964 slot = unshare_variable (set, slot, var,
7965 VAR_INIT_STATUS_UNKNOWN);
7966 var = *slot;
7967 break;
7968 }
7969 }
7970 }
7971
7972 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7973 cur_loc = VAR_LOC_FROM (var);
7974 else
7975 cur_loc = var->var_part[pos].cur_loc;
7976
7977 /* Delete the location part. */
7978 changed = false;
7979 nextp = &var->var_part[pos].loc_chain;
7980 for (node = *nextp; node; node = next)
7981 {
7982 next = node->next;
7983 if ((REG_P (node->loc) && REG_P (loc)
7984 && REGNO (node->loc) == REGNO (loc))
7985 || rtx_equal_p (node->loc, loc))
7986 {
7987 /* If we have deleted the location which was last emitted,
7988 we have to emit a new location, so add the variable to the
7989 set of changed variables. */
7990 if (cur_loc == node->loc)
7991 {
7992 changed = true;
7993 var->var_part[pos].cur_loc = NULL;
7994 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7995 VAR_LOC_FROM (var) = NULL;
7996 }
7997 delete node;
7998 *nextp = next;
7999 break;
8000 }
8001 else
8002 nextp = &node->next;
8003 }
8004
8005 if (var->var_part[pos].loc_chain == NULL)
8006 {
8007 changed = true;
8008 var->n_var_parts--;
8009 while (pos < var->n_var_parts)
8010 {
8011 var->var_part[pos] = var->var_part[pos + 1];
8012 pos++;
8013 }
8014 }
8015 if (changed)
8016 variable_was_changed (var, set);
8017 }
8018
8019 return slot;
8020 }
8021
8022 /* Delete the part of variable's location from dataflow set SET. The
8023 variable part is specified by variable's declaration or value DV
8024 and offset OFFSET and the part's location by LOC. */
8025
8026 static void
8027 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8028 HOST_WIDE_INT offset)
8029 {
8030 variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8031 if (!slot)
8032 return;
8033
8034 delete_slot_part (set, loc, slot, offset);
8035 }
8036
8037
8038 /* Structure for passing some other parameters to function
8039 vt_expand_loc_callback. */
8040 struct expand_loc_callback_data
8041 {
8042 /* The variables and values active at this point. */
8043 variable_table_type *vars;
8044
8045 /* Stack of values and debug_exprs under expansion, and their
8046 children. */
8047 auto_vec<rtx, 4> expanding;
8048
8049 /* Stack of values and debug_exprs whose expansion hit recursion
8050 cycles. They will have VALUE_RECURSED_INTO marked when added to
8051 this list. This flag will be cleared if any of its dependencies
8052 resolves to a valid location. So, if the flag remains set at the
8053 end of the search, we know no valid location for this one can
8054 possibly exist. */
8055 auto_vec<rtx, 4> pending;
8056
8057 /* The maximum depth among the sub-expressions under expansion.
8058 Zero indicates no expansion so far. */
8059 expand_depth depth;
8060 };
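/* A sketch of how these stacks interact: expanding a VALUE V1 whose
   location is, say, (plus V2 (const_int 4)) pushes V1 and then V2
   onto EXPANDING.  If V2's locations lead back to V1, V2 is marked
   VALUE_RECURSED_INTO, pushed onto PENDING and tentatively expanded
   to NULL; if V1 later resolves through some other location, V2's
   flag is cleared and it becomes worth expanding again.  */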
8061
8062 /* Allocate the one-part auxiliary data structure for VAR, with enough
8063 room for COUNT dependencies. */
8064
8065 static void
8066 loc_exp_dep_alloc (variable *var, int count)
8067 {
8068 size_t allocsize;
8069
8070 gcc_checking_assert (var->onepart);
8071
8072 /* We can be called with COUNT == 0 to allocate the data structure
8073 without any dependencies, e.g. for the backlinks only. However,
8074 if we are specifying a COUNT, then the dependency list must have
8075 been emptied before. It would be possible to adjust pointers or
8076 force it empty here, but this is better done at an earlier point
8077 in the algorithm, so we instead leave an assertion to catch
8078 errors. */
8079 gcc_checking_assert (!count
8080 || VAR_LOC_DEP_VEC (var) == NULL
8081 || VAR_LOC_DEP_VEC (var)->is_empty ());
8082
8083 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8084 return;
8085
8086 allocsize = offsetof (struct onepart_aux, deps)
8087 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8088
8089 if (VAR_LOC_1PAUX (var))
8090 {
8091 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8092 VAR_LOC_1PAUX (var), allocsize);
8093 /* If the reallocation moves the onepaux structure, the
8094 back-pointer to BACKLINKS in the first list member will still
8095 point to its old location. Adjust it. */
8096 if (VAR_LOC_DEP_LST (var))
8097 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8098 }
8099 else
8100 {
8101 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8102 *VAR_LOC_DEP_LSTP (var) = NULL;
8103 VAR_LOC_FROM (var) = NULL;
8104 VAR_LOC_DEPTH (var).complexity = 0;
8105 VAR_LOC_DEPTH (var).entryvals = 0;
8106 }
8107 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8108 }
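/* A minimal standalone sketch (hypothetical names) of the back-pointer
   fixup done above: when a heap block holding the head of an intrusive
   list is reallocated, the first node's PPREV must be re-aimed at the
   head's new address.

     #include <stdlib.h>

     struct node { struct node *next, **pprev; };
     struct aux { struct node *list; };

     struct aux *
     grow (struct aux *a, size_t newsize)
     {
       a = (struct aux *) realloc (a, newsize);
       if (a && a->list)
         a->list->pprev = &a->list;  // point back at the moved head
       return a;
     }
*/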
8109
8110 /* Remove all entries from the vector of active dependencies of VAR,
8111 removing them from the back-links lists too. */
8112
8113 static void
8114 loc_exp_dep_clear (variable *var)
8115 {
8116 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8117 {
8118 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8119 if (led->next)
8120 led->next->pprev = led->pprev;
8121 if (led->pprev)
8122 *led->pprev = led->next;
8123 VAR_LOC_DEP_VEC (var)->pop ();
8124 }
8125 }
8126
8127 /* Insert an active dependency from VAR on X to the vector of
8128 dependencies, and add the corresponding back-link to X's list of
8129 back-links in VARS. */
8130
8131 static void
8132 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
8133 {
8134 decl_or_value dv;
8135 variable *xvar;
8136 loc_exp_dep *led;
8137
8138 dv = dv_from_rtx (x);
8139
8140 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8141 an additional lookup? */
8142 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8143
8144 if (!xvar)
8145 {
8146 xvar = variable_from_dropped (dv, NO_INSERT);
8147 gcc_checking_assert (xvar);
8148 }
8149
8150 /* No point in adding the same backlink more than once. This may
8151 arise if, say, the same value appears in two complex expressions
8152 in the same loc_list, or even more than once in a single
8153 expression. */
8154 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8155 return;
8156
8157 if (var->onepart == NOT_ONEPART)
8158 led = new loc_exp_dep;
8159 else
8160 {
8161 loc_exp_dep empty;
8162 memset (&empty, 0, sizeof (empty));
8163 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8164 led = &VAR_LOC_DEP_VEC (var)->last ();
8165 }
8166 led->dv = var->dv;
8167 led->value = x;
8168
8169 loc_exp_dep_alloc (xvar, 0);
8170 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8171 led->next = *led->pprev;
8172 if (led->next)
8173 led->next->pprev = &led->next;
8174 *led->pprev = led;
8175 }
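/* The last four assignments above are the usual head insertion into a
   list doubly linked through NEXT and a pointer-to-pointer PPREV; a
   minimal standalone sketch (hypothetical names):

     struct node { struct node *next, **pprev; };

     void
     push_front (struct node **head, struct node *n)
     {
       n->pprev = head;
       n->next = *head;
       if (n->next)
         n->next->pprev = &n->next;
       *head = n;
     }

   With this layout a node can unlink itself without knowing the list
   head: *n->pprev = n->next, fixing up n->next->pprev if non-NULL.  */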
8176
8177 /* Create active dependencies of VAR on COUNT values starting at
8178 VALUE, and corresponding back-links to the entries in VARS. Return
8179 true if we found any pending-recursion results. */
8180
8181 static bool
8182 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8183 variable_table_type *vars)
8184 {
8185 bool pending_recursion = false;
8186
8187 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8188 || VAR_LOC_DEP_VEC (var)->is_empty ());
8189
8190 /* Set up dependencies on all COUNT values starting at VALUE, as
8191 recorded by the caller's expansion loop. */
8192 loc_exp_dep_alloc (var, count);
8193
8194 while (count--)
8195 {
8196 rtx x = *value++;
8197
8198 if (!pending_recursion)
8199 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8200
8201 loc_exp_insert_dep (var, x, vars);
8202 }
8203
8204 return pending_recursion;
8205 }
8206
8207 /* Notify the back-links of IVAR that are pending recursion that we
8208 have found a non-NIL value for it, so they are cleared for another
8209 attempt to compute a current location. */
8210
8211 static void
8212 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
8213 {
8214 loc_exp_dep *led, *next;
8215
8216 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8217 {
8218 decl_or_value dv = led->dv;
8219 variable *var;
8220
8221 next = led->next;
8222
8223 if (dv_is_value_p (dv))
8224 {
8225 rtx value = dv_as_value (dv);
8226
8227 /* If we have already resolved it, leave it alone. */
8228 if (!VALUE_RECURSED_INTO (value))
8229 continue;
8230
8231 /* Check that VALUE_RECURSED_INTO, true from the test above,
8232 implies NO_LOC_P. */
8233 gcc_checking_assert (NO_LOC_P (value));
8234
8235 /* We won't notify variables that are being expanded,
8236 because their dependency list is cleared before
8237 recursing. */
8238 NO_LOC_P (value) = false;
8239 VALUE_RECURSED_INTO (value) = false;
8240
8241 gcc_checking_assert (dv_changed_p (dv));
8242 }
8243 else
8244 {
8245 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8246 if (!dv_changed_p (dv))
8247 continue;
8248 }
8249
8250 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8251
8252 if (!var)
8253 var = variable_from_dropped (dv, NO_INSERT);
8254
8255 if (var)
8256 notify_dependents_of_resolved_value (var, vars);
8257
8258 if (next)
8259 next->pprev = led->pprev;
8260 if (led->pprev)
8261 *led->pprev = next;
8262 led->next = NULL;
8263 led->pprev = NULL;
8264 }
8265 }
8266
8267 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8268 int max_depth, void *data);
8269
8270 /* Return the combined depth, when one sub-expression evaluated to
8271 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8272
8273 static inline expand_depth
8274 update_depth (expand_depth saved_depth, expand_depth best_depth)
8275 {
8276 /* If we didn't find anything, stick with what we had. */
8277 if (!best_depth.complexity)
8278 return saved_depth;
8279
8280 /* If we hadn't found anything before, use the depth of the current
8281 expression. Do NOT add one extra level, we want to compute the
8282 maximum depth among sub-expressions. We'll increment it later,
8283 if appropriate. */
8284 if (!saved_depth.complexity)
8285 return best_depth;
8286
8287 /* Combine the entryval count so that regardless of which one we
8288 return, the entryval count is accurate. */
8289 best_depth.entryvals = saved_depth.entryvals
8290 = best_depth.entryvals + saved_depth.entryvals;
8291
8292 if (saved_depth.complexity < best_depth.complexity)
8293 return best_depth;
8294 else
8295 return saved_depth;
8296 }
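/* For example, if SAVED_DEPTH is { complexity 2, entryvals 1 } and
   BEST_DEPTH is { complexity 3, entryvals 0 }, both entryval counts
   become 1 and the function returns { complexity 3, entryvals 1 }.  */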
8297
8298 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8299 DATA for the cselib expand callback. If PENDRECP is given, indicate in
8300 it whether any sub-expression couldn't be fully evaluated because
8301 it is pending recursion resolution. */
8302
8303 static inline rtx
8304 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
8305 bool *pendrecp)
8306 {
8307 struct expand_loc_callback_data *elcd
8308 = (struct expand_loc_callback_data *) data;
8309 location_chain *loc, *next;
8310 rtx result = NULL;
8311 int first_child, result_first_child, last_child;
8312 bool pending_recursion;
8313 rtx loc_from = NULL;
8314 struct elt_loc_list *cloc = NULL;
8315 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8316 int wanted_entryvals, found_entryvals = 0;
8317
8318 /* Clear all backlinks pointing at this, so that we're not notified
8319 while we're active. */
8320 loc_exp_dep_clear (var);
8321
8322 retry:
8323 if (var->onepart == ONEPART_VALUE)
8324 {
8325 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8326
8327 gcc_checking_assert (cselib_preserved_value_p (val));
8328
8329 cloc = val->locs;
8330 }
8331
8332 first_child = result_first_child = last_child
8333 = elcd->expanding.length ();
8334
8335 wanted_entryvals = found_entryvals;
8336
8337 /* Attempt to expand each available location in turn. */
8338 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8339 loc || cloc; loc = next)
8340 {
8341 result_first_child = last_child;
8342
8343 if (!loc)
8344 {
8345 loc_from = cloc->loc;
8346 next = loc;
8347 cloc = cloc->next;
8348 if (unsuitable_loc (loc_from))
8349 continue;
8350 }
8351 else
8352 {
8353 loc_from = loc->loc;
8354 next = loc->next;
8355 }
8356
8357 gcc_checking_assert (!unsuitable_loc (loc_from));
8358
8359 elcd->depth.complexity = elcd->depth.entryvals = 0;
8360 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8361 vt_expand_loc_callback, data);
8362 last_child = elcd->expanding.length ();
8363
8364 if (result)
8365 {
8366 depth = elcd->depth;
8367
8368 gcc_checking_assert (depth.complexity
8369 || result_first_child == last_child);
8370
8371 if (last_child - result_first_child != 1)
8372 {
8373 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8374 depth.entryvals++;
8375 depth.complexity++;
8376 }
8377
8378 if (depth.complexity <= EXPR_USE_DEPTH)
8379 {
8380 if (depth.entryvals <= wanted_entryvals)
8381 break;
8382 else if (!found_entryvals || depth.entryvals < found_entryvals)
8383 found_entryvals = depth.entryvals;
8384 }
8385
8386 result = NULL;
8387 }
8388
8389 /* Clear these, in case we exit the loop without a usable result. */
8390 depth.complexity = depth.entryvals = 0;
8391 loc_from = NULL;
8392 result_first_child = first_child;
8393 }
8394
8395 if (!loc_from && wanted_entryvals < found_entryvals)
8396 {
8397 /* We found entries with ENTRY_VALUEs and skipped them. Since
8398 we could not find any expansions without ENTRY_VALUEs, but we
8399 found at least one with them, go back and get an entry with
8400 the minimum ENTRY_VALUE count that we found. We could
8401 avoid looping, but since each sub-loc is already resolved,
8402 the re-expansion should be trivial. ??? Should we record all
8403 attempted locs as dependencies, so that we retry the
8404 expansion should any of them change, in the hope it can give
8405 us a new entry without an ENTRY_VALUE? */
8406 elcd->expanding.truncate (first_child);
8407 goto retry;
8408 }
8409
8410 /* Register all encountered dependencies as active. */
8411 pending_recursion = loc_exp_dep_set
8412 (var, result, elcd->expanding.address () + result_first_child,
8413 last_child - result_first_child, elcd->vars);
8414
8415 elcd->expanding.truncate (first_child);
8416
8417 /* Record where the expansion came from. */
8418 gcc_checking_assert (!result || !pending_recursion);
8419 VAR_LOC_FROM (var) = loc_from;
8420 VAR_LOC_DEPTH (var) = depth;
8421
8422 gcc_checking_assert (!depth.complexity == !result);
8423
8424 elcd->depth = update_depth (saved_depth, depth);
8425
8426 /* Indicate whether any of the dependencies are pending recursion
8427 resolution. */
8428 if (pendrecp)
8429 *pendrecp = pending_recursion;
8430
8431 if (!pendrecp || !pending_recursion)
8432 var->var_part[0].cur_loc = result;
8433
8434 return result;
8435 }
8436
8437 /* Callback for cselib_expand_value_rtx_cb that looks for expressions
8438 holding the value in the var-tracking hash tables. Return X for
8439 standard processing; anything else is to be used as-is. */
8440
8441 static rtx
8442 vt_expand_loc_callback (rtx x, bitmap regs,
8443 int max_depth ATTRIBUTE_UNUSED,
8444 void *data)
8445 {
8446 struct expand_loc_callback_data *elcd
8447 = (struct expand_loc_callback_data *) data;
8448 decl_or_value dv;
8449 variable *var;
8450 rtx result, subreg;
8451 bool pending_recursion = false;
8452 bool from_empty = false;
8453
8454 switch (GET_CODE (x))
8455 {
8456 case SUBREG:
8457 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8458 EXPR_DEPTH,
8459 vt_expand_loc_callback, data);
8460
8461 if (!subreg)
8462 return NULL;
8463
8464 result = simplify_gen_subreg (GET_MODE (x), subreg,
8465 GET_MODE (SUBREG_REG (x)),
8466 SUBREG_BYTE (x));
8467
8468 /* Invalid SUBREGs are ok in debug info. ??? We could try
8469 alternate expansions for the VALUE as well. */
8470 if (!result)
8471 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8472
8473 return result;
8474
8475 case DEBUG_EXPR:
8476 case VALUE:
8477 dv = dv_from_rtx (x);
8478 break;
8479
8480 default:
8481 return x;
8482 }
8483
8484 elcd->expanding.safe_push (x);
8485
8486 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8487 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8488
8489 if (NO_LOC_P (x))
8490 {
8491 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8492 return NULL;
8493 }
8494
8495 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8496
8497 if (!var)
8498 {
8499 from_empty = true;
8500 var = variable_from_dropped (dv, INSERT);
8501 }
8502
8503 gcc_checking_assert (var);
8504
8505 if (!dv_changed_p (dv))
8506 {
8507 gcc_checking_assert (!NO_LOC_P (x));
8508 gcc_checking_assert (var->var_part[0].cur_loc);
8509 gcc_checking_assert (VAR_LOC_1PAUX (var));
8510 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8511
8512 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8513
8514 return var->var_part[0].cur_loc;
8515 }
8516
8517 VALUE_RECURSED_INTO (x) = true;
8518 /* This is tentative, but it makes some tests simpler. */
8519 NO_LOC_P (x) = true;
8520
8521 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8522
8523 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8524
8525 if (pending_recursion)
8526 {
8527 gcc_checking_assert (!result);
8528 elcd->pending.safe_push (x);
8529 }
8530 else
8531 {
8532 NO_LOC_P (x) = !result;
8533 VALUE_RECURSED_INTO (x) = false;
8534 set_dv_changed (dv, false);
8535
8536 if (result)
8537 notify_dependents_of_resolved_value (var, elcd->vars);
8538 }
8539
8540 return result;
8541 }
8542
8543 /* While expanding variables, we may encounter recursion cycles
8544 because of mutual (possibly indirect) dependencies between two
8545 particular variables (or values), say A and B. If, while trying to
8546 expand A, we reach B, and B in turn attempts to expand A, and we
8547 can't find any other expansion for B, we add B to this
8548 pending-recursion stack and tentatively return NULL for its
8549 location. This tentative value will be used for any other
8550 occurrences of B, unless A gets some other location, in which case
8551 it will notify B that it is worth another try at computing a
8552 location for it, and it will use the location computed for A then.
8553 At the end of the expansion, the tentative NULL locations become
8554 final for all members of PENDING that didn't get a notification.
8555 This function performs this finalization of NULL locations. */
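/* For instance (a sketch): if A's only location is
   (plus B (const_int 4)) and B's only location is
   (plus A (const_int -4)), expanding A reaches B, which reaches A
   again; B is pushed onto PENDING with a tentative NULL location, and
   unless A resolves through some other location, the finalization
   below makes B's NULL location permanent.  */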
8556
8557 static void
8558 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8559 {
8560 while (!pending->is_empty ())
8561 {
8562 rtx x = pending->pop ();
8563 decl_or_value dv;
8564
8565 if (!VALUE_RECURSED_INTO (x))
8566 continue;
8567
8568 gcc_checking_assert (NO_LOC_P (x));
8569 VALUE_RECURSED_INTO (x) = false;
8570 dv = dv_from_rtx (x);
8571 gcc_checking_assert (dv_changed_p (dv));
8572 set_dv_changed (dv, false);
8573 }
8574 }
8575
8576 /* Initialize expand_loc_callback_data D with variable hash table V.
8577 It must be a macro because the vec storage must live in the caller's frame. */
8578 #define INIT_ELCD(d, v) \
8579 do \
8580 { \
8581 (d).vars = (v); \
8582 (d).depth.complexity = (d).depth.entryvals = 0; \
8583 } \
8584 while (0)
8585 /* Finalize expand_loc_callback_data D, resolved to location L. */
8586 #define FINI_ELCD(d, l) \
8587 do \
8588 { \
8589 resolve_expansions_pending_recursion (&(d).pending); \
8590 (d).pending.release (); \
8591 (d).expanding.release (); \
8592 \
8593 if ((l) && MEM_P (l)) \
8594 (l) = targetm.delegitimize_address (l); \
8595 } \
8596 while (0)
8597
8598 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8599 equivalences in VARS, updating their CUR_LOCs in the process. */
8600
8601 static rtx
8602 vt_expand_loc (rtx loc, variable_table_type *vars)
8603 {
8604 struct expand_loc_callback_data data;
8605 rtx result;
8606
8607 if (!MAY_HAVE_DEBUG_BIND_INSNS)
8608 return loc;
8609
8610 INIT_ELCD (data, vars);
8611
8612 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8613 vt_expand_loc_callback, &data);
8614
8615 FINI_ELCD (data, result);
8616
8617 return result;
8618 }
8619
8620 /* Expand the one-part VARiable to a location, using the equivalences
8621 in VARS, updating their CUR_LOCs in the process. */
8622
8623 static rtx
8624 vt_expand_1pvar (variable *var, variable_table_type *vars)
8625 {
8626 struct expand_loc_callback_data data;
8627 rtx loc;
8628
8629 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8630
8631 if (!dv_changed_p (var->dv))
8632 return var->var_part[0].cur_loc;
8633
8634 INIT_ELCD (data, vars);
8635
8636 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8637
8638 gcc_checking_assert (data.expanding.is_empty ());
8639
8640 FINI_ELCD (data, loc);
8641
8642 return loc;
8643 }
8644
8645 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8646 additional parameters: WHERE specifies whether the note shall be emitted
8647 before or after instruction INSN. */
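/* For a variable whose parts live in two adjacent registers, the note
   might carry (a sketch):

     (var_location D (parallel [(expr_list (reg:SI 0) (const_int 0))
                                (expr_list (reg:SI 1) (const_int 4))]))

   whereas a complete one-part variable at offset 0 uses its location
   RTX directly, and an incomplete variable gets a NULL location.  */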
8648
8649 int
8650 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8651 {
8652 variable *var = *varp;
8653 rtx_insn *insn = data->insn;
8654 enum emit_note_where where = data->where;
8655 variable_table_type *vars = data->vars;
8656 rtx_note *note;
8657 rtx note_vl;
8658 int i, j, n_var_parts;
8659 bool complete;
8660 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8661 HOST_WIDE_INT last_limit;
8662 tree type_size_unit;
8663 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8664 rtx loc[MAX_VAR_PARTS];
8665 tree decl;
8666 location_chain *lc;
8667
8668 gcc_checking_assert (var->onepart == NOT_ONEPART
8669 || var->onepart == ONEPART_VDECL);
8670
8671 decl = dv_as_decl (var->dv);
8672
8673 complete = true;
8674 last_limit = 0;
8675 n_var_parts = 0;
8676 if (!var->onepart)
8677 for (i = 0; i < var->n_var_parts; i++)
8678 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8679 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8680 for (i = 0; i < var->n_var_parts; i++)
8681 {
8682 machine_mode mode, wider_mode;
8683 rtx loc2;
8684 HOST_WIDE_INT offset;
8685
8686 if (i == 0 && var->onepart)
8687 {
8688 gcc_checking_assert (var->n_var_parts == 1);
8689 offset = 0;
8690 initialized = VAR_INIT_STATUS_INITIALIZED;
8691 loc2 = vt_expand_1pvar (var, vars);
8692 }
8693 else
8694 {
8695 if (last_limit < VAR_PART_OFFSET (var, i))
8696 {
8697 complete = false;
8698 break;
8699 }
8700 else if (last_limit > VAR_PART_OFFSET (var, i))
8701 continue;
8702 offset = VAR_PART_OFFSET (var, i);
8703 loc2 = var->var_part[i].cur_loc;
8704 if (loc2 && GET_CODE (loc2) == MEM
8705 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8706 {
8707 rtx depval = XEXP (loc2, 0);
8708
8709 loc2 = vt_expand_loc (loc2, vars);
8710
8711 if (loc2)
8712 loc_exp_insert_dep (var, depval, vars);
8713 }
8714 if (!loc2)
8715 {
8716 complete = false;
8717 continue;
8718 }
8719 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8720 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8721 if (var->var_part[i].cur_loc == lc->loc)
8722 {
8723 initialized = lc->init;
8724 break;
8725 }
8726 gcc_assert (lc);
8727 }
8728
8729 offsets[n_var_parts] = offset;
8730 if (!loc2)
8731 {
8732 complete = false;
8733 continue;
8734 }
8735 loc[n_var_parts] = loc2;
8736 mode = GET_MODE (var->var_part[i].cur_loc);
8737 if (mode == VOIDmode && var->onepart)
8738 mode = DECL_MODE (decl);
8739 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8740
8741 /* Attempt to merge adjacent registers or memory. */
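/* E.g. (a sketch): two SImode parts in consecutive hard registers can
   merge into a single DImode register, and (mem:SI addr) followed by
   (mem:SI (plus addr (const_int 4))) into (mem:DI addr).  */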
8742 for (j = i + 1; j < var->n_var_parts; j++)
8743 if (last_limit <= VAR_PART_OFFSET (var, j))
8744 break;
8745 if (j < var->n_var_parts
8746 && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8747 && var->var_part[j].cur_loc
8748 && mode == GET_MODE (var->var_part[j].cur_loc)
8749 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8750 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8751 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8752 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8753 {
8754 rtx new_loc = NULL;
8755
8756 if (REG_P (loc[n_var_parts])
8757 && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8758 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8759 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8760 == REGNO (loc2))
8761 {
8762 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8763 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8764 mode, 0);
8765 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8766 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8767 if (new_loc)
8768 {
8769 if (!REG_P (new_loc)
8770 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8771 new_loc = NULL;
8772 else
8773 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8774 }
8775 }
8776 else if (MEM_P (loc[n_var_parts])
8777 && GET_CODE (XEXP (loc2, 0)) == PLUS
8778 && REG_P (XEXP (XEXP (loc2, 0), 0))
8779 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8780 {
8781 if ((REG_P (XEXP (loc[n_var_parts], 0))
8782 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8783 XEXP (XEXP (loc2, 0), 0))
8784 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8785 == GET_MODE_SIZE (mode))
8786 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8787 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8788 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8789 XEXP (XEXP (loc2, 0), 0))
8790 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8791 + GET_MODE_SIZE (mode)
8792 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8793 new_loc = adjust_address_nv (loc[n_var_parts],
8794 wider_mode, 0);
8795 }
8796
8797 if (new_loc)
8798 {
8799 loc[n_var_parts] = new_loc;
8800 mode = wider_mode;
8801 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8802 i = j;
8803 }
8804 }
8805 ++n_var_parts;
8806 }
8807 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8808 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8809 complete = false;
8810
8811 if (! flag_var_tracking_uninit)
8812 initialized = VAR_INIT_STATUS_INITIALIZED;
8813
8814 note_vl = NULL_RTX;
8815 if (!complete)
8816 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8817 else if (n_var_parts == 1)
8818 {
8819 rtx expr_list;
8820
8821 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8822 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8823 else
8824 expr_list = loc[0];
8825
8826 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8827 }
8828 else if (n_var_parts)
8829 {
8830 rtx parallel;
8831
8832 for (i = 0; i < n_var_parts; i++)
8833 loc[i]
8834 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8835
8836 parallel = gen_rtx_PARALLEL (VOIDmode,
8837 gen_rtvec_v (n_var_parts, loc));
8838 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8839 parallel, initialized);
8840 }
8841
8842 if (where != EMIT_NOTE_BEFORE_INSN)
8843 {
8844 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8845 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8846 NOTE_DURING_CALL_P (note) = true;
8847 }
8848 else
8849 {
8850 /* Make sure that the call-related notes come first. */
8851 while (NEXT_INSN (insn)
8852 && NOTE_P (insn)
8853 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8854 && NOTE_DURING_CALL_P (insn))
8855 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8856 insn = NEXT_INSN (insn);
8857 if (NOTE_P (insn)
8858 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8859 && NOTE_DURING_CALL_P (insn))
8860 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8861 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8862 else
8863 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8864 }
8865 NOTE_VAR_LOCATION (note) = note_vl;
8866
8867 set_dv_changed (var->dv, false);
8868 gcc_assert (var->in_changed_variables);
8869 var->in_changed_variables = false;
8870 changed_variables->clear_slot (varp);
8871
8872 /* Continue traversing the hash table. */
8873 return 1;
8874 }
8875
8876 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8877 (a stack of RTX values) entries that aren't user variables. */
8878
8879 int
8880 var_track_values_to_stack (variable **slot,
8881 vec<rtx, va_heap> *changed_values_stack)
8882 {
8883 variable *var = *slot;
8884
8885 if (var->onepart == ONEPART_VALUE)
8886 changed_values_stack->safe_push (dv_as_value (var->dv));
8887 else if (var->onepart == ONEPART_DEXPR)
8888 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8889
8890 return 1;
8891 }
8892
8893 /* Remove from changed_variables the entry whose DV corresponds to
8894 value or debug_expr VAL. */
8895 static void
8896 remove_value_from_changed_variables (rtx val)
8897 {
8898 decl_or_value dv = dv_from_rtx (val);
8899 variable **slot;
8900 variable *var;
8901
8902 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8903 NO_INSERT);
8904 var = *slot;
8905 var->in_changed_variables = false;
8906 changed_variables->clear_slot (slot);
8907 }
8908
8909 /* If VAL (a value or debug_expr) has backlinks to variables actively
8910 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8911 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8912 have dependencies of their own to notify. */
8913
8914 static void
8915 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8916 vec<rtx, va_heap> *changed_values_stack)
8917 {
8918 variable **slot;
8919 variable *var;
8920 loc_exp_dep *led;
8921 decl_or_value dv = dv_from_rtx (val);
8922
8923 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8924 NO_INSERT);
8925 if (!slot)
8926 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8927 if (!slot)
8928 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8929 NO_INSERT);
8930 var = *slot;
8931
8932 while ((led = VAR_LOC_DEP_LST (var)))
8933 {
8934 decl_or_value ldv = led->dv;
8935 variable *ivar;
8936
8937 /* Deactivate and remove the backlink, as it was "used up". It
8938 makes no sense to attempt to notify the same entity again:
8939 either it will be recomputed and re-register an active
8940 dependency, or it will still have the changed mark. */
8941 if (led->next)
8942 led->next->pprev = led->pprev;
8943 if (led->pprev)
8944 *led->pprev = led->next;
8945 led->next = NULL;
8946 led->pprev = NULL;
8947
8948 if (dv_changed_p (ldv))
8949 continue;
8950
8951 switch (dv_onepart_p (ldv))
8952 {
8953 case ONEPART_VALUE:
8954 case ONEPART_DEXPR:
8955 set_dv_changed (ldv, true);
8956 changed_values_stack->safe_push (dv_as_rtx (ldv));
8957 break;
8958
8959 case ONEPART_VDECL:
8960 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8961 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8962 variable_was_changed (ivar, NULL);
8963 break;
8964
8965 case NOT_ONEPART:
8966 delete led;
8967 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8968 if (ivar)
8969 {
8970 int i = ivar->n_var_parts;
8971 while (i--)
8972 {
8973 rtx loc = ivar->var_part[i].cur_loc;
8974
8975 if (loc && GET_CODE (loc) == MEM
8976 && XEXP (loc, 0) == val)
8977 {
8978 variable_was_changed (ivar, NULL);
8979 break;
8980 }
8981 }
8982 }
8983 break;
8984
8985 default:
8986 gcc_unreachable ();
8987 }
8988 }
8989 }
8990
8991 /* Take out of changed_variables any entries that don't refer to user
8992 variables. Back-propagate change notifications from values and
8993 debug_exprs to their active dependencies in HTAB or in
8994 CHANGED_VARIABLES. */
8995
8996 static void
8997 process_changed_values (variable_table_type *htab)
8998 {
8999 int i, n;
9000 rtx val;
9001 auto_vec<rtx, 20> changed_values_stack;
9002
9003 /* Move values from changed_variables to changed_values_stack. */
9004 changed_variables
9005 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9006 (&changed_values_stack);
9007
9008 /* Back-propagate change notifications in values while popping
9009 them from the stack. */
9010 for (n = i = changed_values_stack.length ();
9011 i > 0; i = changed_values_stack.length ())
9012 {
9013 val = changed_values_stack.pop ();
9014 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9015
9016 /* This condition will hold when visiting each of the entries
9017 originally in changed_variables. We can't remove them
9018 earlier because this could drop the backlinks before we got a
9019 chance to use them. */
9020 if (i == n)
9021 {
9022 remove_value_from_changed_variables (val);
9023 n--;
9024 }
9025 }
9026 }
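/* A small trace (a sketch): with V1 and V2 moved from
   changed_variables onto the stack, N starts at 2.  Popping V2
   (I == N) may push a dependent V3; V2 is then removed and N drops
   to 1.  Popping V3 (I == 2 > N) only notifies.  Popping V1
   (I == N == 1) removes it and leaves the stack empty.  */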
9027
9028 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9029 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9030 the notes shall be emitted before or after instruction INSN. */
9031
9032 static void
9033 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9034 shared_hash *vars)
9035 {
9036 emit_note_data data;
9037 variable_table_type *htab = shared_hash_htab (vars);
9038
9039 if (!changed_variables->elements ())
9040 return;
9041
9042 if (MAY_HAVE_DEBUG_BIND_INSNS)
9043 process_changed_values (htab);
9044
9045 data.insn = insn;
9046 data.where = where;
9047 data.vars = htab;
9048
9049 changed_variables
9050 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9051 }
9052
9053 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9054 same variable in hash table NEW_VARS or is not there at all. */
9055
9056 int
9057 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
9058 {
9059 variable *old_var, *new_var;
9060
9061 old_var = *slot;
9062 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9063
9064 if (!new_var)
9065 {
9066 /* Variable has disappeared. */
9067 variable *empty_var = NULL;
9068
9069 if (old_var->onepart == ONEPART_VALUE
9070 || old_var->onepart == ONEPART_DEXPR)
9071 {
9072 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9073 if (empty_var)
9074 {
9075 gcc_checking_assert (!empty_var->in_changed_variables);
9076 if (!VAR_LOC_1PAUX (old_var))
9077 {
9078 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9079 VAR_LOC_1PAUX (empty_var) = NULL;
9080 }
9081 else
9082 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9083 }
9084 }
9085
9086 if (!empty_var)
9087 {
9088 empty_var = onepart_pool_allocate (old_var->onepart);
9089 empty_var->dv = old_var->dv;
9090 empty_var->refcount = 0;
9091 empty_var->n_var_parts = 0;
9092 empty_var->onepart = old_var->onepart;
9093 empty_var->in_changed_variables = false;
9094 }
9095
9096 if (empty_var->onepart)
9097 {
9098 /* Propagate the auxiliary data to (ultimately)
9099 changed_variables. */
9100 empty_var->var_part[0].loc_chain = NULL;
9101 empty_var->var_part[0].cur_loc = NULL;
9102 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9103 VAR_LOC_1PAUX (old_var) = NULL;
9104 }
9105 variable_was_changed (empty_var, NULL);
9106 /* Continue traversing the hash table. */
9107 return 1;
9108 }
9109 /* Update cur_loc and one-part auxiliary data, before new_var goes
9110 through variable_was_changed. */
9111 if (old_var != new_var && new_var->onepart)
9112 {
9113 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9114 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9115 VAR_LOC_1PAUX (old_var) = NULL;
9116 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9117 }
9118 if (variable_different_p (old_var, new_var))
9119 variable_was_changed (new_var, NULL);
9120
9121 /* Continue traversing the hash table. */
9122 return 1;
9123 }
9124
9125 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9126 table OLD_VARS. */
9127
9128 int
9129 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9130 {
9131 variable *old_var, *new_var;
9132
9133 new_var = *slot;
9134 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9135 if (!old_var)
9136 {
9137 int i;
9138 for (i = 0; i < new_var->n_var_parts; i++)
9139 new_var->var_part[i].cur_loc = NULL;
9140 variable_was_changed (new_var, NULL);
9141 }
9142
9143 /* Continue traversing the hash table. */
9144 return 1;
9145 }
9146
9147 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9148 NEW_SET. */
9149
9150 static void
9151 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9152 dataflow_set *new_set)
9153 {
9154 shared_hash_htab (old_set->vars)
9155 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9156 (shared_hash_htab (new_set->vars));
9157 shared_hash_htab (new_set->vars)
9158 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9159 (shared_hash_htab (old_set->vars));
9160 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9161 }
9162
9163 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9164
9165 static rtx_insn *
9166 next_non_note_insn_var_location (rtx_insn *insn)
9167 {
9168 while (insn)
9169 {
9170 insn = NEXT_INSN (insn);
9171 if (insn == 0
9172 || !NOTE_P (insn)
9173 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9174 break;
9175 }
9176
9177 return insn;
9178 }
9179
9180 /* Emit the notes for changes of location parts in the basic block BB. */
9181
9182 static void
9183 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9184 {
9185 unsigned int i;
9186 micro_operation *mo;
9187
9188 dataflow_set_clear (set);
9189 dataflow_set_copy (set, &VTI (bb)->in);
9190
9191 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9192 {
9193 rtx_insn *insn = mo->insn;
9194 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9195
9196 switch (mo->type)
9197 {
9198 case MO_CALL:
9199 dataflow_set_clear_at_call (set, insn);
9200 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9201 {
9202 rtx arguments = mo->u.loc, *p = &arguments;
9203 rtx_note *note;
9204 while (*p)
9205 {
9206 XEXP (XEXP (*p, 0), 1)
9207 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9208 shared_hash_htab (set->vars));
9209 /* If expansion is successful, keep it in the list. */
9210 if (XEXP (XEXP (*p, 0), 1))
9211 p = &XEXP (*p, 1);
9212 /* Otherwise, if the following item is data_value for it,
9213 drop it too. */
9214 else if (XEXP (*p, 1)
9215 && REG_P (XEXP (XEXP (*p, 0), 0))
9216 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9217 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9218 0))
9219 && REGNO (XEXP (XEXP (*p, 0), 0))
9220 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9221 0), 0)))
9222 *p = XEXP (XEXP (*p, 1), 1);
9223 /* Just drop this item. */
9224 else
9225 *p = XEXP (*p, 1);
9226 }
9227 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9228 NOTE_VAR_LOCATION (note) = arguments;
9229 }
9230 break;
9231
9232 case MO_USE:
9233 {
9234 rtx loc = mo->u.loc;
9235
9236 if (REG_P (loc))
9237 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9238 else
9239 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9240
9241 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9242 }
9243 break;
9244
9245 case MO_VAL_LOC:
9246 {
9247 rtx loc = mo->u.loc;
9248 rtx val, vloc;
9249 tree var;
9250
9251 if (GET_CODE (loc) == CONCAT)
9252 {
9253 val = XEXP (loc, 0);
9254 vloc = XEXP (loc, 1);
9255 }
9256 else
9257 {
9258 val = NULL_RTX;
9259 vloc = loc;
9260 }
9261
9262 var = PAT_VAR_LOCATION_DECL (vloc);
9263
9264 clobber_variable_part (set, NULL_RTX,
9265 dv_from_decl (var), 0, NULL_RTX);
9266 if (val)
9267 {
9268 if (VAL_NEEDS_RESOLUTION (loc))
9269 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9270 set_variable_part (set, val, dv_from_decl (var), 0,
9271 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9272 INSERT);
9273 }
9274 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9275 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9276 dv_from_decl (var), 0,
9277 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9278 INSERT);
9279
9280 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9281 }
9282 break;
9283
9284 case MO_VAL_USE:
9285 {
9286 rtx loc = mo->u.loc;
9287 rtx val, vloc, uloc;
9288
9289 vloc = uloc = XEXP (loc, 1);
9290 val = XEXP (loc, 0);
9291
9292 if (GET_CODE (val) == CONCAT)
9293 {
9294 uloc = XEXP (val, 1);
9295 val = XEXP (val, 0);
9296 }
9297
9298 if (VAL_NEEDS_RESOLUTION (loc))
9299 val_resolve (set, val, vloc, insn);
9300 else
9301 val_store (set, val, uloc, insn, false);
9302
9303 if (VAL_HOLDS_TRACK_EXPR (loc))
9304 {
9305 if (GET_CODE (uloc) == REG)
9306 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9307 NULL);
9308 else if (GET_CODE (uloc) == MEM)
9309 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9310 NULL);
9311 }
9312
9313 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9314 }
9315 break;
9316
9317 case MO_VAL_SET:
9318 {
9319 rtx loc = mo->u.loc;
9320 rtx val, vloc, uloc;
9321 rtx dstv, srcv;
9322
9323 vloc = loc;
9324 uloc = XEXP (vloc, 1);
9325 val = XEXP (vloc, 0);
9326 vloc = uloc;
9327
9328 if (GET_CODE (uloc) == SET)
9329 {
9330 dstv = SET_DEST (uloc);
9331 srcv = SET_SRC (uloc);
9332 }
9333 else
9334 {
9335 dstv = uloc;
9336 srcv = NULL;
9337 }
9338
9339 if (GET_CODE (val) == CONCAT)
9340 {
9341 dstv = vloc = XEXP (val, 1);
9342 val = XEXP (val, 0);
9343 }
9344
9345 if (GET_CODE (vloc) == SET)
9346 {
9347 srcv = SET_SRC (vloc);
9348
9349 gcc_assert (val != srcv);
9350 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9351
9352 dstv = vloc = SET_DEST (vloc);
9353
9354 if (VAL_NEEDS_RESOLUTION (loc))
9355 val_resolve (set, val, srcv, insn);
9356 }
9357 else if (VAL_NEEDS_RESOLUTION (loc))
9358 {
9359 gcc_assert (GET_CODE (uloc) == SET
9360 && GET_CODE (SET_SRC (uloc)) == REG);
9361 val_resolve (set, val, SET_SRC (uloc), insn);
9362 }
9363
9364 if (VAL_HOLDS_TRACK_EXPR (loc))
9365 {
9366 if (VAL_EXPR_IS_CLOBBERED (loc))
9367 {
9368 if (REG_P (uloc))
9369 var_reg_delete (set, uloc, true);
9370 else if (MEM_P (uloc))
9371 {
9372 gcc_assert (MEM_P (dstv));
9373 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9374 var_mem_delete (set, dstv, true);
9375 }
9376 }
9377 else
9378 {
9379 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9380 rtx src = NULL, dst = uloc;
9381 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9382
9383 if (GET_CODE (uloc) == SET)
9384 {
9385 src = SET_SRC (uloc);
9386 dst = SET_DEST (uloc);
9387 }
9388
9389 if (copied_p)
9390 {
9391 status = find_src_status (set, src);
9392
9393 src = find_src_set_src (set, src);
9394 }
9395
9396 if (REG_P (dst))
9397 var_reg_delete_and_set (set, dst, !copied_p,
9398 status, srcv);
9399 else if (MEM_P (dst))
9400 {
9401 gcc_assert (MEM_P (dstv));
9402 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9403 var_mem_delete_and_set (set, dstv, !copied_p,
9404 status, srcv);
9405 }
9406 }
9407 }
9408 else if (REG_P (uloc))
9409 var_regno_delete (set, REGNO (uloc));
9410 else if (MEM_P (uloc))
9411 {
9412 gcc_checking_assert (GET_CODE (vloc) == MEM);
9413 gcc_checking_assert (vloc == dstv);
9414 if (vloc != dstv)
9415 clobber_overlapping_mems (set, vloc);
9416 }
9417
9418 val_store (set, val, dstv, insn, true);
9419
9420 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9421 set->vars);
9422 }
9423 break;
9424
9425 case MO_SET:
9426 {
9427 rtx loc = mo->u.loc;
9428 rtx set_src = NULL;
9429
9430 if (GET_CODE (loc) == SET)
9431 {
9432 set_src = SET_SRC (loc);
9433 loc = SET_DEST (loc);
9434 }
9435
9436 if (REG_P (loc))
9437 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9438 set_src);
9439 else
9440 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9441 set_src);
9442
9443 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9444 set->vars);
9445 }
9446 break;
9447
9448 case MO_COPY:
9449 {
9450 rtx loc = mo->u.loc;
9451 enum var_init_status src_status;
9452 rtx set_src = NULL;
9453
9454 if (GET_CODE (loc) == SET)
9455 {
9456 set_src = SET_SRC (loc);
9457 loc = SET_DEST (loc);
9458 }
9459
9460 src_status = find_src_status (set, set_src);
9461 set_src = find_src_set_src (set, set_src);
9462
9463 if (REG_P (loc))
9464 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9465 else
9466 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9467
9468 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9469 set->vars);
9470 }
9471 break;
9472
9473 case MO_USE_NO_VAR:
9474 {
9475 rtx loc = mo->u.loc;
9476
9477 if (REG_P (loc))
9478 var_reg_delete (set, loc, false);
9479 else
9480 var_mem_delete (set, loc, false);
9481
9482 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9483 }
9484 break;
9485
9486 case MO_CLOBBER:
9487 {
9488 rtx loc = mo->u.loc;
9489
9490 if (REG_P (loc))
9491 var_reg_delete (set, loc, true);
9492 else
9493 var_mem_delete (set, loc, true);
9494
9495 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9496 set->vars);
9497 }
9498 break;
9499
9500 case MO_ADJUST:
9501 set->stack_adjust += mo->u.adjust;
9502 break;
9503 }
9504 }
9505 }
9506
9507 /* Return BB's head, unless BB is the block that succeeds ENTRY_BLOCK,
9508 in which case it searches back from BB's head for the very first
9509 insn. Use the half-open range [get_first_insn (bb),
9510 BB_HEAD (bb->next_bb)) to iterate over all insns of a function
9511 while iterating over its BBs. */
9512
9513 static rtx_insn *
9514 get_first_insn (basic_block bb)
9515 {
9516 rtx_insn *insn = BB_HEAD (bb);
9517
9518 if (bb->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
9519 while (rtx_insn *prev = PREV_INSN (insn))
9520 insn = prev;
9521
9522 return insn;
9523 }
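/* For example (a sketch), to visit every insn of the function in
   order while walking its BBs:

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       for (rtx_insn *insn = get_first_insn (bb);
            insn != BB_HEAD (bb->next_bb);
            insn = NEXT_INSN (insn))
         ...;
*/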
9524
9525 /* Emit notes for the whole function. */
9526
9527 static void
9528 vt_emit_notes (void)
9529 {
9530 basic_block bb;
9531 dataflow_set cur;
9532
9533 gcc_assert (!changed_variables->elements ());
9534
9535 /* Free memory occupied by the out hash tables, as they aren't used
9536 anymore. */
9537 FOR_EACH_BB_FN (bb, cfun)
9538 dataflow_set_clear (&VTI (bb)->out);
9539
9540 /* Enable emitting notes by functions (mainly by set_variable_part and
9541 delete_variable_part). */
9542 emit_notes = true;
9543
9544 if (MAY_HAVE_DEBUG_BIND_INSNS)
9545 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9546
9547 dataflow_set_init (&cur);
9548
9549 FOR_EACH_BB_FN (bb, cfun)
9550 {
9551 /* Emit the notes for changes of variable locations between two
9552 subsequent basic blocks. */
9553 emit_notes_for_differences (get_first_insn (bb),
9554 &cur, &VTI (bb)->in);
9555
9556 if (MAY_HAVE_DEBUG_BIND_INSNS)
9557 local_get_addr_cache = new hash_map<rtx, rtx>;
9558
9559 /* Emit the notes for the changes in the basic block itself. */
9560 emit_notes_in_bb (bb, &cur);
9561
9562 if (MAY_HAVE_DEBUG_BIND_INSNS)
9563 delete local_get_addr_cache;
9564 local_get_addr_cache = NULL;
9565
9566 /* Free memory occupied by the in hash table, we won't need it
9567 again. */
9568 dataflow_set_clear (&VTI (bb)->in);
9569 }
9570
9571 if (flag_checking)
9572 shared_hash_htab (cur.vars)
9573 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9574 (shared_hash_htab (empty_shared_hash));
9575
9576 dataflow_set_destroy (&cur);
9577
9578 if (MAY_HAVE_DEBUG_BIND_INSNS)
9579 delete dropped_values;
9580 dropped_values = NULL;
9581
9582 emit_notes = false;
9583 }
9584
9585 /* If there is a declaration and offset associated with register/memory RTL
9586 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9587
9588 static bool
9589 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9590 {
9591 if (REG_P (rtl))
9592 {
9593 if (REG_ATTRS (rtl))
9594 {
9595 *declp = REG_EXPR (rtl);
9596 *offsetp = REG_OFFSET (rtl);
9597 return true;
9598 }
9599 }
9600 else if (GET_CODE (rtl) == PARALLEL)
9601 {
9602 tree decl = NULL_TREE;
9603 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9604 int len = XVECLEN (rtl, 0), i;
9605
9606 for (i = 0; i < len; i++)
9607 {
9608 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9609 if (!REG_P (reg) || !REG_ATTRS (reg))
9610 break;
9611 if (!decl)
9612 decl = REG_EXPR (reg);
9613 if (REG_EXPR (reg) != decl)
9614 break;
9615 HOST_WIDE_INT this_offset;
9616 if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9617 break;
9618 offset = MIN (offset, this_offset);
9619 }
9620
9621 if (i == len)
9622 {
9623 *declp = decl;
9624 *offsetp = offset;
9625 return true;
9626 }
9627 }
9628 else if (MEM_P (rtl))
9629 {
9630 if (MEM_ATTRS (rtl))
9631 {
9632 *declp = MEM_EXPR (rtl);
9633 *offsetp = int_mem_offset (rtl);
9634 return true;
9635 }
9636 }
9637 return false;
9638 }
9639
9640 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9641 of VAL. */
9642
9643 static void
9644 record_entry_value (cselib_val *val, rtx rtl)
9645 {
9646 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9647
9648 ENTRY_VALUE_EXP (ev) = rtl;
9649
9650 cselib_add_permanent_equiv (val, ev, get_insns ());
9651 }
9652
9653 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9654
9655 static void
9656 vt_add_function_parameter (tree parm)
9657 {
9658 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9659 rtx incoming = DECL_INCOMING_RTL (parm);
9660 tree decl;
9661 machine_mode mode;
9662 poly_int64 offset;
9663 dataflow_set *out;
9664 decl_or_value dv;
9665
9666 if (TREE_CODE (parm) != PARM_DECL)
9667 return;
9668
9669 if (!decl_rtl || !incoming)
9670 return;
9671
9672 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9673 return;
9674
9675 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9676 rewrite the incoming location of parameters passed on the stack
9677 into MEMs based on the argument pointer, so that incoming doesn't
9678 depend on a pseudo. */
9679 if (MEM_P (incoming)
9680 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9681 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9682 && XEXP (XEXP (incoming, 0), 0)
9683 == crtl->args.internal_arg_pointer
9684 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9685 {
9686 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9687 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9688 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9689 incoming
9690 = replace_equiv_address_nv (incoming,
9691 plus_constant (Pmode,
9692 arg_pointer_rtx, off));
9693 }
9694
9695 #ifdef HAVE_window_save
9696 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9697 If the target machine has an explicit window save instruction, the
9698 actual entry value is the corresponding OUTGOING_REGNO instead. */
9699 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9700 {
9701 if (REG_P (incoming)
9702 && HARD_REGISTER_P (incoming)
9703 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9704 {
9705 parm_reg p;
9706 p.incoming = incoming;
9707 incoming
9708 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9709 OUTGOING_REGNO (REGNO (incoming)), 0);
9710 p.outgoing = incoming;
9711 vec_safe_push (windowed_parm_regs, p);
9712 }
9713 else if (GET_CODE (incoming) == PARALLEL)
9714 {
9715 rtx outgoing
9716 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9717 int i;
9718
9719 for (i = 0; i < XVECLEN (incoming, 0); i++)
9720 {
9721 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9722 parm_reg p;
9723 p.incoming = reg;
9724 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9725 OUTGOING_REGNO (REGNO (reg)), 0);
9726 p.outgoing = reg;
9727 XVECEXP (outgoing, 0, i)
9728 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9729 XEXP (XVECEXP (incoming, 0, i), 1));
9730 vec_safe_push (windowed_parm_regs, p);
9731 }
9732
9733 incoming = outgoing;
9734 }
9735 else if (MEM_P (incoming)
9736 && REG_P (XEXP (incoming, 0))
9737 && HARD_REGISTER_P (XEXP (incoming, 0)))
9738 {
9739 rtx reg = XEXP (incoming, 0);
9740 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9741 {
9742 parm_reg p;
9743 p.incoming = reg;
9744 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9745 p.outgoing = reg;
9746 vec_safe_push (windowed_parm_regs, p);
9747 incoming = replace_equiv_address_nv (incoming, reg);
9748 }
9749 }
9750 }
9751 #endif
9752
9753 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9754 {
9755 if (MEM_P (incoming))
9756 {
9757 /* This means the argument is passed by invisible reference. */
9758 offset = 0;
9759 decl = parm;
9760 }
9761 else
9762 {
9763 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9764 return;
9765 offset += byte_lowpart_offset (GET_MODE (incoming),
9766 GET_MODE (decl_rtl));
9767 }
9768 }
9769
9770 if (!decl)
9771 return;
9772
9773 if (parm != decl)
9774 {
9775 /* If that DECL_RTL wasn't a pseudo that got spilled to
9776 memory, bail out. Otherwise, the spill slot sharing code
9777 will force the memory to reference spill_slot_decl (%sfp),
9778 so we don't match above. That's ok, the pseudo must have
9779 referenced the entire parameter, so just reset OFFSET. */
9780 if (decl != get_spill_slot_decl (false))
9781 return;
9782 offset = 0;
9783 }
9784
9785 HOST_WIDE_INT const_offset;
9786 if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9787 return;
9788
9789 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9790
9791 dv = dv_from_decl (parm);
9792
9793 if (target_for_debug_bind (parm)
9794 /* We can't deal with these right now, because this kind of
9795 variable is single-part. ??? We could handle parallels
9796 that describe multiple locations for the same single
9797 value, but ATM we don't. */
9798 && GET_CODE (incoming) != PARALLEL)
9799 {
9800 cselib_val *val;
9801 rtx lowpart;
9802
9803 /* ??? We shouldn't ever hit this, but it may happen because
9804 arguments passed by invisible reference aren't dealt with
9805 above: incoming-rtl will have Pmode rather than the
9806 expected mode for the type. */
9807 if (const_offset)
9808 return;
9809
9810 lowpart = var_lowpart (mode, incoming);
9811 if (!lowpart)
9812 return;
9813
9814 val = cselib_lookup_from_insn (lowpart, mode, true,
9815 VOIDmode, get_insns ());
9816
9817 /* ??? Float-typed values in memory are not handled by
9818 cselib. */
9819 if (val)
9820 {
9821 preserve_value (val);
9822 set_variable_part (out, val->val_rtx, dv, const_offset,
9823 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9824 dv = dv_from_value (val->val_rtx);
9825 }
9826
9827 if (MEM_P (incoming))
9828 {
9829 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9830 VOIDmode, get_insns ());
9831 if (val)
9832 {
9833 preserve_value (val);
9834 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9835 }
9836 }
9837 }
9838
9839 if (REG_P (incoming))
9840 {
9841 incoming = var_lowpart (mode, incoming);
9842 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9843 attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9844 incoming);
9845 set_variable_part (out, incoming, dv, const_offset,
9846 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9847 if (dv_is_value_p (dv))
9848 {
9849 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9850 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9851 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9852 {
9853 machine_mode indmode
9854 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9855 rtx mem = gen_rtx_MEM (indmode, incoming);
9856 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9857 VOIDmode,
9858 get_insns ());
9859 if (val)
9860 {
9861 preserve_value (val);
9862 record_entry_value (val, mem);
9863 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9864 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9865 }
9866 }
9867 }
9868 }
9869 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9870 {
9871 int i;
9872
9873 for (i = 0; i < XVECLEN (incoming, 0); i++)
9874 {
9875 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9876 /* vt_get_decl_and_offset has already checked that the offset
9877 is a valid variable part. */
9878 const_offset = get_tracked_reg_offset (reg);
9879 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9880 attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9881 set_variable_part (out, reg, dv, const_offset,
9882 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9883 }
9884 }
9885 else if (MEM_P (incoming))
9886 {
9887 incoming = var_lowpart (mode, incoming);
9888 set_variable_part (out, incoming, dv, const_offset,
9889 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9890 }
9891 }
9892
9893 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9894
9895 static void
9896 vt_add_function_parameters (void)
9897 {
9898 tree parm;
9899
9900 for (parm = DECL_ARGUMENTS (current_function_decl);
9901 parm; parm = DECL_CHAIN (parm))
9902 if (!POINTER_BOUNDS_P (parm))
9903 vt_add_function_parameter (parm);
9904
9905 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9906 {
9907 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9908
9909 if (TREE_CODE (vexpr) == INDIRECT_REF)
9910 vexpr = TREE_OPERAND (vexpr, 0);
9911
9912 if (TREE_CODE (vexpr) == PARM_DECL
9913 && DECL_ARTIFICIAL (vexpr)
9914 && !DECL_IGNORED_P (vexpr)
9915 && DECL_NAMELESS (vexpr))
9916 vt_add_function_parameter (vexpr);
9917 }
9918 }
9919
9920 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9921 ensure it isn't flushed during cselib_reset_table.
9922 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9923 has been eliminated. */
9924
9925 static void
9926 vt_init_cfa_base (void)
9927 {
9928 cselib_val *val;
9929
9930 #ifdef FRAME_POINTER_CFA_OFFSET
9931 cfa_base_rtx = frame_pointer_rtx;
9932 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9933 #else
9934 cfa_base_rtx = arg_pointer_rtx;
9935 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9936 #endif
9937 if (cfa_base_rtx == hard_frame_pointer_rtx
9938 || !fixed_regs[REGNO (cfa_base_rtx)])
9939 {
9940 cfa_base_rtx = NULL_RTX;
9941 return;
9942 }
9943 if (!MAY_HAVE_DEBUG_BIND_INSNS)
9944 return;
9945
9946 /* Tell alias analysis that cfa_base_rtx should share
9947 find_base_term value with stack pointer or hard frame pointer. */
9948 if (!frame_pointer_needed)
9949 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9950 else if (!crtl->stack_realign_tried)
9951 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9952
9953 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9954 VOIDmode, get_insns ());
9955 preserve_value (val);
9956 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9957 }
9958
9959 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
9960
9961 static rtx_insn *
9962 reemit_marker_as_note (rtx_insn *insn, basic_block *bb)
9963 {
9964 gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
9965
9966 enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
9967
9968 switch (kind)
9969 {
9970 case NOTE_INSN_BEGIN_STMT:
9971 {
9972 rtx_insn *note = NULL;
9973 if (cfun->debug_nonbind_markers)
9974 {
9975 note = emit_note_before (kind, insn);
9976 NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
9977 if (bb)
9978 BLOCK_FOR_INSN (note) = *bb;
9979 }
9980 delete_insn (insn);
9981 return note;
9982 }
9983
9984 default:
9985 gcc_unreachable ();
9986 }
9987 }

/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (variable_tracking_info));

  empty_shared_hash = shared_hash_pool.allocate ();
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab = new variable_table_type (1);
  changed_variables = new variable_table_type (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      preserved_values.create (256);
      global_get_addr_cache = new hash_map<rtx, rtx>;
    }
  else
    {
      scratch_regs = NULL;
      global_get_addr_cache = NULL;
    }

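  /* Record permanent cselib equivalences between the CFA base register
     and the incoming stack pointer: VALUE(reg) == sp - ofst, and, when
     ofst is nonzero, also VALUE(sp) == reg + ofst.  This lets frame
     addresses expressed on either base canonicalize to the same
     VALUE.  */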
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
	cselib_preserve_cfa_base_value (val, REGNO (reg));
      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());

      if (ofst)
	{
	  val = cselib_lookup_from_insn (stack_pointer_rtx,
					 GET_MODE (stack_pointer_rtx), 1,
					 VOIDmode, get_insns ());
	  preserve_value (val);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  cselib_add_permanent_equiv (val, expr, get_insns ());
	}
    }

  /* In order to factor out the adjustments made to the stack pointer or
     to the hard frame pointer, and thus be able to use DW_OP_fbreg
     operations instead of individual location lists, we're going to
     rewrite MEMs based on them into MEMs based on the CFA by
     de-eliminating stack_pointer_rtx or hard_frame_pointer_rtx to the
     virtual CFA pointer, frame_pointer_rtx (resp. arg_pointer_rtx).  We
     can do this either when there is no frame pointer in the function
     and stack adjustments are consistent for all basic blocks, or when
     there is a frame pointer and no stack realignment.  But first we
     have to check that frame_pointer_rtx (resp. arg_pointer_rtx) has
     been eliminated.  */
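  /* Illustrative rewrite (the offsets are made up and target
     dependent): a stack slot accessed as
	 (mem:SI (plus (reg sp) (const_int 12)))
     may become
	 (mem:SI (plus (reg argp) (const_int -4)))
     so that its address stays fixed relative to the CFA even while the
     stack pointer moves.  */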
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= INTVAL (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "first value: %i\n",
		     cselib_get_next_uid ());
	}

      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || ! single_pred_p (bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;
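      /* [FIRST_BB, LAST_BB] is now a maximal chain of consecutive
	 blocks linked by fallthru edges in which each block has a
	 single predecessor; it is scanned as one region below so that
	 cselib values can be carried across the intra-chain block
	 boundaries.  */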

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;

	  /* If we are walking the first basic block, walk any HEADER
	     insns that might be before it too.  Unfortunately,
	     BB_HEADER and BB_FOOTER are not set while we run this
	     pass.  */
	  rtx_insn *next;
	  bool outside_bb = true;
	  for (insn = get_first_insn (bb); insn != BB_HEAD (bb->next_bb);
	       insn = next)
	    {
	      if (insn == BB_HEAD (bb))
		outside_bb = false;
	      else if (insn == NEXT_INSN (BB_END (bb)))
		outside_bb = true;
	      next = NEXT_INSN (insn);
	      if (INSN_P (insn))
		{
		  if (outside_bb)
		    {
		      /* Ignore non-debug insns outside of basic blocks.  */
		      if (!DEBUG_INSN_P (insn))
			continue;
		      /* Debug binds shouldn't appear outside of bbs.  */
		      gcc_assert (!DEBUG_BIND_INSN_P (insn));
		    }
		  basic_block save_bb = BLOCK_FOR_INSN (insn);
		  if (!BLOCK_FOR_INSN (insn))
		    {
		      gcc_assert (outside_bb);
		      BLOCK_FOR_INSN (insn) = bb;
		    }
		  else
		    gcc_assert (BLOCK_FOR_INSN (insn) == bb);

		  if (!frame_pointer_needed)
		    {
		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		      if (pre)
			{
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (PATTERN (insn), bb, insn,
					 MO_ADJUST, dump_file);
			  VTI (bb)->mos.safe_push (mo);
			  VTI (bb)->out.stack_adjust += pre;
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);
		  if (DEBUG_MARKER_INSN_P (insn))
		    {
		      insn = reemit_marker_as_note (insn, &save_bb);
		      continue;
		    }

		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  if (!cselib_hook_called)
		    add_with_sets (insn, 0, 0);
		  cancel_changes (0);

		  if (!frame_pointer_needed && post)
		    {
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (PATTERN (insn), bb, insn,
				     MO_ADJUST, dump_file);
		      VTI (bb)->mos.safe_push (mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

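		  /* Once we see the insn that sets up the hard frame
		     pointer, switch to CFA-based addressing for the
		     rest of the function.  */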
		  if (fp_cfa_offset != -1
		      && hard_frame_pointer_adjustment == -1
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now.  */
		      if (MAY_HAVE_DEBUG_BIND_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (v);
			    }
			}
		    }
		  BLOCK_FOR_INSN (insn) = save_bb;
		}
	    }
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}

/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;

/* Remove from the insn stream all debug insns used for variable
   tracking at assignments.  */

static void
delete_vta_debug_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB_FN (bb, cfun)
    {
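      /* The loop condition below deliberately fetches NEXT before the
	 body runs, because the body may delete INSN or turn it into a
	 note.  */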
      for (insn = get_first_insn (bb);
	   insn != BB_HEAD (bb->next_bb)
	   ? next = NEXT_INSN (insn), true : false;
	   insn = next)
	if (DEBUG_INSN_P (insn))
	  {
	    if (DEBUG_MARKER_INSN_P (insn))
	      {
		insn = reemit_marker_as_note (insn, NULL);
		continue;
	      }

	    tree decl = INSN_VAR_LOCATION_DECL (insn);
	    if (TREE_CODE (decl) == LABEL_DECL
		&& DECL_NAME (decl)
		&& !DECL_RTL_SET_P (decl))
	      {
		PUT_CODE (insn, NOTE);
		NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
		NOTE_DELETED_LABEL_NAME (insn)
		  = IDENTIFIER_POINTER (DECL_NAME (decl));
		SET_DECL_RTL (decl, insn);
		CODE_LABEL_NUMBER (insn) = debug_label_num++;
	      }
	    else
	      delete_insn (insn);
	  }
    }
}

/* Run a fast, BB-local-only version of var tracking, to take care of
   information that we don't do global analysis on, so that not all of
   it is lost.  If SKIPPED holds, we're skipping the global pass
   entirely, so we should try to use information it would have handled
   as well.  */

static void
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
  /* ??? Just skip it all for now.  */
  delete_vta_debug_insns ();
}

/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      VTI (bb)->mos.release ();
    }

  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();
  delete empty_shared_hash->htab;
  empty_shared_hash->htab = NULL;
  delete changed_variables;
  changed_variables = NULL;
  attrs_pool.release ();
  var_pool.release ();
  location_chain_pool.release ();
  shared_hash_pool.release ();

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      if (global_get_addr_cache)
	delete global_get_addr_cache;
      global_get_addr_cache = NULL;
      loc_exp_dep_pool.release ();
      valvar_pool.release ();
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}

/* The entry point to the variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* We won't be called as a separate pass if flag_var_tracking is not
     set, but final may call us to turn debug markers into notes.  */
  if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
      || flag_var_tracking_assignments < 0
      /* Var-tracking right now assumes the IR doesn't contain
	 any pseudos at this point.  */
      || targetm.no_register_allocation)
    {
      delete_vta_debug_insns ();
      return 0;
    }

  if (!flag_var_tracking)
    return 0;

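  /* Bail out on very large, dense CFGs: more than 500 basic blocks
     with an average of at least 20 edges per block would make the
     global dataflow analysis too expensive, so run only the BB-local
     variant.  */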
  if (n_basic_blocks_for_fn (cfun) > 500
      && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_vta_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}

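/* Wrapper around variable_tracking_main_1 that saves and restores
   flag_var_tracking_assignments, since the worker clears it when it
   has to retry location tracking without assignment tracking.  */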
unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}
\f
namespace {

const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (flag_var_tracking && !targetm.delay_vartrack);
    }

  virtual unsigned int execute (function *)
    {
      return variable_tracking_main ();
    }

}; // class pass_variable_tracking

} // anon namespace

rtl_opt_pass *
make_pass_variable_tracking (gcc::context *ctxt)
{
  return new pass_variable_tracking (ctxt);
}