1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
27
28 How does the variable tracking pass work?
29
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn
36 < clobber < set < post-modifying stack adjustment
37
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along the control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
44
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting the offset of variables addressed using the stack
47 pointer), the table of structures describing the locations of parts of a
48 variable, and a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used to
52 efficiently delete the appropriate variable parts when we set or clobber
53 the register.
54
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short, so they are a good data structure here.
57 For example, in the following code the register allocator may assign the
58 same register to variables A and B, and both of them are stored in the
59 same register in CODE:
60
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
70
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in the RTL code. Each such note
73 describes the location of one variable at the point in the instruction
74 stream where the note is. There is no need to emit a note for each variable
75 before each instruction; we only emit these notes where the location of a
76 variable changes (this means that we also emit notes for changes between
77 the OUT set of the previous block and the IN set of the current block).
78
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
85
86 */
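/* Editorial illustration (a simplified, hypothetical model in plain C; it is
   not part of GCC and does not use the structures defined below): the forward
   dataflow analysis described above amounts to a fixed-point loop in which
   each block's IN set is the union of its predecessors' OUT sets, and the OUT
   set is recomputed from IN by the block's micro operations.  */
#if 0
#include <stdbool.h>

#define N_BLOCKS 8
#define SET_WORDS 4

/* A tiny fixed-size bitset standing in for a set of variable locations.  */
typedef struct { unsigned long w[SET_WORDS]; } locset;

/* dst |= src; return true if dst changed.  */
static bool
locset_union (locset *dst, const locset *src)
{
  bool changed = false;
  for (int i = 0; i < SET_WORDS; i++)
    if ((dst->w[i] | src->w[i]) != dst->w[i])
      {
        dst->w[i] |= src->w[i];
        changed = true;
      }
  return changed;
}

/* One sweep of the fixed point: IN[bb] = union of OUT[pred]; OUT[bb] is then
   derived from IN[bb] (here simply merged; the real pass applies the block's
   micro operations).  Iterate until a sweep changes nothing.  */
static bool
propagate_once (const int n_preds[N_BLOCKS],
                const int preds[N_BLOCKS][N_BLOCKS],
                locset in[N_BLOCKS], locset out[N_BLOCKS])
{
  bool changed = false;
  for (int bb = 0; bb < N_BLOCKS; bb++)
    {
      for (int i = 0; i < n_preds[bb]; i++)
        if (locset_union (&in[bb], &out[preds[bb][i]]))
          changed = true;
      if (locset_union (&out[bb], &in[bb]))
        changed = true;
    }
  return changed;
}
#endif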
87
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "backend.h"
92 #include "target.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "cfghooks.h"
96 #include "alloc-pool.h"
97 #include "tree-pass.h"
98 #include "memmodel.h"
99 #include "tm_p.h"
100 #include "insn-config.h"
101 #include "regs.h"
102 #include "emit-rtl.h"
103 #include "recog.h"
104 #include "diagnostic.h"
105 #include "varasm.h"
106 #include "stor-layout.h"
107 #include "cfgrtl.h"
108 #include "cfganal.h"
109 #include "reload.h"
110 #include "calls.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "tree-pretty-print.h"
115 #include "rtl-iter.h"
116 #include "fibonacci_heap.h"
117 #include "print-rtl.h"
118 #include "function-abi.h"
119
120 typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
121 typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;
122
123 /* var-tracking.c assumes that the tree code with the same value as the VALUE
124 rtx code cannot appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
129
130 /* Type of micro operation. */
131 enum micro_operation_type
132 {
133 MO_USE, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE, /* Use location which is associated with a value. */
137 MO_VAL_LOC, /* Use location which appears in a debug insn. */
138 MO_VAL_SET, /* Set location associated with a value. */
139 MO_SET, /* Set location. */
140 MO_COPY, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER, /* Clobber location. */
143 MO_CALL, /* Call insn. */
144 MO_ADJUST /* Adjust stack pointer. */
145
146 };
147
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name[] = {
150 "MO_USE",
151 "MO_USE_NO_VAR",
152 "MO_VAL_USE",
153 "MO_VAL_LOC",
154 "MO_VAL_SET",
155 "MO_SET",
156 "MO_COPY",
157 "MO_CLOBBER",
158 "MO_CALL",
159 "MO_ADJUST"
160 };
161
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
165 enum emit_note_where
166 {
167 EMIT_NOTE_BEFORE_INSN,
168 EMIT_NOTE_AFTER_INSN,
169 EMIT_NOTE_AFTER_CALL_INSN
170 };
171
172 /* Structure holding information about a micro operation. */
173 struct micro_operation
174 {
175 /* Type of micro operation. */
176 enum micro_operation_type type;
177
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
182 and MO_CLOBBER. */
183 rtx_insn *insn;
184
185 union {
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
192 rtx loc;
193
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust;
196 } u;
197 };
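/* Illustration (hypothetical, for exposition only): for a single instruction
   such as (set (reg A) (mem B)), the scan described at the top of the file
   would typically record a use-type micro operation for the MEM source and a
   set-type micro operation for the REG destination, with u.loc holding the
   SET (or its target) and insn pointing at the instruction; a push or pop
   would additionally record an MO_ADJUST whose u.adjust holds the stack
   delta.  The per-insn ordering of these entries follows the ordering given
   in the comment at the top of the file.  */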
198
199
200 /* A declaration of a variable, or an RTL value being handled like a
201 declaration. */
202 typedef void *decl_or_value;
203
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
205 static inline bool
206 dv_is_decl_p (decl_or_value dv)
207 {
208 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
209 }
210
211 /* Return true if a decl_or_value is a VALUE rtl. */
212 static inline bool
213 dv_is_value_p (decl_or_value dv)
214 {
215 return dv && !dv_is_decl_p (dv);
216 }
217
218 /* Return the decl in the decl_or_value. */
219 static inline tree
220 dv_as_decl (decl_or_value dv)
221 {
222 gcc_checking_assert (dv_is_decl_p (dv));
223 return (tree) dv;
224 }
225
226 /* Return the value in the decl_or_value. */
227 static inline rtx
228 dv_as_value (decl_or_value dv)
229 {
230 gcc_checking_assert (dv_is_value_p (dv));
231 return (rtx)dv;
232 }
233
234 /* Return the opaque pointer in the decl_or_value. */
235 static inline void *
236 dv_as_opaque (decl_or_value dv)
237 {
238 return dv;
239 }
240
241
242 /* Description of the location of a part of a variable. The content of a
243 physical register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 a chain is a good data structure here. */
246 struct attrs
247 {
248 /* Pointer to next member of the list. */
249 attrs *next;
250
251 /* The rtx of register. */
252 rtx loc;
253
254 /* The declaration corresponding to LOC. */
255 decl_or_value dv;
256
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset;
259 };
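/* Editorial sketch (standalone and hypothetical; not the helpers used by this
   file): the kind of removal the chain above is meant to make cheap.  When a
   register is set or clobbered, walk its short attrs chain and unlink every
   node whose decl/offset pair matches.  */
#if 0
#include <stdlib.h>

struct attrs_sketch
{
  struct attrs_sketch *next;
  void *dv;                     /* decl or value this part belongs to */
  long offset;                  /* offset within the decl */
};

/* Remove from *LISTP every node matching DV and OFFSET.  */
static void
attrs_sketch_remove (struct attrs_sketch **listp, void *dv, long offset)
{
  while (*listp)
    {
      if ((*listp)->dv == dv && (*listp)->offset == offset)
        {
          struct attrs_sketch *dead = *listp;
          *listp = dead->next;
          free (dead);
        }
      else
        listp = &(*listp)->next;
    }
}
#endif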
260
261 /* Structure for chaining the locations. */
262 struct location_chain
263 {
264 /* Next element in the chain. */
265 location_chain *next;
266
267 /* The location (REG, MEM or VALUE). */
268 rtx loc;
269
270 /* The "value" stored in this location. */
271 rtx set_src;
272
273 /* Initialized? */
274 enum var_init_status init;
275 };
276
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE's linked list of
280 backlinks back to DV. */
281 struct loc_exp_dep
282 {
283 /* The dependent DV. */
284 decl_or_value dv;
285 /* The dependency VALUE or DECL_DEBUG. */
286 rtx value;
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep *next;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep **pprev;
292 };
293
294
295 /* This data structure holds information about the depth of a variable
296 expansion. */
297 struct expand_depth
298 {
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
301 operand. */
302 int complexity;
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
305 int entryvals;
306 };
307
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
310 struct onepart_aux
311 {
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep *backlinks;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
324 rtx from;
325 /* The depth of the cur_loc expression. */
326 expand_depth depth;
327 /* Dependencies actively used when expanding FROM into cur_loc. */
328 vec<loc_exp_dep, va_heap, vl_embed> deps;
329 };
330
331 /* Structure describing one part of a variable. */
332 struct variable_part
333 {
334 /* Chain of locations of the part. */
335 location_chain *loc_chain;
336
337 /* Location which was last emitted to location list. */
338 rtx cur_loc;
339
340 union variable_aux
341 {
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset;
344
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux *onepaux;
347 } aux;
348 };
349
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
352
353 /* Enumeration type used to discriminate various types of one-part
354 variables. */
355 enum onepart_enum
356 {
357 /* Not a one-part variable. */
358 NOT_ONEPART = 0,
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
360 ONEPART_VDECL = 1,
361 /* A DEBUG_EXPR_DECL. */
362 ONEPART_DEXPR = 2,
363 /* A VALUE. */
364 ONEPART_VALUE = 3
365 };
366
367 /* Structure describing where the variable is located. */
368 struct variable
369 {
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
372 decl_or_value dv;
373
374 /* Reference count. */
375 int refcount;
376
377 /* Number of variable parts. */
378 char n_var_parts;
379
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
382
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables;
386
387 /* The variable parts. */
388 variable_part var_part[1];
389 };
390
391 /* Pointer to the BB's information specific to variable tracking pass. */
392 #define VTI(BB) ((variable_tracking_info *) (BB)->aux)
393
394 /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't. */
395
396 static inline HOST_WIDE_INT
397 int_mem_offset (const_rtx mem)
398 {
399 HOST_WIDE_INT offset;
400 if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
401 return offset;
402 return 0;
403 }
404
405 #if CHECKING_P && (GCC_VERSION >= 2007)
406
407 /* Access VAR's Ith part's offset, checking that it's not a one-part
408 variable. */
409 #define VAR_PART_OFFSET(var, i) __extension__ \
410 (*({ variable *const __v = (var); \
411 gcc_checking_assert (!__v->onepart); \
412 &__v->var_part[(i)].aux.offset; }))
413
414 /* Access VAR's one-part auxiliary data, checking that it is a
415 one-part variable. */
416 #define VAR_LOC_1PAUX(var) __extension__ \
417 (*({ variable *const __v = (var); \
418 gcc_checking_assert (__v->onepart); \
419 &__v->var_part[0].aux.onepaux; }))
420
421 #else
422 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
423 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
424 #endif
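/* Editorial sketch (standalone and hypothetical): the same checked-accessor
   idiom as VAR_PART_OFFSET above, using a GNU statement expression so a
   checking build asserts the discriminator before handing back an lvalue,
   while a non-checking build degenerates to a plain field access.  */
#if 0
#include <assert.h>

struct pair_sketch
{
  int is_a;                     /* discriminator for the union below */
  union { int a; long b; } u;
};

#if defined (__GNUC__)
# define PAIR_A(p) __extension__                                \
  (*({ struct pair_sketch *const p_ = (p);                      \
       assert (p_->is_a);                                       \
       &p_->u.a; }))
#else
# define PAIR_A(p) ((p)->u.a)
#endif
#endif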
425
426 /* These are accessor macros for the one-part auxiliary data. When
427 convenient for users, they're guarded by tests that the data was
428 allocated. */
429 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
430 ? VAR_LOC_1PAUX (var)->backlinks \
431 : NULL)
432 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
433 ? &VAR_LOC_1PAUX (var)->backlinks \
434 : NULL)
435 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
436 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
437 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
438 ? &VAR_LOC_1PAUX (var)->deps \
439 : NULL)
440
441
442
443 typedef unsigned int dvuid;
444
445 /* Return the uid of DV. */
446
447 static inline dvuid
448 dv_uid (decl_or_value dv)
449 {
450 if (dv_is_value_p (dv))
451 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
452 else
453 return DECL_UID (dv_as_decl (dv));
454 }
455
456 /* Compute the hash from the uid. */
457
458 static inline hashval_t
459 dv_uid2hash (dvuid uid)
460 {
461 return uid;
462 }
463
464 /* The hash function for a decl_or_value, used by the variable hash tables. */
465
466 static inline hashval_t
467 dv_htab_hash (decl_or_value dv)
468 {
469 return dv_uid2hash (dv_uid (dv));
470 }
471
472 static void variable_htab_free (void *);
473
474 /* Variable hashtable helpers. */
475
476 struct variable_hasher : pointer_hash <variable>
477 {
478 typedef void *compare_type;
479 static inline hashval_t hash (const variable *);
480 static inline bool equal (const variable *, const void *);
481 static inline void remove (variable *);
482 };
483
484 /* The hash function for variable_htab, computes the hash value
485 from the declaration of variable X. */
486
487 inline hashval_t
488 variable_hasher::hash (const variable *v)
489 {
490 return dv_htab_hash (v->dv);
491 }
492
493 /* Compare the declaration of variable X with declaration Y. */
494
495 inline bool
496 variable_hasher::equal (const variable *v, const void *y)
497 {
498 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
499
500 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
501 }
502
503 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
504
505 inline void
506 variable_hasher::remove (variable *var)
507 {
508 variable_htab_free (var);
509 }
510
511 typedef hash_table<variable_hasher> variable_table_type;
512 typedef variable_table_type::iterator variable_iterator_type;
513
514 /* Structure for passing some other parameters to function
515 emit_note_insn_var_location. */
516 struct emit_note_data
517 {
518 /* The instruction which the note will be emitted before/after. */
519 rtx_insn *insn;
520
521 /* Where the note will be emitted (before/after insn)? */
522 enum emit_note_where where;
523
524 /* The variables and values active at this point. */
525 variable_table_type *vars;
526 };
527
528 /* Structure holding a refcounted hash table. If refcount > 1,
529 it must first be unshared before being modified. */
530 struct shared_hash
531 {
532 /* Reference count. */
533 int refcount;
534
535 /* Actual hash table. */
536 variable_table_type *htab;
537 };
538
539 /* Structure holding the IN or OUT set for a basic block. */
540 struct dataflow_set
541 {
542 /* Adjustment of stack offset. */
543 HOST_WIDE_INT stack_adjust;
544
545 /* Attributes for registers (lists of attrs). */
546 attrs *regs[FIRST_PSEUDO_REGISTER];
547
548 /* Variable locations. */
549 shared_hash *vars;
550
551 /* Vars that are being traversed. */
552 shared_hash *traversed_vars;
553 };
554
555 /* The structure (one for each basic block) containing the information
556 needed for variable tracking. */
557 struct variable_tracking_info
558 {
559 /* The vector of micro operations. */
560 vec<micro_operation> mos;
561
562 /* The IN and OUT set for dataflow analysis. */
563 dataflow_set in;
564 dataflow_set out;
565
566 /* The permanent-in dataflow set for this block. This is used to
567 hold values for which we had to compute entry values. ??? This
568 should probably be dynamically allocated, to avoid using more
569 memory in non-debug builds. */
570 dataflow_set *permp;
571
572 /* Has the block been visited in DFS? */
573 bool visited;
574
575 /* Has the block been flooded in VTA? */
576 bool flooded;
577
578 };
579
580 /* Alloc pool for struct attrs_def. */
581 object_allocator<attrs> attrs_pool ("attrs pool");
582
583 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
584
585 static pool_allocator var_pool
586 ("variable_def pool", sizeof (variable) +
587 (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));
588
589 /* Alloc pool for struct variable_def with a single var_part entry. */
590 static pool_allocator valvar_pool
591 ("small variable_def pool", sizeof (variable));
592
593 /* Alloc pool for struct location_chain. */
594 static object_allocator<location_chain> location_chain_pool
595 ("location_chain pool");
596
597 /* Alloc pool for struct shared_hash. */
598 static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");
599
600 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
601 object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");
602
603 /* Changed variables, notes will be emitted for them. */
604 static variable_table_type *changed_variables;
605
606 /* Shall notes be emitted? */
607 static bool emit_notes;
608
609 /* Values whose dynamic location lists have gone empty, but whose
610 cselib location lists are still usable. Use this to hold the
611 current location, the backlinks, etc, during emit_notes. */
612 static variable_table_type *dropped_values;
613
614 /* Empty shared hashtable. */
615 static shared_hash *empty_shared_hash;
616
617 /* Scratch register bitmap used by cselib_expand_value_rtx. */
618 static bitmap scratch_regs = NULL;
619
620 #ifdef HAVE_window_save
621 struct GTY(()) parm_reg {
622 rtx outgoing;
623 rtx incoming;
624 };
625
626
627 /* Vector of windowed parameter registers, if any. */
628 static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
629 #endif
630
631 /* Variable used to tell whether cselib_process_insn called our hook. */
632 static bool cselib_hook_called;
633
634 /* Local function prototypes. */
635 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
636 HOST_WIDE_INT *);
637 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
638 HOST_WIDE_INT *);
639 static bool vt_stack_adjustments (void);
640
641 static void init_attrs_list_set (attrs **);
642 static void attrs_list_clear (attrs **);
643 static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
644 static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
645 static void attrs_list_copy (attrs **, attrs *);
646 static void attrs_list_union (attrs **, attrs *);
647
648 static variable **unshare_variable (dataflow_set *set, variable **slot,
649 variable *var, enum var_init_status);
650 static void vars_copy (variable_table_type *, variable_table_type *);
651 static tree var_debug_decl (tree);
652 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
653 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
654 enum var_init_status, rtx);
655 static void var_reg_delete (dataflow_set *, rtx, bool);
656 static void var_regno_delete (dataflow_set *, int);
657 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
658 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
659 enum var_init_status, rtx);
660 static void var_mem_delete (dataflow_set *, rtx, bool);
661
662 static void dataflow_set_init (dataflow_set *);
663 static void dataflow_set_clear (dataflow_set *);
664 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
665 static int variable_union_info_cmp_pos (const void *, const void *);
666 static void dataflow_set_union (dataflow_set *, dataflow_set *);
667 static location_chain *find_loc_in_1pdv (rtx, variable *,
668 variable_table_type *);
669 static bool canon_value_cmp (rtx, rtx);
670 static int loc_cmp (rtx, rtx);
671 static bool variable_part_different_p (variable_part *, variable_part *);
672 static bool onepart_variable_different_p (variable *, variable *);
673 static bool variable_different_p (variable *, variable *);
674 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
675 static void dataflow_set_destroy (dataflow_set *);
676
677 static bool track_expr_p (tree, bool);
678 static void add_uses_1 (rtx *, void *);
679 static void add_stores (rtx, const_rtx, void *);
680 static bool compute_bb_dataflow (basic_block);
681 static bool vt_find_locations (void);
682
683 static void dump_attrs_list (attrs *);
684 static void dump_var (variable *);
685 static void dump_vars (variable_table_type *);
686 static void dump_dataflow_set (dataflow_set *);
687 static void dump_dataflow_sets (void);
688
689 static void set_dv_changed (decl_or_value, bool);
690 static void variable_was_changed (variable *, dataflow_set *);
691 static variable **set_slot_part (dataflow_set *, rtx, variable **,
692 decl_or_value, HOST_WIDE_INT,
693 enum var_init_status, rtx);
694 static void set_variable_part (dataflow_set *, rtx,
695 decl_or_value, HOST_WIDE_INT,
696 enum var_init_status, rtx, enum insert_option);
697 static variable **clobber_slot_part (dataflow_set *, rtx,
698 variable **, HOST_WIDE_INT, rtx);
699 static void clobber_variable_part (dataflow_set *, rtx,
700 decl_or_value, HOST_WIDE_INT, rtx);
701 static variable **delete_slot_part (dataflow_set *, rtx, variable **,
702 HOST_WIDE_INT);
703 static void delete_variable_part (dataflow_set *, rtx,
704 decl_or_value, HOST_WIDE_INT);
705 static void emit_notes_in_bb (basic_block, dataflow_set *);
706 static void vt_emit_notes (void);
707
708 static void vt_add_function_parameters (void);
709 static bool vt_initialize (void);
710 static void vt_finalize (void);
711
712 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
713
714 static int
715 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
716 void *arg)
717 {
718 if (dest != stack_pointer_rtx)
719 return 0;
720
721 switch (GET_CODE (op))
722 {
723 case PRE_INC:
724 case PRE_DEC:
725 ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
726 return 0;
727 case POST_INC:
728 case POST_DEC:
729 ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
730 return 0;
731 case PRE_MODIFY:
732 case POST_MODIFY:
733 /* We handle only adjustments by constant amount. */
734 gcc_assert (GET_CODE (src) == PLUS
735 && CONST_INT_P (XEXP (src, 1))
736 && XEXP (src, 0) == stack_pointer_rtx);
737 ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
738 -= INTVAL (XEXP (src, 1));
739 return 0;
740 default:
741 gcc_unreachable ();
742 }
743 }
744
745 /* Given a SET, calculate the amount of stack adjustment it contains,
746 both PRE- and POST-modifying the stack pointer.
747 This function is similar to stack_adjust_offset. */
748
749 static void
750 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
751 HOST_WIDE_INT *post)
752 {
753 rtx src = SET_SRC (pattern);
754 rtx dest = SET_DEST (pattern);
755 enum rtx_code code;
756
757 if (dest == stack_pointer_rtx)
758 {
759 /* (set (reg sp) (plus (reg sp) (const_int))) */
760 code = GET_CODE (src);
761 if (! (code == PLUS || code == MINUS)
762 || XEXP (src, 0) != stack_pointer_rtx
763 || !CONST_INT_P (XEXP (src, 1)))
764 return;
765
766 if (code == MINUS)
767 *post += INTVAL (XEXP (src, 1));
768 else
769 *post -= INTVAL (XEXP (src, 1));
770 return;
771 }
772 HOST_WIDE_INT res[2] = { 0, 0 };
773 for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
774 *pre += res[0];
775 *post += res[1];
776 }
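/* Worked example (editorial note): for a frame allocation such as
   (set (reg sp) (plus (reg sp) (const_int -16))), the code above takes the
   PLUS branch and does *post -= -16, i.e. the running post-modification
   adjustment grows by 16; auto-increment addresses inside the pattern are
   instead accumulated through stack_adjust_offset_pre_post_cb.  */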
777
778 /* Given an INSN, calculate the amount of stack adjustment it contains
779 both PRE- and POST-modifying the stack pointer. */
780
781 static void
782 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
783 HOST_WIDE_INT *post)
784 {
785 rtx pattern;
786
787 *pre = 0;
788 *post = 0;
789
790 pattern = PATTERN (insn);
791 if (RTX_FRAME_RELATED_P (insn))
792 {
793 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
794 if (expr)
795 pattern = XEXP (expr, 0);
796 }
797
798 if (GET_CODE (pattern) == SET)
799 stack_adjust_offset_pre_post (pattern, pre, post);
800 else if (GET_CODE (pattern) == PARALLEL
801 || GET_CODE (pattern) == SEQUENCE)
802 {
803 int i;
804
805 /* There may be stack adjustments inside compound insns. Search
806 for them. */
807 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
808 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
809 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
810 }
811 }
812
813 /* Compute stack adjustments for all blocks by traversing DFS tree.
814 Return true when the adjustments on all incoming edges are consistent.
815 Heavily borrowed from pre_and_rev_post_order_compute. */
816
817 static bool
818 vt_stack_adjustments (void)
819 {
820 edge_iterator *stack;
821 int sp;
822
823 /* Initialize entry block. */
824 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
825 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
826 = INCOMING_FRAME_SP_OFFSET;
827 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
828 = INCOMING_FRAME_SP_OFFSET;
829
830 /* Allocate stack for back-tracking up CFG. */
831 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
832 sp = 0;
833
834 /* Push the first edge on to the stack. */
835 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
836
837 while (sp)
838 {
839 edge_iterator ei;
840 basic_block src;
841 basic_block dest;
842
843 /* Look at the edge on the top of the stack. */
844 ei = stack[sp - 1];
845 src = ei_edge (ei)->src;
846 dest = ei_edge (ei)->dest;
847
848 /* Check if the edge destination has been visited yet. */
849 if (!VTI (dest)->visited)
850 {
851 rtx_insn *insn;
852 HOST_WIDE_INT pre, post, offset;
853 VTI (dest)->visited = true;
854 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
855
856 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
857 for (insn = BB_HEAD (dest);
858 insn != NEXT_INSN (BB_END (dest));
859 insn = NEXT_INSN (insn))
860 if (INSN_P (insn))
861 {
862 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
863 offset += pre + post;
864 }
865
866 VTI (dest)->out.stack_adjust = offset;
867
868 if (EDGE_COUNT (dest->succs) > 0)
869 /* Since the DEST node has been visited for the first
870 time, check its successors. */
871 stack[sp++] = ei_start (dest->succs);
872 }
873 else
874 {
875 /* We can end up with different stack adjustments for the exit block
876 of a shrink-wrapped function if stack_adjust_offset_pre_post
877 doesn't understand the rtx pattern used to restore the stack
878 pointer in the epilogue. For example, on s390(x), the stack
879 pointer is often restored via a load-multiple instruction
880 and so no stack_adjust offset is recorded for it. This means
881 that the stack offset at the end of the epilogue block is the
882 same as the offset before the epilogue, whereas other paths
883 to the exit block will have the correct stack_adjust.
884
885 It is safe to ignore these differences because (a) we never
886 use the stack_adjust for the exit block in this pass and
887 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
888 function are correct.
889
890 We must check whether the adjustments on other edges are
891 the same though. */
892 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
893 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
894 {
895 free (stack);
896 return false;
897 }
898
899 if (! ei_one_before_end_p (ei))
900 /* Go to the next edge. */
901 ei_next (&stack[sp - 1]);
902 else
903 /* Return to previous level if there are no more edges. */
904 sp--;
905 }
906 }
907
908 free (stack);
909 return true;
910 }
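/* Editorial sketch (standalone, with a hypothetical graph representation):
   the shape of the computation in vt_stack_adjustments above, reduced to its
   core.  Each node's incoming offset is the offset at the end of the node we
   reached it from; if a node is reached again with a different offset, the
   walk reports the inconsistency.  */
#if 0
#include <stdbool.h>

#define MAX_NODES 16

struct cfg_sketch
{
  int n_succs[MAX_NODES];
  int succs[MAX_NODES][MAX_NODES];
  long delta[MAX_NODES];        /* net stack adjustment inside the node */
};

static bool
propagate_offsets (const struct cfg_sketch *g, int entry, int n_nodes,
                   long in_offset[], bool visited[])
{
  int stack[MAX_NODES], sp = 0;

  for (int i = 0; i < n_nodes; i++)
    visited[i] = false;

  visited[entry] = true;
  in_offset[entry] = 0;
  stack[sp++] = entry;

  while (sp)
    {
      int src = stack[--sp];
      long out = in_offset[src] + g->delta[src];

      for (int i = 0; i < g->n_succs[src]; i++)
        {
          int dest = g->succs[src][i];
          if (!visited[dest])
            {
              visited[dest] = true;
              in_offset[dest] = out;
              stack[sp++] = dest;
            }
          else if (in_offset[dest] != out)
            return false;       /* inconsistent adjustments on two paths */
        }
    }
  return true;
}
#endif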
911
912 /* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx
913 or hard_frame_pointer_rtx is being mapped to it, and the offset for it. */
914 static rtx cfa_base_rtx;
915 static HOST_WIDE_INT cfa_base_offset;
916
917 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
918 or hard_frame_pointer_rtx. */
919
920 static inline rtx
921 compute_cfa_pointer (poly_int64 adjustment)
922 {
923 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
924 }
925
926 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
927 or -1 if the replacement shouldn't be done. */
928 static poly_int64 hard_frame_pointer_adjustment = -1;
929
930 /* Data for adjust_mems callback. */
931
932 class adjust_mem_data
933 {
934 public:
935 bool store;
936 machine_mode mem_mode;
937 HOST_WIDE_INT stack_adjust;
938 auto_vec<rtx> side_effects;
939 };
940
941 /* Helper for adjust_mems. Return true if X is suitable for
942 transformation of wider-mode arithmetic to a narrower mode. */
943
944 static bool
945 use_narrower_mode_test (rtx x, const_rtx subreg)
946 {
947 subrtx_var_iterator::array_type array;
948 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
949 {
950 rtx x = *iter;
951 if (CONSTANT_P (x))
952 iter.skip_subrtxes ();
953 else
954 switch (GET_CODE (x))
955 {
956 case REG:
957 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
958 return false;
959 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
960 subreg_lowpart_offset (GET_MODE (subreg),
961 GET_MODE (x))))
962 return false;
963 break;
964 case PLUS:
965 case MINUS:
966 case MULT:
967 break;
968 case ASHIFT:
969 if (GET_MODE (XEXP (x, 1)) != VOIDmode)
970 {
971 enum machine_mode mode = GET_MODE (subreg);
972 rtx op1 = XEXP (x, 1);
973 enum machine_mode op1_mode = GET_MODE (op1);
974 if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
975 < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
976 {
977 poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
978 if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
979 {
980 if (!simplify_subreg (mode, op1, op1_mode, byte))
981 return false;
982 }
983 else if (!validate_subreg (mode, op1_mode, op1, byte))
984 return false;
985 }
986 }
987 iter.substitute (XEXP (x, 0));
988 break;
989 default:
990 return false;
991 }
992 }
993 return true;
994 }
995
996 /* Transform X into narrower mode MODE from wider mode WMODE. */
997
998 static rtx
999 use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
1000 {
1001 rtx op0, op1;
1002 if (CONSTANT_P (x))
1003 return lowpart_subreg (mode, x, wmode);
1004 switch (GET_CODE (x))
1005 {
1006 case REG:
1007 return lowpart_subreg (mode, x, wmode);
1008 case PLUS:
1009 case MINUS:
1010 case MULT:
1011 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1012 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1013 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1014 case ASHIFT:
1015 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1016 op1 = XEXP (x, 1);
1017 /* Ensure shift amount is not wider than mode. */
1018 if (GET_MODE (op1) == VOIDmode)
1019 op1 = lowpart_subreg (mode, op1, wmode);
1020 else if (GET_MODE_PRECISION (mode)
1021 < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
1022 op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
1023 return simplify_gen_binary (ASHIFT, mode, op0, op1);
1024 default:
1025 gcc_unreachable ();
1026 }
1027 }
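/* Editorial example: narrowing (plus:DI (reg:DI R) (const_int 1)) to SImode
   recurses into both operands, takes the low part of the register and of the
   constant, and regenerates the addition as an SImode PLUS; for ASHIFT only
   the shifted operand is recursed into, while the shift count is merely
   narrowed if it is wider than the target mode.  */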
1028
1029 /* Helper function for adjusting used MEMs. */
1030
1031 static rtx
1032 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1033 {
1034 class adjust_mem_data *amd = (class adjust_mem_data *) data;
1035 rtx mem, addr = loc, tem;
1036 machine_mode mem_mode_save;
1037 bool store_save;
1038 scalar_int_mode tem_mode, tem_subreg_mode;
1039 poly_int64 size;
1040 switch (GET_CODE (loc))
1041 {
1042 case REG:
1043 /* Don't do any sp or fp replacements outside of MEM addresses
1044 on the LHS. */
1045 if (amd->mem_mode == VOIDmode && amd->store)
1046 return loc;
1047 if (loc == stack_pointer_rtx
1048 && !frame_pointer_needed
1049 && cfa_base_rtx)
1050 return compute_cfa_pointer (amd->stack_adjust);
1051 else if (loc == hard_frame_pointer_rtx
1052 && frame_pointer_needed
1053 && maybe_ne (hard_frame_pointer_adjustment, -1)
1054 && cfa_base_rtx)
1055 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1056 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1057 return loc;
1058 case MEM:
1059 mem = loc;
1060 if (!amd->store)
1061 {
1062 mem = targetm.delegitimize_address (mem);
1063 if (mem != loc && !MEM_P (mem))
1064 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1065 }
1066
1067 addr = XEXP (mem, 0);
1068 mem_mode_save = amd->mem_mode;
1069 amd->mem_mode = GET_MODE (mem);
1070 store_save = amd->store;
1071 amd->store = false;
1072 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1073 amd->store = store_save;
1074 amd->mem_mode = mem_mode_save;
1075 if (mem == loc)
1076 addr = targetm.delegitimize_address (addr);
1077 if (addr != XEXP (mem, 0))
1078 mem = replace_equiv_address_nv (mem, addr);
1079 if (!amd->store)
1080 mem = avoid_constant_pool_reference (mem);
1081 return mem;
1082 case PRE_INC:
1083 case PRE_DEC:
1084 size = GET_MODE_SIZE (amd->mem_mode);
1085 addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
1086 GET_CODE (loc) == PRE_INC ? size : -size);
1087 /* FALLTHRU */
1088 case POST_INC:
1089 case POST_DEC:
1090 if (addr == loc)
1091 addr = XEXP (loc, 0);
1092 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1093 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1094 size = GET_MODE_SIZE (amd->mem_mode);
1095 tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
1096 (GET_CODE (loc) == PRE_INC
1097 || GET_CODE (loc) == POST_INC) ? size : -size);
1098 store_save = amd->store;
1099 amd->store = false;
1100 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1101 amd->store = store_save;
1102 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
1103 return addr;
1104 case PRE_MODIFY:
1105 addr = XEXP (loc, 1);
1106 /* FALLTHRU */
1107 case POST_MODIFY:
1108 if (addr == loc)
1109 addr = XEXP (loc, 0);
1110 gcc_assert (amd->mem_mode != VOIDmode);
1111 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1112 store_save = amd->store;
1113 amd->store = false;
1114 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1115 adjust_mems, data);
1116 amd->store = store_save;
1117 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
1118 return addr;
1119 case SUBREG:
1120 /* First try without delegitimization of whole MEMs and
1121 avoid_constant_pool_reference, which is more likely to succeed. */
1122 store_save = amd->store;
1123 amd->store = true;
1124 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1125 data);
1126 amd->store = store_save;
1127 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1128 if (mem == SUBREG_REG (loc))
1129 {
1130 tem = loc;
1131 goto finish_subreg;
1132 }
1133 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1134 GET_MODE (SUBREG_REG (loc)),
1135 SUBREG_BYTE (loc));
1136 if (tem)
1137 goto finish_subreg;
1138 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1139 GET_MODE (SUBREG_REG (loc)),
1140 SUBREG_BYTE (loc));
1141 if (tem == NULL_RTX)
1142 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1143 finish_subreg:
1144 if (MAY_HAVE_DEBUG_BIND_INSNS
1145 && GET_CODE (tem) == SUBREG
1146 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1147 || GET_CODE (SUBREG_REG (tem)) == MINUS
1148 || GET_CODE (SUBREG_REG (tem)) == MULT
1149 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1150 && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
1151 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
1152 &tem_subreg_mode)
1153 && (GET_MODE_PRECISION (tem_mode)
1154 < GET_MODE_PRECISION (tem_subreg_mode))
1155 && subreg_lowpart_p (tem)
1156 && use_narrower_mode_test (SUBREG_REG (tem), tem))
1157 return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
1158 return tem;
1159 case ASM_OPERANDS:
1160 /* Don't do any replacements in the second and following
1161 ASM_OPERANDS of an inline-asm with multiple sets.
1162 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1163 and ASM_OPERANDS_LABEL_VEC need to be equal between
1164 all the ASM_OPERANDs in the insn and adjust_insn will
1165 fix this up. */
1166 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1167 return loc;
1168 break;
1169 default:
1170 break;
1171 }
1172 return NULL_RTX;
1173 }
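/* Editorial example: given a push-style address such as
   (mem:SI (pre_dec (reg sp))), the PRE_DEC case above rewrites the address
   to (plus (reg sp) (const_int -4)) (the mode size of the enclosing MEM) and
   queues (set (reg sp) (plus (reg sp) (const_int -4))) in amd->side_effects,
   which adjust_insn later appends to the instruction as an extra SET.  */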
1174
1175 /* Helper function for replacement of uses. */
1176
1177 static void
1178 adjust_mem_uses (rtx *x, void *data)
1179 {
1180 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1181 if (new_x != *x)
1182 validate_change (NULL_RTX, x, new_x, true);
1183 }
1184
1185 /* Helper function for replacement of stores. */
1186
1187 static void
1188 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1189 {
1190 if (MEM_P (loc))
1191 {
1192 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1193 adjust_mems, data);
1194 if (new_dest != SET_DEST (expr))
1195 {
1196 rtx xexpr = CONST_CAST_RTX (expr);
1197 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1198 }
1199 }
1200 }
1201
1202 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1203 replace them with their value in the insn and add the side-effects
1204 as other sets to the insn. */
1205
1206 static void
1207 adjust_insn (basic_block bb, rtx_insn *insn)
1208 {
1209 rtx set;
1210
1211 #ifdef HAVE_window_save
1212 /* If the target machine has an explicit window save instruction, the
1213 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1214 if (RTX_FRAME_RELATED_P (insn)
1215 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1216 {
1217 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1218 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1219 parm_reg *p;
1220
1221 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1222 {
1223 XVECEXP (rtl, 0, i * 2)
1224 = gen_rtx_SET (p->incoming, p->outgoing);
1225 /* Do not clobber the attached DECL, but only the REG. */
1226 XVECEXP (rtl, 0, i * 2 + 1)
1227 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1228 gen_raw_REG (GET_MODE (p->outgoing),
1229 REGNO (p->outgoing)));
1230 }
1231
1232 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1233 return;
1234 }
1235 #endif
1236
1237 adjust_mem_data amd;
1238 amd.mem_mode = VOIDmode;
1239 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1240
1241 amd.store = true;
1242 note_stores (insn, adjust_mem_stores, &amd);
1243
1244 amd.store = false;
1245 if (GET_CODE (PATTERN (insn)) == PARALLEL
1246 && asm_noperands (PATTERN (insn)) > 0
1247 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1248 {
1249 rtx body, set0;
1250 int i;
1251
1252 /* An inline-asm with multiple sets is a tiny bit more complicated,
1253 because the 3 vectors in ASM_OPERANDS need to be shared between
1254 all ASM_OPERANDS in the instruction. adjust_mems will
1255 not touch ASM_OPERANDS other than the first one; the asm_noperands
1256 test above needs to be called before that (otherwise it would fail),
1257 and afterwards this code fixes it up. */
1258 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1259 body = PATTERN (insn);
1260 set0 = XVECEXP (body, 0, 0);
1261 gcc_checking_assert (GET_CODE (set0) == SET
1262 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1263 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1264 for (i = 1; i < XVECLEN (body, 0); i++)
1265 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1266 break;
1267 else
1268 {
1269 set = XVECEXP (body, 0, i);
1270 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1271 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1272 == i);
1273 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1274 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1275 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1276 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1277 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1278 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1279 {
1280 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1281 ASM_OPERANDS_INPUT_VEC (newsrc)
1282 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1283 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1284 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1285 ASM_OPERANDS_LABEL_VEC (newsrc)
1286 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1287 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1288 }
1289 }
1290 }
1291 else
1292 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1293
1294 /* For read-only MEMs containing some constant, prefer those
1295 constants. */
1296 set = single_set (insn);
1297 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1298 {
1299 rtx note = find_reg_equal_equiv_note (insn);
1300
1301 if (note && CONSTANT_P (XEXP (note, 0)))
1302 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1303 }
1304
1305 if (!amd.side_effects.is_empty ())
1306 {
1307 rtx *pat, new_pat;
1308 int i, oldn;
1309
1310 pat = &PATTERN (insn);
1311 if (GET_CODE (*pat) == COND_EXEC)
1312 pat = &COND_EXEC_CODE (*pat);
1313 if (GET_CODE (*pat) == PARALLEL)
1314 oldn = XVECLEN (*pat, 0);
1315 else
1316 oldn = 1;
1317 unsigned int newn = amd.side_effects.length ();
1318 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1319 if (GET_CODE (*pat) == PARALLEL)
1320 for (i = 0; i < oldn; i++)
1321 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1322 else
1323 XVECEXP (new_pat, 0, 0) = *pat;
1324
1325 rtx effect;
1326 unsigned int j;
1327 FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
1328 XVECEXP (new_pat, 0, j + oldn) = effect;
1329 validate_change (NULL_RTX, pat, new_pat, true);
1330 }
1331 }
1332
1333 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1334 static inline rtx
1335 dv_as_rtx (decl_or_value dv)
1336 {
1337 tree decl;
1338
1339 if (dv_is_value_p (dv))
1340 return dv_as_value (dv);
1341
1342 decl = dv_as_decl (dv);
1343
1344 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1345 return DECL_RTL_KNOWN_SET (decl);
1346 }
1347
1348 /* Return nonzero if a decl_or_value must not have more than one
1349 variable part. The returned value discriminates among various
1350 kinds of one-part DVs according to enum onepart_enum. */
1351 static inline onepart_enum
1352 dv_onepart_p (decl_or_value dv)
1353 {
1354 tree decl;
1355
1356 if (!MAY_HAVE_DEBUG_BIND_INSNS)
1357 return NOT_ONEPART;
1358
1359 if (dv_is_value_p (dv))
1360 return ONEPART_VALUE;
1361
1362 decl = dv_as_decl (dv);
1363
1364 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1365 return ONEPART_DEXPR;
1366
1367 if (target_for_debug_bind (decl) != NULL_TREE)
1368 return ONEPART_VDECL;
1369
1370 return NOT_ONEPART;
1371 }
1372
1373 /* Return the variable pool to be used for a dv of type ONEPART. */
1374 static inline pool_allocator &
1375 onepart_pool (onepart_enum onepart)
1376 {
1377 return onepart ? valvar_pool : var_pool;
1378 }
1379
1380 /* Allocate a variable_def from the corresponding variable pool. */
1381 static inline variable *
1382 onepart_pool_allocate (onepart_enum onepart)
1383 {
1384 return (variable*) onepart_pool (onepart).allocate ();
1385 }
1386
1387 /* Build a decl_or_value out of a decl. */
1388 static inline decl_or_value
1389 dv_from_decl (tree decl)
1390 {
1391 decl_or_value dv;
1392 dv = decl;
1393 gcc_checking_assert (dv_is_decl_p (dv));
1394 return dv;
1395 }
1396
1397 /* Build a decl_or_value out of a value. */
1398 static inline decl_or_value
1399 dv_from_value (rtx value)
1400 {
1401 decl_or_value dv;
1402 dv = value;
1403 gcc_checking_assert (dv_is_value_p (dv));
1404 return dv;
1405 }
1406
1407 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1408 static inline decl_or_value
1409 dv_from_rtx (rtx x)
1410 {
1411 decl_or_value dv;
1412
1413 switch (GET_CODE (x))
1414 {
1415 case DEBUG_EXPR:
1416 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1417 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1418 break;
1419
1420 case VALUE:
1421 dv = dv_from_value (x);
1422 break;
1423
1424 default:
1425 gcc_unreachable ();
1426 }
1427
1428 return dv;
1429 }
1430
1431 extern void debug_dv (decl_or_value dv);
1432
1433 DEBUG_FUNCTION void
1434 debug_dv (decl_or_value dv)
1435 {
1436 if (dv_is_value_p (dv))
1437 debug_rtx (dv_as_value (dv));
1438 else
1439 debug_generic_stmt (dv_as_decl (dv));
1440 }
1441
1442 static void loc_exp_dep_clear (variable *var);
1443
1444 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1445
1446 static void
1447 variable_htab_free (void *elem)
1448 {
1449 int i;
1450 variable *var = (variable *) elem;
1451 location_chain *node, *next;
1452
1453 gcc_checking_assert (var->refcount > 0);
1454
1455 var->refcount--;
1456 if (var->refcount > 0)
1457 return;
1458
1459 for (i = 0; i < var->n_var_parts; i++)
1460 {
1461 for (node = var->var_part[i].loc_chain; node; node = next)
1462 {
1463 next = node->next;
1464 delete node;
1465 }
1466 var->var_part[i].loc_chain = NULL;
1467 }
1468 if (var->onepart && VAR_LOC_1PAUX (var))
1469 {
1470 loc_exp_dep_clear (var);
1471 if (VAR_LOC_DEP_LST (var))
1472 VAR_LOC_DEP_LST (var)->pprev = NULL;
1473 XDELETE (VAR_LOC_1PAUX (var));
1474 /* These may be reused across functions, so reset
1475 e.g. NO_LOC_P. */
1476 if (var->onepart == ONEPART_DEXPR)
1477 set_dv_changed (var->dv, true);
1478 }
1479 onepart_pool (var->onepart).remove (var);
1480 }
1481
1482 /* Initialize the set (array) SET of attrs to empty lists. */
1483
1484 static void
1485 init_attrs_list_set (attrs **set)
1486 {
1487 int i;
1488
1489 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1490 set[i] = NULL;
1491 }
1492
1493 /* Make the list *LISTP empty. */
1494
1495 static void
1496 attrs_list_clear (attrs **listp)
1497 {
1498 attrs *list, *next;
1499
1500 for (list = *listp; list; list = next)
1501 {
1502 next = list->next;
1503 delete list;
1504 }
1505 *listp = NULL;
1506 }
1507
1508 /* Return the node of LIST whose dv and offset match DV and OFFSET, or NULL. */
1509
1510 static attrs *
1511 attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
1512 {
1513 for (; list; list = list->next)
1514 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1515 return list;
1516 return NULL;
1517 }
1518
1519 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1520
1521 static void
1522 attrs_list_insert (attrs **listp, decl_or_value dv,
1523 HOST_WIDE_INT offset, rtx loc)
1524 {
1525 attrs *list = new attrs;
1526 list->loc = loc;
1527 list->dv = dv;
1528 list->offset = offset;
1529 list->next = *listp;
1530 *listp = list;
1531 }
1532
1533 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1534
1535 static void
1536 attrs_list_copy (attrs **dstp, attrs *src)
1537 {
1538 attrs_list_clear (dstp);
1539 for (; src; src = src->next)
1540 {
1541 attrs *n = new attrs;
1542 n->loc = src->loc;
1543 n->dv = src->dv;
1544 n->offset = src->offset;
1545 n->next = *dstp;
1546 *dstp = n;
1547 }
1548 }
1549
1550 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1551
1552 static void
1553 attrs_list_union (attrs **dstp, attrs *src)
1554 {
1555 for (; src; src = src->next)
1556 {
1557 if (!attrs_list_member (*dstp, src->dv, src->offset))
1558 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1559 }
1560 }
1561
1562 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1563 *DSTP. */
1564
1565 static void
1566 attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
1567 {
1568 gcc_assert (!*dstp);
1569 for (; src; src = src->next)
1570 {
1571 if (!dv_onepart_p (src->dv))
1572 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1573 }
1574 for (src = src2; src; src = src->next)
1575 {
1576 if (!dv_onepart_p (src->dv)
1577 && !attrs_list_member (*dstp, src->dv, src->offset))
1578 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1579 }
1580 }
1581
1582 /* Shared hashtable support. */
1583
1584 /* Return true if VARS is shared. */
1585
1586 static inline bool
1587 shared_hash_shared (shared_hash *vars)
1588 {
1589 return vars->refcount > 1;
1590 }
1591
1592 /* Return the hash table for VARS. */
1593
1594 static inline variable_table_type *
1595 shared_hash_htab (shared_hash *vars)
1596 {
1597 return vars->htab;
1598 }
1599
1600 /* Return true if VAR is shared, possibly because VARS itself is shared. */
1601
1602 static inline bool
1603 shared_var_p (variable *var, shared_hash *vars)
1604 {
1605 /* Don't count an entry in the changed_variables table as a duplicate. */
1606 return ((var->refcount > 1 + (int) var->in_changed_variables)
1607 || shared_hash_shared (vars));
1608 }
1609
1610 /* Copy variables into a new hash table. */
1611
1612 static shared_hash *
1613 shared_hash_unshare (shared_hash *vars)
1614 {
1615 shared_hash *new_vars = new shared_hash;
1616 gcc_assert (vars->refcount > 1);
1617 new_vars->refcount = 1;
1618 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1619 vars_copy (new_vars->htab, vars->htab);
1620 vars->refcount--;
1621 return new_vars;
1622 }
1623
1624 /* Increment reference counter on VARS and return it. */
1625
1626 static inline shared_hash *
1627 shared_hash_copy (shared_hash *vars)
1628 {
1629 vars->refcount++;
1630 return vars;
1631 }
1632
1633 /* Decrement reference counter and destroy hash table if not shared
1634 anymore. */
1635
1636 static void
1637 shared_hash_destroy (shared_hash *vars)
1638 {
1639 gcc_checking_assert (vars->refcount > 0);
1640 if (--vars->refcount == 0)
1641 {
1642 delete vars->htab;
1643 delete vars;
1644 }
1645 }
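/* Editorial sketch (standalone, hypothetical types; not the GCC code above):
   the copy-on-write discipline used by shared_hash.  Copying bumps a
   reference count; writers must unshare first when the count is above one;
   destruction only frees the table once the last reference is gone.  */
#if 0
#include <assert.h>
#include <stdlib.h>
#include <string.h>

struct table_sketch { int dummy; };   /* stand-in for the real hash table */

struct shared_table
{
  int refcount;
  struct table_sketch *tab;
};

/* Share, don't duplicate.  */
static struct shared_table *
shared_table_copy (struct shared_table *s)
{
  s->refcount++;
  return s;
}

/* Make a private copy for writing; drop one reference from the original.  */
static struct shared_table *
shared_table_unshare (struct shared_table *s)
{
  assert (s->refcount > 1);
  struct shared_table *n = malloc (sizeof *n);
  n->refcount = 1;
  n->tab = malloc (sizeof *n->tab);
  memcpy (n->tab, s->tab, sizeof *n->tab);
  s->refcount--;
  return n;
}

/* Free the table only when the last reference goes away.  */
static void
shared_table_destroy (struct shared_table *s)
{
  assert (s->refcount > 0);
  if (--s->refcount == 0)
    {
      free (s->tab);
      free (s);
    }
}
#endif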
1646
1647 /* Unshare *PVARS if shared and return slot for DV. If INS is
1648 INSERT, insert it if not already present. */
1649
1650 static inline variable **
1651 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
1652 hashval_t dvhash, enum insert_option ins)
1653 {
1654 if (shared_hash_shared (*pvars))
1655 *pvars = shared_hash_unshare (*pvars);
1656 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1657 }
1658
1659 static inline variable **
1660 shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
1661 enum insert_option ins)
1662 {
1663 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1664 }
1665
1666 /* Return slot for DV, if it is already present in the hash table.
1667 If it is not present, insert it only if VARS is not shared, otherwise
1668 return NULL. */
1669
1670 static inline variable **
1671 shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
1672 {
1673 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1674 shared_hash_shared (vars)
1675 ? NO_INSERT : INSERT);
1676 }
1677
1678 static inline variable **
1679 shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
1680 {
1681 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1682 }
1683
1684 /* Return slot for DV only if it is already present in the hash table. */
1685
1686 static inline variable **
1687 shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
1688 hashval_t dvhash)
1689 {
1690 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1691 }
1692
1693 static inline variable **
1694 shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
1695 {
1696 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1697 }
1698
1699 /* Return variable for DV or NULL if not already present in the hash
1700 table. */
1701
1702 static inline variable *
1703 shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
1704 {
1705 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1706 }
1707
1708 static inline variable *
1709 shared_hash_find (shared_hash *vars, decl_or_value dv)
1710 {
1711 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1712 }
1713
1714 /* Return true if TVAL is better than CVAL as a canonical value. We
1715 choose lowest-numbered VALUEs, using the RTX address as a
1716 tie-breaker. The idea is to arrange them into a star topology,
1717 such that all of them are at most one step away from the canonical
1718 value, and the canonical value has backlinks to all of them, in
1719 addition to all the actual locations. We don't enforce this
1720 topology throughout the entire dataflow analysis, though.
1721 */
1722
1723 static inline bool
1724 canon_value_cmp (rtx tval, rtx cval)
1725 {
1726 return !cval
1727 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1728 }
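/* Editorial example: among VALUEs with uids 7, 12 and 19, repeated
   application of canon_value_cmp favors the lowest uid, so the VALUE with
   uid 7 becomes the canonical value of the group and the others end up one
   link away from it, as described in the comment above.  */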
1729
1730 static bool dst_can_be_shared;
1731
1732 /* Return a copy of a variable VAR and insert it into the dataflow set SET. */
1733
1734 static variable **
1735 unshare_variable (dataflow_set *set, variable **slot, variable *var,
1736 enum var_init_status initialized)
1737 {
1738 variable *new_var;
1739 int i;
1740
1741 new_var = onepart_pool_allocate (var->onepart);
1742 new_var->dv = var->dv;
1743 new_var->refcount = 1;
1744 var->refcount--;
1745 new_var->n_var_parts = var->n_var_parts;
1746 new_var->onepart = var->onepart;
1747 new_var->in_changed_variables = false;
1748
1749 if (! flag_var_tracking_uninit)
1750 initialized = VAR_INIT_STATUS_INITIALIZED;
1751
1752 for (i = 0; i < var->n_var_parts; i++)
1753 {
1754 location_chain *node;
1755 location_chain **nextp;
1756
1757 if (i == 0 && var->onepart)
1758 {
1759 /* One-part auxiliary data is only used while emitting
1760 notes, so propagate it to the new variable in the active
1761 dataflow set. If we're not emitting notes, this will be
1762 a no-op. */
1763 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1764 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1765 VAR_LOC_1PAUX (var) = NULL;
1766 }
1767 else
1768 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1769 nextp = &new_var->var_part[i].loc_chain;
1770 for (node = var->var_part[i].loc_chain; node; node = node->next)
1771 {
1772 location_chain *new_lc;
1773
1774 new_lc = new location_chain;
1775 new_lc->next = NULL;
1776 if (node->init > initialized)
1777 new_lc->init = node->init;
1778 else
1779 new_lc->init = initialized;
1780 if (node->set_src && !(MEM_P (node->set_src)))
1781 new_lc->set_src = node->set_src;
1782 else
1783 new_lc->set_src = NULL;
1784 new_lc->loc = node->loc;
1785
1786 *nextp = new_lc;
1787 nextp = &new_lc->next;
1788 }
1789
1790 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1791 }
1792
1793 dst_can_be_shared = false;
1794 if (shared_hash_shared (set->vars))
1795 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1796 else if (set->traversed_vars && set->vars != set->traversed_vars)
1797 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1798 *slot = new_var;
1799 if (var->in_changed_variables)
1800 {
1801 variable **cslot
1802 = changed_variables->find_slot_with_hash (var->dv,
1803 dv_htab_hash (var->dv),
1804 NO_INSERT);
1805 gcc_assert (*cslot == (void *) var);
1806 var->in_changed_variables = false;
1807 variable_htab_free (var);
1808 *cslot = new_var;
1809 new_var->in_changed_variables = true;
1810 }
1811 return slot;
1812 }
1813
1814 /* Copy all variables from hash table SRC to hash table DST. */
1815
1816 static void
1817 vars_copy (variable_table_type *dst, variable_table_type *src)
1818 {
1819 variable_iterator_type hi;
1820 variable *var;
1821
1822 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1823 {
1824 variable **dstp;
1825 var->refcount++;
1826 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1827 INSERT);
1828 *dstp = var;
1829 }
1830 }
1831
1832 /* Map a decl to its main debug decl. */
1833
1834 static inline tree
1835 var_debug_decl (tree decl)
1836 {
1837 if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
1838 {
1839 tree debugdecl = DECL_DEBUG_EXPR (decl);
1840 if (DECL_P (debugdecl))
1841 decl = debugdecl;
1842 }
1843
1844 return decl;
1845 }
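
/* Illustrative example (assuming an optimization-created replacement decl):
   if a pass rewrote user variable  a  into a temporary  a$  and set
   DECL_DEBUG_EXPR (a$) = a, then var_debug_decl (a$) returns  a,  so the
   location notes are attributed to the decl the user actually wrote.  */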
1846
1847 /* Set the register LOC to contain DV, OFFSET. */
1848
1849 static void
1850 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1851 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1852 enum insert_option iopt)
1853 {
1854 attrs *node;
1855 bool decl_p = dv_is_decl_p (dv);
1856
1857 if (decl_p)
1858 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1859
1860 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1861 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1862 && node->offset == offset)
1863 break;
1864 if (!node)
1865 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1866 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1867 }
1868
1869 /* Return true if we should track a location that is OFFSET bytes from
1870 a variable. Store the constant offset in *OFFSET_OUT if so. */
1871
1872 static bool
1873 track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
1874 {
1875 HOST_WIDE_INT const_offset;
1876 if (!offset.is_constant (&const_offset)
1877 || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
1878 return false;
1879 *offset_out = const_offset;
1880 return true;
1881 }
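
/* For instance (illustrative offsets only): offsets 0 and 12 are tracked,
   assuming MAX_VAR_PARTS > 12; a negative offset, an offset of
   MAX_VAR_PARTS or more, or an offset that is not a compile-time constant
   (e.g. a poly_int64 depending on a runtime vector length) is not.  */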
1882
1883 /* Return the offset of a register that track_offset_p says we
1884 should track. */
1885
1886 static HOST_WIDE_INT
1887 get_tracked_reg_offset (rtx loc)
1888 {
1889 HOST_WIDE_INT offset;
1890 if (!track_offset_p (REG_OFFSET (loc), &offset))
1891 gcc_unreachable ();
1892 return offset;
1893 }
1894
1895 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1896
1897 static void
1898 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1899 rtx set_src)
1900 {
1901 tree decl = REG_EXPR (loc);
1902 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1903
1904 var_reg_decl_set (set, loc, initialized,
1905 dv_from_decl (decl), offset, set_src, INSERT);
1906 }
1907
1908 static enum var_init_status
1909 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1910 {
1911 variable *var;
1912 int i;
1913 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1914
1915 if (! flag_var_tracking_uninit)
1916 return VAR_INIT_STATUS_INITIALIZED;
1917
1918 var = shared_hash_find (set->vars, dv);
1919 if (var)
1920 {
1921 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1922 {
1923 location_chain *nextp;
1924 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1925 if (rtx_equal_p (nextp->loc, loc))
1926 {
1927 ret_val = nextp->init;
1928 break;
1929 }
1930 }
1931 }
1932
1933 return ret_val;
1934 }
1935
1936 /* Delete current content of register LOC in dataflow set SET and set
1937 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1938 MODIFY is true, any other live copies of the same variable part are
1939 also deleted from the dataflow set, otherwise the variable part is
1940 assumed to be copied from another location holding the same
1941 part. */
1942
1943 static void
1944 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1945 enum var_init_status initialized, rtx set_src)
1946 {
1947 tree decl = REG_EXPR (loc);
1948 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1949 attrs *node, *next;
1950 attrs **nextp;
1951
1952 decl = var_debug_decl (decl);
1953
1954 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1955 initialized = get_init_value (set, loc, dv_from_decl (decl));
1956
1957 nextp = &set->regs[REGNO (loc)];
1958 for (node = *nextp; node; node = next)
1959 {
1960 next = node->next;
1961 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1962 {
1963 delete_variable_part (set, node->loc, node->dv, node->offset);
1964 delete node;
1965 *nextp = next;
1966 }
1967 else
1968 {
1969 node->loc = loc;
1970 nextp = &node->next;
1971 }
1972 }
1973 if (modify)
1974 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1975 var_reg_set (set, loc, initialized, set_src);
1976 }
1977
1978 /* Delete the association of register LOC in dataflow set SET with any
1979 variables that aren't onepart. If CLOBBER is true, also delete any
1980 other live copies of the same variable part, and delete the
1981 association with onepart dvs too. */
1982
1983 static void
1984 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1985 {
1986 attrs **nextp = &set->regs[REGNO (loc)];
1987 attrs *node, *next;
1988
1989 HOST_WIDE_INT offset;
1990 if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
1991 {
1992 tree decl = REG_EXPR (loc);
1993
1994 decl = var_debug_decl (decl);
1995
1996 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1997 }
1998
1999 for (node = *nextp; node; node = next)
2000 {
2001 next = node->next;
2002 if (clobber || !dv_onepart_p (node->dv))
2003 {
2004 delete_variable_part (set, node->loc, node->dv, node->offset);
2005 delete node;
2006 *nextp = next;
2007 }
2008 else
2009 nextp = &node->next;
2010 }
2011 }
2012
2013 /* Delete content of register with number REGNO in dataflow set SET. */
2014
2015 static void
2016 var_regno_delete (dataflow_set *set, int regno)
2017 {
2018 attrs **reg = &set->regs[regno];
2019 attrs *node, *next;
2020
2021 for (node = *reg; node; node = next)
2022 {
2023 next = node->next;
2024 delete_variable_part (set, node->loc, node->dv, node->offset);
2025 delete node;
2026 }
2027 *reg = NULL;
2028 }
2029
2030 /* Return true if I is the negated value of a power of two. */
2031 static bool
2032 negative_power_of_two_p (HOST_WIDE_INT i)
2033 {
2034 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2035 return pow2_or_zerop (x);
2036 }
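
/* E.g. -16 is the negation of a power of two, as seen in stack alignment
   masks such as (and (reg sp) (const_int -16)), so it satisfies this
   predicate, whereas -12 does not.  (Illustrative constants only.)  */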
2037
2038 /* Strip constant offsets and alignment masks off LOC.  Return the base
2039 expression. */
2040
2041 static rtx
2042 vt_get_canonicalize_base (rtx loc)
2043 {
2044 while ((GET_CODE (loc) == PLUS
2045 || GET_CODE (loc) == AND)
2046 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2047 && (GET_CODE (loc) != AND
2048 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2049 loc = XEXP (loc, 0);
2050
2051 return loc;
2052 }
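
/* A hypothetical address such as

       (plus (and (plus (value V) (const_int 32)) (const_int -16))
             (const_int 8))

   strips down to (value V): constant additions, and AND masks whose
   constant is a negated power of two, are peeled off until only the base
   expression remains.  */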
2053
2054 /* This caches canonicalized addresses for VALUEs, computed using
2055 information in the global cselib table. */
2056 static hash_map<rtx, rtx> *global_get_addr_cache;
2057
2058 /* This caches canonicalized addresses for VALUEs, computed using
2059 information from the global cache and information pertaining to a
2060 basic block being analyzed. */
2061 static hash_map<rtx, rtx> *local_get_addr_cache;
2062
2063 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2064
2065 /* Return the canonical address for LOC, which must be a VALUE, using a
2066 cached global equivalence or computing it and storing it in the
2067 global cache. */
2068
2069 static rtx
2070 get_addr_from_global_cache (rtx const loc)
2071 {
2072 rtx x;
2073
2074 gcc_checking_assert (GET_CODE (loc) == VALUE);
2075
2076 bool existed;
2077 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2078 if (existed)
2079 return *slot;
2080
2081 x = canon_rtx (get_addr (loc));
2082
2083 /* Tentative, avoiding infinite recursion. */
2084 *slot = x;
2085
2086 if (x != loc)
2087 {
2088 rtx nx = vt_canonicalize_addr (NULL, x);
2089 if (nx != x)
2090 {
2091 /* The table may have moved during recursion, recompute
2092 SLOT. */
2093 *global_get_addr_cache->get (loc) = x = nx;
2094 }
2095 }
2096
2097 return x;
2098 }
2099
2100 /* Return the canonical address for LOC, which must be a VALUE, using a
2101 cached local equivalence or computing it and storing it in the
2102 local cache. */
2103
2104 static rtx
2105 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2106 {
2107 rtx x;
2108 decl_or_value dv;
2109 variable *var;
2110 location_chain *l;
2111
2112 gcc_checking_assert (GET_CODE (loc) == VALUE);
2113
2114 bool existed;
2115 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2116 if (existed)
2117 return *slot;
2118
2119 x = get_addr_from_global_cache (loc);
2120
2121 /* Tentative, avoiding infinite recursion. */
2122 *slot = x;
2123
2124 /* Recurse to cache the local expansion of X, or to search for a
2125 VALUE in that expansion. */
2126 if (x != loc)
2127 {
2128 rtx nx = vt_canonicalize_addr (set, x);
2129 if (nx != x)
2130 {
2131 slot = local_get_addr_cache->get (loc);
2132 *slot = x = nx;
2133 }
2134 return x;
2135 }
2136
2137 dv = dv_from_rtx (x);
2138 var = shared_hash_find (set->vars, dv);
2139 if (!var)
2140 return x;
2141
2142 /* Look for an improved equivalent expression. */
2143 for (l = var->var_part[0].loc_chain; l; l = l->next)
2144 {
2145 rtx base = vt_get_canonicalize_base (l->loc);
2146 if (GET_CODE (base) == VALUE
2147 && canon_value_cmp (base, loc))
2148 {
2149 rtx nx = vt_canonicalize_addr (set, l->loc);
2150 if (x != nx)
2151 {
2152 slot = local_get_addr_cache->get (loc);
2153 *slot = x = nx;
2154 }
2155 break;
2156 }
2157 }
2158
2159 return x;
2160 }
2161
2162 /* Canonicalize LOC using equivalences from SET in addition to those
2163 in the cselib static table. It expects a VALUE-based expression,
2164 and it will only substitute VALUEs with other VALUEs or
2165 function-global equivalences, so that, if two addresses have base
2166 VALUEs that are locally or globally related in ways that
2167 memrefs_conflict_p cares about, they will both canonicalize to
2168 expressions that have the same base VALUE.
2169
2170 The use of VALUEs as canonical base addresses enables the canonical
2171 RTXs to remain unchanged globally, if they resolve to a constant,
2172 or throughout a basic block otherwise, so that they can be cached
2173 and the cache need not be invalidated when REGs, MEMs or such
2174 change. */
2175
2176 static rtx
2177 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2178 {
2179 poly_int64 ofst = 0, term;
2180 machine_mode mode = GET_MODE (oloc);
2181 rtx loc = oloc;
2182 rtx x;
2183 bool retry = true;
2184
2185 while (retry)
2186 {
2187 while (GET_CODE (loc) == PLUS
2188 && poly_int_rtx_p (XEXP (loc, 1), &term))
2189 {
2190 ofst += term;
2191 loc = XEXP (loc, 0);
2192 }
2193
2194 /* Alignment operations can't normally be combined, so just
2195 canonicalize the base and we're done. We'll normally have
2196 only one stack alignment anyway. */
2197 if (GET_CODE (loc) == AND
2198 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2199 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2200 {
2201 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2202 if (x != XEXP (loc, 0))
2203 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2204 retry = false;
2205 }
2206
2207 if (GET_CODE (loc) == VALUE)
2208 {
2209 if (set)
2210 loc = get_addr_from_local_cache (set, loc);
2211 else
2212 loc = get_addr_from_global_cache (loc);
2213
2214 /* Consolidate plus_constants. */
2215 while (maybe_ne (ofst, 0)
2216 && GET_CODE (loc) == PLUS
2217 && poly_int_rtx_p (XEXP (loc, 1), &term))
2218 {
2219 ofst += term;
2220 loc = XEXP (loc, 0);
2221 }
2222
2223 retry = false;
2224 }
2225 else
2226 {
2227 x = canon_rtx (loc);
2228 if (retry)
2229 retry = (x != loc);
2230 loc = x;
2231 }
2232 }
2233
2234 /* Add OFST back in. */
2235 if (maybe_ne (ofst, 0))
2236 {
2237 /* Don't build new RTL if we can help it. */
2238 if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
2239 return oloc;
2240
2241 loc = plus_constant (mode, loc, ofst);
2242 }
2243
2244 return loc;
2245 }
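
/* A rough illustration (not taken from a real dump): if VALUE V1 is known,
   locally, to equal (plus (value V0) (const_int 8)), then canonicalizing
   (plus (value V1) (const_int 4)) resolves V1 through the cache and
   consolidates the offsets, yielding (plus (value V0) (const_int 12)), so
   both addresses share the base VALUE V0 that memrefs_conflict_p will
   compare.  */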
2246
2247 /* Return true iff there's a true dependence between MLOC and LOC.
2248 MADDR must be a canonicalized version of MLOC's address. */
2249
2250 static inline bool
2251 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2252 {
2253 if (GET_CODE (loc) != MEM)
2254 return false;
2255
2256 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2257 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2258 return false;
2259
2260 return true;
2261 }
2262
2263 /* Hold parameters for the hashtab traversal function
2264 drop_overlapping_mem_locs, see below. */
2265
2266 struct overlapping_mems
2267 {
2268 dataflow_set *set;
2269 rtx loc, addr;
2270 };
2271
2272 /* Remove all MEMs that overlap with COMS->LOC from the location list
2273 of a hash table entry for a onepart variable. COMS->ADDR must be a
2274 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2275 canonicalized itself. */
2276
2277 int
2278 drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
2279 {
2280 dataflow_set *set = coms->set;
2281 rtx mloc = coms->loc, addr = coms->addr;
2282 variable *var = *slot;
2283
2284 if (var->onepart != NOT_ONEPART)
2285 {
2286 location_chain *loc, **locp;
2287 bool changed = false;
2288 rtx cur_loc;
2289
2290 gcc_assert (var->n_var_parts == 1);
2291
2292 if (shared_var_p (var, set->vars))
2293 {
2294 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2295 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2296 break;
2297
2298 if (!loc)
2299 return 1;
2300
2301 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2302 var = *slot;
2303 gcc_assert (var->n_var_parts == 1);
2304 }
2305
2306 if (VAR_LOC_1PAUX (var))
2307 cur_loc = VAR_LOC_FROM (var);
2308 else
2309 cur_loc = var->var_part[0].cur_loc;
2310
2311 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2312 loc; loc = *locp)
2313 {
2314 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2315 {
2316 locp = &loc->next;
2317 continue;
2318 }
2319
2320 *locp = loc->next;
2321 /* If we have deleted the location that was last emitted,
2322 we have to emit a new location, so add the variable to the
2323 set of changed variables. */
2324 if (cur_loc == loc->loc)
2325 {
2326 changed = true;
2327 var->var_part[0].cur_loc = NULL;
2328 if (VAR_LOC_1PAUX (var))
2329 VAR_LOC_FROM (var) = NULL;
2330 }
2331 delete loc;
2332 }
2333
2334 if (!var->var_part[0].loc_chain)
2335 {
2336 var->n_var_parts--;
2337 changed = true;
2338 }
2339 if (changed)
2340 variable_was_changed (var, set);
2341 }
2342
2343 return 1;
2344 }
2345
2346 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2347
2348 static void
2349 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2350 {
2351 struct overlapping_mems coms;
2352
2353 gcc_checking_assert (GET_CODE (loc) == MEM);
2354
2355 coms.set = set;
2356 coms.loc = canon_rtx (loc);
2357 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2358
2359 set->traversed_vars = set->vars;
2360 shared_hash_htab (set->vars)
2361 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2362 set->traversed_vars = NULL;
2363 }
2364
2365 /* Set the location of DV, OFFSET as the MEM LOC. */
2366
2367 static void
2368 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2369 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2370 enum insert_option iopt)
2371 {
2372 if (dv_is_decl_p (dv))
2373 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2374
2375 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2376 }
2377
2378 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2379 SET to LOC.
2380 Adjust the address first if it is stack pointer based. */
2381
2382 static void
2383 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2384 rtx set_src)
2385 {
2386 tree decl = MEM_EXPR (loc);
2387 HOST_WIDE_INT offset = int_mem_offset (loc);
2388
2389 var_mem_decl_set (set, loc, initialized,
2390 dv_from_decl (decl), offset, set_src, INSERT);
2391 }
2392
2393 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2394 dataflow set SET to LOC. If MODIFY is true, any other live copies
2395 of the same variable part are also deleted from the dataflow set,
2396 otherwise the variable part is assumed to be copied from another
2397 location holding the same part.
2398 Adjust the address first if it is stack pointer based. */
2399
2400 static void
2401 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2402 enum var_init_status initialized, rtx set_src)
2403 {
2404 tree decl = MEM_EXPR (loc);
2405 HOST_WIDE_INT offset = int_mem_offset (loc);
2406
2407 clobber_overlapping_mems (set, loc);
2408 decl = var_debug_decl (decl);
2409
2410 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2411 initialized = get_init_value (set, loc, dv_from_decl (decl));
2412
2413 if (modify)
2414 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2415 var_mem_set (set, loc, initialized, set_src);
2416 }
2417
2418 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2419 true, also delete any other live copies of the same variable part.
2420 Adjust the address first if it is stack pointer based. */
2421
2422 static void
2423 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2424 {
2425 tree decl = MEM_EXPR (loc);
2426 HOST_WIDE_INT offset = int_mem_offset (loc);
2427
2428 clobber_overlapping_mems (set, loc);
2429 decl = var_debug_decl (decl);
2430 if (clobber)
2431 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2432 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2433 }
2434
2435 /* Return true if LOC should not be expanded for location expressions,
2436 or used in them. */
2437
2438 static inline bool
2439 unsuitable_loc (rtx loc)
2440 {
2441 switch (GET_CODE (loc))
2442 {
2443 case PC:
2444 case SCRATCH:
2445 case CC0:
2446 case ASM_INPUT:
2447 case ASM_OPERANDS:
2448 return true;
2449
2450 default:
2451 return false;
2452 }
2453 }
2454
2455 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2456 bound to it. */
2457
2458 static inline void
2459 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2460 {
2461 if (REG_P (loc))
2462 {
2463 if (modified)
2464 var_regno_delete (set, REGNO (loc));
2465 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2466 dv_from_value (val), 0, NULL_RTX, INSERT);
2467 }
2468 else if (MEM_P (loc))
2469 {
2470 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2471
2472 if (modified)
2473 clobber_overlapping_mems (set, loc);
2474
2475 if (l && GET_CODE (l->loc) == VALUE)
2476 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2477
2478 /* If this MEM is a global constant, we don't need it in the
2479 dynamic tables. ??? We should test this before emitting the
2480 micro-op in the first place. */
2481 while (l)
2482 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2483 break;
2484 else
2485 l = l->next;
2486
2487 if (!l)
2488 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2489 dv_from_value (val), 0, NULL_RTX, INSERT);
2490 }
2491 else
2492 {
2493 /* Other kinds of equivalences are necessarily static, at least
2494 so long as we do not perform substitutions while merging
2495 expressions. */
2496 gcc_unreachable ();
2497 set_variable_part (set, loc, dv_from_value (val), 0,
2498 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2499 }
2500 }
2501
2502 /* Bind a value to a location it was just stored in. If MODIFIED
2503 holds, assume the location was modified, detaching it from any
2504 values bound to it. */
2505
2506 static void
2507 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2508 bool modified)
2509 {
2510 cselib_val *v = CSELIB_VAL_PTR (val);
2511
2512 gcc_assert (cselib_preserved_value_p (v));
2513
2514 if (dump_file)
2515 {
2516 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2517 print_inline_rtx (dump_file, loc, 0);
2518 fprintf (dump_file, " evaluates to ");
2519 print_inline_rtx (dump_file, val, 0);
2520 if (v->locs)
2521 {
2522 struct elt_loc_list *l;
2523 for (l = v->locs; l; l = l->next)
2524 {
2525 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2526 print_inline_rtx (dump_file, l->loc, 0);
2527 }
2528 }
2529 fprintf (dump_file, "\n");
2530 }
2531
2532 gcc_checking_assert (!unsuitable_loc (loc));
2533
2534 val_bind (set, val, loc, modified);
2535 }
2536
2537 /* Clear (canonical address) slots that reference X. */
2538
2539 bool
2540 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2541 {
2542 if (vt_get_canonicalize_base (*slot) == x)
2543 *slot = NULL;
2544 return true;
2545 }
2546
2547 /* Reset the node for DV in SET, detaching all its equivalences and
2548 keeping the remaining values linked through the canonical one. */
2549
2550 static void
2551 val_reset (dataflow_set *set, decl_or_value dv)
2552 {
2553 variable *var = shared_hash_find (set->vars, dv) ;
2554 location_chain *node;
2555 rtx cval;
2556
2557 if (!var || !var->n_var_parts)
2558 return;
2559
2560 gcc_assert (var->n_var_parts == 1);
2561
2562 if (var->onepart == ONEPART_VALUE)
2563 {
2564 rtx x = dv_as_value (dv);
2565
2566 /* Relationships in the global cache don't change, so reset the
2567 local cache entry only. */
2568 rtx *slot = local_get_addr_cache->get (x);
2569 if (slot)
2570 {
2571 /* If the value resolved back to itself, odds are that other
2572 values may have cached it too. These entries now refer
2573 to the old X, so detach them too. Entries that used the
2574 old X but resolved to something else remain ok as long as
2575 that something else isn't also reset. */
2576 if (*slot == x)
2577 local_get_addr_cache
2578 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2579 *slot = NULL;
2580 }
2581 }
2582
2583 cval = NULL;
2584 for (node = var->var_part[0].loc_chain; node; node = node->next)
2585 if (GET_CODE (node->loc) == VALUE
2586 && canon_value_cmp (node->loc, cval))
2587 cval = node->loc;
2588
2589 for (node = var->var_part[0].loc_chain; node; node = node->next)
2590 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2591 {
2592 /* Redirect the equivalence link to the new canonical
2593 value, or simply remove it if it would point at
2594 itself. */
2595 if (cval)
2596 set_variable_part (set, cval, dv_from_value (node->loc),
2597 0, node->init, node->set_src, NO_INSERT);
2598 delete_variable_part (set, dv_as_value (dv),
2599 dv_from_value (node->loc), 0);
2600 }
2601
2602 if (cval)
2603 {
2604 decl_or_value cdv = dv_from_value (cval);
2605
2606 /* Keep the remaining values connected, accumulating links
2607 in the canonical value. */
2608 for (node = var->var_part[0].loc_chain; node; node = node->next)
2609 {
2610 if (node->loc == cval)
2611 continue;
2612 else if (GET_CODE (node->loc) == REG)
2613 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2614 node->set_src, NO_INSERT);
2615 else if (GET_CODE (node->loc) == MEM)
2616 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2617 node->set_src, NO_INSERT);
2618 else
2619 set_variable_part (set, node->loc, cdv, 0,
2620 node->init, node->set_src, NO_INSERT);
2621 }
2622 }
2623
2624 /* We remove this last, to make sure that the canonical value is not
2625 removed to the point of requiring reinsertion. */
2626 if (cval)
2627 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2628
2629 clobber_variable_part (set, NULL, dv, 0, NULL);
2630 }
2631
2632 /* Find the values in a given location and map VAL to another
2633 value, if it is unique, or add the location as one holding the
2634 value. */
2635
2636 static void
2637 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2638 {
2639 decl_or_value dv = dv_from_value (val);
2640
2641 if (dump_file && (dump_flags & TDF_DETAILS))
2642 {
2643 if (insn)
2644 fprintf (dump_file, "%i: ", INSN_UID (insn));
2645 else
2646 fprintf (dump_file, "head: ");
2647 print_inline_rtx (dump_file, val, 0);
2648 fputs (" is at ", dump_file);
2649 print_inline_rtx (dump_file, loc, 0);
2650 fputc ('\n', dump_file);
2651 }
2652
2653 val_reset (set, dv);
2654
2655 gcc_checking_assert (!unsuitable_loc (loc));
2656
2657 if (REG_P (loc))
2658 {
2659 attrs *node, *found = NULL;
2660
2661 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2662 if (dv_is_value_p (node->dv)
2663 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2664 {
2665 found = node;
2666
2667 /* Map incoming equivalences. ??? Wouldn't it be nice if
2668 we just started sharing the location lists? Maybe a
2669 circular list ending at the value itself or some
2670 such. */
2671 set_variable_part (set, dv_as_value (node->dv),
2672 dv_from_value (val), node->offset,
2673 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2674 set_variable_part (set, val, node->dv, node->offset,
2675 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2676 }
2677
2678 /* If we didn't find any equivalence, we need to remember that
2679 this value is held in the named register. */
2680 if (found)
2681 return;
2682 }
2683 /* ??? Attempt to find and merge equivalent MEMs or other
2684 expressions too. */
2685
2686 val_bind (set, val, loc, false);
2687 }
2688
2689 /* Initialize dataflow set SET to be empty. */
2691
2692 static void
2693 dataflow_set_init (dataflow_set *set)
2694 {
2695 init_attrs_list_set (set->regs);
2696 set->vars = shared_hash_copy (empty_shared_hash);
2697 set->stack_adjust = 0;
2698 set->traversed_vars = NULL;
2699 }
2700
2701 /* Delete the contents of dataflow set SET. */
2702
2703 static void
2704 dataflow_set_clear (dataflow_set *set)
2705 {
2706 int i;
2707
2708 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2709 attrs_list_clear (&set->regs[i]);
2710
2711 shared_hash_destroy (set->vars);
2712 set->vars = shared_hash_copy (empty_shared_hash);
2713 }
2714
2715 /* Copy the contents of dataflow set SRC to DST. */
2716
2717 static void
2718 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2719 {
2720 int i;
2721
2722 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2723 attrs_list_copy (&dst->regs[i], src->regs[i]);
2724
2725 shared_hash_destroy (dst->vars);
2726 dst->vars = shared_hash_copy (src->vars);
2727 dst->stack_adjust = src->stack_adjust;
2728 }
2729
2730 /* Information for merging lists of locations for a given offset of a variable.
2731 */
2732 struct variable_union_info
2733 {
2734 /* Node of the location chain. */
2735 location_chain *lc;
2736
2737 /* The sum of positions in the input chains. */
2738 int pos;
2739
2740 /* The position in the chain of DST dataflow set. */
2741 int pos_dst;
2742 };
2743
2744 /* Buffer for location list sorting and its allocated size. */
2745 static struct variable_union_info *vui_vec;
2746 static int vui_allocated;
2747
2748 /* Compare function for qsort, order the structures by POS element. */
2749
2750 static int
2751 variable_union_info_cmp_pos (const void *n1, const void *n2)
2752 {
2753 const struct variable_union_info *const i1 =
2754 (const struct variable_union_info *) n1;
2755 const struct variable_union_info *const i2 =
2756 ( const struct variable_union_info *) n2;
2757
2758 if (i1->pos != i2->pos)
2759 return i1->pos - i2->pos;
2760
2761 return (i1->pos_dst - i2->pos_dst);
2762 }
2763
2764 /* Compute union of location parts of variable SRC and the corresponding
2765 variable in dataflow set SET.  Compute a "sorted" union of the location chains
2766 for common offsets, i.e. the locations of a variable part are sorted by
2767 a priority where the priority is the sum of the positions in the 2 chains
2768 (if a location is only in one list the position in the second list is
2769 defined to be larger than the length of the chains).
2770 When we are updating the location parts, the newest location is at the
2771 beginning of the chain, so the described "sorted" union keeps the
2772 newest locations at the beginning. */
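
/* A purely illustrative example: with the DST chain [R1, M1, R2] and the
   SRC chain [M1, R2, R3], the locations common to both get the sum of their
   positions as priority (M1: 1 + 0, R2: 2 + 1), while locations present in
   only one chain get a priority beyond any such sum, so the united chain
   becomes [M1, R2, R1, R3].  */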
2773
2774 static int
2775 variable_union (variable *src, dataflow_set *set)
2776 {
2777 variable *dst;
2778 variable **dstp;
2779 int i, j, k;
2780
2781 dstp = shared_hash_find_slot (set->vars, src->dv);
2782 if (!dstp || !*dstp)
2783 {
2784 src->refcount++;
2785
2786 dst_can_be_shared = false;
2787 if (!dstp)
2788 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2789
2790 *dstp = src;
2791
2792 /* Continue traversing the hash table. */
2793 return 1;
2794 }
2795 else
2796 dst = *dstp;
2797
2798 gcc_assert (src->n_var_parts);
2799 gcc_checking_assert (src->onepart == dst->onepart);
2800
2801 /* We can combine one-part variables very efficiently, because their
2802 entries are in canonical order. */
2803 if (src->onepart)
2804 {
2805 location_chain **nodep, *dnode, *snode;
2806
2807 gcc_assert (src->n_var_parts == 1
2808 && dst->n_var_parts == 1);
2809
2810 snode = src->var_part[0].loc_chain;
2811 gcc_assert (snode);
2812
2813 restart_onepart_unshared:
2814 nodep = &dst->var_part[0].loc_chain;
2815 dnode = *nodep;
2816 gcc_assert (dnode);
2817
2818 while (snode)
2819 {
2820 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2821
2822 if (r > 0)
2823 {
2824 location_chain *nnode;
2825
2826 if (shared_var_p (dst, set->vars))
2827 {
2828 dstp = unshare_variable (set, dstp, dst,
2829 VAR_INIT_STATUS_INITIALIZED);
2830 dst = *dstp;
2831 goto restart_onepart_unshared;
2832 }
2833
2834 *nodep = nnode = new location_chain;
2835 nnode->loc = snode->loc;
2836 nnode->init = snode->init;
2837 if (!snode->set_src || MEM_P (snode->set_src))
2838 nnode->set_src = NULL;
2839 else
2840 nnode->set_src = snode->set_src;
2841 nnode->next = dnode;
2842 dnode = nnode;
2843 }
2844 else if (r == 0)
2845 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2846
2847 if (r >= 0)
2848 snode = snode->next;
2849
2850 nodep = &dnode->next;
2851 dnode = *nodep;
2852 }
2853
2854 return 1;
2855 }
2856
2857 gcc_checking_assert (!src->onepart);
2858
2859 /* Count the number of location parts, result is K. */
2860 for (i = 0, j = 0, k = 0;
2861 i < src->n_var_parts && j < dst->n_var_parts; k++)
2862 {
2863 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2864 {
2865 i++;
2866 j++;
2867 }
2868 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2869 i++;
2870 else
2871 j++;
2872 }
2873 k += src->n_var_parts - i;
2874 k += dst->n_var_parts - j;
2875
2876 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2877 thus there are at most MAX_VAR_PARTS different offsets. */
2878 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2879
2880 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2881 {
2882 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2883 dst = *dstp;
2884 }
2885
2886 i = src->n_var_parts - 1;
2887 j = dst->n_var_parts - 1;
2888 dst->n_var_parts = k;
2889
2890 for (k--; k >= 0; k--)
2891 {
2892 location_chain *node, *node2;
2893
2894 if (i >= 0 && j >= 0
2895 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2896 {
2897 /* Compute the "sorted" union of the chains, i.e. the locations which
2898 are in both chains go first, they are sorted by the sum of
2899 positions in the chains. */
2900 int dst_l, src_l;
2901 int ii, jj, n;
2902 struct variable_union_info *vui;
2903
2904 /* If DST is shared, compare the location chains.
2905 If they differ, we will most likely modify the chain in DST,
2906 so make a copy of DST first. */
2907 if (shared_var_p (dst, set->vars))
2908 {
2909 for (node = src->var_part[i].loc_chain,
2910 node2 = dst->var_part[j].loc_chain; node && node2;
2911 node = node->next, node2 = node2->next)
2912 {
2913 if (!((REG_P (node2->loc)
2914 && REG_P (node->loc)
2915 && REGNO (node2->loc) == REGNO (node->loc))
2916 || rtx_equal_p (node2->loc, node->loc)))
2917 {
2918 if (node2->init < node->init)
2919 node2->init = node->init;
2920 break;
2921 }
2922 }
2923 if (node || node2)
2924 {
2925 dstp = unshare_variable (set, dstp, dst,
2926 VAR_INIT_STATUS_UNKNOWN);
2927 dst = (variable *)*dstp;
2928 }
2929 }
2930
2931 src_l = 0;
2932 for (node = src->var_part[i].loc_chain; node; node = node->next)
2933 src_l++;
2934 dst_l = 0;
2935 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2936 dst_l++;
2937
2938 if (dst_l == 1)
2939 {
2940 /* The most common case, much simpler, no qsort is needed. */
2941 location_chain *dstnode = dst->var_part[j].loc_chain;
2942 dst->var_part[k].loc_chain = dstnode;
2943 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2944 node2 = dstnode;
2945 for (node = src->var_part[i].loc_chain; node; node = node->next)
2946 if (!((REG_P (dstnode->loc)
2947 && REG_P (node->loc)
2948 && REGNO (dstnode->loc) == REGNO (node->loc))
2949 || rtx_equal_p (dstnode->loc, node->loc)))
2950 {
2951 location_chain *new_node;
2952
2953 /* Copy the location from SRC. */
2954 new_node = new location_chain;
2955 new_node->loc = node->loc;
2956 new_node->init = node->init;
2957 if (!node->set_src || MEM_P (node->set_src))
2958 new_node->set_src = NULL;
2959 else
2960 new_node->set_src = node->set_src;
2961 node2->next = new_node;
2962 node2 = new_node;
2963 }
2964 node2->next = NULL;
2965 }
2966 else
2967 {
2968 if (src_l + dst_l > vui_allocated)
2969 {
2970 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2971 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2972 vui_allocated);
2973 }
2974 vui = vui_vec;
2975
2976 /* Fill in the locations from DST. */
2977 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2978 node = node->next, jj++)
2979 {
2980 vui[jj].lc = node;
2981 vui[jj].pos_dst = jj;
2982
2983 /* Use a position larger than any sum of two valid positions. */
2984 vui[jj].pos = jj + src_l + dst_l;
2985 }
2986
2987 /* Fill in the locations from SRC. */
2988 n = dst_l;
2989 for (node = src->var_part[i].loc_chain, ii = 0; node;
2990 node = node->next, ii++)
2991 {
2992 /* Find location from NODE. */
2993 for (jj = 0; jj < dst_l; jj++)
2994 {
2995 if ((REG_P (vui[jj].lc->loc)
2996 && REG_P (node->loc)
2997 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2998 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2999 {
3000 vui[jj].pos = jj + ii;
3001 break;
3002 }
3003 }
3004 if (jj >= dst_l) /* The location has not been found. */
3005 {
3006 location_chain *new_node;
3007
3008 /* Copy the location from SRC. */
3009 new_node = new location_chain;
3010 new_node->loc = node->loc;
3011 new_node->init = node->init;
3012 if (!node->set_src || MEM_P (node->set_src))
3013 new_node->set_src = NULL;
3014 else
3015 new_node->set_src = node->set_src;
3016 vui[n].lc = new_node;
3017 vui[n].pos_dst = src_l + dst_l;
3018 vui[n].pos = ii + src_l + dst_l;
3019 n++;
3020 }
3021 }
3022
3023 if (dst_l == 2)
3024 {
3025 /* Special-case a still very common case.  For dst_l == 2,
3026 all entries dst_l ... n-1 are sorted, with vui[i].pos
3027 == i + src_l + dst_l for i >= dst_l. */
3028 if (vui[0].pos > vui[1].pos)
3029 {
3030 /* Order should be 1, 0, 2... */
3031 dst->var_part[k].loc_chain = vui[1].lc;
3032 vui[1].lc->next = vui[0].lc;
3033 if (n >= 3)
3034 {
3035 vui[0].lc->next = vui[2].lc;
3036 vui[n - 1].lc->next = NULL;
3037 }
3038 else
3039 vui[0].lc->next = NULL;
3040 ii = 3;
3041 }
3042 else
3043 {
3044 dst->var_part[k].loc_chain = vui[0].lc;
3045 if (n >= 3 && vui[2].pos < vui[1].pos)
3046 {
3047 /* Order should be 0, 2, 1, 3... */
3048 vui[0].lc->next = vui[2].lc;
3049 vui[2].lc->next = vui[1].lc;
3050 if (n >= 4)
3051 {
3052 vui[1].lc->next = vui[3].lc;
3053 vui[n - 1].lc->next = NULL;
3054 }
3055 else
3056 vui[1].lc->next = NULL;
3057 ii = 4;
3058 }
3059 else
3060 {
3061 /* Order should be 0, 1, 2... */
3062 ii = 1;
3063 vui[n - 1].lc->next = NULL;
3064 }
3065 }
3066 for (; ii < n; ii++)
3067 vui[ii - 1].lc->next = vui[ii].lc;
3068 }
3069 else
3070 {
3071 qsort (vui, n, sizeof (struct variable_union_info),
3072 variable_union_info_cmp_pos);
3073
3074 /* Reconnect the nodes in sorted order. */
3075 for (ii = 1; ii < n; ii++)
3076 vui[ii - 1].lc->next = vui[ii].lc;
3077 vui[n - 1].lc->next = NULL;
3078 dst->var_part[k].loc_chain = vui[0].lc;
3079 }
3080
3081 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3082 }
3083 i--;
3084 j--;
3085 }
3086 else if ((i >= 0 && j >= 0
3087 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3088 || i < 0)
3089 {
3090 dst->var_part[k] = dst->var_part[j];
3091 j--;
3092 }
3093 else if ((i >= 0 && j >= 0
3094 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3095 || j < 0)
3096 {
3097 location_chain **nextp;
3098
3099 /* Copy the chain from SRC. */
3100 nextp = &dst->var_part[k].loc_chain;
3101 for (node = src->var_part[i].loc_chain; node; node = node->next)
3102 {
3103 location_chain *new_lc;
3104
3105 new_lc = new location_chain;
3106 new_lc->next = NULL;
3107 new_lc->init = node->init;
3108 if (!node->set_src || MEM_P (node->set_src))
3109 new_lc->set_src = NULL;
3110 else
3111 new_lc->set_src = node->set_src;
3112 new_lc->loc = node->loc;
3113
3114 *nextp = new_lc;
3115 nextp = &new_lc->next;
3116 }
3117
3118 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3119 i--;
3120 }
3121 dst->var_part[k].cur_loc = NULL;
3122 }
3123
3124 if (flag_var_tracking_uninit)
3125 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3126 {
3127 location_chain *node, *node2;
3128 for (node = src->var_part[i].loc_chain; node; node = node->next)
3129 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3130 if (rtx_equal_p (node->loc, node2->loc))
3131 {
3132 if (node->init > node2->init)
3133 node2->init = node->init;
3134 }
3135 }
3136
3137 /* Continue traversing the hash table. */
3138 return 1;
3139 }
3140
3141 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3142
3143 static void
3144 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3145 {
3146 int i;
3147
3148 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3149 attrs_list_union (&dst->regs[i], src->regs[i]);
3150
3151 if (dst->vars == empty_shared_hash)
3152 {
3153 shared_hash_destroy (dst->vars);
3154 dst->vars = shared_hash_copy (src->vars);
3155 }
3156 else
3157 {
3158 variable_iterator_type hi;
3159 variable *var;
3160
3161 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3162 var, variable, hi)
3163 variable_union (var, dst);
3164 }
3165 }
3166
3167 /* Whether the value is currently being expanded. */
3168 #define VALUE_RECURSED_INTO(x) \
3169 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3170
3171 /* Whether no expansion was found, saving useless lookups.
3172 It must only be set when VALUE_CHANGED is clear. */
3173 #define NO_LOC_P(x) \
3174 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3175
3176 /* Whether cur_loc in the value needs to be (re)computed. */
3177 #define VALUE_CHANGED(x) \
3178 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3179 /* Whether cur_loc in the decl needs to be (re)computed. */
3180 #define DECL_CHANGED(x) TREE_VISITED (x)
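
/* (These markers reuse otherwise unused bits as pass-local scratch: the
   used, return_val and frame_related RTL flags on VALUE and DEBUG_EXPR
   rtxes, and TREE_VISITED on decls.)  */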
3181
3182 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3183 user DECLs, this means they're in changed_variables. Values and
3184 debug exprs may be left with this flag set if no user variable
3185 requires them to be evaluated. */
3186
3187 static inline void
3188 set_dv_changed (decl_or_value dv, bool newv)
3189 {
3190 switch (dv_onepart_p (dv))
3191 {
3192 case ONEPART_VALUE:
3193 if (newv)
3194 NO_LOC_P (dv_as_value (dv)) = false;
3195 VALUE_CHANGED (dv_as_value (dv)) = newv;
3196 break;
3197
3198 case ONEPART_DEXPR:
3199 if (newv)
3200 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3201 /* Fall through. */
3202
3203 default:
3204 DECL_CHANGED (dv_as_decl (dv)) = newv;
3205 break;
3206 }
3207 }
3208
3209 /* Return true if DV needs to have its cur_loc recomputed. */
3210
3211 static inline bool
3212 dv_changed_p (decl_or_value dv)
3213 {
3214 return (dv_is_value_p (dv)
3215 ? VALUE_CHANGED (dv_as_value (dv))
3216 : DECL_CHANGED (dv_as_decl (dv)));
3217 }
3218
3219 /* Return a location list node whose loc is rtx_equal to LOC, in the
3220 location list of a one-part variable or value VAR, or in that of
3221 any values recursively mentioned in the location lists. VARS must
3222 be in star-canonical form. */
3223
3224 static location_chain *
3225 find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
3226 {
3227 location_chain *node;
3228 enum rtx_code loc_code;
3229
3230 if (!var)
3231 return NULL;
3232
3233 gcc_checking_assert (var->onepart);
3234
3235 if (!var->n_var_parts)
3236 return NULL;
3237
3238 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3239
3240 loc_code = GET_CODE (loc);
3241 for (node = var->var_part[0].loc_chain; node; node = node->next)
3242 {
3243 decl_or_value dv;
3244 variable *rvar;
3245
3246 if (GET_CODE (node->loc) != loc_code)
3247 {
3248 if (GET_CODE (node->loc) != VALUE)
3249 continue;
3250 }
3251 else if (loc == node->loc)
3252 return node;
3253 else if (loc_code != VALUE)
3254 {
3255 if (rtx_equal_p (loc, node->loc))
3256 return node;
3257 continue;
3258 }
3259
3260 /* Since we're in star-canonical form, we don't need to visit
3261 non-canonical nodes: one-part variables and non-canonical
3262 values would only point back to the canonical node. */
3263 if (dv_is_value_p (var->dv)
3264 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3265 {
3266 /* Skip all subsequent VALUEs. */
3267 while (node->next && GET_CODE (node->next->loc) == VALUE)
3268 {
3269 node = node->next;
3270 gcc_checking_assert (!canon_value_cmp (node->loc,
3271 dv_as_value (var->dv)));
3272 if (loc == node->loc)
3273 return node;
3274 }
3275 continue;
3276 }
3277
3278 gcc_checking_assert (node == var->var_part[0].loc_chain);
3279 gcc_checking_assert (!node->next);
3280
3281 dv = dv_from_value (node->loc);
3282 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3283 return find_loc_in_1pdv (loc, rvar, vars);
3284 }
3285
3286 /* ??? Gotta look in cselib_val locations too. */
3287
3288 return NULL;
3289 }
3290
3291 /* Hash table iteration argument passed to variable_merge. */
3292 struct dfset_merge
3293 {
3294 /* The set in which the merge is to be inserted. */
3295 dataflow_set *dst;
3296 /* The set that we're iterating in. */
3297 dataflow_set *cur;
3298 /* The set that may contain the other dv we are to merge with. */
3299 dataflow_set *src;
3300 /* Number of onepart dvs in src. */
3301 int src_onepart_cnt;
3302 };
3303
3304 /* Insert LOC in *NODEP, if it's not there yet.  The list must be in
3305 loc_cmp order, and it is maintained as such. */
3306
3307 static void
3308 insert_into_intersection (location_chain **nodep, rtx loc,
3309 enum var_init_status status)
3310 {
3311 location_chain *node;
3312 int r;
3313
3314 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3315 if ((r = loc_cmp (node->loc, loc)) == 0)
3316 {
3317 node->init = MIN (node->init, status);
3318 return;
3319 }
3320 else if (r > 0)
3321 break;
3322
3323 node = new location_chain;
3324
3325 node->loc = loc;
3326 node->set_src = NULL;
3327 node->init = status;
3328 node->next = *nodep;
3329 *nodep = node;
3330 }
3331
3332 /* Insert in DEST the intersection of the locations present in both
3333 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3334 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3335 DSM->dst. */
3336
3337 static void
3338 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
3339 location_chain *s1node, variable *s2var)
3340 {
3341 dataflow_set *s1set = dsm->cur;
3342 dataflow_set *s2set = dsm->src;
3343 location_chain *found;
3344
3345 if (s2var)
3346 {
3347 location_chain *s2node;
3348
3349 gcc_checking_assert (s2var->onepart);
3350
3351 if (s2var->n_var_parts)
3352 {
3353 s2node = s2var->var_part[0].loc_chain;
3354
3355 for (; s1node && s2node;
3356 s1node = s1node->next, s2node = s2node->next)
3357 if (s1node->loc != s2node->loc)
3358 break;
3359 else if (s1node->loc == val)
3360 continue;
3361 else
3362 insert_into_intersection (dest, s1node->loc,
3363 MIN (s1node->init, s2node->init));
3364 }
3365 }
3366
3367 for (; s1node; s1node = s1node->next)
3368 {
3369 if (s1node->loc == val)
3370 continue;
3371
3372 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3373 shared_hash_htab (s2set->vars))))
3374 {
3375 insert_into_intersection (dest, s1node->loc,
3376 MIN (s1node->init, found->init));
3377 continue;
3378 }
3379
3380 if (GET_CODE (s1node->loc) == VALUE
3381 && !VALUE_RECURSED_INTO (s1node->loc))
3382 {
3383 decl_or_value dv = dv_from_value (s1node->loc);
3384 variable *svar = shared_hash_find (s1set->vars, dv);
3385 if (svar)
3386 {
3387 if (svar->n_var_parts == 1)
3388 {
3389 VALUE_RECURSED_INTO (s1node->loc) = true;
3390 intersect_loc_chains (val, dest, dsm,
3391 svar->var_part[0].loc_chain,
3392 s2var);
3393 VALUE_RECURSED_INTO (s1node->loc) = false;
3394 }
3395 }
3396 }
3397
3398 /* ??? gotta look in cselib_val locations too. */
3399
3400 /* ??? if the location is equivalent to any location in src,
3401 searched recursively
3402
3403 add to dst the values needed to represent the equivalence
3404
3405 telling whether locations S is equivalent to another dv's
3406 location list:
3407
3408 for each location D in the list
3409
3410 if S and D satisfy rtx_equal_p, then it is present
3411
3412 else if D is a value, recurse without cycles
3413
3414 else if S and D have the same CODE and MODE
3415
3416 for each operand oS and the corresponding oD
3417
3418 if oS and oD are not equivalent, then S and D are not equivalent
3419
3420 else if they are RTX vectors
3421
3422 if any vector oS element is not equivalent to its respective oD,
3423 then S and D are not equivalent
3424
3425 */
3426
3427
3428 }
3429 }
3430
3431 /* Return -1 if X should be before Y in a location list for a 1-part
3432 variable, 1 if Y should be before X, and 0 if they're equivalent
3433 and should not appear in the list. */
3434
3435 static int
3436 loc_cmp (rtx x, rtx y)
3437 {
3438 int i, j, r;
3439 RTX_CODE code = GET_CODE (x);
3440 const char *fmt;
3441
3442 if (x == y)
3443 return 0;
3444
3445 if (REG_P (x))
3446 {
3447 if (!REG_P (y))
3448 return -1;
3449 gcc_assert (GET_MODE (x) == GET_MODE (y));
3450 if (REGNO (x) == REGNO (y))
3451 return 0;
3452 else if (REGNO (x) < REGNO (y))
3453 return -1;
3454 else
3455 return 1;
3456 }
3457
3458 if (REG_P (y))
3459 return 1;
3460
3461 if (MEM_P (x))
3462 {
3463 if (!MEM_P (y))
3464 return -1;
3465 gcc_assert (GET_MODE (x) == GET_MODE (y));
3466 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3467 }
3468
3469 if (MEM_P (y))
3470 return 1;
3471
3472 if (GET_CODE (x) == VALUE)
3473 {
3474 if (GET_CODE (y) != VALUE)
3475 return -1;
3476 /* Don't assert the modes are the same, that is true only
3477 when not recursing. (subreg:QI (value:SI 1:1) 0)
3478 and (subreg:QI (value:DI 2:2) 0) can be compared,
3479 even when the modes are different. */
3480 if (canon_value_cmp (x, y))
3481 return -1;
3482 else
3483 return 1;
3484 }
3485
3486 if (GET_CODE (y) == VALUE)
3487 return 1;
3488
3489 /* Entry value is the least preferable kind of expression. */
3490 if (GET_CODE (x) == ENTRY_VALUE)
3491 {
3492 if (GET_CODE (y) != ENTRY_VALUE)
3493 return 1;
3494 gcc_assert (GET_MODE (x) == GET_MODE (y));
3495 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3496 }
3497
3498 if (GET_CODE (y) == ENTRY_VALUE)
3499 return -1;
3500
3501 if (GET_CODE (x) == GET_CODE (y))
3502 /* Compare operands below. */;
3503 else if (GET_CODE (x) < GET_CODE (y))
3504 return -1;
3505 else
3506 return 1;
3507
3508 gcc_assert (GET_MODE (x) == GET_MODE (y));
3509
3510 if (GET_CODE (x) == DEBUG_EXPR)
3511 {
3512 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3513 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3514 return -1;
3515 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3516 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3517 return 1;
3518 }
3519
3520 fmt = GET_RTX_FORMAT (code);
3521 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3522 switch (fmt[i])
3523 {
3524 case 'w':
3525 if (XWINT (x, i) == XWINT (y, i))
3526 break;
3527 else if (XWINT (x, i) < XWINT (y, i))
3528 return -1;
3529 else
3530 return 1;
3531
3532 case 'n':
3533 case 'i':
3534 if (XINT (x, i) == XINT (y, i))
3535 break;
3536 else if (XINT (x, i) < XINT (y, i))
3537 return -1;
3538 else
3539 return 1;
3540
3541 case 'p':
3542 r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
3543 if (r != 0)
3544 return r;
3545 break;
3546
3547 case 'V':
3548 case 'E':
3549 /* Compare the vector length first. */
3550 if (XVECLEN (x, i) == XVECLEN (y, i))
3551 /* Compare the vector elements. */;
3552 else if (XVECLEN (x, i) < XVECLEN (y, i))
3553 return -1;
3554 else
3555 return 1;
3556
3557 for (j = 0; j < XVECLEN (x, i); j++)
3558 if ((r = loc_cmp (XVECEXP (x, i, j),
3559 XVECEXP (y, i, j))))
3560 return r;
3561 break;
3562
3563 case 'e':
3564 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3565 return r;
3566 break;
3567
3568 case 'S':
3569 case 's':
3570 if (XSTR (x, i) == XSTR (y, i))
3571 break;
3572 if (!XSTR (x, i))
3573 return -1;
3574 if (!XSTR (y, i))
3575 return 1;
3576 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3577 break;
3578 else if (r < 0)
3579 return -1;
3580 else
3581 return 1;
3582
3583 case 'u':
3584 /* These are just backpointers, so they don't matter. */
3585 break;
3586
3587 case '0':
3588 case 't':
3589 break;
3590
3591 /* It is believed that rtx's at this level will never
3592 contain anything but integers and other rtx's,
3593 except for within LABEL_REFs and SYMBOL_REFs. */
3594 default:
3595 gcc_unreachable ();
3596 }
3597 if (CONST_WIDE_INT_P (x))
3598 {
3599 /* Compare the vector length first. */
3600 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3601 return 1;
3602 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3603 return -1;
3604
3605 /* Compare the vector elements. */;
3606 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3607 {
3608 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3609 return -1;
3610 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3611 return 1;
3612 }
3613 }
3614
3615 return 0;
3616 }
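
/* To summarize the resulting order (a restatement of the rules above, not
   an extra invariant): REGs sort first, by REGNO; then MEMs, recursively by
   address; then VALUEs, by canon_value_cmp; then all remaining codes by
   rtx code and operands; with ENTRY_VALUEs sorting last, as the least
   preferable locations.  */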
3617
3618 /* Check the order of entries in one-part variables. */
3619
3620 int
3621 canonicalize_loc_order_check (variable **slot,
3622 dataflow_set *data ATTRIBUTE_UNUSED)
3623 {
3624 variable *var = *slot;
3625 location_chain *node, *next;
3626
3627 #ifdef ENABLE_RTL_CHECKING
3628 int i;
3629 for (i = 0; i < var->n_var_parts; i++)
3630 gcc_assert (var->var_part[i].cur_loc == NULL);
3631 gcc_assert (!var->in_changed_variables);
3632 #endif
3633
3634 if (!var->onepart)
3635 return 1;
3636
3637 gcc_assert (var->n_var_parts == 1);
3638 node = var->var_part[0].loc_chain;
3639 gcc_assert (node);
3640
3641 while ((next = node->next))
3642 {
3643 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3644 node = next;
3645 }
3646
3647 return 1;
3648 }
3649
3650 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3651 more likely to be chosen as canonical for an equivalence set.
3652 Ensure less likely values can reach more likely neighbors, making
3653 the connections bidirectional. */
3654
3655 int
3656 canonicalize_values_mark (variable **slot, dataflow_set *set)
3657 {
3658 variable *var = *slot;
3659 decl_or_value dv = var->dv;
3660 rtx val;
3661 location_chain *node;
3662
3663 if (!dv_is_value_p (dv))
3664 return 1;
3665
3666 gcc_checking_assert (var->n_var_parts == 1);
3667
3668 val = dv_as_value (dv);
3669
3670 for (node = var->var_part[0].loc_chain; node; node = node->next)
3671 if (GET_CODE (node->loc) == VALUE)
3672 {
3673 if (canon_value_cmp (node->loc, val))
3674 VALUE_RECURSED_INTO (val) = true;
3675 else
3676 {
3677 decl_or_value odv = dv_from_value (node->loc);
3678 variable **oslot;
3679 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3680
3681 set_slot_part (set, val, oslot, odv, 0,
3682 node->init, NULL_RTX);
3683
3684 VALUE_RECURSED_INTO (node->loc) = true;
3685 }
3686 }
3687
3688 return 1;
3689 }
3690
3691 /* Remove redundant entries from equivalence lists in onepart
3692 variables, canonicalizing equivalence sets into star shapes. */
3693
3694 int
3695 canonicalize_values_star (variable **slot, dataflow_set *set)
3696 {
3697 variable *var = *slot;
3698 decl_or_value dv = var->dv;
3699 location_chain *node;
3700 decl_or_value cdv;
3701 rtx val, cval;
3702 variable **cslot;
3703 bool has_value;
3704 bool has_marks;
3705
3706 if (!var->onepart)
3707 return 1;
3708
3709 gcc_checking_assert (var->n_var_parts == 1);
3710
3711 if (dv_is_value_p (dv))
3712 {
3713 cval = dv_as_value (dv);
3714 if (!VALUE_RECURSED_INTO (cval))
3715 return 1;
3716 VALUE_RECURSED_INTO (cval) = false;
3717 }
3718 else
3719 cval = NULL_RTX;
3720
3721 restart:
3722 val = cval;
3723 has_value = false;
3724 has_marks = false;
3725
3726 gcc_assert (var->n_var_parts == 1);
3727
3728 for (node = var->var_part[0].loc_chain; node; node = node->next)
3729 if (GET_CODE (node->loc) == VALUE)
3730 {
3731 has_value = true;
3732 if (VALUE_RECURSED_INTO (node->loc))
3733 has_marks = true;
3734 if (canon_value_cmp (node->loc, cval))
3735 cval = node->loc;
3736 }
3737
3738 if (!has_value)
3739 return 1;
3740
3741 if (cval == val)
3742 {
3743 if (!has_marks || dv_is_decl_p (dv))
3744 return 1;
3745
3746 /* Keep it marked so that we revisit it, either after visiting a
3747 child node, or after visiting a new parent that might be
3748 found out. */
3749 VALUE_RECURSED_INTO (val) = true;
3750
3751 for (node = var->var_part[0].loc_chain; node; node = node->next)
3752 if (GET_CODE (node->loc) == VALUE
3753 && VALUE_RECURSED_INTO (node->loc))
3754 {
3755 cval = node->loc;
3756 restart_with_cval:
3757 VALUE_RECURSED_INTO (cval) = false;
3758 dv = dv_from_value (cval);
3759 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3760 if (!slot)
3761 {
3762 gcc_assert (dv_is_decl_p (var->dv));
3763 /* The canonical value was reset and dropped.
3764 Remove it. */
3765 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3766 return 1;
3767 }
3768 var = *slot;
3769 gcc_assert (dv_is_value_p (var->dv));
3770 if (var->n_var_parts == 0)
3771 return 1;
3772 gcc_assert (var->n_var_parts == 1);
3773 goto restart;
3774 }
3775
3776 VALUE_RECURSED_INTO (val) = false;
3777
3778 return 1;
3779 }
3780
3781 /* Push values to the canonical one. */
3782 cdv = dv_from_value (cval);
3783 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3784
3785 for (node = var->var_part[0].loc_chain; node; node = node->next)
3786 if (node->loc != cval)
3787 {
3788 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3789 node->init, NULL_RTX);
3790 if (GET_CODE (node->loc) == VALUE)
3791 {
3792 decl_or_value ndv = dv_from_value (node->loc);
3793
3794 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3795 NO_INSERT);
3796
3797 if (canon_value_cmp (node->loc, val))
3798 {
3799 /* If it could have been a local minimum, it's not any more,
3800 since it's now neighbor to cval, so it may have to push
3801 to it. Conversely, if it wouldn't have prevailed over
3802 val, then whatever mark it has is fine: if it was to
3803 push, it will now push to a more canonical node, but if
3804 it wasn't, then it has already pushed any values it might
3805 have to. */
3806 VALUE_RECURSED_INTO (node->loc) = true;
3807 /* Make sure we visit node->loc by ensuring that cval is
3808 visited too. */
3809 VALUE_RECURSED_INTO (cval) = true;
3810 }
3811 else if (!VALUE_RECURSED_INTO (node->loc))
3812 /* If we have no need to "recurse" into this node, it's
3813 already "canonicalized", so drop the link to the old
3814 parent. */
3815 clobber_variable_part (set, cval, ndv, 0, NULL);
3816 }
3817 else if (GET_CODE (node->loc) == REG)
3818 {
3819 attrs *list = set->regs[REGNO (node->loc)], **listp;
3820
3821 /* Change an existing attribute referring to dv so that it
3822 refers to cdv, removing any duplicate this might
3823 introduce, and checking that no previous duplicates
3824 existed, all in a single pass. */
3825
3826 while (list)
3827 {
3828 if (list->offset == 0
3829 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3830 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3831 break;
3832
3833 list = list->next;
3834 }
3835
3836 gcc_assert (list);
3837 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3838 {
3839 list->dv = cdv;
3840 for (listp = &list->next; (list = *listp); listp = &list->next)
3841 {
3842 if (list->offset)
3843 continue;
3844
3845 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3846 {
3847 *listp = list->next;
3848 delete list;
3849 list = *listp;
3850 break;
3851 }
3852
3853 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3854 }
3855 }
3856 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3857 {
3858 for (listp = &list->next; (list = *listp); listp = &list->next)
3859 {
3860 if (list->offset)
3861 continue;
3862
3863 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3864 {
3865 *listp = list->next;
3866 delete list;
3867 list = *listp;
3868 break;
3869 }
3870
3871 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3872 }
3873 }
3874 else
3875 gcc_unreachable ();
3876
3877 if (flag_checking)
3878 while (list)
3879 {
3880 if (list->offset == 0
3881 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3882 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3883 gcc_unreachable ();
3884
3885 list = list->next;
3886 }
3887 }
3888 }
3889
3890 if (val)
3891 set_slot_part (set, val, cslot, cdv, 0,
3892 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3893
3894 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3895
3896 /* Variable may have been unshared. */
3897 var = *slot;
3898 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3899 && var->var_part[0].loc_chain->next == NULL);
3900
3901 if (VALUE_RECURSED_INTO (cval))
3902 goto restart_with_cval;
3903
3904 return 1;
3905 }
3906
3907 /* Bind one-part variables to the canonical value in an equivalence
3908 set. Not doing this causes dataflow convergence failure in rare
3909 circumstances, see PR42873. Unfortunately we can't do this
3910 efficiently as part of canonicalize_values_star, since we may not
3911 have determined or even seen the canonical value of a set when we
3912 get to a variable that references another member of the set. */
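/* For illustration (hypothetical VALUEs V0 and V1, not from the original
   comments): if a one-part decl's only location is V1, and V1's own
   location list starts with a VALUE V0 that canon_value_cmp prefers over
   V1, the code below rebinds the decl to V0, so that equivalent decls
   converge on the same canonical VALUE. */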
3913
3914 int
3915 canonicalize_vars_star (variable **slot, dataflow_set *set)
3916 {
3917 variable *var = *slot;
3918 decl_or_value dv = var->dv;
3919 location_chain *node;
3920 rtx cval;
3921 decl_or_value cdv;
3922 variable **cslot;
3923 variable *cvar;
3924 location_chain *cnode;
3925
3926 if (!var->onepart || var->onepart == ONEPART_VALUE)
3927 return 1;
3928
3929 gcc_assert (var->n_var_parts == 1);
3930
3931 node = var->var_part[0].loc_chain;
3932
3933 if (GET_CODE (node->loc) != VALUE)
3934 return 1;
3935
3936 gcc_assert (!node->next);
3937 cval = node->loc;
3938
3939 /* Push values to the canonical one. */
3940 cdv = dv_from_value (cval);
3941 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3942 if (!cslot)
3943 return 1;
3944 cvar = *cslot;
3945 gcc_assert (cvar->n_var_parts == 1);
3946
3947 cnode = cvar->var_part[0].loc_chain;
3948
3949 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3950 that are not more canonical than it. */
3951 if (GET_CODE (cnode->loc) != VALUE
3952 || !canon_value_cmp (cnode->loc, cval))
3953 return 1;
3954
3955 /* CVAL was found to be non-canonical. Change the variable to point
3956 to the canonical VALUE. */
3957 gcc_assert (!cnode->next);
3958 cval = cnode->loc;
3959
3960 slot = set_slot_part (set, cval, slot, dv, 0,
3961 node->init, node->set_src);
3962 clobber_slot_part (set, cval, slot, 0, node->set_src);
3963
3964 return 1;
3965 }
3966
3967 /* Combine variable or value S1VAR (in DSM->cur) with the
3968 corresponding entry in DSM->src. Multi-part variables are combined
3969 with variable_union, whereas onepart dvs are combined with
3970 intersection. */
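/* A hypothetical example of the intersection: if a one-part dv has the
   locations {reg1, mem1} in DSM->cur and {reg1, reg2} in DSM->src, only
   reg1 (plus whatever equivalences intersect_loc_chains discovers through
   VALUEs) survives in DSM->dst. */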
3971
3972 static int
3973 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3974 {
3975 dataflow_set *dst = dsm->dst;
3976 variable **dstslot;
3977 variable *s2var, *dvar = NULL;
3978 decl_or_value dv = s1var->dv;
3979 onepart_enum onepart = s1var->onepart;
3980 rtx val;
3981 hashval_t dvhash;
3982 location_chain *node, **nodep;
3983
3984 /* If the incoming onepart variable has an empty location list, then
3985 the intersection will be just as empty. For other variables,
3986 it's always union. */
3987 gcc_checking_assert (s1var->n_var_parts
3988 && s1var->var_part[0].loc_chain);
3989
3990 if (!onepart)
3991 return variable_union (s1var, dst);
3992
3993 gcc_checking_assert (s1var->n_var_parts == 1);
3994
3995 dvhash = dv_htab_hash (dv);
3996 if (dv_is_value_p (dv))
3997 val = dv_as_value (dv);
3998 else
3999 val = NULL;
4000
4001 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4002 if (!s2var)
4003 {
4004 dst_can_be_shared = false;
4005 return 1;
4006 }
4007
4008 dsm->src_onepart_cnt--;
4009 gcc_assert (s2var->var_part[0].loc_chain
4010 && s2var->onepart == onepart
4011 && s2var->n_var_parts == 1);
4012
4013 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4014 if (dstslot)
4015 {
4016 dvar = *dstslot;
4017 gcc_assert (dvar->refcount == 1
4018 && dvar->onepart == onepart
4019 && dvar->n_var_parts == 1);
4020 nodep = &dvar->var_part[0].loc_chain;
4021 }
4022 else
4023 {
4024 nodep = &node;
4025 node = NULL;
4026 }
4027
4028 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4029 {
4030 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4031 dvhash, INSERT);
4032 *dstslot = dvar = s2var;
4033 dvar->refcount++;
4034 }
4035 else
4036 {
4037 dst_can_be_shared = false;
4038
4039 intersect_loc_chains (val, nodep, dsm,
4040 s1var->var_part[0].loc_chain, s2var);
4041
4042 if (!dstslot)
4043 {
4044 if (node)
4045 {
4046 dvar = onepart_pool_allocate (onepart);
4047 dvar->dv = dv;
4048 dvar->refcount = 1;
4049 dvar->n_var_parts = 1;
4050 dvar->onepart = onepart;
4051 dvar->in_changed_variables = false;
4052 dvar->var_part[0].loc_chain = node;
4053 dvar->var_part[0].cur_loc = NULL;
4054 if (onepart)
4055 VAR_LOC_1PAUX (dvar) = NULL;
4056 else
4057 VAR_PART_OFFSET (dvar, 0) = 0;
4058
4059 dstslot
4060 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4061 INSERT);
4062 gcc_assert (!*dstslot);
4063 *dstslot = dvar;
4064 }
4065 else
4066 return 1;
4067 }
4068 }
4069
4070 nodep = &dvar->var_part[0].loc_chain;
4071 while ((node = *nodep))
4072 {
4073 location_chain **nextp = &node->next;
4074
4075 if (GET_CODE (node->loc) == REG)
4076 {
4077 attrs *list;
4078
4079 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4080 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4081 && dv_is_value_p (list->dv))
4082 break;
4083
4084 if (!list)
4085 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4086 dv, 0, node->loc);
4087 /* If this value became canonical for another value that had
4088 this register, we want to leave it alone. */
4089 else if (dv_as_value (list->dv) != val)
4090 {
4091 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4092 dstslot, dv, 0,
4093 node->init, NULL_RTX);
4094 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4095
4096 /* Since nextp points into the removed node, we can't
4097 use it. The pointer to the next node moved to nodep.
4098 However, if the variable we're walking is unshared
4099 during our walk, we'll keep walking the location list
4100 of the previously-shared variable, in which case the
4101 node won't have been removed, and we'll want to skip
4102 it. That's why we test *nodep here. */
4103 if (*nodep != node)
4104 nextp = nodep;
4105 }
4106 }
4107 else
4108 /* Canonicalization puts registers first, so we don't have to
4109 walk it all. */
4110 break;
4111 nodep = nextp;
4112 }
4113
4114 if (dvar != *dstslot)
4115 dvar = *dstslot;
4116 nodep = &dvar->var_part[0].loc_chain;
4117
4118 if (val)
4119 {
4120 /* Mark all referenced nodes for canonicalization, and make sure
4121 we have mutual equivalence links. */
4122 VALUE_RECURSED_INTO (val) = true;
4123 for (node = *nodep; node; node = node->next)
4124 if (GET_CODE (node->loc) == VALUE)
4125 {
4126 VALUE_RECURSED_INTO (node->loc) = true;
4127 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4128 node->init, NULL, INSERT);
4129 }
4130
4131 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4132 gcc_assert (*dstslot == dvar);
4133 canonicalize_values_star (dstslot, dst);
4134 gcc_checking_assert (dstslot
4135 == shared_hash_find_slot_noinsert_1 (dst->vars,
4136 dv, dvhash));
4137 dvar = *dstslot;
4138 }
4139 else
4140 {
4141 bool has_value = false, has_other = false;
4142
4143 /* If we have one value and anything else, we're going to
4144 canonicalize this, so make sure all values have an entry in
4145 the table and are marked for canonicalization. */
4146 for (node = *nodep; node; node = node->next)
4147 {
4148 if (GET_CODE (node->loc) == VALUE)
4149 {
4150 /* If this was marked during register canonicalization,
4151 we know we have to canonicalize values. */
4152 if (has_value)
4153 has_other = true;
4154 has_value = true;
4155 if (has_other)
4156 break;
4157 }
4158 else
4159 {
4160 has_other = true;
4161 if (has_value)
4162 break;
4163 }
4164 }
4165
4166 if (has_value && has_other)
4167 {
4168 for (node = *nodep; node; node = node->next)
4169 {
4170 if (GET_CODE (node->loc) == VALUE)
4171 {
4172 decl_or_value dv = dv_from_value (node->loc);
4173 variable **slot = NULL;
4174
4175 if (shared_hash_shared (dst->vars))
4176 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4177 if (!slot)
4178 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4179 INSERT);
4180 if (!*slot)
4181 {
4182 variable *var = onepart_pool_allocate (ONEPART_VALUE);
4183 var->dv = dv;
4184 var->refcount = 1;
4185 var->n_var_parts = 1;
4186 var->onepart = ONEPART_VALUE;
4187 var->in_changed_variables = false;
4188 var->var_part[0].loc_chain = NULL;
4189 var->var_part[0].cur_loc = NULL;
4190 VAR_LOC_1PAUX (var) = NULL;
4191 *slot = var;
4192 }
4193
4194 VALUE_RECURSED_INTO (node->loc) = true;
4195 }
4196 }
4197
4198 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4199 gcc_assert (*dstslot == dvar);
4200 canonicalize_values_star (dstslot, dst);
4201 gcc_checking_assert (dstslot
4202 == shared_hash_find_slot_noinsert_1 (dst->vars,
4203 dv, dvhash));
4204 dvar = *dstslot;
4205 }
4206 }
4207
4208 if (!onepart_variable_different_p (dvar, s2var))
4209 {
4210 variable_htab_free (dvar);
4211 *dstslot = dvar = s2var;
4212 dvar->refcount++;
4213 }
4214 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4215 {
4216 variable_htab_free (dvar);
4217 *dstslot = dvar = s1var;
4218 dvar->refcount++;
4219 dst_can_be_shared = false;
4220 }
4221 else
4222 dst_can_be_shared = false;
4223
4224 return 1;
4225 }
4226
4227 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4228 multi-part variable. Unions of multi-part variables and
4229 intersections of one-part ones will be handled in
4230 variable_merge_over_cur(). */
4231
4232 static int
4233 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4234 {
4235 dataflow_set *dst = dsm->dst;
4236 decl_or_value dv = s2var->dv;
4237
4238 if (!s2var->onepart)
4239 {
4240 variable **dstp = shared_hash_find_slot (dst->vars, dv);
4241 *dstp = s2var;
4242 s2var->refcount++;
4243 return 1;
4244 }
4245
4246 dsm->src_onepart_cnt++;
4247 return 1;
4248 }
4249
4250 /* Combine dataflow set information from SRC2 into DST; the original
4251 contents of DST are carried over as the current set and merged back. */
4252
4253 static void
4254 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4255 {
4256 dataflow_set cur = *dst;
4257 dataflow_set *src1 = &cur;
4258 struct dfset_merge dsm;
4259 int i;
4260 size_t src1_elems, src2_elems;
4261 variable_iterator_type hi;
4262 variable *var;
4263
4264 src1_elems = shared_hash_htab (src1->vars)->elements ();
4265 src2_elems = shared_hash_htab (src2->vars)->elements ();
4266 dataflow_set_init (dst);
4267 dst->stack_adjust = cur.stack_adjust;
4268 shared_hash_destroy (dst->vars);
4269 dst->vars = new shared_hash;
4270 dst->vars->refcount = 1;
4271 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4272
4273 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4274 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4275
4276 dsm.dst = dst;
4277 dsm.src = src2;
4278 dsm.cur = src1;
4279 dsm.src_onepart_cnt = 0;
4280
4281 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4282 var, variable, hi)
4283 variable_merge_over_src (var, &dsm);
4284 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4285 var, variable, hi)
4286 variable_merge_over_cur (var, &dsm);
4287
4288 if (dsm.src_onepart_cnt)
4289 dst_can_be_shared = false;
4290
4291 dataflow_set_destroy (src1);
4292 }
4293
4294 /* Mark register equivalences. */
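/* A hypothetical example: if hard register r3 carries VALUEs V5 and V2 of
   the same mode at offset 0, the more canonical of the two (per
   canon_value_cmp) becomes the equivalence target of the other, and the
   affected entries are then re-canonicalized via canonicalize_values_star. */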
4295
4296 static void
4297 dataflow_set_equiv_regs (dataflow_set *set)
4298 {
4299 int i;
4300 attrs *list, **listp;
4301
4302 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4303 {
4304 rtx canon[NUM_MACHINE_MODES];
4305
4306 /* If the list is empty or one entry, no need to canonicalize
4307 anything. */
4308 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4309 continue;
4310
4311 memset (canon, 0, sizeof (canon));
4312
4313 for (list = set->regs[i]; list; list = list->next)
4314 if (list->offset == 0 && dv_is_value_p (list->dv))
4315 {
4316 rtx val = dv_as_value (list->dv);
4317 rtx *cvalp = &canon[(int)GET_MODE (val)];
4318 rtx cval = *cvalp;
4319
4320 if (canon_value_cmp (val, cval))
4321 *cvalp = val;
4322 }
4323
4324 for (list = set->regs[i]; list; list = list->next)
4325 if (list->offset == 0 && dv_onepart_p (list->dv))
4326 {
4327 rtx cval = canon[(int)GET_MODE (list->loc)];
4328
4329 if (!cval)
4330 continue;
4331
4332 if (dv_is_value_p (list->dv))
4333 {
4334 rtx val = dv_as_value (list->dv);
4335
4336 if (val == cval)
4337 continue;
4338
4339 VALUE_RECURSED_INTO (val) = true;
4340 set_variable_part (set, val, dv_from_value (cval), 0,
4341 VAR_INIT_STATUS_INITIALIZED,
4342 NULL, NO_INSERT);
4343 }
4344
4345 VALUE_RECURSED_INTO (cval) = true;
4346 set_variable_part (set, cval, list->dv, 0,
4347 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4348 }
4349
4350 for (listp = &set->regs[i]; (list = *listp);
4351 listp = list ? &list->next : listp)
4352 if (list->offset == 0 && dv_onepart_p (list->dv))
4353 {
4354 rtx cval = canon[(int)GET_MODE (list->loc)];
4355 variable **slot;
4356
4357 if (!cval)
4358 continue;
4359
4360 if (dv_is_value_p (list->dv))
4361 {
4362 rtx val = dv_as_value (list->dv);
4363 if (!VALUE_RECURSED_INTO (val))
4364 continue;
4365 }
4366
4367 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4368 canonicalize_values_star (slot, set);
4369 if (*listp != list)
4370 list = NULL;
4371 }
4372 }
4373 }
4374
4375 /* Remove any redundant values in the location list of VAR, which must
4376 be unshared and 1-part. */
4377
4378 static void
4379 remove_duplicate_values (variable *var)
4380 {
4381 location_chain *node, **nodep;
4382
4383 gcc_assert (var->onepart);
4384 gcc_assert (var->n_var_parts == 1);
4385 gcc_assert (var->refcount == 1);
4386
4387 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4388 {
4389 if (GET_CODE (node->loc) == VALUE)
4390 {
4391 if (VALUE_RECURSED_INTO (node->loc))
4392 {
4393 /* Remove duplicate value node. */
4394 *nodep = node->next;
4395 delete node;
4396 continue;
4397 }
4398 else
4399 VALUE_RECURSED_INTO (node->loc) = true;
4400 }
4401 nodep = &node->next;
4402 }
4403
4404 for (node = var->var_part[0].loc_chain; node; node = node->next)
4405 if (GET_CODE (node->loc) == VALUE)
4406 {
4407 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4408 VALUE_RECURSED_INTO (node->loc) = false;
4409 }
4410 }
4411
4412
4413 /* Hash table iteration argument passed to variable_post_merge. */
4414 struct dfset_post_merge
4415 {
4416 /* The new input set for the current block. */
4417 dataflow_set *set;
4418 /* Pointer to the permanent input set for the current block, or
4419 NULL. */
4420 dataflow_set **permp;
4421 };
4422
4423 /* Create values for incoming expressions associated with one-part
4424 variables that don't have value numbers for them. */
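/* A hypothetical illustration: after a merge, a tracked decl may be left
   with a bare register location such as (reg:SI 3).  The walk below looks
   up or creates a preserved VALUE for that register (recording it in the
   permanent set when newly created) and replaces the register in the
   decl's location list with that VALUE. */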
4425
4426 int
4427 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4428 {
4429 dataflow_set *set = dfpm->set;
4430 variable *var = *slot;
4431 location_chain *node;
4432
4433 if (!var->onepart || !var->n_var_parts)
4434 return 1;
4435
4436 gcc_assert (var->n_var_parts == 1);
4437
4438 if (dv_is_decl_p (var->dv))
4439 {
4440 bool check_dupes = false;
4441
4442 restart:
4443 for (node = var->var_part[0].loc_chain; node; node = node->next)
4444 {
4445 if (GET_CODE (node->loc) == VALUE)
4446 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4447 else if (GET_CODE (node->loc) == REG)
4448 {
4449 attrs *att, **attp, **curp = NULL;
4450
4451 if (var->refcount != 1)
4452 {
4453 slot = unshare_variable (set, slot, var,
4454 VAR_INIT_STATUS_INITIALIZED);
4455 var = *slot;
4456 goto restart;
4457 }
4458
4459 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4460 attp = &att->next)
4461 if (att->offset == 0
4462 && GET_MODE (att->loc) == GET_MODE (node->loc))
4463 {
4464 if (dv_is_value_p (att->dv))
4465 {
4466 rtx cval = dv_as_value (att->dv);
4467 node->loc = cval;
4468 check_dupes = true;
4469 break;
4470 }
4471 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4472 curp = attp;
4473 }
4474
4475 if (!curp)
4476 {
4477 curp = attp;
4478 while (*curp)
4479 if ((*curp)->offset == 0
4480 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4481 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4482 break;
4483 else
4484 curp = &(*curp)->next;
4485 gcc_assert (*curp);
4486 }
4487
4488 if (!att)
4489 {
4490 decl_or_value cdv;
4491 rtx cval;
4492
4493 if (!*dfpm->permp)
4494 {
4495 *dfpm->permp = XNEW (dataflow_set);
4496 dataflow_set_init (*dfpm->permp);
4497 }
4498
4499 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4500 att; att = att->next)
4501 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4502 {
4503 gcc_assert (att->offset == 0
4504 && dv_is_value_p (att->dv));
4505 val_reset (set, att->dv);
4506 break;
4507 }
4508
4509 if (att)
4510 {
4511 cdv = att->dv;
4512 cval = dv_as_value (cdv);
4513 }
4514 else
4515 {
4516 /* Create a unique value to hold this register,
4517 that ought to be found and reused in
4518 subsequent rounds. */
4519 cselib_val *v;
4520 gcc_assert (!cselib_lookup (node->loc,
4521 GET_MODE (node->loc), 0,
4522 VOIDmode));
4523 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4524 VOIDmode);
4525 cselib_preserve_value (v);
4526 cselib_invalidate_rtx (node->loc);
4527 cval = v->val_rtx;
4528 cdv = dv_from_value (cval);
4529 if (dump_file)
4530 fprintf (dump_file,
4531 "Created new value %u:%u for reg %i\n",
4532 v->uid, v->hash, REGNO (node->loc));
4533 }
4534
4535 var_reg_decl_set (*dfpm->permp, node->loc,
4536 VAR_INIT_STATUS_INITIALIZED,
4537 cdv, 0, NULL, INSERT);
4538
4539 node->loc = cval;
4540 check_dupes = true;
4541 }
4542
4543 /* Remove attribute referring to the decl, which now
4544 uses the value for the register, already existing or
4545 to be added when we bring perm in. */
4546 att = *curp;
4547 *curp = att->next;
4548 delete att;
4549 }
4550 }
4551
4552 if (check_dupes)
4553 remove_duplicate_values (var);
4554 }
4555
4556 return 1;
4557 }
4558
4559 /* Reset values in the permanent set that are not associated with the
4560 chosen expression. */
4561
4562 int
4563 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4564 {
4565 dataflow_set *set = dfpm->set;
4566 variable *pvar = *pslot, *var;
4567 location_chain *pnode;
4568 decl_or_value dv;
4569 attrs *att;
4570
4571 gcc_assert (dv_is_value_p (pvar->dv)
4572 && pvar->n_var_parts == 1);
4573 pnode = pvar->var_part[0].loc_chain;
4574 gcc_assert (pnode
4575 && !pnode->next
4576 && REG_P (pnode->loc));
4577
4578 dv = pvar->dv;
4579
4580 var = shared_hash_find (set->vars, dv);
4581 if (var)
4582 {
4583 /* Although variable_post_merge_new_vals may have made decls
4584 non-star-canonical, values that pre-existed in canonical form
4585 remain canonical, and newly-created values reference a single
4586 REG, so they are canonical as well. Since VAR has the
4587 location list for a VALUE, using find_loc_in_1pdv for it is
4588 fine, since VALUEs don't map back to DECLs. */
4589 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4590 return 1;
4591 val_reset (set, dv);
4592 }
4593
4594 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4595 if (att->offset == 0
4596 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4597 && dv_is_value_p (att->dv))
4598 break;
4599
4600 /* If there is a value associated with this register already, create
4601 an equivalence. */
4602 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4603 {
4604 rtx cval = dv_as_value (att->dv);
4605 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4606 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4607 NULL, INSERT);
4608 }
4609 else if (!att)
4610 {
4611 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4612 dv, 0, pnode->loc);
4613 variable_union (pvar, set);
4614 }
4615
4616 return 1;
4617 }
4618
4619 /* After a merge, create values for incoming expressions, process the
4620 permanent set, and canonicalize values and variables. */
4621
4622 static void
4623 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4624 {
4625 struct dfset_post_merge dfpm;
4626
4627 dfpm.set = set;
4628 dfpm.permp = permp;
4629
4630 shared_hash_htab (set->vars)
4631 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4632 if (*permp)
4633 shared_hash_htab ((*permp)->vars)
4634 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4635 shared_hash_htab (set->vars)
4636 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4637 shared_hash_htab (set->vars)
4638 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4639 }
4640
4641 /* Return a node whose loc is a MEM that refers to EXPR in the
4642 location list of a one-part variable or value VAR, or in that of
4643 any values recursively mentioned in the location lists. */
4644
4645 static location_chain *
4646 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4647 {
4648 location_chain *node;
4649 decl_or_value dv;
4650 variable *var;
4651 location_chain *where = NULL;
4652
4653 if (!val)
4654 return NULL;
4655
4656 gcc_assert (GET_CODE (val) == VALUE
4657 && !VALUE_RECURSED_INTO (val));
4658
4659 dv = dv_from_value (val);
4660 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4661
4662 if (!var)
4663 return NULL;
4664
4665 gcc_assert (var->onepart);
4666
4667 if (!var->n_var_parts)
4668 return NULL;
4669
4670 VALUE_RECURSED_INTO (val) = true;
4671
4672 for (node = var->var_part[0].loc_chain; node; node = node->next)
4673 if (MEM_P (node->loc)
4674 && MEM_EXPR (node->loc) == expr
4675 && int_mem_offset (node->loc) == 0)
4676 {
4677 where = node;
4678 break;
4679 }
4680 else if (GET_CODE (node->loc) == VALUE
4681 && !VALUE_RECURSED_INTO (node->loc)
4682 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4683 break;
4684
4685 VALUE_RECURSED_INTO (val) = false;
4686
4687 return where;
4688 }
4689
4690 /* Return TRUE if the value of MEM may vary across a call. */
4691
4692 static bool
4693 mem_dies_at_call (rtx mem)
4694 {
4695 tree expr = MEM_EXPR (mem);
4696 tree decl;
4697
4698 if (!expr)
4699 return true;
4700
4701 decl = get_base_address (expr);
4702
4703 if (!decl)
4704 return true;
4705
4706 if (!DECL_P (decl))
4707 return true;
4708
4709 return (may_be_aliased (decl)
4710 || (!TREE_READONLY (decl) && is_global_var (decl)));
4711 }
4712
4713 /* Remove all MEMs from the location list of a hash table entry for a
4714 one-part variable, except those whose MEM attributes map back to
4715 the variable itself, directly or within a VALUE. */
4716
4717 int
4718 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4719 {
4720 variable *var = *slot;
4721
4722 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4723 {
4724 tree decl = dv_as_decl (var->dv);
4725 location_chain *loc, **locp;
4726 bool changed = false;
4727
4728 if (!var->n_var_parts)
4729 return 1;
4730
4731 gcc_assert (var->n_var_parts == 1);
4732
4733 if (shared_var_p (var, set->vars))
4734 {
4735 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4736 {
4737 /* We want to remove dying MEMs that don't refer to DECL. */
4738 if (GET_CODE (loc->loc) == MEM
4739 && (MEM_EXPR (loc->loc) != decl
4740 || int_mem_offset (loc->loc) != 0)
4741 && mem_dies_at_call (loc->loc))
4742 break;
4743 /* We want to move here MEMs that do refer to DECL. */
4744 else if (GET_CODE (loc->loc) == VALUE
4745 && find_mem_expr_in_1pdv (decl, loc->loc,
4746 shared_hash_htab (set->vars)))
4747 break;
4748 }
4749
4750 if (!loc)
4751 return 1;
4752
4753 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4754 var = *slot;
4755 gcc_assert (var->n_var_parts == 1);
4756 }
4757
4758 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4759 loc; loc = *locp)
4760 {
4761 rtx old_loc = loc->loc;
4762 if (GET_CODE (old_loc) == VALUE)
4763 {
4764 location_chain *mem_node
4765 = find_mem_expr_in_1pdv (decl, loc->loc,
4766 shared_hash_htab (set->vars));
4767
4768 /* ??? This picks up only one out of multiple MEMs that
4769 refer to the same variable. Do we ever need to be
4770 concerned about dealing with more than one, or, given
4771 that they should all map to the same variable
4772 location, their addresses will have been merged and
4773 they will be regarded as equivalent? */
4774 if (mem_node)
4775 {
4776 loc->loc = mem_node->loc;
4777 loc->set_src = mem_node->set_src;
4778 loc->init = MIN (loc->init, mem_node->init);
4779 }
4780 }
4781
4782 if (GET_CODE (loc->loc) != MEM
4783 || (MEM_EXPR (loc->loc) == decl
4784 && int_mem_offset (loc->loc) == 0)
4785 || !mem_dies_at_call (loc->loc))
4786 {
4787 if (old_loc != loc->loc && emit_notes)
4788 {
4789 if (old_loc == var->var_part[0].cur_loc)
4790 {
4791 changed = true;
4792 var->var_part[0].cur_loc = NULL;
4793 }
4794 }
4795 locp = &loc->next;
4796 continue;
4797 }
4798
4799 if (emit_notes)
4800 {
4801 if (old_loc == var->var_part[0].cur_loc)
4802 {
4803 changed = true;
4804 var->var_part[0].cur_loc = NULL;
4805 }
4806 }
4807 *locp = loc->next;
4808 delete loc;
4809 }
4810
4811 if (!var->var_part[0].loc_chain)
4812 {
4813 var->n_var_parts--;
4814 changed = true;
4815 }
4816 if (changed)
4817 variable_was_changed (var, set);
4818 }
4819
4820 return 1;
4821 }
4822
4823 /* Remove all MEMs from the location list of a hash table entry for a
4824 onepart variable. */
4825
4826 int
4827 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4828 {
4829 variable *var = *slot;
4830
4831 if (var->onepart != NOT_ONEPART)
4832 {
4833 location_chain *loc, **locp;
4834 bool changed = false;
4835 rtx cur_loc;
4836
4837 gcc_assert (var->n_var_parts == 1);
4838
4839 if (shared_var_p (var, set->vars))
4840 {
4841 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4842 if (GET_CODE (loc->loc) == MEM
4843 && mem_dies_at_call (loc->loc))
4844 break;
4845
4846 if (!loc)
4847 return 1;
4848
4849 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4850 var = *slot;
4851 gcc_assert (var->n_var_parts == 1);
4852 }
4853
4854 if (VAR_LOC_1PAUX (var))
4855 cur_loc = VAR_LOC_FROM (var);
4856 else
4857 cur_loc = var->var_part[0].cur_loc;
4858
4859 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4860 loc; loc = *locp)
4861 {
4862 if (GET_CODE (loc->loc) != MEM
4863 || !mem_dies_at_call (loc->loc))
4864 {
4865 locp = &loc->next;
4866 continue;
4867 }
4868
4869 *locp = loc->next;
4870 /* If we have deleted the location which was last emitted,
4871 we have to emit a new location, so add the variable to the
4872 set of changed variables. */
4873 if (cur_loc == loc->loc)
4874 {
4875 changed = true;
4876 var->var_part[0].cur_loc = NULL;
4877 if (VAR_LOC_1PAUX (var))
4878 VAR_LOC_FROM (var) = NULL;
4879 }
4880 delete loc;
4881 }
4882
4883 if (!var->var_part[0].loc_chain)
4884 {
4885 var->n_var_parts--;
4886 changed = true;
4887 }
4888 if (changed)
4889 variable_was_changed (var, set);
4890 }
4891
4892 return 1;
4893 }
4894
4895 /* Remove all variable-location information about call-clobbered
4896 registers, as well as associations between MEMs and VALUEs. */
4897
4898 static void
4899 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
4900 {
4901 unsigned int r;
4902 hard_reg_set_iterator hrsi;
4903
4904 HARD_REG_SET callee_clobbers
4905 = insn_callee_abi (call_insn).full_reg_clobbers ();
4906
4907 EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
4908 var_regno_delete (set, r);
4909
4910 if (MAY_HAVE_DEBUG_BIND_INSNS)
4911 {
4912 set->traversed_vars = set->vars;
4913 shared_hash_htab (set->vars)
4914 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4915 set->traversed_vars = set->vars;
4916 shared_hash_htab (set->vars)
4917 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4918 set->traversed_vars = NULL;
4919 }
4920 }
4921
4922 static bool
4923 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4924 {
4925 location_chain *lc1, *lc2;
4926
4927 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4928 {
4929 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4930 {
4931 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4932 {
4933 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4934 break;
4935 }
4936 if (rtx_equal_p (lc1->loc, lc2->loc))
4937 break;
4938 }
4939 if (!lc2)
4940 return true;
4941 }
4942 return false;
4943 }
4944
4945 /* Return true if one-part variables VAR1 and VAR2 are different.
4946 They must be in canonical order. */
4947
4948 static bool
4949 onepart_variable_different_p (variable *var1, variable *var2)
4950 {
4951 location_chain *lc1, *lc2;
4952
4953 if (var1 == var2)
4954 return false;
4955
4956 gcc_assert (var1->n_var_parts == 1
4957 && var2->n_var_parts == 1);
4958
4959 lc1 = var1->var_part[0].loc_chain;
4960 lc2 = var2->var_part[0].loc_chain;
4961
4962 gcc_assert (lc1 && lc2);
4963
4964 while (lc1 && lc2)
4965 {
4966 if (loc_cmp (lc1->loc, lc2->loc))
4967 return true;
4968 lc1 = lc1->next;
4969 lc2 = lc2->next;
4970 }
4971
4972 return lc1 != lc2;
4973 }
4974
4975 /* Dump the differences between the location lists of one-part
4976 variables VAR1 and VAR2, which must be in canonical order. */
4977
4978 static void
4979 dump_onepart_variable_differences (variable *var1, variable *var2)
4980 {
4981 location_chain *lc1, *lc2;
4982
4983 gcc_assert (var1 != var2);
4984 gcc_assert (dump_file);
4985 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4986 gcc_assert (var1->n_var_parts == 1
4987 && var2->n_var_parts == 1);
4988
4989 lc1 = var1->var_part[0].loc_chain;
4990 lc2 = var2->var_part[0].loc_chain;
4991
4992 gcc_assert (lc1 && lc2);
4993
4994 while (lc1 && lc2)
4995 {
4996 switch (loc_cmp (lc1->loc, lc2->loc))
4997 {
4998 case -1:
4999 fprintf (dump_file, "removed: ");
5000 print_rtl_single (dump_file, lc1->loc);
5001 lc1 = lc1->next;
5002 continue;
5003 case 0:
5004 break;
5005 case 1:
5006 fprintf (dump_file, "added: ");
5007 print_rtl_single (dump_file, lc2->loc);
5008 lc2 = lc2->next;
5009 continue;
5010 default:
5011 gcc_unreachable ();
5012 }
5013 lc1 = lc1->next;
5014 lc2 = lc2->next;
5015 }
5016
5017 while (lc1)
5018 {
5019 fprintf (dump_file, "removed: ");
5020 print_rtl_single (dump_file, lc1->loc);
5021 lc1 = lc1->next;
5022 }
5023
5024 while (lc2)
5025 {
5026 fprintf (dump_file, "added: ");
5027 print_rtl_single (dump_file, lc2->loc);
5028 lc2 = lc2->next;
5029 }
5030 }
5031
5032 /* Return true if variables VAR1 and VAR2 are different. */
5033
5034 static bool
5035 variable_different_p (variable *var1, variable *var2)
5036 {
5037 int i;
5038
5039 if (var1 == var2)
5040 return false;
5041
5042 if (var1->onepart != var2->onepart)
5043 return true;
5044
5045 if (var1->n_var_parts != var2->n_var_parts)
5046 return true;
5047
5048 if (var1->onepart && var1->n_var_parts)
5049 {
5050 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5051 && var1->n_var_parts == 1);
5052 /* One-part values have locations in a canonical order. */
5053 return onepart_variable_different_p (var1, var2);
5054 }
5055
5056 for (i = 0; i < var1->n_var_parts; i++)
5057 {
5058 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5059 return true;
5060 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5061 return true;
5062 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5063 return true;
5064 }
5065 return false;
5066 }
5067
5068 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5069
5070 static bool
5071 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5072 {
5073 variable_iterator_type hi;
5074 variable *var1;
5075 bool diffound = false;
5076 bool details = (dump_file && (dump_flags & TDF_DETAILS));
5077
5078 #define RETRUE \
5079 do \
5080 { \
5081 if (!details) \
5082 return true; \
5083 else \
5084 diffound = true; \
5085 } \
5086 while (0)
5087
5088 if (old_set->vars == new_set->vars)
5089 return false;
5090
5091 if (shared_hash_htab (old_set->vars)->elements ()
5092 != shared_hash_htab (new_set->vars)->elements ())
5093 RETRUE;
5094
5095 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5096 var1, variable, hi)
5097 {
5098 variable_table_type *htab = shared_hash_htab (new_set->vars);
5099 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5100
5101 if (!var2)
5102 {
5103 if (dump_file && (dump_flags & TDF_DETAILS))
5104 {
5105 fprintf (dump_file, "dataflow difference found: removal of:\n");
5106 dump_var (var1);
5107 }
5108 RETRUE;
5109 }
5110 else if (variable_different_p (var1, var2))
5111 {
5112 if (details)
5113 {
5114 fprintf (dump_file, "dataflow difference found: "
5115 "old and new follow:\n");
5116 dump_var (var1);
5117 if (dv_onepart_p (var1->dv))
5118 dump_onepart_variable_differences (var1, var2);
5119 dump_var (var2);
5120 }
5121 RETRUE;
5122 }
5123 }
5124
5125 /* There's no need to traverse the second hashtab unless we want to
5126 print the details. If both have the same number of elements and
5127 the second one had all entries found in the first one, then the
5128 second can't have any extra entries. */
5129 if (!details)
5130 return diffound;
5131
5132 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
5133 var1, variable, hi)
5134 {
5135 variable_table_type *htab = shared_hash_htab (old_set->vars);
5136 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5137 if (!var2)
5138 {
5139 if (details)
5140 {
5141 fprintf (dump_file, "dataflow difference found: addition of:\n");
5142 dump_var (var1);
5143 }
5144 RETRUE;
5145 }
5146 }
5147
5148 #undef RETRUE
5149
5150 return diffound;
5151 }
5152
5153 /* Free the contents of dataflow set SET. */
5154
5155 static void
5156 dataflow_set_destroy (dataflow_set *set)
5157 {
5158 int i;
5159
5160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5161 attrs_list_clear (&set->regs[i]);
5162
5163 shared_hash_destroy (set->vars);
5164 set->vars = NULL;
5165 }
5166
5167 /* Return true if T is a tracked parameter with non-degenerate record type. */
5168
5169 static bool
5170 tracked_record_parameter_p (tree t)
5171 {
5172 if (TREE_CODE (t) != PARM_DECL)
5173 return false;
5174
5175 if (DECL_MODE (t) == BLKmode)
5176 return false;
5177
5178 tree type = TREE_TYPE (t);
5179 if (TREE_CODE (type) != RECORD_TYPE)
5180 return false;
5181
5182 if (TYPE_FIELDS (type) == NULL_TREE
5183 || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5184 return false;
5185
5186 return true;
5187 }
5188
5189 /* Shall EXPR be tracked? */
5190
5191 static bool
5192 track_expr_p (tree expr, bool need_rtl)
5193 {
5194 rtx decl_rtl;
5195 tree realdecl;
5196
5197 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5198 return DECL_RTL_SET_P (expr);
5199
5200 /* If EXPR is not a parameter or a variable do not track it. */
5201 if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
5202 return 0;
5203
5204 /* It also must have a name... */
5205 if (!DECL_NAME (expr) && need_rtl)
5206 return 0;
5207
5208 /* ... and a RTL assigned to it. */
5209 decl_rtl = DECL_RTL_IF_SET (expr);
5210 if (!decl_rtl && need_rtl)
5211 return 0;
5212
5213 /* If this expression is really a debug alias of some other declaration, we
5214 don't need to track this expression if the ultimate declaration is
5215 ignored. */
5216 realdecl = expr;
5217 if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
5218 {
5219 realdecl = DECL_DEBUG_EXPR (realdecl);
5220 if (!DECL_P (realdecl))
5221 {
5222 if (handled_component_p (realdecl)
5223 || (TREE_CODE (realdecl) == MEM_REF
5224 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5225 {
5226 HOST_WIDE_INT bitsize, bitpos;
5227 bool reverse;
5228 tree innerdecl
5229 = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5230 &bitsize, &reverse);
5231 if (!innerdecl
5232 || !DECL_P (innerdecl)
5233 || DECL_IGNORED_P (innerdecl)
5234 /* Do not track declarations for parts of tracked record
5235 parameters since we want to track them as a whole. */
5236 || tracked_record_parameter_p (innerdecl)
5237 || TREE_STATIC (innerdecl)
5238 || bitsize == 0
5239 || bitpos + bitsize > 256)
5240 return 0;
5241 else
5242 realdecl = expr;
5243 }
5244 else
5245 return 0;
5246 }
5247 }
5248
5249 /* Do not track EXPR if REALDECL should be ignored for debugging
5250 purposes. */
5251 if (DECL_IGNORED_P (realdecl))
5252 return 0;
5253
5254 /* Do not track global variables until we are able to emit correct location
5255 list for them. */
5256 if (TREE_STATIC (realdecl))
5257 return 0;
5258
5259 /* When the EXPR is a DECL for alias of some variable (see example)
5260 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5261 DECL_RTL contains SYMBOL_REF.
5262
5263 Example:
5264 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5265 char **_dl_argv;
5266 */
5267 if (decl_rtl && MEM_P (decl_rtl)
5268 && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
5269 return 0;
5270
5271 /* If the RTX is a memory reference, it should not be very large
5272 (because it would be an array or a struct). */
5273 if (decl_rtl && MEM_P (decl_rtl))
5274 {
5275 /* Do not track structures and arrays. */
5276 if ((GET_MODE (decl_rtl) == BLKmode
5277 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5278 && !tracked_record_parameter_p (realdecl))
5279 return 0;
5280 if (MEM_SIZE_KNOWN_P (decl_rtl)
5281 && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
5282 return 0;
5283 }
5284
5285 DECL_CHANGED (expr) = 0;
5286 DECL_CHANGED (realdecl) = 0;
5287 return 1;
5288 }
5289
5290 /* Determine whether a given LOC refers to the same variable part as
5291 EXPR+OFFSET. */
5292
5293 static bool
5294 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5295 {
5296 tree expr2;
5297 poly_int64 offset2;
5298
5299 if (! DECL_P (expr))
5300 return false;
5301
5302 if (REG_P (loc))
5303 {
5304 expr2 = REG_EXPR (loc);
5305 offset2 = REG_OFFSET (loc);
5306 }
5307 else if (MEM_P (loc))
5308 {
5309 expr2 = MEM_EXPR (loc);
5310 offset2 = int_mem_offset (loc);
5311 }
5312 else
5313 return false;
5314
5315 if (! expr2 || ! DECL_P (expr2))
5316 return false;
5317
5318 expr = var_debug_decl (expr);
5319 expr2 = var_debug_decl (expr2);
5320
5321 return (expr == expr2 && known_eq (offset, offset2));
5322 }
5323
5324 /* LOC is a REG or MEM that we would like to track if possible.
5325 If EXPR is null, we don't know what expression LOC refers to,
5326 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5327 LOC is an lvalue register.
5328
5329 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5330 is something we can track. When returning true, store the mode of
5331 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5332 from EXPR in *OFFSET_OUT (if nonnull). */
5333
5334 static bool
5335 track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
5336 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5337 {
5338 machine_mode mode;
5339
5340 if (expr == NULL || !track_expr_p (expr, true))
5341 return false;
5342
5343 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5344 whole subreg, but only the old inner part is really relevant. */
5345 mode = GET_MODE (loc);
5346 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5347 {
5348 machine_mode pseudo_mode;
5349
5350 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5351 if (paradoxical_subreg_p (mode, pseudo_mode))
5352 {
5353 offset += byte_lowpart_offset (pseudo_mode, mode);
5354 mode = pseudo_mode;
5355 }
5356 }
5357
5358 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5359 Do the same if we are storing to a register and EXPR occupies
5360 the whole of register LOC; in that case, the whole of EXPR is
5361 being changed. We exclude complex modes from the second case
5362 because the real and imaginary parts are represented as separate
5363 pseudo registers, even if the whole complex value fits into one
5364 hard register. */
5365 if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
5366 || (store_reg_p
5367 && !COMPLEX_MODE_P (DECL_MODE (expr))
5368 && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
5369 && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
5370 {
5371 mode = DECL_MODE (expr);
5372 offset = 0;
5373 }
5374
5375 HOST_WIDE_INT const_offset;
5376 if (!track_offset_p (offset, &const_offset))
5377 return false;
5378
5379 if (mode_out)
5380 *mode_out = mode;
5381 if (offset_out)
5382 *offset_out = const_offset;
5383 return true;
5384 }
5385
5386 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5387 want to track. When returning nonnull, make sure that the attributes
5388 on the returned value are updated. */
5389
5390 static rtx
5391 var_lowpart (machine_mode mode, rtx loc)
5392 {
5393 unsigned int regno;
5394
5395 if (GET_MODE (loc) == mode)
5396 return loc;
5397
5398 if (!REG_P (loc) && !MEM_P (loc))
5399 return NULL;
5400
5401 poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5402
5403 if (MEM_P (loc))
5404 return adjust_address_nv (loc, mode, offset);
5405
5406 poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5407 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5408 reg_offset, mode);
5409 return gen_rtx_REG_offset (loc, mode, regno, offset);
5410 }
5411
5412 /* Carry information about uses and stores while walking rtx. */
5413
5414 struct count_use_info
5415 {
5416 /* The insn where the RTX is. */
5417 rtx_insn *insn;
5418
5419 /* The basic block where insn is. */
5420 basic_block bb;
5421
5422 /* The array of n_sets sets in the insn, as determined by cselib. */
5423 struct cselib_set *sets;
5424 int n_sets;
5425
5426 /* True if we're counting stores, false otherwise. */
5427 bool store_p;
5428 };
5429
5430 /* Find a VALUE corresponding to X. */
5431
5432 static inline cselib_val *
5433 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5434 {
5435 int i;
5436
5437 if (cui->sets)
5438 {
5439 /* This is called after uses are set up and before stores are
5440 processed by cselib, so it's safe to look up srcs, but not
5441 dsts. So we look up expressions that appear in srcs or in
5442 dest expressions, but we search the sets array for dests of
5443 stores. */
5444 if (cui->store_p)
5445 {
5446 /* Some targets represent memset and memcpy patterns
5447 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5448 (set (mem:BLK ...) (const_int ...)) or
5449 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5450 in that case, otherwise we end up with mode mismatches. */
5451 if (mode == BLKmode && MEM_P (x))
5452 return NULL;
5453 for (i = 0; i < cui->n_sets; i++)
5454 if (cui->sets[i].dest == x)
5455 return cui->sets[i].src_elt;
5456 }
5457 else
5458 return cselib_lookup (x, mode, 0, VOIDmode);
5459 }
5460
5461 return NULL;
5462 }
5463
5464 /* Replace all registers and addresses in an expression with VALUE
5465 expressions that map back to them, unless the expression is a
5466 register. If no mapping is or can be performed, returns NULL. */
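/* For instance, with a hypothetical (mem:SI (plus:SI (reg:SI fp)
   (const_int -4))), the address is looked up in cselib and, when a VALUE
   is known for it, the MEM is rewritten to use that VALUE as its address
   via replace_equiv_address_nv. */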
5467
5468 static rtx
5469 replace_expr_with_values (rtx loc)
5470 {
5471 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5472 return NULL;
5473 else if (MEM_P (loc))
5474 {
5475 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5476 get_address_mode (loc), 0,
5477 GET_MODE (loc));
5478 if (addr)
5479 return replace_equiv_address_nv (loc, addr->val_rtx);
5480 else
5481 return NULL;
5482 }
5483 else
5484 return cselib_subst_to_values (loc, VOIDmode);
5485 }
5486
5487 /* Return true if X contains a DEBUG_EXPR. */
5488
5489 static bool
5490 rtx_debug_expr_p (const_rtx x)
5491 {
5492 subrtx_iterator::array_type array;
5493 FOR_EACH_SUBRTX (iter, array, x, ALL)
5494 if (GET_CODE (*iter) == DEBUG_EXPR)
5495 return true;
5496 return false;
5497 }
5498
5499 /* Determine what kind of micro operation to choose for a USE. Return
5500 MO_CLOBBER if no micro operation is to be generated. */
5501
5502 static enum micro_operation_type
5503 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5504 {
5505 tree expr;
5506
5507 if (cui && cui->sets)
5508 {
5509 if (GET_CODE (loc) == VAR_LOCATION)
5510 {
5511 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5512 {
5513 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5514 if (! VAR_LOC_UNKNOWN_P (ploc))
5515 {
5516 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5517 VOIDmode);
5518
5519 /* ??? flag_float_store and volatile mems are never
5520 given values, but we could in theory use them for
5521 locations. */
5522 gcc_assert (val || 1);
5523 }
5524 return MO_VAL_LOC;
5525 }
5526 else
5527 return MO_CLOBBER;
5528 }
5529
5530 if (REG_P (loc) || MEM_P (loc))
5531 {
5532 if (modep)
5533 *modep = GET_MODE (loc);
5534 if (cui->store_p)
5535 {
5536 if (REG_P (loc)
5537 || (find_use_val (loc, GET_MODE (loc), cui)
5538 && cselib_lookup (XEXP (loc, 0),
5539 get_address_mode (loc), 0,
5540 GET_MODE (loc))))
5541 return MO_VAL_SET;
5542 }
5543 else
5544 {
5545 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5546
5547 if (val && !cselib_preserved_value_p (val))
5548 return MO_VAL_USE;
5549 }
5550 }
5551 }
5552
5553 if (REG_P (loc))
5554 {
5555 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5556
5557 if (loc == cfa_base_rtx)
5558 return MO_CLOBBER;
5559 expr = REG_EXPR (loc);
5560
5561 if (!expr)
5562 return MO_USE_NO_VAR;
5563 else if (target_for_debug_bind (var_debug_decl (expr)))
5564 return MO_CLOBBER;
5565 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5566 false, modep, NULL))
5567 return MO_USE;
5568 else
5569 return MO_USE_NO_VAR;
5570 }
5571 else if (MEM_P (loc))
5572 {
5573 expr = MEM_EXPR (loc);
5574
5575 if (!expr)
5576 return MO_CLOBBER;
5577 else if (target_for_debug_bind (var_debug_decl (expr)))
5578 return MO_CLOBBER;
5579 else if (track_loc_p (loc, expr, int_mem_offset (loc),
5580 false, modep, NULL)
5581 /* Multi-part variables shouldn't refer to one-part
5582 variable names such as VALUEs (never happens) or
5583 DEBUG_EXPRs (only happens in the presence of debug
5584 insns). */
5585 && (!MAY_HAVE_DEBUG_BIND_INSNS
5586 || !rtx_debug_expr_p (XEXP (loc, 0))))
5587 return MO_USE;
5588 else
5589 return MO_CLOBBER;
5590 }
5591
5592 return MO_CLOBBER;
5593 }
5594
5595 /* Log to OUT information about micro-operation MOPT involving X in
5596 INSN of BB. */
5597
5598 static inline void
5599 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5600 enum micro_operation_type mopt, FILE *out)
5601 {
5602 fprintf (out, "bb %i op %i insn %i %s ",
5603 bb->index, VTI (bb)->mos.length (),
5604 INSN_UID (insn), micro_operation_type_name[mopt]);
5605 print_inline_rtx (out, x, 2);
5606 fputc ('\n', out);
5607 }
5608
5609 /* Tell whether the CONCAT used to hold a VALUE and its location
5610 needs value resolution, i.e., an attempt at mapping the location
5611 back to other incoming values. */
5612 #define VAL_NEEDS_RESOLUTION(x) \
5613 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5614 /* Whether the location in the CONCAT is a tracked expression, that
5615 should also be handled like a MO_USE. */
5616 #define VAL_HOLDS_TRACK_EXPR(x) \
5617 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5618 /* Whether the location in the CONCAT should be handled like a MO_COPY
5619 as well. */
5620 #define VAL_EXPR_IS_COPIED(x) \
5621 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5622 /* Whether the location in the CONCAT should be handled like a
5623 MO_CLOBBER as well. */
5624 #define VAL_EXPR_IS_CLOBBERED(x) \
5625 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5626
5627 /* All preserved VALUEs. */
5628 static vec<rtx> preserved_values;
5629
5630 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5631
5632 static void
5633 preserve_value (cselib_val *val)
5634 {
5635 cselib_preserve_value (val);
5636 preserved_values.safe_push (val->val_rtx);
5637 }
5638
5639 /* Helper function for MO_VAL_LOC handling. Return true if any
5640 rtxes that are not suitable for CONST use and have not been
5641 replaced by VALUEs are discovered. */
5642
5643 static bool
5644 non_suitable_const (const_rtx x)
5645 {
5646 subrtx_iterator::array_type array;
5647 FOR_EACH_SUBRTX (iter, array, x, ALL)
5648 {
5649 const_rtx x = *iter;
5650 switch (GET_CODE (x))
5651 {
5652 case REG:
5653 case DEBUG_EXPR:
5654 case PC:
5655 case SCRATCH:
5656 case CC0:
5657 case ASM_INPUT:
5658 case ASM_OPERANDS:
5659 return true;
5660 case MEM:
5661 if (!MEM_READONLY_P (x))
5662 return true;
5663 break;
5664 default:
5665 break;
5666 }
5667 }
5668 return false;
5669 }
5670
5671 /* Add uses (register and memory references) LOC which will be tracked
5672 to VTI (bb)->mos. */
5673
5674 static void
5675 add_uses (rtx loc, struct count_use_info *cui)
5676 {
5677 machine_mode mode = VOIDmode;
5678 enum micro_operation_type type = use_type (loc, cui, &mode);
5679
5680 if (type != MO_CLOBBER)
5681 {
5682 basic_block bb = cui->bb;
5683 micro_operation mo;
5684
5685 mo.type = type;
5686 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5687 mo.insn = cui->insn;
5688
5689 if (type == MO_VAL_LOC)
5690 {
5691 rtx oloc = loc;
5692 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5693 cselib_val *val;
5694
5695 gcc_assert (cui->sets);
5696
5697 if (MEM_P (vloc)
5698 && !REG_P (XEXP (vloc, 0))
5699 && !MEM_P (XEXP (vloc, 0)))
5700 {
5701 rtx mloc = vloc;
5702 machine_mode address_mode = get_address_mode (mloc);
5703 cselib_val *val
5704 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5705 GET_MODE (mloc));
5706
5707 if (val && !cselib_preserved_value_p (val))
5708 preserve_value (val);
5709 }
5710
5711 if (CONSTANT_P (vloc)
5712 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5713 /* For constants don't look up any value. */;
5714 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5715 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5716 {
5717 machine_mode mode2;
5718 enum micro_operation_type type2;
5719 rtx nloc = NULL;
5720 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5721
5722 if (resolvable)
5723 nloc = replace_expr_with_values (vloc);
5724
5725 if (nloc)
5726 {
5727 oloc = shallow_copy_rtx (oloc);
5728 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5729 }
5730
5731 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5732
5733 type2 = use_type (vloc, 0, &mode2);
5734
5735 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5736 || type2 == MO_CLOBBER);
5737
5738 if (type2 == MO_CLOBBER
5739 && !cselib_preserved_value_p (val))
5740 {
5741 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5742 preserve_value (val);
5743 }
5744 }
5745 else if (!VAR_LOC_UNKNOWN_P (vloc))
5746 {
5747 oloc = shallow_copy_rtx (oloc);
5748 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5749 }
5750
5751 mo.u.loc = oloc;
5752 }
5753 else if (type == MO_VAL_USE)
5754 {
5755 machine_mode mode2 = VOIDmode;
5756 enum micro_operation_type type2;
5757 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5758 rtx vloc, oloc = loc, nloc;
5759
5760 gcc_assert (cui->sets);
5761
5762 if (MEM_P (oloc)
5763 && !REG_P (XEXP (oloc, 0))
5764 && !MEM_P (XEXP (oloc, 0)))
5765 {
5766 rtx mloc = oloc;
5767 machine_mode address_mode = get_address_mode (mloc);
5768 cselib_val *val
5769 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5770 GET_MODE (mloc));
5771
5772 if (val && !cselib_preserved_value_p (val))
5773 preserve_value (val);
5774 }
5775
5776 type2 = use_type (loc, 0, &mode2);
5777
5778 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5779 || type2 == MO_CLOBBER);
5780
5781 if (type2 == MO_USE)
5782 vloc = var_lowpart (mode2, loc);
5783 else
5784 vloc = oloc;
5785
5786 /* The loc of a MO_VAL_USE may have two forms:
5787
5788 (concat val src): val is at src, a value-based
5789 representation.
5790
5791 (concat (concat val use) src): same as above, with use as
5792 the MO_USE tracked value, if it differs from src.
5793
5794 */
5795
5796 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5797 nloc = replace_expr_with_values (loc);
5798 if (!nloc)
5799 nloc = oloc;
5800
5801 if (vloc != nloc)
5802 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5803 else
5804 oloc = val->val_rtx;
5805
5806 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5807
5808 if (type2 == MO_USE)
5809 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5810 if (!cselib_preserved_value_p (val))
5811 {
5812 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5813 preserve_value (val);
5814 }
5815 }
5816 else
5817 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5818
5819 if (dump_file && (dump_flags & TDF_DETAILS))
5820 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5821 VTI (bb)->mos.safe_push (mo);
5822 }
5823 }
5824
5825 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5826
5827 static void
5828 add_uses_1 (rtx *x, void *cui)
5829 {
5830 subrtx_var_iterator::array_type array;
5831 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5832 add_uses (*iter, (struct count_use_info *) cui);
5833 }
5834
5835 /* This is the value used during expansion of locations. We want it
5836 to be unbounded, so that variables expanded deep in a recursion
5837 nest are fully evaluated, so that their values are cached
5838 correctly. We avoid recursion cycles through other means, and we
5839 don't unshare RTL, so excess complexity is not a problem. */
5840 #define EXPR_DEPTH (INT_MAX)
5841 /* We use this to keep too-complex expressions from being emitted as
5842 location notes, and then to debug information. Users can trade
5843 compile time for ridiculously complex expressions, although they're
5844 seldom useful, and they may often have to be discarded as not
5845 representable anyway. */
5846 #define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)
5847
5848 /* Attempt to reverse the EXPR operation in the debug info and record
5849 it in the cselib table. Say, for reg1 = reg2 + 6, even when reg2 is
5850 no longer live, we can express its value as VAL - 6. */
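/* The reversals applied below: PLUS becomes MINUS of the same operand and
   vice versa, XOR undoes itself with the same operand, NOT and NEG are
   their own inverses, and SIGN_EXTEND/ZERO_EXTEND are undone by taking the
   lowpart SUBREG in the narrower mode. */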
5851
5852 static void
5853 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5854 {
5855 rtx src, arg, ret;
5856 cselib_val *v;
5857 struct elt_loc_list *l;
5858 enum rtx_code code;
5859 int count;
5860
5861 if (GET_CODE (expr) != SET)
5862 return;
5863
5864 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5865 return;
5866
5867 src = SET_SRC (expr);
5868 switch (GET_CODE (src))
5869 {
5870 case PLUS:
5871 case MINUS:
5872 case XOR:
5873 case NOT:
5874 case NEG:
5875 if (!REG_P (XEXP (src, 0)))
5876 return;
5877 break;
5878 case SIGN_EXTEND:
5879 case ZERO_EXTEND:
5880 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5881 return;
5882 break;
5883 default:
5884 return;
5885 }
5886
5887 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5888 return;
5889
5890 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5891 if (!v || !cselib_preserved_value_p (v))
5892 return;
5893
5894 /* Use canonical V to avoid creating multiple redundant expressions
5895 for different VALUES equivalent to V. */
5896 v = canonical_cselib_val (v);
5897
5898 /* Adding a reverse op isn't useful if V already has an always valid
5899 location. Ignore ENTRY_VALUE, while it is always constant, we should
5900 prefer non-ENTRY_VALUE locations whenever possible. */
5901 for (l = v->locs, count = 0; l; l = l->next, count++)
5902 if (CONSTANT_P (l->loc)
5903 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5904 return;
5905 /* Avoid creating too large locs lists. */
5906 else if (count == param_max_vartrack_reverse_op_size)
5907 return;
5908
5909 switch (GET_CODE (src))
5910 {
5911 case NOT:
5912 case NEG:
5913 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5914 return;
5915 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5916 break;
5917 case SIGN_EXTEND:
5918 case ZERO_EXTEND:
5919 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5920 break;
5921 case XOR:
5922 code = XOR;
5923 goto binary;
5924 case PLUS:
5925 code = MINUS;
5926 goto binary;
5927 case MINUS:
5928 code = PLUS;
5929 goto binary;
5930 binary:
5931 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5932 return;
5933 arg = XEXP (src, 1);
5934 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5935 {
5936 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5937 if (arg == NULL_RTX)
5938 return;
5939 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5940 return;
5941 }
5942 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5943 break;
5944 default:
5945 gcc_unreachable ();
5946 }
5947
5948 cselib_add_permanent_equiv (v, ret, insn);
5949 }
5950
5951 /* Add the store LOC (a register or memory reference), which will be tracked,
5952 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5953 CUIP->insn is the instruction which LOC is part of. */
5954
5955 static void
5956 add_stores (rtx loc, const_rtx expr, void *cuip)
5957 {
5958 machine_mode mode = VOIDmode, mode2;
5959 struct count_use_info *cui = (struct count_use_info *)cuip;
5960 basic_block bb = cui->bb;
5961 micro_operation mo;
5962 rtx oloc = loc, nloc, src = NULL;
5963 enum micro_operation_type type = use_type (loc, cui, &mode);
5964 bool track_p = false;
5965 cselib_val *v;
5966 bool resolve, preserve;
5967
5968 if (type == MO_CLOBBER)
5969 return;
5970
5971 mode2 = mode;
5972
5973 if (REG_P (loc))
5974 {
5975 gcc_assert (loc != cfa_base_rtx);
5976 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5977 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5978 || GET_CODE (expr) == CLOBBER)
5979 {
5980 mo.type = MO_CLOBBER;
5981 mo.u.loc = loc;
5982 if (GET_CODE (expr) == SET
5983 && (SET_DEST (expr) == loc
5984 || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
5985 && XEXP (SET_DEST (expr), 0) == loc))
5986 && !unsuitable_loc (SET_SRC (expr))
5987 && find_use_val (loc, mode, cui))
5988 {
5989 gcc_checking_assert (type == MO_VAL_SET);
5990 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5991 }
5992 }
5993 else
5994 {
5995 if (GET_CODE (expr) == SET
5996 && SET_DEST (expr) == loc
5997 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5998 src = var_lowpart (mode2, SET_SRC (expr));
5999 loc = var_lowpart (mode2, loc);
6000
6001 if (src == NULL)
6002 {
6003 mo.type = MO_SET;
6004 mo.u.loc = loc;
6005 }
6006 else
6007 {
6008 rtx xexpr = gen_rtx_SET (loc, src);
6009 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
6010 {
6011 /* If this is an instruction copying (part of) a parameter
6012 passed by invisible reference to its register location,
6013 pretend it's a SET so that the initial memory location
6014 is discarded, as the parameter register can be reused
6015 for other purposes and we do not track locations based
6016 on generic registers. */
6017 if (MEM_P (src)
6018 && REG_EXPR (loc)
6019 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6020 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6021 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6022 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
6023 != arg_pointer_rtx)
6024 mo.type = MO_SET;
6025 else
6026 mo.type = MO_COPY;
6027 }
6028 else
6029 mo.type = MO_SET;
6030 mo.u.loc = xexpr;
6031 }
6032 }
6033 mo.insn = cui->insn;
6034 }
6035 else if (MEM_P (loc)
6036 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
6037 || cui->sets))
6038 {
6039 if (MEM_P (loc) && type == MO_VAL_SET
6040 && !REG_P (XEXP (loc, 0))
6041 && !MEM_P (XEXP (loc, 0)))
6042 {
6043 rtx mloc = loc;
6044 machine_mode address_mode = get_address_mode (mloc);
6045 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6046 address_mode, 0,
6047 GET_MODE (mloc));
6048
6049 if (val && !cselib_preserved_value_p (val))
6050 preserve_value (val);
6051 }
6052
6053 if (GET_CODE (expr) == CLOBBER || !track_p)
6054 {
6055 mo.type = MO_CLOBBER;
6056 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6057 }
6058 else
6059 {
6060 if (GET_CODE (expr) == SET
6061 && SET_DEST (expr) == loc
6062 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6063 src = var_lowpart (mode2, SET_SRC (expr));
6064 loc = var_lowpart (mode2, loc);
6065
6066 if (src == NULL)
6067 {
6068 mo.type = MO_SET;
6069 mo.u.loc = loc;
6070 }
6071 else
6072 {
6073 rtx xexpr = gen_rtx_SET (loc, src);
6074 if (same_variable_part_p (SET_SRC (xexpr),
6075 MEM_EXPR (loc),
6076 int_mem_offset (loc)))
6077 mo.type = MO_COPY;
6078 else
6079 mo.type = MO_SET;
6080 mo.u.loc = xexpr;
6081 }
6082 }
6083 mo.insn = cui->insn;
6084 }
6085 else
6086 return;
6087
6088 if (type != MO_VAL_SET)
6089 goto log_and_return;
6090
6091 v = find_use_val (oloc, mode, cui);
6092
6093 if (!v)
6094 goto log_and_return;
6095
6096 resolve = preserve = !cselib_preserved_value_p (v);
6097
6098 /* We cannot track values for multiple-part variables, so we track only
6099 locations for tracked record parameters. */
6100 if (track_p
6101 && REG_P (loc)
6102 && REG_EXPR (loc)
6103 && tracked_record_parameter_p (REG_EXPR (loc)))
6104 {
6105 /* Although we don't use the value here, it could be used later by the
6106 mere virtue of its existence as the operand of the reverse operation
6107 that gave rise to it (typically extension/truncation). Make sure it
6108 is preserved as required by vt_expand_var_loc_chain. */
6109 if (preserve)
6110 preserve_value (v);
6111 goto log_and_return;
6112 }
6113
6114 if (loc == stack_pointer_rtx
6115 && maybe_ne (hard_frame_pointer_adjustment, -1)
6116 && preserve)
6117 cselib_set_value_sp_based (v);
6118
6119 nloc = replace_expr_with_values (oloc);
6120 if (nloc)
6121 oloc = nloc;
6122
6123 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6124 {
6125 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6126
6127 if (oval == v)
6128 return;
6129 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6130
6131 if (oval && !cselib_preserved_value_p (oval))
6132 {
6133 micro_operation moa;
6134
6135 preserve_value (oval);
6136
6137 moa.type = MO_VAL_USE;
6138 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6139 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6140 moa.insn = cui->insn;
6141
6142 if (dump_file && (dump_flags & TDF_DETAILS))
6143 log_op_type (moa.u.loc, cui->bb, cui->insn,
6144 moa.type, dump_file);
6145 VTI (bb)->mos.safe_push (moa);
6146 }
6147
6148 resolve = false;
6149 }
6150 else if (resolve && GET_CODE (mo.u.loc) == SET)
6151 {
6152 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6153 nloc = replace_expr_with_values (SET_SRC (expr));
6154 else
6155 nloc = NULL_RTX;
6156
6157 /* Avoid the mode mismatch between oexpr and expr. */
6158 if (!nloc && mode != mode2)
6159 {
6160 nloc = SET_SRC (expr);
6161 gcc_assert (oloc == SET_DEST (expr));
6162 }
6163
6164 if (nloc && nloc != SET_SRC (mo.u.loc))
6165 oloc = gen_rtx_SET (oloc, nloc);
6166 else
6167 {
6168 if (oloc == SET_DEST (mo.u.loc))
6169 /* No point in duplicating. */
6170 oloc = mo.u.loc;
6171 if (!REG_P (SET_SRC (mo.u.loc)))
6172 resolve = false;
6173 }
6174 }
6175 else if (!resolve)
6176 {
6177 if (GET_CODE (mo.u.loc) == SET
6178 && oloc == SET_DEST (mo.u.loc))
6179 /* No point in duplicating. */
6180 oloc = mo.u.loc;
6181 }
6182 else
6183 resolve = false;
6184
6185 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6186
6187 if (mo.u.loc != oloc)
6188 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6189
6190 /* The loc of a MO_VAL_SET may have various forms:
6191
6192 (concat val dst): dst now holds val
6193
6194 (concat val (set dst src)): dst now holds val, copied from src
6195
6196 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6197 after replacing mems and non-top-level regs with values.
6198
6199 (concat (concat val dstv) (set dst src)): dst now holds val,
6200 copied from src. dstv is a value-based representation of dst, if
6201 it differs from dst. If resolution is needed, src is a REG, and
6202 its mode is the same as that of val.
6203
6204 (concat (concat val (set dstv srcv)) (set dst src)): src
6205 copied to dst, holding val. dstv and srcv are value-based
6206 representations of dst and src, respectively.
6207
6208 */
6209
6210 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6211 reverse_op (v->val_rtx, expr, cui->insn);
6212
6213 mo.u.loc = loc;
6214
6215 if (track_p)
6216 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6217 if (preserve)
6218 {
6219 VAL_NEEDS_RESOLUTION (loc) = resolve;
6220 preserve_value (v);
6221 }
6222 if (mo.type == MO_CLOBBER)
6223 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6224 if (mo.type == MO_COPY)
6225 VAL_EXPR_IS_COPIED (loc) = 1;
6226
6227 mo.type = MO_VAL_SET;
6228
6229 log_and_return:
6230 if (dump_file && (dump_flags & TDF_DETAILS))
6231 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6232 VTI (bb)->mos.safe_push (mo);
6233 }
6234
6235 /* Arguments to the call. */
6236 static rtx call_arguments;
6237
6238 /* Compute call_arguments, the EXPR_LIST of argument values for the call in INSN, later attached to the MO_CALL micro operation. */
6239
6240 static void
6241 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6242 {
6243 rtx link, x, call;
6244 rtx prev, cur, next;
6245 rtx this_arg = NULL_RTX;
6246 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6247 tree obj_type_ref = NULL_TREE;
6248 CUMULATIVE_ARGS args_so_far_v;
6249 cumulative_args_t args_so_far;
6250
6251 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6252 args_so_far = pack_cumulative_args (&args_so_far_v);
6253 call = get_call_rtx_from (insn);
6254 if (call)
6255 {
6256 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6257 {
6258 rtx symbol = XEXP (XEXP (call, 0), 0);
6259 if (SYMBOL_REF_DECL (symbol))
6260 fndecl = SYMBOL_REF_DECL (symbol);
6261 }
6262 if (fndecl == NULL_TREE)
6263 fndecl = MEM_EXPR (XEXP (call, 0));
6264 if (fndecl
6265 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6266 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6267 fndecl = NULL_TREE;
6268 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6269 type = TREE_TYPE (fndecl);
6270 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6271 {
6272 if (TREE_CODE (fndecl) == INDIRECT_REF
6273 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6274 obj_type_ref = TREE_OPERAND (fndecl, 0);
6275 fndecl = NULL_TREE;
6276 }
6277 if (type)
6278 {
6279 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6280 t = TREE_CHAIN (t))
6281 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6282 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6283 break;
6284 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6285 type = NULL;
6286 else
6287 {
6288 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6289 link = CALL_INSN_FUNCTION_USAGE (insn);
6290 #ifndef PCC_STATIC_STRUCT_RETURN
6291 if (aggregate_value_p (TREE_TYPE (type), type)
6292 && targetm.calls.struct_value_rtx (type, 0) == 0)
6293 {
6294 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6295 function_arg_info arg (struct_addr, /*named=*/true);
6296 rtx reg;
6297 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6298 nargs + 1);
6299 reg = targetm.calls.function_arg (args_so_far, arg);
6300 targetm.calls.function_arg_advance (args_so_far, arg);
6301 if (reg == NULL_RTX)
6302 {
6303 for (; link; link = XEXP (link, 1))
6304 if (GET_CODE (XEXP (link, 0)) == USE
6305 && MEM_P (XEXP (XEXP (link, 0), 0)))
6306 {
6307 link = XEXP (link, 1);
6308 break;
6309 }
6310 }
6311 }
6312 else
6313 #endif
6314 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6315 nargs);
6316 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6317 {
6318 t = TYPE_ARG_TYPES (type);
6319 function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6320 this_arg = targetm.calls.function_arg (args_so_far, arg);
6321 if (this_arg && !REG_P (this_arg))
6322 this_arg = NULL_RTX;
6323 else if (this_arg == NULL_RTX)
6324 {
6325 for (; link; link = XEXP (link, 1))
6326 if (GET_CODE (XEXP (link, 0)) == USE
6327 && MEM_P (XEXP (XEXP (link, 0), 0)))
6328 {
6329 this_arg = XEXP (XEXP (link, 0), 0);
6330 break;
6331 }
6332 }
6333 }
6334 }
6335 }
6336 }
6337 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6338
6339 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6340 if (GET_CODE (XEXP (link, 0)) == USE)
6341 {
6342 rtx item = NULL_RTX;
6343 x = XEXP (XEXP (link, 0), 0);
6344 if (GET_MODE (link) == VOIDmode
6345 || GET_MODE (link) == BLKmode
6346 || (GET_MODE (link) != GET_MODE (x)
6347 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6348 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6349 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6350 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6351 /* Can't do anything for these, if the original type mode
6352 isn't known or can't be converted. */;
6353 else if (REG_P (x))
6354 {
6355 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6356 scalar_int_mode mode;
6357 if (val && cselib_preserved_value_p (val))
6358 item = val->val_rtx;
6359 else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
6360 {
6361 opt_scalar_int_mode mode_iter;
6362 FOR_EACH_WIDER_MODE (mode_iter, mode)
6363 {
6364 mode = mode_iter.require ();
6365 if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
6366 break;
6367
6368 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6369 if (reg == NULL_RTX || !REG_P (reg))
6370 continue;
6371 val = cselib_lookup (reg, mode, 0, VOIDmode);
6372 if (val && cselib_preserved_value_p (val))
6373 {
6374 item = val->val_rtx;
6375 break;
6376 }
6377 }
6378 }
6379 }
6380 else if (MEM_P (x))
6381 {
6382 rtx mem = x;
6383 cselib_val *val;
6384
6385 if (!frame_pointer_needed)
6386 {
6387 class adjust_mem_data amd;
6388 amd.mem_mode = VOIDmode;
6389 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6390 amd.store = true;
6391 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6392 &amd);
6393 gcc_assert (amd.side_effects.is_empty ());
6394 }
6395 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6396 if (val && cselib_preserved_value_p (val))
6397 item = val->val_rtx;
6398 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6399 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6400 {
6401 /* For a non-integer stack argument, also check whether it wasn't
6402 initialized by an integer. */
6403 scalar_int_mode imode;
6404 if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
6405 && imode != GET_MODE (mem))
6406 {
6407 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6408 imode, 0, VOIDmode);
6409 if (val && cselib_preserved_value_p (val))
6410 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6411 imode);
6412 }
6413 }
6414 }
6415 if (item)
6416 {
6417 rtx x2 = x;
6418 if (GET_MODE (item) != GET_MODE (link))
6419 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6420 if (GET_MODE (x2) != GET_MODE (link))
6421 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6422 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6423 call_arguments
6424 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6425 }
6426 if (t && t != void_list_node)
6427 {
6428 rtx reg;
6429 function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6430 apply_pass_by_reference_rules (&args_so_far_v, arg);
6431 reg = targetm.calls.function_arg (args_so_far, arg);
6432 if (TREE_CODE (arg.type) == REFERENCE_TYPE
6433 && INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
6434 && reg
6435 && REG_P (reg)
6436 && GET_MODE (reg) == arg.mode
6437 && (GET_MODE_CLASS (arg.mode) == MODE_INT
6438 || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
6439 && REG_P (x)
6440 && REGNO (x) == REGNO (reg)
6441 && GET_MODE (x) == arg.mode
6442 && item)
6443 {
6444 machine_mode indmode
6445 = TYPE_MODE (TREE_TYPE (arg.type));
6446 rtx mem = gen_rtx_MEM (indmode, x);
6447 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6448 if (val && cselib_preserved_value_p (val))
6449 {
6450 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6451 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6452 call_arguments);
6453 }
6454 else
6455 {
6456 struct elt_loc_list *l;
6457 tree initial;
6458
6459 /* Try harder: when passing the address of a constant
6460 pool integer, it can easily be read back. */
6461 item = XEXP (item, 1);
6462 if (GET_CODE (item) == SUBREG)
6463 item = SUBREG_REG (item);
6464 gcc_assert (GET_CODE (item) == VALUE);
6465 val = CSELIB_VAL_PTR (item);
6466 for (l = val->locs; l; l = l->next)
6467 if (GET_CODE (l->loc) == SYMBOL_REF
6468 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6469 && SYMBOL_REF_DECL (l->loc)
6470 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6471 {
6472 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6473 if (tree_fits_shwi_p (initial))
6474 {
6475 item = GEN_INT (tree_to_shwi (initial));
6476 item = gen_rtx_CONCAT (indmode, mem, item);
6477 call_arguments
6478 = gen_rtx_EXPR_LIST (VOIDmode, item,
6479 call_arguments);
6480 }
6481 break;
6482 }
6483 }
6484 }
6485 targetm.calls.function_arg_advance (args_so_far, arg);
6486 t = TREE_CHAIN (t);
6487 }
6488 }
6489
6490 /* Add debug arguments. */
6491 if (fndecl
6492 && TREE_CODE (fndecl) == FUNCTION_DECL
6493 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6494 {
6495 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6496 if (debug_args)
6497 {
6498 unsigned int ix;
6499 tree param;
6500 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6501 {
6502 rtx item;
6503 tree dtemp = (**debug_args)[ix + 1];
6504 machine_mode mode = DECL_MODE (dtemp);
6505 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6506 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6507 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6508 call_arguments);
6509 }
6510 }
6511 }
6512
6513 /* Reverse the call_arguments chain. */
6514 prev = NULL_RTX;
6515 for (cur = call_arguments; cur; cur = next)
6516 {
6517 next = XEXP (cur, 1);
6518 XEXP (cur, 1) = prev;
6519 prev = cur;
6520 }
6521 call_arguments = prev;
6522
6523 x = get_call_rtx_from (insn);
6524 if (x)
6525 {
6526 x = XEXP (XEXP (x, 0), 0);
6527 if (GET_CODE (x) == SYMBOL_REF)
6528 /* Don't record anything. */;
6529 else if (CONSTANT_P (x))
6530 {
6531 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6532 pc_rtx, x);
6533 call_arguments
6534 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6535 }
6536 else
6537 {
6538 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6539 if (val && cselib_preserved_value_p (val))
6540 {
6541 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6542 call_arguments
6543 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6544 }
6545 }
6546 }
6547 if (this_arg)
6548 {
6549 machine_mode mode
6550 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6551 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6552 HOST_WIDE_INT token
6553 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6554 if (token)
6555 clobbered = plus_constant (mode, clobbered,
6556 token * GET_MODE_SIZE (mode));
6557 clobbered = gen_rtx_MEM (mode, clobbered);
6558 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6559 call_arguments
6560 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6561 }
6562 }
6563
6564 /* Callback for cselib_record_sets_hook. It records the uses and stores
6565 in an insn as micro operations, after cselib_record_sets has
6566 analyzed the sets in the insn but before it modifies the stored
6567 values in its internal tables, unless cselib_record_sets doesn't
6568 call it directly (perhaps because we're not doing cselib in the
6569 first place, in which case SETS and N_SETS will be 0). */
6570
6571 static void
6572 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6573 {
6574 basic_block bb = BLOCK_FOR_INSN (insn);
6575 int n1, n2;
6576 struct count_use_info cui;
6577 micro_operation *mos;
6578
6579 cselib_hook_called = true;
6580
6581 cui.insn = insn;
6582 cui.bb = bb;
6583 cui.sets = sets;
6584 cui.n_sets = n_sets;
6585
6586 n1 = VTI (bb)->mos.length ();
6587 cui.store_p = false;
6588 note_uses (&PATTERN (insn), add_uses_1, &cui);
6589 n2 = VTI (bb)->mos.length () - 1;
6590 mos = VTI (bb)->mos.address ();
6591
6592 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6593 MO_VAL_LOC last. */
6594 while (n1 < n2)
6595 {
6596 while (n1 < n2 && mos[n1].type == MO_USE)
6597 n1++;
6598 while (n1 < n2 && mos[n2].type != MO_USE)
6599 n2--;
6600 if (n1 < n2)
6601 std::swap (mos[n1], mos[n2]);
6602 }
6603
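/* Now move any MO_VAL_LOCs to the end of the range just scanned,
   so they come last among this insn's use operations.  */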
6604 n2 = VTI (bb)->mos.length () - 1;
6605 while (n1 < n2)
6606 {
6607 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6608 n1++;
6609 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6610 n2--;
6611 if (n1 < n2)
6612 std::swap (mos[n1], mos[n2]);
6613 }
6614
6615 if (CALL_P (insn))
6616 {
6617 micro_operation mo;
6618
6619 mo.type = MO_CALL;
6620 mo.insn = insn;
6621 mo.u.loc = call_arguments;
6622 call_arguments = NULL_RTX;
6623
6624 if (dump_file && (dump_flags & TDF_DETAILS))
6625 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6626 VTI (bb)->mos.safe_push (mo);
6627 }
6628
6629 n1 = VTI (bb)->mos.length ();
6630 /* This will record NEXT_INSN (insn), such that we can
6631 insert notes before it without worrying about any
6632 notes that MO_USEs might emit after the insn. */
6633 cui.store_p = true;
6634 note_stores (insn, add_stores, &cui);
6635 n2 = VTI (bb)->mos.length () - 1;
6636 mos = VTI (bb)->mos.address ();
6637
6638 /* Order the MO_VAL_USEs first (note_stores does nothing
6639 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6640 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6641 while (n1 < n2)
6642 {
6643 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6644 n1++;
6645 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6646 n2--;
6647 if (n1 < n2)
6648 std::swap (mos[n1], mos[n2]);
6649 }
6650
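/* Then order the MO_CLOBBERs so that they directly follow the
   MO_VAL_USEs, ahead of the MO_SET/MO_COPY/MO_VAL_SET operations.  */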
6651 n2 = VTI (bb)->mos.length () - 1;
6652 while (n1 < n2)
6653 {
6654 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6655 n1++;
6656 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6657 n2--;
6658 if (n1 < n2)
6659 std::swap (mos[n1], mos[n2]);
6660 }
6661 }
6662
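/* Return the initialization status, in dataflow set IN, of the variable
   that the assignment source SRC refers to.  Everything is treated as
   initialized unless -fvar-tracking-uninit is in effect.  */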
6663 static enum var_init_status
6664 find_src_status (dataflow_set *in, rtx src)
6665 {
6666 tree decl = NULL_TREE;
6667 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6668
6669 if (! flag_var_tracking_uninit)
6670 status = VAR_INIT_STATUS_INITIALIZED;
6671
6672 if (src && REG_P (src))
6673 decl = var_debug_decl (REG_EXPR (src));
6674 else if (src && MEM_P (src))
6675 decl = var_debug_decl (MEM_EXPR (src));
6676
6677 if (src && decl)
6678 status = get_init_value (in, src, dv_from_decl (decl));
6679
6680 return status;
6681 }
6682
6683 /* SRC is the source of an assignment. Use SET to try to find what
6684 was ultimately assigned to SRC. Return that value if known,
6685 otherwise return NULL_RTX. */
6686
6687 static rtx
6688 find_src_set_src (dataflow_set *set, rtx src)
6689 {
6690 tree decl = NULL_TREE; /* The variable being copied around. */
6691 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6692 variable *var;
6693 location_chain *nextp;
6694 int i;
6695 bool found;
6696
6697 if (src && REG_P (src))
6698 decl = var_debug_decl (REG_EXPR (src));
6699 else if (src && MEM_P (src))
6700 decl = var_debug_decl (MEM_EXPR (src));
6701
6702 if (src && decl)
6703 {
6704 decl_or_value dv = dv_from_decl (decl);
6705
6706 var = shared_hash_find (set->vars, dv);
6707 if (var)
6708 {
6709 found = false;
6710 for (i = 0; i < var->n_var_parts && !found; i++)
6711 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6712 nextp = nextp->next)
6713 if (rtx_equal_p (nextp->loc, src))
6714 {
6715 set_src = nextp->set_src;
6716 found = true;
6717 }
6718
6719 }
6720 }
6721
6722 return set_src;
6723 }
6724
6725 /* Compute the changes of variable locations in the basic block BB. */
6726
6727 static bool
6728 compute_bb_dataflow (basic_block bb)
6729 {
6730 unsigned int i;
6731 micro_operation *mo;
6732 bool changed;
6733 dataflow_set old_out;
6734 dataflow_set *in = &VTI (bb)->in;
6735 dataflow_set *out = &VTI (bb)->out;
6736
6737 dataflow_set_init (&old_out);
6738 dataflow_set_copy (&old_out, out);
6739 dataflow_set_copy (out, in);
6740
6741 if (MAY_HAVE_DEBUG_BIND_INSNS)
6742 local_get_addr_cache = new hash_map<rtx, rtx>;
6743
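/* Apply the micro operations recorded for BB, in order, to the OUT set.  */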
6744 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6745 {
6746 rtx_insn *insn = mo->insn;
6747
6748 switch (mo->type)
6749 {
6750 case MO_CALL:
6751 dataflow_set_clear_at_call (out, insn);
6752 break;
6753
6754 case MO_USE:
6755 {
6756 rtx loc = mo->u.loc;
6757
6758 if (REG_P (loc))
6759 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6760 else if (MEM_P (loc))
6761 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6762 }
6763 break;
6764
6765 case MO_VAL_LOC:
6766 {
6767 rtx loc = mo->u.loc;
6768 rtx val, vloc;
6769 tree var;
6770
6771 if (GET_CODE (loc) == CONCAT)
6772 {
6773 val = XEXP (loc, 0);
6774 vloc = XEXP (loc, 1);
6775 }
6776 else
6777 {
6778 val = NULL_RTX;
6779 vloc = loc;
6780 }
6781
6782 var = PAT_VAR_LOCATION_DECL (vloc);
6783
6784 clobber_variable_part (out, NULL_RTX,
6785 dv_from_decl (var), 0, NULL_RTX);
6786 if (val)
6787 {
6788 if (VAL_NEEDS_RESOLUTION (loc))
6789 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6790 set_variable_part (out, val, dv_from_decl (var), 0,
6791 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6792 INSERT);
6793 }
6794 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6795 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6796 dv_from_decl (var), 0,
6797 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6798 INSERT);
6799 }
6800 break;
6801
6802 case MO_VAL_USE:
6803 {
6804 rtx loc = mo->u.loc;
6805 rtx val, vloc, uloc;
6806
6807 vloc = uloc = XEXP (loc, 1);
6808 val = XEXP (loc, 0);
6809
6810 if (GET_CODE (val) == CONCAT)
6811 {
6812 uloc = XEXP (val, 1);
6813 val = XEXP (val, 0);
6814 }
6815
6816 if (VAL_NEEDS_RESOLUTION (loc))
6817 val_resolve (out, val, vloc, insn);
6818 else
6819 val_store (out, val, uloc, insn, false);
6820
6821 if (VAL_HOLDS_TRACK_EXPR (loc))
6822 {
6823 if (GET_CODE (uloc) == REG)
6824 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6825 NULL);
6826 else if (GET_CODE (uloc) == MEM)
6827 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6828 NULL);
6829 }
6830 }
6831 break;
6832
6833 case MO_VAL_SET:
6834 {
6835 rtx loc = mo->u.loc;
6836 rtx val, vloc, uloc;
6837 rtx dstv, srcv;
6838
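/* The possible forms of LOC here are listed in the comment in add_stores,
   just before the MO_VAL_SET micro operation is pushed.  */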
6839 vloc = loc;
6840 uloc = XEXP (vloc, 1);
6841 val = XEXP (vloc, 0);
6842 vloc = uloc;
6843
6844 if (GET_CODE (uloc) == SET)
6845 {
6846 dstv = SET_DEST (uloc);
6847 srcv = SET_SRC (uloc);
6848 }
6849 else
6850 {
6851 dstv = uloc;
6852 srcv = NULL;
6853 }
6854
6855 if (GET_CODE (val) == CONCAT)
6856 {
6857 dstv = vloc = XEXP (val, 1);
6858 val = XEXP (val, 0);
6859 }
6860
6861 if (GET_CODE (vloc) == SET)
6862 {
6863 srcv = SET_SRC (vloc);
6864
6865 gcc_assert (val != srcv);
6866 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6867
6868 dstv = vloc = SET_DEST (vloc);
6869
6870 if (VAL_NEEDS_RESOLUTION (loc))
6871 val_resolve (out, val, srcv, insn);
6872 }
6873 else if (VAL_NEEDS_RESOLUTION (loc))
6874 {
6875 gcc_assert (GET_CODE (uloc) == SET
6876 && GET_CODE (SET_SRC (uloc)) == REG);
6877 val_resolve (out, val, SET_SRC (uloc), insn);
6878 }
6879
6880 if (VAL_HOLDS_TRACK_EXPR (loc))
6881 {
6882 if (VAL_EXPR_IS_CLOBBERED (loc))
6883 {
6884 if (REG_P (uloc))
6885 var_reg_delete (out, uloc, true);
6886 else if (MEM_P (uloc))
6887 {
6888 gcc_assert (MEM_P (dstv));
6889 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6890 var_mem_delete (out, dstv, true);
6891 }
6892 }
6893 else
6894 {
6895 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6896 rtx src = NULL, dst = uloc;
6897 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6898
6899 if (GET_CODE (uloc) == SET)
6900 {
6901 src = SET_SRC (uloc);
6902 dst = SET_DEST (uloc);
6903 }
6904
6905 if (copied_p)
6906 {
6907 if (flag_var_tracking_uninit)
6908 {
6909 status = find_src_status (in, src);
6910
6911 if (status == VAR_INIT_STATUS_UNKNOWN)
6912 status = find_src_status (out, src);
6913 }
6914
6915 src = find_src_set_src (in, src);
6916 }
6917
6918 if (REG_P (dst))
6919 var_reg_delete_and_set (out, dst, !copied_p,
6920 status, srcv);
6921 else if (MEM_P (dst))
6922 {
6923 gcc_assert (MEM_P (dstv));
6924 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6925 var_mem_delete_and_set (out, dstv, !copied_p,
6926 status, srcv);
6927 }
6928 }
6929 }
6930 else if (REG_P (uloc))
6931 var_regno_delete (out, REGNO (uloc));
6932 else if (MEM_P (uloc))
6933 {
6934 gcc_checking_assert (GET_CODE (vloc) == MEM);
6935 gcc_checking_assert (dstv == vloc);
6936 if (dstv != vloc)
6937 clobber_overlapping_mems (out, vloc);
6938 }
6939
6940 val_store (out, val, dstv, insn, true);
6941 }
6942 break;
6943
6944 case MO_SET:
6945 {
6946 rtx loc = mo->u.loc;
6947 rtx set_src = NULL;
6948
6949 if (GET_CODE (loc) == SET)
6950 {
6951 set_src = SET_SRC (loc);
6952 loc = SET_DEST (loc);
6953 }
6954
6955 if (REG_P (loc))
6956 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6957 set_src);
6958 else if (MEM_P (loc))
6959 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6960 set_src);
6961 }
6962 break;
6963
6964 case MO_COPY:
6965 {
6966 rtx loc = mo->u.loc;
6967 enum var_init_status src_status;
6968 rtx set_src = NULL;
6969
6970 if (GET_CODE (loc) == SET)
6971 {
6972 set_src = SET_SRC (loc);
6973 loc = SET_DEST (loc);
6974 }
6975
6976 if (! flag_var_tracking_uninit)
6977 src_status = VAR_INIT_STATUS_INITIALIZED;
6978 else
6979 {
6980 src_status = find_src_status (in, set_src);
6981
6982 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6983 src_status = find_src_status (out, set_src);
6984 }
6985
6986 set_src = find_src_set_src (in, set_src);
6987
6988 if (REG_P (loc))
6989 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6990 else if (MEM_P (loc))
6991 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6992 }
6993 break;
6994
6995 case MO_USE_NO_VAR:
6996 {
6997 rtx loc = mo->u.loc;
6998
6999 if (REG_P (loc))
7000 var_reg_delete (out, loc, false);
7001 else if (MEM_P (loc))
7002 var_mem_delete (out, loc, false);
7003 }
7004 break;
7005
7006 case MO_CLOBBER:
7007 {
7008 rtx loc = mo->u.loc;
7009
7010 if (REG_P (loc))
7011 var_reg_delete (out, loc, true);
7012 else if (MEM_P (loc))
7013 var_mem_delete (out, loc, true);
7014 }
7015 break;
7016
7017 case MO_ADJUST:
7018 out->stack_adjust += mo->u.adjust;
7019 break;
7020 }
7021 }
7022
7023 if (MAY_HAVE_DEBUG_BIND_INSNS)
7024 {
7025 delete local_get_addr_cache;
7026 local_get_addr_cache = NULL;
7027
7028 dataflow_set_equiv_regs (out);
7029 shared_hash_htab (out->vars)
7030 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
7031 shared_hash_htab (out->vars)
7032 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7033 if (flag_checking)
7034 shared_hash_htab (out->vars)
7035 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7036 }
7037 changed = dataflow_set_different (&old_out, out);
7038 dataflow_set_destroy (&old_out);
7039 return changed;
7040 }
7041
7042 /* Find the locations of variables in the whole function. */
7043
7044 static bool
7045 vt_find_locations (void)
7046 {
7047 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7048 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7049 sbitmap in_worklist, in_pending;
7050 basic_block bb;
7051 edge e;
7052 int *bb_order;
7053 int *rc_order;
7054 int i;
7055 int htabsz = 0;
7056 int htabmax = param_max_vartrack_size;
7057 bool success = true;
7058
7059 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7060 /* Compute the reverse completion order of a depth-first search of the CFG
7061 so that the dataflow analysis converges faster. */
7062 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7063 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7064 pre_and_rev_post_order_compute (NULL, rc_order, false);
7065 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7066 bb_order[rc_order[i]] = i;
7067 free (rc_order);
7068
7069 auto_sbitmap visited (last_basic_block_for_fn (cfun));
7070 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7071 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7072 bitmap_clear (in_worklist);
7073
7074 FOR_EACH_BB_FN (bb, cfun)
7075 pending->insert (bb_order[bb->index], bb);
7076 bitmap_ones (in_pending);
7077
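/* Iterate to a fixed point: blocks are taken from WORKLIST in the
   precomputed order; when a block's OUT set changes, its successors are
   requeued, into the current round if they were not yet visited or into
   PENDING for the next round otherwise.  */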
7078 while (success && !pending->empty ())
7079 {
7080 std::swap (worklist, pending);
7081 std::swap (in_worklist, in_pending);
7082
7083 bitmap_clear (visited);
7084
7085 while (!worklist->empty ())
7086 {
7087 bb = worklist->extract_min ();
7088 bitmap_clear_bit (in_worklist, bb->index);
7089 gcc_assert (!bitmap_bit_p (visited, bb->index));
7090 if (!bitmap_bit_p (visited, bb->index))
7091 {
7092 bool changed;
7093 edge_iterator ei;
7094 int oldinsz, oldoutsz;
7095
7096 bitmap_set_bit (visited, bb->index);
7097
7098 if (VTI (bb)->in.vars)
7099 {
7100 htabsz
7101 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7102 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7103 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7104 oldoutsz
7105 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7106 }
7107 else
7108 oldinsz = oldoutsz = 0;
7109
7110 if (MAY_HAVE_DEBUG_BIND_INSNS)
7111 {
7112 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7113 bool first = true, adjust = false;
7114
7115 /* Calculate the IN set as the intersection of
7116 predecessor OUT sets. */
7117
7118 dataflow_set_clear (in);
7119 dst_can_be_shared = true;
7120
7121 FOR_EACH_EDGE (e, ei, bb->preds)
7122 if (!VTI (e->src)->flooded)
7123 gcc_assert (bb_order[bb->index]
7124 <= bb_order[e->src->index]);
7125 else if (first)
7126 {
7127 dataflow_set_copy (in, &VTI (e->src)->out);
7128 first_out = &VTI (e->src)->out;
7129 first = false;
7130 }
7131 else
7132 {
7133 dataflow_set_merge (in, &VTI (e->src)->out);
7134 adjust = true;
7135 }
7136
7137 if (adjust)
7138 {
7139 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7140
7141 if (flag_checking)
7142 /* Merge and merge_adjust should keep entries in
7143 canonical order. */
7144 shared_hash_htab (in->vars)
7145 ->traverse <dataflow_set *,
7146 canonicalize_loc_order_check> (in);
7147
7148 if (dst_can_be_shared)
7149 {
7150 shared_hash_destroy (in->vars);
7151 in->vars = shared_hash_copy (first_out->vars);
7152 }
7153 }
7154
7155 VTI (bb)->flooded = true;
7156 }
7157 else
7158 {
7159 /* Calculate the IN set as the union of predecessor OUT sets. */
7160 dataflow_set_clear (&VTI (bb)->in);
7161 FOR_EACH_EDGE (e, ei, bb->preds)
7162 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7163 }
7164
7165 changed = compute_bb_dataflow (bb);
7166 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7167 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7168
7169 if (htabmax && htabsz > htabmax)
7170 {
7171 if (MAY_HAVE_DEBUG_BIND_INSNS)
7172 inform (DECL_SOURCE_LOCATION (cfun->decl),
7173 "variable tracking size limit exceeded with "
7174 "%<-fvar-tracking-assignments%>, retrying without");
7175 else
7176 inform (DECL_SOURCE_LOCATION (cfun->decl),
7177 "variable tracking size limit exceeded");
7178 success = false;
7179 break;
7180 }
7181
7182 if (changed)
7183 {
7184 FOR_EACH_EDGE (e, ei, bb->succs)
7185 {
7186 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7187 continue;
7188
7189 if (bitmap_bit_p (visited, e->dest->index))
7190 {
7191 if (!bitmap_bit_p (in_pending, e->dest->index))
7192 {
7193 /* Send E->DEST to next round. */
7194 bitmap_set_bit (in_pending, e->dest->index);
7195 pending->insert (bb_order[e->dest->index],
7196 e->dest);
7197 }
7198 }
7199 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7200 {
7201 /* Add E->DEST to current round. */
7202 bitmap_set_bit (in_worklist, e->dest->index);
7203 worklist->insert (bb_order[e->dest->index],
7204 e->dest);
7205 }
7206 }
7207 }
7208
7209 if (dump_file)
7210 fprintf (dump_file,
7211 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7212 bb->index,
7213 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7214 oldinsz,
7215 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7216 oldoutsz,
7217 (int)worklist->nodes (), (int)pending->nodes (),
7218 htabsz);
7219
7220 if (dump_file && (dump_flags & TDF_DETAILS))
7221 {
7222 fprintf (dump_file, "BB %i IN:\n", bb->index);
7223 dump_dataflow_set (&VTI (bb)->in);
7224 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7225 dump_dataflow_set (&VTI (bb)->out);
7226 }
7227 }
7228 }
7229 }
7230
7231 if (success && MAY_HAVE_DEBUG_BIND_INSNS)
7232 FOR_EACH_BB_FN (bb, cfun)
7233 gcc_assert (VTI (bb)->flooded);
7234
7235 free (bb_order);
7236 delete worklist;
7237 delete pending;
7238 sbitmap_free (in_worklist);
7239 sbitmap_free (in_pending);
7240
7241 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7242 return success;
7243 }
7244
7245 /* Print the content of the LIST to dump file. */
7246
7247 static void
7248 dump_attrs_list (attrs *list)
7249 {
7250 for (; list; list = list->next)
7251 {
7252 if (dv_is_decl_p (list->dv))
7253 print_mem_expr (dump_file, dv_as_decl (list->dv));
7254 else
7255 print_rtl_single (dump_file, dv_as_value (list->dv));
7256 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7257 }
7258 fprintf (dump_file, "\n");
7259 }
7260
7261 /* Print the information about variable *SLOT to dump file. */
7262
7263 int
7264 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7265 {
7266 variable *var = *slot;
7267
7268 dump_var (var);
7269
7270 /* Continue traversing the hash table. */
7271 return 1;
7272 }
7273
7274 /* Print the information about variable VAR to dump file. */
7275
7276 static void
7277 dump_var (variable *var)
7278 {
7279 int i;
7280 location_chain *node;
7281
7282 if (dv_is_decl_p (var->dv))
7283 {
7284 const_tree decl = dv_as_decl (var->dv);
7285
7286 if (DECL_NAME (decl))
7287 {
7288 fprintf (dump_file, " name: %s",
7289 IDENTIFIER_POINTER (DECL_NAME (decl)));
7290 if (dump_flags & TDF_UID)
7291 fprintf (dump_file, "D.%u", DECL_UID (decl));
7292 }
7293 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7294 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7295 else
7296 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7297 fprintf (dump_file, "\n");
7298 }
7299 else
7300 {
7301 fputc (' ', dump_file);
7302 print_rtl_single (dump_file, dv_as_value (var->dv));
7303 }
7304
7305 for (i = 0; i < var->n_var_parts; i++)
7306 {
7307 fprintf (dump_file, " offset %ld\n",
7308 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7309 for (node = var->var_part[i].loc_chain; node; node = node->next)
7310 {
7311 fprintf (dump_file, " ");
7312 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7313 fprintf (dump_file, "[uninit]");
7314 print_rtl_single (dump_file, node->loc);
7315 }
7316 }
7317 }
7318
7319 /* Print the information about variables from hash table VARS to dump file. */
7320
7321 static void
7322 dump_vars (variable_table_type *vars)
7323 {
7324 if (!vars->is_empty ())
7325 {
7326 fprintf (dump_file, "Variables:\n");
7327 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7328 }
7329 }
7330
7331 /* Print the dataflow set SET to dump file. */
7332
7333 static void
7334 dump_dataflow_set (dataflow_set *set)
7335 {
7336 int i;
7337
7338 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7339 set->stack_adjust);
7340 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7341 {
7342 if (set->regs[i])
7343 {
7344 fprintf (dump_file, "Reg %d:", i);
7345 dump_attrs_list (set->regs[i]);
7346 }
7347 }
7348 dump_vars (shared_hash_htab (set->vars));
7349 fprintf (dump_file, "\n");
7350 }
7351
7352 /* Print the IN and OUT sets for each basic block to dump file. */
7353
7354 static void
7355 dump_dataflow_sets (void)
7356 {
7357 basic_block bb;
7358
7359 FOR_EACH_BB_FN (bb, cfun)
7360 {
7361 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7362 fprintf (dump_file, "IN:\n");
7363 dump_dataflow_set (&VTI (bb)->in);
7364 fprintf (dump_file, "OUT:\n");
7365 dump_dataflow_set (&VTI (bb)->out);
7366 }
7367 }
7368
7369 /* Return the variable for DV in dropped_values, inserting one if
7370 requested with INSERT. */
7371
7372 static inline variable *
7373 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7374 {
7375 variable **slot;
7376 variable *empty_var;
7377 onepart_enum onepart;
7378
7379 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7380
7381 if (!slot)
7382 return NULL;
7383
7384 if (*slot)
7385 return *slot;
7386
7387 gcc_checking_assert (insert == INSERT);
7388
7389 onepart = dv_onepart_p (dv);
7390
7391 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7392
7393 empty_var = onepart_pool_allocate (onepart);
7394 empty_var->dv = dv;
7395 empty_var->refcount = 1;
7396 empty_var->n_var_parts = 0;
7397 empty_var->onepart = onepart;
7398 empty_var->in_changed_variables = false;
7399 empty_var->var_part[0].loc_chain = NULL;
7400 empty_var->var_part[0].cur_loc = NULL;
7401 VAR_LOC_1PAUX (empty_var) = NULL;
7402 set_dv_changed (dv, true);
7403
7404 *slot = empty_var;
7405
7406 return empty_var;
7407 }
7408
7409 /* Recover the one-part aux from dropped_values. */
7410
7411 static struct onepart_aux *
7412 recover_dropped_1paux (variable *var)
7413 {
7414 variable *dvar;
7415
7416 gcc_checking_assert (var->onepart);
7417
7418 if (VAR_LOC_1PAUX (var))
7419 return VAR_LOC_1PAUX (var);
7420
7421 if (var->onepart == ONEPART_VDECL)
7422 return NULL;
7423
7424 dvar = variable_from_dropped (var->dv, NO_INSERT);
7425
7426 if (!dvar)
7427 return NULL;
7428
7429 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7430 VAR_LOC_1PAUX (dvar) = NULL;
7431
7432 return VAR_LOC_1PAUX (var);
7433 }
7434
7435 /* Add variable VAR to the hash table of changed variables and,
7436 if it has no locations, delete it from SET's hash table. */
7437
7438 static void
7439 variable_was_changed (variable *var, dataflow_set *set)
7440 {
7441 hashval_t hash = dv_htab_hash (var->dv);
7442
7443 if (emit_notes)
7444 {
7445 variable **slot;
7446
7447 /* Remember this decl or VALUE has been added to changed_variables. */
7448 set_dv_changed (var->dv, true);
7449
7450 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7451
7452 if (*slot)
7453 {
7454 variable *old_var = *slot;
7455 gcc_assert (old_var->in_changed_variables);
7456 old_var->in_changed_variables = false;
7457 if (var != old_var && var->onepart)
7458 {
7459 /* Restore the auxiliary info from an empty variable
7460 previously created for changed_variables, so it is
7461 not lost. */
7462 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7463 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7464 VAR_LOC_1PAUX (old_var) = NULL;
7465 }
7466 variable_htab_free (*slot);
7467 }
7468
7469 if (set && var->n_var_parts == 0)
7470 {
7471 onepart_enum onepart = var->onepart;
7472 variable *empty_var = NULL;
7473 variable **dslot = NULL;
7474
7475 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7476 {
7477 dslot = dropped_values->find_slot_with_hash (var->dv,
7478 dv_htab_hash (var->dv),
7479 INSERT);
7480 empty_var = *dslot;
7481
7482 if (empty_var)
7483 {
7484 gcc_checking_assert (!empty_var->in_changed_variables);
7485 if (!VAR_LOC_1PAUX (var))
7486 {
7487 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7488 VAR_LOC_1PAUX (empty_var) = NULL;
7489 }
7490 else
7491 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7492 }
7493 }
7494
7495 if (!empty_var)
7496 {
7497 empty_var = onepart_pool_allocate (onepart);
7498 empty_var->dv = var->dv;
7499 empty_var->refcount = 1;
7500 empty_var->n_var_parts = 0;
7501 empty_var->onepart = onepart;
7502 if (dslot)
7503 {
7504 empty_var->refcount++;
7505 *dslot = empty_var;
7506 }
7507 }
7508 else
7509 empty_var->refcount++;
7510 empty_var->in_changed_variables = true;
7511 *slot = empty_var;
7512 if (onepart)
7513 {
7514 empty_var->var_part[0].loc_chain = NULL;
7515 empty_var->var_part[0].cur_loc = NULL;
7516 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7517 VAR_LOC_1PAUX (var) = NULL;
7518 }
7519 goto drop_var;
7520 }
7521 else
7522 {
7523 if (var->onepart && !VAR_LOC_1PAUX (var))
7524 recover_dropped_1paux (var);
7525 var->refcount++;
7526 var->in_changed_variables = true;
7527 *slot = var;
7528 }
7529 }
7530 else
7531 {
7532 gcc_assert (set);
7533 if (var->n_var_parts == 0)
7534 {
7535 variable **slot;
7536
7537 drop_var:
7538 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7539 if (slot)
7540 {
7541 if (shared_hash_shared (set->vars))
7542 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7543 NO_INSERT);
7544 shared_hash_htab (set->vars)->clear_slot (slot);
7545 }
7546 }
7547 }
7548 }
7549
7550 /* Look for the index in VAR->var_part corresponding to OFFSET.
7551 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7552 referenced int will be set to the index that the part has or should
7553 have, if it should be inserted. */
7554
7555 static inline int
7556 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7557 int *insertion_point)
7558 {
7559 int pos, low, high;
7560
7561 if (var->onepart)
7562 {
7563 if (offset != 0)
7564 return -1;
7565
7566 if (insertion_point)
7567 *insertion_point = 0;
7568
7569 return var->n_var_parts - 1;
7570 }
7571
7572 /* Find the location part by binary search; the parts are sorted by offset. */
7573 low = 0;
7574 high = var->n_var_parts;
7575 while (low != high)
7576 {
7577 pos = (low + high) / 2;
7578 if (VAR_PART_OFFSET (var, pos) < offset)
7579 low = pos + 1;
7580 else
7581 high = pos;
7582 }
7583 pos = low;
7584
7585 if (insertion_point)
7586 *insertion_point = pos;
7587
7588 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7589 return pos;
7590
7591 return -1;
7592 }
7593
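/* Helper for set_variable_part: record that the part of variable DV at
   OFFSET is located in LOC, updating the variable found in SLOT of
   dataflow set SET, and return the (possibly unshared) slot.  */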
7594 static variable **
7595 set_slot_part (dataflow_set *set, rtx loc, variable **slot,
7596 decl_or_value dv, HOST_WIDE_INT offset,
7597 enum var_init_status initialized, rtx set_src)
7598 {
7599 int pos;
7600 location_chain *node, *next;
7601 location_chain **nextp;
7602 variable *var;
7603 onepart_enum onepart;
7604
7605 var = *slot;
7606
7607 if (var)
7608 onepart = var->onepart;
7609 else
7610 onepart = dv_onepart_p (dv);
7611
7612 gcc_checking_assert (offset == 0 || !onepart);
7613 gcc_checking_assert (loc != dv_as_opaque (dv));
7614
7615 if (! flag_var_tracking_uninit)
7616 initialized = VAR_INIT_STATUS_INITIALIZED;
7617
7618 if (!var)
7619 {
7620 /* Create new variable information. */
7621 var = onepart_pool_allocate (onepart);
7622 var->dv = dv;
7623 var->refcount = 1;
7624 var->n_var_parts = 1;
7625 var->onepart = onepart;
7626 var->in_changed_variables = false;
7627 if (var->onepart)
7628 VAR_LOC_1PAUX (var) = NULL;
7629 else
7630 VAR_PART_OFFSET (var, 0) = offset;
7631 var->var_part[0].loc_chain = NULL;
7632 var->var_part[0].cur_loc = NULL;
7633 *slot = var;
7634 pos = 0;
7635 nextp = &var->var_part[0].loc_chain;
7636 }
7637 else if (onepart)
7638 {
7639 int r = -1, c = 0;
7640
7641 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7642
7643 pos = 0;
7644
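/* Find the insertion point for LOC in the one-part location chain, which
   is kept in a canonical order; C counts the nodes that precede it, and
   R is set to 0 if LOC is already present in the chain.  */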
7645 if (GET_CODE (loc) == VALUE)
7646 {
7647 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7648 nextp = &node->next)
7649 if (GET_CODE (node->loc) == VALUE)
7650 {
7651 if (node->loc == loc)
7652 {
7653 r = 0;
7654 break;
7655 }
7656 if (canon_value_cmp (node->loc, loc))
7657 c++;
7658 else
7659 {
7660 r = 1;
7661 break;
7662 }
7663 }
7664 else if (REG_P (node->loc) || MEM_P (node->loc))
7665 c++;
7666 else
7667 {
7668 r = 1;
7669 break;
7670 }
7671 }
7672 else if (REG_P (loc))
7673 {
7674 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7675 nextp = &node->next)
7676 if (REG_P (node->loc))
7677 {
7678 if (REGNO (node->loc) < REGNO (loc))
7679 c++;
7680 else
7681 {
7682 if (REGNO (node->loc) == REGNO (loc))
7683 r = 0;
7684 else
7685 r = 1;
7686 break;
7687 }
7688 }
7689 else
7690 {
7691 r = 1;
7692 break;
7693 }
7694 }
7695 else if (MEM_P (loc))
7696 {
7697 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7698 nextp = &node->next)
7699 if (REG_P (node->loc))
7700 c++;
7701 else if (MEM_P (node->loc))
7702 {
7703 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7704 break;
7705 else
7706 c++;
7707 }
7708 else
7709 {
7710 r = 1;
7711 break;
7712 }
7713 }
7714 else
7715 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7716 nextp = &node->next)
7717 if ((r = loc_cmp (node->loc, loc)) >= 0)
7718 break;
7719 else
7720 c++;
7721
7722 if (r == 0)
7723 return slot;
7724
7725 if (shared_var_p (var, set->vars))
7726 {
7727 slot = unshare_variable (set, slot, var, initialized);
7728 var = *slot;
7729 for (nextp = &var->var_part[0].loc_chain; c;
7730 nextp = &(*nextp)->next)
7731 c--;
7732 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7733 }
7734 }
7735 else
7736 {
7737 int inspos = 0;
7738
7739 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7740
7741 pos = find_variable_location_part (var, offset, &inspos);
7742
7743 if (pos >= 0)
7744 {
7745 node = var->var_part[pos].loc_chain;
7746
7747 if (node
7748 && ((REG_P (node->loc) && REG_P (loc)
7749 && REGNO (node->loc) == REGNO (loc))
7750 || rtx_equal_p (node->loc, loc)))
7751 {
7752 /* LOC is at the beginning of the chain, so we have nothing
7753 to do. */
7754 if (node->init < initialized)
7755 node->init = initialized;
7756 if (set_src != NULL)
7757 node->set_src = set_src;
7758
7759 return slot;
7760 }
7761 else
7762 {
7763 /* We have to make a copy of a shared variable. */
7764 if (shared_var_p (var, set->vars))
7765 {
7766 slot = unshare_variable (set, slot, var, initialized);
7767 var = *slot;
7768 }
7769 }
7770 }
7771 else
7772 {
7773 /* We have not found the location part, so a new one will be created. */
7774
7775 /* We have to make a copy of the shared variable. */
7776 if (shared_var_p (var, set->vars))
7777 {
7778 slot = unshare_variable (set, slot, var, initialized);
7779 var = *slot;
7780 }
7781
7782 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7783 thus there are at most MAX_VAR_PARTS different offsets. */
7784 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7785 && (!var->n_var_parts || !onepart));
7786
7787 /* We have to move the elements of the array starting at index
7788 INSPOS to the next position. */
7789 for (pos = var->n_var_parts; pos > inspos; pos--)
7790 var->var_part[pos] = var->var_part[pos - 1];
7791
7792 var->n_var_parts++;
7793 gcc_checking_assert (!onepart);
7794 VAR_PART_OFFSET (var, pos) = offset;
7795 var->var_part[pos].loc_chain = NULL;
7796 var->var_part[pos].cur_loc = NULL;
7797 }
7798
7799 /* Delete any existing occurrence of LOC from the chain, saving its values for the new node. */
7800 nextp = &var->var_part[pos].loc_chain;
7801 for (node = var->var_part[pos].loc_chain; node; node = next)
7802 {
7803 next = node->next;
7804 if ((REG_P (node->loc) && REG_P (loc)
7805 && REGNO (node->loc) == REGNO (loc))
7806 || rtx_equal_p (node->loc, loc))
7807 {
7808 /* Save these values, to assign to the new node, before
7809 deleting this one. */
7810 if (node->init > initialized)
7811 initialized = node->init;
7812 if (node->set_src != NULL && set_src == NULL)
7813 set_src = node->set_src;
7814 if (var->var_part[pos].cur_loc == node->loc)
7815 var->var_part[pos].cur_loc = NULL;
7816 delete node;
7817 *nextp = next;
7818 break;
7819 }
7820 else
7821 nextp = &node->next;
7822 }
7823
7824 nextp = &var->var_part[pos].loc_chain;
7825 }
7826
7827 /* Add the location to the beginning. */
7828 node = new location_chain;
7829 node->loc = loc;
7830 node->init = initialized;
7831 node->set_src = set_src;
7832 node->next = *nextp;
7833 *nextp = node;
7834
7835 /* If no location was emitted for this part yet, note the change so one will be. */
7836 if (var->var_part[pos].cur_loc == NULL)
7837 variable_was_changed (var, set);
7838
7839 return slot;
7840 }
7841
7842 /* Set the part of variable's location in the dataflow set SET. The
7843 variable part is specified by variable's declaration in DV and
7844 offset OFFSET and the part's location by LOC. IOPT should be
7845 NO_INSERT if the variable is known to be in SET already and the
7846 variable hash table must not be resized, and INSERT otherwise. */
7847
7848 static void
7849 set_variable_part (dataflow_set *set, rtx loc,
7850 decl_or_value dv, HOST_WIDE_INT offset,
7851 enum var_init_status initialized, rtx set_src,
7852 enum insert_option iopt)
7853 {
7854 variable **slot;
7855
7856 if (iopt == NO_INSERT)
7857 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7858 else
7859 {
7860 slot = shared_hash_find_slot (set->vars, dv);
7861 if (!slot)
7862 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7863 }
7864 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7865 }
7866
7867 /* Remove all recorded register locations for the given variable part
7868 from dataflow set SET, except for those that are identical to LOC.
7869 The variable part is specified by variable's declaration or value
7870 DV and offset OFFSET. */
7871
7872 static variable **
7873 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
7874 HOST_WIDE_INT offset, rtx set_src)
7875 {
7876 variable *var = *slot;
7877 int pos = find_variable_location_part (var, offset, NULL);
7878
7879 if (pos >= 0)
7880 {
7881 location_chain *node, *next;
7882
7883 /* Remove the register locations from the dataflow set. */
7884 next = var->var_part[pos].loc_chain;
7885 for (node = next; node; node = next)
7886 {
7887 next = node->next;
7888 if (node->loc != loc
7889 && (!flag_var_tracking_uninit
7890 || !set_src
7891 || MEM_P (set_src)
7892 || !rtx_equal_p (set_src, node->set_src)))
7893 {
7894 if (REG_P (node->loc))
7895 {
7896 attrs *anode, *anext;
7897 attrs **anextp;
7898
7899 /* Remove the variable part from the register's
7900 list, but preserve any other variable parts
7901 that might be regarded as live in that same
7902 register. */
7903 anextp = &set->regs[REGNO (node->loc)];
7904 for (anode = *anextp; anode; anode = anext)
7905 {
7906 anext = anode->next;
7907 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7908 && anode->offset == offset)
7909 {
7910 delete anode;
7911 *anextp = anext;
7912 }
7913 else
7914 anextp = &anode->next;
7915 }
7916 }
7917
7918 slot = delete_slot_part (set, node->loc, slot, offset);
7919 }
7920 }
7921 }
7922
7923 return slot;
7924 }
7925
7926 /* Remove all recorded register locations for the given variable part
7927 from dataflow set SET, except for those that are identical to LOC.
7928 The variable part is specified by variable's declaration or value
7929 DV and offset OFFSET. */
7930
7931 static void
7932 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7933 HOST_WIDE_INT offset, rtx set_src)
7934 {
7935 variable **slot;
7936
7937 if (!dv_as_opaque (dv)
7938 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7939 return;
7940
7941 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7942 if (!slot)
7943 return;
7944
7945 clobber_slot_part (set, loc, slot, offset, set_src);
7946 }
7947
7948 /* Delete the part of variable's location from dataflow set SET. The
7949 variable part is specified by its SET->vars slot SLOT and offset
7950 OFFSET and the part's location by LOC. */
7951
7952 static variable **
7953 delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
7954 HOST_WIDE_INT offset)
7955 {
7956 variable *var = *slot;
7957 int pos = find_variable_location_part (var, offset, NULL);
7958
7959 if (pos >= 0)
7960 {
7961 location_chain *node, *next;
7962 location_chain **nextp;
7963 bool changed;
7964 rtx cur_loc;
7965
7966 if (shared_var_p (var, set->vars))
7967 {
7968 /* If the variable contains the location part we have to
7969 make a copy of the variable. */
7970 for (node = var->var_part[pos].loc_chain; node;
7971 node = node->next)
7972 {
7973 if ((REG_P (node->loc) && REG_P (loc)
7974 && REGNO (node->loc) == REGNO (loc))
7975 || rtx_equal_p (node->loc, loc))
7976 {
7977 slot = unshare_variable (set, slot, var,
7978 VAR_INIT_STATUS_UNKNOWN);
7979 var = *slot;
7980 break;
7981 }
7982 }
7983 }
7984
7985 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7986 cur_loc = VAR_LOC_FROM (var);
7987 else
7988 cur_loc = var->var_part[pos].cur_loc;
7989
7990 /* Delete the location part. */
7991 changed = false;
7992 nextp = &var->var_part[pos].loc_chain;
7993 for (node = *nextp; node; node = next)
7994 {
7995 next = node->next;
7996 if ((REG_P (node->loc) && REG_P (loc)
7997 && REGNO (node->loc) == REGNO (loc))
7998 || rtx_equal_p (node->loc, loc))
7999 {
8000 /* If we have deleted the location which was last emitted,
8001 we have to emit a new location, so add the variable to the set
8002 of changed variables. */
8003 if (cur_loc == node->loc)
8004 {
8005 changed = true;
8006 var->var_part[pos].cur_loc = NULL;
8007 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8008 VAR_LOC_FROM (var) = NULL;
8009 }
8010 delete node;
8011 *nextp = next;
8012 break;
8013 }
8014 else
8015 nextp = &node->next;
8016 }
8017
8018 if (var->var_part[pos].loc_chain == NULL)
8019 {
8020 changed = true;
8021 var->n_var_parts--;
8022 while (pos < var->n_var_parts)
8023 {
8024 var->var_part[pos] = var->var_part[pos + 1];
8025 pos++;
8026 }
8027 }
8028 if (changed)
8029 variable_was_changed (var, set);
8030 }
8031
8032 return slot;
8033 }
8034
8035 /* Delete the part of variable's location from dataflow set SET. The
8036 variable part is specified by variable's declaration or value DV
8037 and offset OFFSET and the part's location by LOC. */
8038
8039 static void
8040 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8041 HOST_WIDE_INT offset)
8042 {
8043 variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8044 if (!slot)
8045 return;
8046
8047 delete_slot_part (set, loc, slot, offset);
8048 }
8049
8050
8051 /* Structure for passing some other parameters to function
8052 vt_expand_loc_callback. */
8053 class expand_loc_callback_data
8054 {
8055 public:
8056 /* The variables and values active at this point. */
8057 variable_table_type *vars;
8058
8059 /* Stack of values and debug_exprs under expansion, and their
8060 children. */
8061 auto_vec<rtx, 4> expanding;
8062
8063 /* Stack of values and debug_exprs whose expansion hit recursion
8064 cycles. They will have VALUE_RECURSED_INTO marked when added to
8065 this list. This flag will be cleared if any of its dependencies
8066 resolves to a valid location. So, if the flag remains set at the
8067 end of the search, we know no valid location for this one can
8068 possibly exist. */
8069 auto_vec<rtx, 4> pending;
8070
8071 /* The maximum depth among the sub-expressions under expansion.
8072 Zero indicates no expansion so far. */
8073 expand_depth depth;
8074 };
8075
8076 /* Allocate the one-part auxiliary data structure for VAR, with enough
8077 room for COUNT dependencies. */
8078
8079 static void
8080 loc_exp_dep_alloc (variable *var, int count)
8081 {
8082 size_t allocsize;
8083
8084 gcc_checking_assert (var->onepart);
8085
8086 /* We can be called with COUNT == 0 to allocate the data structure
8087 without any dependencies, e.g. for the backlinks only. However,
8088 if we are specifying a COUNT, then the dependency list must have
8089 been emptied before. It would be possible to adjust pointers or
8090 force it empty here, but this is better done at an earlier point
8091 in the algorithm, so we instead leave an assertion to catch
8092 errors. */
8093 gcc_checking_assert (!count
8094 || VAR_LOC_DEP_VEC (var) == NULL
8095 || VAR_LOC_DEP_VEC (var)->is_empty ());
8096
8097 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8098 return;
8099
8100 allocsize = offsetof (struct onepart_aux, deps)
8101 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8102
8103 if (VAR_LOC_1PAUX (var))
8104 {
8105 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8106 VAR_LOC_1PAUX (var), allocsize);
8107 /* If the reallocation moves the onepaux structure, the
8108 back-pointer to BACKLINKS in the first list member will still
8109 point to its old location. Adjust it. */
8110 if (VAR_LOC_DEP_LST (var))
8111 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8112 }
8113 else
8114 {
8115 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8116 *VAR_LOC_DEP_LSTP (var) = NULL;
8117 VAR_LOC_FROM (var) = NULL;
8118 VAR_LOC_DEPTH (var).complexity = 0;
8119 VAR_LOC_DEPTH (var).entryvals = 0;
8120 }
8121 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8122 }
8123
8124 /* Remove all entries from the vector of active dependencies of VAR,
8125 removing them from the back-links lists too. */
8126
8127 static void
8128 loc_exp_dep_clear (variable *var)
8129 {
8130 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8131 {
8132 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8133 if (led->next)
8134 led->next->pprev = led->pprev;
8135 if (led->pprev)
8136 *led->pprev = led->next;
8137 VAR_LOC_DEP_VEC (var)->pop ();
8138 }
8139 }
8140
8141 /* Insert an active dependency from VAR on X to the vector of
8142 dependencies, and add the corresponding back-link to X's list of
8143 back-links in VARS. */
8144
8145 static void
8146 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
8147 {
8148 decl_or_value dv;
8149 variable *xvar;
8150 loc_exp_dep *led;
8151
8152 dv = dv_from_rtx (x);
8153
8154 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8155 an additional look up? */
8156 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8157
8158 if (!xvar)
8159 {
8160 xvar = variable_from_dropped (dv, NO_INSERT);
8161 gcc_checking_assert (xvar);
8162 }
8163
8164 /* No point in adding the same backlink more than once. This may
8165 arise if, say, the same value appears in two complex expressions in
8166 the same loc_list, or even more than once in a single
8167 expression. */
8168 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8169 return;
8170
8171 if (var->onepart == NOT_ONEPART)
8172 led = new loc_exp_dep;
8173 else
8174 {
8175 loc_exp_dep empty;
8176 memset (&empty, 0, sizeof (empty));
8177 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8178 led = &VAR_LOC_DEP_VEC (var)->last ();
8179 }
8180 led->dv = var->dv;
8181 led->value = x;
8182
8183 loc_exp_dep_alloc (xvar, 0);
8184 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8185 led->next = *led->pprev;
8186 if (led->next)
8187 led->next->pprev = &led->next;
8188 *led->pprev = led;
8189 }
8190
8191 /* Create active dependencies of VAR on COUNT values starting at
8192 VALUE, and corresponding back-links to the entries in VARS. Return
8193 true if we found any pending-recursion results. */
8194
8195 static bool
8196 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8197 variable_table_type *vars)
8198 {
8199 bool pending_recursion = false;
8200
8201 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8202 || VAR_LOC_DEP_VEC (var)->is_empty ());
8203
8204 /* Allocate room for all COUNT dependencies up front; each one is
8205 recorded in the loop below. */
8206 loc_exp_dep_alloc (var, count);
8207
8208 while (count--)
8209 {
8210 rtx x = *value++;
8211
8212 if (!pending_recursion)
8213 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8214
8215 loc_exp_insert_dep (var, x, vars);
8216 }
8217
8218 return pending_recursion;
8219 }
8220
8221 /* Notify the back-links of IVAR that are pending recursion that we
8222 have found a non-NIL value for it, so they are cleared for another
8223 attempt to compute a current location. */
8224
8225 static void
8226 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
8227 {
8228 loc_exp_dep *led, *next;
8229
8230 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8231 {
8232 decl_or_value dv = led->dv;
8233 variable *var;
8234
8235 next = led->next;
8236
8237 if (dv_is_value_p (dv))
8238 {
8239 rtx value = dv_as_value (dv);
8240
8241 /* If we have already resolved it, leave it alone. */
8242 if (!VALUE_RECURSED_INTO (value))
8243 continue;
8244
8245 /* Check that VALUE_RECURSED_INTO, true from the test above,
8246 implies NO_LOC_P. */
8247 gcc_checking_assert (NO_LOC_P (value));
8248
8249 /* We won't notify variables that are being expanded,
8250 because their dependency list is cleared before
8251 recursing. */
8252 NO_LOC_P (value) = false;
8253 VALUE_RECURSED_INTO (value) = false;
8254
8255 gcc_checking_assert (dv_changed_p (dv));
8256 }
8257 else
8258 {
8259 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8260 if (!dv_changed_p (dv))
8261 continue;
8262 }
8263
8264 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8265
8266 if (!var)
8267 var = variable_from_dropped (dv, NO_INSERT);
8268
8269 if (var)
8270 notify_dependents_of_resolved_value (var, vars);
8271
8272 if (next)
8273 next->pprev = led->pprev;
8274 if (led->pprev)
8275 *led->pprev = next;
8276 led->next = NULL;
8277 led->pprev = NULL;
8278 }
8279 }
8280
8281 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8282 int max_depth, void *data);
8283
8284 /* Return the combined depth, when one sub-expression evaluated to
8285 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
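/* As a hypothetical worked example of the rule below: combining
   SAVED_DEPTH = { complexity 2, entryvals 1 } with
   BEST_DEPTH = { complexity 3, entryvals 0 } yields
   { complexity 3, entryvals 1 }: the larger complexity wins, while the
   ENTRY_VALUE counts of the two operands are added together.  */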
8286
8287 static inline expand_depth
8288 update_depth (expand_depth saved_depth, expand_depth best_depth)
8289 {
8290 /* If we didn't find anything, stick with what we had. */
8291 if (!best_depth.complexity)
8292 return saved_depth;
8293
8294 /* If we hadn't found anything before, use the depth of the current
8295 expression. Do NOT add one extra level; we want to compute the
8296 maximum depth among sub-expressions. We'll increment it later,
8297 if appropriate. */
8298 if (!saved_depth.complexity)
8299 return best_depth;
8300
8301 /* Combine the entryval count so that regardless of which one we
8302 return, the entryval count is accurate. */
8303 best_depth.entryvals = saved_depth.entryvals
8304 = best_depth.entryvals + saved_depth.entryvals;
8305
8306 if (saved_depth.complexity < best_depth.complexity)
8307 return best_depth;
8308 else
8309 return saved_depth;
8310 }
8311
8312 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8313 DATA for cselib expand callback. If PENDRECP is given, indicate in
8314 it whether any sub-expression couldn't be fully evaluated because
8315 it is pending recursion resolution. */
8316
8317 static inline rtx
8318 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
8319 bool *pendrecp)
8320 {
8321 class expand_loc_callback_data *elcd
8322 = (class expand_loc_callback_data *) data;
8323 location_chain *loc, *next;
8324 rtx result = NULL;
8325 int first_child, result_first_child, last_child;
8326 bool pending_recursion;
8327 rtx loc_from = NULL;
8328 struct elt_loc_list *cloc = NULL;
8329 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8330 int wanted_entryvals, found_entryvals = 0;
8331
8332 /* Clear all backlinks pointing at this, so that we're not notified
8333 while we're active. */
8334 loc_exp_dep_clear (var);
8335
8336 retry:
8337 if (var->onepart == ONEPART_VALUE)
8338 {
8339 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8340
8341 gcc_checking_assert (cselib_preserved_value_p (val));
8342
8343 cloc = val->locs;
8344 }
8345
8346 first_child = result_first_child = last_child
8347 = elcd->expanding.length ();
8348
8349 wanted_entryvals = found_entryvals;
8350
8351 /* Attempt to expand each available location in turn. */
8352 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8353 loc || cloc; loc = next)
8354 {
8355 result_first_child = last_child;
8356
8357 if (!loc)
8358 {
8359 loc_from = cloc->loc;
8360 next = loc;
8361 cloc = cloc->next;
8362 if (unsuitable_loc (loc_from))
8363 continue;
8364 }
8365 else
8366 {
8367 loc_from = loc->loc;
8368 next = loc->next;
8369 }
8370
8371 gcc_checking_assert (!unsuitable_loc (loc_from));
8372
8373 elcd->depth.complexity = elcd->depth.entryvals = 0;
8374 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8375 vt_expand_loc_callback, data);
8376 last_child = elcd->expanding.length ();
8377
8378 if (result)
8379 {
8380 depth = elcd->depth;
8381
8382 gcc_checking_assert (depth.complexity
8383 || result_first_child == last_child);
8384
8385 if (last_child - result_first_child != 1)
8386 {
8387 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8388 depth.entryvals++;
8389 depth.complexity++;
8390 }
8391
8392 if (depth.complexity <= EXPR_USE_DEPTH)
8393 {
8394 if (depth.entryvals <= wanted_entryvals)
8395 break;
8396 else if (!found_entryvals || depth.entryvals < found_entryvals)
8397 found_entryvals = depth.entryvals;
8398 }
8399
8400 result = NULL;
8401 }
8402
8403 /* Set it up in case we leave the loop. */
8404 depth.complexity = depth.entryvals = 0;
8405 loc_from = NULL;
8406 result_first_child = first_child;
8407 }
8408
8409 if (!loc_from && wanted_entryvals < found_entryvals)
8410 {
8411 /* We found entries with ENTRY_VALUEs and skipped them. Since
8412 we could not find any expansions without ENTRY_VALUEs, but we
8413 found at least one with them, go back and get an entry with
8414 the minimum ENTRY_VALUE count that we found. We could
8415 avoid looping, but since each sub-loc is already resolved,
8416 the re-expansion should be trivial. ??? Should we record all
8417 attempted locs as dependencies, so that we retry the
8418 expansion should any of them change, in the hope it can give
8419 us a new entry without an ENTRY_VALUE? */
8420 elcd->expanding.truncate (first_child);
8421 goto retry;
8422 }
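/* A hypothetical illustration of the retry above: if the only expansion
   found on the first pass (with WANTED_ENTRYVALS == 0) contains two
   ENTRY_VALUEs, it is rejected but FOUND_ENTRYVALS becomes 2; the retry
   then runs with WANTED_ENTRYVALS == 2 and accepts that expansion.  */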
8423
8424 /* Register all encountered dependencies as active. */
8425 pending_recursion = loc_exp_dep_set
8426 (var, result, elcd->expanding.address () + result_first_child,
8427 last_child - result_first_child, elcd->vars);
8428
8429 elcd->expanding.truncate (first_child);
8430
8431 /* Record where the expansion came from. */
8432 gcc_checking_assert (!result || !pending_recursion);
8433 VAR_LOC_FROM (var) = loc_from;
8434 VAR_LOC_DEPTH (var) = depth;
8435
8436 gcc_checking_assert (!depth.complexity == !result);
8437
8438 elcd->depth = update_depth (saved_depth, depth);
8439
8440 /* Indicate whether any of the dependencies are pending recursion
8441 resolution. */
8442 if (pendrecp)
8443 *pendrecp = pending_recursion;
8444
8445 if (!pendrecp || !pending_recursion)
8446 var->var_part[0].cur_loc = result;
8447
8448 return result;
8449 }
8450
8451 /* Callback for cselib_expand_value, that looks for expressions
8452 holding the value in the var-tracking hash tables. Return X for
8453 standard processing, anything else is to be used as-is. */
8454
8455 static rtx
8456 vt_expand_loc_callback (rtx x, bitmap regs,
8457 int max_depth ATTRIBUTE_UNUSED,
8458 void *data)
8459 {
8460 class expand_loc_callback_data *elcd
8461 = (class expand_loc_callback_data *) data;
8462 decl_or_value dv;
8463 variable *var;
8464 rtx result, subreg;
8465 bool pending_recursion = false;
8466 bool from_empty = false;
8467
8468 switch (GET_CODE (x))
8469 {
8470 case SUBREG:
8471 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8472 EXPR_DEPTH,
8473 vt_expand_loc_callback, data);
8474
8475 if (!subreg)
8476 return NULL;
8477
8478 result = simplify_gen_subreg (GET_MODE (x), subreg,
8479 GET_MODE (SUBREG_REG (x)),
8480 SUBREG_BYTE (x));
8481
8482 /* Invalid SUBREGs are ok in debug info. ??? We could try
8483 alternate expansions for the VALUE as well. */
8484 if (!result && GET_MODE (subreg) != VOIDmode)
8485 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8486
8487 return result;
8488
8489 case DEBUG_EXPR:
8490 case VALUE:
8491 dv = dv_from_rtx (x);
8492 break;
8493
8494 default:
8495 return x;
8496 }
8497
8498 elcd->expanding.safe_push (x);
8499
8500 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8501 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8502
8503 if (NO_LOC_P (x))
8504 {
8505 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8506 return NULL;
8507 }
8508
8509 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8510
8511 if (!var)
8512 {
8513 from_empty = true;
8514 var = variable_from_dropped (dv, INSERT);
8515 }
8516
8517 gcc_checking_assert (var);
8518
8519 if (!dv_changed_p (dv))
8520 {
8521 gcc_checking_assert (!NO_LOC_P (x));
8522 gcc_checking_assert (var->var_part[0].cur_loc);
8523 gcc_checking_assert (VAR_LOC_1PAUX (var));
8524 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8525
8526 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8527
8528 return var->var_part[0].cur_loc;
8529 }
8530
8531 VALUE_RECURSED_INTO (x) = true;
8532 /* This is tentative, but it makes some tests simpler. */
8533 NO_LOC_P (x) = true;
8534
8535 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8536
8537 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8538
8539 if (pending_recursion)
8540 {
8541 gcc_checking_assert (!result);
8542 elcd->pending.safe_push (x);
8543 }
8544 else
8545 {
8546 NO_LOC_P (x) = !result;
8547 VALUE_RECURSED_INTO (x) = false;
8548 set_dv_changed (dv, false);
8549
8550 if (result)
8551 notify_dependents_of_resolved_value (var, elcd->vars);
8552 }
8553
8554 return result;
8555 }
8556
8557 /* While expanding variables, we may encounter recursion cycles
8558 because of mutual (possibly indirect) dependencies between two
8559 particular variables (or values), say A and B. If we're trying to
8560 expand A when we get to B, which in turn attempts to expand A, if
8561 we can't find any other expansion for B, we'll add B to this
8562 pending-recursion stack, and tentatively return NULL for its
8563 location. This tentative value will be used for any other
8564 occurrences of B, unless A gets some other location, in which case
8565 it will notify B that it is worth another try at computing a
8566 location for it, and it will use the location computed for A then.
8567 At the end of the expansion, the tentative NULL locations become
8568 final for all members of PENDING that didn't get a notification.
8569 This function performs this finalization of NULL locations. */
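/* An illustrative (hypothetical) scenario: suppose VALUE A has the location
   list {(plus B 4), (reg 6)} while VALUE B has only {(plus A -4)}.
   Expanding A first tries (plus B 4), which recurses into B; B in turn needs
   A, which is already under expansion, so B is pushed onto PENDING and
   tentatively gets no location.  A then falls back to (reg 6) and succeeds,
   which notifies B, so a later expansion of B can resolve to
   (plus (reg 6) -4).  Had A lacked the (reg 6) alternative, B's tentative
   NULL location would be made final by this function.  */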
8570
8571 static void
8572 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8573 {
8574 while (!pending->is_empty ())
8575 {
8576 rtx x = pending->pop ();
8577 decl_or_value dv;
8578
8579 if (!VALUE_RECURSED_INTO (x))
8580 continue;
8581
8582 gcc_checking_assert (NO_LOC_P (x));
8583 VALUE_RECURSED_INTO (x) = false;
8584 dv = dv_from_rtx (x);
8585 gcc_checking_assert (dv_changed_p (dv));
8586 set_dv_changed (dv, false);
8587 }
8588 }
8589
8590 /* Initialize expand_loc_callback_data D with variable hash table V.
8591 It must be a macro because of alloca (vec stack). */
8592 #define INIT_ELCD(d, v) \
8593 do \
8594 { \
8595 (d).vars = (v); \
8596 (d).depth.complexity = (d).depth.entryvals = 0; \
8597 } \
8598 while (0)
8599 /* Finalize expand_loc_callback_data D, resolved to location L. */
8600 #define FINI_ELCD(d, l) \
8601 do \
8602 { \
8603 resolve_expansions_pending_recursion (&(d).pending); \
8604 (d).pending.release (); \
8605 (d).expanding.release (); \
8606 \
8607 if ((l) && MEM_P (l)) \
8608 (l) = targetm.delegitimize_address (l); \
8609 } \
8610 while (0)
8611
8612 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8613 equivalences in VARS, updating their CUR_LOCs in the process. */
8614
8615 static rtx
8616 vt_expand_loc (rtx loc, variable_table_type *vars)
8617 {
8618 class expand_loc_callback_data data;
8619 rtx result;
8620
8621 if (!MAY_HAVE_DEBUG_BIND_INSNS)
8622 return loc;
8623
8624 INIT_ELCD (data, vars);
8625
8626 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8627 vt_expand_loc_callback, &data);
8628
8629 FINI_ELCD (data, result);
8630
8631 return result;
8632 }
8633
8634 /* Expand the one-part VARiable to a location, using the equivalences
8635 in VARS, updating their CUR_LOCs in the process. */
8636
8637 static rtx
8638 vt_expand_1pvar (variable *var, variable_table_type *vars)
8639 {
8640 class expand_loc_callback_data data;
8641 rtx loc;
8642
8643 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8644
8645 if (!dv_changed_p (var->dv))
8646 return var->var_part[0].cur_loc;
8647
8648 INIT_ELCD (data, vars);
8649
8650 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8651
8652 gcc_checking_assert (data.expanding.is_empty ());
8653
8654 FINI_ELCD (data, loc);
8655
8656 return loc;
8657 }
8658
8659 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8660 additional parameters: WHERE specifies whether the note shall be emitted
8661 before or after instruction INSN. */
8662
8663 int
8664 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8665 {
8666 variable *var = *varp;
8667 rtx_insn *insn = data->insn;
8668 enum emit_note_where where = data->where;
8669 variable_table_type *vars = data->vars;
8670 rtx_note *note;
8671 rtx note_vl;
8672 int i, j, n_var_parts;
8673 bool complete;
8674 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8675 HOST_WIDE_INT last_limit;
8676 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8677 rtx loc[MAX_VAR_PARTS];
8678 tree decl;
8679 location_chain *lc;
8680
8681 gcc_checking_assert (var->onepart == NOT_ONEPART
8682 || var->onepart == ONEPART_VDECL);
8683
8684 decl = dv_as_decl (var->dv);
8685
8686 complete = true;
8687 last_limit = 0;
8688 n_var_parts = 0;
8689 if (!var->onepart)
8690 for (i = 0; i < var->n_var_parts; i++)
8691 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8692 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8693 for (i = 0; i < var->n_var_parts; i++)
8694 {
8695 machine_mode mode, wider_mode;
8696 rtx loc2;
8697 HOST_WIDE_INT offset, size, wider_size;
8698
8699 if (i == 0 && var->onepart)
8700 {
8701 gcc_checking_assert (var->n_var_parts == 1);
8702 offset = 0;
8703 initialized = VAR_INIT_STATUS_INITIALIZED;
8704 loc2 = vt_expand_1pvar (var, vars);
8705 }
8706 else
8707 {
8708 if (last_limit < VAR_PART_OFFSET (var, i))
8709 {
8710 complete = false;
8711 break;
8712 }
8713 else if (last_limit > VAR_PART_OFFSET (var, i))
8714 continue;
8715 offset = VAR_PART_OFFSET (var, i);
8716 loc2 = var->var_part[i].cur_loc;
8717 if (loc2 && GET_CODE (loc2) == MEM
8718 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8719 {
8720 rtx depval = XEXP (loc2, 0);
8721
8722 loc2 = vt_expand_loc (loc2, vars);
8723
8724 if (loc2)
8725 loc_exp_insert_dep (var, depval, vars);
8726 }
8727 if (!loc2)
8728 {
8729 complete = false;
8730 continue;
8731 }
8732 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8733 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8734 if (var->var_part[i].cur_loc == lc->loc)
8735 {
8736 initialized = lc->init;
8737 break;
8738 }
8739 gcc_assert (lc);
8740 }
8741
8742 offsets[n_var_parts] = offset;
8743 if (!loc2)
8744 {
8745 complete = false;
8746 continue;
8747 }
8748 loc[n_var_parts] = loc2;
8749 mode = GET_MODE (var->var_part[i].cur_loc);
8750 if (mode == VOIDmode && var->onepart)
8751 mode = DECL_MODE (decl);
8752 /* We only track subparts of constant-sized objects, since at present
8753 there's no representation for polynomial pieces. */
8754 if (!GET_MODE_SIZE (mode).is_constant (&size))
8755 {
8756 complete = false;
8757 continue;
8758 }
8759 last_limit = offsets[n_var_parts] + size;
8760
8761 /* Attempt to merge adjacent registers or memory. */
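/* For instance (a hypothetical layout): two adjacent SImode parts held in
   consecutive hard registers such as (reg:SI 0) and (reg:SI 1) may be
   merged into a single (reg:DI 0) location, and two adjacent MEM parts
   whose addresses differ by exactly the size of the first part may be
   merged into one wider MEM.  */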
8762 for (j = i + 1; j < var->n_var_parts; j++)
8763 if (last_limit <= VAR_PART_OFFSET (var, j))
8764 break;
8765 if (j < var->n_var_parts
8766 && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8767 && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
8768 && var->var_part[j].cur_loc
8769 && mode == GET_MODE (var->var_part[j].cur_loc)
8770 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8771 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8772 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8773 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8774 {
8775 rtx new_loc = NULL;
8776 poly_int64 offset2;
8777
8778 if (REG_P (loc[n_var_parts])
8779 && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8780 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8781 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8782 == REGNO (loc2))
8783 {
8784 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8785 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8786 mode, 0);
8787 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8788 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8789 if (new_loc)
8790 {
8791 if (!REG_P (new_loc)
8792 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8793 new_loc = NULL;
8794 else
8795 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8796 }
8797 }
8798 else if (MEM_P (loc[n_var_parts])
8799 && GET_CODE (XEXP (loc2, 0)) == PLUS
8800 && REG_P (XEXP (XEXP (loc2, 0), 0))
8801 && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
8802 {
8803 poly_int64 end1 = size;
8804 rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
8805 &end1);
8806 if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
8807 && known_eq (end1, offset2))
8808 new_loc = adjust_address_nv (loc[n_var_parts],
8809 wider_mode, 0);
8810 }
8811
8812 if (new_loc)
8813 {
8814 loc[n_var_parts] = new_loc;
8815 mode = wider_mode;
8816 last_limit = offsets[n_var_parts] + wider_size;
8817 i = j;
8818 }
8819 }
8820 ++n_var_parts;
8821 }
8822 poly_uint64 type_size_unit
8823 = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
8824 if (maybe_lt (poly_uint64 (last_limit), type_size_unit))
8825 complete = false;
8826
8827 if (! flag_var_tracking_uninit)
8828 initialized = VAR_INIT_STATUS_INITIALIZED;
8829
8830 note_vl = NULL_RTX;
8831 if (!complete)
8832 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8833 else if (n_var_parts == 1)
8834 {
8835 rtx expr_list;
8836
8837 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8838 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8839 else
8840 expr_list = loc[0];
8841
8842 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8843 }
8844 else if (n_var_parts)
8845 {
8846 rtx parallel;
8847
8848 for (i = 0; i < n_var_parts; i++)
8849 loc[i]
8850 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8851
8852 parallel = gen_rtx_PARALLEL (VOIDmode,
8853 gen_rtvec_v (n_var_parts, loc));
8854 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8855 parallel, initialized);
8856 }
8857
8858 if (where != EMIT_NOTE_BEFORE_INSN)
8859 {
8860 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8861 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8862 NOTE_DURING_CALL_P (note) = true;
8863 }
8864 else
8865 {
8866 /* Make sure that the call-related notes come first. */
8867 while (NEXT_INSN (insn)
8868 && NOTE_P (insn)
8869 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8870 && NOTE_DURING_CALL_P (insn))
8871 insn = NEXT_INSN (insn);
8872 if (NOTE_P (insn)
8873 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8874 && NOTE_DURING_CALL_P (insn))
8875 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8876 else
8877 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8878 }
8879 NOTE_VAR_LOCATION (note) = note_vl;
8880
8881 set_dv_changed (var->dv, false);
8882 gcc_assert (var->in_changed_variables);
8883 var->in_changed_variables = false;
8884 changed_variables->clear_slot (varp);
8885
8886 /* Continue traversing the hash table. */
8887 return 1;
8888 }
8889
8890 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK (a
8891 stack of RTX values) entries that aren't user variables. */
8892
8893 int
8894 var_track_values_to_stack (variable **slot,
8895 vec<rtx, va_heap> *changed_values_stack)
8896 {
8897 variable *var = *slot;
8898
8899 if (var->onepart == ONEPART_VALUE)
8900 changed_values_stack->safe_push (dv_as_value (var->dv));
8901 else if (var->onepart == ONEPART_DEXPR)
8902 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8903
8904 return 1;
8905 }
8906
8907 /* Remove from changed_variables the entry whose DV corresponds to
8908 value or debug_expr VAL. */
8909 static void
8910 remove_value_from_changed_variables (rtx val)
8911 {
8912 decl_or_value dv = dv_from_rtx (val);
8913 variable **slot;
8914 variable *var;
8915
8916 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8917 NO_INSERT);
8918 var = *slot;
8919 var->in_changed_variables = false;
8920 changed_variables->clear_slot (slot);
8921 }
8922
8923 /* If VAL (a value or debug_expr) has backlinks to variables actively
8924 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8925 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8926 have dependencies of their own to notify. */
8927
8928 static void
8929 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8930 vec<rtx, va_heap> *changed_values_stack)
8931 {
8932 variable **slot;
8933 variable *var;
8934 loc_exp_dep *led;
8935 decl_or_value dv = dv_from_rtx (val);
8936
8937 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8938 NO_INSERT);
8939 if (!slot)
8940 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8941 if (!slot)
8942 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8943 NO_INSERT);
8944 var = *slot;
8945
8946 while ((led = VAR_LOC_DEP_LST (var)))
8947 {
8948 decl_or_value ldv = led->dv;
8949 variable *ivar;
8950
8951 /* Deactivate and remove the backlink, as it was "used up". It
8952 makes no sense to attempt to notify the same entity again:
8953 either it will be recomputed and re-register an active
8954 dependency, or it will still have the changed mark. */
8955 if (led->next)
8956 led->next->pprev = led->pprev;
8957 if (led->pprev)
8958 *led->pprev = led->next;
8959 led->next = NULL;
8960 led->pprev = NULL;
8961
8962 if (dv_changed_p (ldv))
8963 continue;
8964
8965 switch (dv_onepart_p (ldv))
8966 {
8967 case ONEPART_VALUE:
8968 case ONEPART_DEXPR:
8969 set_dv_changed (ldv, true);
8970 changed_values_stack->safe_push (dv_as_rtx (ldv));
8971 break;
8972
8973 case ONEPART_VDECL:
8974 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8975 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8976 variable_was_changed (ivar, NULL);
8977 break;
8978
8979 case NOT_ONEPART:
8980 delete led;
8981 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8982 if (ivar)
8983 {
8984 int i = ivar->n_var_parts;
8985 while (i--)
8986 {
8987 rtx loc = ivar->var_part[i].cur_loc;
8988
8989 if (loc && GET_CODE (loc) == MEM
8990 && XEXP (loc, 0) == val)
8991 {
8992 variable_was_changed (ivar, NULL);
8993 break;
8994 }
8995 }
8996 }
8997 break;
8998
8999 default:
9000 gcc_unreachable ();
9001 }
9002 }
9003 }
9004
9005 /* Take out of changed_variables any entries that don't refer to user
9006 variables. Back-propagate change notifications from values and
9007 debug_exprs to their active dependencies in HTAB or in
9008 CHANGED_VARIABLES. */
9009
9010 static void
9011 process_changed_values (variable_table_type *htab)
9012 {
9013 int i, n;
9014 rtx val;
9015 auto_vec<rtx, 20> changed_values_stack;
9016
9017 /* Move values from changed_variables to changed_values_stack. */
9018 changed_variables
9019 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9020 (&changed_values_stack);
9021
9022 /* Back-propagate change notifications in values while popping
9023 them from the stack. */
9024 for (n = i = changed_values_stack.length ();
9025 i > 0; i = changed_values_stack.length ())
9026 {
9027 val = changed_values_stack.pop ();
9028 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9029
9030 /* This condition will hold when visiting each of the entries
9031 originally in changed_variables. We can't remove them
9032 earlier because this could drop the backlinks before we got a
9033 chance to use them. */
9034 if (i == n)
9035 {
9036 remove_value_from_changed_variables (val);
9037 n--;
9038 }
9039 }
9040 }
9041
9042 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9043 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9044 the notes shall be emitted before or after instruction INSN. */
9045
9046 static void
9047 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9048 shared_hash *vars)
9049 {
9050 emit_note_data data;
9051 variable_table_type *htab = shared_hash_htab (vars);
9052
9053 if (changed_variables->is_empty ())
9054 return;
9055
9056 if (MAY_HAVE_DEBUG_BIND_INSNS)
9057 process_changed_values (htab);
9058
9059 data.insn = insn;
9060 data.where = where;
9061 data.vars = htab;
9062
9063 changed_variables
9064 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9065 }
9066
9067 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9068 same variable in hash table NEW_VARS or is not there at all. */
9069
9070 int
9071 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
9072 {
9073 variable *old_var, *new_var;
9074
9075 old_var = *slot;
9076 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9077
9078 if (!new_var)
9079 {
9080 /* Variable has disappeared. */
9081 variable *empty_var = NULL;
9082
9083 if (old_var->onepart == ONEPART_VALUE
9084 || old_var->onepart == ONEPART_DEXPR)
9085 {
9086 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9087 if (empty_var)
9088 {
9089 gcc_checking_assert (!empty_var->in_changed_variables);
9090 if (!VAR_LOC_1PAUX (old_var))
9091 {
9092 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9093 VAR_LOC_1PAUX (empty_var) = NULL;
9094 }
9095 else
9096 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9097 }
9098 }
9099
9100 if (!empty_var)
9101 {
9102 empty_var = onepart_pool_allocate (old_var->onepart);
9103 empty_var->dv = old_var->dv;
9104 empty_var->refcount = 0;
9105 empty_var->n_var_parts = 0;
9106 empty_var->onepart = old_var->onepart;
9107 empty_var->in_changed_variables = false;
9108 }
9109
9110 if (empty_var->onepart)
9111 {
9112 /* Propagate the auxiliary data to (ultimately)
9113 changed_variables. */
9114 empty_var->var_part[0].loc_chain = NULL;
9115 empty_var->var_part[0].cur_loc = NULL;
9116 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9117 VAR_LOC_1PAUX (old_var) = NULL;
9118 }
9119 variable_was_changed (empty_var, NULL);
9120 /* Continue traversing the hash table. */
9121 return 1;
9122 }
9123 /* Update cur_loc and one-part auxiliary data, before new_var goes
9124 through variable_was_changed. */
9125 if (old_var != new_var && new_var->onepart)
9126 {
9127 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9128 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9129 VAR_LOC_1PAUX (old_var) = NULL;
9130 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9131 }
9132 if (variable_different_p (old_var, new_var))
9133 variable_was_changed (new_var, NULL);
9134
9135 /* Continue traversing the hash table. */
9136 return 1;
9137 }
9138
9139 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9140 table OLD_VARS. */
9141
9142 int
9143 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9144 {
9145 variable *old_var, *new_var;
9146
9147 new_var = *slot;
9148 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9149 if (!old_var)
9150 {
9151 int i;
9152 for (i = 0; i < new_var->n_var_parts; i++)
9153 new_var->var_part[i].cur_loc = NULL;
9154 variable_was_changed (new_var, NULL);
9155 }
9156
9157 /* Continue traversing the hash table. */
9158 return 1;
9159 }
9160
9161 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9162 NEW_SET. */
9163
9164 static void
9165 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9166 dataflow_set *new_set)
9167 {
9168 shared_hash_htab (old_set->vars)
9169 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9170 (shared_hash_htab (new_set->vars));
9171 shared_hash_htab (new_set->vars)
9172 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9173 (shared_hash_htab (old_set->vars));
9174 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9175 }
9176
9177 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9178
9179 static rtx_insn *
9180 next_non_note_insn_var_location (rtx_insn *insn)
9181 {
9182 while (insn)
9183 {
9184 insn = NEXT_INSN (insn);
9185 if (insn == 0
9186 || !NOTE_P (insn)
9187 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9188 break;
9189 }
9190
9191 return insn;
9192 }
9193
9194 /* Emit the notes for changes of location parts in the basic block BB. */
9195
9196 static void
9197 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9198 {
9199 unsigned int i;
9200 micro_operation *mo;
9201
9202 dataflow_set_clear (set);
9203 dataflow_set_copy (set, &VTI (bb)->in);
9204
9205 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9206 {
9207 rtx_insn *insn = mo->insn;
9208 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9209
9210 switch (mo->type)
9211 {
9212 case MO_CALL:
9213 dataflow_set_clear_at_call (set, insn);
9214 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9215 {
9216 rtx arguments = mo->u.loc, *p = &arguments;
9217 while (*p)
9218 {
9219 XEXP (XEXP (*p, 0), 1)
9220 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9221 shared_hash_htab (set->vars));
9222 /* If expansion is successful, keep it in the list. */
9223 if (XEXP (XEXP (*p, 0), 1))
9224 {
9225 XEXP (XEXP (*p, 0), 1)
9226 = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
9227 p = &XEXP (*p, 1);
9228 }
9229 /* Otherwise, if the following item is the data_value for it,
9230 drop it too. */
9231 else if (XEXP (*p, 1)
9232 && REG_P (XEXP (XEXP (*p, 0), 0))
9233 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9234 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9235 0))
9236 && REGNO (XEXP (XEXP (*p, 0), 0))
9237 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9238 0), 0)))
9239 *p = XEXP (XEXP (*p, 1), 1);
9240 /* Just drop this item. */
9241 else
9242 *p = XEXP (*p, 1);
9243 }
9244 add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
9245 }
9246 break;
9247
9248 case MO_USE:
9249 {
9250 rtx loc = mo->u.loc;
9251
9252 if (REG_P (loc))
9253 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9254 else
9255 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9256
9257 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9258 }
9259 break;
9260
9261 case MO_VAL_LOC:
9262 {
9263 rtx loc = mo->u.loc;
9264 rtx val, vloc;
9265 tree var;
9266
9267 if (GET_CODE (loc) == CONCAT)
9268 {
9269 val = XEXP (loc, 0);
9270 vloc = XEXP (loc, 1);
9271 }
9272 else
9273 {
9274 val = NULL_RTX;
9275 vloc = loc;
9276 }
9277
9278 var = PAT_VAR_LOCATION_DECL (vloc);
9279
9280 clobber_variable_part (set, NULL_RTX,
9281 dv_from_decl (var), 0, NULL_RTX);
9282 if (val)
9283 {
9284 if (VAL_NEEDS_RESOLUTION (loc))
9285 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9286 set_variable_part (set, val, dv_from_decl (var), 0,
9287 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9288 INSERT);
9289 }
9290 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9291 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9292 dv_from_decl (var), 0,
9293 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9294 INSERT);
9295
9296 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9297 }
9298 break;
9299
9300 case MO_VAL_USE:
9301 {
9302 rtx loc = mo->u.loc;
9303 rtx val, vloc, uloc;
9304
9305 vloc = uloc = XEXP (loc, 1);
9306 val = XEXP (loc, 0);
9307
9308 if (GET_CODE (val) == CONCAT)
9309 {
9310 uloc = XEXP (val, 1);
9311 val = XEXP (val, 0);
9312 }
9313
9314 if (VAL_NEEDS_RESOLUTION (loc))
9315 val_resolve (set, val, vloc, insn);
9316 else
9317 val_store (set, val, uloc, insn, false);
9318
9319 if (VAL_HOLDS_TRACK_EXPR (loc))
9320 {
9321 if (GET_CODE (uloc) == REG)
9322 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9323 NULL);
9324 else if (GET_CODE (uloc) == MEM)
9325 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9326 NULL);
9327 }
9328
9329 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9330 }
9331 break;
9332
9333 case MO_VAL_SET:
9334 {
9335 rtx loc = mo->u.loc;
9336 rtx val, vloc, uloc;
9337 rtx dstv, srcv;
9338
9339 vloc = loc;
9340 uloc = XEXP (vloc, 1);
9341 val = XEXP (vloc, 0);
9342 vloc = uloc;
9343
9344 if (GET_CODE (uloc) == SET)
9345 {
9346 dstv = SET_DEST (uloc);
9347 srcv = SET_SRC (uloc);
9348 }
9349 else
9350 {
9351 dstv = uloc;
9352 srcv = NULL;
9353 }
9354
9355 if (GET_CODE (val) == CONCAT)
9356 {
9357 dstv = vloc = XEXP (val, 1);
9358 val = XEXP (val, 0);
9359 }
9360
9361 if (GET_CODE (vloc) == SET)
9362 {
9363 srcv = SET_SRC (vloc);
9364
9365 gcc_assert (val != srcv);
9366 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9367
9368 dstv = vloc = SET_DEST (vloc);
9369
9370 if (VAL_NEEDS_RESOLUTION (loc))
9371 val_resolve (set, val, srcv, insn);
9372 }
9373 else if (VAL_NEEDS_RESOLUTION (loc))
9374 {
9375 gcc_assert (GET_CODE (uloc) == SET
9376 && GET_CODE (SET_SRC (uloc)) == REG);
9377 val_resolve (set, val, SET_SRC (uloc), insn);
9378 }
9379
9380 if (VAL_HOLDS_TRACK_EXPR (loc))
9381 {
9382 if (VAL_EXPR_IS_CLOBBERED (loc))
9383 {
9384 if (REG_P (uloc))
9385 var_reg_delete (set, uloc, true);
9386 else if (MEM_P (uloc))
9387 {
9388 gcc_assert (MEM_P (dstv));
9389 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9390 var_mem_delete (set, dstv, true);
9391 }
9392 }
9393 else
9394 {
9395 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9396 rtx src = NULL, dst = uloc;
9397 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9398
9399 if (GET_CODE (uloc) == SET)
9400 {
9401 src = SET_SRC (uloc);
9402 dst = SET_DEST (uloc);
9403 }
9404
9405 if (copied_p)
9406 {
9407 status = find_src_status (set, src);
9408
9409 src = find_src_set_src (set, src);
9410 }
9411
9412 if (REG_P (dst))
9413 var_reg_delete_and_set (set, dst, !copied_p,
9414 status, srcv);
9415 else if (MEM_P (dst))
9416 {
9417 gcc_assert (MEM_P (dstv));
9418 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9419 var_mem_delete_and_set (set, dstv, !copied_p,
9420 status, srcv);
9421 }
9422 }
9423 }
9424 else if (REG_P (uloc))
9425 var_regno_delete (set, REGNO (uloc));
9426 else if (MEM_P (uloc))
9427 {
9428 gcc_checking_assert (GET_CODE (vloc) == MEM);
9429 gcc_checking_assert (vloc == dstv);
9430 if (vloc != dstv)
9431 clobber_overlapping_mems (set, vloc);
9432 }
9433
9434 val_store (set, val, dstv, insn, true);
9435
9436 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9437 set->vars);
9438 }
9439 break;
9440
9441 case MO_SET:
9442 {
9443 rtx loc = mo->u.loc;
9444 rtx set_src = NULL;
9445
9446 if (GET_CODE (loc) == SET)
9447 {
9448 set_src = SET_SRC (loc);
9449 loc = SET_DEST (loc);
9450 }
9451
9452 if (REG_P (loc))
9453 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9454 set_src);
9455 else
9456 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9457 set_src);
9458
9459 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9460 set->vars);
9461 }
9462 break;
9463
9464 case MO_COPY:
9465 {
9466 rtx loc = mo->u.loc;
9467 enum var_init_status src_status;
9468 rtx set_src = NULL;
9469
9470 if (GET_CODE (loc) == SET)
9471 {
9472 set_src = SET_SRC (loc);
9473 loc = SET_DEST (loc);
9474 }
9475
9476 src_status = find_src_status (set, set_src);
9477 set_src = find_src_set_src (set, set_src);
9478
9479 if (REG_P (loc))
9480 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9481 else
9482 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9483
9484 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9485 set->vars);
9486 }
9487 break;
9488
9489 case MO_USE_NO_VAR:
9490 {
9491 rtx loc = mo->u.loc;
9492
9493 if (REG_P (loc))
9494 var_reg_delete (set, loc, false);
9495 else
9496 var_mem_delete (set, loc, false);
9497
9498 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9499 }
9500 break;
9501
9502 case MO_CLOBBER:
9503 {
9504 rtx loc = mo->u.loc;
9505
9506 if (REG_P (loc))
9507 var_reg_delete (set, loc, true);
9508 else
9509 var_mem_delete (set, loc, true);
9510
9511 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9512 set->vars);
9513 }
9514 break;
9515
9516 case MO_ADJUST:
9517 set->stack_adjust += mo->u.adjust;
9518 break;
9519 }
9520 }
9521 }
9522
9523 /* Emit notes for the whole function. */
9524
9525 static void
9526 vt_emit_notes (void)
9527 {
9528 basic_block bb;
9529 dataflow_set cur;
9530
9531 gcc_assert (changed_variables->is_empty ());
9532
9533 /* Free memory occupied by the out hash tables, as they aren't used
9534 anymore. */
9535 FOR_EACH_BB_FN (bb, cfun)
9536 dataflow_set_clear (&VTI (bb)->out);
9537
9538 /* Enable emitting notes by functions (mainly by set_variable_part and
9539 delete_variable_part). */
9540 emit_notes = true;
9541
9542 if (MAY_HAVE_DEBUG_BIND_INSNS)
9543 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9544
9545 dataflow_set_init (&cur);
9546
9547 FOR_EACH_BB_FN (bb, cfun)
9548 {
9549 /* Emit the notes for changes of variable locations between two
9550 subsequent basic blocks. */
9551 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9552
9553 if (MAY_HAVE_DEBUG_BIND_INSNS)
9554 local_get_addr_cache = new hash_map<rtx, rtx>;
9555
9556 /* Emit the notes for the changes in the basic block itself. */
9557 emit_notes_in_bb (bb, &cur);
9558
9559 if (MAY_HAVE_DEBUG_BIND_INSNS)
9560 delete local_get_addr_cache;
9561 local_get_addr_cache = NULL;
9562
9563 /* Free memory occupied by the in hash table; we won't need it
9564 again. */
9565 dataflow_set_clear (&VTI (bb)->in);
9566 }
9567
9568 if (flag_checking)
9569 shared_hash_htab (cur.vars)
9570 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9571 (shared_hash_htab (empty_shared_hash));
9572
9573 dataflow_set_destroy (&cur);
9574
9575 if (MAY_HAVE_DEBUG_BIND_INSNS)
9576 delete dropped_values;
9577 dropped_values = NULL;
9578
9579 emit_notes = false;
9580 }
9581
9582 /* If there is a declaration and offset associated with register/memory RTL,
9583 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9584
9585 static bool
9586 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9587 {
9588 if (REG_P (rtl))
9589 {
9590 if (REG_ATTRS (rtl))
9591 {
9592 *declp = REG_EXPR (rtl);
9593 *offsetp = REG_OFFSET (rtl);
9594 return true;
9595 }
9596 }
9597 else if (GET_CODE (rtl) == PARALLEL)
9598 {
9599 tree decl = NULL_TREE;
9600 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9601 int len = XVECLEN (rtl, 0), i;
9602
9603 for (i = 0; i < len; i++)
9604 {
9605 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9606 if (!REG_P (reg) || !REG_ATTRS (reg))
9607 break;
9608 if (!decl)
9609 decl = REG_EXPR (reg);
9610 if (REG_EXPR (reg) != decl)
9611 break;
9612 HOST_WIDE_INT this_offset;
9613 if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9614 break;
9615 offset = MIN (offset, this_offset);
9616 }
9617
9618 if (i == len)
9619 {
9620 *declp = decl;
9621 *offsetp = offset;
9622 return true;
9623 }
9624 }
9625 else if (MEM_P (rtl))
9626 {
9627 if (MEM_ATTRS (rtl))
9628 {
9629 *declp = MEM_EXPR (rtl);
9630 *offsetp = int_mem_offset (rtl);
9631 return true;
9632 }
9633 }
9634 return false;
9635 }
9636
9637 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9638 of VAL. */
9639
9640 static void
9641 record_entry_value (cselib_val *val, rtx rtl)
9642 {
9643 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9644
9645 ENTRY_VALUE_EXP (ev) = rtl;
9646
9647 cselib_add_permanent_equiv (val, ev, get_insns ());
9648 }
9649
9650 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9651
9652 static void
9653 vt_add_function_parameter (tree parm)
9654 {
9655 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9656 rtx incoming = DECL_INCOMING_RTL (parm);
9657 tree decl;
9658 machine_mode mode;
9659 poly_int64 offset;
9660 dataflow_set *out;
9661 decl_or_value dv;
9662 bool incoming_ok = true;
9663
9664 if (TREE_CODE (parm) != PARM_DECL)
9665 return;
9666
9667 if (!decl_rtl || !incoming)
9668 return;
9669
9670 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9671 return;
9672
9673 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9674 rewrite the incoming location of parameters passed on the stack
9675 into MEMs based on the argument pointer, so that incoming doesn't
9676 depend on a pseudo. */
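/* A hypothetical illustration of the rewrite below: with a pseudo
   internal_arg_pointer (reg 87) and a FIRST_PARM_OFFSET of 0, an incoming
   location of (mem (plus (reg 87) 8)) would become
   (mem (plus arg_pointer_rtx 8)), removing the dependence on the pseudo.  */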
9677 poly_int64 incoming_offset = 0;
9678 if (MEM_P (incoming)
9679 && (strip_offset (XEXP (incoming, 0), &incoming_offset)
9680 == crtl->args.internal_arg_pointer))
9681 {
9682 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9683 incoming
9684 = replace_equiv_address_nv (incoming,
9685 plus_constant (Pmode,
9686 arg_pointer_rtx,
9687 off + incoming_offset));
9688 }
9689
9690 #ifdef HAVE_window_save
9691 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9692 If the target machine has an explicit window save instruction, the
9693 actual entry value is the corresponding OUTGOING_REGNO instead. */
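/* For example, on a register-window target such as SPARC, a parameter that
   lives in %i0 inside the callee was passed by the caller in %o0, so its
   entry value must be described in terms of the outgoing register.  (The
   exact registers are illustrative; they depend on the target's
   INCOMING_REGNO/OUTGOING_REGNO mapping.)  */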
9694 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9695 {
9696 if (REG_P (incoming)
9697 && HARD_REGISTER_P (incoming)
9698 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9699 {
9700 parm_reg p;
9701 p.incoming = incoming;
9702 incoming
9703 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9704 OUTGOING_REGNO (REGNO (incoming)), 0);
9705 p.outgoing = incoming;
9706 vec_safe_push (windowed_parm_regs, p);
9707 }
9708 else if (GET_CODE (incoming) == PARALLEL)
9709 {
9710 rtx outgoing
9711 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9712 int i;
9713
9714 for (i = 0; i < XVECLEN (incoming, 0); i++)
9715 {
9716 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9717 parm_reg p;
9718 p.incoming = reg;
9719 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9720 OUTGOING_REGNO (REGNO (reg)), 0);
9721 p.outgoing = reg;
9722 XVECEXP (outgoing, 0, i)
9723 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9724 XEXP (XVECEXP (incoming, 0, i), 1));
9725 vec_safe_push (windowed_parm_regs, p);
9726 }
9727
9728 incoming = outgoing;
9729 }
9730 else if (MEM_P (incoming)
9731 && REG_P (XEXP (incoming, 0))
9732 && HARD_REGISTER_P (XEXP (incoming, 0)))
9733 {
9734 rtx reg = XEXP (incoming, 0);
9735 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9736 {
9737 parm_reg p;
9738 p.incoming = reg;
9739 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9740 p.outgoing = reg;
9741 vec_safe_push (windowed_parm_regs, p);
9742 incoming = replace_equiv_address_nv (incoming, reg);
9743 }
9744 }
9745 }
9746 #endif
9747
9748 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9749 {
9750 incoming_ok = false;
9751 if (MEM_P (incoming))
9752 {
9753 /* This means the argument is passed by invisible reference. */
9754 offset = 0;
9755 decl = parm;
9756 }
9757 else
9758 {
9759 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9760 return;
9761 offset += byte_lowpart_offset (GET_MODE (incoming),
9762 GET_MODE (decl_rtl));
9763 }
9764 }
9765
9766 if (!decl)
9767 return;
9768
9769 if (parm != decl)
9770 {
9771 /* If that DECL_RTL wasn't a pseudo that got spilled to
9772 memory, bail out. Otherwise, the spill slot sharing code
9773 will force the memory to reference spill_slot_decl (%sfp),
9774 so we don't match above. That's ok, the pseudo must have
9775 referenced the entire parameter, so just reset OFFSET. */
9776 if (decl != get_spill_slot_decl (false))
9777 return;
9778 offset = 0;
9779 }
9780
9781 HOST_WIDE_INT const_offset;
9782 if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9783 return;
9784
9785 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9786
9787 dv = dv_from_decl (parm);
9788
9789 if (target_for_debug_bind (parm)
9790 /* We can't deal with these right now, because this kind of
9791 variable is single-part. ??? We could handle parallels
9792 that describe multiple locations for the same single
9793 value, but ATM we don't. */
9794 && GET_CODE (incoming) != PARALLEL)
9795 {
9796 cselib_val *val;
9797 rtx lowpart;
9798
9799 /* ??? We shouldn't ever hit this, but it may happen because
9800 arguments passed by invisible reference aren't dealt with
9801 above: incoming-rtl will have Pmode rather than the
9802 expected mode for the type. */
9803 if (const_offset)
9804 return;
9805
9806 lowpart = var_lowpart (mode, incoming);
9807 if (!lowpart)
9808 return;
9809
9810 val = cselib_lookup_from_insn (lowpart, mode, true,
9811 VOIDmode, get_insns ());
9812
9813 /* ??? Float-typed values in memory are not handled by
9814 cselib. */
9815 if (val)
9816 {
9817 preserve_value (val);
9818 set_variable_part (out, val->val_rtx, dv, const_offset,
9819 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9820 dv = dv_from_value (val->val_rtx);
9821 }
9822
9823 if (MEM_P (incoming))
9824 {
9825 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9826 VOIDmode, get_insns ());
9827 if (val)
9828 {
9829 preserve_value (val);
9830 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9831 }
9832 }
9833 }
9834
9835 if (REG_P (incoming))
9836 {
9837 incoming = var_lowpart (mode, incoming);
9838 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9839 attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9840 incoming);
9841 set_variable_part (out, incoming, dv, const_offset,
9842 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9843 if (dv_is_value_p (dv))
9844 {
9845 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9846 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9847 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9848 {
9849 machine_mode indmode
9850 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9851 rtx mem = gen_rtx_MEM (indmode, incoming);
9852 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9853 VOIDmode,
9854 get_insns ());
9855 if (val)
9856 {
9857 preserve_value (val);
9858 record_entry_value (val, mem);
9859 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9860 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9861 }
9862 }
9863 }
9864 }
9865 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9866 {
9867 int i;
9868
9869 /* The following code relies on vt_get_decl_and_offset returning true for
9870 incoming, which might not always be the case. */
9871 if (!incoming_ok)
9872 return;
9873 for (i = 0; i < XVECLEN (incoming, 0); i++)
9874 {
9875 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9876 /* vt_get_decl_and_offset has already checked that the offset
9877 is a valid variable part. */
9878 const_offset = get_tracked_reg_offset (reg);
9879 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9880 attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9881 set_variable_part (out, reg, dv, const_offset,
9882 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9883 }
9884 }
9885 else if (MEM_P (incoming))
9886 {
9887 incoming = var_lowpart (mode, incoming);
9888 set_variable_part (out, incoming, dv, const_offset,
9889 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9890 }
9891 }
9892
9893 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9894
9895 static void
9896 vt_add_function_parameters (void)
9897 {
9898 tree parm;
9899
9900 for (parm = DECL_ARGUMENTS (current_function_decl);
9901 parm; parm = DECL_CHAIN (parm))
9902 vt_add_function_parameter (parm);
9903
9904 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9905 {
9906 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9907
9908 if (TREE_CODE (vexpr) == INDIRECT_REF)
9909 vexpr = TREE_OPERAND (vexpr, 0);
9910
9911 if (TREE_CODE (vexpr) == PARM_DECL
9912 && DECL_ARTIFICIAL (vexpr)
9913 && !DECL_IGNORED_P (vexpr)
9914 && DECL_NAMELESS (vexpr))
9915 vt_add_function_parameter (vexpr);
9916 }
9917 }
9918
9919 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9920 ensure it isn't flushed during cselib_reset_table.
9921 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9922 has been eliminated. */
9923
9924 static void
9925 vt_init_cfa_base (void)
9926 {
9927 cselib_val *val;
9928
9929 #ifdef FRAME_POINTER_CFA_OFFSET
9930 cfa_base_rtx = frame_pointer_rtx;
9931 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9932 #else
9933 cfa_base_rtx = arg_pointer_rtx;
9934 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9935 #endif
9936 if (cfa_base_rtx == hard_frame_pointer_rtx
9937 || !fixed_regs[REGNO (cfa_base_rtx)])
9938 {
9939 cfa_base_rtx = NULL_RTX;
9940 return;
9941 }
9942 if (!MAY_HAVE_DEBUG_BIND_INSNS)
9943 return;
9944
9945 /* Tell alias analysis that cfa_base_rtx should share
9946 find_base_term value with stack pointer or hard frame pointer. */
9947 if (!frame_pointer_needed)
9948 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9949 else if (!crtl->stack_realign_tried)
9950 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9951
9952 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9953 VOIDmode, get_insns ());
9954 preserve_value (val);
9955 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9956 }
9957
9958 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
9959
9960 static rtx_insn *
9961 reemit_marker_as_note (rtx_insn *insn)
9962 {
9963 gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
9964
9965 enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
9966
9967 switch (kind)
9968 {
9969 case NOTE_INSN_BEGIN_STMT:
9970 case NOTE_INSN_INLINE_ENTRY:
9971 {
9972 rtx_insn *note = NULL;
9973 if (cfun->debug_nonbind_markers)
9974 {
9975 note = emit_note_before (kind, insn);
9976 NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
9977 }
9978 delete_insn (insn);
9979 return note;
9980 }
9981
9982 default:
9983 gcc_unreachable ();
9984 }
9985 }
9986
9987 /* Allocate and initialize the data structures for variable tracking
9988 and parse the RTL to get the micro operations. */
9989
9990 static bool
9991 vt_initialize (void)
9992 {
9993 basic_block bb;
9994 poly_int64 fp_cfa_offset = -1;
9995
9996 alloc_aux_for_blocks (sizeof (variable_tracking_info));
9997
9998 empty_shared_hash = shared_hash_pool.allocate ();
9999 empty_shared_hash->refcount = 1;
10000 empty_shared_hash->htab = new variable_table_type (1);
10001 changed_variables = new variable_table_type (10);
10002
10003 /* Init the IN and OUT sets. */
10004 FOR_ALL_BB_FN (bb, cfun)
10005 {
10006 VTI (bb)->visited = false;
10007 VTI (bb)->flooded = false;
10008 dataflow_set_init (&VTI (bb)->in);
10009 dataflow_set_init (&VTI (bb)->out);
10010 VTI (bb)->permp = NULL;
10011 }
10012
10013 if (MAY_HAVE_DEBUG_BIND_INSNS)
10014 {
10015 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
10016 scratch_regs = BITMAP_ALLOC (NULL);
10017 preserved_values.create (256);
10018 global_get_addr_cache = new hash_map<rtx, rtx>;
10019 }
10020 else
10021 {
10022 scratch_regs = NULL;
10023 global_get_addr_cache = NULL;
10024 }
10025
10026 if (MAY_HAVE_DEBUG_BIND_INSNS)
10027 {
10028 rtx reg, expr;
10029 int ofst;
10030 cselib_val *val;
10031
10032 #ifdef FRAME_POINTER_CFA_OFFSET
10033 reg = frame_pointer_rtx;
10034 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10035 #else
10036 reg = arg_pointer_rtx;
10037 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
10038 #endif
10039
10040 ofst -= INCOMING_FRAME_SP_OFFSET;
10041
10042 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
10043 VOIDmode, get_insns ());
10044 preserve_value (val);
10045 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
10046 cselib_preserve_cfa_base_value (val, REGNO (reg));
10047 expr = plus_constant (GET_MODE (stack_pointer_rtx),
10048 stack_pointer_rtx, -ofst);
10049 cselib_add_permanent_equiv (val, expr, get_insns ());
10050
10051 if (ofst)
10052 {
10053 val = cselib_lookup_from_insn (stack_pointer_rtx,
10054 GET_MODE (stack_pointer_rtx), 1,
10055 VOIDmode, get_insns ());
10056 preserve_value (val);
10057 expr = plus_constant (GET_MODE (reg), reg, ofst);
10058 cselib_add_permanent_equiv (val, expr, get_insns ());
10059 }
10060 }
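/* A worked example of the permanent equivalences recorded above, with
   made-up target values ARG_POINTER_CFA_OFFSET = 16 and
   INCOMING_FRAME_SP_OFFSET = 8, so that ofst ends up as 8: cselib is told

     VALUE (arg_pointer_rtx)    == (plus stack_pointer_rtx -8)
     VALUE (stack_pointer_rtx)  == (plus arg_pointer_rtx    8)

   i.e. the (eliminated) CFA base register and the incoming stack pointer
   differ by a known constant, which lets cselib canonicalize stack
   addresses against the CFA from the very first insn of the function.  */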
10061
10062 /* In order to factor out the adjustments made to the stack pointer or to
10063 the hard frame pointer, and thus be able to use DW_OP_fbreg operations
10064 instead of individual location lists, we're going to rewrite MEMs based
10065 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10066 or hard_frame_pointer_rtx back to the virtual CFA pointer, i.e.
10067 frame_pointer_rtx or arg_pointer_rtx. We can do this either when there
10068 is no frame pointer in the function and stack adjustments are consistent
10069 for all basic blocks, or when there is a frame pointer and no stack
10070 realignment. But we first have to check that frame_pointer_rtx or
10071 arg_pointer_rtx, as appropriate, has been eliminated. */
10072 if (!frame_pointer_needed)
10073 {
10074 rtx reg, elim;
10075
10076 if (!vt_stack_adjustments ())
10077 return false;
10078
10079 #ifdef FRAME_POINTER_CFA_OFFSET
10080 reg = frame_pointer_rtx;
10081 #else
10082 reg = arg_pointer_rtx;
10083 #endif
10084 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10085 if (elim != reg)
10086 {
10087 if (GET_CODE (elim) == PLUS)
10088 elim = XEXP (elim, 0);
10089 if (elim == stack_pointer_rtx)
10090 vt_init_cfa_base ();
10091 }
10092 }
10093 else if (!crtl->stack_realign_tried)
10094 {
10095 rtx reg, elim;
10096
10097 #ifdef FRAME_POINTER_CFA_OFFSET
10098 reg = frame_pointer_rtx;
10099 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10100 #else
10101 reg = arg_pointer_rtx;
10102 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10103 #endif
10104 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10105 if (elim != reg)
10106 {
10107 if (GET_CODE (elim) == PLUS)
10108 {
10109 fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
10110 elim = XEXP (elim, 0);
10111 }
10112 if (elim != hard_frame_pointer_rtx)
10113 fp_cfa_offset = -1;
10114 }
10115 else
10116 fp_cfa_offset = -1;
10117 }
10118
10119 /* If the stack is realigned and a DRAP register is used, we're going to
10120 rewrite MEMs based on it representing incoming locations of parameters
10121 passed on the stack into MEMs based on the argument pointer. Although
10122 we aren't going to rewrite other MEMs, we still need to initialize the
10123 virtual CFA pointer in order to ensure that the argument pointer will
10124 be seen as a constant throughout the function.
10125
10126 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10127 else if (stack_realign_drap)
10128 {
10129 rtx reg, elim;
10130
10131 #ifdef FRAME_POINTER_CFA_OFFSET
10132 reg = frame_pointer_rtx;
10133 #else
10134 reg = arg_pointer_rtx;
10135 #endif
10136 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10137 if (elim != reg)
10138 {
10139 if (GET_CODE (elim) == PLUS)
10140 elim = XEXP (elim, 0);
10141 if (elim == hard_frame_pointer_rtx)
10142 vt_init_cfa_base ();
10143 }
10144 }
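/* An illustration of the elimination checks above, with made-up offsets:
   with a frame pointer and ARG_POINTER_CFA_OFFSET = 16, eliminate_regs
   might map the CFA base register to

     (plus (reg hard_frame_pointer) (const_int -24))

   in which case fp_cfa_offset becomes 16 - (-24) = 40 and the scan below
   switches to CFA-based MEMs once it reaches the insn that sets up the
   hard frame pointer (see the fp_setter_insn check).  Without a frame
   pointer, an elimination to stack_pointer_rtx (possibly plus a constant)
   makes us call vt_init_cfa_base right away instead.  */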
10145
10146 hard_frame_pointer_adjustment = -1;
10147
10148 vt_add_function_parameters ();
10149
10150 FOR_EACH_BB_FN (bb, cfun)
10151 {
10152 rtx_insn *insn;
10153 HOST_WIDE_INT pre, post = 0;
10154 basic_block first_bb, last_bb;
10155
10156 if (MAY_HAVE_DEBUG_BIND_INSNS)
10157 {
10158 cselib_record_sets_hook = add_with_sets;
10159 if (dump_file && (dump_flags & TDF_DETAILS))
10160 fprintf (dump_file, "first value: %i\n",
10161 cselib_get_next_uid ());
10162 }
10163
10164 first_bb = bb;
10165 for (;;)
10166 {
10167 edge e;
10168 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10169 || ! single_pred_p (bb->next_bb))
10170 break;
10171 e = find_edge (bb, bb->next_bb);
10172 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10173 break;
10174 bb = bb->next_bb;
10175 }
10176 last_bb = bb;
10177
10178 /* Add the micro-operations to the vector. */
10179 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10180 {
10181 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10182 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10183
10184 rtx_insn *next;
10185 FOR_BB_INSNS_SAFE (bb, insn, next)
10186 {
10187 if (INSN_P (insn))
10188 {
10189 if (!frame_pointer_needed)
10190 {
10191 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10192 if (pre)
10193 {
10194 micro_operation mo;
10195 mo.type = MO_ADJUST;
10196 mo.u.adjust = pre;
10197 mo.insn = insn;
10198 if (dump_file && (dump_flags & TDF_DETAILS))
10199 log_op_type (PATTERN (insn), bb, insn,
10200 MO_ADJUST, dump_file);
10201 VTI (bb)->mos.safe_push (mo);
10202 }
10203 }
10204
10205 cselib_hook_called = false;
10206 adjust_insn (bb, insn);
10207
10208 if (!frame_pointer_needed && pre)
10209 VTI (bb)->out.stack_adjust += pre;
10210
10211 if (DEBUG_MARKER_INSN_P (insn))
10212 {
10213 reemit_marker_as_note (insn);
10214 continue;
10215 }
10216
10217 if (MAY_HAVE_DEBUG_BIND_INSNS)
10218 {
10219 if (CALL_P (insn))
10220 prepare_call_arguments (bb, insn);
10221 cselib_process_insn (insn);
10222 if (dump_file && (dump_flags & TDF_DETAILS))
10223 {
10224 if (dump_flags & TDF_SLIM)
10225 dump_insn_slim (dump_file, insn);
10226 else
10227 print_rtl_single (dump_file, insn);
10228 dump_cselib_table (dump_file);
10229 }
10230 }
10231 if (!cselib_hook_called)
10232 add_with_sets (insn, 0, 0);
10233 cancel_changes (0);
10234
10235 if (!frame_pointer_needed && post)
10236 {
10237 micro_operation mo;
10238 mo.type = MO_ADJUST;
10239 mo.u.adjust = post;
10240 mo.insn = insn;
10241 if (dump_file && (dump_flags & TDF_DETAILS))
10242 log_op_type (PATTERN (insn), bb, insn,
10243 MO_ADJUST, dump_file);
10244 VTI (bb)->mos.safe_push (mo);
10245 VTI (bb)->out.stack_adjust += post;
10246 }
10247
10248 if (maybe_ne (fp_cfa_offset, -1)
10249 && known_eq (hard_frame_pointer_adjustment, -1)
10250 && fp_setter_insn (insn))
10251 {
10252 vt_init_cfa_base ();
10253 hard_frame_pointer_adjustment = fp_cfa_offset;
10254 /* Disassociate sp from fp now. */
10255 if (MAY_HAVE_DEBUG_BIND_INSNS)
10256 {
10257 cselib_val *v;
10258 cselib_invalidate_rtx (stack_pointer_rtx);
10259 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10260 VOIDmode);
10261 if (v && !cselib_preserved_value_p (v))
10262 {
10263 cselib_set_value_sp_based (v);
10264 preserve_value (v);
10265 }
10266 }
10267 }
10268 }
10269 }
10270 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10271 }
10272
10273 bb = last_bb;
10274
10275 if (MAY_HAVE_DEBUG_BIND_INSNS)
10276 {
10277 cselib_preserve_only_values ();
10278 cselib_reset_table (cselib_get_next_uid ());
10279 cselib_record_sets_hook = NULL;
10280 }
10281 }
10282
10283 hard_frame_pointer_adjustment = -1;
10284 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10285 cfa_base_rtx = NULL_RTX;
10286 return true;
10287 }
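/* A rough sketch of the MO_ADJUST handling in the scan above; the exact
   RTL and sign convention depend on the target, so treat this as a rough
   picture only.  For a push-style store such as

     (set (mem (pre_dec (reg sp))) (reg r0))

   insn_stack_adjust_offset_pre_post reports a pre-modification of the
   stack pointer by one word, so an MO_ADJUST micro-operation carrying
   that signed amount is pushed in front of the insn's other
   micro-operations and VTI (bb)->out.stack_adjust is updated by the same
   amount; a post-modifying address would instead produce an MO_ADJUST
   after them.  */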
10288
10289 /* This is *not* reset after each function. It gives each
10290 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10291 a unique label number. */
10292
10293 static int debug_label_num = 1;
10294
10295 /* Remove from the insn stream a single debug insn used for
10296 variable tracking at assignments. */
10297
10298 static inline void
10299 delete_vta_debug_insn (rtx_insn *insn)
10300 {
10301 if (DEBUG_MARKER_INSN_P (insn))
10302 {
10303 reemit_marker_as_note (insn);
10304 return;
10305 }
10306
10307 tree decl = INSN_VAR_LOCATION_DECL (insn);
10308 if (TREE_CODE (decl) == LABEL_DECL
10309 && DECL_NAME (decl)
10310 && !DECL_RTL_SET_P (decl))
10311 {
10312 PUT_CODE (insn, NOTE);
10313 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10314 NOTE_DELETED_LABEL_NAME (insn)
10315 = IDENTIFIER_POINTER (DECL_NAME (decl));
10316 SET_DECL_RTL (decl, insn);
10317 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10318 }
10319 else
10320 delete_insn (insn);
10321 }
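/* Illustration only (schematic dump syntax, hypothetical label name): a
   debug bind whose decl is a named user label that never received RTL,
   e.g.

     (debug_insn (var_location (label_decl "retry") ...))

   is not removed but demoted in place to

     (note NOTE_INSN_DELETED_DEBUG_LABEL ("retry"))

   numbered from the compilation-wide debug_label_num counter, so debug
   info can still refer to the label; any other VTA debug insn is deleted
   outright (markers having first been turned back into notes above).  */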
10322
10323 /* Remove from the insn stream all debug insns used for variable
10324 tracking at assignments. USE_CFG should be false if the cfg is no
10325 longer usable. */
10326
10327 void
10328 delete_vta_debug_insns (bool use_cfg)
10329 {
10330 basic_block bb;
10331 rtx_insn *insn, *next;
10332
10333 if (!MAY_HAVE_DEBUG_INSNS)
10334 return;
10335
10336 if (use_cfg)
10337 FOR_EACH_BB_FN (bb, cfun)
10338 {
10339 FOR_BB_INSNS_SAFE (bb, insn, next)
10340 if (DEBUG_INSN_P (insn))
10341 delete_vta_debug_insn (insn);
10342 }
10343 else
10344 for (insn = get_insns (); insn; insn = next)
10345 {
10346 next = NEXT_INSN (insn);
10347 if (DEBUG_INSN_P (insn))
10348 delete_vta_debug_insn (insn);
10349 }
10350 }
10351
10352 /* Run a fast, basic-block-local version of var tracking, to take care
10353 of information that the global analysis does not cover, so that not
10354 all of it is lost. If SKIPPED holds, we're skipping the global pass
10355 entirely, so we should also try to use the information it would have
10356 handled. */
10357
10358 static void
10359 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10360 {
10361 /* ??? Just skip it all for now. */
10362 delete_vta_debug_insns (true);
10363 }
10364
10365 /* Free the data structures needed for variable tracking. */
10366
10367 static void
10368 vt_finalize (void)
10369 {
10370 basic_block bb;
10371
10372 FOR_EACH_BB_FN (bb, cfun)
10373 {
10374 VTI (bb)->mos.release ();
10375 }
10376
10377 FOR_ALL_BB_FN (bb, cfun)
10378 {
10379 dataflow_set_destroy (&VTI (bb)->in);
10380 dataflow_set_destroy (&VTI (bb)->out);
10381 if (VTI (bb)->permp)
10382 {
10383 dataflow_set_destroy (VTI (bb)->permp);
10384 XDELETE (VTI (bb)->permp);
10385 }
10386 }
10387 free_aux_for_blocks ();
10388 delete empty_shared_hash->htab;
10389 empty_shared_hash->htab = NULL;
10390 delete changed_variables;
10391 changed_variables = NULL;
10392 attrs_pool.release ();
10393 var_pool.release ();
10394 location_chain_pool.release ();
10395 shared_hash_pool.release ();
10396
10397 if (MAY_HAVE_DEBUG_BIND_INSNS)
10398 {
10399 if (global_get_addr_cache)
10400 delete global_get_addr_cache;
10401 global_get_addr_cache = NULL;
10402 loc_exp_dep_pool.release ();
10403 valvar_pool.release ();
10404 preserved_values.release ();
10405 cselib_finish ();
10406 BITMAP_FREE (scratch_regs);
10407 scratch_regs = NULL;
10408 }
10409
10410 #ifdef HAVE_window_save
10411 vec_free (windowed_parm_regs);
10412 #endif
10413
10414 if (vui_vec)
10415 XDELETEVEC (vui_vec);
10416 vui_vec = NULL;
10417 vui_allocated = 0;
10418 }
10419
10420 /* The entry point to variable tracking pass. */
10421
10422 static inline unsigned int
10423 variable_tracking_main_1 (void)
10424 {
10425 bool success;
10426
10427 /* We won't be called as a separate pass if flag_var_tracking is not
10428 set, but final may call us to turn debug markers into notes. */
10429 if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
10430 || flag_var_tracking_assignments < 0
10431 /* Var-tracking right now assumes the IR doesn't contain
10432 any pseudos at this point. */
10433 || targetm.no_register_allocation)
10434 {
10435 delete_vta_debug_insns (true);
10436 return 0;
10437 }
10438
10439 if (!flag_var_tracking)
10440 return 0;
10441
10442 if (n_basic_blocks_for_fn (cfun) > 500
10443 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10444 {
10445 vt_debug_insns_local (true);
10446 return 0;
10447 }
10448
10449 mark_dfs_back_edges ();
10450 if (!vt_initialize ())
10451 {
10452 vt_finalize ();
10453 vt_debug_insns_local (true);
10454 return 0;
10455 }
10456
10457 success = vt_find_locations ();
10458
10459 if (!success && flag_var_tracking_assignments > 0)
10460 {
10461 vt_finalize ();
10462
10463 delete_vta_debug_insns (true);
10464
10465 /* This is later restored by our caller. */
10466 flag_var_tracking_assignments = 0;
10467
10468 success = vt_initialize ();
10469 gcc_assert (success);
10470
10471 success = vt_find_locations ();
10472 }
10473
10474 if (!success)
10475 {
10476 vt_finalize ();
10477 vt_debug_insns_local (false);
10478 return 0;
10479 }
10480
10481 if (dump_file && (dump_flags & TDF_DETAILS))
10482 {
10483 dump_dataflow_sets ();
10484 dump_reg_info (dump_file);
10485 dump_flow_info (dump_file, dump_flags);
10486 }
10487
10488 timevar_push (TV_VAR_TRACKING_EMIT);
10489 vt_emit_notes ();
10490 timevar_pop (TV_VAR_TRACKING_EMIT);
10491
10492 vt_finalize ();
10493 vt_debug_insns_local (false);
10494 return 0;
10495 }
10496
10497 unsigned int
10498 variable_tracking_main (void)
10499 {
10500 unsigned int ret;
10501 int save = flag_var_tracking_assignments;
10502
10503 ret = variable_tracking_main_1 ();
10504
10505 flag_var_tracking_assignments = save;
10506
10507 return ret;
10508 }
10509 \f
10510 namespace {
10511
10512 const pass_data pass_data_variable_tracking =
10513 {
10514 RTL_PASS, /* type */
10515 "vartrack", /* name */
10516 OPTGROUP_NONE, /* optinfo_flags */
10517 TV_VAR_TRACKING, /* tv_id */
10518 0, /* properties_required */
10519 0, /* properties_provided */
10520 0, /* properties_destroyed */
10521 0, /* todo_flags_start */
10522 0, /* todo_flags_finish */
10523 };
10524
10525 class pass_variable_tracking : public rtl_opt_pass
10526 {
10527 public:
10528 pass_variable_tracking (gcc::context *ctxt)
10529 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10530 {}
10531
10532 /* opt_pass methods: */
10533 virtual bool gate (function *)
10534 {
10535 return (flag_var_tracking && !targetm.delay_vartrack);
10536 }
10537
10538 virtual unsigned int execute (function *)
10539 {
10540 return variable_tracking_main ();
10541 }
10542
10543 }; // class pass_variable_tracking
10544
10545 } // anon namespace
10546
10547 rtl_opt_pass *
10548 make_pass_variable_tracking (gcc::context *ctxt)
10549 {
10550 return new pass_variable_tracking (ctxt);
10551 }