gcc/cselib.c
1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "df.h"
27 #include "alias.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "emit-rtl.h"
34 #include "diagnostic-core.h"
35 #include "dumpfile.h"
36 #include "alloc-pool.h"
37 #include "cselib.h"
38 #include "valtrack.h"
39 #include "params.h"
40 #include "alloc-pool.h"
41 #include "target.h"
42
43 /* A list of cselib_val structures. */
44 struct elt_list
45 {
46 struct elt_list *next;
47 cselib_val *elt;
48
49 /* Pool allocation new operator. */
50 inline void *operator new (size_t)
51 {
52 return pool.allocate ();
53 }
54
55 /* Delete operator utilizing pool allocation. */
56 inline void operator delete (void *ptr)
57 {
58 pool.remove ((elt_list *) ptr);
59 }
60
61 /* Memory allocation pool. */
62 static pool_allocator<elt_list> pool;
63 };
64
65 static bool cselib_record_memory;
66 static bool cselib_preserve_constants;
67 static bool cselib_any_perm_equivs;
68 static inline void promote_debug_loc (struct elt_loc_list *l);
69 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
70 static void new_elt_loc_list (cselib_val *, rtx);
71 static void unchain_one_value (cselib_val *);
72 static void unchain_one_elt_list (struct elt_list **);
73 static void unchain_one_elt_loc_list (struct elt_loc_list **);
74 static void remove_useless_values (void);
75 static int rtx_equal_for_cselib_1 (rtx, rtx, machine_mode);
76 static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
77 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
78 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
79 static cselib_val *cselib_lookup_mem (rtx, int);
80 static void cselib_invalidate_regno (unsigned int, machine_mode);
81 static void cselib_invalidate_mem (rtx);
82 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
83 static void cselib_record_sets (rtx_insn *);
84
85 struct expand_value_data
86 {
87 bitmap regs_active;
88 cselib_expand_callback callback;
89 void *callback_arg;
90 bool dummy;
91 };
92
93 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
94
95 /* There are three ways in which cselib can look up an rtx:
96 - for a REG, the reg_values table (which is indexed by regno) is used
97 - for a MEM, we recursively look up its address and then follow the
98 addr_list of that value
99 - for everything else, we compute a hash value and go through the hash
100 table. Since different rtx's can still have the same hash value,
101 this involves walking the table entries for a given value and comparing
102 the locations of the entries with the rtx we are looking up. */
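/* For illustration (not part of the lookup code): given a source
   expression such as (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))), the REG
   is resolved through REG_VALUES, the MEM is resolved by first looking
   up its address (reg:SI 2) and then scanning that value's addr_list,
   and the PLUS itself is hashed to probe the hash table, with
   rtx_equal_for_cselib_1 used to resolve hash collisions.  */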
103
104 struct cselib_hasher : nofree_ptr_hash <cselib_val>
105 {
106 struct key {
107 /* The rtx value and its mode (needed separately for constant
108 integers). */
109 machine_mode mode;
110 rtx x;
111     /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
112 machine_mode memmode;
113 };
114 typedef key *compare_type;
115 static inline hashval_t hash (const cselib_val *);
116 static inline bool equal (const cselib_val *, const key *);
117 };
118
119 /* The hash function for our hash table. The value is always computed with
120 cselib_hash_rtx when adding an element; this function just extracts the
121 hash value from a cselib_val structure. */
122
123 inline hashval_t
124 cselib_hasher::hash (const cselib_val *v)
125 {
126 return v->hash;
127 }
128
129 /* The equality test for our hash table. The first argument V is a table
130 element (i.e. a cselib_val), while the second arg X is an rtx. We know
131 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
132 CONST of an appropriate mode. */
133
134 inline bool
135 cselib_hasher::equal (const cselib_val *v, const key *x_arg)
136 {
137 struct elt_loc_list *l;
138 rtx x = x_arg->x;
139 machine_mode mode = x_arg->mode;
140 machine_mode memmode = x_arg->memmode;
141
142 if (mode != GET_MODE (v->val_rtx))
143 return false;
144
145 if (GET_CODE (x) == VALUE)
146 return x == v->val_rtx;
147
148 /* We don't guarantee that distinct rtx's have different hash values,
149 so we need to do a comparison. */
150 for (l = v->locs; l; l = l->next)
151 if (rtx_equal_for_cselib_1 (l->loc, x, memmode))
152 {
153 promote_debug_loc (l);
154 return true;
155 }
156
157 return false;
158 }
159
160 /* A table that enables us to look up elts by their value. */
161 static hash_table<cselib_hasher> *cselib_hash_table;
162
163 /* A table to hold preserved values. */
164 static hash_table<cselib_hasher> *cselib_preserved_hash_table;
165
166 /* This is a global so we don't have to pass this through every function.
167 It is used in new_elt_loc_list to set SETTING_INSN. */
168 static rtx_insn *cselib_current_insn;
169
170 /* The unique id that the next created value will take.  */
171 static unsigned int next_uid;
172
173 /* The number of registers we had when the varrays were last resized. */
174 static unsigned int cselib_nregs;
175
176 /* Count values without known locations, or with only locations that
177 wouldn't have been known except for debug insns. Whenever this
178 grows too big, we remove these useless values from the table.
179
180 Counting values with only debug values is a bit tricky. We don't
181 want to increment n_useless_values when we create a value for a
182 debug insn, for this would get n_useless_values out of sync, but we
183    want to increment it if all locs in the list that were ever referenced
184 in nondebug insns are removed from the list.
185
186 In the general case, once we do that, we'd have to stop accepting
187 nondebug expressions in the loc list, to avoid having two values
188 equivalent that, without debug insns, would have been made into
189 separate values. However, because debug insns never introduce
190 equivalences themselves (no assignments), the only means for
191 growing loc lists is through nondebug assignments. If the locs
192 also happen to be referenced in debug insns, it will work just fine.
193
194 A consequence of this is that there's at most one debug-only loc in
195 each loc list. If we keep it in the first entry, testing whether
196 we have a debug-only loc list takes O(1).
197
198 Furthermore, since any additional entry in a loc list containing a
199 debug loc would have to come from an assignment (nondebug) that
200 references both the initial debug loc and the newly-equivalent loc,
201 the initial debug loc would be promoted to a nondebug loc, and the
202 loc list would not contain debug locs any more.
203
204 So the only case we have to be careful with in order to keep
205 n_useless_values in sync between debug and nondebug compilations is
206 to avoid incrementing n_useless_values when removing the single loc
207 from a value that turns out to not appear outside debug values. We
208 increment n_useless_debug_values instead, and leave such values
209 alone until, for other reasons, we garbage-collect useless
210 values. */
211 static int n_useless_values;
212 static int n_useless_debug_values;
213
214 /* Count values whose locs have been taken exclusively from debug
215 insns for the entire life of the value. */
216 static int n_debug_values;
217
218 /* Number of useless values before we remove them from the hash table. */
219 #define MAX_USELESS_VALUES 32
220
221 /* This table maps from register number to values. It does not
222 contain pointers to cselib_val structures, but rather elt_lists.
223 The purpose is to be able to refer to the same register in
224 different modes. The first element of the list defines the mode in
225 which the register was set; if the mode is unknown or the value is
226 no longer valid in that mode, ELT will be NULL for the first
227 element. */
228 static struct elt_list **reg_values;
229 static unsigned int reg_values_size;
230 #define REG_VALUES(i) reg_values[i]
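/* For example (purely illustrative; the exact contents depend on the
   lookups performed), after "(set (reg:DI 100) ...)" followed by an
   SImode use of register 100, REG_VALUES (100) might be

     { elt = V1:DI } -> { elt = V2:SI } -> NULL

   where V1 is the value the register was set to in DImode and V2 is a
   value created later for the narrower use.  If the setting mode is
   unknown, the first entry's ELT is NULL instead.  */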
231
232 /* The largest number of hard regs used by any entry added to the
233 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
234 static unsigned int max_value_regs;
235
236 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
237 in cselib_clear_table() for fast emptying. */
238 static unsigned int *used_regs;
239 static unsigned int n_used_regs;
240
241 /* We pass this to cselib_invalidate_mem to invalidate all of
242 memory for a non-const call instruction. */
243 static GTY(()) rtx callmem;
244
245 /* Set by discard_useless_locs if it deleted the last location of any
246 value. */
247 static int values_became_useless;
248
249 /* Used as stop element of the containing_mem list so we can check
250 presence in the list by checking the next pointer. */
251 static cselib_val dummy_val;
252
253 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
254 that is constant through the whole function and should never be
255 eliminated. */
256 static cselib_val *cfa_base_preserved_val;
257 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
258
259 /* Used to list all values that contain a memory reference.
260 May or may not contain the useless values - the list is compacted
261 each time memory is invalidated. */
262 static cselib_val *first_containing_mem = &dummy_val;
263
264 pool_allocator<elt_list> elt_list::pool ("elt_list", 10);
265 pool_allocator<elt_loc_list> elt_loc_list::pool ("elt_loc_list", 10);
266 pool_allocator<cselib_val> cselib_val::pool ("cselib_val_list", 10);
267
268 static pool_allocator<rtx_def> value_pool ("value", 100, RTX_CODE_SIZE (VALUE),
269 true);
270
271 /* If nonnull, cselib will call this function before freeing useless
272 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
273 void (*cselib_discard_hook) (cselib_val *);
274
275 /* If nonnull, cselib will call this function before recording sets or
276 even clobbering outputs of INSN. All the recorded sets will be
277 represented in the array sets[n_sets]. new_val_min can be used to
278 tell whether values present in sets are introduced by this
279 instruction. */
280 void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
281 int n_sets);
282
283 #define PRESERVED_VALUE_P(RTX) \
284 (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
285
286 #define SP_BASED_VALUE_P(RTX) \
287 (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
288
289 \f
290
291 /* Allocate a struct elt_list and fill in its two elements with the
292 arguments. */
293
294 static inline struct elt_list *
295 new_elt_list (struct elt_list *next, cselib_val *elt)
296 {
297 elt_list *el = new elt_list ();
298 el->next = next;
299 el->elt = elt;
300 return el;
301 }
302
303 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
304 list. */
305
306 static inline void
307 new_elt_loc_list (cselib_val *val, rtx loc)
308 {
309 struct elt_loc_list *el, *next = val->locs;
310
311 gcc_checking_assert (!next || !next->setting_insn
312 || !DEBUG_INSN_P (next->setting_insn)
313 || cselib_current_insn == next->setting_insn);
314
315 /* If we're creating the first loc in a debug insn context, we've
316 just created a debug value. Count it. */
317 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
318 n_debug_values++;
319
320 val = canonical_cselib_val (val);
321 next = val->locs;
322
323 if (GET_CODE (loc) == VALUE)
324 {
325 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
326
327 gcc_checking_assert (PRESERVED_VALUE_P (loc)
328 == PRESERVED_VALUE_P (val->val_rtx));
329
330 if (val->val_rtx == loc)
331 return;
332 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
333 {
334 /* Reverse the insertion. */
335 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
336 return;
337 }
338
339 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
340
341 if (CSELIB_VAL_PTR (loc)->locs)
342 {
343 /* Bring all locs from LOC to VAL. */
344 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
345 {
346 /* Adjust values that have LOC as canonical so that VAL
347 becomes their canonical. */
348 if (el->loc && GET_CODE (el->loc) == VALUE)
349 {
350 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
351 == loc);
352 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
353 }
354 }
355 el->next = val->locs;
356 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
357 }
358
359 if (CSELIB_VAL_PTR (loc)->addr_list)
360 {
361 /* Bring in addr_list into canonical node. */
362 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
363 while (last->next)
364 last = last->next;
365 last->next = val->addr_list;
366 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
367 CSELIB_VAL_PTR (loc)->addr_list = NULL;
368 }
369
370 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
371 && val->next_containing_mem == NULL)
372 {
373 /* Add VAL to the containing_mem list after LOC. LOC will
374 be removed when we notice it doesn't contain any
375 MEMs. */
376 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
377 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
378 }
379
380 /* Chain LOC back to VAL. */
381 el = new elt_loc_list;
382 el->loc = val->val_rtx;
383 el->setting_insn = cselib_current_insn;
384 el->next = NULL;
385 CSELIB_VAL_PTR (loc)->locs = el;
386 }
387
388 el = new elt_loc_list;
389 el->loc = loc;
390 el->setting_insn = cselib_current_insn;
391 el->next = next;
392 val->locs = el;
393 }
394
395 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
396 originating from a debug insn, maintaining the debug values
397 count. */
398
399 static inline void
400 promote_debug_loc (struct elt_loc_list *l)
401 {
402 if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
403 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
404 {
405 n_debug_values--;
406 l->setting_insn = cselib_current_insn;
407 if (cselib_preserve_constants && l->next)
408 {
409 gcc_assert (l->next->setting_insn
410 && DEBUG_INSN_P (l->next->setting_insn)
411 && !l->next->next);
412 l->next->setting_insn = cselib_current_insn;
413 }
414 else
415 gcc_assert (!l->next);
416 }
417 }
418
419 /* The elt_list at *PL is no longer needed. Unchain it and free its
420 storage. */
421
422 static inline void
423 unchain_one_elt_list (struct elt_list **pl)
424 {
425 struct elt_list *l = *pl;
426
427 *pl = l->next;
428 delete l;
429 }
430
431 /* Likewise for elt_loc_lists. */
432
433 static void
434 unchain_one_elt_loc_list (struct elt_loc_list **pl)
435 {
436 struct elt_loc_list *l = *pl;
437
438 *pl = l->next;
439 delete l;
440 }
441
442 /* Likewise for cselib_vals. This also frees the addr_list associated with
443 V. */
444
445 static void
446 unchain_one_value (cselib_val *v)
447 {
448 while (v->addr_list)
449 unchain_one_elt_list (&v->addr_list);
450
451 delete v;
452 }
453
454 /* Remove all entries from the hash table. Also used during
455 initialization. */
456
457 void
458 cselib_clear_table (void)
459 {
460 cselib_reset_table (1);
461 }
462
463 /* Return TRUE if V is a constant, a function invariant or a VALUE
464 equivalence; FALSE otherwise. */
465
466 static bool
467 invariant_or_equiv_p (cselib_val *v)
468 {
469 struct elt_loc_list *l;
470
471 if (v == cfa_base_preserved_val)
472 return true;
473
474 /* Keep VALUE equivalences around. */
475 for (l = v->locs; l; l = l->next)
476 if (GET_CODE (l->loc) == VALUE)
477 return true;
478
479 if (v->locs != NULL
480 && v->locs->next == NULL)
481 {
482 if (CONSTANT_P (v->locs->loc)
483 && (GET_CODE (v->locs->loc) != CONST
484 || !references_value_p (v->locs->loc, 0)))
485 return true;
486 /* Although a debug expr may be bound to different expressions,
487 we can preserve it as if it was constant, to get unification
488 and proper merging within var-tracking. */
489 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
490 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
491 || GET_CODE (v->locs->loc) == ENTRY_VALUE
492 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
493 return true;
494
495 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
496 if (GET_CODE (v->locs->loc) == PLUS
497 && CONST_INT_P (XEXP (v->locs->loc, 1))
498 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
499 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
500 return true;
501 }
502
503 return false;
504 }
505
506 /* Remove from hash table all VALUEs except constants, function
507 invariants and VALUE equivalences. */
508
509 int
510 preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
511 {
512 cselib_val *v = *x;
513
514 if (invariant_or_equiv_p (v))
515 {
516 cselib_hasher::key lookup = {
517 GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
518 };
519 cselib_val **slot
520 = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
521 v->hash, INSERT);
522 gcc_assert (!*slot);
523 *slot = v;
524 }
525
526 cselib_hash_table->clear_slot (x);
527
528 return 1;
529 }
530
531 /* Remove all entries from the hash table, arranging for the next
532 value to be numbered NUM. */
533
534 void
535 cselib_reset_table (unsigned int num)
536 {
537 unsigned int i;
538
539 max_value_regs = 0;
540
541 if (cfa_base_preserved_val)
542 {
543 unsigned int regno = cfa_base_preserved_regno;
544 unsigned int new_used_regs = 0;
545 for (i = 0; i < n_used_regs; i++)
546 if (used_regs[i] == regno)
547 {
548 new_used_regs = 1;
549 continue;
550 }
551 else
552 REG_VALUES (used_regs[i]) = 0;
553 gcc_assert (new_used_regs == 1);
554 n_used_regs = new_used_regs;
555 used_regs[0] = regno;
556 max_value_regs
557 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
558 }
559 else
560 {
561 for (i = 0; i < n_used_regs; i++)
562 REG_VALUES (used_regs[i]) = 0;
563 n_used_regs = 0;
564 }
565
566 if (cselib_preserve_constants)
567 cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
568 (NULL);
569 else
570 {
571 cselib_hash_table->empty ();
572 gcc_checking_assert (!cselib_any_perm_equivs);
573 }
574
575 n_useless_values = 0;
576 n_useless_debug_values = 0;
577 n_debug_values = 0;
578
579 next_uid = num;
580
581 first_containing_mem = &dummy_val;
582 }
583
584 /* Return the number of the next value that will be generated. */
585
586 unsigned int
587 cselib_get_next_uid (void)
588 {
589 return next_uid;
590 }
591
592 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
593 INSERTing if requested. When X is part of the address of a MEM,
594 MEMMODE should specify the mode of the MEM. */
595
596 static cselib_val **
597 cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
598 enum insert_option insert, machine_mode memmode)
599 {
600 cselib_val **slot = NULL;
601 cselib_hasher::key lookup = { mode, x, memmode };
602 if (cselib_preserve_constants)
603 slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
604 NO_INSERT);
605 if (!slot)
606 slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
607 return slot;
608 }
609
610 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
611 only return true for values which point to a cselib_val whose value
612 element has been set to zero, which implies the cselib_val will be
613 removed. */
614
615 int
616 references_value_p (const_rtx x, int only_useless)
617 {
618 const enum rtx_code code = GET_CODE (x);
619 const char *fmt = GET_RTX_FORMAT (code);
620 int i, j;
621
622 if (GET_CODE (x) == VALUE
623 && (! only_useless ||
624 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
625 return 1;
626
627 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
628 {
629 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
630 return 1;
631 else if (fmt[i] == 'E')
632 for (j = 0; j < XVECLEN (x, i); j++)
633 if (references_value_p (XVECEXP (x, i, j), only_useless))
634 return 1;
635 }
636
637 return 0;
638 }
639
640 /* For all locations found in X, delete locations that reference useless
641 values (i.e. values without any location). Called through
642 htab_traverse. */
643
644 int
645 discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
646 {
647 cselib_val *v = *x;
648 struct elt_loc_list **p = &v->locs;
649 bool had_locs = v->locs != NULL;
650 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
651
652 while (*p)
653 {
654 if (references_value_p ((*p)->loc, 1))
655 unchain_one_elt_loc_list (p);
656 else
657 p = &(*p)->next;
658 }
659
660 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
661 {
662 if (setting_insn && DEBUG_INSN_P (setting_insn))
663 n_useless_debug_values++;
664 else
665 n_useless_values++;
666 values_became_useless = 1;
667 }
668 return 1;
669 }
670
671 /* If X is a value with no locations, remove it from the hashtable. */
672
673 int
674 discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
675 {
676 cselib_val *v = *x;
677
678 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
679 {
680 if (cselib_discard_hook)
681 cselib_discard_hook (v);
682
683 CSELIB_VAL_PTR (v->val_rtx) = NULL;
684 cselib_hash_table->clear_slot (x);
685 unchain_one_value (v);
686 n_useless_values--;
687 }
688
689 return 1;
690 }
691
692 /* Clean out useless values (i.e. those which no longer have locations
693 associated with them) from the hash table. */
694
695 static void
696 remove_useless_values (void)
697 {
698 cselib_val **p, *v;
699
700 /* First pass: eliminate locations that reference the value. That in
701 turn can make more values useless. */
702 do
703 {
704 values_became_useless = 0;
705 cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
706 }
707 while (values_became_useless);
708
709 /* Second pass: actually remove the values. */
710
711 p = &first_containing_mem;
712 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
713 if (v->locs && v == canonical_cselib_val (v))
714 {
715 *p = v;
716 p = &(*p)->next_containing_mem;
717 }
718 *p = &dummy_val;
719
720 n_useless_values += n_useless_debug_values;
721 n_debug_values -= n_useless_debug_values;
722 n_useless_debug_values = 0;
723
724 cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
725
726 gcc_assert (!n_useless_values);
727 }
728
729 /* Arrange for a value to not be removed from the hash table even if
730 it becomes useless. */
731
732 void
733 cselib_preserve_value (cselib_val *v)
734 {
735 PRESERVED_VALUE_P (v->val_rtx) = 1;
736 }
737
738 /* Test whether a value is preserved. */
739
740 bool
741 cselib_preserved_value_p (cselib_val *v)
742 {
743 return PRESERVED_VALUE_P (v->val_rtx);
744 }
745
746 /* Arrange for a REG value to be assumed constant through the whole function,
747 never invalidated and preserved across cselib_reset_table calls. */
748
749 void
750 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
751 {
752 if (cselib_preserve_constants
753 && v->locs
754 && REG_P (v->locs->loc))
755 {
756 cfa_base_preserved_val = v;
757 cfa_base_preserved_regno = regno;
758 }
759 }
760
761 /* Clean all non-constant expressions in the hash table, but retain
762 their values. */
763
764 void
765 cselib_preserve_only_values (void)
766 {
767 int i;
768
769 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
770 cselib_invalidate_regno (i, reg_raw_mode[i]);
771
772 cselib_invalidate_mem (callmem);
773
774 remove_useless_values ();
775
776 gcc_assert (first_containing_mem == &dummy_val);
777 }
778
779 /* Arrange for a value to be marked as based on stack pointer
780 for find_base_term purposes. */
781
782 void
783 cselib_set_value_sp_based (cselib_val *v)
784 {
785 SP_BASED_VALUE_P (v->val_rtx) = 1;
786 }
787
788 /* Test whether a value is based on stack pointer for
789 find_base_term purposes. */
790
791 bool
792 cselib_sp_based_value_p (cselib_val *v)
793 {
794 return SP_BASED_VALUE_P (v->val_rtx);
795 }
796
797 /* Return the mode in which a register was last set. If X is not a
798 register, return its mode. If the mode in which the register was
799 set is not known, or the value was already clobbered, return
800 VOIDmode. */
801
802 machine_mode
803 cselib_reg_set_mode (const_rtx x)
804 {
805 if (!REG_P (x))
806 return GET_MODE (x);
807
808 if (REG_VALUES (REGNO (x)) == NULL
809 || REG_VALUES (REGNO (x))->elt == NULL)
810 return VOIDmode;
811
812 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
813 }
814
815 /* Return nonzero if we can prove that X and Y contain the same value, taking
816 our gathered information into account. */
817
818 int
819 rtx_equal_for_cselib_p (rtx x, rtx y)
820 {
821 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
822 }
823
824 /* If x is a PLUS or an autoinc operation, expand the operation,
825 storing the offset, if any, in *OFF. */
826
827 static rtx
828 autoinc_split (rtx x, rtx *off, machine_mode memmode)
829 {
830 switch (GET_CODE (x))
831 {
832 case PLUS:
833 *off = XEXP (x, 1);
834 return XEXP (x, 0);
835
836 case PRE_DEC:
837 if (memmode == VOIDmode)
838 return x;
839
840 *off = GEN_INT (-GET_MODE_SIZE (memmode));
841 return XEXP (x, 0);
842 break;
843
844 case PRE_INC:
845 if (memmode == VOIDmode)
846 return x;
847
848 *off = GEN_INT (GET_MODE_SIZE (memmode));
849 return XEXP (x, 0);
850
851 case PRE_MODIFY:
852 return XEXP (x, 1);
853
854 case POST_DEC:
855 case POST_INC:
856 case POST_MODIFY:
857 return XEXP (x, 0);
858
859 default:
860 return x;
861 }
862 }
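/* Worked example (illustrative only): splitting the address of
   (mem:SI (pre_dec:P (reg:P sp))) with MEMMODE == SImode returns the
   base (reg:P sp) and sets *OFF to (const_int -4) on a target where
   GET_MODE_SIZE (SImode) == 4, i.e. the pre-decrement is exposed as
   "base plus constant offset" for comparison and hashing purposes.  */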
863
864 /* Return nonzero if we can prove that X and Y contain the same value,
865 taking our gathered information into account. MEMMODE holds the
866 mode of the enclosing MEM, if any, as required to deal with autoinc
867 addressing modes. If X and Y are not (known to be) part of
868 addresses, MEMMODE should be VOIDmode. */
869
870 static int
871 rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode)
872 {
873 enum rtx_code code;
874 const char *fmt;
875 int i;
876
877 if (REG_P (x) || MEM_P (x))
878 {
879 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
880
881 if (e)
882 x = e->val_rtx;
883 }
884
885 if (REG_P (y) || MEM_P (y))
886 {
887 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
888
889 if (e)
890 y = e->val_rtx;
891 }
892
893 if (x == y)
894 return 1;
895
896 if (GET_CODE (x) == VALUE)
897 {
898 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
899 struct elt_loc_list *l;
900
901 if (GET_CODE (y) == VALUE)
902 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
903
904 for (l = e->locs; l; l = l->next)
905 {
906 rtx t = l->loc;
907
908 /* Avoid infinite recursion. We know we have the canonical
909 value, so we can just skip any values in the equivalence
910 list. */
911 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
912 continue;
913 else if (rtx_equal_for_cselib_1 (t, y, memmode))
914 return 1;
915 }
916
917 return 0;
918 }
919 else if (GET_CODE (y) == VALUE)
920 {
921 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
922 struct elt_loc_list *l;
923
924 for (l = e->locs; l; l = l->next)
925 {
926 rtx t = l->loc;
927
928 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
929 continue;
930 else if (rtx_equal_for_cselib_1 (x, t, memmode))
931 return 1;
932 }
933
934 return 0;
935 }
936
937 if (GET_MODE (x) != GET_MODE (y))
938 return 0;
939
940 if (GET_CODE (x) != GET_CODE (y))
941 {
942 rtx xorig = x, yorig = y;
943 rtx xoff = NULL, yoff = NULL;
944
945 x = autoinc_split (x, &xoff, memmode);
946 y = autoinc_split (y, &yoff, memmode);
947
948 if (!xoff != !yoff)
949 return 0;
950
951 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
952 return 0;
953
954 /* Don't recurse if nothing changed. */
955 if (x != xorig || y != yorig)
956 return rtx_equal_for_cselib_1 (x, y, memmode);
957
958 return 0;
959 }
960
961 /* These won't be handled correctly by the code below. */
962 switch (GET_CODE (x))
963 {
964 CASE_CONST_UNIQUE:
965 case DEBUG_EXPR:
966 return 0;
967
968 case DEBUG_IMPLICIT_PTR:
969 return DEBUG_IMPLICIT_PTR_DECL (x)
970 == DEBUG_IMPLICIT_PTR_DECL (y);
971
972 case DEBUG_PARAMETER_REF:
973 return DEBUG_PARAMETER_REF_DECL (x)
974 == DEBUG_PARAMETER_REF_DECL (y);
975
976 case ENTRY_VALUE:
977       /* ENTRY_VALUEs are function invariant, so it is undesirable to
978 use rtx_equal_for_cselib_1 to compare the operands. */
979 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
980
981 case LABEL_REF:
982 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
983
984 case REG:
985 return REGNO (x) == REGNO (y);
986
987 case MEM:
988 /* We have to compare any autoinc operations in the addresses
989 using this MEM's mode. */
990 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
991
992 default:
993 break;
994 }
995
996 code = GET_CODE (x);
997 fmt = GET_RTX_FORMAT (code);
998
999 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1000 {
1001 int j;
1002
1003 switch (fmt[i])
1004 {
1005 case 'w':
1006 if (XWINT (x, i) != XWINT (y, i))
1007 return 0;
1008 break;
1009
1010 case 'n':
1011 case 'i':
1012 if (XINT (x, i) != XINT (y, i))
1013 return 0;
1014 break;
1015
1016 case 'V':
1017 case 'E':
1018 /* Two vectors must have the same length. */
1019 if (XVECLEN (x, i) != XVECLEN (y, i))
1020 return 0;
1021
1022 /* And the corresponding elements must match. */
1023 for (j = 0; j < XVECLEN (x, i); j++)
1024 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
1025 XVECEXP (y, i, j), memmode))
1026 return 0;
1027 break;
1028
1029 case 'e':
1030 if (i == 1
1031 && targetm.commutative_p (x, UNKNOWN)
1032 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
1033 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
1034 return 1;
1035 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
1036 return 0;
1037 break;
1038
1039 case 'S':
1040 case 's':
1041 if (strcmp (XSTR (x, i), XSTR (y, i)))
1042 return 0;
1043 break;
1044
1045 case 'u':
1046 /* These are just backpointers, so they don't matter. */
1047 break;
1048
1049 case '0':
1050 case 't':
1051 break;
1052
1053 /* It is believed that rtx's at this level will never
1054 contain anything but integers and other rtx's,
1055 except for within LABEL_REFs and SYMBOL_REFs. */
1056 default:
1057 gcc_unreachable ();
1058 }
1059 }
1060 return 1;
1061 }
1062
1063 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1064 For registers and memory locations, we look up their cselib_val structure
1065 and return its VALUE element.
1066    Possible reasons for returning 0 are: the object is volatile, or we couldn't
1067 find a register or memory location in the table and CREATE is zero. If
1068 CREATE is nonzero, table elts are created for regs and mem.
1069 N.B. this hash function returns the same hash value for RTXes that
1070 differ only in the order of operands, thus it is suitable for comparisons
1071 that take commutativity into account.
1072 If we wanted to also support associative rules, we'd have to use a different
1073 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
1074 MEMMODE indicates the mode of an enclosing MEM, and it's only
1075 used to compute autoinc values.
1076 We used to have a MODE argument for hashing for CONST_INTs, but that
1077 didn't make sense, since it caused spurious hash differences between
1078 (set (reg:SI 1) (const_int))
1079 (plus:SI (reg:SI 2) (reg:SI 1))
1080 and
1081 (plus:SI (reg:SI 2) (const_int))
1082 If the mode is important in any context, it must be checked specifically
1083 in a comparison anyway, since relying on hash differences is unsafe. */
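/* As an illustration of the commutativity property: operand hashes are
   combined by addition below, so (plus:SI (reg:SI 1) (reg:SI 2)) and
   (plus:SI (reg:SI 2) (reg:SI 1)) receive the same hash value, matching
   the commutative-operand handling in rtx_equal_for_cselib_1.  (Example
   only; the exact hash values are unspecified.)  */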
1084
1085 static unsigned int
1086 cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1087 {
1088 cselib_val *e;
1089 int i, j;
1090 enum rtx_code code;
1091 const char *fmt;
1092 unsigned int hash = 0;
1093
1094 code = GET_CODE (x);
1095 hash += (unsigned) code + (unsigned) GET_MODE (x);
1096
1097 switch (code)
1098 {
1099 case VALUE:
1100 e = CSELIB_VAL_PTR (x);
1101 return e->hash;
1102
1103 case MEM:
1104 case REG:
1105 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1106 if (! e)
1107 return 0;
1108
1109 return e->hash;
1110
1111 case DEBUG_EXPR:
1112 hash += ((unsigned) DEBUG_EXPR << 7)
1113 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1114 return hash ? hash : (unsigned int) DEBUG_EXPR;
1115
1116 case DEBUG_IMPLICIT_PTR:
1117 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1118 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1119 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1120
1121 case DEBUG_PARAMETER_REF:
1122 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1123 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1124 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1125
1126 case ENTRY_VALUE:
1127 /* ENTRY_VALUEs are function invariant, thus try to avoid
1128 recursing on argument if ENTRY_VALUE is one of the
1129 forms emitted by expand_debug_expr, otherwise
1130 ENTRY_VALUE hash would depend on the current value
1131 in some register or memory. */
1132 if (REG_P (ENTRY_VALUE_EXP (x)))
1133 hash += (unsigned int) REG
1134 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1135 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1136 else if (MEM_P (ENTRY_VALUE_EXP (x))
1137 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1138 hash += (unsigned int) MEM
1139 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1140 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1141 else
1142 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1143 return hash ? hash : (unsigned int) ENTRY_VALUE;
1144
1145 case CONST_INT:
1146 hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
1147 return hash ? hash : (unsigned int) CONST_INT;
1148
1149 case CONST_WIDE_INT:
1150 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1151 hash += CONST_WIDE_INT_ELT (x, i);
1152 return hash;
1153
1154 case CONST_DOUBLE:
1155 /* This is like the general case, except that it only counts
1156 the integers representing the constant. */
1157 hash += (unsigned) code + (unsigned) GET_MODE (x);
1158 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
1159 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1160 + (unsigned) CONST_DOUBLE_HIGH (x));
1161 else
1162 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1163 return hash ? hash : (unsigned int) CONST_DOUBLE;
1164
1165 case CONST_FIXED:
1166 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1167 hash += fixed_hash (CONST_FIXED_VALUE (x));
1168 return hash ? hash : (unsigned int) CONST_FIXED;
1169
1170 case CONST_VECTOR:
1171 {
1172 int units;
1173 rtx elt;
1174
1175 units = CONST_VECTOR_NUNITS (x);
1176
1177 for (i = 0; i < units; ++i)
1178 {
1179 elt = CONST_VECTOR_ELT (x, i);
1180 hash += cselib_hash_rtx (elt, 0, memmode);
1181 }
1182
1183 return hash;
1184 }
1185
1186 /* Assume there is only one rtx object for any given label. */
1187 case LABEL_REF:
1188 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1189 differences and differences between each stage's debugging dumps. */
1190 hash += (((unsigned int) LABEL_REF << 7)
1191 + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
1192 return hash ? hash : (unsigned int) LABEL_REF;
1193
1194 case SYMBOL_REF:
1195 {
1196 /* Don't hash on the symbol's address to avoid bootstrap differences.
1197 Different hash values may cause expressions to be recorded in
1198 different orders and thus different registers to be used in the
1199 final assembler. This also avoids differences in the dump files
1200 between various stages. */
1201 unsigned int h = 0;
1202 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1203
1204 while (*p)
1205 h += (h << 7) + *p++; /* ??? revisit */
1206
1207 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1208 return hash ? hash : (unsigned int) SYMBOL_REF;
1209 }
1210
1211 case PRE_DEC:
1212 case PRE_INC:
1213 /* We can't compute these without knowing the MEM mode. */
1214 gcc_assert (memmode != VOIDmode);
1215 i = GET_MODE_SIZE (memmode);
1216 if (code == PRE_DEC)
1217 i = -i;
1218 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1219 like (mem:MEMMODE (plus (reg) (const_int I))). */
1220 hash += (unsigned) PLUS - (unsigned)code
1221 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1222 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1223 return hash ? hash : 1 + (unsigned) PLUS;
1224
1225 case PRE_MODIFY:
1226 gcc_assert (memmode != VOIDmode);
1227 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1228
1229 case POST_DEC:
1230 case POST_INC:
1231 case POST_MODIFY:
1232 gcc_assert (memmode != VOIDmode);
1233 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1234
1235 case PC:
1236 case CC0:
1237 case CALL:
1238 case UNSPEC_VOLATILE:
1239 return 0;
1240
1241 case ASM_OPERANDS:
1242 if (MEM_VOLATILE_P (x))
1243 return 0;
1244
1245 break;
1246
1247 default:
1248 break;
1249 }
1250
1251 i = GET_RTX_LENGTH (code) - 1;
1252 fmt = GET_RTX_FORMAT (code);
1253 for (; i >= 0; i--)
1254 {
1255 switch (fmt[i])
1256 {
1257 case 'e':
1258 {
1259 rtx tem = XEXP (x, i);
1260 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1261
1262 if (tem_hash == 0)
1263 return 0;
1264
1265 hash += tem_hash;
1266 }
1267 break;
1268 case 'E':
1269 for (j = 0; j < XVECLEN (x, i); j++)
1270 {
1271 unsigned int tem_hash
1272 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1273
1274 if (tem_hash == 0)
1275 return 0;
1276
1277 hash += tem_hash;
1278 }
1279 break;
1280
1281 case 's':
1282 {
1283 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1284
1285 if (p)
1286 while (*p)
1287 hash += *p++;
1288 break;
1289 }
1290
1291 case 'i':
1292 hash += XINT (x, i);
1293 break;
1294
1295 case '0':
1296 case 't':
1297 /* unused */
1298 break;
1299
1300 default:
1301 gcc_unreachable ();
1302 }
1303 }
1304
1305 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1306 }
1307
1308 /* Create a new value structure for VALUE and initialize it. The mode of the
1309 value is MODE. */
1310
1311 static inline cselib_val *
1312 new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
1313 {
1314 cselib_val *e = new cselib_val;
1315
1316 gcc_assert (hash);
1317 gcc_assert (next_uid);
1318
1319 e->hash = hash;
1320 e->uid = next_uid++;
1321 /* We use an alloc pool to allocate this RTL construct because it
1322 accounts for about 8% of the overall memory usage. We know
1323 precisely when we can have VALUE RTXen (when cselib is active)
1324 so we don't need to put them in garbage collected memory.
1325 ??? Why should a VALUE be an RTX in the first place? */
1326 e->val_rtx = value_pool.allocate ();
1327 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1328 PUT_CODE (e->val_rtx, VALUE);
1329 PUT_MODE (e->val_rtx, mode);
1330 CSELIB_VAL_PTR (e->val_rtx) = e;
1331 e->addr_list = 0;
1332 e->locs = 0;
1333 e->next_containing_mem = 0;
1334
1335 if (dump_file && (dump_flags & TDF_CSELIB))
1336 {
1337 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1338 if (flag_dump_noaddr || flag_dump_unnumbered)
1339 fputs ("# ", dump_file);
1340 else
1341 fprintf (dump_file, "%p ", (void*)e);
1342 print_rtl_single (dump_file, x);
1343 fputc ('\n', dump_file);
1344 }
1345
1346 return e;
1347 }
1348
1349 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1350 contains the data at this address. X is a MEM that represents the
1351 value. Update the two value structures to represent this situation. */
1352
1353 static void
1354 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1355 {
1356 struct elt_loc_list *l;
1357
1358 addr_elt = canonical_cselib_val (addr_elt);
1359 mem_elt = canonical_cselib_val (mem_elt);
1360
1361 /* Avoid duplicates. */
1362 for (l = mem_elt->locs; l; l = l->next)
1363 if (MEM_P (l->loc)
1364 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1365 {
1366 promote_debug_loc (l);
1367 return;
1368 }
1369
1370 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1371 new_elt_loc_list (mem_elt,
1372 replace_equiv_address_nv (x, addr_elt->val_rtx));
1373 if (mem_elt->next_containing_mem == NULL)
1374 {
1375 mem_elt->next_containing_mem = first_containing_mem;
1376 first_containing_mem = mem_elt;
1377 }
1378 }
1379
1380 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1381 If CREATE, make a new one if we haven't seen it before. */
1382
1383 static cselib_val *
1384 cselib_lookup_mem (rtx x, int create)
1385 {
1386 machine_mode mode = GET_MODE (x);
1387 machine_mode addr_mode;
1388 cselib_val **slot;
1389 cselib_val *addr;
1390 cselib_val *mem_elt;
1391 struct elt_list *l;
1392
1393 if (MEM_VOLATILE_P (x) || mode == BLKmode
1394 || !cselib_record_memory
1395 || (FLOAT_MODE_P (mode) && flag_float_store))
1396 return 0;
1397
1398 addr_mode = GET_MODE (XEXP (x, 0));
1399 if (addr_mode == VOIDmode)
1400 addr_mode = Pmode;
1401
1402 /* Look up the value for the address. */
1403 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1404 if (! addr)
1405 return 0;
1406
1407 addr = canonical_cselib_val (addr);
1408 /* Find a value that describes a value of our mode at that address. */
1409 for (l = addr->addr_list; l; l = l->next)
1410 if (GET_MODE (l->elt->val_rtx) == mode)
1411 {
1412 promote_debug_loc (l->elt->locs);
1413 return l->elt;
1414 }
1415
1416 if (! create)
1417 return 0;
1418
1419 mem_elt = new_cselib_val (next_uid, mode, x);
1420 add_mem_for_addr (addr, mem_elt, x);
1421 slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
1422 *slot = mem_elt;
1423 return mem_elt;
1424 }
1425
1426 /* Search through the possible substitutions in P.  We prefer a non-reg
1427    substitution because this allows us to expand the tree further.  If
1428    we find just a reg, take the lowest regno.  There may be several
1429    non-reg results; we just take the first one because they will all
1430    expand to the same place.  */
1431
1432 static rtx
1433 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1434 int max_depth)
1435 {
1436 rtx reg_result = NULL;
1437 unsigned int regno = UINT_MAX;
1438 struct elt_loc_list *p_in = p;
1439
1440 for (; p; p = p->next)
1441 {
1442 /* Return these right away to avoid returning stack pointer based
1443 expressions for frame pointer and vice versa, which is something
1444 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1445 for more details. */
1446 if (REG_P (p->loc)
1447 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1448 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1449 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1450 || REGNO (p->loc) == cfa_base_preserved_regno))
1451 return p->loc;
1452      /* Avoid infinite recursion trying to expand a reg into
1453         the same reg.  */
1454 if ((REG_P (p->loc))
1455 && (REGNO (p->loc) < regno)
1456 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1457 {
1458 reg_result = p->loc;
1459 regno = REGNO (p->loc);
1460 }
1461 /* Avoid infinite recursion and do not try to expand the
1462 value. */
1463 else if (GET_CODE (p->loc) == VALUE
1464 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1465 continue;
1466 else if (!REG_P (p->loc))
1467 {
1468 rtx result, note;
1469 if (dump_file && (dump_flags & TDF_CSELIB))
1470 {
1471 print_inline_rtx (dump_file, p->loc, 0);
1472 fprintf (dump_file, "\n");
1473 }
1474 if (GET_CODE (p->loc) == LO_SUM
1475 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1476 && p->setting_insn
1477 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1478 && XEXP (note, 0) == XEXP (p->loc, 1))
1479 return XEXP (p->loc, 1);
1480 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1481 if (result)
1482 return result;
1483 }
1484
1485 }
1486
1487 if (regno != UINT_MAX)
1488 {
1489 rtx result;
1490 if (dump_file && (dump_flags & TDF_CSELIB))
1491 fprintf (dump_file, "r%d\n", regno);
1492
1493 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1494 if (result)
1495 return result;
1496 }
1497
1498 if (dump_file && (dump_flags & TDF_CSELIB))
1499 {
1500 if (reg_result)
1501 {
1502 print_inline_rtx (dump_file, reg_result, 0);
1503 fprintf (dump_file, "\n");
1504 }
1505 else
1506 fprintf (dump_file, "NULL\n");
1507 }
1508 return reg_result;
1509 }
1510
1511
1512 /* Forward substitute and expand an expression out to its roots.
1513    This is the opposite of common subexpression elimination.  Because
1514    local value numbering is such a weak optimization, the expanded
1515    expression is pretty much unique (not from a pointer-equality point
1516    of view but from a tree shape point of view).
1517
1518 This function returns NULL if the expansion fails. The expansion
1519 will fail if there is no value number for one of the operands or if
1520 one of the operands has been overwritten between the current insn
1521 and the beginning of the basic block. For instance x has no
1522 expansion in:
1523
1524 r1 <- r1 + 3
1525 x <- r1 + 8
1526
1527 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1528 It is clear on return. */
1529
1530 rtx
1531 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1532 {
1533 struct expand_value_data evd;
1534
1535 evd.regs_active = regs_active;
1536 evd.callback = NULL;
1537 evd.callback_arg = NULL;
1538 evd.dummy = false;
1539
1540 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1541 }
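/* A minimal caller sketch (illustrative only, not part of cselib): a
   pass holding an rtx X that cselib has already seen could try

     bitmap scratch = BITMAP_ALLOC (NULL);
     rtx expanded = cselib_expand_value_rtx (x, scratch, 5);
     BITMAP_FREE (scratch);

   where 5 is an arbitrary depth limit.  A NULL result means the
   expansion failed as described above; the scratch bitmap must be
   empty on entry and is left clear on return.  */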
1542
1543 /* Same as cselib_expand_value_rtx, but using a callback to try to
1544 resolve some expressions. The CB function should return ORIG if it
1545 can't or does not want to deal with a certain RTX. Any other
1546 return value, including NULL, will be used as the expansion for
1547 VALUE, without any further changes. */
1548
1549 rtx
1550 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1551 cselib_expand_callback cb, void *data)
1552 {
1553 struct expand_value_data evd;
1554
1555 evd.regs_active = regs_active;
1556 evd.callback = cb;
1557 evd.callback_arg = data;
1558 evd.dummy = false;
1559
1560 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1561 }
1562
1563 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1564 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1565 would return NULL or non-NULL, without allocating new rtx. */
1566
1567 bool
1568 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1569 cselib_expand_callback cb, void *data)
1570 {
1571 struct expand_value_data evd;
1572
1573 evd.regs_active = regs_active;
1574 evd.callback = cb;
1575 evd.callback_arg = data;
1576 evd.dummy = true;
1577
1578 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1579 }
1580
1581 /* Internal implementation of cselib_expand_value_rtx and
1582 cselib_expand_value_rtx_cb. */
1583
1584 static rtx
1585 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1586 int max_depth)
1587 {
1588 rtx copy, scopy;
1589 int i, j;
1590 RTX_CODE code;
1591 const char *format_ptr;
1592 machine_mode mode;
1593
1594 code = GET_CODE (orig);
1595
1596   /* For the context of dse, if we end up expanding into a huge tree, we
1597 will not have a useful address, so we might as well just give up
1598 quickly. */
1599 if (max_depth <= 0)
1600 return NULL;
1601
1602 switch (code)
1603 {
1604 case REG:
1605 {
1606 struct elt_list *l = REG_VALUES (REGNO (orig));
1607
1608 if (l && l->elt == NULL)
1609 l = l->next;
1610 for (; l; l = l->next)
1611 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1612 {
1613 rtx result;
1614 unsigned regno = REGNO (orig);
1615
1616            /* The only thing that we are not willing to do (this
1617               is a requirement of dse, and if other potential uses
1618               need this function we should add a parm to control
1619               it) is that we will not substitute the
1620               STACK_POINTER_REGNUM, FRAME_POINTER or the
1621               HARD_FRAME_POINTER.
1622
1623               These expansions confuse the code that notices that
1624               stores into the frame go dead at the end of the
1625               function and that the frame is not affected by calls
1626               to subroutines.  If you allow the
1627               STACK_POINTER_REGNUM substitution, then dse will
1628               think that parameter pushing also goes dead, which is
1629               wrong.  If you allow the FRAME_POINTER or the
1630               HARD_FRAME_POINTER then you lose the opportunity to
1631               make the frame assumptions.  */
1632 if (regno == STACK_POINTER_REGNUM
1633 || regno == FRAME_POINTER_REGNUM
1634 || regno == HARD_FRAME_POINTER_REGNUM
1635 || regno == cfa_base_preserved_regno)
1636 return orig;
1637
1638 bitmap_set_bit (evd->regs_active, regno);
1639
1640 if (dump_file && (dump_flags & TDF_CSELIB))
1641 fprintf (dump_file, "expanding: r%d into: ", regno);
1642
1643 result = expand_loc (l->elt->locs, evd, max_depth);
1644 bitmap_clear_bit (evd->regs_active, regno);
1645
1646 if (result)
1647 return result;
1648 else
1649 return orig;
1650 }
1651 }
1652
1653 CASE_CONST_ANY:
1654 case SYMBOL_REF:
1655 case CODE_LABEL:
1656 case PC:
1657 case CC0:
1658 case SCRATCH:
1659 /* SCRATCH must be shared because they represent distinct values. */
1660 return orig;
1661 case CLOBBER:
1662 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1663 return orig;
1664 break;
1665
1666 case CONST:
1667 if (shared_const_p (orig))
1668 return orig;
1669 break;
1670
1671 case SUBREG:
1672 {
1673 rtx subreg;
1674
1675 if (evd->callback)
1676 {
1677 subreg = evd->callback (orig, evd->regs_active, max_depth,
1678 evd->callback_arg);
1679 if (subreg != orig)
1680 return subreg;
1681 }
1682
1683 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1684 max_depth - 1);
1685 if (!subreg)
1686 return NULL;
1687 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1688 GET_MODE (SUBREG_REG (orig)),
1689 SUBREG_BYTE (orig));
1690 if (scopy == NULL
1691 || (GET_CODE (scopy) == SUBREG
1692 && !REG_P (SUBREG_REG (scopy))
1693 && !MEM_P (SUBREG_REG (scopy))))
1694 return NULL;
1695
1696 return scopy;
1697 }
1698
1699 case VALUE:
1700 {
1701 rtx result;
1702
1703 if (dump_file && (dump_flags & TDF_CSELIB))
1704 {
1705 fputs ("\nexpanding ", dump_file);
1706 print_rtl_single (dump_file, orig);
1707 fputs (" into...", dump_file);
1708 }
1709
1710 if (evd->callback)
1711 {
1712 result = evd->callback (orig, evd->regs_active, max_depth,
1713 evd->callback_arg);
1714
1715 if (result != orig)
1716 return result;
1717 }
1718
1719 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1720 return result;
1721 }
1722
1723 case DEBUG_EXPR:
1724 if (evd->callback)
1725 return evd->callback (orig, evd->regs_active, max_depth,
1726 evd->callback_arg);
1727 return orig;
1728
1729 default:
1730 break;
1731 }
1732
1733 /* Copy the various flags, fields, and other information. We assume
1734 that all fields need copying, and then clear the fields that should
1735 not be copied. That is the sensible default behavior, and forces
1736 us to explicitly document why we are *not* copying a flag. */
1737 if (evd->dummy)
1738 copy = NULL;
1739 else
1740 copy = shallow_copy_rtx (orig);
1741
1742 format_ptr = GET_RTX_FORMAT (code);
1743
1744 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1745 switch (*format_ptr++)
1746 {
1747 case 'e':
1748 if (XEXP (orig, i) != NULL)
1749 {
1750 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1751 max_depth - 1);
1752 if (!result)
1753 return NULL;
1754 if (copy)
1755 XEXP (copy, i) = result;
1756 }
1757 break;
1758
1759 case 'E':
1760 case 'V':
1761 if (XVEC (orig, i) != NULL)
1762 {
1763 if (copy)
1764 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1765 for (j = 0; j < XVECLEN (orig, i); j++)
1766 {
1767 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1768 evd, max_depth - 1);
1769 if (!result)
1770 return NULL;
1771 if (copy)
1772 XVECEXP (copy, i, j) = result;
1773 }
1774 }
1775 break;
1776
1777 case 't':
1778 case 'w':
1779 case 'i':
1780 case 's':
1781 case 'S':
1782 case 'T':
1783 case 'u':
1784 case 'B':
1785 case '0':
1786 /* These are left unchanged. */
1787 break;
1788
1789 default:
1790 gcc_unreachable ();
1791 }
1792
1793 if (evd->dummy)
1794 return orig;
1795
1796 mode = GET_MODE (copy);
1797 /* If an operand has been simplified into CONST_INT, which doesn't
1798 have a mode and the mode isn't derivable from whole rtx's mode,
1799 try simplify_*_operation first with mode from original's operand
1800 and as a fallback wrap CONST_INT into gen_rtx_CONST. */
1801 scopy = copy;
1802 switch (GET_RTX_CLASS (code))
1803 {
1804 case RTX_UNARY:
1805 if (CONST_INT_P (XEXP (copy, 0))
1806 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1807 {
1808 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1809 GET_MODE (XEXP (orig, 0)));
1810 if (scopy)
1811 return scopy;
1812 }
1813 break;
1814 case RTX_COMM_ARITH:
1815 case RTX_BIN_ARITH:
1816 /* These expressions can derive operand modes from the whole rtx's mode. */
1817 break;
1818 case RTX_TERNARY:
1819 case RTX_BITFIELD_OPS:
1820 if (CONST_INT_P (XEXP (copy, 0))
1821 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1822 {
1823 scopy = simplify_ternary_operation (code, mode,
1824 GET_MODE (XEXP (orig, 0)),
1825 XEXP (copy, 0), XEXP (copy, 1),
1826 XEXP (copy, 2));
1827 if (scopy)
1828 return scopy;
1829 }
1830 break;
1831 case RTX_COMPARE:
1832 case RTX_COMM_COMPARE:
1833 if (CONST_INT_P (XEXP (copy, 0))
1834 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1835 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1836 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1837 {
1838 scopy = simplify_relational_operation (code, mode,
1839 (GET_MODE (XEXP (orig, 0))
1840 != VOIDmode)
1841 ? GET_MODE (XEXP (orig, 0))
1842 : GET_MODE (XEXP (orig, 1)),
1843 XEXP (copy, 0),
1844 XEXP (copy, 1));
1845 if (scopy)
1846 return scopy;
1847 }
1848 break;
1849 default:
1850 break;
1851 }
1852 scopy = simplify_rtx (copy);
1853 if (scopy)
1854 return scopy;
1855 return copy;
1856 }
1857
1858 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1859 with VALUE expressions. This way, it becomes independent of changes
1860 to registers and memory.
1861 X isn't actually modified; if modifications are needed, new rtl is
1862 allocated. However, the return value can share rtl with X.
1863 If X is within a MEM, MEMMODE must be the mode of the MEM. */
1864
1865 rtx
1866 cselib_subst_to_values (rtx x, machine_mode memmode)
1867 {
1868 enum rtx_code code = GET_CODE (x);
1869 const char *fmt = GET_RTX_FORMAT (code);
1870 cselib_val *e;
1871 struct elt_list *l;
1872 rtx copy = x;
1873 int i;
1874
1875 switch (code)
1876 {
1877 case REG:
1878 l = REG_VALUES (REGNO (x));
1879 if (l && l->elt == NULL)
1880 l = l->next;
1881 for (; l; l = l->next)
1882 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1883 return l->elt->val_rtx;
1884
1885 gcc_unreachable ();
1886
1887 case MEM:
1888 e = cselib_lookup_mem (x, 0);
1889 /* This used to happen for autoincrements, but we deal with them
1890 properly now. Remove the if stmt for the next release. */
1891 if (! e)
1892 {
1893 /* Assign a value that doesn't match any other. */
1894 e = new_cselib_val (next_uid, GET_MODE (x), x);
1895 }
1896 return e->val_rtx;
1897
1898 case ENTRY_VALUE:
1899 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1900 if (! e)
1901 break;
1902 return e->val_rtx;
1903
1904 CASE_CONST_ANY:
1905 return x;
1906
1907 case PRE_DEC:
1908 case PRE_INC:
1909 gcc_assert (memmode != VOIDmode);
1910 i = GET_MODE_SIZE (memmode);
1911 if (code == PRE_DEC)
1912 i = -i;
1913 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1914 XEXP (x, 0), i),
1915 memmode);
1916
1917 case PRE_MODIFY:
1918 gcc_assert (memmode != VOIDmode);
1919 return cselib_subst_to_values (XEXP (x, 1), memmode);
1920
1921 case POST_DEC:
1922 case POST_INC:
1923 case POST_MODIFY:
1924 gcc_assert (memmode != VOIDmode);
1925 return cselib_subst_to_values (XEXP (x, 0), memmode);
1926
1927 default:
1928 break;
1929 }
1930
1931 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1932 {
1933 if (fmt[i] == 'e')
1934 {
1935 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1936
1937 if (t != XEXP (x, i))
1938 {
1939 if (x == copy)
1940 copy = shallow_copy_rtx (x);
1941 XEXP (copy, i) = t;
1942 }
1943 }
1944 else if (fmt[i] == 'E')
1945 {
1946 int j;
1947
1948 for (j = 0; j < XVECLEN (x, i); j++)
1949 {
1950 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1951
1952 if (t != XVECEXP (x, i, j))
1953 {
1954 if (XVEC (x, i) == XVEC (copy, i))
1955 {
1956 if (x == copy)
1957 copy = shallow_copy_rtx (x);
1958 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1959 }
1960 XVECEXP (copy, i, j) = t;
1961 }
1962 }
1963 }
1964 }
1965
1966 return copy;
1967 }
1968
1969 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1970
1971 rtx
1972 cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
1973 {
1974 rtx ret;
1975 gcc_assert (!cselib_current_insn);
1976 cselib_current_insn = insn;
1977 ret = cselib_subst_to_values (x, memmode);
1978 cselib_current_insn = NULL;
1979 return ret;
1980 }
1981
1982 /* Look up the rtl expression X in our tables and return the value it
1983 has. If CREATE is zero, we return NULL if we don't know the value.
1984 Otherwise, we create a new one if possible, using mode MODE if X
1985 doesn't have a mode (i.e. because it's a constant). When X is part
1986 of an address, MEMMODE should be the mode of the enclosing MEM if
1987 we're tracking autoinc expressions. */
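/* A minimal usage sketch for the public cselib_lookup wrapper defined
   below, assuming X is some rtx a client pass is inspecting:

     cselib_val *v = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);

   A non-NULL V means the value of X is already known, with v->val_rtx
   identifying it; passing 1 for CREATE instead would enter X into the
   table when it is not found.  */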
1988
1989 static cselib_val *
1990 cselib_lookup_1 (rtx x, machine_mode mode,
1991 int create, machine_mode memmode)
1992 {
1993 cselib_val **slot;
1994 cselib_val *e;
1995 unsigned int hashval;
1996
1997 if (GET_MODE (x) != VOIDmode)
1998 mode = GET_MODE (x);
1999
2000 if (GET_CODE (x) == VALUE)
2001 return CSELIB_VAL_PTR (x);
2002
2003 if (REG_P (x))
2004 {
2005 struct elt_list *l;
2006 unsigned int i = REGNO (x);
2007
2008 l = REG_VALUES (i);
2009 if (l && l->elt == NULL)
2010 l = l->next;
2011 for (; l; l = l->next)
2012 if (mode == GET_MODE (l->elt->val_rtx))
2013 {
2014 promote_debug_loc (l->elt->locs);
2015 return l->elt;
2016 }
2017
2018 if (! create)
2019 return 0;
2020
2021 if (i < FIRST_PSEUDO_REGISTER)
2022 {
2023 unsigned int n = hard_regno_nregs[i][mode];
2024
2025 if (n > max_value_regs)
2026 max_value_regs = n;
2027 }
2028
2029 e = new_cselib_val (next_uid, GET_MODE (x), x);
2030 new_elt_loc_list (e, x);
2031 if (REG_VALUES (i) == 0)
2032 {
2033 /* Maintain the invariant that the first entry of
2034 REG_VALUES, if present, must be the value used to set the
2035 register, or NULL. */
2036 used_regs[n_used_regs++] = i;
2037 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2038 }
2039 else if (cselib_preserve_constants
2040 && GET_MODE_CLASS (mode) == MODE_INT)
2041 {
2042 /* During var-tracking, try harder to find equivalences
2043 for SUBREGs. If a setter sets, say, a DImode register
2044 and a user uses that register only in SImode, add a lowpart
2045 subreg location. */
2046 struct elt_list *lwider = NULL;
2047 l = REG_VALUES (i);
2048 if (l && l->elt == NULL)
2049 l = l->next;
2050 for (; l; l = l->next)
2051 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2052 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2053 > GET_MODE_SIZE (mode)
2054 && (lwider == NULL
2055 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2056 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2057 {
2058 struct elt_loc_list *el;
2059 if (i < FIRST_PSEUDO_REGISTER
2060 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2061 continue;
2062 for (el = l->elt->locs; el; el = el->next)
2063 if (!REG_P (el->loc))
2064 break;
2065 if (el)
2066 lwider = l;
2067 }
2068 if (lwider)
2069 {
2070 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2071 GET_MODE (lwider->elt->val_rtx));
2072 if (sub)
2073 new_elt_loc_list (e, sub);
2074 }
2075 }
2076 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2077 slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2078 *slot = e;
2079 return e;
2080 }
2081
2082 if (MEM_P (x))
2083 return cselib_lookup_mem (x, create);
2084
2085 hashval = cselib_hash_rtx (x, create, memmode);
2086 /* Can't even create if hashing is not possible. */
2087 if (! hashval)
2088 return 0;
2089
2090 slot = cselib_find_slot (mode, x, hashval,
2091 create ? INSERT : NO_INSERT, memmode);
2092 if (slot == 0)
2093 return 0;
2094
2095 e = (cselib_val *) *slot;
2096 if (e)
2097 return e;
2098
2099 e = new_cselib_val (hashval, mode, x);
2100
2101 /* We have to fill the slot before calling cselib_subst_to_values:
2102 the hash table is inconsistent until we do so, and
2103 cselib_subst_to_values will need to do lookups. */
2104 *slot = e;
2105 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2106 return e;
2107 }
2108
2109 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2110
2111 cselib_val *
2112 cselib_lookup_from_insn (rtx x, machine_mode mode,
2113 int create, machine_mode memmode, rtx_insn *insn)
2114 {
2115 cselib_val *ret;
2116
2117 gcc_assert (!cselib_current_insn);
2118 cselib_current_insn = insn;
2119
2120 ret = cselib_lookup (x, mode, create, memmode);
2121
2122 cselib_current_insn = NULL;
2123
2124 return ret;
2125 }
2126
2127 /* Wrapper for cselib_lookup_1, that logs the lookup result and
2128 maintains invariants related with debug insns. */
2129
2130 cselib_val *
2131 cselib_lookup (rtx x, machine_mode mode,
2132 int create, machine_mode memmode)
2133 {
2134 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2135
2136 /* ??? Should we return NULL if we're not to create an entry, the
2137 found loc is a debug loc and cselib_current_insn is not DEBUG?
2138 If so, we should also avoid converting val to non-DEBUG; probably
2139 easiest done by setting cselib_current_insn to NULL before the call
2140 above. */
2141
2142 if (dump_file && (dump_flags & TDF_CSELIB))
2143 {
2144 fputs ("cselib lookup ", dump_file);
2145 print_inline_rtx (dump_file, x, 2);
2146 fprintf (dump_file, " => %u:%u\n",
2147 ret ? ret->uid : 0,
2148 ret ? ret->hash : 0);
2149 }
2150
2151 return ret;
2152 }
2153
2154 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2155 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2156 is used to determine how many hard registers are being changed. If MODE
2157 is VOIDmode, then only REGNO is being changed; this is used when
2158 invalidating call clobbered registers across a call. */
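/* For example, on a target where DImode needs two word-sized hard
   registers, invalidating hard register 3 in DImode discards the values
   recorded for registers 3 and 4, and also any value recorded in a
   lower-numbered register whose mode is wide enough to reach register 3.  */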
2159
2160 static void
2161 cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2162 {
2163 unsigned int endregno;
2164 unsigned int i;
2165
2166 /* If we see pseudos after reload, something is _wrong_. */
2167 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2168 || reg_renumber[regno] < 0);
2169
2170 /* Determine the range of registers that must be invalidated. For
2171 pseudos, only REGNO is affected. For hard regs, we must take MODE
2172 into account, and we must also invalidate lower register numbers
2173 if they contain values that overlap REGNO. */
2174 if (regno < FIRST_PSEUDO_REGISTER)
2175 {
2176 gcc_assert (mode != VOIDmode);
2177
2178 if (regno < max_value_regs)
2179 i = 0;
2180 else
2181 i = regno - max_value_regs;
2182
2183 endregno = end_hard_regno (mode, regno);
2184 }
2185 else
2186 {
2187 i = regno;
2188 endregno = regno + 1;
2189 }
2190
2191 for (; i < endregno; i++)
2192 {
2193 struct elt_list **l = &REG_VALUES (i);
2194
2195 /* Go through all known values for this reg; if it overlaps the range
2196 we're invalidating, remove the value. */
2197 while (*l)
2198 {
2199 cselib_val *v = (*l)->elt;
2200 bool had_locs;
2201 rtx_insn *setting_insn;
2202 struct elt_loc_list **p;
2203 unsigned int this_last = i;
2204
2205 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2206 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2207
2208 if (this_last < regno || v == NULL
2209 || (v == cfa_base_preserved_val
2210 && i == cfa_base_preserved_regno))
2211 {
2212 l = &(*l)->next;
2213 continue;
2214 }
2215
2216 /* We have an overlap. */
2217 if (*l == REG_VALUES (i))
2218 {
2219 /* Maintain the invariant that the first entry of
2220 REG_VALUES, if present, must be the value used to set
2221 the register, or NULL. This is also nice because
2222 then we won't push the same regno onto used_regs
2223 multiple times. */
2224 (*l)->elt = NULL;
2225 l = &(*l)->next;
2226 }
2227 else
2228 unchain_one_elt_list (l);
2229
2230 v = canonical_cselib_val (v);
2231
2232 had_locs = v->locs != NULL;
2233 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2234
2235 /* Now, we clear the mapping from value to reg. It must exist, so
2236 this code will crash intentionally if it doesn't. */
2237 for (p = &v->locs; ; p = &(*p)->next)
2238 {
2239 rtx x = (*p)->loc;
2240
2241 if (REG_P (x) && REGNO (x) == i)
2242 {
2243 unchain_one_elt_loc_list (p);
2244 break;
2245 }
2246 }
2247
2248 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2249 {
2250 if (setting_insn && DEBUG_INSN_P (setting_insn))
2251 n_useless_debug_values++;
2252 else
2253 n_useless_values++;
2254 }
2255 }
2256 }
2257 }
2258 \f
2259 /* Invalidate any locations in the table which are changed because of a
2260 store to MEM_RTX. If this is called because of a non-const call
2261 instruction, MEM_RTX is callmem, (mem:BLK (scratch)); see cselib_init. */
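/* For example, a non-const call invalidates through callmem,
   (mem:BLK (scratch)), which conflicts with every tracked MEM; an
   ordinary store removes only the MEM locations that
   canon_anti_dependence cannot prove disjoint from it.  */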
2262
2263 static void
2264 cselib_invalidate_mem (rtx mem_rtx)
2265 {
2266 cselib_val **vp, *v, *next;
2267 int num_mems = 0;
2268 rtx mem_addr;
2269
2270 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2271 mem_rtx = canon_rtx (mem_rtx);
2272
2273 vp = &first_containing_mem;
2274 for (v = *vp; v != &dummy_val; v = next)
2275 {
2276 bool has_mem = false;
2277 struct elt_loc_list **p = &v->locs;
2278 bool had_locs = v->locs != NULL;
2279 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2280
2281 while (*p)
2282 {
2283 rtx x = (*p)->loc;
2284 cselib_val *addr;
2285 struct elt_list **mem_chain;
2286
2287 /* MEMs may occur in locations only at the top level; below
2288 that every MEM or REG is substituted by its VALUE. */
2289 if (!MEM_P (x))
2290 {
2291 p = &(*p)->next;
2292 continue;
2293 }
2294 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2295 && ! canon_anti_dependence (x, false, mem_rtx,
2296 GET_MODE (mem_rtx), mem_addr))
2297 {
2298 has_mem = true;
2299 num_mems++;
2300 p = &(*p)->next;
2301 continue;
2302 }
2303
2304 /* This one overlaps. */
2305 /* We must have a mapping from this MEM's address to the
2306 value (E). Remove that, too. */
2307 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2308 addr = canonical_cselib_val (addr);
2309 gcc_checking_assert (v == canonical_cselib_val (v));
2310 mem_chain = &addr->addr_list;
2311 for (;;)
2312 {
2313 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2314
2315 if (canon == v)
2316 {
2317 unchain_one_elt_list (mem_chain);
2318 break;
2319 }
2320
2321 /* Record canonicalized elt. */
2322 (*mem_chain)->elt = canon;
2323
2324 mem_chain = &(*mem_chain)->next;
2325 }
2326
2327 unchain_one_elt_loc_list (p);
2328 }
2329
2330 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2331 {
2332 if (setting_insn && DEBUG_INSN_P (setting_insn))
2333 n_useless_debug_values++;
2334 else
2335 n_useless_values++;
2336 }
2337
2338 next = v->next_containing_mem;
2339 if (has_mem)
2340 {
2341 *vp = v;
2342 vp = &(*vp)->next_containing_mem;
2343 }
2344 else
2345 v->next_containing_mem = NULL;
2346 }
2347 *vp = &dummy_val;
2348 }
2349
2350 /* Invalidate DEST, which is being assigned to or clobbered. */
2351
2352 void
2353 cselib_invalidate_rtx (rtx dest)
2354 {
2355 while (GET_CODE (dest) == SUBREG
2356 || GET_CODE (dest) == ZERO_EXTRACT
2357 || GET_CODE (dest) == STRICT_LOW_PART)
2358 dest = XEXP (dest, 0);
2359
2360 if (REG_P (dest))
2361 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2362 else if (MEM_P (dest))
2363 cselib_invalidate_mem (dest);
2364 }
2365
2366 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2367
2368 static void
2369 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2370 void *data ATTRIBUTE_UNUSED)
2371 {
2372 cselib_invalidate_rtx (dest);
2373 }
2374
2375 /* Record the result of a SET instruction. DEST is being set; the source
2376 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2377 describes its address. */
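/* For example, after recording (set (reg:SI 10) (reg:SI 11)), the first
   entry of REG_VALUES (10) holds the value of (reg:SI 11), and that value
   gains (reg:SI 10) as an additional location.  */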
2378
2379 static void
2380 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2381 {
2382 if (src_elt == 0 || side_effects_p (dest))
2383 return;
2384
2385 if (REG_P (dest))
2386 {
2387 unsigned int dreg = REGNO (dest);
2388 if (dreg < FIRST_PSEUDO_REGISTER)
2389 {
2390 unsigned int n = REG_NREGS (dest);
2391
2392 if (n > max_value_regs)
2393 max_value_regs = n;
2394 }
2395
2396 if (REG_VALUES (dreg) == 0)
2397 {
2398 used_regs[n_used_regs++] = dreg;
2399 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2400 }
2401 else
2402 {
2403 /* The register should have been invalidated. */
2404 gcc_assert (REG_VALUES (dreg)->elt == 0);
2405 REG_VALUES (dreg)->elt = src_elt;
2406 }
2407
2408 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2409 n_useless_values--;
2410 new_elt_loc_list (src_elt, dest);
2411 }
2412 else if (MEM_P (dest) && dest_addr_elt != 0
2413 && cselib_record_memory)
2414 {
2415 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2416 n_useless_values--;
2417 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2418 }
2419 }
2420
2421 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2422
2423 void
2424 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2425 {
2426 cselib_val *nelt;
2427 rtx_insn *save_cselib_current_insn = cselib_current_insn;
2428
2429 gcc_checking_assert (elt);
2430 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2431 gcc_checking_assert (!side_effects_p (x));
2432
2433 cselib_current_insn = insn;
2434
2435 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2436
2437 if (nelt != elt)
2438 {
2439 cselib_any_perm_equivs = true;
2440
2441 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2442 cselib_preserve_value (nelt);
2443
2444 new_elt_loc_list (nelt, elt->val_rtx);
2445 }
2446
2447 cselib_current_insn = save_cselib_current_insn;
2448 }
2449
2450 /* Return TRUE if any permanent equivalences have been recorded since
2451 the table was last initialized. */
2452 bool
2453 cselib_have_permanent_equivalences (void)
2454 {
2455 return cselib_any_perm_equivs;
2456 }
2457
2458 /* There is no good way to determine how many elements there can be
2459 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2460 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2461
2462 struct cselib_record_autoinc_data
2463 {
2464 struct cselib_set *sets;
2465 int n_sets;
2466 };
2467
2468 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2469 autoinc RTXs: SRC plus SRCOFF (if SRCOFF is non-NULL) is stored in DEST. */
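/* For example, on a target where SImode is 4 bytes, an address like
   (post_inc (reg R)) inside an SImode MEM typically reaches this callback
   with DEST = SRC = (reg R) and SRCOFF = (const_int 4), so the recorded
   set is R = R + 4.  */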
2470
2471 static int
2472 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2473 rtx dest, rtx src, rtx srcoff, void *arg)
2474 {
2475 struct cselib_record_autoinc_data *data;
2476 data = (struct cselib_record_autoinc_data *)arg;
2477
2478 data->sets[data->n_sets].dest = dest;
2479
2480 if (srcoff)
2481 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2482 else
2483 data->sets[data->n_sets].src = src;
2484
2485 data->n_sets++;
2486
2487 return 0;
2488 }
2489
2490 /* Record the effects of any sets and autoincs in INSN. */
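/* For example, for an insn whose pattern is

     (parallel [(set (reg:SI 10) (plus:SI (reg:SI 11) (const_int 1)))
                (clobber (reg:CC 17))])

   the loop below records a single entry with dest = (reg:SI 10) and
   src = (plus:SI (reg:SI 11) (const_int 1)); the CLOBBER contributes
   nothing here and is handled by the note_stores invalidation further
   down.  Register number 17 is just a placeholder.  */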
2491 static void
2492 cselib_record_sets (rtx_insn *insn)
2493 {
2494 int n_sets = 0;
2495 int i;
2496 struct cselib_set sets[MAX_SETS];
2497 rtx body = PATTERN (insn);
2498 rtx cond = 0;
2499 int n_sets_before_autoinc;
2500 struct cselib_record_autoinc_data data;
2501
2502 body = PATTERN (insn);
2503 if (GET_CODE (body) == COND_EXEC)
2504 {
2505 cond = COND_EXEC_TEST (body);
2506 body = COND_EXEC_CODE (body);
2507 }
2508
2509 /* Find all sets. */
2510 if (GET_CODE (body) == SET)
2511 {
2512 sets[0].src = SET_SRC (body);
2513 sets[0].dest = SET_DEST (body);
2514 n_sets = 1;
2515 }
2516 else if (GET_CODE (body) == PARALLEL)
2517 {
2518 /* Look through the PARALLEL and record the values being
2519 set, if possible. Also handle any CLOBBERs. */
2520 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2521 {
2522 rtx x = XVECEXP (body, 0, i);
2523
2524 if (GET_CODE (x) == SET)
2525 {
2526 sets[n_sets].src = SET_SRC (x);
2527 sets[n_sets].dest = SET_DEST (x);
2528 n_sets++;
2529 }
2530 }
2531 }
2532
2533 if (n_sets == 1
2534 && MEM_P (sets[0].src)
2535 && !cselib_record_memory
2536 && MEM_READONLY_P (sets[0].src))
2537 {
2538 rtx note = find_reg_equal_equiv_note (insn);
2539
2540 if (note && CONSTANT_P (XEXP (note, 0)))
2541 sets[0].src = XEXP (note, 0);
2542 }
2543
2544 data.sets = sets;
2545 data.n_sets = n_sets_before_autoinc = n_sets;
2546 for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
2547 n_sets = data.n_sets;
2548
2549 /* Look up the values that are read. Do this before invalidating the
2550 locations that are written. */
2551 for (i = 0; i < n_sets; i++)
2552 {
2553 rtx dest = sets[i].dest;
2554
2555 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2556 the low part after invalidating any knowledge about larger modes. */
2557 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2558 sets[i].dest = dest = XEXP (dest, 0);
2559
2560 /* We don't know how to record anything but REG or MEM. */
2561 if (REG_P (dest)
2562 || (MEM_P (dest) && cselib_record_memory))
2563 {
2564 rtx src = sets[i].src;
2565 if (cond)
2566 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2567 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2568 if (MEM_P (dest))
2569 {
2570 machine_mode address_mode = get_address_mode (dest);
2571
2572 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2573 address_mode, 1,
2574 GET_MODE (dest));
2575 }
2576 else
2577 sets[i].dest_addr_elt = 0;
2578 }
2579 }
2580
2581 if (cselib_record_sets_hook)
2582 cselib_record_sets_hook (insn, sets, n_sets);
2583
2584 /* Invalidate all locations written by this insn. Note that the elts we
2585 looked up in the previous loop aren't affected, just some of their
2586 locations may go away. */
2587 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2588
2589 for (i = n_sets_before_autoinc; i < n_sets; i++)
2590 cselib_invalidate_rtx (sets[i].dest);
2591
2592 /* If this is an asm, look for duplicate sets. This can happen when the
2593 user uses the same value as an output multiple times. This is valid
2594 if the outputs are not actually used thereafter. Treat this case as
2595 if the value isn't actually set. We do this by smashing the destination
2596 to pc_rtx, so that we won't record the value later. */
2597 if (n_sets >= 2 && asm_noperands (body) >= 0)
2598 {
2599 for (i = 0; i < n_sets; i++)
2600 {
2601 rtx dest = sets[i].dest;
2602 if (REG_P (dest) || MEM_P (dest))
2603 {
2604 int j;
2605 for (j = i + 1; j < n_sets; j++)
2606 if (rtx_equal_p (dest, sets[j].dest))
2607 {
2608 sets[i].dest = pc_rtx;
2609 sets[j].dest = pc_rtx;
2610 }
2611 }
2612 }
2613 }
2614
2615 /* Now enter the equivalences in our tables. */
2616 for (i = 0; i < n_sets; i++)
2617 {
2618 rtx dest = sets[i].dest;
2619 if (REG_P (dest)
2620 || (MEM_P (dest) && cselib_record_memory))
2621 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2622 }
2623 }
2624
2625 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
2626
2627 bool
2628 fp_setter_insn (rtx_insn *insn)
2629 {
2630 rtx expr, pat = NULL_RTX;
2631
2632 if (!RTX_FRAME_RELATED_P (insn))
2633 return false;
2634
2635 expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
2636 if (expr)
2637 pat = XEXP (expr, 0);
2638 if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
2639 return false;
2640
2641 /* Don't return true for frame pointer restores in the epilogue. */
2642 if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
2643 return false;
2644 return true;
2645 }
2646
2647 /* Record the effects of INSN. */
2648
2649 void
2650 cselib_process_insn (rtx_insn *insn)
2651 {
2652 int i;
2653 rtx x;
2654
2655 cselib_current_insn = insn;
2656
2657 /* Forget everything at a CODE_LABEL or a setjmp. */
2658 if ((LABEL_P (insn)
2659 || (CALL_P (insn)
2660 && find_reg_note (insn, REG_SETJMP, NULL)))
2661 && !cselib_preserve_constants)
2662 {
2663 cselib_reset_table (next_uid);
2664 cselib_current_insn = NULL;
2665 return;
2666 }
2667
2668 if (! INSN_P (insn))
2669 {
2670 cselib_current_insn = NULL;
2671 return;
2672 }
2673
2674 /* If this is a call instruction, forget anything stored in a
2675 call clobbered register, or, if this is not a const call, in
2676 memory. */
2677 if (CALL_P (insn))
2678 {
2679 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2680 if (call_used_regs[i]
2681 || (REG_VALUES (i) && REG_VALUES (i)->elt
2682 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2683 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2684 cselib_invalidate_regno (i, reg_raw_mode[i]);
2685
2686 /* Since it is not clear how cselib is going to be used, be
2687 conservative here and treat looping pure or const functions
2688 as if they were regular functions. */
2689 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2690 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2691 cselib_invalidate_mem (callmem);
2692 }
2693
2694 cselib_record_sets (insn);
2695
2696 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2697 after we have processed the insn. */
2698 if (CALL_P (insn))
2699 {
2700 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2701 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2702 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2703 /* Flush everything on setjmp. */
2704 if (cselib_preserve_constants
2705 && find_reg_note (insn, REG_SETJMP, NULL))
2706 {
2707 cselib_preserve_only_values ();
2708 cselib_reset_table (next_uid);
2709 }
2710 }
2711
2712 /* On the insn that sets the hard frame pointer, if frame_pointer_needed,
2713 invalidate stack_pointer_rtx, so that sp-based and {,h}fp-based
2714 VALUEs are distinct. */
2715 if (reload_completed
2716 && frame_pointer_needed
2717 && fp_setter_insn (insn))
2718 cselib_invalidate_rtx (stack_pointer_rtx);
2719
2720 cselib_current_insn = NULL;
2721
2722 if (n_useless_values > MAX_USELESS_VALUES
2723 /* remove_useless_values is linear in the hash table size. Avoid
2724 quadratic behavior for very large hashtables with very few
2725 useless elements. */
2726 && ((unsigned int)n_useless_values
2727 > (cselib_hash_table->elements () - n_debug_values) / 4))
2728 remove_useless_values ();
2729 }
2730
2731 /* Initialize cselib for one pass. The caller must also call
2732 init_alias_analysis. */
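/* A minimal driver sketch, assuming a straightforward per-basic-block
   walk (the exact flags and walk order depend on the client pass):

     basic_block bb;
     rtx_insn *insn;

     cselib_init (CSELIB_RECORD_MEMORY);
     init_alias_analysis ();
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (insn, bb)
         if (INSN_P (insn))
           cselib_process_insn (insn);
     end_alias_analysis ();
     cselib_finish ();  */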
2733
2734 void
2735 cselib_init (int record_what)
2736 {
2737 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2738 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2739 cselib_any_perm_equivs = false;
2740
2741 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2742 see canon_true_dependence. This is only created once. */
2743 if (! callmem)
2744 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2745
2746 cselib_nregs = max_reg_num ();
2747
2748 /* We preserve reg_values across passes to avoid expensive clearing of the
2749 whole thing. Reallocate it, however, if it happens to be too large. */
2750 if (!reg_values || reg_values_size < cselib_nregs
2751 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2752 {
2753 free (reg_values);
2754 /* Some space for newly emitted instructions so we don't end up
2755 reallocating in between passes. */
2756 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2757 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2758 }
2759 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2760 n_used_regs = 0;
2761 cselib_hash_table = new hash_table<cselib_hasher> (31);
2762 if (cselib_preserve_constants)
2763 cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
2764 next_uid = 1;
2765 }
2766
2767 /* Called when the current user is done with cselib. */
2768
2769 void
2770 cselib_finish (void)
2771 {
2772 bool preserved = cselib_preserve_constants;
2773 cselib_discard_hook = NULL;
2774 cselib_preserve_constants = false;
2775 cselib_any_perm_equivs = false;
2776 cfa_base_preserved_val = NULL;
2777 cfa_base_preserved_regno = INVALID_REGNUM;
2778 elt_list::pool.release ();
2779 elt_loc_list::pool.release ();
2780 cselib_val::pool.release ();
2781 value_pool.release ();
2782 cselib_clear_table ();
2783 delete cselib_hash_table;
2784 cselib_hash_table = NULL;
2785 if (preserved)
2786 delete cselib_preserved_hash_table;
2787 cselib_preserved_hash_table = NULL;
2788 free (used_regs);
2789 used_regs = 0;
2790 n_useless_values = 0;
2791 n_useless_debug_values = 0;
2792 n_debug_values = 0;
2793 next_uid = 0;
2794 }
2795
2796 /* Dump the cselib_val *X to FILE *OUT. */
2797
2798 int
2799 dump_cselib_val (cselib_val **x, FILE *out)
2800 {
2801 cselib_val *v = *x;
2802 bool need_lf = true;
2803
2804 print_inline_rtx (out, v->val_rtx, 0);
2805
2806 if (v->locs)
2807 {
2808 struct elt_loc_list *l = v->locs;
2809 if (need_lf)
2810 {
2811 fputc ('\n', out);
2812 need_lf = false;
2813 }
2814 fputs (" locs:", out);
2815 do
2816 {
2817 if (l->setting_insn)
2818 fprintf (out, "\n from insn %i ",
2819 INSN_UID (l->setting_insn));
2820 else
2821 fprintf (out, "\n ");
2822 print_inline_rtx (out, l->loc, 4);
2823 }
2824 while ((l = l->next));
2825 fputc ('\n', out);
2826 }
2827 else
2828 {
2829 fputs (" no locs", out);
2830 need_lf = true;
2831 }
2832
2833 if (v->addr_list)
2834 {
2835 struct elt_list *e = v->addr_list;
2836 if (need_lf)
2837 {
2838 fputc ('\n', out);
2839 need_lf = false;
2840 }
2841 fputs (" addr list:", out);
2842 do
2843 {
2844 fputs ("\n ", out);
2845 print_inline_rtx (out, e->elt->val_rtx, 2);
2846 }
2847 while ((e = e->next));
2848 fputc ('\n', out);
2849 }
2850 else
2851 {
2852 fputs (" no addrs", out);
2853 need_lf = true;
2854 }
2855
2856 if (v->next_containing_mem == &dummy_val)
2857 fputs (" last mem\n", out);
2858 else if (v->next_containing_mem)
2859 {
2860 fputs (" next mem ", out);
2861 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2862 fputc ('\n', out);
2863 }
2864 else if (need_lf)
2865 fputc ('\n', out);
2866
2867 return 1;
2868 }
2869
2870 /* Dump to OUT everything in the CSELIB table. */
2871
2872 void
2873 dump_cselib_table (FILE *out)
2874 {
2875 fprintf (out, "cselib hash table:\n");
2876 cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
2877 fprintf (out, "cselib preserved hash table:\n");
2878 cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
2879 if (first_containing_mem != &dummy_val)
2880 {
2881 fputs ("first mem ", out);
2882 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2883 fputc ('\n', out);
2884 }
2885 fprintf (out, "next uid %i\n", next_uid);
2886 }
2887
2888 #include "gt-cselib.h"