1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "emit-rtl.h"
36 #include "diagnostic-core.h"
37 #include "output.h"
38 #include "ggc.h"
39 #include "hashtab.h"
40 #include "tree-pass.h"
41 #include "cselib.h"
42 #include "params.h"
43 #include "alloc-pool.h"
44 #include "target.h"
45 #include "bitmap.h"
46
47 /* A list of cselib_val structures. */
48 struct elt_list {
49 struct elt_list *next;
50 cselib_val *elt;
51 };
52
53 static bool cselib_record_memory;
54 static bool cselib_preserve_constants;
55 static bool cselib_any_perm_equivs;
56 static int entry_and_rtx_equal_p (const void *, const void *);
57 static hashval_t get_value_hash (const void *);
58 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
59 static void new_elt_loc_list (cselib_val *, rtx);
60 static void unchain_one_value (cselib_val *);
61 static void unchain_one_elt_list (struct elt_list **);
62 static void unchain_one_elt_loc_list (struct elt_loc_list **);
63 static int discard_useless_locs (void **, void *);
64 static int discard_useless_values (void **, void *);
65 static void remove_useless_values (void);
66 static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
67 static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
68 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
69 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
70 static cselib_val *cselib_lookup_mem (rtx, int);
71 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
72 static void cselib_invalidate_mem (rtx);
73 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
74 static void cselib_record_sets (rtx);
75
76 struct expand_value_data
77 {
78 bitmap regs_active;
79 cselib_expand_callback callback;
80 void *callback_arg;
81 bool dummy;
82 };
83
84 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
85
86 /* There are three ways in which cselib can look up an rtx:
87 - for a REG, the reg_values table (which is indexed by regno) is used
88 - for a MEM, we recursively look up its address and then follow the
89 addr_list of that value
90 - for everything else, we compute a hash value and go through the hash
91 table. Since different rtx's can still have the same hash value,
92 this involves walking the table entries for a given value and comparing
93 the locations of the entries with the rtx we are looking up. */
94
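/* Illustrative sketch (not part of the original file): how a client
   might exercise the three lookup paths described above, assuming
   cselib_init has been called and insns are being processed.  The
   helper name is hypothetical.  */
#if 0
static bool
exprs_have_same_value_p (rtx x, rtx y)
{
  /* REGs go through reg_values, MEMs recurse on their addresses, and
     everything else is hashed; NULL means the value is not known.  */
  cselib_val *vx = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
  cselib_val *vy = cselib_lookup (y, GET_MODE (y), 0, VOIDmode);
  return vx && vy
	 && canonical_cselib_val (vx) == canonical_cselib_val (vy);
}
#endif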
95 /* A table that enables us to look up elts by their value. */
96 static htab_t cselib_hash_table;
97
98 /* This is a global so we don't have to pass this through every function.
99 It is used in new_elt_loc_list to set SETTING_INSN. */
100 static rtx cselib_current_insn;
101
102 /* The unique id that the next create value will take. */
103 static unsigned int next_uid;
104
105 /* The number of registers we had when the varrays were last resized. */
106 static unsigned int cselib_nregs;
107
108 /* Count values without known locations, or with only locations that
109 wouldn't have been known except for debug insns. Whenever this
110 grows too big, we remove these useless values from the table.
111
112 Counting values with only debug values is a bit tricky. We don't
113 want to increment n_useless_values when we create a value for a
114 debug insn, for this would get n_useless_values out of sync, but we
115 want to increment it if all locs in the list that were ever referenced
116 in nondebug insns are removed from the list.
117
118 In the general case, once we do that, we'd have to stop accepting
119 nondebug expressions in the loc list, to avoid making equivalent
120 two values that, without debug insns, would have been kept as
121 separate values. However, because debug insns never introduce
122 equivalences themselves (no assignments), the only means for
123 growing loc lists is through nondebug assignments. If the locs
124 also happen to be referenced in debug insns, it will work just fine.
125
126 A consequence of this is that there's at most one debug-only loc in
127 each loc list. If we keep it in the first entry, testing whether
128 we have a debug-only loc list takes O(1).
129
130 Furthermore, since any additional entry in a loc list containing a
131 debug loc would have to come from an assignment (nondebug) that
132 references both the initial debug loc and the newly-equivalent loc,
133 the initial debug loc would be promoted to a nondebug loc, and the
134 loc list would not contain debug locs any more.
135
136 So the only case we have to be careful with in order to keep
137 n_useless_values in sync between debug and nondebug compilations is
138 to avoid incrementing n_useless_values when removing the single loc
139 from a value that turns out to not appear outside debug values. We
140 increment n_useless_debug_values instead, and leave such values
141 alone until, for other reasons, we garbage-collect useless
142 values. */
143 static int n_useless_values;
144 static int n_useless_debug_values;
145
146 /* Count values whose locs have been taken exclusively from debug
147 insns for the entire life of the value. */
148 static int n_debug_values;
149
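/* Illustrative sketch (not part of the original file): the O(1)
   debug-only test described above, relying on the invariant that the
   single debug-only loc, if any, is kept first in the list.  */
#if 0
static bool
debug_only_locs_p (cselib_val *v)
{
  /* Hypothetical helper; mirrors the test in discard_useless_locs.  */
  return (v->locs && v->locs->setting_insn
	  && DEBUG_INSN_P (v->locs->setting_insn));
}
#endif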
150 /* Number of useless values before we remove them from the hash table. */
151 #define MAX_USELESS_VALUES 32
152
153 /* This table maps from register number to values. It does not
154 contain pointers to cselib_val structures, but rather elt_lists.
155 The purpose is to be able to refer to the same register in
156 different modes. The first element of the list defines the mode in
157 which the register was set; if the mode is unknown or the value is
158 no longer valid in that mode, ELT will be NULL for the first
159 element. */
160 static struct elt_list **reg_values;
161 static unsigned int reg_values_size;
162 #define REG_VALUES(i) reg_values[i]
163
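/* Illustrative sketch (not part of the original file, helper name
   hypothetical): the idiom used throughout this file to walk
   REG_VALUES -- the first entry may carry a NULL elt that only
   records the mode of the original set, so it must be skipped.  */
#if 0
static cselib_val *
first_value_for_reg (unsigned int regno, enum machine_mode mode)
{
  struct elt_list *l = REG_VALUES (regno);

  /* Skip the mode-recording placeholder, if present.  */
  if (l && l->elt == NULL)
    l = l->next;
  for (; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      return l->elt;
  return NULL;
}
#endif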
164 /* The largest number of hard regs used by any entry added to the
165 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
166 static unsigned int max_value_regs;
167
168 /* The set of indices I with REG_VALUES(I) != 0 is saved here. This is used
169 in cselib_clear_table() for fast emptying. */
170 static unsigned int *used_regs;
171 static unsigned int n_used_regs;
172
173 /* We pass this to cselib_invalidate_mem to invalidate all of
174 memory for a non-const call instruction. */
175 static GTY(()) rtx callmem;
176
177 /* Set by discard_useless_locs if it deleted the last location of any
178 value. */
179 static int values_became_useless;
180
181 /* Used as stop element of the containing_mem list so we can check
182 presence in the list by checking the next pointer. */
183 static cselib_val dummy_val;
184
185 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
186 that is constant through the whole function and should never be
187 eliminated. */
188 static cselib_val *cfa_base_preserved_val;
189 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
190
191 /* Used to list all values that contain a memory reference.
192 May or may not contain the useless values - the list is compacted
193 each time memory is invalidated. */
194 static cselib_val *first_containing_mem = &dummy_val;
195 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
196
197 /* If nonnull, cselib will call this function before freeing useless
198 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
199 void (*cselib_discard_hook) (cselib_val *);
200
201 /* If nonnull, cselib will call this function before recording sets or
202 even clobbering outputs of INSN. All the recorded sets will be
203 represented in the array sets[n_sets]. Capturing cselib_get_next_uid ()
204 beforehand can be used to tell whether values present in sets are
205 introduced by this instruction. */
206 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
207 int n_sets);
208
209 #define PRESERVED_VALUE_P(RTX) \
210 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
211
212 \f
213
214 /* Allocate a struct elt_list and fill in its two elements with the
215 arguments. */
216
217 static inline struct elt_list *
218 new_elt_list (struct elt_list *next, cselib_val *elt)
219 {
220 struct elt_list *el;
221 el = (struct elt_list *) pool_alloc (elt_list_pool);
222 el->next = next;
223 el->elt = elt;
224 return el;
225 }
226
227 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
228 list. */
229
230 static inline void
231 new_elt_loc_list (cselib_val *val, rtx loc)
232 {
233 struct elt_loc_list *el, *next = val->locs;
234
235 gcc_checking_assert (!next || !next->setting_insn
236 || !DEBUG_INSN_P (next->setting_insn)
237 || cselib_current_insn == next->setting_insn);
238
239 /* If we're creating the first loc in a debug insn context, we've
240 just created a debug value. Count it. */
241 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
242 n_debug_values++;
243
244 val = canonical_cselib_val (val);
245 next = val->locs;
246
247 if (GET_CODE (loc) == VALUE)
248 {
249 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
250
251 gcc_checking_assert (PRESERVED_VALUE_P (loc)
252 == PRESERVED_VALUE_P (val->val_rtx));
253
254 if (val->val_rtx == loc)
255 return;
256 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
257 {
258 /* Reverse the insertion. */
259 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
260 return;
261 }
262
263 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
264
265 if (CSELIB_VAL_PTR (loc)->locs)
266 {
267 /* Bring all locs from LOC to VAL. */
268 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
269 {
270 /* Adjust values that have LOC as canonical so that VAL
271 becomes their canonical. */
272 if (el->loc && GET_CODE (el->loc) == VALUE)
273 {
274 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
275 == loc);
276 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
277 }
278 }
279 el->next = val->locs;
280 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
281 }
282
283 if (CSELIB_VAL_PTR (loc)->addr_list)
284 {
285 /* Bring in addr_list into canonical node. */
286 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
287 while (last->next)
288 last = last->next;
289 last->next = val->addr_list;
290 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
291 CSELIB_VAL_PTR (loc)->addr_list = NULL;
292 }
293
294 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
295 && val->next_containing_mem == NULL)
296 {
297 /* Add VAL to the containing_mem list after LOC. LOC will
298 be removed when we notice it doesn't contain any
299 MEMs. */
300 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
301 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
302 }
303
304 /* Chain LOC back to VAL. */
305 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
306 el->loc = val->val_rtx;
307 el->setting_insn = cselib_current_insn;
308 el->next = NULL;
309 CSELIB_VAL_PTR (loc)->locs = el;
310 }
311
312 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
313 el->loc = loc;
314 el->setting_insn = cselib_current_insn;
315 el->next = next;
316 val->locs = el;
317 }
318
319 /* Promote loc L's setting insn to the nondebug cselib_current_insn
320 if L is marked as originating from a debug insn, maintaining the
321 debug values count. */
322
323 static inline void
324 promote_debug_loc (struct elt_loc_list *l)
325 {
326 if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
327 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
328 {
329 n_debug_values--;
330 l->setting_insn = cselib_current_insn;
331 if (cselib_preserve_constants && l->next)
332 {
333 gcc_assert (l->next->setting_insn
334 && DEBUG_INSN_P (l->next->setting_insn)
335 && !l->next->next);
336 l->next->setting_insn = cselib_current_insn;
337 }
338 else
339 gcc_assert (!l->next);
340 }
341 }
342
343 /* The elt_list at *PL is no longer needed. Unchain it and free its
344 storage. */
345
346 static inline void
347 unchain_one_elt_list (struct elt_list **pl)
348 {
349 struct elt_list *l = *pl;
350
351 *pl = l->next;
352 pool_free (elt_list_pool, l);
353 }
354
355 /* Likewise for elt_loc_lists. */
356
357 static void
358 unchain_one_elt_loc_list (struct elt_loc_list **pl)
359 {
360 struct elt_loc_list *l = *pl;
361
362 *pl = l->next;
363 pool_free (elt_loc_list_pool, l);
364 }
365
366 /* Likewise for cselib_vals. This also frees the addr_list associated with
367 V. */
368
369 static void
370 unchain_one_value (cselib_val *v)
371 {
372 while (v->addr_list)
373 unchain_one_elt_list (&v->addr_list);
374
375 pool_free (cselib_val_pool, v);
376 }
377
378 /* Remove all entries from the hash table. Also used during
379 initialization. */
380
381 void
382 cselib_clear_table (void)
383 {
384 cselib_reset_table (1);
385 }
386
387 /* Return TRUE if V is a constant, a function invariant or a VALUE
388 equivalence; FALSE otherwise. */
389
390 static bool
391 invariant_or_equiv_p (cselib_val *v)
392 {
393 struct elt_loc_list *l;
394
395 if (v == cfa_base_preserved_val)
396 return true;
397
398 /* Keep VALUE equivalences around. */
399 for (l = v->locs; l; l = l->next)
400 if (GET_CODE (l->loc) == VALUE)
401 return true;
402
403 if (v->locs != NULL
404 && v->locs->next == NULL)
405 {
406 if (CONSTANT_P (v->locs->loc)
407 && (GET_CODE (v->locs->loc) != CONST
408 || !references_value_p (v->locs->loc, 0)))
409 return true;
410 /* Although a debug expr may be bound to different expressions,
411 we can preserve it as if it was constant, to get unification
412 and proper merging within var-tracking. */
413 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
414 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
415 || GET_CODE (v->locs->loc) == ENTRY_VALUE
416 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
417 return true;
418
419 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
420 if (GET_CODE (v->locs->loc) == PLUS
421 && CONST_INT_P (XEXP (v->locs->loc, 1))
422 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
423 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
424 return true;
425 }
426
427 return false;
428 }
429
430 /* Remove from hash table all VALUEs except constants, function
431 invariants and VALUE equivalences. */
432
433 static int
434 preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
435 {
436 cselib_val *v = (cselib_val *)*x;
437
438 if (!invariant_or_equiv_p (v))
439 htab_clear_slot (cselib_hash_table, x);
440 return 1;
441 }
442
443 /* Remove all entries from the hash table, arranging for the next
444 value to be numbered NUM. */
445
446 void
447 cselib_reset_table (unsigned int num)
448 {
449 unsigned int i;
450
451 max_value_regs = 0;
452
453 if (cfa_base_preserved_val)
454 {
455 unsigned int regno = cfa_base_preserved_regno;
456 unsigned int new_used_regs = 0;
457 for (i = 0; i < n_used_regs; i++)
458 if (used_regs[i] == regno)
459 {
460 new_used_regs = 1;
461 continue;
462 }
463 else
464 REG_VALUES (used_regs[i]) = 0;
465 gcc_assert (new_used_regs == 1);
466 n_used_regs = new_used_regs;
467 used_regs[0] = regno;
468 max_value_regs
469 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
470 }
471 else
472 {
473 for (i = 0; i < n_used_regs; i++)
474 REG_VALUES (used_regs[i]) = 0;
475 n_used_regs = 0;
476 }
477
478 if (cselib_preserve_constants)
479 htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
480 else
481 {
482 htab_empty (cselib_hash_table);
483 gcc_checking_assert (!cselib_any_perm_equivs);
484 }
485
486 n_useless_values = 0;
487 n_useless_debug_values = 0;
488 n_debug_values = 0;
489
490 next_uid = num;
491
492 first_containing_mem = &dummy_val;
493 }
494
495 /* Return the number of the next value that will be generated. */
496
497 unsigned int
498 cselib_get_next_uid (void)
499 {
500 return next_uid;
501 }
502
503 /* See the documentation of cselib_find_slot below. */
504 static enum machine_mode find_slot_memmode;
505
506 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
507 INSERTing if requested. When X is part of the address of a MEM,
508 MEMMODE should specify the mode of the MEM. While searching the
509 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
510 in X can be resolved. */
511
512 static void **
513 cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
514 enum machine_mode memmode)
515 {
516 void **slot;
517 find_slot_memmode = memmode;
518 slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
519 find_slot_memmode = VOIDmode;
520 return slot;
521 }
522
523 /* The equality test for our hash table. The first argument ENTRY is a table
524 element (i.e. a cselib_val), while the second arg X is an rtx. We know
525 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
526 CONST of an appropriate mode. */
527
528 static int
529 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
530 {
531 struct elt_loc_list *l;
532 const cselib_val *const v = (const cselib_val *) entry;
533 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
534 enum machine_mode mode = GET_MODE (x);
535
536 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
537 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
538
539 if (mode != GET_MODE (v->val_rtx))
540 return 0;
541
542 /* Unwrap X if necessary. */
543 if (GET_CODE (x) == CONST
544 && (CONST_INT_P (XEXP (x, 0))
545 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
546 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
547 x = XEXP (x, 0);
548
549 /* We don't guarantee that distinct rtx's have different hash values,
550 so we need to do a comparison. */
551 for (l = v->locs; l; l = l->next)
552 if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
553 {
554 promote_debug_loc (l);
555 return 1;
556 }
557
558 return 0;
559 }
560
561 /* The hash function for our hash table. The value is always computed with
562 cselib_hash_rtx when adding an element; this function just extracts the
563 hash value from a cselib_val structure. */
564
565 static hashval_t
566 get_value_hash (const void *entry)
567 {
568 const cselib_val *const v = (const cselib_val *) entry;
569 return v->hash;
570 }
571
572 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
573 only return true for values which point to a cselib_val whose value
574 element has been set to zero, which implies the cselib_val will be
575 removed. */
576
577 int
578 references_value_p (const_rtx x, int only_useless)
579 {
580 const enum rtx_code code = GET_CODE (x);
581 const char *fmt = GET_RTX_FORMAT (code);
582 int i, j;
583
584 if (GET_CODE (x) == VALUE
585 && (! only_useless ||
586 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
587 return 1;
588
589 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
590 {
591 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
592 return 1;
593 else if (fmt[i] == 'E')
594 for (j = 0; j < XVECLEN (x, i); j++)
595 if (references_value_p (XVECEXP (x, i, j), only_useless))
596 return 1;
597 }
598
599 return 0;
600 }
601
602 /* For all locations found in X, delete locations that reference useless
603 values (i.e. values without any location). Called through
604 htab_traverse. */
605
606 static int
607 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
608 {
609 cselib_val *v = (cselib_val *)*x;
610 struct elt_loc_list **p = &v->locs;
611 bool had_locs = v->locs != NULL;
612 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
613
614 while (*p)
615 {
616 if (references_value_p ((*p)->loc, 1))
617 unchain_one_elt_loc_list (p);
618 else
619 p = &(*p)->next;
620 }
621
622 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
623 {
624 if (setting_insn && DEBUG_INSN_P (setting_insn))
625 n_useless_debug_values++;
626 else
627 n_useless_values++;
628 values_became_useless = 1;
629 }
630 return 1;
631 }
632
633 /* If X is a value with no locations, remove it from the hashtable. */
634
635 static int
636 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
637 {
638 cselib_val *v = (cselib_val *)*x;
639
640 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
641 {
642 if (cselib_discard_hook)
643 cselib_discard_hook (v);
644
645 CSELIB_VAL_PTR (v->val_rtx) = NULL;
646 htab_clear_slot (cselib_hash_table, x);
647 unchain_one_value (v);
648 n_useless_values--;
649 }
650
651 return 1;
652 }
653
654 /* Clean out useless values (i.e. those which no longer have locations
655 associated with them) from the hash table. */
656
657 static void
658 remove_useless_values (void)
659 {
660 cselib_val **p, *v;
661
662 /* First pass: eliminate locations that reference useless values. That
663 in turn can make more values useless. */
664 do
665 {
666 values_became_useless = 0;
667 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
668 }
669 while (values_became_useless);
670
671 /* Second pass: actually remove the values. */
672
673 p = &first_containing_mem;
674 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
675 if (v->locs && v == canonical_cselib_val (v))
676 {
677 *p = v;
678 p = &(*p)->next_containing_mem;
679 }
680 *p = &dummy_val;
681
682 n_useless_values += n_useless_debug_values;
683 n_debug_values -= n_useless_debug_values;
684 n_useless_debug_values = 0;
685
686 htab_traverse (cselib_hash_table, discard_useless_values, 0);
687
688 gcc_assert (!n_useless_values);
689 }
690
691 /* Arrange for a value to not be removed from the hash table even if
692 it becomes useless. */
693
694 void
695 cselib_preserve_value (cselib_val *v)
696 {
697 PRESERVED_VALUE_P (v->val_rtx) = 1;
698 }
699
700 /* Test whether a value is preserved. */
701
702 bool
703 cselib_preserved_value_p (cselib_val *v)
704 {
705 return PRESERVED_VALUE_P (v->val_rtx);
706 }
707
708 /* Arrange for a REG value to be assumed constant through the whole function,
709 never invalidated and preserved across cselib_reset_table calls. */
710
711 void
712 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
713 {
714 if (cselib_preserve_constants
715 && v->locs
716 && REG_P (v->locs->loc))
717 {
718 cfa_base_preserved_val = v;
719 cfa_base_preserved_regno = regno;
720 }
721 }
722
723 /* Clean all non-constant expressions in the hash table, but retain
724 their values. */
725
726 void
727 cselib_preserve_only_values (void)
728 {
729 int i;
730
731 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
732 cselib_invalidate_regno (i, reg_raw_mode[i]);
733
734 cselib_invalidate_mem (callmem);
735
736 remove_useless_values ();
737
738 gcc_assert (first_containing_mem == &dummy_val);
739 }
740
741 /* Return the mode in which a register was last set. If X is not a
742 register, return its mode. If the mode in which the register was
743 set is not known, or the value was already clobbered, return
744 VOIDmode. */
745
746 enum machine_mode
747 cselib_reg_set_mode (const_rtx x)
748 {
749 if (!REG_P (x))
750 return GET_MODE (x);
751
752 if (REG_VALUES (REGNO (x)) == NULL
753 || REG_VALUES (REGNO (x))->elt == NULL)
754 return VOIDmode;
755
756 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
757 }
758
759 /* Return nonzero if we can prove that X and Y contain the same value, taking
760 our gathered information into account. */
761
762 int
763 rtx_equal_for_cselib_p (rtx x, rtx y)
764 {
765 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
766 }
767
768 /* If x is a PLUS or an autoinc operation, expand the operation,
769 storing the offset, if any, in *OFF. */
770
771 static rtx
772 autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
773 {
774 switch (GET_CODE (x))
775 {
776 case PLUS:
777 *off = XEXP (x, 1);
778 return XEXP (x, 0);
779
780 case PRE_DEC:
781 if (memmode == VOIDmode)
782 return x;
783
784 *off = GEN_INT (-GET_MODE_SIZE (memmode));
785 return XEXP (x, 0);
787
788 case PRE_INC:
789 if (memmode == VOIDmode)
790 return x;
791
792 *off = GEN_INT (GET_MODE_SIZE (memmode));
793 return XEXP (x, 0);
794
795 case PRE_MODIFY:
796 return XEXP (x, 1);
797
798 case POST_DEC:
799 case POST_INC:
800 case POST_MODIFY:
801 return XEXP (x, 0);
802
803 default:
804 return x;
805 }
806 }
807
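/* Illustrative sketch (not part of the original file): with a 4-byte
   MEMMODE such as SImode, autoinc_split decomposes a PRE_DEC address
   into its base plus (const_int -4), mirroring how cselib_hash_rtx
   below hashes (mem:SI (pre_dec (reg))) like
   (mem:SI (plus (reg) (const_int -4))).  */
#if 0
static void
autoinc_split_example (void)
{
  /* Hypothetical example; SImode is 4 bytes by definition.  */
  rtx off = NULL_RTX;
  rtx base = autoinc_split (gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx),
			    &off, SImode);
  gcc_assert (base == stack_pointer_rtx && INTVAL (off) == -4);
}
#endif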
808 /* Return nonzero if we can prove that X and Y contain the same value,
809 taking our gathered information into account. MEMMODE holds the
810 mode of the enclosing MEM, if any, as required to deal with autoinc
811 addressing modes. If X and Y are not (known to be) part of
812 addresses, MEMMODE should be VOIDmode. */
813
814 static int
815 rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
816 {
817 enum rtx_code code;
818 const char *fmt;
819 int i;
820
821 if (REG_P (x) || MEM_P (x))
822 {
823 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
824
825 if (e)
826 x = e->val_rtx;
827 }
828
829 if (REG_P (y) || MEM_P (y))
830 {
831 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
832
833 if (e)
834 y = e->val_rtx;
835 }
836
837 if (x == y)
838 return 1;
839
840 if (GET_CODE (x) == VALUE)
841 {
842 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
843 struct elt_loc_list *l;
844
845 if (GET_CODE (y) == VALUE)
846 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
847
848 for (l = e->locs; l; l = l->next)
849 {
850 rtx t = l->loc;
851
852 /* Avoid infinite recursion. We know we have the canonical
853 value, so we can just skip any values in the equivalence
854 list. */
855 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
856 continue;
857 else if (rtx_equal_for_cselib_1 (t, y, memmode))
858 return 1;
859 }
860
861 return 0;
862 }
863 else if (GET_CODE (y) == VALUE)
864 {
865 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
866 struct elt_loc_list *l;
867
868 for (l = e->locs; l; l = l->next)
869 {
870 rtx t = l->loc;
871
872 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
873 continue;
874 else if (rtx_equal_for_cselib_1 (x, t, memmode))
875 return 1;
876 }
877
878 return 0;
879 }
880
881 if (GET_MODE (x) != GET_MODE (y))
882 return 0;
883
884 if (GET_CODE (x) != GET_CODE (y))
885 {
886 rtx xorig = x, yorig = y;
887 rtx xoff = NULL, yoff = NULL;
888
889 x = autoinc_split (x, &xoff, memmode);
890 y = autoinc_split (y, &yoff, memmode);
891
892 if (!xoff != !yoff)
893 return 0;
894
895 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
896 return 0;
897
898 /* Don't recurse if nothing changed. */
899 if (x != xorig || y != yorig)
900 return rtx_equal_for_cselib_1 (x, y, memmode);
901
902 return 0;
903 }
904
905 /* These won't be handled correctly by the code below. */
906 switch (GET_CODE (x))
907 {
908 case CONST_DOUBLE:
909 case CONST_FIXED:
910 case DEBUG_EXPR:
911 return 0;
912
913 case DEBUG_IMPLICIT_PTR:
914 return DEBUG_IMPLICIT_PTR_DECL (x)
915 == DEBUG_IMPLICIT_PTR_DECL (y);
916
917 case DEBUG_PARAMETER_REF:
918 return DEBUG_PARAMETER_REF_DECL (x)
919 == DEBUG_PARAMETER_REF_DECL (y);
920
921 case ENTRY_VALUE:
922 /* ENTRY_VALUEs are function invariant; it is thus undesirable to
923 use rtx_equal_for_cselib_1 to compare the operands. */
924 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
925
926 case LABEL_REF:
927 return XEXP (x, 0) == XEXP (y, 0);
928
929 case MEM:
930 /* We have to compare any autoinc operations in the addresses
931 using this MEM's mode. */
932 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
933
934 default:
935 break;
936 }
937
938 code = GET_CODE (x);
939 fmt = GET_RTX_FORMAT (code);
940
941 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
942 {
943 int j;
944
945 switch (fmt[i])
946 {
947 case 'w':
948 if (XWINT (x, i) != XWINT (y, i))
949 return 0;
950 break;
951
952 case 'n':
953 case 'i':
954 if (XINT (x, i) != XINT (y, i))
955 return 0;
956 break;
957
958 case 'V':
959 case 'E':
960 /* Two vectors must have the same length. */
961 if (XVECLEN (x, i) != XVECLEN (y, i))
962 return 0;
963
964 /* And the corresponding elements must match. */
965 for (j = 0; j < XVECLEN (x, i); j++)
966 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
967 XVECEXP (y, i, j), memmode))
968 return 0;
969 break;
970
971 case 'e':
972 if (i == 1
973 && targetm.commutative_p (x, UNKNOWN)
974 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
975 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
976 return 1;
977 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
978 return 0;
979 break;
980
981 case 'S':
982 case 's':
983 if (strcmp (XSTR (x, i), XSTR (y, i)))
984 return 0;
985 break;
986
987 case 'u':
988 /* These are just backpointers, so they don't matter. */
989 break;
990
991 case '0':
992 case 't':
993 break;
994
995 /* It is believed that rtx's at this level will never
996 contain anything but integers and other rtx's,
997 except for within LABEL_REFs and SYMBOL_REFs. */
998 default:
999 gcc_unreachable ();
1000 }
1001 }
1002 return 1;
1003 }
1004
1005 /* We need to pass down the mode of constants through the hash table
1006 functions. For that purpose, wrap them in a CONST of the appropriate
1007 mode. */
1008 static rtx
1009 wrap_constant (enum machine_mode mode, rtx x)
1010 {
1011 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
1012 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
1013 return x;
1014 gcc_assert (mode != VOIDmode);
1015 return gen_rtx_CONST (mode, x);
1016 }
1017
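/* Illustrative sketch (not part of the original file): wrap_constant
   leaves mode-carrying rtxes alone and wraps VOIDmode constants, so a
   CONST_INT hashed as part of an SImode expression becomes
   (const:SI (const_int 64)), which entry_and_rtx_equal_p unwraps
   again above.  */
#if 0
static void
wrap_constant_example (void)
{
  /* Hypothetical example of the wrapping convention.  */
  rtx wrapped = wrap_constant (SImode, GEN_INT (64));
  gcc_assert (GET_CODE (wrapped) == CONST
	      && GET_MODE (wrapped) == SImode
	      && INTVAL (XEXP (wrapped, 0)) == 64);
}
#endif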
1018 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1019 For registers and memory locations, we look up their cselib_val structure
1020 and return its VALUE element.
1021 Possible reasons for returning 0 are: the object is volatile, or we couldn't
1022 find a register or memory location in the table and CREATE is zero. If
1023 CREATE is nonzero, table elts are created for regs and mem.
1024 N.B. this hash function returns the same hash value for RTXes that
1025 differ only in the order of operands; it is thus suitable for
1026 comparisons that take commutativity into account.
1027 If we also wanted to support associative rules, we'd have to use a
1028 different strategy to avoid returning a spurious 0, e.g. return ~(~0U >> 1).
1029 MEMMODE indicates the mode of an enclosing MEM, and it's only
1030 used to compute autoinc values.
1031 We used to have a MODE argument for hashing for CONST_INTs, but that
1032 didn't make sense, since it caused spurious hash differences between
1033 (set (reg:SI 1) (const_int))
1034 (plus:SI (reg:SI 2) (reg:SI 1))
1035 and
1036 (plus:SI (reg:SI 2) (const_int))
1037 If the mode is important in any context, it must be checked specifically
1038 in a comparison anyway, since relying on hash differences is unsafe. */
1039
1040 static unsigned int
1041 cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
1042 {
1043 cselib_val *e;
1044 int i, j;
1045 enum rtx_code code;
1046 const char *fmt;
1047 unsigned int hash = 0;
1048
1049 code = GET_CODE (x);
1050 hash += (unsigned) code + (unsigned) GET_MODE (x);
1051
1052 switch (code)
1053 {
1054 case VALUE:
1055 e = CSELIB_VAL_PTR (x);
1056 return e->hash;
1057
1058 case MEM:
1059 case REG:
1060 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1061 if (! e)
1062 return 0;
1063
1064 return e->hash;
1065
1066 case DEBUG_EXPR:
1067 hash += ((unsigned) DEBUG_EXPR << 7)
1068 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1069 return hash ? hash : (unsigned int) DEBUG_EXPR;
1070
1071 case DEBUG_IMPLICIT_PTR:
1072 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1073 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1074 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1075
1076 case DEBUG_PARAMETER_REF:
1077 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1078 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1079 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1080
1081 case ENTRY_VALUE:
1082 /* ENTRY_VALUEs are function invariant, thus try to avoid
1083 recursing on argument if ENTRY_VALUE is one of the
1084 forms emitted by expand_debug_expr, otherwise
1085 ENTRY_VALUE hash would depend on the current value
1086 in some register or memory. */
1087 if (REG_P (ENTRY_VALUE_EXP (x)))
1088 hash += (unsigned int) REG
1089 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1090 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1091 else if (MEM_P (ENTRY_VALUE_EXP (x))
1092 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1093 hash += (unsigned int) MEM
1094 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1095 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1096 else
1097 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1098 return hash ? hash : (unsigned int) ENTRY_VALUE;
1099
1100 case CONST_INT:
1101 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
1102 return hash ? hash : (unsigned int) CONST_INT;
1103
1104 case CONST_DOUBLE:
1105 /* This is like the general case, except that it only counts
1106 the integers representing the constant. */
1107 hash += (unsigned) code + (unsigned) GET_MODE (x);
1108 if (GET_MODE (x) != VOIDmode)
1109 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1110 else
1111 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1112 + (unsigned) CONST_DOUBLE_HIGH (x));
1113 return hash ? hash : (unsigned int) CONST_DOUBLE;
1114
1115 case CONST_FIXED:
1116 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1117 hash += fixed_hash (CONST_FIXED_VALUE (x));
1118 return hash ? hash : (unsigned int) CONST_FIXED;
1119
1120 case CONST_VECTOR:
1121 {
1122 int units;
1123 rtx elt;
1124
1125 units = CONST_VECTOR_NUNITS (x);
1126
1127 for (i = 0; i < units; ++i)
1128 {
1129 elt = CONST_VECTOR_ELT (x, i);
1130 hash += cselib_hash_rtx (elt, 0, memmode);
1131 }
1132
1133 return hash;
1134 }
1135
1136 /* Assume there is only one rtx object for any given label. */
1137 case LABEL_REF:
1138 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1139 differences and differences between each stage's debugging dumps. */
1140 hash += (((unsigned int) LABEL_REF << 7)
1141 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1142 return hash ? hash : (unsigned int) LABEL_REF;
1143
1144 case SYMBOL_REF:
1145 {
1146 /* Don't hash on the symbol's address to avoid bootstrap differences.
1147 Different hash values may cause expressions to be recorded in
1148 different orders and thus different registers to be used in the
1149 final assembler. This also avoids differences in the dump files
1150 between various stages. */
1151 unsigned int h = 0;
1152 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1153
1154 while (*p)
1155 h += (h << 7) + *p++; /* ??? revisit */
1156
1157 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1158 return hash ? hash : (unsigned int) SYMBOL_REF;
1159 }
1160
1161 case PRE_DEC:
1162 case PRE_INC:
1163 /* We can't compute these without knowing the MEM mode. */
1164 gcc_assert (memmode != VOIDmode);
1165 i = GET_MODE_SIZE (memmode);
1166 if (code == PRE_DEC)
1167 i = -i;
1168 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1169 like (mem:MEMMODE (plus (reg) (const_int I))). */
1170 hash += (unsigned) PLUS - (unsigned)code
1171 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1172 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1173 return hash ? hash : 1 + (unsigned) PLUS;
1174
1175 case PRE_MODIFY:
1176 gcc_assert (memmode != VOIDmode);
1177 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1178
1179 case POST_DEC:
1180 case POST_INC:
1181 case POST_MODIFY:
1182 gcc_assert (memmode != VOIDmode);
1183 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1184
1185 case PC:
1186 case CC0:
1187 case CALL:
1188 case UNSPEC_VOLATILE:
1189 return 0;
1190
1191 case ASM_OPERANDS:
1192 if (MEM_VOLATILE_P (x))
1193 return 0;
1194
1195 break;
1196
1197 default:
1198 break;
1199 }
1200
1201 i = GET_RTX_LENGTH (code) - 1;
1202 fmt = GET_RTX_FORMAT (code);
1203 for (; i >= 0; i--)
1204 {
1205 switch (fmt[i])
1206 {
1207 case 'e':
1208 {
1209 rtx tem = XEXP (x, i);
1210 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1211
1212 if (tem_hash == 0)
1213 return 0;
1214
1215 hash += tem_hash;
1216 }
1217 break;
1218 case 'E':
1219 for (j = 0; j < XVECLEN (x, i); j++)
1220 {
1221 unsigned int tem_hash
1222 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1223
1224 if (tem_hash == 0)
1225 return 0;
1226
1227 hash += tem_hash;
1228 }
1229 break;
1230
1231 case 's':
1232 {
1233 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1234
1235 if (p)
1236 while (*p)
1237 hash += *p++;
1238 break;
1239 }
1240
1241 case 'i':
1242 hash += XINT (x, i);
1243 break;
1244
1245 case '0':
1246 case 't':
1247 /* unused */
1248 break;
1249
1250 default:
1251 gcc_unreachable ();
1252 }
1253 }
1254
1255 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1256 }
1257
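/* Illustrative sketch (not part of the original file): because the 'e'
   cases above simply sum the operand hashes, operand order does not
   change the result, as the comment before this function promises.
   The register choices here are hypothetical.  */
#if 0
static void
commutative_hash_example (void)
{
  rtx r1 = gen_rtx_REG (SImode, 1), r2 = gen_rtx_REG (SImode, 2);
  unsigned h1 = cselib_hash_rtx (gen_rtx_PLUS (SImode, r1, r2), 0, VOIDmode);
  unsigned h2 = cselib_hash_rtx (gen_rtx_PLUS (SImode, r2, r1), 0, VOIDmode);
  /* Addition of the operand hashes is commutative.  */
  gcc_assert (h1 == h2);
}
#endif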
1258 /* Create a new value structure for VALUE and initialize it. The mode of the
1259 value is MODE. */
1260
1261 static inline cselib_val *
1262 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
1263 {
1264 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
1265
1266 gcc_assert (hash);
1267 gcc_assert (next_uid);
1268
1269 e->hash = hash;
1270 e->uid = next_uid++;
1271 /* We use an alloc pool to allocate this RTL construct because it
1272 accounts for about 8% of the overall memory usage. We know
1273 precisely when we can have VALUE RTXen (when cselib is active)
1274 so we don't need to put them in garbage collected memory.
1275 ??? Why should a VALUE be an RTX in the first place? */
1276 e->val_rtx = (rtx) pool_alloc (value_pool);
1277 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1278 PUT_CODE (e->val_rtx, VALUE);
1279 PUT_MODE (e->val_rtx, mode);
1280 CSELIB_VAL_PTR (e->val_rtx) = e;
1281 e->addr_list = 0;
1282 e->locs = 0;
1283 e->next_containing_mem = 0;
1284
1285 if (dump_file && (dump_flags & TDF_CSELIB))
1286 {
1287 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1288 if (flag_dump_noaddr || flag_dump_unnumbered)
1289 fputs ("# ", dump_file);
1290 else
1291 fprintf (dump_file, "%p ", (void*)e);
1292 print_rtl_single (dump_file, x);
1293 fputc ('\n', dump_file);
1294 }
1295
1296 return e;
1297 }
1298
1299 /* ADDR_ELT is a value that is used as an address. MEM_ELT is the value that
1300 contains the data at this address. X is a MEM that represents the
1301 value. Update the two value structures to represent this situation. */
1302
1303 static void
1304 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1305 {
1306 struct elt_loc_list *l;
1307
1308 addr_elt = canonical_cselib_val (addr_elt);
1309 mem_elt = canonical_cselib_val (mem_elt);
1310
1311 /* Avoid duplicates. */
1312 for (l = mem_elt->locs; l; l = l->next)
1313 if (MEM_P (l->loc)
1314 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1315 {
1316 promote_debug_loc (l);
1317 return;
1318 }
1319
1320 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1321 new_elt_loc_list (mem_elt,
1322 replace_equiv_address_nv (x, addr_elt->val_rtx));
1323 if (mem_elt->next_containing_mem == NULL)
1324 {
1325 mem_elt->next_containing_mem = first_containing_mem;
1326 first_containing_mem = mem_elt;
1327 }
1328 }
1329
1330 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1331 If CREATE, make a new one if we haven't seen it before. */
1332
1333 static cselib_val *
1334 cselib_lookup_mem (rtx x, int create)
1335 {
1336 enum machine_mode mode = GET_MODE (x);
1337 enum machine_mode addr_mode;
1338 void **slot;
1339 cselib_val *addr;
1340 cselib_val *mem_elt;
1341 struct elt_list *l;
1342
1343 if (MEM_VOLATILE_P (x) || mode == BLKmode
1344 || !cselib_record_memory
1345 || (FLOAT_MODE_P (mode) && flag_float_store))
1346 return 0;
1347
1348 addr_mode = GET_MODE (XEXP (x, 0));
1349 if (addr_mode == VOIDmode)
1350 addr_mode = Pmode;
1351
1352 /* Look up the value for the address. */
1353 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1354 if (! addr)
1355 return 0;
1356
1357 addr = canonical_cselib_val (addr);
1358 /* Find a value that describes a value of our mode at that address. */
1359 for (l = addr->addr_list; l; l = l->next)
1360 if (GET_MODE (l->elt->val_rtx) == mode)
1361 {
1362 promote_debug_loc (l->elt->locs);
1363 return l->elt;
1364 }
1365
1366 if (! create)
1367 return 0;
1368
1369 mem_elt = new_cselib_val (next_uid, mode, x);
1370 add_mem_for_addr (addr, mem_elt, x);
1371 slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
1372 INSERT, mode);
1373 *slot = mem_elt;
1374 return mem_elt;
1375 }
1376
1377 /* Search through the possible substitutions in P. We prefer a non-reg
1378 substitution because this allows us to expand the tree further. If
1379 we find just a reg, take the lowest regno. There may be several
1380 non-reg results; we just take the first one because they will all
1381 expand to the same place. */
1382
1383 static rtx
1384 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1385 int max_depth)
1386 {
1387 rtx reg_result = NULL;
1388 unsigned int regno = UINT_MAX;
1389 struct elt_loc_list *p_in = p;
1390
1391 for (; p; p = p->next)
1392 {
1393 /* Return these right away to avoid returning stack pointer based
1394 expressions for frame pointer and vice versa, which is something
1395 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1396 for more details. */
1397 if (REG_P (p->loc)
1398 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1399 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1400 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1401 || REGNO (p->loc) == cfa_base_preserved_regno))
1402 return p->loc;
1403 /* Avoid infinite recursion trying to expand a reg into
1404 the same reg. */
1405 if ((REG_P (p->loc))
1406 && (REGNO (p->loc) < regno)
1407 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1408 {
1409 reg_result = p->loc;
1410 regno = REGNO (p->loc);
1411 }
1412 /* Avoid infinite recursion and do not try to expand the
1413 value. */
1414 else if (GET_CODE (p->loc) == VALUE
1415 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1416 continue;
1417 else if (!REG_P (p->loc))
1418 {
1419 rtx result, note;
1420 if (dump_file && (dump_flags & TDF_CSELIB))
1421 {
1422 print_inline_rtx (dump_file, p->loc, 0);
1423 fprintf (dump_file, "\n");
1424 }
1425 if (GET_CODE (p->loc) == LO_SUM
1426 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1427 && p->setting_insn
1428 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1429 && XEXP (note, 0) == XEXP (p->loc, 1))
1430 return XEXP (p->loc, 1);
1431 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1432 if (result)
1433 return result;
1434 }
1435
1436 }
1437
1438 if (regno != UINT_MAX)
1439 {
1440 rtx result;
1441 if (dump_file && (dump_flags & TDF_CSELIB))
1442 fprintf (dump_file, "r%d\n", regno);
1443
1444 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1445 if (result)
1446 return result;
1447 }
1448
1449 if (dump_file && (dump_flags & TDF_CSELIB))
1450 {
1451 if (reg_result)
1452 {
1453 print_inline_rtx (dump_file, reg_result, 0);
1454 fprintf (dump_file, "\n");
1455 }
1456 else
1457 fprintf (dump_file, "NULL\n");
1458 }
1459 return reg_result;
1460 }
1461
1462
1463 /* Forward substitute and expand an expression out to its roots.
1464 This is the opposite of common subexpression elimination. Because
1465 local value numbering is such a weak optimization, the expanded
1466 expression is pretty much unique (not from a pointer-equality point
1467 of view but from a tree-shape point of view).
1468
1469 This function returns NULL if the expansion fails. The expansion
1470 will fail if there is no value number for one of the operands or if
1471 one of the operands has been overwritten between the current insn
1472 and the beginning of the basic block. For instance x has no
1473 expansion in:
1474
1475 r1 <- r1 + 3
1476 x <- r1 + 8
1477
1478 REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
1479 It is clear on return. */
1480
1481 rtx
1482 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1483 {
1484 struct expand_value_data evd;
1485
1486 evd.regs_active = regs_active;
1487 evd.callback = NULL;
1488 evd.callback_arg = NULL;
1489 evd.dummy = false;
1490
1491 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1492 }
1493
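/* Illustrative sketch (not part of the original file) of a typical
   call in the style of dse; MEM_ADDR is a hypothetical address rtx.
   The scratch bitmap must be clear on entry and is clear again on
   return.  The depth limit of 5 is an illustrative choice, not a
   requirement.  */
#if 0
static rtx
expand_address_example (rtx mem_addr)
{
  bitmap scratch = BITMAP_ALLOC (NULL);
  rtx root = cselib_expand_value_rtx (mem_addr, scratch, 5);
  BITMAP_FREE (scratch);
  return root;	/* NULL if the expansion failed.  */
}
#endif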
1494 /* Same as cselib_expand_value_rtx, but using a callback to try to
1495 resolve some expressions. The CB function should return ORIG if it
1496 can't or does not want to deal with a certain RTX. Any other
1497 return value, including NULL, will be used as the expansion for
1498 VALUE, without any further changes. */
1499
1500 rtx
1501 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1502 cselib_expand_callback cb, void *data)
1503 {
1504 struct expand_value_data evd;
1505
1506 evd.regs_active = regs_active;
1507 evd.callback = cb;
1508 evd.callback_arg = data;
1509 evd.dummy = false;
1510
1511 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1512 }
1513
1514 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1515 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1516 would return NULL or non-NULL, without allocating new rtx. */
1517
1518 bool
1519 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1520 cselib_expand_callback cb, void *data)
1521 {
1522 struct expand_value_data evd;
1523
1524 evd.regs_active = regs_active;
1525 evd.callback = cb;
1526 evd.callback_arg = data;
1527 evd.dummy = true;
1528
1529 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1530 }
1531
1532 /* Internal implementation of cselib_expand_value_rtx and
1533 cselib_expand_value_rtx_cb. */
1534
1535 static rtx
1536 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1537 int max_depth)
1538 {
1539 rtx copy, scopy;
1540 int i, j;
1541 RTX_CODE code;
1542 const char *format_ptr;
1543 enum machine_mode mode;
1544
1545 code = GET_CODE (orig);
1546
1547 /* For the context of dse, if we end up expanding into a huge tree, we
1548 will not have a useful address, so we might as well just give up
1549 quickly. */
1550 if (max_depth <= 0)
1551 return NULL;
1552
1553 switch (code)
1554 {
1555 case REG:
1556 {
1557 struct elt_list *l = REG_VALUES (REGNO (orig));
1558
1559 if (l && l->elt == NULL)
1560 l = l->next;
1561 for (; l; l = l->next)
1562 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1563 {
1564 rtx result;
1565 unsigned regno = REGNO (orig);
1566
1567 /* The only thing that we are not willing to do (this
1568 is a requirement of dse; if other potential uses
1569 need this function we should add a parm to control
1570 it) is that we will not substitute the
1571 STACK_POINTER_REGNUM, FRAME_POINTER or the
1572 HARD_FRAME_POINTER.
1573
1574 These expansions confuse the code that notices that
1575 stores into the frame go dead at the end of the
1576 function and that the frame is not affected by calls
1577 to subroutines. If you allow the
1578 STACK_POINTER_REGNUM substitution, then dse will
1579 think that parameter pushing also goes dead, which is
1580 wrong. If you allow the FRAME_POINTER or the
1581 HARD_FRAME_POINTER then you lose the opportunity to
1582 make the frame assumptions. */
1583 if (regno == STACK_POINTER_REGNUM
1584 || regno == FRAME_POINTER_REGNUM
1585 || regno == HARD_FRAME_POINTER_REGNUM
1586 || regno == cfa_base_preserved_regno)
1587 return orig;
1588
1589 bitmap_set_bit (evd->regs_active, regno);
1590
1591 if (dump_file && (dump_flags & TDF_CSELIB))
1592 fprintf (dump_file, "expanding: r%d into: ", regno);
1593
1594 result = expand_loc (l->elt->locs, evd, max_depth);
1595 bitmap_clear_bit (evd->regs_active, regno);
1596
1597 if (result)
1598 return result;
1599 else
1600 return orig;
1601 }
1602 }
1603
1604 case CONST_INT:
1605 case CONST_DOUBLE:
1606 case CONST_VECTOR:
1607 case SYMBOL_REF:
1608 case CODE_LABEL:
1609 case PC:
1610 case CC0:
1611 case SCRATCH:
1612 /* SCRATCHes must be shared because they represent distinct values. */
1613 return orig;
1614 case CLOBBER:
1615 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1616 return orig;
1617 break;
1618
1619 case CONST:
1620 if (shared_const_p (orig))
1621 return orig;
1622 break;
1623
1624 case SUBREG:
1625 {
1626 rtx subreg;
1627
1628 if (evd->callback)
1629 {
1630 subreg = evd->callback (orig, evd->regs_active, max_depth,
1631 evd->callback_arg);
1632 if (subreg != orig)
1633 return subreg;
1634 }
1635
1636 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1637 max_depth - 1);
1638 if (!subreg)
1639 return NULL;
1640 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1641 GET_MODE (SUBREG_REG (orig)),
1642 SUBREG_BYTE (orig));
1643 if (scopy == NULL
1644 || (GET_CODE (scopy) == SUBREG
1645 && !REG_P (SUBREG_REG (scopy))
1646 && !MEM_P (SUBREG_REG (scopy))))
1647 return NULL;
1648
1649 return scopy;
1650 }
1651
1652 case VALUE:
1653 {
1654 rtx result;
1655
1656 if (dump_file && (dump_flags & TDF_CSELIB))
1657 {
1658 fputs ("\nexpanding ", dump_file);
1659 print_rtl_single (dump_file, orig);
1660 fputs (" into...", dump_file);
1661 }
1662
1663 if (evd->callback)
1664 {
1665 result = evd->callback (orig, evd->regs_active, max_depth,
1666 evd->callback_arg);
1667
1668 if (result != orig)
1669 return result;
1670 }
1671
1672 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1673 return result;
1674 }
1675
1676 case DEBUG_EXPR:
1677 if (evd->callback)
1678 return evd->callback (orig, evd->regs_active, max_depth,
1679 evd->callback_arg);
1680 return orig;
1681
1682 default:
1683 break;
1684 }
1685
1686 /* Copy the various flags, fields, and other information. We assume
1687 that all fields need copying, and then clear the fields that should
1688 not be copied. That is the sensible default behavior, and forces
1689 us to explicitly document why we are *not* copying a flag. */
1690 if (evd->dummy)
1691 copy = NULL;
1692 else
1693 copy = shallow_copy_rtx (orig);
1694
1695 format_ptr = GET_RTX_FORMAT (code);
1696
1697 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1698 switch (*format_ptr++)
1699 {
1700 case 'e':
1701 if (XEXP (orig, i) != NULL)
1702 {
1703 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1704 max_depth - 1);
1705 if (!result)
1706 return NULL;
1707 if (copy)
1708 XEXP (copy, i) = result;
1709 }
1710 break;
1711
1712 case 'E':
1713 case 'V':
1714 if (XVEC (orig, i) != NULL)
1715 {
1716 if (copy)
1717 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1718 for (j = 0; j < XVECLEN (orig, i); j++)
1719 {
1720 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1721 evd, max_depth - 1);
1722 if (!result)
1723 return NULL;
1724 if (copy)
1725 XVECEXP (copy, i, j) = result;
1726 }
1727 }
1728 break;
1729
1730 case 't':
1731 case 'w':
1732 case 'i':
1733 case 's':
1734 case 'S':
1735 case 'T':
1736 case 'u':
1737 case 'B':
1738 case '0':
1739 /* These are left unchanged. */
1740 break;
1741
1742 default:
1743 gcc_unreachable ();
1744 }
1745
1746 if (evd->dummy)
1747 return orig;
1748
1749 mode = GET_MODE (copy);
1750 /* If an operand has been simplified into a CONST_INT, which doesn't
1751 have a mode, and the mode isn't derivable from the whole rtx's mode,
1752 try simplify_*_operation first with the mode of the original operand,
1753 and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
1754 scopy = copy;
1755 switch (GET_RTX_CLASS (code))
1756 {
1757 case RTX_UNARY:
1758 if (CONST_INT_P (XEXP (copy, 0))
1759 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1760 {
1761 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1762 GET_MODE (XEXP (orig, 0)));
1763 if (scopy)
1764 return scopy;
1765 }
1766 break;
1767 case RTX_COMM_ARITH:
1768 case RTX_BIN_ARITH:
1769 /* These expressions can derive operand modes from the whole rtx's mode. */
1770 break;
1771 case RTX_TERNARY:
1772 case RTX_BITFIELD_OPS:
1773 if (CONST_INT_P (XEXP (copy, 0))
1774 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1775 {
1776 scopy = simplify_ternary_operation (code, mode,
1777 GET_MODE (XEXP (orig, 0)),
1778 XEXP (copy, 0), XEXP (copy, 1),
1779 XEXP (copy, 2));
1780 if (scopy)
1781 return scopy;
1782 }
1783 break;
1784 case RTX_COMPARE:
1785 case RTX_COMM_COMPARE:
1786 if (CONST_INT_P (XEXP (copy, 0))
1787 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1788 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1789 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1790 {
1791 scopy = simplify_relational_operation (code, mode,
1792 (GET_MODE (XEXP (orig, 0))
1793 != VOIDmode)
1794 ? GET_MODE (XEXP (orig, 0))
1795 : GET_MODE (XEXP (orig, 1)),
1796 XEXP (copy, 0),
1797 XEXP (copy, 1));
1798 if (scopy)
1799 return scopy;
1800 }
1801 break;
1802 default:
1803 break;
1804 }
1805 scopy = simplify_rtx (copy);
1806 if (scopy)
1807 return scopy;
1808 return copy;
1809 }
1810
1811 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1812 with VALUE expressions. This way, it becomes independent of changes
1813 to registers and memory.
1814 X isn't actually modified; if modifications are needed, new rtl is
1815 allocated. However, the return value can share rtl with X.
1816 If X is within a MEM, MEMMODE must be the mode of the MEM. */
1817
1818 rtx
1819 cselib_subst_to_values (rtx x, enum machine_mode memmode)
1820 {
1821 enum rtx_code code = GET_CODE (x);
1822 const char *fmt = GET_RTX_FORMAT (code);
1823 cselib_val *e;
1824 struct elt_list *l;
1825 rtx copy = x;
1826 int i;
1827
1828 switch (code)
1829 {
1830 case REG:
1831 l = REG_VALUES (REGNO (x));
1832 if (l && l->elt == NULL)
1833 l = l->next;
1834 for (; l; l = l->next)
1835 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1836 return l->elt->val_rtx;
1837
1838 gcc_unreachable ();
1839
1840 case MEM:
1841 e = cselib_lookup_mem (x, 0);
1842 /* This used to happen for autoincrements, but we deal with them
1843 properly now. Remove the if stmt for the next release. */
1844 if (! e)
1845 {
1846 /* Assign a value that doesn't match any other. */
1847 e = new_cselib_val (next_uid, GET_MODE (x), x);
1848 }
1849 return e->val_rtx;
1850
1851 case ENTRY_VALUE:
1852 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1853 if (! e)
1854 break;
1855 return e->val_rtx;
1856
1857 case CONST_DOUBLE:
1858 case CONST_VECTOR:
1859 case CONST_INT:
1860 case CONST_FIXED:
1861 return x;
1862
1863 case PRE_DEC:
1864 case PRE_INC:
1865 gcc_assert (memmode != VOIDmode);
1866 i = GET_MODE_SIZE (memmode);
1867 if (code == PRE_DEC)
1868 i = -i;
1869 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1870 XEXP (x, 0), i),
1871 memmode);
1872
1873 case PRE_MODIFY:
1874 gcc_assert (memmode != VOIDmode);
1875 return cselib_subst_to_values (XEXP (x, 1), memmode);
1876
1877 case POST_DEC:
1878 case POST_INC:
1879 case POST_MODIFY:
1880 gcc_assert (memmode != VOIDmode);
1881 return cselib_subst_to_values (XEXP (x, 0), memmode);
1882
1883 default:
1884 break;
1885 }
1886
1887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 {
1889 if (fmt[i] == 'e')
1890 {
1891 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1892
1893 if (t != XEXP (x, i))
1894 {
1895 if (x == copy)
1896 copy = shallow_copy_rtx (x);
1897 XEXP (copy, i) = t;
1898 }
1899 }
1900 else if (fmt[i] == 'E')
1901 {
1902 int j;
1903
1904 for (j = 0; j < XVECLEN (x, i); j++)
1905 {
1906 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1907
1908 if (t != XVECEXP (x, i, j))
1909 {
1910 if (XVEC (x, i) == XVEC (copy, i))
1911 {
1912 if (x == copy)
1913 copy = shallow_copy_rtx (x);
1914 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1915 }
1916 XVECEXP (copy, i, j) = t;
1917 }
1918 }
1919 }
1920 }
1921
1922 return copy;
1923 }
1924
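/* Illustrative sketch (not part of the original file): substituting
   values into a stack-based address inside an SImode MEM.  Assumes
   cselib is active, the address has already been looked up (e.g. via
   cselib_lookup), and the stack pointer currently maps to some value
   V1; the result then reads (plus:P (value V1) (const_int 16)).  */
#if 0
static rtx
subst_address_example (void)
{
  /* Hypothetical example address: sp + 16.  */
  rtx addr = plus_constant (Pmode, stack_pointer_rtx, 16);
  return cselib_subst_to_values (addr, SImode);
}
#endif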
1925 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1926
1927 rtx
1928 cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
1929 {
1930 rtx ret;
1931 gcc_assert (!cselib_current_insn);
1932 cselib_current_insn = insn;
1933 ret = cselib_subst_to_values (x, memmode);
1934 cselib_current_insn = NULL;
1935 return ret;
1936 }
1937
1938 /* Look up the rtl expression X in our tables and return the value it
1939 has. If CREATE is zero, we return NULL if we don't know the value.
1940 Otherwise, we create a new one if possible, using mode MODE if X
1941 doesn't have a mode (i.e. because it's a constant). When X is part
1942 of an address, MEMMODE should be the mode of the enclosing MEM if
1943 we're tracking autoinc expressions. */
1944
1945 static cselib_val *
1946 cselib_lookup_1 (rtx x, enum machine_mode mode,
1947 int create, enum machine_mode memmode)
1948 {
1949 void **slot;
1950 cselib_val *e;
1951 unsigned int hashval;
1952
1953 if (GET_MODE (x) != VOIDmode)
1954 mode = GET_MODE (x);
1955
1956 if (GET_CODE (x) == VALUE)
1957 return CSELIB_VAL_PTR (x);
1958
1959 if (REG_P (x))
1960 {
1961 struct elt_list *l;
1962 unsigned int i = REGNO (x);
1963
1964 l = REG_VALUES (i);
1965 if (l && l->elt == NULL)
1966 l = l->next;
1967 for (; l; l = l->next)
1968 if (mode == GET_MODE (l->elt->val_rtx))
1969 {
1970 promote_debug_loc (l->elt->locs);
1971 return l->elt;
1972 }
1973
1974 if (! create)
1975 return 0;
1976
1977 if (i < FIRST_PSEUDO_REGISTER)
1978 {
1979 unsigned int n = hard_regno_nregs[i][mode];
1980
1981 if (n > max_value_regs)
1982 max_value_regs = n;
1983 }
1984
1985 e = new_cselib_val (next_uid, GET_MODE (x), x);
1986 new_elt_loc_list (e, x);
1987 if (REG_VALUES (i) == 0)
1988 {
1989 /* Maintain the invariant that the first entry of
1990 REG_VALUES, if present, must be the value used to set the
1991 register, or NULL. */
1992 used_regs[n_used_regs++] = i;
1993 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1994 }
1995 else if (cselib_preserve_constants
1996 && GET_MODE_CLASS (mode) == MODE_INT)
1997 {
/* During var-tracking, try harder to find equivalences
for SUBREGs.  If a setter sets, say, a DImode register
and the user uses that register only in SImode, add a lowpart
subreg location.  */
2002 struct elt_list *lwider = NULL;
2003 l = REG_VALUES (i);
2004 if (l && l->elt == NULL)
2005 l = l->next;
2006 for (; l; l = l->next)
2007 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2008 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2009 > GET_MODE_SIZE (mode)
2010 && (lwider == NULL
2011 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2012 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2013 {
2014 struct elt_loc_list *el;
2015 if (i < FIRST_PSEUDO_REGISTER
2016 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2017 continue;
2018 for (el = l->elt->locs; el; el = el->next)
2019 if (!REG_P (el->loc))
2020 break;
2021 if (el)
2022 lwider = l;
2023 }
2024 if (lwider)
2025 {
2026 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2027 GET_MODE (lwider->elt->val_rtx));
2028 if (sub)
2029 new_elt_loc_list (e, sub);
2030 }
2031 }
2032 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2033 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
2034 *slot = e;
2035 return e;
2036 }
2037
2038 if (MEM_P (x))
2039 return cselib_lookup_mem (x, create);
2040
2041 hashval = cselib_hash_rtx (x, create, memmode);
2042 /* Can't even create if hashing is not possible. */
2043 if (! hashval)
2044 return 0;
2045
2046 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
2047 create ? INSERT : NO_INSERT, memmode);
2048 if (slot == 0)
2049 return 0;
2050
2051 e = (cselib_val *) *slot;
2052 if (e)
2053 return e;
2054
2055 e = new_cselib_val (hashval, mode, x);
2056
2057 /* We have to fill the slot before calling cselib_subst_to_values:
2058 the hash table is inconsistent until we do so, and
2059 cselib_subst_to_values will need to do lookups. */
2060 *slot = (void *) e;
2061 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2062 return e;
2063 }
2064
/* Wrapper for cselib_lookup that indicates X is in INSN.  */
2066
2067 cselib_val *
2068 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
2069 int create, enum machine_mode memmode, rtx insn)
2070 {
2071 cselib_val *ret;
2072
2073 gcc_assert (!cselib_current_insn);
2074 cselib_current_insn = insn;
2075
2076 ret = cselib_lookup (x, mode, create, memmode);
2077
2078 cselib_current_insn = NULL;
2079
2080 return ret;
2081 }
2082
/* Wrapper for cselib_lookup_1 that logs the lookup result and
maintains invariants related to debug insns.  */
2085
2086 cselib_val *
2087 cselib_lookup (rtx x, enum machine_mode mode,
2088 int create, enum machine_mode memmode)
2089 {
2090 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2091
2092 /* ??? Should we return NULL if we're not to create an entry, the
2093 found loc is a debug loc and cselib_current_insn is not DEBUG?
2094 If so, we should also avoid converting val to non-DEBUG; probably
2095 easiest setting cselib_current_insn to NULL before the call
2096 above. */
2097
2098 if (dump_file && (dump_flags & TDF_CSELIB))
2099 {
2100 fputs ("cselib lookup ", dump_file);
2101 print_inline_rtx (dump_file, x, 2);
2102 fprintf (dump_file, " => %u:%u\n",
2103 ret ? ret->uid : 0,
2104 ret ? ret->hash : 0);
2105 }
2106
2107 return ret;
2108 }
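
/* An illustrative sketch, not part of cselib proper: a non-creating
   lookup lets a pass ask whether X already has a known value.  The
   helper name is hypothetical; X is assumed to carry its own mode.  */
#if 0
static bool
example_has_known_value_p (rtx x)
{
  /* CREATE == 0: return NULL rather than allocating a new value;
     MEMMODE is VOIDmode because X is not part of an address here.  */
  cselib_val *v = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
  return v != NULL && v->locs != NULL;
}
#endif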
2109
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
is used to determine how many hard registers are being changed; for a hard
register it must not be VOIDmode.  When call-clobbered registers are
invalidated across a call, the caller passes each register's reg_raw_mode
(see cselib_process_insn below).  */
2115
2116 static void
2117 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
2118 {
2119 unsigned int endregno;
2120 unsigned int i;
2121
2122 /* If we see pseudos after reload, something is _wrong_. */
2123 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2124 || reg_renumber[regno] < 0);
2125
2126 /* Determine the range of registers that must be invalidated. For
2127 pseudos, only REGNO is affected. For hard regs, we must take MODE
2128 into account, and we must also invalidate lower register numbers
2129 if they contain values that overlap REGNO. */
2130 if (regno < FIRST_PSEUDO_REGISTER)
2131 {
2132 gcc_assert (mode != VOIDmode);
2133
2134 if (regno < max_value_regs)
2135 i = 0;
2136 else
2137 i = regno - max_value_regs;
2138
2139 endregno = end_hard_regno (mode, regno);
2140 }
2141 else
2142 {
2143 i = regno;
2144 endregno = regno + 1;
2145 }
2146
2147 for (; i < endregno; i++)
2148 {
2149 struct elt_list **l = &REG_VALUES (i);
2150
2151 /* Go through all known values for this reg; if it overlaps the range
2152 we're invalidating, remove the value. */
2153 while (*l)
2154 {
2155 cselib_val *v = (*l)->elt;
2156 bool had_locs;
2157 rtx setting_insn;
2158 struct elt_loc_list **p;
2159 unsigned int this_last = i;
2160
2161 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2162 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2163
2164 if (this_last < regno || v == NULL
2165 || (v == cfa_base_preserved_val
2166 && i == cfa_base_preserved_regno))
2167 {
2168 l = &(*l)->next;
2169 continue;
2170 }
2171
2172 /* We have an overlap. */
2173 if (*l == REG_VALUES (i))
2174 {
/* Maintain the invariant that the first entry of
REG_VALUES, if present, must be the value used to set
the register, or NULL.  This is also nice because
then we won't push the same regno onto used_regs
multiple times.  */
2180 (*l)->elt = NULL;
2181 l = &(*l)->next;
2182 }
2183 else
2184 unchain_one_elt_list (l);
2185
2186 v = canonical_cselib_val (v);
2187
2188 had_locs = v->locs != NULL;
2189 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2190
2191 /* Now, we clear the mapping from value to reg. It must exist, so
2192 this code will crash intentionally if it doesn't. */
2193 for (p = &v->locs; ; p = &(*p)->next)
2194 {
2195 rtx x = (*p)->loc;
2196
2197 if (REG_P (x) && REGNO (x) == i)
2198 {
2199 unchain_one_elt_loc_list (p);
2200 break;
2201 }
2202 }
2203
2204 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2205 {
2206 if (setting_insn && DEBUG_INSN_P (setting_insn))
2207 n_useless_debug_values++;
2208 else
2209 n_useless_values++;
2210 }
2211 }
2212 }
2213 }
2214 \f
2215 /* Invalidate any locations in the table which are changed because of a
2216 store to MEM_RTX. If this is called because of a non-const call
2217 instruction, MEM_RTX is (mem:BLK const0_rtx). */
2218
2219 static void
2220 cselib_invalidate_mem (rtx mem_rtx)
2221 {
2222 cselib_val **vp, *v, *next;
2223 int num_mems = 0;
2224 rtx mem_addr;
2225
2226 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2227 mem_rtx = canon_rtx (mem_rtx);
2228
2229 vp = &first_containing_mem;
2230 for (v = *vp; v != &dummy_val; v = next)
2231 {
2232 bool has_mem = false;
2233 struct elt_loc_list **p = &v->locs;
2234 bool had_locs = v->locs != NULL;
2235 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
2236
2237 while (*p)
2238 {
2239 rtx x = (*p)->loc;
2240 cselib_val *addr;
2241 struct elt_list **mem_chain;
2242
2243 /* MEMs may occur in locations only at the top level; below
2244 that every MEM or REG is substituted by its VALUE. */
2245 if (!MEM_P (x))
2246 {
2247 p = &(*p)->next;
2248 continue;
2249 }
2250 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2251 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
2252 mem_addr, x, NULL_RTX))
2253 {
2254 has_mem = true;
2255 num_mems++;
2256 p = &(*p)->next;
2257 continue;
2258 }
2259
/* This one overlaps.  */
/* There must be a mapping from this MEM's address to
the value V; remove that mapping, too.  */
2263 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2264 addr = canonical_cselib_val (addr);
2265 gcc_checking_assert (v == canonical_cselib_val (v));
2266 mem_chain = &addr->addr_list;
2267 for (;;)
2268 {
2269 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2270
2271 if (canon == v)
2272 {
2273 unchain_one_elt_list (mem_chain);
2274 break;
2275 }
2276
2277 /* Record canonicalized elt. */
2278 (*mem_chain)->elt = canon;
2279
2280 mem_chain = &(*mem_chain)->next;
2281 }
2282
2283 unchain_one_elt_loc_list (p);
2284 }
2285
2286 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2287 {
2288 if (setting_insn && DEBUG_INSN_P (setting_insn))
2289 n_useless_debug_values++;
2290 else
2291 n_useless_values++;
2292 }
2293
2294 next = v->next_containing_mem;
2295 if (has_mem)
2296 {
2297 *vp = v;
2298 vp = &(*vp)->next_containing_mem;
2299 }
2300 else
2301 v->next_containing_mem = NULL;
2302 }
2303 *vp = &dummy_val;
2304 }
2305
2306 /* Invalidate DEST, which is being assigned to or clobbered. */
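/* A store to part of a register, e.g. (strict_low_part (subreg:QI
(reg:SI 70) 0)), conservatively invalidates all of (reg:SI 70).  */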
2307
2308 void
2309 cselib_invalidate_rtx (rtx dest)
2310 {
2311 while (GET_CODE (dest) == SUBREG
2312 || GET_CODE (dest) == ZERO_EXTRACT
2313 || GET_CODE (dest) == STRICT_LOW_PART)
2314 dest = XEXP (dest, 0);
2315
2316 if (REG_P (dest))
2317 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2318 else if (MEM_P (dest))
2319 cselib_invalidate_mem (dest);
2320 }
2321
2322 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2323
2324 static void
2325 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2326 void *data ATTRIBUTE_UNUSED)
2327 {
2328 cselib_invalidate_rtx (dest);
2329 }
2330
2331 /* Record the result of a SET instruction. DEST is being set; the source
2332 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2333 describes its address. */
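/* For example, after (set (reg:SI 70) (reg:SI 71)), the value looked up
for (reg:SI 71) gains (reg:SI 70) as an additional location, and the
first entry of REG_VALUES (70) is made to point at that value.  */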
2334
2335 static void
2336 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2337 {
2338 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
2339
2340 if (src_elt == 0 || side_effects_p (dest))
2341 return;
2342
2343 if (dreg >= 0)
2344 {
2345 if (dreg < FIRST_PSEUDO_REGISTER)
2346 {
2347 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
2348
2349 if (n > max_value_regs)
2350 max_value_regs = n;
2351 }
2352
2353 if (REG_VALUES (dreg) == 0)
2354 {
2355 used_regs[n_used_regs++] = dreg;
2356 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2357 }
2358 else
2359 {
2360 /* The register should have been invalidated. */
2361 gcc_assert (REG_VALUES (dreg)->elt == 0);
2362 REG_VALUES (dreg)->elt = src_elt;
2363 }
2364
2365 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2366 n_useless_values--;
2367 new_elt_loc_list (src_elt, dest);
2368 }
2369 else if (MEM_P (dest) && dest_addr_elt != 0
2370 && cselib_record_memory)
2371 {
2372 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2373 n_useless_values--;
2374 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2375 }
2376 }
2377
2378 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2379
2380 void
2381 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
2382 {
2383 cselib_val *nelt;
2384 rtx save_cselib_current_insn = cselib_current_insn;
2385
2386 gcc_checking_assert (elt);
2387 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2388 gcc_checking_assert (!side_effects_p (x));
2389
2390 cselib_current_insn = insn;
2391
2392 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2393
2394 if (nelt != elt)
2395 {
2396 cselib_any_perm_equivs = true;
2397
2398 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2399 cselib_preserve_value (nelt);
2400
2401 new_elt_loc_list (nelt, elt->val_rtx);
2402 }
2403
2404 cselib_current_insn = save_cselib_current_insn;
2405 }
2406
2407 /* Return TRUE if any permanent equivalences have been recorded since
2408 the table was last initialized. */
2409 bool
2410 cselib_have_permanent_equivalences (void)
2411 {
2412 return cselib_any_perm_equivs;
2413 }
2414
2415 /* There is no good way to determine how many elements there can be
2416 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2417 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2418
2419 struct cselib_record_autoinc_data
2420 {
2421 struct cselib_set *sets;
2422 int n_sets;
2423 };
2424
/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
autoinc RTXs: SRC, plus SRCOFF if non-NULL, is stored in DEST.  */
2427
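/* For instance, a store through (post_inc:SI (reg:SI R)) on a 32-bit
target arrives here with DEST = SRC = (reg:SI R) and
SRCOFF = (const_int 4); the recorded set is
(set (reg:SI R) (plus:SI (reg:SI R) (const_int 4))).  */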
2428 static int
2429 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2430 rtx dest, rtx src, rtx srcoff, void *arg)
2431 {
2432 struct cselib_record_autoinc_data *data;
2433 data = (struct cselib_record_autoinc_data *)arg;
2434
2435 data->sets[data->n_sets].dest = dest;
2436
2437 if (srcoff)
2438 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2439 else
2440 data->sets[data->n_sets].src = src;
2441
2442 data->n_sets++;
2443
2444 return -1;
2445 }
2446
2447 /* Record the effects of any sets and autoincs in INSN. */
2448 static void
2449 cselib_record_sets (rtx insn)
2450 {
2451 int n_sets = 0;
2452 int i;
2453 struct cselib_set sets[MAX_SETS];
2454 rtx body = PATTERN (insn);
2455 rtx cond = 0;
2456 int n_sets_before_autoinc;
2457 struct cselib_record_autoinc_data data;
2458
2460 if (GET_CODE (body) == COND_EXEC)
2461 {
2462 cond = COND_EXEC_TEST (body);
2463 body = COND_EXEC_CODE (body);
2464 }
2465
2466 /* Find all sets. */
2467 if (GET_CODE (body) == SET)
2468 {
2469 sets[0].src = SET_SRC (body);
2470 sets[0].dest = SET_DEST (body);
2471 n_sets = 1;
2472 }
2473 else if (GET_CODE (body) == PARALLEL)
2474 {
/* Look through the PARALLEL and record the values being
set, if possible.  CLOBBERs need no recording here; they are
invalidated later via note_stores.  */
2477 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2478 {
2479 rtx x = XVECEXP (body, 0, i);
2480
2481 if (GET_CODE (x) == SET)
2482 {
2483 sets[n_sets].src = SET_SRC (x);
2484 sets[n_sets].dest = SET_DEST (x);
2485 n_sets++;
2486 }
2487 }
2488 }
2489
2490 if (n_sets == 1
2491 && MEM_P (sets[0].src)
2492 && !cselib_record_memory
2493 && MEM_READONLY_P (sets[0].src))
2494 {
2495 rtx note = find_reg_equal_equiv_note (insn);
2496
2497 if (note && CONSTANT_P (XEXP (note, 0)))
2498 sets[0].src = XEXP (note, 0);
2499 }
2500
2501 data.sets = sets;
2502 data.n_sets = n_sets_before_autoinc = n_sets;
2503 for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
2504 n_sets = data.n_sets;
2505
2506 /* Look up the values that are read. Do this before invalidating the
2507 locations that are written. */
2508 for (i = 0; i < n_sets; i++)
2509 {
2510 rtx dest = sets[i].dest;
2511
2512 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2513 the low part after invalidating any knowledge about larger modes. */
2514 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2515 sets[i].dest = dest = XEXP (dest, 0);
2516
2517 /* We don't know how to record anything but REG or MEM. */
2518 if (REG_P (dest)
2519 || (MEM_P (dest) && cselib_record_memory))
2520 {
2521 rtx src = sets[i].src;
2522 if (cond)
2523 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2524 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2525 if (MEM_P (dest))
2526 {
2527 enum machine_mode address_mode = get_address_mode (dest);
2528
2529 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2530 address_mode, 1,
2531 GET_MODE (dest));
2532 }
2533 else
2534 sets[i].dest_addr_elt = 0;
2535 }
2536 }
2537
2538 if (cselib_record_sets_hook)
2539 cselib_record_sets_hook (insn, sets, n_sets);
2540
2541 /* Invalidate all locations written by this insn. Note that the elts we
2542 looked up in the previous loop aren't affected, just some of their
2543 locations may go away. */
2544 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2545
2546 for (i = n_sets_before_autoinc; i < n_sets; i++)
2547 cselib_invalidate_rtx (sets[i].dest);
2548
2549 /* If this is an asm, look for duplicate sets. This can happen when the
2550 user uses the same value as an output multiple times. This is valid
2551 if the outputs are not actually used thereafter. Treat this case as
2552 if the value isn't actually set. We do this by smashing the destination
2553 to pc_rtx, so that we won't record the value later. */
2554 if (n_sets >= 2 && asm_noperands (body) >= 0)
2555 {
2556 for (i = 0; i < n_sets; i++)
2557 {
2558 rtx dest = sets[i].dest;
2559 if (REG_P (dest) || MEM_P (dest))
2560 {
2561 int j;
2562 for (j = i + 1; j < n_sets; j++)
2563 if (rtx_equal_p (dest, sets[j].dest))
2564 {
2565 sets[i].dest = pc_rtx;
2566 sets[j].dest = pc_rtx;
2567 }
2568 }
2569 }
2570 }
2571
2572 /* Now enter the equivalences in our tables. */
2573 for (i = 0; i < n_sets; i++)
2574 {
2575 rtx dest = sets[i].dest;
2576 if (REG_P (dest)
2577 || (MEM_P (dest) && cselib_record_memory))
2578 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2579 }
2580 }
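
/* An illustrative sketch, not part of cselib proper: a client such as
   var-tracking can observe each recorded set by installing
   cselib_record_sets_hook before processing insns.  The callback below
   is hypothetical.  */
#if 0
static void
example_sets_hook (rtx insn, struct cselib_set *sets, int n_sets)
{
  int i;

  for (i = 0; i < n_sets; i++)
    if (sets[i].src_elt)
      fprintf (stderr, "insn %i: set %i has value %u\n",
               INSN_UID (insn), i, sets[i].src_elt->uid);
}
/* ... cselib_record_sets_hook = example_sets_hook; ... */
#endif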
2581
2582 /* Record the effects of INSN. */
2583
2584 void
2585 cselib_process_insn (rtx insn)
2586 {
2587 int i;
2588 rtx x;
2589
2590 cselib_current_insn = insn;
2591
2592 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
2593 if (LABEL_P (insn)
2594 || (CALL_P (insn)
2595 && find_reg_note (insn, REG_SETJMP, NULL))
2596 || (NONJUMP_INSN_P (insn)
2597 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2598 && MEM_VOLATILE_P (PATTERN (insn))))
2599 {
2600 cselib_reset_table (next_uid);
2601 cselib_current_insn = NULL_RTX;
2602 return;
2603 }
2604
2605 if (! INSN_P (insn))
2606 {
2607 cselib_current_insn = NULL_RTX;
2608 return;
2609 }
2610
2611 /* If this is a call instruction, forget anything stored in a
2612 call clobbered register, or, if this is not a const call, in
2613 memory. */
2614 if (CALL_P (insn))
2615 {
2616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2617 if (call_used_regs[i]
2618 || (REG_VALUES (i) && REG_VALUES (i)->elt
2619 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2620 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2621 cselib_invalidate_regno (i, reg_raw_mode[i]);
2622
2623 /* Since it is not clear how cselib is going to be used, be
2624 conservative here and treat looping pure or const functions
2625 as if they were regular functions. */
2626 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2627 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2628 cselib_invalidate_mem (callmem);
2629 }
2630
2631 cselib_record_sets (insn);
2632
2633 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2634 after we have processed the insn. */
2635 if (CALL_P (insn))
2636 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2637 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2638 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2639
2640 cselib_current_insn = NULL_RTX;
2641
2642 if (n_useless_values > MAX_USELESS_VALUES
2643 /* remove_useless_values is linear in the hash table size. Avoid
quadratic behavior for very large hash tables with very few
2645 useless elements. */
2646 && ((unsigned int)n_useless_values
2647 > (cselib_hash_table->n_elements
2648 - cselib_hash_table->n_deleted
2649 - n_debug_values) / 4))
2650 remove_useless_values ();
2651 }
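
/* An illustrative sketch, not part of cselib proper: the typical driver
   loop of a client pass, which must also call init_alias_analysis (see
   cselib_init below).  The function name is hypothetical.  */
#if 0
static void
example_scan_insns (rtx first_insn)
{
  rtx insn;

  cselib_init (CSELIB_RECORD_MEMORY);
  for (insn = first_insn; insn; insn = NEXT_INSN (insn))
    cselib_process_insn (insn);
  cselib_finish ();
}
#endif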
2652
2653 /* Initialize cselib for one pass. The caller must also call
2654 init_alias_analysis. */
2655
2656 void
2657 cselib_init (int record_what)
2658 {
2659 elt_list_pool = create_alloc_pool ("elt_list",
2660 sizeof (struct elt_list), 10);
2661 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2662 sizeof (struct elt_loc_list), 10);
2663 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2664 sizeof (cselib_val), 10);
2665 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2666 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2667 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2668 cselib_any_perm_equivs = false;
2669
2670 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2671 see canon_true_dependence. This is only created once. */
2672 if (! callmem)
2673 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2674
2675 cselib_nregs = max_reg_num ();
2676
/* We preserve reg_values across passes so that the whole array need not
be cleared and reallocated each time.  Reallocate it, however, if it
happens to be too small or much too large.  */
2679 if (!reg_values || reg_values_size < cselib_nregs
2680 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2681 {
2682 free (reg_values);
/* Leave some space for newly emitted instructions so we don't end up
reallocating in between passes; e.g. cselib_nregs == 100 yields a
reg_values_size of 110.  */
2685 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2686 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2687 }
2688 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2689 n_used_regs = 0;
2690 cselib_hash_table = htab_create (31, get_value_hash,
2691 entry_and_rtx_equal_p, NULL);
2692 next_uid = 1;
2693 }
2694
2695 /* Called when the current user is done with cselib. */
2696
2697 void
2698 cselib_finish (void)
2699 {
2700 cselib_discard_hook = NULL;
2701 cselib_preserve_constants = false;
2702 cselib_any_perm_equivs = false;
2703 cfa_base_preserved_val = NULL;
2704 cfa_base_preserved_regno = INVALID_REGNUM;
2705 free_alloc_pool (elt_list_pool);
2706 free_alloc_pool (elt_loc_list_pool);
2707 free_alloc_pool (cselib_val_pool);
2708 free_alloc_pool (value_pool);
2709 cselib_clear_table ();
2710 htab_delete (cselib_hash_table);
2711 free (used_regs);
2712 used_regs = 0;
2713 cselib_hash_table = 0;
2714 n_useless_values = 0;
2715 n_useless_debug_values = 0;
2716 n_debug_values = 0;
2717 next_uid = 0;
2718 }
2719
/* Dump the cselib_val *X to FILE *INFO.  */
2721
2722 static int
2723 dump_cselib_val (void **x, void *info)
2724 {
2725 cselib_val *v = (cselib_val *)*x;
2726 FILE *out = (FILE *)info;
2727 bool need_lf = true;
2728
2729 print_inline_rtx (out, v->val_rtx, 0);
2730
2731 if (v->locs)
2732 {
2733 struct elt_loc_list *l = v->locs;
2734 if (need_lf)
2735 {
2736 fputc ('\n', out);
2737 need_lf = false;
2738 }
2739 fputs (" locs:", out);
2740 do
2741 {
2742 if (l->setting_insn)
2743 fprintf (out, "\n from insn %i ",
2744 INSN_UID (l->setting_insn));
2745 else
2746 fprintf (out, "\n ");
2747 print_inline_rtx (out, l->loc, 4);
2748 }
2749 while ((l = l->next));
2750 fputc ('\n', out);
2751 }
2752 else
2753 {
2754 fputs (" no locs", out);
2755 need_lf = true;
2756 }
2757
2758 if (v->addr_list)
2759 {
2760 struct elt_list *e = v->addr_list;
2761 if (need_lf)
2762 {
2763 fputc ('\n', out);
2764 need_lf = false;
2765 }
2766 fputs (" addr list:", out);
2767 do
2768 {
2769 fputs ("\n ", out);
2770 print_inline_rtx (out, e->elt->val_rtx, 2);
2771 }
2772 while ((e = e->next));
2773 fputc ('\n', out);
2774 }
2775 else
2776 {
2777 fputs (" no addrs", out);
2778 need_lf = true;
2779 }
2780
2781 if (v->next_containing_mem == &dummy_val)
2782 fputs (" last mem\n", out);
2783 else if (v->next_containing_mem)
2784 {
2785 fputs (" next mem ", out);
2786 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2787 fputc ('\n', out);
2788 }
2789 else if (need_lf)
2790 fputc ('\n', out);
2791
2792 return 1;
2793 }
2794
2795 /* Dump to OUT everything in the CSELIB table. */
2796
2797 void
2798 dump_cselib_table (FILE *out)
2799 {
2800 fprintf (out, "cselib hash table:\n");
2801 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2802 if (first_containing_mem != &dummy_val)
2803 {
2804 fputs ("first mem ", out);
2805 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2806 fputc ('\n', out);
2807 }
2808 fprintf (out, "next uid %i\n", next_uid);
2809 }
2810
2811 #include "gt-cselib.h"