gcc/cselib.c - git blame (thirdparty/gcc.git, git.ipfire.org)
fa49fd0f
RK
1/* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
a4f436ff
JJ
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
fa49fd0f 5
1322177d 6This file is part of GCC.
fa49fd0f 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
fa49fd0f 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
fa49fd0f
RK
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
fa49fd0f
RK
21
22#include "config.h"
23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
fa49fd0f
RK
26
27#include "rtl.h"
532aafad 28#include "tree.h"  /* FIXME: For hashing DEBUG_EXPR & friends. */
fa49fd0f
RK
29#include "tm_p.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "flags.h"
fa49fd0f
RK
33#include "insn-config.h"
34#include "recog.h"
35#include "function.h"
78528714 36#include "emit-rtl.h"
718f9c0f 37#include "diagnostic-core.h"
fa49fd0f 38#include "ggc.h"
fa49fd0f 39#include "hashtab.h"
7ee2468b 40#include "dumpfile.h"
fa49fd0f 41#include "cselib.h"
08df6c0d 42#include "valtrack.h"
c65ecebc 43#include "params.h"
6a59927d 44#include "alloc-pool.h"
29c1846b 45#include "target.h"
7a8cba34 46#include "bitmap.h"
fa49fd0f 47
fba4cb03
LB
48/* A list of cselib_val structures. */
49struct elt_list {
50 struct elt_list *next;
51 cselib_val *elt;
52};
53
463301c3 54static bool cselib_record_memory;
457eeaae 55static bool cselib_preserve_constants;
0f68ba3e 56static bool cselib_any_perm_equivs;
7080f735
AJ
57static int entry_and_rtx_equal_p (const void *, const void *);
58static hashval_t get_value_hash (const void *);
59static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
6f2ffb4b 60static void new_elt_loc_list (cselib_val *, rtx);
7080f735
AJ
61static void unchain_one_value (cselib_val *);
62static void unchain_one_elt_list (struct elt_list **);
63static void unchain_one_elt_loc_list (struct elt_loc_list **);
7080f735
AJ
64static int discard_useless_locs (void **, void *);
65static int discard_useless_values (void **, void *);
66static void remove_useless_values (void);
4deef538
AO
67static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
68static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
b5b8b0ac 69static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
7080f735
AJ
70static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
71static cselib_val *cselib_lookup_mem (rtx, int);
72static void cselib_invalidate_regno (unsigned int, enum machine_mode);
7080f735 73static void cselib_invalidate_mem (rtx);
7080f735
AJ
74static void cselib_record_set (rtx, cselib_val *, cselib_val *);
75static void cselib_record_sets (rtx);
fa49fd0f 76
b5b8b0ac
AO
77struct expand_value_data
78{
79 bitmap regs_active;
80 cselib_expand_callback callback;
81 void *callback_arg;
864ddef7 82 bool dummy;
b5b8b0ac
AO
83};
84
85static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
86
fa49fd0f
RK
87/* There are three ways in which cselib can look up an rtx:
88 - for a REG, the reg_values table (which is indexed by regno) is used
89 - for a MEM, we recursively look up its address and then follow the
90 addr_list of that value
91 - for everything else, we compute a hash value and go through the hash
92 table. Since different rtx's can still have the same hash value,
93 this involves walking the table entries for a given value and comparing
94 the locations of the entries with the rtx we are looking up. */
95
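/* [Editorial sketch, not part of the original cselib.c source.  The three
   lookup paths described above, spelled out with helpers declared in this
   file; the real dispatch lives in cselib_lookup, this is only an
   illustration of the comment above.

     if (REG_P (x))
       l = REG_VALUES (REGNO (x));                 -- per-regno elt_list
     else if (MEM_P (x))
       e = cselib_lookup_mem (x, create);          -- recurse on the address
     else
       slot = cselib_find_slot (x, cselib_hash_rtx (x, create, memmode),
                                create ? INSERT : NO_INSERT, memmode);
                                                   -- generic hash lookup  ] */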
96/* A table that enables us to look up elts by their value. */
7c514720 97static htab_t cselib_hash_table;
fa49fd0f
RK
98
99/* This is a global so we don't have to pass this through every function.
100 It is used in new_elt_loc_list to set SETTING_INSN. */
101static rtx cselib_current_insn;
102
5440c0e7
AO
 103/* The unique id that the next created value will take. */
104static unsigned int next_uid;
fa49fd0f
RK
105
106/* The number of registers we had when the varrays were last resized. */
107static unsigned int cselib_nregs;
108
5847e8da
AO
109/* Count values without known locations, or with only locations that
110 wouldn't have been known except for debug insns. Whenever this
111 grows too big, we remove these useless values from the table.
112
113 Counting values with only debug values is a bit tricky. We don't
114 want to increment n_useless_values when we create a value for a
115 debug insn, for this would get n_useless_values out of sync, but we
 116 want to increment it if all locs in the list that were ever referenced
117 in nondebug insns are removed from the list.
118
119 In the general case, once we do that, we'd have to stop accepting
120 nondebug expressions in the loc list, to avoid having two values
121 equivalent that, without debug insns, would have been made into
122 separate values. However, because debug insns never introduce
123 equivalences themselves (no assignments), the only means for
124 growing loc lists is through nondebug assignments. If the locs
125 also happen to be referenced in debug insns, it will work just fine.
126
127 A consequence of this is that there's at most one debug-only loc in
128 each loc list. If we keep it in the first entry, testing whether
129 we have a debug-only loc list takes O(1).
130
131 Furthermore, since any additional entry in a loc list containing a
132 debug loc would have to come from an assignment (nondebug) that
133 references both the initial debug loc and the newly-equivalent loc,
134 the initial debug loc would be promoted to a nondebug loc, and the
135 loc list would not contain debug locs any more.
136
137 So the only case we have to be careful with in order to keep
138 n_useless_values in sync between debug and nondebug compilations is
139 to avoid incrementing n_useless_values when removing the single loc
140 from a value that turns out to not appear outside debug values. We
141 increment n_useless_debug_values instead, and leave such values
142 alone until, for other reasons, we garbage-collect useless
143 values. */
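/* [Editorial illustration, not part of the original source: how the
   counters below interact on a small example, following the code in
   new_elt_loc_list, discard_useless_locs and remove_useless_values.

     a DEBUG_INSN creates VALUE V whose only loc comes from that insn
         -> n_debug_values++                  (new_elt_loc_list)
     V's only loc is later discarded and V is not PRESERVED_VALUE_P
         -> n_useless_debug_values++          (discard_useless_locs)
     the next garbage collection folds the counters together
         -> n_useless_values += n_useless_debug_values;
            n_debug_values   -= n_useless_debug_values;
                                              (remove_useless_values)  ] */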
fa49fd0f 144static int n_useless_values;
5847e8da
AO
145static int n_useless_debug_values;
146
147/* Count values whose locs have been taken exclusively from debug
148 insns for the entire life of the value. */
149static int n_debug_values;
fa49fd0f
RK
150
151/* Number of useless values before we remove them from the hash table. */
152#define MAX_USELESS_VALUES 32
153
60fa6660
AO
154/* This table maps from register number to values. It does not
155 contain pointers to cselib_val structures, but rather elt_lists.
156 The purpose is to be able to refer to the same register in
157 different modes. The first element of the list defines the mode in
158 which the register was set; if the mode is unknown or the value is
159 no longer valid in that mode, ELT will be NULL for the first
160 element. */
5211d65a
KH
161static struct elt_list **reg_values;
162static unsigned int reg_values_size;
6790d1ab 163#define REG_VALUES(i) reg_values[i]
fa49fd0f 164
31825e57 165/* The largest number of hard regs used by any entry added to the
eb232f4e 166 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
31825e57
DM
167static unsigned int max_value_regs;
168
fa49fd0f 169/* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
eb232f4e 170 in cselib_clear_table() for fast emptying. */
6790d1ab
JH
171static unsigned int *used_regs;
172static unsigned int n_used_regs;
fa49fd0f
RK
173
174/* We pass this to cselib_invalidate_mem to invalidate all of
175 memory for a non-const call instruction. */
e2500fed 176static GTY(()) rtx callmem;
fa49fd0f 177
fa49fd0f
RK
178/* Set by discard_useless_locs if it deleted the last location of any
179 value. */
180static int values_became_useless;
7101fb18
JH
181
182/* Used as stop element of the containing_mem list so we can check
183 presence in the list by checking the next pointer. */
184static cselib_val dummy_val;
185
457eeaae
JJ
186/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
187 that is constant through the whole function and should never be
188 eliminated. */
189static cselib_val *cfa_base_preserved_val;
5a9fbcf1 190static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
457eeaae 191
7080f735 192/* Used to list all values that contain a memory reference.
7101fb18
JH
193 May or may not contain the useless values - the list is compacted
194 each time memory is invalidated. */
195static cselib_val *first_containing_mem = &dummy_val;
23bd7a93 196static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
6fb5fa3c
DB
197
198/* If nonnull, cselib will call this function before freeing useless
199 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
200void (*cselib_discard_hook) (cselib_val *);
b5b8b0ac
AO
201
202/* If nonnull, cselib will call this function before recording sets or
203 even clobbering outputs of INSN. All the recorded sets will be
204 represented in the array sets[n_sets]. new_val_min can be used to
205 tell whether values present in sets are introduced by this
206 instruction. */
207void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
208 int n_sets);
209
210#define PRESERVED_VALUE_P(RTX) \
211 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
b5b8b0ac 212
fa49fd0f
RK
213\f
214
215/* Allocate a struct elt_list and fill in its two elements with the
216 arguments. */
217
6a59927d 218static inline struct elt_list *
7080f735 219new_elt_list (struct elt_list *next, cselib_val *elt)
fa49fd0f 220{
6a59927d 221 struct elt_list *el;
f883e0a7 222 el = (struct elt_list *) pool_alloc (elt_list_pool);
fa49fd0f
RK
223 el->next = next;
224 el->elt = elt;
225 return el;
226}
227
6f2ffb4b
AO
228/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
229 list. */
fa49fd0f 230
6f2ffb4b
AO
231static inline void
232new_elt_loc_list (cselib_val *val, rtx loc)
fa49fd0f 233{
6f2ffb4b
AO
234 struct elt_loc_list *el, *next = val->locs;
235
236 gcc_checking_assert (!next || !next->setting_insn
237 || !DEBUG_INSN_P (next->setting_insn)
238 || cselib_current_insn == next->setting_insn);
5847e8da
AO
239
240 /* If we're creating the first loc in a debug insn context, we've
241 just created a debug value. Count it. */
242 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
243 n_debug_values++;
244
6f2ffb4b
AO
245 val = canonical_cselib_val (val);
246 next = val->locs;
247
248 if (GET_CODE (loc) == VALUE)
249 {
250 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
251
252 gcc_checking_assert (PRESERVED_VALUE_P (loc)
253 == PRESERVED_VALUE_P (val->val_rtx));
254
255 if (val->val_rtx == loc)
256 return;
257 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
258 {
259 /* Reverse the insertion. */
260 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
261 return;
262 }
263
264 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
265
266 if (CSELIB_VAL_PTR (loc)->locs)
267 {
268 /* Bring all locs from LOC to VAL. */
269 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
270 {
271 /* Adjust values that have LOC as canonical so that VAL
272 becomes their canonical. */
273 if (el->loc && GET_CODE (el->loc) == VALUE)
274 {
275 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
276 == loc);
277 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
278 }
279 }
280 el->next = val->locs;
281 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
faead9f7
AO
282 }
283
284 if (CSELIB_VAL_PTR (loc)->addr_list)
285 {
286 /* Bring in addr_list into canonical node. */
287 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
288 while (last->next)
289 last = last->next;
290 last->next = val->addr_list;
291 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
292 CSELIB_VAL_PTR (loc)->addr_list = NULL;
293 }
294
295 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
296 && val->next_containing_mem == NULL)
297 {
298 /* Add VAL to the containing_mem list after LOC. LOC will
299 be removed when we notice it doesn't contain any
300 MEMs. */
301 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
302 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
6f2ffb4b
AO
303 }
304
305 /* Chain LOC back to VAL. */
306 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
307 el->loc = val->val_rtx;
308 el->setting_insn = cselib_current_insn;
309 el->next = NULL;
310 CSELIB_VAL_PTR (loc)->locs = el;
311 }
312
313 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
314 el->loc = loc;
315 el->setting_insn = cselib_current_insn;
316 el->next = next;
317 val->locs = el;
fa49fd0f
RK
318}
319
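/* [Editorial sketch, not part of the original source: the effect of the
   VALUE case in new_elt_loc_list above when two values become equivalent.
   Suppose VAL has uid 7 and LOC is a (value ...) with uid 5; then

     new_elt_loc_list (VAL, LOC)
       is reversed into new_elt_loc_list (CSELIB_VAL_PTR (LOC), VAL->val_rtx),
       so the value with the smaller uid (5) becomes canonical: it receives
       the other value's locs, addr_list and containing_mem link, while the
       value with uid 7 is left with a single loc that points back at the
       canonical VALUE.  ] */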
5847e8da
AO
320/* Promote loc L to a nondebug cselib_current_insn if L is marked as
321 originating from a debug insn, maintaining the debug values
322 count. */
323
324static inline void
325promote_debug_loc (struct elt_loc_list *l)
326{
ce8fe26d 327 if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
5847e8da
AO
328 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
329 {
330 n_debug_values--;
331 l->setting_insn = cselib_current_insn;
dc2a58da
JJ
332 if (cselib_preserve_constants && l->next)
333 {
334 gcc_assert (l->next->setting_insn
335 && DEBUG_INSN_P (l->next->setting_insn)
336 && !l->next->next);
337 l->next->setting_insn = cselib_current_insn;
338 }
339 else
340 gcc_assert (!l->next);
5847e8da
AO
341 }
342}
343
fa49fd0f
RK
344/* The elt_list at *PL is no longer needed. Unchain it and free its
345 storage. */
346
6a59927d 347static inline void
7080f735 348unchain_one_elt_list (struct elt_list **pl)
fa49fd0f
RK
349{
350 struct elt_list *l = *pl;
351
352 *pl = l->next;
6a59927d 353 pool_free (elt_list_pool, l);
fa49fd0f
RK
354}
355
356/* Likewise for elt_loc_lists. */
357
358static void
7080f735 359unchain_one_elt_loc_list (struct elt_loc_list **pl)
fa49fd0f
RK
360{
361 struct elt_loc_list *l = *pl;
362
363 *pl = l->next;
6a59927d 364 pool_free (elt_loc_list_pool, l);
fa49fd0f
RK
365}
366
367/* Likewise for cselib_vals. This also frees the addr_list associated with
368 V. */
369
370static void
7080f735 371unchain_one_value (cselib_val *v)
fa49fd0f
RK
372{
373 while (v->addr_list)
374 unchain_one_elt_list (&v->addr_list);
375
6a59927d 376 pool_free (cselib_val_pool, v);
fa49fd0f
RK
377}
378
379/* Remove all entries from the hash table. Also used during
b5b8b0ac 380 initialization. */
fa49fd0f 381
eb232f4e
SB
382void
383cselib_clear_table (void)
b5b8b0ac 384{
5440c0e7 385 cselib_reset_table (1);
b5b8b0ac
AO
386}
387
0e224656
AO
388/* Return TRUE if V is a constant, a function invariant or a VALUE
389 equivalence; FALSE otherwise. */
457eeaae 390
0e224656
AO
391static bool
392invariant_or_equiv_p (cselib_val *v)
457eeaae 393{
6f2ffb4b 394 struct elt_loc_list *l;
457eeaae 395
0e224656
AO
396 if (v == cfa_base_preserved_val)
397 return true;
398
399 /* Keep VALUE equivalences around. */
400 for (l = v->locs; l; l = l->next)
401 if (GET_CODE (l->loc) == VALUE)
402 return true;
403
457eeaae
JJ
404 if (v->locs != NULL
405 && v->locs->next == NULL)
406 {
407 if (CONSTANT_P (v->locs->loc)
408 && (GET_CODE (v->locs->loc) != CONST
409 || !references_value_p (v->locs->loc, 0)))
0e224656 410 return true;
6f2ffb4b
AO
411 /* Although a debug expr may be bound to different expressions,
412 we can preserve it as if it was constant, to get unification
413 and proper merging within var-tracking. */
414 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
415 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
416 || GET_CODE (v->locs->loc) == ENTRY_VALUE
417 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
0e224656
AO
418 return true;
419
420 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
421 if (GET_CODE (v->locs->loc) == PLUS
422 && CONST_INT_P (XEXP (v->locs->loc, 1))
423 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
424 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
425 return true;
457eeaae 426 }
6f2ffb4b 427
0e224656
AO
428 return false;
429}
430
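/* [Editorial example, not part of the original source: values that
   invariant_or_equiv_p above keeps across cselib_reset_table when
   cselib_preserve_constants is enabled.

     V1: single loc (symbol_ref "x")                  -- CONSTANT_P
     V2: single loc (plus (value V1) (const_int 16))  -- invariant by recursion
     V3: first loc is (value V2)                      -- VALUE equivalence

   All three survive preserve_constants_and_equivs; an ordinary register or
   memory value does not.  ] */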
431/* Remove from hash table all VALUEs except constants, function
432 invariants and VALUE equivalences. */
433
434static int
435preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
436{
437 cselib_val *v = (cselib_val *)*x;
457eeaae 438
0e224656
AO
439 if (!invariant_or_equiv_p (v))
440 htab_clear_slot (cselib_hash_table, x);
457eeaae
JJ
441 return 1;
442}
443
b5b8b0ac
AO
444/* Remove all entries from the hash table, arranging for the next
445 value to be numbered NUM. */
446
447void
5440c0e7 448cselib_reset_table (unsigned int num)
fa49fd0f
RK
449{
450 unsigned int i;
451
31825e57
DM
452 max_value_regs = 0;
453
457eeaae
JJ
454 if (cfa_base_preserved_val)
455 {
9de9cbaf 456 unsigned int regno = cfa_base_preserved_regno;
457eeaae
JJ
457 unsigned int new_used_regs = 0;
458 for (i = 0; i < n_used_regs; i++)
459 if (used_regs[i] == regno)
460 {
461 new_used_regs = 1;
462 continue;
463 }
464 else
465 REG_VALUES (used_regs[i]) = 0;
466 gcc_assert (new_used_regs == 1);
467 n_used_regs = new_used_regs;
468 used_regs[0] = regno;
469 max_value_regs
470 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
471 }
472 else
473 {
474 for (i = 0; i < n_used_regs; i++)
475 REG_VALUES (used_regs[i]) = 0;
476 n_used_regs = 0;
477 }
fa49fd0f 478
457eeaae 479 if (cselib_preserve_constants)
0e224656 480 htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
457eeaae 481 else
0f68ba3e
AO
482 {
483 htab_empty (cselib_hash_table);
484 gcc_checking_assert (!cselib_any_perm_equivs);
485 }
fa49fd0f 486
fa49fd0f 487 n_useless_values = 0;
5847e8da
AO
488 n_useless_debug_values = 0;
489 n_debug_values = 0;
fa49fd0f 490
5440c0e7 491 next_uid = num;
7101fb18
JH
492
493 first_containing_mem = &dummy_val;
fa49fd0f
RK
494}
495
b5b8b0ac
AO
496/* Return the number of the next value that will be generated. */
497
498unsigned int
5440c0e7 499cselib_get_next_uid (void)
b5b8b0ac 500{
5440c0e7 501 return next_uid;
b5b8b0ac
AO
502}
503
4deef538
AO
504/* See the documentation of cselib_find_slot below. */
505static enum machine_mode find_slot_memmode;
506
507/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
508 INSERTing if requested. When X is part of the address of a MEM,
509 MEMMODE should specify the mode of the MEM. While searching the
510 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
511 in X can be resolved. */
512
513static void **
514cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
515 enum machine_mode memmode)
516{
517 void **slot;
518 find_slot_memmode = memmode;
519 slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
520 find_slot_memmode = VOIDmode;
521 return slot;
522}
523
fa49fd0f
RK
524/* The equality test for our hash table. The first argument ENTRY is a table
525 element (i.e. a cselib_val), while the second arg X is an rtx. We know
526 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
527 CONST of an appropriate mode. */
528
529static int
7080f735 530entry_and_rtx_equal_p (const void *entry, const void *x_arg)
fa49fd0f
RK
531{
532 struct elt_loc_list *l;
e5cfc29f 533 const cselib_val *const v = (const cselib_val *) entry;
f883e0a7 534 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
fa49fd0f
RK
535 enum machine_mode mode = GET_MODE (x);
536
481683e1 537 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
341c100f 538 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
b8698a0f 539
757bbef8 540 if (mode != GET_MODE (v->val_rtx))
fa49fd0f
RK
541 return 0;
542
543 /* Unwrap X if necessary. */
544 if (GET_CODE (x) == CONST
481683e1 545 && (CONST_INT_P (XEXP (x, 0))
091a3ac7 546 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
fa49fd0f
RK
547 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
548 x = XEXP (x, 0);
7080f735 549
fa49fd0f
RK
550 /* We don't guarantee that distinct rtx's have different hash values,
551 so we need to do a comparison. */
552 for (l = v->locs; l; l = l->next)
4deef538 553 if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
5847e8da
AO
554 {
555 promote_debug_loc (l);
556 return 1;
557 }
fa49fd0f
RK
558
559 return 0;
560}
561
562/* The hash function for our hash table. The value is always computed with
0516f6fe
SB
563 cselib_hash_rtx when adding an element; this function just extracts the
564 hash value from a cselib_val structure. */
fa49fd0f 565
fb7e6024 566static hashval_t
7080f735 567get_value_hash (const void *entry)
fa49fd0f 568{
4f588890 569 const cselib_val *const v = (const cselib_val *) entry;
5440c0e7 570 return v->hash;
fa49fd0f
RK
571}
572
573/* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
574 only return true for values which point to a cselib_val whose value
575 element has been set to zero, which implies the cselib_val will be
576 removed. */
577
578int
4f588890 579references_value_p (const_rtx x, int only_useless)
fa49fd0f 580{
4f588890 581 const enum rtx_code code = GET_CODE (x);
fa49fd0f
RK
582 const char *fmt = GET_RTX_FORMAT (code);
583 int i, j;
584
585 if (GET_CODE (x) == VALUE
6f2ffb4b
AO
586 && (! only_useless ||
587 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
fa49fd0f
RK
588 return 1;
589
590 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
591 {
592 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
593 return 1;
594 else if (fmt[i] == 'E')
595 for (j = 0; j < XVECLEN (x, i); j++)
596 if (references_value_p (XVECEXP (x, i, j), only_useless))
597 return 1;
598 }
599
600 return 0;
601}
602
603/* For all locations found in X, delete locations that reference useless
604 values (i.e. values without any location). Called through
605 htab_traverse. */
606
607static int
7080f735 608discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
fa49fd0f
RK
609{
610 cselib_val *v = (cselib_val *)*x;
611 struct elt_loc_list **p = &v->locs;
5847e8da
AO
612 bool had_locs = v->locs != NULL;
613 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
fa49fd0f
RK
614
615 while (*p)
616 {
617 if (references_value_p ((*p)->loc, 1))
618 unchain_one_elt_loc_list (p);
619 else
620 p = &(*p)->next;
621 }
622
b5b8b0ac 623 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
fa49fd0f 624 {
5847e8da
AO
625 if (setting_insn && DEBUG_INSN_P (setting_insn))
626 n_useless_debug_values++;
627 else
628 n_useless_values++;
fa49fd0f
RK
629 values_became_useless = 1;
630 }
631 return 1;
632}
633
634/* If X is a value with no locations, remove it from the hashtable. */
635
636static int
7080f735 637discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
fa49fd0f
RK
638{
639 cselib_val *v = (cselib_val *)*x;
640
b5b8b0ac 641 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
fa49fd0f 642 {
6fb5fa3c
DB
643 if (cselib_discard_hook)
644 cselib_discard_hook (v);
645
757bbef8 646 CSELIB_VAL_PTR (v->val_rtx) = NULL;
7c514720 647 htab_clear_slot (cselib_hash_table, x);
fa49fd0f
RK
648 unchain_one_value (v);
649 n_useless_values--;
650 }
651
652 return 1;
653}
654
655/* Clean out useless values (i.e. those which no longer have locations
656 associated with them) from the hash table. */
657
658static void
7080f735 659remove_useless_values (void)
fa49fd0f 660{
7101fb18 661 cselib_val **p, *v;
5847e8da 662
fa49fd0f
RK
663 /* First pass: eliminate locations that reference the value. That in
664 turn can make more values useless. */
665 do
666 {
667 values_became_useless = 0;
7c514720 668 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
fa49fd0f
RK
669 }
670 while (values_became_useless);
671
672 /* Second pass: actually remove the values. */
fa49fd0f 673
7101fb18
JH
674 p = &first_containing_mem;
675 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
faead9f7 676 if (v->locs && v == canonical_cselib_val (v))
7101fb18
JH
677 {
678 *p = v;
679 p = &(*p)->next_containing_mem;
680 }
681 *p = &dummy_val;
682
5847e8da
AO
683 n_useless_values += n_useless_debug_values;
684 n_debug_values -= n_useless_debug_values;
685 n_useless_debug_values = 0;
686
7c514720 687 htab_traverse (cselib_hash_table, discard_useless_values, 0);
3e2a0bd2 688
341c100f 689 gcc_assert (!n_useless_values);
fa49fd0f
RK
690}
691
b5b8b0ac
AO
692/* Arrange for a value to not be removed from the hash table even if
693 it becomes useless. */
694
695void
696cselib_preserve_value (cselib_val *v)
697{
698 PRESERVED_VALUE_P (v->val_rtx) = 1;
699}
700
701/* Test whether a value is preserved. */
702
703bool
704cselib_preserved_value_p (cselib_val *v)
705{
706 return PRESERVED_VALUE_P (v->val_rtx);
707}
708
457eeaae
JJ
709/* Arrange for a REG value to be assumed constant through the whole function,
710 never invalidated and preserved across cselib_reset_table calls. */
711
712void
9de9cbaf 713cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
457eeaae
JJ
714{
715 if (cselib_preserve_constants
716 && v->locs
717 && REG_P (v->locs->loc))
9de9cbaf
JJ
718 {
719 cfa_base_preserved_val = v;
720 cfa_base_preserved_regno = regno;
721 }
457eeaae
JJ
722}
723
b5b8b0ac
AO
724/* Clean all non-constant expressions in the hash table, but retain
725 their values. */
726
727void
0de3e43f 728cselib_preserve_only_values (void)
b5b8b0ac
AO
729{
730 int i;
731
b5b8b0ac
AO
732 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
733 cselib_invalidate_regno (i, reg_raw_mode[i]);
734
735 cselib_invalidate_mem (callmem);
736
737 remove_useless_values ();
738
739 gcc_assert (first_containing_mem == &dummy_val);
740}
741
60fa6660
AO
742/* Return the mode in which a register was last set. If X is not a
743 register, return its mode. If the mode in which the register was
744 set is not known, or the value was already clobbered, return
745 VOIDmode. */
746
747enum machine_mode
4f588890 748cselib_reg_set_mode (const_rtx x)
60fa6660 749{
f8cfc6aa 750 if (!REG_P (x))
60fa6660
AO
751 return GET_MODE (x);
752
753 if (REG_VALUES (REGNO (x)) == NULL
754 || REG_VALUES (REGNO (x))->elt == NULL)
755 return VOIDmode;
756
757bbef8 757 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
60fa6660
AO
758}
759
fa49fd0f
RK
760/* Return nonzero if we can prove that X and Y contain the same value, taking
761 our gathered information into account. */
762
763int
7080f735 764rtx_equal_for_cselib_p (rtx x, rtx y)
4deef538
AO
765{
766 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
767}
768
769/* If x is a PLUS or an autoinc operation, expand the operation,
770 storing the offset, if any, in *OFF. */
771
772static rtx
773autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
774{
775 switch (GET_CODE (x))
776 {
777 case PLUS:
778 *off = XEXP (x, 1);
779 return XEXP (x, 0);
780
781 case PRE_DEC:
782 if (memmode == VOIDmode)
783 return x;
784
785 *off = GEN_INT (-GET_MODE_SIZE (memmode));
786 return XEXP (x, 0);
787 break;
788
789 case PRE_INC:
790 if (memmode == VOIDmode)
791 return x;
792
793 *off = GEN_INT (GET_MODE_SIZE (memmode));
794 return XEXP (x, 0);
795
796 case PRE_MODIFY:
797 return XEXP (x, 1);
798
799 case POST_DEC:
800 case POST_INC:
801 case POST_MODIFY:
802 return XEXP (x, 0);
803
804 default:
805 return x;
806 }
807}
808
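/* [Editorial example, not part of the original source: what autoinc_split
   above produces for a pre-decrement address inside an SImode MEM
   (memmode == SImode, GET_MODE_SIZE == 4):

     autoinc_split ((pre_dec (reg R)), &off, SImode)
       returns (reg R) and sets *off = (const_int -4)

   so (mem:SI (pre_dec (reg R))) compares equal to, and hashes like,
   (mem:SI (plus (reg R) (const_int -4))), matching the PRE_DEC case of
   cselib_hash_rtx further below.  ] */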
809/* Return nonzero if we can prove that X and Y contain the same value,
810 taking our gathered information into account. MEMMODE holds the
811 mode of the enclosing MEM, if any, as required to deal with autoinc
812 addressing modes. If X and Y are not (known to be) part of
813 addresses, MEMMODE should be VOIDmode. */
814
815static int
816rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
fa49fd0f
RK
817{
818 enum rtx_code code;
819 const char *fmt;
820 int i;
7080f735 821
f8cfc6aa 822 if (REG_P (x) || MEM_P (x))
fa49fd0f 823 {
4deef538 824 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
fa49fd0f
RK
825
826 if (e)
757bbef8 827 x = e->val_rtx;
fa49fd0f
RK
828 }
829
f8cfc6aa 830 if (REG_P (y) || MEM_P (y))
fa49fd0f 831 {
4deef538 832 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
fa49fd0f
RK
833
834 if (e)
757bbef8 835 y = e->val_rtx;
fa49fd0f
RK
836 }
837
838 if (x == y)
839 return 1;
840
fa49fd0f
RK
841 if (GET_CODE (x) == VALUE)
842 {
6f2ffb4b 843 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
fa49fd0f
RK
844 struct elt_loc_list *l;
845
6f2ffb4b
AO
846 if (GET_CODE (y) == VALUE)
847 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
848
fa49fd0f
RK
849 for (l = e->locs; l; l = l->next)
850 {
851 rtx t = l->loc;
852
6f2ffb4b
AO
853 /* Avoid infinite recursion. We know we have the canonical
854 value, so we can just skip any values in the equivalence
855 list. */
856 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
fa49fd0f 857 continue;
4deef538 858 else if (rtx_equal_for_cselib_1 (t, y, memmode))
fa49fd0f
RK
859 return 1;
860 }
7080f735 861
fa49fd0f
RK
862 return 0;
863 }
6f2ffb4b 864 else if (GET_CODE (y) == VALUE)
fa49fd0f 865 {
6f2ffb4b 866 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
fa49fd0f
RK
867 struct elt_loc_list *l;
868
869 for (l = e->locs; l; l = l->next)
870 {
871 rtx t = l->loc;
872
6f2ffb4b 873 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
fa49fd0f 874 continue;
4deef538 875 else if (rtx_equal_for_cselib_1 (x, t, memmode))
fa49fd0f
RK
876 return 1;
877 }
7080f735 878
fa49fd0f
RK
879 return 0;
880 }
881
4deef538 882 if (GET_MODE (x) != GET_MODE (y))
fa49fd0f
RK
883 return 0;
884
4deef538
AO
885 if (GET_CODE (x) != GET_CODE (y))
886 {
887 rtx xorig = x, yorig = y;
888 rtx xoff = NULL, yoff = NULL;
889
890 x = autoinc_split (x, &xoff, memmode);
891 y = autoinc_split (y, &yoff, memmode);
892
893 if (!xoff != !yoff)
894 return 0;
895
896 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
897 return 0;
898
899 /* Don't recurse if nothing changed. */
900 if (x != xorig || y != yorig)
901 return rtx_equal_for_cselib_1 (x, y, memmode);
902
903 return 0;
904 }
905
37cf6116
RH
906 /* These won't be handled correctly by the code below. */
907 switch (GET_CODE (x))
908 {
909 case CONST_DOUBLE:
091a3ac7 910 case CONST_FIXED:
0ca5af51 911 case DEBUG_EXPR:
37cf6116
RH
912 return 0;
913
c8a27c40
JJ
914 case DEBUG_IMPLICIT_PTR:
915 return DEBUG_IMPLICIT_PTR_DECL (x)
916 == DEBUG_IMPLICIT_PTR_DECL (y);
917
ddb555ed
JJ
918 case DEBUG_PARAMETER_REF:
919 return DEBUG_PARAMETER_REF_DECL (x)
920 == DEBUG_PARAMETER_REF_DECL (y);
921
a58a8e4b 922 case ENTRY_VALUE:
2b80199f
JJ
 923 /* ENTRY_VALUEs are function invariant; it is thus undesirable to
924 use rtx_equal_for_cselib_1 to compare the operands. */
925 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
a58a8e4b 926
37cf6116
RH
927 case LABEL_REF:
928 return XEXP (x, 0) == XEXP (y, 0);
929
4deef538
AO
930 case MEM:
931 /* We have to compare any autoinc operations in the addresses
932 using this MEM's mode. */
933 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
934
37cf6116
RH
935 default:
936 break;
937 }
7080f735 938
fa49fd0f
RK
939 code = GET_CODE (x);
940 fmt = GET_RTX_FORMAT (code);
941
942 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
943 {
944 int j;
945
946 switch (fmt[i])
947 {
948 case 'w':
949 if (XWINT (x, i) != XWINT (y, i))
950 return 0;
951 break;
952
953 case 'n':
954 case 'i':
955 if (XINT (x, i) != XINT (y, i))
956 return 0;
957 break;
958
959 case 'V':
960 case 'E':
961 /* Two vectors must have the same length. */
962 if (XVECLEN (x, i) != XVECLEN (y, i))
963 return 0;
964
965 /* And the corresponding elements must match. */
966 for (j = 0; j < XVECLEN (x, i); j++)
4deef538
AO
967 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
968 XVECEXP (y, i, j), memmode))
fa49fd0f
RK
969 return 0;
970 break;
971
972 case 'e':
29c1846b
R
973 if (i == 1
974 && targetm.commutative_p (x, UNKNOWN)
4deef538
AO
975 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
976 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
29c1846b 977 return 1;
4deef538 978 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
fa49fd0f
RK
979 return 0;
980 break;
981
982 case 'S':
983 case 's':
984 if (strcmp (XSTR (x, i), XSTR (y, i)))
985 return 0;
986 break;
987
988 case 'u':
989 /* These are just backpointers, so they don't matter. */
990 break;
991
992 case '0':
993 case 't':
994 break;
995
996 /* It is believed that rtx's at this level will never
997 contain anything but integers and other rtx's,
998 except for within LABEL_REFs and SYMBOL_REFs. */
999 default:
341c100f 1000 gcc_unreachable ();
fa49fd0f
RK
1001 }
1002 }
1003 return 1;
1004}
1005
3af4ba41
RS
1006/* We need to pass down the mode of constants through the hash table
1007 functions. For that purpose, wrap them in a CONST of the appropriate
1008 mode. */
1009static rtx
1010wrap_constant (enum machine_mode mode, rtx x)
1011{
48175537
KZ
1012 if (!CONST_INT_P (x)
1013 && GET_CODE (x) != CONST_FIXED
1014 && !CONST_DOUBLE_AS_INT_P (x))
3af4ba41
RS
1015 return x;
1016 gcc_assert (mode != VOIDmode);
1017 return gen_rtx_CONST (mode, x);
1018}
1019
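/* [Editorial note, not part of the original source: why the wrapping above
   is needed.  A CONST_INT carries no mode, so an SImode zero and a DImode
   zero would otherwise be indistinguishable to the hash-table equality
   test.  For example

     wrap_constant (SImode, (const_int 0))  =>  (const:SI (const_int 0))

   and entry_and_rtx_equal_p strips such CONST wrappers again before
   comparing locations.  ] */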
fa49fd0f
RK
1020/* Hash an rtx. Return 0 if we couldn't hash the rtx.
1021 For registers and memory locations, we look up their cselib_val structure
1022 and return its VALUE element.
1023 Possible reasons for return 0 are: the object is volatile, or we couldn't
1024 find a register or memory location in the table and CREATE is zero. If
1025 CREATE is nonzero, table elts are created for regs and mem.
29c1846b
R
1026 N.B. this hash function returns the same hash value for RTXes that
1027 differ only in the order of operands, thus it is suitable for comparisons
1028 that take commutativity into account.
1029 If we wanted to also support associative rules, we'd have to use a different
1030 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
4deef538
AO
1031 MEMMODE indicates the mode of an enclosing MEM, and it's only
1032 used to compute autoinc values.
29c1846b
R
1033 We used to have a MODE argument for hashing for CONST_INTs, but that
1034 didn't make sense, since it caused spurious hash differences between
1035 (set (reg:SI 1) (const_int))
1036 (plus:SI (reg:SI 2) (reg:SI 1))
1037 and
1038 (plus:SI (reg:SI 2) (const_int))
1039 If the mode is important in any context, it must be checked specifically
1040 in a comparison anyway, since relying on hash differences is unsafe. */
fa49fd0f
RK
1041
1042static unsigned int
4deef538 1043cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
fa49fd0f
RK
1044{
1045 cselib_val *e;
1046 int i, j;
1047 enum rtx_code code;
1048 const char *fmt;
1049 unsigned int hash = 0;
1050
fa49fd0f
RK
1051 code = GET_CODE (x);
1052 hash += (unsigned) code + (unsigned) GET_MODE (x);
1053
1054 switch (code)
1055 {
7483eef8
AO
1056 case VALUE:
1057 e = CSELIB_VAL_PTR (x);
1058 return e->hash;
1059
fa49fd0f
RK
1060 case MEM:
1061 case REG:
4deef538 1062 e = cselib_lookup (x, GET_MODE (x), create, memmode);
fa49fd0f
RK
1063 if (! e)
1064 return 0;
1065
5440c0e7 1066 return e->hash;
fa49fd0f 1067
0ca5af51 1068 case DEBUG_EXPR:
e4fb38bd
JJ
1069 hash += ((unsigned) DEBUG_EXPR << 7)
1070 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
0ca5af51
AO
1071 return hash ? hash : (unsigned int) DEBUG_EXPR;
1072
c8a27c40
JJ
1073 case DEBUG_IMPLICIT_PTR:
1074 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1075 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1076 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1077
ddb555ed
JJ
1078 case DEBUG_PARAMETER_REF:
1079 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1080 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1081 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1082
a58a8e4b 1083 case ENTRY_VALUE:
2b80199f
JJ
1084 /* ENTRY_VALUEs are function invariant, thus try to avoid
1085 recursing on argument if ENTRY_VALUE is one of the
1086 forms emitted by expand_debug_expr, otherwise
1087 ENTRY_VALUE hash would depend on the current value
1088 in some register or memory. */
1089 if (REG_P (ENTRY_VALUE_EXP (x)))
1090 hash += (unsigned int) REG
1091 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1092 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1093 else if (MEM_P (ENTRY_VALUE_EXP (x))
1094 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1095 hash += (unsigned int) MEM
1096 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1097 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1098 else
1099 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
a58a8e4b
JJ
1100 return hash ? hash : (unsigned int) ENTRY_VALUE;
1101
fa49fd0f 1102 case CONST_INT:
29c1846b 1103 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
dc76f41c 1104 return hash ? hash : (unsigned int) CONST_INT;
fa49fd0f
RK
1105
1106 case CONST_DOUBLE:
1107 /* This is like the general case, except that it only counts
1108 the integers representing the constant. */
1109 hash += (unsigned) code + (unsigned) GET_MODE (x);
1110 if (GET_MODE (x) != VOIDmode)
46b33600 1111 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
fa49fd0f
RK
1112 else
1113 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1114 + (unsigned) CONST_DOUBLE_HIGH (x));
dc76f41c 1115 return hash ? hash : (unsigned int) CONST_DOUBLE;
fa49fd0f 1116
091a3ac7
CF
1117 case CONST_FIXED:
1118 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1119 hash += fixed_hash (CONST_FIXED_VALUE (x));
1120 return hash ? hash : (unsigned int) CONST_FIXED;
1121
69ef87e2
AH
1122 case CONST_VECTOR:
1123 {
1124 int units;
1125 rtx elt;
1126
1127 units = CONST_VECTOR_NUNITS (x);
1128
1129 for (i = 0; i < units; ++i)
1130 {
1131 elt = CONST_VECTOR_ELT (x, i);
4deef538 1132 hash += cselib_hash_rtx (elt, 0, memmode);
69ef87e2
AH
1133 }
1134
1135 return hash;
1136 }
1137
fa49fd0f
RK
1138 /* Assume there is only one rtx object for any given label. */
1139 case LABEL_REF:
4c6669c2
RS
1140 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1141 differences and differences between each stage's debugging dumps. */
1142 hash += (((unsigned int) LABEL_REF << 7)
1143 + CODE_LABEL_NUMBER (XEXP (x, 0)));
dc76f41c 1144 return hash ? hash : (unsigned int) LABEL_REF;
fa49fd0f
RK
1145
1146 case SYMBOL_REF:
4c6669c2
RS
1147 {
1148 /* Don't hash on the symbol's address to avoid bootstrap differences.
1149 Different hash values may cause expressions to be recorded in
1150 different orders and thus different registers to be used in the
1151 final assembler. This also avoids differences in the dump files
1152 between various stages. */
1153 unsigned int h = 0;
1154 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1155
1156 while (*p)
1157 h += (h << 7) + *p++; /* ??? revisit */
1158
1159 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1160 return hash ? hash : (unsigned int) SYMBOL_REF;
1161 }
fa49fd0f
RK
1162
1163 case PRE_DEC:
1164 case PRE_INC:
4deef538
AO
1165 /* We can't compute these without knowing the MEM mode. */
1166 gcc_assert (memmode != VOIDmode);
1167 i = GET_MODE_SIZE (memmode);
1168 if (code == PRE_DEC)
1169 i = -i;
1170 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1171 like (mem:MEMMODE (plus (reg) (const_int I))). */
1172 hash += (unsigned) PLUS - (unsigned)code
1173 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1174 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1175 return hash ? hash : 1 + (unsigned) PLUS;
1176
1177 case PRE_MODIFY:
1178 gcc_assert (memmode != VOIDmode);
1179 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1180
fa49fd0f
RK
1181 case POST_DEC:
1182 case POST_INC:
1183 case POST_MODIFY:
4deef538
AO
1184 gcc_assert (memmode != VOIDmode);
1185 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1186
fa49fd0f
RK
1187 case PC:
1188 case CC0:
1189 case CALL:
1190 case UNSPEC_VOLATILE:
1191 return 0;
1192
1193 case ASM_OPERANDS:
1194 if (MEM_VOLATILE_P (x))
1195 return 0;
1196
1197 break;
7080f735 1198
fa49fd0f
RK
1199 default:
1200 break;
1201 }
1202
1203 i = GET_RTX_LENGTH (code) - 1;
1204 fmt = GET_RTX_FORMAT (code);
1205 for (; i >= 0; i--)
1206 {
341c100f 1207 switch (fmt[i])
fa49fd0f 1208 {
341c100f 1209 case 'e':
fa49fd0f 1210 {
341c100f 1211 rtx tem = XEXP (x, i);
4deef538 1212 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
b8698a0f 1213
fa49fd0f
RK
1214 if (tem_hash == 0)
1215 return 0;
b8698a0f 1216
fa49fd0f
RK
1217 hash += tem_hash;
1218 }
341c100f
NS
1219 break;
1220 case 'E':
1221 for (j = 0; j < XVECLEN (x, i); j++)
1222 {
1223 unsigned int tem_hash
4deef538 1224 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
b8698a0f 1225
341c100f
NS
1226 if (tem_hash == 0)
1227 return 0;
b8698a0f 1228
341c100f
NS
1229 hash += tem_hash;
1230 }
1231 break;
fa49fd0f 1232
341c100f
NS
1233 case 's':
1234 {
1235 const unsigned char *p = (const unsigned char *) XSTR (x, i);
b8698a0f 1236
341c100f
NS
1237 if (p)
1238 while (*p)
1239 hash += *p++;
1240 break;
1241 }
b8698a0f 1242
341c100f
NS
1243 case 'i':
1244 hash += XINT (x, i);
1245 break;
1246
1247 case '0':
1248 case 't':
1249 /* unused */
1250 break;
b8698a0f 1251
341c100f
NS
1252 default:
1253 gcc_unreachable ();
fa49fd0f 1254 }
fa49fd0f
RK
1255 }
1256
dc76f41c 1257 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
fa49fd0f
RK
1258}
1259
1260/* Create a new value structure for VALUE and initialize it. The mode of the
1261 value is MODE. */
1262
6a59927d 1263static inline cselib_val *
5440c0e7 1264new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
fa49fd0f 1265{
f883e0a7 1266 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
fa49fd0f 1267
5440c0e7
AO
1268 gcc_assert (hash);
1269 gcc_assert (next_uid);
fa49fd0f 1270
5440c0e7
AO
1271 e->hash = hash;
1272 e->uid = next_uid++;
d67fb775
SB
1273 /* We use an alloc pool to allocate this RTL construct because it
1274 accounts for about 8% of the overall memory usage. We know
1275 precisely when we can have VALUE RTXen (when cselib is active)
daa956d0 1276 so we don't need to put them in garbage collected memory.
d67fb775 1277 ??? Why should a VALUE be an RTX in the first place? */
f883e0a7 1278 e->val_rtx = (rtx) pool_alloc (value_pool);
757bbef8
SB
1279 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1280 PUT_CODE (e->val_rtx, VALUE);
1281 PUT_MODE (e->val_rtx, mode);
1282 CSELIB_VAL_PTR (e->val_rtx) = e;
fa49fd0f
RK
1283 e->addr_list = 0;
1284 e->locs = 0;
7101fb18 1285 e->next_containing_mem = 0;
b5b8b0ac 1286
4a3c9687 1287 if (dump_file && (dump_flags & TDF_CSELIB))
b5b8b0ac 1288 {
5440c0e7 1289 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
b5b8b0ac
AO
1290 if (flag_dump_noaddr || flag_dump_unnumbered)
1291 fputs ("# ", dump_file);
1292 else
1293 fprintf (dump_file, "%p ", (void*)e);
1294 print_rtl_single (dump_file, x);
1295 fputc ('\n', dump_file);
1296 }
1297
fa49fd0f
RK
1298 return e;
1299}
1300
1301/* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1302 contains the data at this address. X is a MEM that represents the
1303 value. Update the two value structures to represent this situation. */
1304
1305static void
7080f735 1306add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
fa49fd0f 1307{
fa49fd0f
RK
1308 struct elt_loc_list *l;
1309
faead9f7 1310 addr_elt = canonical_cselib_val (addr_elt);
a4f436ff
JJ
1311 mem_elt = canonical_cselib_val (mem_elt);
1312
fa49fd0f
RK
1313 /* Avoid duplicates. */
1314 for (l = mem_elt->locs; l; l = l->next)
3c0cb5de 1315 if (MEM_P (l->loc)
fa49fd0f 1316 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
5847e8da
AO
1317 {
1318 promote_debug_loc (l);
1319 return;
1320 }
fa49fd0f 1321
fa49fd0f 1322 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
6f2ffb4b
AO
1323 new_elt_loc_list (mem_elt,
1324 replace_equiv_address_nv (x, addr_elt->val_rtx));
7101fb18
JH
1325 if (mem_elt->next_containing_mem == NULL)
1326 {
1327 mem_elt->next_containing_mem = first_containing_mem;
1328 first_containing_mem = mem_elt;
1329 }
fa49fd0f
RK
1330}
1331
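/* [Editorial sketch, not part of the original source: the cross links set
   up by add_mem_for_addr above for X = (mem:SI (reg R)), with ADDR_ELT the
   value of (reg R) and MEM_ELT the value of the load itself.

     ADDR_ELT->addr_list  gets MEM_ELT prepended
     MEM_ELT->locs        gets (mem:SI (value ADDR_ELT)) prepended
     MEM_ELT              is chained onto first_containing_mem if it is not
                          already on that list.  ] */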
1332/* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1333 If CREATE, make a new one if we haven't seen it before. */
1334
1335static cselib_val *
7080f735 1336cselib_lookup_mem (rtx x, int create)
fa49fd0f
RK
1337{
1338 enum machine_mode mode = GET_MODE (x);
4deef538 1339 enum machine_mode addr_mode;
fa49fd0f
RK
1340 void **slot;
1341 cselib_val *addr;
1342 cselib_val *mem_elt;
1343 struct elt_list *l;
1344
1345 if (MEM_VOLATILE_P (x) || mode == BLKmode
463301c3 1346 || !cselib_record_memory
fa49fd0f
RK
1347 || (FLOAT_MODE_P (mode) && flag_float_store))
1348 return 0;
1349
4deef538
AO
1350 addr_mode = GET_MODE (XEXP (x, 0));
1351 if (addr_mode == VOIDmode)
1352 addr_mode = Pmode;
1353
fa49fd0f 1354 /* Look up the value for the address. */
4deef538 1355 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
fa49fd0f
RK
1356 if (! addr)
1357 return 0;
1358
faead9f7 1359 addr = canonical_cselib_val (addr);
fa49fd0f
RK
1360 /* Find a value that describes a value of our mode at that address. */
1361 for (l = addr->addr_list; l; l = l->next)
757bbef8 1362 if (GET_MODE (l->elt->val_rtx) == mode)
5847e8da
AO
1363 {
1364 promote_debug_loc (l->elt->locs);
1365 return l->elt;
1366 }
fa49fd0f
RK
1367
1368 if (! create)
1369 return 0;
1370
5440c0e7 1371 mem_elt = new_cselib_val (next_uid, mode, x);
fa49fd0f 1372 add_mem_for_addr (addr, mem_elt, x);
4deef538
AO
1373 slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
1374 INSERT, mode);
fa49fd0f
RK
1375 *slot = mem_elt;
1376 return mem_elt;
1377}
1378
073a8998 1379/* Search through the possible substitutions in P. We prefer a non-reg
6fb5fa3c
DB
 1380 substitution because this allows us to expand the tree further. If
 1381 we find just a reg, take the lowest regno. There may be several
 1382 non-reg results; we just take the first one because they will all
 1383 expand to the same place. */
1384
b8698a0f 1385static rtx
b5b8b0ac
AO
1386expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1387 int max_depth)
6fb5fa3c
DB
1388{
1389 rtx reg_result = NULL;
1390 unsigned int regno = UINT_MAX;
1391 struct elt_loc_list *p_in = p;
1392
67b977ad 1393 for (; p; p = p->next)
6fb5fa3c 1394 {
67b977ad
JJ
1395 /* Return these right away to avoid returning stack pointer based
1396 expressions for frame pointer and vice versa, which is something
1397 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1398 for more details. */
1399 if (REG_P (p->loc)
1400 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1401 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1402 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1403 || REGNO (p->loc) == cfa_base_preserved_regno))
1404 return p->loc;
6fb5fa3c
DB
 1405 /* Avoid infinite recursion trying to expand a reg into
 1406 the same reg. */
b8698a0f
L
1407 if ((REG_P (p->loc))
1408 && (REGNO (p->loc) < regno)
b5b8b0ac 1409 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
6fb5fa3c
DB
1410 {
1411 reg_result = p->loc;
1412 regno = REGNO (p->loc);
1413 }
1414 /* Avoid infinite recursion and do not try to expand the
1415 value. */
b8698a0f 1416 else if (GET_CODE (p->loc) == VALUE
6fb5fa3c
DB
1417 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1418 continue;
1419 else if (!REG_P (p->loc))
1420 {
8dd5516b 1421 rtx result, note;
4a3c9687 1422 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1423 {
1424 print_inline_rtx (dump_file, p->loc, 0);
1425 fprintf (dump_file, "\n");
1426 }
8dd5516b
JJ
1427 if (GET_CODE (p->loc) == LO_SUM
1428 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1429 && p->setting_insn
1430 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1431 && XEXP (note, 0) == XEXP (p->loc, 1))
1432 return XEXP (p->loc, 1);
b5b8b0ac 1433 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
6fb5fa3c
DB
1434 if (result)
1435 return result;
1436 }
b8698a0f 1437
6fb5fa3c 1438 }
b8698a0f 1439
6fb5fa3c
DB
1440 if (regno != UINT_MAX)
1441 {
1442 rtx result;
4a3c9687 1443 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1444 fprintf (dump_file, "r%d\n", regno);
1445
b5b8b0ac 1446 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
6fb5fa3c
DB
1447 if (result)
1448 return result;
1449 }
1450
4a3c9687 1451 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1452 {
1453 if (reg_result)
1454 {
1455 print_inline_rtx (dump_file, reg_result, 0);
1456 fprintf (dump_file, "\n");
1457 }
b8698a0f 1458 else
6fb5fa3c
DB
1459 fprintf (dump_file, "NULL\n");
1460 }
1461 return reg_result;
1462}
1463
1464
1465/* Forward substitute and expand an expression out to its roots.
 1466 This is the opposite of common subexpression elimination. Because local value
 1467 numbering is such a weak optimization, the expanded expression is
 1468 pretty much unique (not from a pointer equals point of view but
b8698a0f 1469 from a tree shape point of view).
6fb5fa3c
DB
1470
1471 This function returns NULL if the expansion fails. The expansion
1472 will fail if there is no value number for one of the operands or if
1473 one of the operands has been overwritten between the current insn
1474 and the beginning of the basic block. For instance x has no
1475 expansion in:
1476
1477 r1 <- r1 + 3
1478 x <- r1 + 8
1479
1480 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1481 It is clear on return. */
1482
1483rtx
1484cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
b5b8b0ac
AO
1485{
1486 struct expand_value_data evd;
1487
1488 evd.regs_active = regs_active;
1489 evd.callback = NULL;
1490 evd.callback_arg = NULL;
864ddef7 1491 evd.dummy = false;
b5b8b0ac
AO
1492
1493 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1494}
1495
1496/* Same as cselib_expand_value_rtx, but using a callback to try to
0b7e34d7
AO
1497 resolve some expressions. The CB function should return ORIG if it
1498 can't or does not want to deal with a certain RTX. Any other
1499 return value, including NULL, will be used as the expansion for
1500 VALUE, without any further changes. */
b5b8b0ac
AO
1501
1502rtx
1503cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1504 cselib_expand_callback cb, void *data)
1505{
1506 struct expand_value_data evd;
1507
1508 evd.regs_active = regs_active;
1509 evd.callback = cb;
1510 evd.callback_arg = data;
864ddef7 1511 evd.dummy = false;
b5b8b0ac
AO
1512
1513 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1514}
1515
864ddef7
JJ
1516/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1517 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1518 would return NULL or non-NULL, without allocating new rtx. */
1519
1520bool
1521cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1522 cselib_expand_callback cb, void *data)
1523{
1524 struct expand_value_data evd;
1525
1526 evd.regs_active = regs_active;
1527 evd.callback = cb;
1528 evd.callback_arg = data;
1529 evd.dummy = true;
1530
1531 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1532}
1533
0b7e34d7
AO
1534/* Internal implementation of cselib_expand_value_rtx and
1535 cselib_expand_value_rtx_cb. */
1536
b5b8b0ac
AO
1537static rtx
1538cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1539 int max_depth)
6fb5fa3c
DB
1540{
1541 rtx copy, scopy;
1542 int i, j;
1543 RTX_CODE code;
1544 const char *format_ptr;
8dd5516b 1545 enum machine_mode mode;
6fb5fa3c
DB
1546
1547 code = GET_CODE (orig);
1548
 1549 /* For the context of dse, if we end up expanding into a huge tree, we
1550 will not have a useful address, so we might as well just give up
1551 quickly. */
1552 if (max_depth <= 0)
1553 return NULL;
1554
1555 switch (code)
1556 {
1557 case REG:
1558 {
1559 struct elt_list *l = REG_VALUES (REGNO (orig));
1560
1561 if (l && l->elt == NULL)
1562 l = l->next;
1563 for (; l; l = l->next)
1564 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1565 {
1566 rtx result;
5a9fbcf1 1567 unsigned regno = REGNO (orig);
b8698a0f 1568
6fb5fa3c 1569 /* The only thing that we are not willing to do (this
6ed3da00 1570 is a requirement of dse, and if other potential uses
6fb5fa3c
DB
1571 need this function we should add a parm to control
1572 it) is that we will not substitute the
1573 STACK_POINTER_REGNUM, FRAME_POINTER or the
1574 HARD_FRAME_POINTER.
1575
cea618ac 1576 These expansions confuse the code that notices that
6fb5fa3c
DB
1577 stores into the frame go dead at the end of the
 1578 function and that the frame is not affected by calls
1579 to subroutines. If you allow the
1580 STACK_POINTER_REGNUM substitution, then dse will
1581 think that parameter pushing also goes dead which is
1582 wrong. If you allow the FRAME_POINTER or the
1583 HARD_FRAME_POINTER then you lose the opportunity to
1584 make the frame assumptions. */
1585 if (regno == STACK_POINTER_REGNUM
1586 || regno == FRAME_POINTER_REGNUM
5a9fbcf1
AO
1587 || regno == HARD_FRAME_POINTER_REGNUM
1588 || regno == cfa_base_preserved_regno)
6fb5fa3c
DB
1589 return orig;
1590
b5b8b0ac 1591 bitmap_set_bit (evd->regs_active, regno);
6fb5fa3c 1592
4a3c9687 1593 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1594 fprintf (dump_file, "expanding: r%d into: ", regno);
1595
b5b8b0ac
AO
1596 result = expand_loc (l->elt->locs, evd, max_depth);
1597 bitmap_clear_bit (evd->regs_active, regno);
6fb5fa3c
DB
1598
1599 if (result)
1600 return result;
b8698a0f 1601 else
6fb5fa3c
DB
1602 return orig;
1603 }
1604 }
b8698a0f 1605
d8116890 1606 CASE_CONST_ANY:
6fb5fa3c
DB
1607 case SYMBOL_REF:
1608 case CODE_LABEL:
1609 case PC:
1610 case CC0:
1611 case SCRATCH:
 1612 /* SCRATCH rtxes must be shared because they represent distinct values. */
1613 return orig;
1614 case CLOBBER:
1615 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1616 return orig;
1617 break;
1618
1619 case CONST:
1620 if (shared_const_p (orig))
1621 return orig;
1622 break;
1623
8dd5516b 1624 case SUBREG:
6fb5fa3c 1625 {
0b7e34d7
AO
1626 rtx subreg;
1627
1628 if (evd->callback)
1629 {
1630 subreg = evd->callback (orig, evd->regs_active, max_depth,
1631 evd->callback_arg);
1632 if (subreg != orig)
1633 return subreg;
1634 }
1635
1636 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1637 max_depth - 1);
8dd5516b
JJ
1638 if (!subreg)
1639 return NULL;
1640 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1641 GET_MODE (SUBREG_REG (orig)),
1642 SUBREG_BYTE (orig));
0b7e34d7
AO
1643 if (scopy == NULL
1644 || (GET_CODE (scopy) == SUBREG
1645 && !REG_P (SUBREG_REG (scopy))
1646 && !MEM_P (SUBREG_REG (scopy))))
1647 return NULL;
1648
8dd5516b 1649 return scopy;
6fb5fa3c 1650 }
8dd5516b
JJ
1651
1652 case VALUE:
b5b8b0ac
AO
1653 {
1654 rtx result;
0b7e34d7 1655
4a3c9687 1656 if (dump_file && (dump_flags & TDF_CSELIB))
b5b8b0ac
AO
1657 {
1658 fputs ("\nexpanding ", dump_file);
1659 print_rtl_single (dump_file, orig);
1660 fputs (" into...", dump_file);
1661 }
8dd5516b 1662
0b7e34d7 1663 if (evd->callback)
b5b8b0ac
AO
1664 {
1665 result = evd->callback (orig, evd->regs_active, max_depth,
1666 evd->callback_arg);
0b7e34d7
AO
1667
1668 if (result != orig)
1669 return result;
b5b8b0ac 1670 }
8dd5516b 1671
0b7e34d7 1672 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
b5b8b0ac
AO
1673 return result;
1674 }
0ca5af51
AO
1675
1676 case DEBUG_EXPR:
1677 if (evd->callback)
1678 return evd->callback (orig, evd->regs_active, max_depth,
1679 evd->callback_arg);
1680 return orig;
1681
6fb5fa3c
DB
1682 default:
1683 break;
1684 }
1685
1686 /* Copy the various flags, fields, and other information. We assume
1687 that all fields need copying, and then clear the fields that should
1688 not be copied. That is the sensible default behavior, and forces
1689 us to explicitly document why we are *not* copying a flag. */
864ddef7
JJ
1690 if (evd->dummy)
1691 copy = NULL;
1692 else
1693 copy = shallow_copy_rtx (orig);
6fb5fa3c 1694
8dd5516b 1695 format_ptr = GET_RTX_FORMAT (code);
6fb5fa3c 1696
8dd5516b 1697 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6fb5fa3c
DB
1698 switch (*format_ptr++)
1699 {
1700 case 'e':
1701 if (XEXP (orig, i) != NULL)
1702 {
b5b8b0ac
AO
1703 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1704 max_depth - 1);
6fb5fa3c
DB
1705 if (!result)
1706 return NULL;
864ddef7
JJ
1707 if (copy)
1708 XEXP (copy, i) = result;
6fb5fa3c
DB
1709 }
1710 break;
1711
1712 case 'E':
1713 case 'V':
1714 if (XVEC (orig, i) != NULL)
1715 {
1716 if (copy)
1717 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1718 for (j = 0; j < XVECLEN (orig, i); j++)
6fb5fa3c 1719 {
1720 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1721 evd, max_depth - 1);
1722 if (!result)
1723 return NULL;
1724 if (copy)
1725 XVECEXP (copy, i, j) = result;
1726 }
1727 }
1728 break;
1729
1730 case 't':
1731 case 'w':
1732 case 'i':
1733 case 's':
1734 case 'S':
1735 case 'T':
1736 case 'u':
1737 case 'B':
1738 case '0':
1739 /* These are left unchanged. */
1740 break;
1741
1742 default:
1743 gcc_unreachable ();
1744 }
1745
1746 if (evd->dummy)
1747 return orig;
1748
1749 mode = GET_MODE (copy);
1750 /* If an operand has been simplified into CONST_INT, which doesn't
1751 have a mode and the mode isn't derivable from whole rtx's mode,
1752 try simplify_*_operation first with mode from original's operand
1753 and as a fallback wrap CONST_INT into gen_rtx_CONST. */
1754 scopy = copy;
1755 switch (GET_RTX_CLASS (code))
1756 {
1757 case RTX_UNARY:
1758 if (CONST_INT_P (XEXP (copy, 0))
1759 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1760 {
1761 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1762 GET_MODE (XEXP (orig, 0)));
1763 if (scopy)
1764 return scopy;
1765 }
1766 break;
1767 case RTX_COMM_ARITH:
1768 case RTX_BIN_ARITH:
1769 /* These expressions can derive operand modes from the whole rtx's mode. */
1770 break;
1771 case RTX_TERNARY:
1772 case RTX_BITFIELD_OPS:
1773 if (CONST_INT_P (XEXP (copy, 0))
1774 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1775 {
1776 scopy = simplify_ternary_operation (code, mode,
1777 GET_MODE (XEXP (orig, 0)),
1778 XEXP (copy, 0), XEXP (copy, 1),
1779 XEXP (copy, 2));
1780 if (scopy)
1781 return scopy;
1782 }
1783 break;
1784 case RTX_COMPARE:
1785 case RTX_COMM_COMPARE:
1786 if (CONST_INT_P (XEXP (copy, 0))
1787 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1788 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1789 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1790 {
1791 scopy = simplify_relational_operation (code, mode,
1792 (GET_MODE (XEXP (orig, 0))
1793 != VOIDmode)
1794 ? GET_MODE (XEXP (orig, 0))
1795 : GET_MODE (XEXP (orig, 1)),
1796 XEXP (copy, 0),
1797 XEXP (copy, 1));
1798 if (scopy)
1799 return scopy;
1800 }
1801 break;
1802 default:
1803 break;
1804 }
1805 scopy = simplify_rtx (copy);
1806 if (scopy)
3af4ba41 1807 return scopy;
1808 return copy;
1809}
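
/* Illustrative sketch, not part of the original file: a client such as a
   debug-dumping routine might use the public wrapper cselib_expand_value_rtx
   (declared in cselib.h) to turn a VALUE into a concrete expression.  The
   helper name, the dump destination and the throwaway bitmap are assumptions
   made for this example.  */

static void
dump_expanded_value (FILE *out, rtx val)
{
  /* Registers currently being expanded; this prevents infinite recursion.  */
  bitmap regs_active = BITMAP_ALLOC (NULL);
  /* A depth of 5 bounds the recursion through location lists; NULL is
     returned when no expansion is found within that depth.  */
  rtx exp = cselib_expand_value_rtx (val, regs_active, 5);

  if (exp)
    print_inline_rtx (out, exp, 2);
  else
    fputs ("no expansion", out);
  BITMAP_FREE (regs_active);
}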
1810
1811/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1812 with VALUE expressions. This way, it becomes independent of changes
1813 to registers and memory.
1814 X isn't actually modified; if modifications are needed, new rtl is
1815 allocated. However, the return value can share rtl with X.
1816 If X is within a MEM, MEMMODE must be the mode of the MEM. */
fa49fd0f 1817
91700444 1818rtx
4deef538 1819cselib_subst_to_values (rtx x, enum machine_mode memmode)
1820{
1821 enum rtx_code code = GET_CODE (x);
1822 const char *fmt = GET_RTX_FORMAT (code);
1823 cselib_val *e;
1824 struct elt_list *l;
1825 rtx copy = x;
1826 int i;
1827
1828 switch (code)
1829 {
1830 case REG:
1831 l = REG_VALUES (REGNO (x));
1832 if (l && l->elt == NULL)
1833 l = l->next;
1834 for (; l; l = l->next)
1835 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1836 return l->elt->val_rtx;
fa49fd0f 1837
341c100f 1838 gcc_unreachable ();
1839
1840 case MEM:
1841 e = cselib_lookup_mem (x, 0);
1842 /* This used to happen for autoincrements, but we deal with them
1843 properly now. Remove the if stmt for the next release. */
fa49fd0f 1844 if (! e)
91700444 1845 {
4deef538 1846 /* Assign a value that doesn't match any other. */
5440c0e7 1847 e = new_cselib_val (next_uid, GET_MODE (x), x);
91700444 1848 }
757bbef8 1849 return e->val_rtx;
fa49fd0f 1850
1851 case ENTRY_VALUE:
1852 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1853 if (! e)
1854 break;
1855 return e->val_rtx;
1856
d8116890 1857 CASE_CONST_ANY:
1858 return x;
1859
4deef538 1860 case PRE_DEC:
91700444 1861 case PRE_INC:
1862 gcc_assert (memmode != VOIDmode);
1863 i = GET_MODE_SIZE (memmode);
1864 if (code == PRE_DEC)
1865 i = -i;
1866 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1867 XEXP (x, 0), i),
1868 memmode);
1869
1870 case PRE_MODIFY:
1871 gcc_assert (memmode != VOIDmode);
1872 return cselib_subst_to_values (XEXP (x, 1), memmode);
1873
91700444 1874 case POST_DEC:
4deef538 1875 case POST_INC:
91700444 1876 case POST_MODIFY:
1877 gcc_assert (memmode != VOIDmode);
1878 return cselib_subst_to_values (XEXP (x, 0), memmode);
7080f735 1879
1880 default:
1881 break;
1882 }
1883
1884 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1885 {
1886 if (fmt[i] == 'e')
1887 {
4deef538 1888 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
fa49fd0f 1889
1890 if (t != XEXP (x, i))
1891 {
1892 if (x == copy)
1893 copy = shallow_copy_rtx (x);
1894 XEXP (copy, i) = t;
1895 }
1896 }
1897 else if (fmt[i] == 'E')
1898 {
bd7960b1 1899 int j;
1900
1901 for (j = 0; j < XVECLEN (x, i); j++)
1902 {
4deef538 1903 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
fa49fd0f 1904
bd7960b1 1905 if (t != XVECEXP (x, i, j))
fa49fd0f 1906 {
1907 if (XVEC (x, i) == XVEC (copy, i))
1908 {
1909 if (x == copy)
1910 copy = shallow_copy_rtx (x);
1911 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1912 }
1913 XVECEXP (copy, i, j) = t;
fa49fd0f 1914 }
1915 }
1916 }
1917 }
1918
1919 return copy;
1920}
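
/* Illustrative sketch, not part of the original file: callers usually
   substitute VALUEs into an address before hashing or comparing it, passing
   the mode of the enclosing MEM so that autoinc side effects are resolved
   (see the PRE_/POST_ cases above).  The helper name is an assumption made
   for this example.  */

static rtx
mem_address_as_values (rtx mem)
{
  gcc_assert (MEM_P (mem));
  /* MEMMODE is the mode of the MEM whose address we are rewriting.  */
  return cselib_subst_to_values (XEXP (mem, 0), GET_MODE (mem));
}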
1921
1922/* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1923
1924rtx
1925cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
1926{
1927 rtx ret;
1928 gcc_assert (!cselib_current_insn);
1929 cselib_current_insn = insn;
1930 ret = cselib_subst_to_values (x, memmode);
1931 cselib_current_insn = NULL;
1932 return ret;
1933}
1934
1935/* Look up the rtl expression X in our tables and return the value it
1936 has. If CREATE is zero, we return NULL if we don't know the value.
1937 Otherwise, we create a new one if possible, using mode MODE if X
1938 doesn't have a mode (i.e. because it's a constant). When X is part
1939 of an address, MEMMODE should be the mode of the enclosing MEM if
1940 we're tracking autoinc expressions. */
fa49fd0f 1941
5847e8da 1942static cselib_val *
1943cselib_lookup_1 (rtx x, enum machine_mode mode,
1944 int create, enum machine_mode memmode)
1945{
1946 void **slot;
1947 cselib_val *e;
1948 unsigned int hashval;
1949
1950 if (GET_MODE (x) != VOIDmode)
1951 mode = GET_MODE (x);
1952
1953 if (GET_CODE (x) == VALUE)
1954 return CSELIB_VAL_PTR (x);
1955
f8cfc6aa 1956 if (REG_P (x))
1957 {
1958 struct elt_list *l;
1959 unsigned int i = REGNO (x);
1960
1961 l = REG_VALUES (i);
1962 if (l && l->elt == NULL)
1963 l = l->next;
1964 for (; l; l = l->next)
757bbef8 1965 if (mode == GET_MODE (l->elt->val_rtx))
1966 {
1967 promote_debug_loc (l->elt->locs);
1968 return l->elt;
1969 }
1970
1971 if (! create)
5847e8da 1972 return 0;
fa49fd0f 1973
1974 if (i < FIRST_PSEUDO_REGISTER)
1975 {
66fd46b6 1976 unsigned int n = hard_regno_nregs[i][mode];
1977
1978 if (n > max_value_regs)
1979 max_value_regs = n;
1980 }
1981
5440c0e7 1982 e = new_cselib_val (next_uid, GET_MODE (x), x);
6f2ffb4b 1983 new_elt_loc_list (e, x);
fa49fd0f 1984 if (REG_VALUES (i) == 0)
1985 {
1986 /* Maintain the invariant that the first entry of
1987 REG_VALUES, if present, must be the value used to set the
1988 register, or NULL. */
6790d1ab 1989 used_regs[n_used_regs++] = i;
1990 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1991 }
1992 else if (cselib_preserve_constants
1993 && GET_MODE_CLASS (mode) == MODE_INT)
1994 {
1995 /* During var-tracking, try harder to find equivalences
1996 for SUBREGs. If a setter sets say a DImode register
1997 and user uses that register only in SImode, add a lowpart
1998 subreg location. */
1999 struct elt_list *lwider = NULL;
2000 l = REG_VALUES (i);
2001 if (l && l->elt == NULL)
2002 l = l->next;
2003 for (; l; l = l->next)
2004 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2005 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2006 > GET_MODE_SIZE (mode)
2007 && (lwider == NULL
2008 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2009 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2010 {
2011 struct elt_loc_list *el;
2012 if (i < FIRST_PSEUDO_REGISTER
2013 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2014 continue;
2015 for (el = l->elt->locs; el; el = el->next)
2016 if (!REG_P (el->loc))
2017 break;
2018 if (el)
2019 lwider = l;
2020 }
2021 if (lwider)
2022 {
2023 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2024 GET_MODE (lwider->elt->val_rtx));
2025 if (sub)
6f2ffb4b 2026 new_elt_loc_list (e, sub);
2027 }
2028 }
60fa6660 2029 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
4deef538 2030 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
fa49fd0f 2031 *slot = e;
5847e8da 2032 return e;
2033 }
2034
3c0cb5de 2035 if (MEM_P (x))
5847e8da 2036 return cselib_lookup_mem (x, create);
fa49fd0f 2037
4deef538 2038 hashval = cselib_hash_rtx (x, create, memmode);
2039 /* Can't even create if hashing is not possible. */
2040 if (! hashval)
5847e8da 2041 return 0;
fa49fd0f 2042
2043 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
2044 create ? INSERT : NO_INSERT, memmode);
fa49fd0f 2045 if (slot == 0)
5847e8da 2046 return 0;
2047
2048 e = (cselib_val *) *slot;
2049 if (e)
5847e8da 2050 return e;
fa49fd0f 2051
b5b8b0ac 2052 e = new_cselib_val (hashval, mode, x);
2053
2054 /* We have to fill the slot before calling cselib_subst_to_values:
2055 the hash table is inconsistent until we do so, and
2056 cselib_subst_to_values will need to do lookups. */
2057 *slot = (void *) e;
6f2ffb4b 2058 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2059 return e;
2060}
2061
2062/* Wrapper for cselib_lookup, that indicates X is in INSN. */
2063
2064cselib_val *
2065cselib_lookup_from_insn (rtx x, enum machine_mode mode,
4deef538 2066 int create, enum machine_mode memmode, rtx insn)
2067{
2068 cselib_val *ret;
2069
2070 gcc_assert (!cselib_current_insn);
2071 cselib_current_insn = insn;
2072
4deef538 2073 ret = cselib_lookup (x, mode, create, memmode);
2074
2075 cselib_current_insn = NULL;
2076
2077 return ret;
2078}
2079
2080/* Wrapper for cselib_lookup_1, that logs the lookup result and
2081 maintains invariants related with debug insns. */
2082
2083cselib_val *
2084cselib_lookup (rtx x, enum machine_mode mode,
2085 int create, enum machine_mode memmode)
5847e8da 2086{
4deef538 2087 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2088
2089 /* ??? Should we return NULL if we're not to create an entry, the
2090 found loc is a debug loc and cselib_current_insn is not DEBUG?
2091 If so, we should also avoid converting val to non-DEBUG; probably
2092 easiest setting cselib_current_insn to NULL before the call
2093 above. */
2094
4a3c9687 2095 if (dump_file && (dump_flags & TDF_CSELIB))
2096 {
2097 fputs ("cselib lookup ", dump_file);
2098 print_inline_rtx (dump_file, x, 2);
2099 fprintf (dump_file, " => %u:%u\n",
2100 ret ? ret->uid : 0,
2101 ret ? ret->hash : 0);
2102 }
2103
2104 return ret;
2105}
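
/* Illustrative sketch, not part of the original file: with CREATE == 0 the
   lookup is a pure query, so a caller can ask whether two expressions are
   currently known to evaluate to the same VALUE without growing the table.
   The helper name is an assumption; the check is conservative because it
   only recognizes expressions that map to the very same cselib_val.  */

static bool
known_same_value_p (rtx a, rtx b, enum machine_mode mode)
{
  cselib_val *va = cselib_lookup (a, mode, 0, VOIDmode);
  cselib_val *vb = cselib_lookup (b, mode, 0, VOIDmode);

  return va != NULL && va == vb;
}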
2106
2107/* Invalidate any entries in reg_values that overlap REGNO. This is called
2108 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2109 is used to determine how many hard registers are being changed. If MODE
2110 is VOIDmode, then only REGNO is being changed; this is used when
2111 invalidating call clobbered registers across a call. */
2112
2113static void
7080f735 2114cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
2115{
2116 unsigned int endregno;
2117 unsigned int i;
2118
2119 /* If we see pseudos after reload, something is _wrong_. */
2120 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2121 || reg_renumber[regno] < 0);
2122
2123 /* Determine the range of registers that must be invalidated. For
2124 pseudos, only REGNO is affected. For hard regs, we must take MODE
2125 into account, and we must also invalidate lower register numbers
2126 if they contain values that overlap REGNO. */
291aac59 2127 if (regno < FIRST_PSEUDO_REGISTER)
31825e57 2128 {
341c100f 2129 gcc_assert (mode != VOIDmode);
7080f735 2130
2131 if (regno < max_value_regs)
2132 i = 0;
2133 else
2134 i = regno - max_value_regs;
fa49fd0f 2135
09e18274 2136 endregno = end_hard_regno (mode, regno);
2137 }
2138 else
2139 {
2140 i = regno;
2141 endregno = regno + 1;
2142 }
2143
2144 for (; i < endregno; i++)
2145 {
2146 struct elt_list **l = &REG_VALUES (i);
2147
2148 /* Go through all known values for this reg; if it overlaps the range
2149 we're invalidating, remove the value. */
2150 while (*l)
2151 {
2152 cselib_val *v = (*l)->elt;
2153 bool had_locs;
2154 rtx setting_insn;
2155 struct elt_loc_list **p;
2156 unsigned int this_last = i;
2157
60fa6660 2158 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
09e18274 2159 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
fa49fd0f 2160
2161 if (this_last < regno || v == NULL
2162 || (v == cfa_base_preserved_val
2163 && i == cfa_base_preserved_regno))
2164 {
2165 l = &(*l)->next;
2166 continue;
2167 }
2168
2169 /* We have an overlap. */
2170 if (*l == REG_VALUES (i))
2171 {
2172 /* Maintain the invariant that the first entry of
2173 REG_VALUES, if present, must be the value used to set
2174 the register, or NULL. This is also nice because
2175 then we won't push the same regno onto user_regs
2176 multiple times. */
2177 (*l)->elt = NULL;
2178 l = &(*l)->next;
2179 }
2180 else
2181 unchain_one_elt_list (l);
fa49fd0f 2182
2183 v = canonical_cselib_val (v);
2184
2185 had_locs = v->locs != NULL;
2186 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2187
2188 /* Now, we clear the mapping from value to reg. It must exist, so
2189 this code will crash intentionally if it doesn't. */
2190 for (p = &v->locs; ; p = &(*p)->next)
2191 {
2192 rtx x = (*p)->loc;
2193
f8cfc6aa 2194 if (REG_P (x) && REGNO (x) == i)
2195 {
2196 unchain_one_elt_loc_list (p);
2197 break;
2198 }
2199 }
2200
2201 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2202 {
2203 if (setting_insn && DEBUG_INSN_P (setting_insn))
2204 n_useless_debug_values++;
2205 else
2206 n_useless_values++;
2207 }
2208 }
2209 }
2210}
9ddb66ca 2211\f
2212/* Invalidate any locations in the table which are changed because of a
2213 store to MEM_RTX. If this is called because of a non-const call
 2214 instruction, MEM_RTX is (mem:BLK (scratch)). */
fa49fd0f 2215
7101fb18 2216static void
7080f735 2217cselib_invalidate_mem (rtx mem_rtx)
fa49fd0f 2218{
7101fb18 2219 cselib_val **vp, *v, *next;
c65ecebc 2220 int num_mems = 0;
2221 rtx mem_addr;
2222
2223 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2224 mem_rtx = canon_rtx (mem_rtx);
fa49fd0f 2225
2226 vp = &first_containing_mem;
2227 for (v = *vp; v != &dummy_val; v = next)
fa49fd0f 2228 {
2229 bool has_mem = false;
2230 struct elt_loc_list **p = &v->locs;
5847e8da
AO
2231 bool had_locs = v->locs != NULL;
2232 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
fa49fd0f 2233
7101fb18 2234 while (*p)
fa49fd0f 2235 {
2236 rtx x = (*p)->loc;
2237 cselib_val *addr;
2238 struct elt_list **mem_chain;
2239
2240 /* MEMs may occur in locations only at the top level; below
2241 that every MEM or REG is substituted by its VALUE. */
3c0cb5de 2242 if (!MEM_P (x))
fa49fd0f 2243 {
7101fb18
JH
2244 p = &(*p)->next;
2245 continue;
2246 }
c65ecebc 2247 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2248 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
2249 mem_addr, x, NULL_RTX))
2250 {
2251 has_mem = true;
c65ecebc 2252 num_mems++;
2253 p = &(*p)->next;
2254 continue;
2255 }
2256
2257 /* This one overlaps. */
2258 /* We must have a mapping from this MEM's address to the
2259 value (E). Remove that, too. */
4deef538 2260 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2261 addr = canonical_cselib_val (addr);
2262 gcc_checking_assert (v == canonical_cselib_val (v));
2263 mem_chain = &addr->addr_list;
2264 for (;;)
2265 {
2266 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2267
2268 if (canon == v)
2269 {
2270 unchain_one_elt_list (mem_chain);
2271 break;
2272 }
fa49fd0f 2273
2274 /* Record canonicalized elt. */
2275 (*mem_chain)->elt = canon;
2276
2277 mem_chain = &(*mem_chain)->next;
2278 }
fa49fd0f 2279
2280 unchain_one_elt_loc_list (p);
2281 }
fa49fd0f 2282
b5b8b0ac 2283 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2284 {
2285 if (setting_insn && DEBUG_INSN_P (setting_insn))
2286 n_useless_debug_values++;
2287 else
2288 n_useless_values++;
2289 }
fa49fd0f 2290
2291 next = v->next_containing_mem;
2292 if (has_mem)
2293 {
2294 *vp = v;
2295 vp = &(*vp)->next_containing_mem;
2296 }
2297 else
2298 v->next_containing_mem = NULL;
2299 }
2300 *vp = &dummy_val;
2301}
2302
0d87c765 2303/* Invalidate DEST, which is being assigned to or clobbered. */
fa49fd0f 2304
2305void
2306cselib_invalidate_rtx (rtx dest)
fa49fd0f 2307{
2308 while (GET_CODE (dest) == SUBREG
2309 || GET_CODE (dest) == ZERO_EXTRACT
2310 || GET_CODE (dest) == STRICT_LOW_PART)
2311 dest = XEXP (dest, 0);
2312
f8cfc6aa 2313 if (REG_P (dest))
fa49fd0f 2314 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
3c0cb5de 2315 else if (MEM_P (dest))
fa49fd0f 2316 cselib_invalidate_mem (dest);
2317}
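
/* Illustrative sketch, not part of the original file: a pass that changes a
   location behind cselib's back (outside of cselib_process_insn) can drop
   whatever is known about it directly.  The helper name, register number and
   mode are placeholders for this example.  */

static void
forget_hard_reg (unsigned int regno, enum machine_mode mode)
{
  cselib_invalidate_rtx (gen_rtx_REG (mode, regno));
}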
2318
2319/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2320
2321static void
7bc980e1 2322cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2323 void *data ATTRIBUTE_UNUSED)
2324{
2325 cselib_invalidate_rtx (dest);
2326}
2327
2328/* Record the result of a SET instruction. DEST is being set; the source
2329 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2330 describes its address. */
2331
2332static void
7080f735 2333cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
fa49fd0f 2334{
f8cfc6aa 2335 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
2336
2337 if (src_elt == 0 || side_effects_p (dest))
2338 return;
2339
2340 if (dreg >= 0)
2341 {
2342 if (dreg < FIRST_PSEUDO_REGISTER)
2343 {
66fd46b6 2344 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
2345
2346 if (n > max_value_regs)
2347 max_value_regs = n;
2348 }
2349
2350 if (REG_VALUES (dreg) == 0)
2351 {
6790d1ab 2352 used_regs[n_used_regs++] = dreg;
60fa6660
AO
2353 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2354 }
2355 else
2356 {
2357 /* The register should have been invalidated. */
2358 gcc_assert (REG_VALUES (dreg)->elt == 0);
2359 REG_VALUES (dreg)->elt = src_elt;
60fa6660
AO
2360 }
2361
b5b8b0ac 2362 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
fa49fd0f 2363 n_useless_values--;
6f2ffb4b 2364 new_elt_loc_list (src_elt, dest);
fa49fd0f 2365 }
3c0cb5de 2366 else if (MEM_P (dest) && dest_addr_elt != 0
463301c3 2367 && cselib_record_memory)
fa49fd0f 2368 {
b5b8b0ac 2369 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2370 n_useless_values--;
2371 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2372 }
2373}
2374
2375/* Make ELT and X's VALUE equivalent to each other at INSN. */
2376
2377void
2378cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
2379{
2380 cselib_val *nelt;
2381 rtx save_cselib_current_insn = cselib_current_insn;
2382
2383 gcc_checking_assert (elt);
2384 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2385 gcc_checking_assert (!side_effects_p (x));
2386
2387 cselib_current_insn = insn;
2388
2389 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2390
2391 if (nelt != elt)
2392 {
2393 cselib_any_perm_equivs = true;
2394
2395 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2396 cselib_preserve_value (nelt);
2397
2398 new_elt_loc_list (nelt, elt->val_rtx);
2399 }
2400
2401 cselib_current_insn = save_cselib_current_insn;
2402}
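
/* Illustrative sketch, not part of the original file: a typical caller first
   preserves a value and then ties it permanently to an equivalent expression,
   e.g. a known constant, at a particular insn.  The helper name and its
   parameters are assumptions made for this example.  */

static void
record_constant_equiv (cselib_val *v, rtx cst, rtx insn)
{
  if (!PRESERVED_VALUE_P (v->val_rtx))
    cselib_preserve_value (v);
  cselib_add_permanent_equiv (v, cst, insn);
}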
2403
2404/* Return TRUE if any permanent equivalences have been recorded since
2405 the table was last initialized. */
2406bool
2407cselib_have_permanent_equivalences (void)
2408{
2409 return cselib_any_perm_equivs;
2410}
2411
2412/* There is no good way to determine how many elements there can be
2413 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2414#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2415
2416struct cselib_record_autoinc_data
2417{
2418 struct cselib_set *sets;
2419 int n_sets;
2420};
2421
2422/* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2423 autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
2424
2425static int
2426cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2427 rtx dest, rtx src, rtx srcoff, void *arg)
2428{
2429 struct cselib_record_autoinc_data *data;
2430 data = (struct cselib_record_autoinc_data *)arg;
2431
2432 data->sets[data->n_sets].dest = dest;
2433
2434 if (srcoff)
2435 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2436 else
2437 data->sets[data->n_sets].src = src;
2438
2439 data->n_sets++;
2440
2441 return -1;
2442}
2443
2444/* Record the effects of any sets and autoincs in INSN. */
fa49fd0f 2445static void
7080f735 2446cselib_record_sets (rtx insn)
2447{
2448 int n_sets = 0;
2449 int i;
b5b8b0ac 2450 struct cselib_set sets[MAX_SETS];
fa49fd0f 2451 rtx body = PATTERN (insn);
b7933c21 2452 rtx cond = 0;
2453 int n_sets_before_autoinc;
2454 struct cselib_record_autoinc_data data;
fa49fd0f
RK
2455
2456 body = PATTERN (insn);
b7933c21
BS
2457 if (GET_CODE (body) == COND_EXEC)
2458 {
2459 cond = COND_EXEC_TEST (body);
2460 body = COND_EXEC_CODE (body);
2461 }
2462
2463 /* Find all sets. */
2464 if (GET_CODE (body) == SET)
2465 {
2466 sets[0].src = SET_SRC (body);
2467 sets[0].dest = SET_DEST (body);
2468 n_sets = 1;
2469 }
2470 else if (GET_CODE (body) == PARALLEL)
2471 {
2472 /* Look through the PARALLEL and record the values being
2473 set, if possible. Also handle any CLOBBERs. */
2474 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2475 {
2476 rtx x = XVECEXP (body, 0, i);
2477
2478 if (GET_CODE (x) == SET)
2479 {
2480 sets[n_sets].src = SET_SRC (x);
2481 sets[n_sets].dest = SET_DEST (x);
2482 n_sets++;
2483 }
2484 }
2485 }
2486
2487 if (n_sets == 1
2488 && MEM_P (sets[0].src)
2489 && !cselib_record_memory
2490 && MEM_READONLY_P (sets[0].src))
2491 {
2492 rtx note = find_reg_equal_equiv_note (insn);
2493
2494 if (note && CONSTANT_P (XEXP (note, 0)))
2495 sets[0].src = XEXP (note, 0);
2496 }
2497
2498 data.sets = sets;
2499 data.n_sets = n_sets_before_autoinc = n_sets;
2500 for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
2501 n_sets = data.n_sets;
2502
2503 /* Look up the values that are read. Do this before invalidating the
2504 locations that are written. */
2505 for (i = 0; i < n_sets; i++)
2506 {
2507 rtx dest = sets[i].dest;
2508
2509 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2510 the low part after invalidating any knowledge about larger modes. */
2511 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2512 sets[i].dest = dest = XEXP (dest, 0);
2513
2514 /* We don't know how to record anything but REG or MEM. */
f8cfc6aa 2515 if (REG_P (dest)
3c0cb5de 2516 || (MEM_P (dest) && cselib_record_memory))
fa49fd0f 2517 {
2518 rtx src = sets[i].src;
2519 if (cond)
be9ed5d5 2520 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
4deef538 2521 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
3c0cb5de 2522 if (MEM_P (dest))
d4ebfa65 2523 {
372d6395 2524 enum machine_mode address_mode = get_address_mode (dest);
2525
2526 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2527 address_mode, 1,
2528 GET_MODE (dest));
d4ebfa65 2529 }
2530 else
2531 sets[i].dest_addr_elt = 0;
2532 }
2533 }
2534
2535 if (cselib_record_sets_hook)
2536 cselib_record_sets_hook (insn, sets, n_sets);
2537
2538 /* Invalidate all locations written by this insn. Note that the elts we
2539 looked up in the previous loop aren't affected, just some of their
2540 locations may go away. */
0d87c765 2541 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
fa49fd0f 2542
2543 for (i = n_sets_before_autoinc; i < n_sets; i++)
2544 cselib_invalidate_rtx (sets[i].dest);
2545
2546 /* If this is an asm, look for duplicate sets. This can happen when the
2547 user uses the same value as an output multiple times. This is valid
2548 if the outputs are not actually used thereafter. Treat this case as
2549 if the value isn't actually set. We do this by smashing the destination
2550 to pc_rtx, so that we won't record the value later. */
2551 if (n_sets >= 2 && asm_noperands (body) >= 0)
2552 {
2553 for (i = 0; i < n_sets; i++)
2554 {
2555 rtx dest = sets[i].dest;
3c0cb5de 2556 if (REG_P (dest) || MEM_P (dest))
2557 {
2558 int j;
2559 for (j = i + 1; j < n_sets; j++)
2560 if (rtx_equal_p (dest, sets[j].dest))
2561 {
2562 sets[i].dest = pc_rtx;
2563 sets[j].dest = pc_rtx;
2564 }
2565 }
2566 }
2567 }
2568
2569 /* Now enter the equivalences in our tables. */
2570 for (i = 0; i < n_sets; i++)
2571 {
2572 rtx dest = sets[i].dest;
f8cfc6aa 2573 if (REG_P (dest)
3c0cb5de 2574 || (MEM_P (dest) && cselib_record_memory))
2575 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2576 }
2577}
2578
2579/* Record the effects of INSN. */
2580
2581void
7080f735 2582cselib_process_insn (rtx insn)
fa49fd0f
RK
2583{
2584 int i;
2585 rtx x;
2586
2587 cselib_current_insn = insn;
2588
2589 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
2590 if (LABEL_P (insn)
2591 || (CALL_P (insn)
570a98eb 2592 && find_reg_note (insn, REG_SETJMP, NULL))
4b4bf941 2593 || (NONJUMP_INSN_P (insn)
2594 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2595 && MEM_VOLATILE_P (PATTERN (insn))))
2596 {
5440c0e7 2597 cselib_reset_table (next_uid);
2080bd29 2598 cselib_current_insn = NULL_RTX;
2599 return;
2600 }
2601
2602 if (! INSN_P (insn))
2603 {
2080bd29 2604 cselib_current_insn = NULL_RTX;
2605 return;
2606 }
2607
2608 /* If this is a call instruction, forget anything stored in a
2609 call clobbered register, or, if this is not a const call, in
2610 memory. */
4b4bf941 2611 if (CALL_P (insn))
2612 {
2613 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7e42db17
DJ
2614 if (call_used_regs[i]
2615 || (REG_VALUES (i) && REG_VALUES (i)->elt
b8698a0f 2616 && HARD_REGNO_CALL_PART_CLOBBERED (i,
757bbef8 2617 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
291aac59 2618 cselib_invalidate_regno (i, reg_raw_mode[i]);
fa49fd0f 2619
2620 /* Since it is not clear how cselib is going to be used, be
2621 conservative here and treat looping pure or const functions
2622 as if they were regular functions. */
2623 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2624 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
fa49fd0f
RK
2625 cselib_invalidate_mem (callmem);
2626 }
2627
2628 cselib_record_sets (insn);
2629
2630 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2631 after we have processed the insn. */
4b4bf941 2632 if (CALL_P (insn))
2633 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2634 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
0d87c765 2635 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
fa49fd0f 2636
2080bd29 2637 cselib_current_insn = NULL_RTX;
fa49fd0f 2638
2639 if (n_useless_values > MAX_USELESS_VALUES
2640 /* remove_useless_values is linear in the hash table size. Avoid
9f5ed61a 2641 quadratic behavior for very large hashtables with very few
96d0cc81 2642 useless elements. */
2643 && ((unsigned int)n_useless_values
2644 > (cselib_hash_table->n_elements
2645 - cselib_hash_table->n_deleted
2646 - n_debug_values) / 4))
2647 remove_useless_values ();
2648}
2649
2650/* Initialize cselib for one pass. The caller must also call
2651 init_alias_analysis. */
2652
2653void
457eeaae 2654cselib_init (int record_what)
fa49fd0f 2655{
b8698a0f 2656 elt_list_pool = create_alloc_pool ("elt_list",
6a59927d 2657 sizeof (struct elt_list), 10);
b8698a0f 2658 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
6a59927d 2659 sizeof (struct elt_loc_list), 10);
b8698a0f 2660 cselib_val_pool = create_alloc_pool ("cselib_val_list",
6a59927d 2661 sizeof (cselib_val), 10);
aacd3885 2662 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2663 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2664 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
0f68ba3e 2665 cselib_any_perm_equivs = false;
2666
2667 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2668 see canon_true_dependence. This is only created once. */
fa49fd0f 2669 if (! callmem)
ac3768f6 2670 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2671
2672 cselib_nregs = max_reg_num ();
2673
2674 /* We preserve reg_values to allow expensive clearing of the whole thing.
2675 Reallocate it however if it happens to be too large. */
2676 if (!reg_values || reg_values_size < cselib_nregs
2677 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
e2500fed 2678 {
04695783 2679 free (reg_values);
6790d1ab
JH
 2680 /* Some space for newly emitted instructions so we don't end up
2681 reallocating in between passes. */
2682 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
5ed6ace5 2683 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
e2500fed 2684 }
5ed6ace5 2685 used_regs = XNEWVEC (unsigned int, cselib_nregs);
6790d1ab 2686 n_used_regs = 0;
2687 cselib_hash_table = htab_create (31, get_value_hash,
2688 entry_and_rtx_equal_p, NULL);
5440c0e7 2689 next_uid = 1;
2690}
2691
2692/* Called when the current user is done with cselib. */
2693
2694void
7080f735 2695cselib_finish (void)
fa49fd0f 2696{
6fb5fa3c 2697 cselib_discard_hook = NULL;
457eeaae 2698 cselib_preserve_constants = false;
0f68ba3e 2699 cselib_any_perm_equivs = false;
457eeaae 2700 cfa_base_preserved_val = NULL;
9de9cbaf 2701 cfa_base_preserved_regno = INVALID_REGNUM;
2702 free_alloc_pool (elt_list_pool);
2703 free_alloc_pool (elt_loc_list_pool);
2704 free_alloc_pool (cselib_val_pool);
23bd7a93 2705 free_alloc_pool (value_pool);
eb232f4e 2706 cselib_clear_table ();
7c514720 2707 htab_delete (cselib_hash_table);
0fc0c4c9 2708 free (used_regs);
e2500fed 2709 used_regs = 0;
7c514720 2710 cselib_hash_table = 0;
e2500fed 2711 n_useless_values = 0;
2712 n_useless_debug_values = 0;
2713 n_debug_values = 0;
5440c0e7 2714 next_uid = 0;
fa49fd0f 2715}
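
/* Illustrative sketch, not part of the original file: the usual life cycle of
   the table as seen from a client pass -- set up alias analysis, initialize
   cselib, feed every instruction of every basic block to cselib_process_insn,
   then tear both down again.  The FOR_EACH_BB/FOR_BB_INSNS walk and the
   helper name are assumptions made for this example.  */

static void
scan_function_with_cselib (void)
{
  basic_block bb;
  rtx insn;

  init_alias_analysis ();
  cselib_init (CSELIB_RECORD_MEMORY);

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        cselib_process_insn (insn);

  cselib_finish ();
  end_alias_analysis ();
}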
e2500fed 2716
2717/* Dump the cselib_val *X to FILE *info. */
2718
2719static int
2720dump_cselib_val (void **x, void *info)
2721{
2722 cselib_val *v = (cselib_val *)*x;
2723 FILE *out = (FILE *)info;
2724 bool need_lf = true;
2725
2726 print_inline_rtx (out, v->val_rtx, 0);
2727
2728 if (v->locs)
2729 {
2730 struct elt_loc_list *l = v->locs;
2731 if (need_lf)
2732 {
2733 fputc ('\n', out);
2734 need_lf = false;
2735 }
2736 fputs (" locs:", out);
2737 do
2738 {
2739 if (l->setting_insn)
2740 fprintf (out, "\n from insn %i ",
2741 INSN_UID (l->setting_insn));
2742 else
2743 fprintf (out, "\n ");
2744 print_inline_rtx (out, l->loc, 4);
2745 }
2746 while ((l = l->next));
2747 fputc ('\n', out);
2748 }
2749 else
2750 {
2751 fputs (" no locs", out);
2752 need_lf = true;
2753 }
2754
2755 if (v->addr_list)
2756 {
2757 struct elt_list *e = v->addr_list;
2758 if (need_lf)
2759 {
2760 fputc ('\n', out);
2761 need_lf = false;
2762 }
2763 fputs (" addr list:", out);
2764 do
2765 {
2766 fputs ("\n ", out);
2767 print_inline_rtx (out, e->elt->val_rtx, 2);
2768 }
2769 while ((e = e->next));
2770 fputc ('\n', out);
2771 }
2772 else
2773 {
2774 fputs (" no addrs", out);
2775 need_lf = true;
2776 }
2777
2778 if (v->next_containing_mem == &dummy_val)
2779 fputs (" last mem\n", out);
2780 else if (v->next_containing_mem)
2781 {
2782 fputs (" next mem ", out);
2783 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2784 fputc ('\n', out);
2785 }
2786 else if (need_lf)
2787 fputc ('\n', out);
2788
2789 return 1;
2790}
2791
2792/* Dump to OUT everything in the CSELIB table. */
2793
2794void
2795dump_cselib_table (FILE *out)
2796{
2797 fprintf (out, "cselib hash table:\n");
2798 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2799 if (first_containing_mem != &dummy_val)
2800 {
2801 fputs ("first mem ", out);
2802 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2803 fputc ('\n', out);
2804 }
5440c0e7 2805 fprintf (out, "next uid %i\n", next_uid);
2806}
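
/* Illustrative sketch, not part of the original file: clients normally guard
   table dumps on the same TDF_CSELIB dump flag used by the tracing code
   above.  The helper name is an assumption made for this example.  */

static void
maybe_dump_cselib_table (void)
{
  if (dump_file && (dump_flags & TDF_CSELIB))
    dump_cselib_table (dump_file);
}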
2807
e2500fed 2808#include "gt-cselib.h"