/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "function-abi.h"
#include "alias.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

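/* Flags attached to VALUE rtxes.  Each reuses an rtx flag bit that
   VALUE rtxes do not otherwise use ("unchanging", "jump" and "call"
   respectively), as checked by RTL_FLAG_CHECK1 in the definitions
   below.  */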
#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
        return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
        && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
      {
        rtx_insn *save_cselib_current_insn = cselib_current_insn;
        /* If l is so far a debug only loc, without debug stmts it
           would never be compared to x at all, so temporarily pretend
           current instruction is that DEBUG_INSN so that we don't
           promote other debug locs even for unsuccessful comparison.  */
        cselib_current_insn = l->setting_insn;
        bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
        cselib_current_insn = save_cselib_current_insn;
        if (match)
          {
            promote_debug_loc (l);
            return true;
          }
      }
    else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      return true;

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);


/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring in addr_list into canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

RK
421/* The elt_list at *PL is no longer needed. Unchain it and free its
422 storage. */
423
6a59927d 424static inline void
7080f735 425unchain_one_elt_list (struct elt_list **pl)
fa49fd0f
RK
426{
427 struct elt_list *l = *pl;
428
429 *pl = l->next;
fb0b2914 430 elt_list_pool.remove (l);
fa49fd0f
RK
431}
432
433/* Likewise for elt_loc_lists. */
434
435static void
7080f735 436unchain_one_elt_loc_list (struct elt_loc_list **pl)
fa49fd0f
RK
437{
438 struct elt_loc_list *l = *pl;
439
440 *pl = l->next;
fb0b2914 441 elt_loc_list_pool.remove (l);
fa49fd0f
RK
442}
443
444/* Likewise for cselib_vals. This also frees the addr_list associated with
445 V. */
446
447static void
7080f735 448unchain_one_value (cselib_val *v)
fa49fd0f
RK
449{
450 while (v->addr_list)
451 unchain_one_elt_list (&v->addr_list);
452
fb0b2914 453 cselib_val_pool.remove (v);
fa49fd0f
RK
454}
455
456/* Remove all entries from the hash table. Also used during
b5b8b0ac 457 initialization. */
fa49fd0f 458
eb232f4e
SB
459void
460cselib_clear_table (void)
b5b8b0ac 461{
5440c0e7 462 cselib_reset_table (1);
b5b8b0ac
AO
463}
464
0e224656
AO
465/* Return TRUE if V is a constant, a function invariant or a VALUE
466 equivalence; FALSE otherwise. */
457eeaae 467
0e224656
AO
468static bool
469invariant_or_equiv_p (cselib_val *v)
457eeaae 470{
6f2ffb4b 471 struct elt_loc_list *l;
457eeaae 472
0e224656
AO
473 if (v == cfa_base_preserved_val)
474 return true;
475
476 /* Keep VALUE equivalences around. */
477 for (l = v->locs; l; l = l->next)
478 if (GET_CODE (l->loc) == VALUE)
479 return true;
480
457eeaae
JJ
481 if (v->locs != NULL
482 && v->locs->next == NULL)
483 {
484 if (CONSTANT_P (v->locs->loc)
485 && (GET_CODE (v->locs->loc) != CONST
486 || !references_value_p (v->locs->loc, 0)))
0e224656 487 return true;
6f2ffb4b
AO
488 /* Although a debug expr may be bound to different expressions,
489 we can preserve it as if it was constant, to get unification
490 and proper merging within var-tracking. */
491 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
492 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
493 || GET_CODE (v->locs->loc) == ENTRY_VALUE
494 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
0e224656
AO
495 return true;
496
497 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
498 if (GET_CODE (v->locs->loc) == PLUS
499 && CONST_INT_P (XEXP (v->locs->loc, 1))
500 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
501 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
502 return true;
457eeaae 503 }
6f2ffb4b 504
0e224656
AO
505 return false;
506}
507
/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs (regno,
                            GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, need to preserve also the
         SP_DERIVED_VALUE_P value.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
           l; l = l->next)
        if (GET_CODE (l->loc) == PLUS
            && GET_CODE (XEXP (l->loc, 0)) == VALUE
            && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
            && CONST_INT_P (XEXP (l->loc, 1)))
          {
            if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
              {
                rtx val = cfa_base_preserved_val->val_rtx;
                rtx_insn *save_cselib_current_insn = cselib_current_insn;
                cselib_current_insn = l->setting_insn;
                new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
                                  plus_constant (Pmode, val,
                                                 -UINTVAL (XEXP (l->loc, 1))));
                cselib_current_insn = save_cselib_current_insn;
              }
            break;
          }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

bool
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
          || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return true;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return true;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return true;
    }

  return false;
}

/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
          && !PRESERVED_VALUE_P (v->val_rtx)
          && !SP_DERIVED_VALUE_P (v->val_rtx));
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
        e = CSELIB_VAL_PTR (x);
      else
        e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
        {
          if (SP_DERIVED_VALUE_P (e->val_rtx)
              && (*off == NULL_RTX || *off == const0_rtx))
            {
              *off = NULL_RTX;
              return e->val_rtx;
            }
          for (struct elt_loc_list *l = e->locs; l; l = l->next)
            if (GET_CODE (l->loc) == PLUS
                && GET_CODE (XEXP (l->loc, 0)) == VALUE
                && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                && CONST_INT_P (XEXP (l->loc, 1)))
              {
                if (*off == NULL_RTX)
                  *off = XEXP (l->loc, 1);
                else
                  *off = plus_constant (Pmode, *off,
                                        INTVAL (XEXP (l->loc, 1)));
                if (*off == const0_rtx)
                  *off = NULL_RTX;
                return XEXP (l->loc, 0);
              }
        }
    }
  return x;
}

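/* For example, given MEMMODE == SImode on a target where SImode is four
   bytes wide, autoinc_split turns (pre_dec:P (reg sp)) into the base
   (reg sp) with *OFF = (const_int -4); the base is further replaced by
   an SP_DERIVED_VALUE_P VALUE plus a constant offset when such an
   equivalence has already been recorded.  */
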
/* Return true if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

bool
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return true;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (y) == Pmode)
        {
          rtx yoff = NULL;
          rtx yr = autoinc_split (y, &yoff, memmode);
          if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
            return true;
        }

      if (depth == 128)
        return false;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return true;
        }

      return false;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (x) == Pmode)
        {
          rtx xoff = NULL;
          rtx xr = autoinc_split (x, &xoff, memmode);
          if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
            return true;
        }

      if (depth == 128)
        return false;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return true;
        }

      return false;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return false;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
          && GET_MODE (x) == Pmode
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        {
          if (!xoff != !yoff)
            return false;

          if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
            return false;

          return rtx_equal_for_cselib_1 (x, y, memmode, depth);
        }

      if (GET_CODE (xorig) != GET_CODE (yorig))
        return false;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
        return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return false;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return false;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return false;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return false;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return false;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return true;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return false;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return false;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return true;
}

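/* Note for the function above: two Pmode PLUS expressions with
   CONST_INT second operands are passed through autoinc_split even when
   their codes already match, so that addresses expressed relative to
   different but equivalent bases (e.g. sp vs. a value known to equal
   sp + 16) can still compare equal once both are reduced to an
   SP_DERIVED_VALUE_P VALUE plus a constant offset.  */
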
/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.  */
bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)))
    return false;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
                                       GET_MODE (SET_DEST (set)),
                                       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
         the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
        {
          rtx src_equiv = l->loc;
          while (GET_CODE (src_equiv) == SUBREG
                 || GET_CODE (src_equiv) == ZERO_EXTRACT
                 || GET_CODE (src_equiv) == STRICT_LOW_PART)
            src_equiv = XEXP (src_equiv, 0);

          if (MEM_P (src_equiv))
            {
              /* Match the MEMs by comparing the addresses.  We can
                 only remove the later store if the earlier aliases at
                 least all the accesses of the later one.  */
              if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
                                          GET_MODE (dest), 0))
                return mems_same_for_tbaa_p (src_equiv, dest);
            }
        }
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  rtx src = SET_SRC (set);
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}

/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
                            machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
          && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        {
          e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
          c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
          break;
        }
  if (c == 0)
    return e->hash;

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}

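/* For example, in the function above, if the value recorded for X has a
   location (plus:P V (const_int -16)) where V is SP_DERIVED_VALUE_P,
   then hashing (plus:P X (const_int 8)) folds to hashing V with C = -8,
   so equal stack addresses reached through different base registers
   hash identically.  */
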
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE))
        {
          HOST_WIDE_INT c;
          if (offset.is_constant (&c))
            return cselib_hash_plus_const_int (XEXP (x, 0),
                                               trunc_int_for_mode (c, Pmode),
                                               create, memmode);
        }
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
              + cselib_hash_rtx (XEXP (x, 0), create, memmode)
              + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
                                 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE)
          && CONST_INT_P (XEXP (x, 1)))
        return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
                                           create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

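/* Because the generic case above simply sums the operand hashes,
   expressions that differ only in operand order, e.g.
   (plus:SI (reg:SI 1) (reg:SI 2)) and (plus:SI (reg:SI 2) (reg:SI 1)),
   receive the same hash value, matching the commutativity handling in
   rtx_equal_for_cselib_1.  */
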
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
        {
          PUT_MODE_RAW (copy, narrow_mode_iter);
          cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
          if (v)
            {
              rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
              if (sub)
                new_elt_loc_list (v, sub);
            }
        }
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
        && MEM_ADDR_SPACE (l->loc) == as)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
          if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
            {
              promote_debug_loc (l->elt->locs);
              return l->elt;
            }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

073a8998 1702/* Search through the possible substitutions in P. We prefer a non-reg
6fb5fa3c
DB
 1703 substitution because this allows us to expand the tree further. If
 1704 we find just a reg, take the lowest regno. There may be several
 1705 non-reg results; we just take the first one because they will all
 1706 expand to the same place. */
1707
b8698a0f 1708static rtx
b5b8b0ac
AO
1709expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1710 int max_depth)
6fb5fa3c
DB
1711{
1712 rtx reg_result = NULL;
1713 unsigned int regno = UINT_MAX;
1714 struct elt_loc_list *p_in = p;
1715
67b977ad 1716 for (; p; p = p->next)
6fb5fa3c 1717 {
67b977ad
JJ
1718 /* Return these right away to avoid returning stack pointer based
1719 expressions for frame pointer and vice versa, which is something
1720 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1721 for more details. */
1722 if (REG_P (p->loc)
1723 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1724 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1725 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1726 || REGNO (p->loc) == cfa_base_preserved_regno))
1727 return p->loc;
6fb5fa3c
DB
 1728 /* Avoid infinite recursion trying to expand a reg into
1729 the same reg. */
b8698a0f
L
1730 if ((REG_P (p->loc))
1731 && (REGNO (p->loc) < regno)
b5b8b0ac 1732 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
6fb5fa3c
DB
1733 {
1734 reg_result = p->loc;
1735 regno = REGNO (p->loc);
1736 }
1737 /* Avoid infinite recursion and do not try to expand the
1738 value. */
b8698a0f 1739 else if (GET_CODE (p->loc) == VALUE
6fb5fa3c
DB
1740 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1741 continue;
1742 else if (!REG_P (p->loc))
1743 {
8dd5516b 1744 rtx result, note;
4a3c9687 1745 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1746 {
1747 print_inline_rtx (dump_file, p->loc, 0);
1748 fprintf (dump_file, "\n");
1749 }
8dd5516b
JJ
1750 if (GET_CODE (p->loc) == LO_SUM
1751 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1752 && p->setting_insn
1753 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1754 && XEXP (note, 0) == XEXP (p->loc, 1))
1755 return XEXP (p->loc, 1);
b5b8b0ac 1756 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
6fb5fa3c
DB
1757 if (result)
1758 return result;
1759 }
b8698a0f 1760
6fb5fa3c 1761 }
b8698a0f 1762
6fb5fa3c
DB
1763 if (regno != UINT_MAX)
1764 {
1765 rtx result;
4a3c9687 1766 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1767 fprintf (dump_file, "r%d\n", regno);
1768
b5b8b0ac 1769 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
6fb5fa3c
DB
1770 if (result)
1771 return result;
1772 }
1773
4a3c9687 1774 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1775 {
1776 if (reg_result)
1777 {
1778 print_inline_rtx (dump_file, reg_result, 0);
1779 fprintf (dump_file, "\n");
1780 }
b8698a0f 1781 else
6fb5fa3c
DB
1782 fprintf (dump_file, "NULL\n");
1783 }
1784 return reg_result;
1785}
1786
1787
1788/* Forward substitute and expand an expression out to its roots.
 1789 This is the opposite of common subexpression elimination. Because local
 1790 value numbering is such a weak optimization, the expanded expression is
 1791 pretty much unique (not from a pointer-equality point of view but
b8698a0f 1792 from a tree-shape point of view).
6fb5fa3c
DB
1793
1794 This function returns NULL if the expansion fails. The expansion
1795 will fail if there is no value number for one of the operands or if
1796 one of the operands has been overwritten between the current insn
1797 and the beginning of the basic block. For instance x has no
1798 expansion in:
1799
1800 r1 <- r1 + 3
1801 x <- r1 + 8
1802
 1803 REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
1804 It is clear on return. */
1805
1806rtx
1807cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
b5b8b0ac
AO
1808{
1809 struct expand_value_data evd;
1810
1811 evd.regs_active = regs_active;
1812 evd.callback = NULL;
1813 evd.callback_arg = NULL;
864ddef7 1814 evd.dummy = false;
b5b8b0ac
AO
1815
1816 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1817}
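
/* A minimal usage sketch (illustrative only; the helper name and the depth
   limit of 8 are assumptions, not part of the cselib interface): expand a
   tracked rtx back toward its base expression from a pass that has already
   fed the relevant insns to cselib_process_insn.  */

static rtx
sketch_expand_tracked_rtx (rtx x)
{
  /* Scratch bitmap; it must be clear going in and is clear again on return.  */
  bitmap regs_active = BITMAP_ALLOC (NULL);
  rtx expanded = cselib_expand_value_rtx (x, regs_active, 8);
  BITMAP_FREE (regs_active);
  /* NULL means no expansion was found within the depth limit.  */
  return expanded;
}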
1818
1819/* Same as cselib_expand_value_rtx, but using a callback to try to
0b7e34d7
AO
1820 resolve some expressions. The CB function should return ORIG if it
1821 can't or does not want to deal with a certain RTX. Any other
1822 return value, including NULL, will be used as the expansion for
1823 VALUE, without any further changes. */
b5b8b0ac
AO
1824
1825rtx
1826cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1827 cselib_expand_callback cb, void *data)
1828{
1829 struct expand_value_data evd;
1830
1831 evd.regs_active = regs_active;
1832 evd.callback = cb;
1833 evd.callback_arg = data;
864ddef7 1834 evd.dummy = false;
b5b8b0ac
AO
1835
1836 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1837}
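
/* A sketch of the callback contract (the helper names are assumptions):
   returning ORIG declines, and any other return value, including NULL_RTX,
   is used as the expansion without further processing.  Here every rtx is
   declined except DEBUG_EXPRs, which are blocked outright.  */

static rtx
sketch_expand_cb (rtx orig, bitmap regs_active ATTRIBUTE_UNUSED,
		  int max_depth ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (orig) == DEBUG_EXPR)
    return NULL_RTX;	/* Refuse to expand through debug expressions.  */
  return orig;		/* Decline; let cselib expand it normally.  */
}

/* Used as: cselib_expand_value_rtx_cb (x, regs_active, 8, sketch_expand_cb, NULL); */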
1838
864ddef7
JJ
1839/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1840 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1841 would return NULL or non-NULL, without allocating new rtx. */
1842
1843bool
1844cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1845 cselib_expand_callback cb, void *data)
1846{
1847 struct expand_value_data evd;
1848
1849 evd.regs_active = regs_active;
1850 evd.callback = cb;
1851 evd.callback_arg = data;
1852 evd.dummy = true;
1853
1854 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1855}
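
/* Sketch (assumed helper name): the dummy variant can serve as a cheap
   feasibility test before building any new rtl; passing a NULL callback is
   fine, just as cselib_expand_value_rtx does internally.  */

static bool
sketch_expansion_would_succeed_p (rtx x, bitmap regs_active)
{
  return cselib_dummy_expand_value_rtx_cb (x, regs_active, 8, NULL, NULL);
}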
1856
0b7e34d7
AO
1857/* Internal implementation of cselib_expand_value_rtx and
1858 cselib_expand_value_rtx_cb. */
1859
b5b8b0ac
AO
1860static rtx
1861cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1862 int max_depth)
6fb5fa3c
DB
1863{
1864 rtx copy, scopy;
1865 int i, j;
1866 RTX_CODE code;
1867 const char *format_ptr;
ef4bddc2 1868 machine_mode mode;
6fb5fa3c
DB
1869
1870 code = GET_CODE (orig);
1871
 1872 /* For the context of dse, if we end up expanding into a huge tree, we
1873 will not have a useful address, so we might as well just give up
1874 quickly. */
1875 if (max_depth <= 0)
1876 return NULL;
1877
1878 switch (code)
1879 {
1880 case REG:
1881 {
1882 struct elt_list *l = REG_VALUES (REGNO (orig));
1883
1884 if (l && l->elt == NULL)
1885 l = l->next;
1886 for (; l; l = l->next)
1887 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1888 {
1889 rtx result;
5a9fbcf1 1890 unsigned regno = REGNO (orig);
b8698a0f 1891
6fb5fa3c 1892 /* The only thing that we are not willing to do (this
6ed3da00 1893 is a requirement of dse and if other potential uses
6fb5fa3c
DB
 1894 need this function, we should add a parm to control
1895 it) is that we will not substitute the
1896 STACK_POINTER_REGNUM, FRAME_POINTER or the
1897 HARD_FRAME_POINTER.
1898
cea618ac 1899 These expansions confuse the code that notices that
6fb5fa3c
DB
1900 stores into the frame go dead at the end of the
 1901 function and that the frame is not affected by calls
1902 to subroutines. If you allow the
1903 STACK_POINTER_REGNUM substitution, then dse will
1904 think that parameter pushing also goes dead which is
1905 wrong. If you allow the FRAME_POINTER or the
1906 HARD_FRAME_POINTER then you lose the opportunity to
1907 make the frame assumptions. */
1908 if (regno == STACK_POINTER_REGNUM
1909 || regno == FRAME_POINTER_REGNUM
5a9fbcf1
AO
1910 || regno == HARD_FRAME_POINTER_REGNUM
1911 || regno == cfa_base_preserved_regno)
6fb5fa3c
DB
1912 return orig;
1913
b5b8b0ac 1914 bitmap_set_bit (evd->regs_active, regno);
6fb5fa3c 1915
4a3c9687 1916 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1917 fprintf (dump_file, "expanding: r%d into: ", regno);
1918
b5b8b0ac
AO
1919 result = expand_loc (l->elt->locs, evd, max_depth);
1920 bitmap_clear_bit (evd->regs_active, regno);
6fb5fa3c
DB
1921
1922 if (result)
1923 return result;
b8698a0f 1924 else
6fb5fa3c
DB
1925 return orig;
1926 }
f0bc3323 1927 return orig;
6fb5fa3c 1928 }
b8698a0f 1929
d8116890 1930 CASE_CONST_ANY:
6fb5fa3c
DB
1931 case SYMBOL_REF:
1932 case CODE_LABEL:
1933 case PC:
6fb5fa3c
DB
1934 case SCRATCH:
 1935 /* SCRATCHes must be shared because each one represents a distinct value. */
1936 return orig;
1937 case CLOBBER:
1938 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1939 return orig;
1940 break;
1941
1942 case CONST:
1943 if (shared_const_p (orig))
1944 return orig;
1945 break;
1946
8dd5516b 1947 case SUBREG:
6fb5fa3c 1948 {
0b7e34d7
AO
1949 rtx subreg;
1950
1951 if (evd->callback)
1952 {
1953 subreg = evd->callback (orig, evd->regs_active, max_depth,
1954 evd->callback_arg);
1955 if (subreg != orig)
1956 return subreg;
1957 }
1958
1959 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1960 max_depth - 1);
8dd5516b
JJ
1961 if (!subreg)
1962 return NULL;
1963 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1964 GET_MODE (SUBREG_REG (orig)),
1965 SUBREG_BYTE (orig));
0b7e34d7
AO
1966 if (scopy == NULL
1967 || (GET_CODE (scopy) == SUBREG
1968 && !REG_P (SUBREG_REG (scopy))
1969 && !MEM_P (SUBREG_REG (scopy))))
1970 return NULL;
1971
8dd5516b 1972 return scopy;
6fb5fa3c 1973 }
8dd5516b
JJ
1974
1975 case VALUE:
b5b8b0ac
AO
1976 {
1977 rtx result;
0b7e34d7 1978
4a3c9687 1979 if (dump_file && (dump_flags & TDF_CSELIB))
b5b8b0ac
AO
1980 {
1981 fputs ("\nexpanding ", dump_file);
1982 print_rtl_single (dump_file, orig);
1983 fputs (" into...", dump_file);
1984 }
8dd5516b 1985
0b7e34d7 1986 if (evd->callback)
b5b8b0ac
AO
1987 {
1988 result = evd->callback (orig, evd->regs_active, max_depth,
1989 evd->callback_arg);
0b7e34d7
AO
1990
1991 if (result != orig)
1992 return result;
b5b8b0ac 1993 }
8dd5516b 1994
0b7e34d7 1995 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
b5b8b0ac
AO
1996 return result;
1997 }
0ca5af51
AO
1998
1999 case DEBUG_EXPR:
2000 if (evd->callback)
2001 return evd->callback (orig, evd->regs_active, max_depth,
2002 evd->callback_arg);
2003 return orig;
2004
6fb5fa3c
DB
2005 default:
2006 break;
2007 }
2008
2009 /* Copy the various flags, fields, and other information. We assume
2010 that all fields need copying, and then clear the fields that should
2011 not be copied. That is the sensible default behavior, and forces
2012 us to explicitly document why we are *not* copying a flag. */
864ddef7
JJ
2013 if (evd->dummy)
2014 copy = NULL;
2015 else
2016 copy = shallow_copy_rtx (orig);
6fb5fa3c 2017
8dd5516b 2018 format_ptr = GET_RTX_FORMAT (code);
6fb5fa3c 2019
8dd5516b 2020 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6fb5fa3c
DB
2021 switch (*format_ptr++)
2022 {
2023 case 'e':
2024 if (XEXP (orig, i) != NULL)
2025 {
b5b8b0ac
AO
2026 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
2027 max_depth - 1);
6fb5fa3c
DB
2028 if (!result)
2029 return NULL;
864ddef7
JJ
2030 if (copy)
2031 XEXP (copy, i) = result;
6fb5fa3c
DB
2032 }
2033 break;
2034
2035 case 'E':
2036 case 'V':
2037 if (XVEC (orig, i) != NULL)
2038 {
864ddef7
JJ
2039 if (copy)
2040 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2041 for (j = 0; j < XVECLEN (orig, i); j++)
6fb5fa3c 2042 {
b5b8b0ac
AO
2043 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
2044 evd, max_depth - 1);
6fb5fa3c
DB
2045 if (!result)
2046 return NULL;
864ddef7
JJ
2047 if (copy)
2048 XVECEXP (copy, i, j) = result;
6fb5fa3c
DB
2049 }
2050 }
2051 break;
2052
2053 case 't':
2054 case 'w':
2055 case 'i':
2056 case 's':
2057 case 'S':
2058 case 'T':
2059 case 'u':
2060 case 'B':
2061 case '0':
2062 /* These are left unchanged. */
2063 break;
2064
2065 default:
2066 gcc_unreachable ();
2067 }
2068
864ddef7
JJ
2069 if (evd->dummy)
2070 return orig;
2071
8dd5516b
JJ
2072 mode = GET_MODE (copy);
 2073 /* If an operand has been simplified into a CONST_INT, which doesn't
 2074 have a mode, and the mode isn't derivable from the whole rtx's mode,
 2075 try simplify_*_operation first with the mode of the original operand,
 2076 and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
2077 scopy = copy;
2078 switch (GET_RTX_CLASS (code))
2079 {
2080 case RTX_UNARY:
2081 if (CONST_INT_P (XEXP (copy, 0))
2082 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2083 {
2084 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
2085 GET_MODE (XEXP (orig, 0)));
2086 if (scopy)
2087 return scopy;
2088 }
2089 break;
2090 case RTX_COMM_ARITH:
2091 case RTX_BIN_ARITH:
2092 /* These expressions can derive operand modes from the whole rtx's mode. */
2093 break;
2094 case RTX_TERNARY:
2095 case RTX_BITFIELD_OPS:
2096 if (CONST_INT_P (XEXP (copy, 0))
2097 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2098 {
2099 scopy = simplify_ternary_operation (code, mode,
2100 GET_MODE (XEXP (orig, 0)),
2101 XEXP (copy, 0), XEXP (copy, 1),
2102 XEXP (copy, 2));
2103 if (scopy)
2104 return scopy;
2105 }
2106 break;
2107 case RTX_COMPARE:
2108 case RTX_COMM_COMPARE:
2109 if (CONST_INT_P (XEXP (copy, 0))
2110 && GET_MODE (XEXP (copy, 1)) == VOIDmode
2111 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
2112 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
2113 {
2114 scopy = simplify_relational_operation (code, mode,
2115 (GET_MODE (XEXP (orig, 0))
2116 != VOIDmode)
2117 ? GET_MODE (XEXP (orig, 0))
2118 : GET_MODE (XEXP (orig, 1)),
2119 XEXP (copy, 0),
2120 XEXP (copy, 1));
2121 if (scopy)
2122 return scopy;
2123 }
2124 break;
2125 default:
2126 break;
2127 }
6fb5fa3c
DB
2128 scopy = simplify_rtx (copy);
2129 if (scopy)
3af4ba41 2130 return scopy;
6fb5fa3c
DB
2131 return copy;
2132}
2133
fa49fd0f
RK
2134/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
2135 with VALUE expressions. This way, it becomes independent of changes
2136 to registers and memory.
2137 X isn't actually modified; if modifications are needed, new rtl is
4deef538
AO
2138 allocated. However, the return value can share rtl with X.
2139 If X is within a MEM, MEMMODE must be the mode of the MEM. */
fa49fd0f 2140
91700444 2141rtx
ef4bddc2 2142cselib_subst_to_values (rtx x, machine_mode memmode)
fa49fd0f
RK
2143{
2144 enum rtx_code code = GET_CODE (x);
2145 const char *fmt = GET_RTX_FORMAT (code);
2146 cselib_val *e;
2147 struct elt_list *l;
2148 rtx copy = x;
2149 int i;
cf098191 2150 poly_int64 offset;
fa49fd0f
RK
2151
2152 switch (code)
2153 {
2154 case REG:
60fa6660
AO
2155 l = REG_VALUES (REGNO (x));
2156 if (l && l->elt == NULL)
2157 l = l->next;
2158 for (; l; l = l->next)
757bbef8
SB
2159 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
2160 return l->elt->val_rtx;
fa49fd0f 2161
341c100f 2162 gcc_unreachable ();
fa49fd0f
RK
2163
2164 case MEM:
2165 e = cselib_lookup_mem (x, 0);
4deef538
AO
2166 /* This used to happen for autoincrements, but we deal with them
2167 properly now. Remove the if stmt for the next release. */
fa49fd0f 2168 if (! e)
91700444 2169 {
4deef538 2170 /* Assign a value that doesn't match any other. */
5440c0e7 2171 e = new_cselib_val (next_uid, GET_MODE (x), x);
91700444 2172 }
757bbef8 2173 return e->val_rtx;
fa49fd0f 2174
509f4495
JJ
2175 case ENTRY_VALUE:
2176 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
2177 if (! e)
2178 break;
2179 return e->val_rtx;
2180
d8116890 2181 CASE_CONST_ANY:
fa49fd0f
RK
2182 return x;
2183
4deef538 2184 case PRE_DEC:
91700444 2185 case PRE_INC:
4deef538 2186 gcc_assert (memmode != VOIDmode);
cf098191 2187 offset = GET_MODE_SIZE (memmode);
4deef538 2188 if (code == PRE_DEC)
cf098191 2189 offset = -offset;
0a81f074 2190 return cselib_subst_to_values (plus_constant (GET_MODE (x),
cf098191 2191 XEXP (x, 0), offset),
4deef538
AO
2192 memmode);
2193
2194 case PRE_MODIFY:
2195 gcc_assert (memmode != VOIDmode);
2196 return cselib_subst_to_values (XEXP (x, 1), memmode);
2197
91700444 2198 case POST_DEC:
4deef538 2199 case POST_INC:
91700444 2200 case POST_MODIFY:
4deef538
AO
2201 gcc_assert (memmode != VOIDmode);
2202 return cselib_subst_to_values (XEXP (x, 0), memmode);
7080f735 2203
2c0fa3ec
JJ
2204 case PLUS:
2205 if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
2206 {
2207 rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
2208 if (GET_CODE (t) == VALUE)
8662d059
JJ
2209 {
2210 if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
2211 return t;
2212 for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
2213 l; l = l->next)
2214 if (GET_CODE (l->loc) == PLUS
2215 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2216 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2217 && CONST_INT_P (XEXP (l->loc, 1)))
2218 return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
2219 }
2c0fa3ec
JJ
2220 if (t != XEXP (x, 0))
2221 {
2222 copy = shallow_copy_rtx (x);
2223 XEXP (copy, 0) = t;
2224 }
2225 return copy;
2226 }
2227
fa49fd0f
RK
2228 default:
2229 break;
2230 }
2231
2232 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2233 {
2234 if (fmt[i] == 'e')
2235 {
4deef538 2236 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
fa49fd0f 2237
bd7960b1
RS
2238 if (t != XEXP (x, i))
2239 {
2240 if (x == copy)
2241 copy = shallow_copy_rtx (x);
2242 XEXP (copy, i) = t;
2243 }
fa49fd0f
RK
2244 }
2245 else if (fmt[i] == 'E')
2246 {
bd7960b1 2247 int j;
fa49fd0f
RK
2248
2249 for (j = 0; j < XVECLEN (x, i); j++)
2250 {
4deef538 2251 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
fa49fd0f 2252
bd7960b1 2253 if (t != XVECEXP (x, i, j))
fa49fd0f 2254 {
bd7960b1
RS
2255 if (XVEC (x, i) == XVEC (copy, i))
2256 {
2257 if (x == copy)
2258 copy = shallow_copy_rtx (x);
2259 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
2260 }
2261 XVECEXP (copy, i, j) = t;
fa49fd0f 2262 }
fa49fd0f
RK
2263 }
2264 }
2265 }
2266
2267 return copy;
2268}
2269
9a76e83d
JJ
2270/* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
2271
2272rtx
ef4bddc2 2273cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
9a76e83d
JJ
2274{
2275 rtx ret;
2276 gcc_assert (!cselib_current_insn);
2277 cselib_current_insn = insn;
2278 ret = cselib_subst_to_values (x, memmode);
2279 cselib_current_insn = NULL;
2280 return ret;
2281}
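
/* Sketch (assumed helper name): compare two MEM addresses independently of
   which registers currently hold them, by rewriting both in terms of VALUEs
   at INSN.  This assumes the insns defining those addresses have already
   been fed to cselib_process_insn; rtx_equal_p on the substituted forms is a
   deliberately rough equality test.  */

static bool
sketch_same_address_p (rtx mem1, rtx mem2, rtx_insn *insn)
{
  rtx a1 = cselib_subst_to_values_from_insn (XEXP (mem1, 0),
					     GET_MODE (mem1), insn);
  rtx a2 = cselib_subst_to_values_from_insn (XEXP (mem2, 0),
					     GET_MODE (mem2), insn);
  return rtx_equal_p (a1, a2);
}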
2282
4deef538
AO
2283/* Look up the rtl expression X in our tables and return the value it
2284 has. If CREATE is zero, we return NULL if we don't know the value.
2285 Otherwise, we create a new one if possible, using mode MODE if X
2286 doesn't have a mode (i.e. because it's a constant). When X is part
2287 of an address, MEMMODE should be the mode of the enclosing MEM if
2288 we're tracking autoinc expressions. */
fa49fd0f 2289
5847e8da 2290static cselib_val *
ef4bddc2
RS
2291cselib_lookup_1 (rtx x, machine_mode mode,
2292 int create, machine_mode memmode)
fa49fd0f 2293{
4a8fb1a1 2294 cselib_val **slot;
fa49fd0f
RK
2295 cselib_val *e;
2296 unsigned int hashval;
2297
2298 if (GET_MODE (x) != VOIDmode)
2299 mode = GET_MODE (x);
2300
2301 if (GET_CODE (x) == VALUE)
2302 return CSELIB_VAL_PTR (x);
2303
f8cfc6aa 2304 if (REG_P (x))
fa49fd0f
RK
2305 {
2306 struct elt_list *l;
2307 unsigned int i = REGNO (x);
2308
60fa6660
AO
2309 l = REG_VALUES (i);
2310 if (l && l->elt == NULL)
2311 l = l->next;
2312 for (; l; l = l->next)
757bbef8 2313 if (mode == GET_MODE (l->elt->val_rtx))
5847e8da
AO
2314 {
2315 promote_debug_loc (l->elt->locs);
2316 return l->elt;
2317 }
fa49fd0f
RK
2318
2319 if (! create)
5847e8da 2320 return 0;
fa49fd0f 2321
31825e57
DM
2322 if (i < FIRST_PSEUDO_REGISTER)
2323 {
ad474626 2324 unsigned int n = hard_regno_nregs (i, mode);
31825e57
DM
2325
2326 if (n > max_value_regs)
2327 max_value_regs = n;
2328 }
2329
5440c0e7 2330 e = new_cselib_val (next_uid, GET_MODE (x), x);
2c0fa3ec
JJ
2331 if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
2332 SP_DERIVED_VALUE_P (e->val_rtx) = 1;
6f2ffb4b 2333 new_elt_loc_list (e, x);
b4206259
RS
2334
2335 scalar_int_mode int_mode;
fa49fd0f 2336 if (REG_VALUES (i) == 0)
60fa6660
AO
2337 {
2338 /* Maintain the invariant that the first entry of
2339 REG_VALUES, if present, must be the value used to set the
2340 register, or NULL. */
6790d1ab 2341 used_regs[n_used_regs++] = i;
60fa6660
AO
2342 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2343 }
509f4495 2344 else if (cselib_preserve_constants
b4206259 2345 && is_int_mode (mode, &int_mode))
509f4495
JJ
2346 {
2347 /* During var-tracking, try harder to find equivalences
 2348 for SUBREGs. If a setter sets, say, a DImode register
 2349 and the user uses that register only in SImode, add a lowpart
2350 subreg location. */
2351 struct elt_list *lwider = NULL;
b4206259 2352 scalar_int_mode lmode;
509f4495
JJ
2353 l = REG_VALUES (i);
2354 if (l && l->elt == NULL)
2355 l = l->next;
2356 for (; l; l = l->next)
b4206259
RS
2357 if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2358 && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
509f4495 2359 && (lwider == NULL
bd4288c0
RS
2360 || partial_subreg_p (lmode,
2361 GET_MODE (lwider->elt->val_rtx))))
509f4495
JJ
2362 {
2363 struct elt_loc_list *el;
2364 if (i < FIRST_PSEUDO_REGISTER
ad474626 2365 && hard_regno_nregs (i, lmode) != 1)
509f4495
JJ
2366 continue;
2367 for (el = l->elt->locs; el; el = el->next)
2368 if (!REG_P (el->loc))
2369 break;
2370 if (el)
2371 lwider = l;
2372 }
2373 if (lwider)
2374 {
b4206259 2375 rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
509f4495
JJ
2376 GET_MODE (lwider->elt->val_rtx));
2377 if (sub)
6f2ffb4b 2378 new_elt_loc_list (e, sub);
509f4495
JJ
2379 }
2380 }
60fa6660 2381 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
f956adb9 2382 slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
fa49fd0f 2383 *slot = e;
5847e8da 2384 return e;
fa49fd0f
RK
2385 }
2386
3c0cb5de 2387 if (MEM_P (x))
5847e8da 2388 return cselib_lookup_mem (x, create);
fa49fd0f 2389
4deef538 2390 hashval = cselib_hash_rtx (x, create, memmode);
fa49fd0f
RK
2391 /* Can't even create if hashing is not possible. */
2392 if (! hashval)
5847e8da 2393 return 0;
fa49fd0f 2394
f956adb9 2395 slot = cselib_find_slot (mode, x, hashval,
4deef538 2396 create ? INSERT : NO_INSERT, memmode);
fa49fd0f 2397 if (slot == 0)
5847e8da 2398 return 0;
fa49fd0f
RK
2399
2400 e = (cselib_val *) *slot;
2401 if (e)
5847e8da 2402 return e;
fa49fd0f 2403
b5b8b0ac 2404 e = new_cselib_val (hashval, mode, x);
fa49fd0f
RK
2405
2406 /* We have to fill the slot before calling cselib_subst_to_values:
2407 the hash table is inconsistent until we do so, and
2408 cselib_subst_to_values will need to do lookups. */
4a8fb1a1 2409 *slot = e;
2c0fa3ec
JJ
2410 rtx v = cselib_subst_to_values (x, memmode);
2411
2412 /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
2413 VALUE that isn't in the hash tables anymore. */
2414 if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
2415 PRESERVED_VALUE_P (e->val_rtx) = 1;
2416
2417 new_elt_loc_list (e, v);
5847e8da
AO
2418 return e;
2419}
2420
2421/* Wrapper for cselib_lookup, that indicates X is in INSN. */
2422
2423cselib_val *
ef4bddc2
RS
2424cselib_lookup_from_insn (rtx x, machine_mode mode,
2425 int create, machine_mode memmode, rtx_insn *insn)
5847e8da
AO
2426{
2427 cselib_val *ret;
2428
2429 gcc_assert (!cselib_current_insn);
2430 cselib_current_insn = insn;
2431
4deef538 2432 ret = cselib_lookup (x, mode, create, memmode);
5847e8da
AO
2433
2434 cselib_current_insn = NULL;
2435
2436 return ret;
2437}
2438
 2439/* Wrapper for cselib_lookup_1 that logs the lookup result and
 2440 maintains invariants related to debug insns. */
2441
2442cselib_val *
ef4bddc2
RS
2443cselib_lookup (rtx x, machine_mode mode,
2444 int create, machine_mode memmode)
5847e8da 2445{
4deef538 2446 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
5847e8da
AO
2447
2448 /* ??? Should we return NULL if we're not to create an entry, the
2449 found loc is a debug loc and cselib_current_insn is not DEBUG?
2450 If so, we should also avoid converting val to non-DEBUG; probably
2451 easiest setting cselib_current_insn to NULL before the call
2452 above. */
2453
4a3c9687 2454 if (dump_file && (dump_flags & TDF_CSELIB))
5847e8da
AO
2455 {
2456 fputs ("cselib lookup ", dump_file);
2457 print_inline_rtx (dump_file, x, 2);
2458 fprintf (dump_file, " => %u:%u\n",
2459 ret ? ret->uid : 0,
2460 ret ? ret->hash : 0);
2461 }
2462
2463 return ret;
fa49fd0f
RK
2464}
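
/* Sketch (assumed helper name): ask whether two expressions are currently
   known to carry the same value.  With CREATE == 0 the tables are left
   untouched, so expressions cselib has not seen simply yield NULL.  */

static bool
sketch_known_equal_p (rtx a, rtx b, machine_mode mode)
{
  cselib_val *va = cselib_lookup (a, mode, 0, VOIDmode);
  cselib_val *vb = cselib_lookup (b, mode, 0, VOIDmode);
  return (va && vb
	  && canonical_cselib_val (va) == canonical_cselib_val (vb));
}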
2465
150760dd
RS
2466/* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
2467
2468static void
2469cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
2470{
2471 cselib_val *v = (*l)->elt;
2472 if (*l == REG_VALUES (regno))
2473 {
2474 /* Maintain the invariant that the first entry of
2475 REG_VALUES, if present, must be the value used to set
2476 the register, or NULL. This is also nice because
2477 then we won't push the same regno onto user_regs
2478 multiple times. */
2479 (*l)->elt = NULL;
2480 l = &(*l)->next;
2481 }
2482 else
2483 unchain_one_elt_list (l);
2484
2485 v = canonical_cselib_val (v);
2486
2487 bool had_locs = v->locs != NULL;
2488 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2489
2490 /* Now, we clear the mapping from value to reg. It must exist, so
2491 this code will crash intentionally if it doesn't. */
2492 for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
2493 {
2494 rtx x = (*p)->loc;
2495
2496 if (REG_P (x) && REGNO (x) == regno)
2497 {
2498 unchain_one_elt_loc_list (p);
2499 break;
2500 }
2501 }
2502
bab8d962 2503 if (had_locs && cselib_useless_value_p (v))
150760dd
RS
2504 {
2505 if (setting_insn && DEBUG_INSN_P (setting_insn))
2506 n_useless_debug_values++;
2507 else
2508 n_useless_values++;
2509 }
2510}
2511
fa49fd0f
RK
2512/* Invalidate any entries in reg_values that overlap REGNO. This is called
2513 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2514 is used to determine how many hard registers are being changed. If MODE
2515 is VOIDmode, then only REGNO is being changed; this is used when
2516 invalidating call clobbered registers across a call. */
2517
2518static void
17d184e5 2519cselib_invalidate_regno (unsigned int regno, machine_mode mode)
fa49fd0f
RK
2520{
2521 unsigned int endregno;
2522 unsigned int i;
2523
2524 /* If we see pseudos after reload, something is _wrong_. */
341c100f
NS
2525 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2526 || reg_renumber[regno] < 0);
fa49fd0f
RK
2527
2528 /* Determine the range of registers that must be invalidated. For
2529 pseudos, only REGNO is affected. For hard regs, we must take MODE
2530 into account, and we must also invalidate lower register numbers
2531 if they contain values that overlap REGNO. */
291aac59 2532 if (regno < FIRST_PSEUDO_REGISTER)
31825e57 2533 {
341c100f 2534 gcc_assert (mode != VOIDmode);
7080f735 2535
31825e57
DM
2536 if (regno < max_value_regs)
2537 i = 0;
2538 else
2539 i = regno - max_value_regs;
fa49fd0f 2540
09e18274 2541 endregno = end_hard_regno (mode, regno);
31825e57
DM
2542 }
2543 else
2544 {
2545 i = regno;
2546 endregno = regno + 1;
2547 }
2548
2549 for (; i < endregno; i++)
fa49fd0f
RK
2550 {
2551 struct elt_list **l = &REG_VALUES (i);
2552
2553 /* Go through all known values for this reg; if it overlaps the range
2554 we're invalidating, remove the value. */
2555 while (*l)
2556 {
2557 cselib_val *v = (*l)->elt;
fa49fd0f
RK
2558 unsigned int this_last = i;
2559
60fa6660 2560 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
09e18274 2561 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
fa49fd0f 2562
9de9cbaf
JJ
2563 if (this_last < regno || v == NULL
2564 || (v == cfa_base_preserved_val
2565 && i == cfa_base_preserved_regno))
fa49fd0f
RK
2566 {
2567 l = &(*l)->next;
2568 continue;
2569 }
2570
2571 /* We have an overlap. */
150760dd 2572 cselib_invalidate_regno_val (i, l);
fa49fd0f
RK
2573 }
2574 }
2575}
9ddb66ca 2576\f
7101fb18
JH
2577/* Invalidate any locations in the table which are changed because of a
2578 store to MEM_RTX. If this is called because of a non-const call
2579 instruction, MEM_RTX is (mem:BLK const0_rtx). */
fa49fd0f 2580
7101fb18 2581static void
7080f735 2582cselib_invalidate_mem (rtx mem_rtx)
fa49fd0f 2583{
7101fb18 2584 cselib_val **vp, *v, *next;
c65ecebc 2585 int num_mems = 0;
9ddb66ca
JH
2586 rtx mem_addr;
2587
2588 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2589 mem_rtx = canon_rtx (mem_rtx);
fa49fd0f 2590
7101fb18
JH
2591 vp = &first_containing_mem;
2592 for (v = *vp; v != &dummy_val; v = next)
fa49fd0f 2593 {
7101fb18
JH
2594 bool has_mem = false;
2595 struct elt_loc_list **p = &v->locs;
5847e8da 2596 bool had_locs = v->locs != NULL;
21afc57d 2597 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
fa49fd0f 2598
7101fb18 2599 while (*p)
fa49fd0f 2600 {
7101fb18
JH
2601 rtx x = (*p)->loc;
2602 cselib_val *addr;
2603 struct elt_list **mem_chain;
2604
2605 /* MEMs may occur in locations only at the top level; below
2606 that every MEM or REG is substituted by its VALUE. */
3c0cb5de 2607 if (!MEM_P (x))
fa49fd0f 2608 {
7101fb18
JH
2609 p = &(*p)->next;
2610 continue;
2611 }
028d4092 2612 if (num_mems < param_max_cselib_memory_locations
bd280792
JR
2613 && ! canon_anti_dependence (x, false, mem_rtx,
2614 GET_MODE (mem_rtx), mem_addr))
7101fb18
JH
2615 {
2616 has_mem = true;
c65ecebc 2617 num_mems++;
7101fb18
JH
2618 p = &(*p)->next;
2619 continue;
fa49fd0f
RK
2620 }
2621
7101fb18
JH
2622 /* This one overlaps. */
2623 /* We must have a mapping from this MEM's address to the
2624 value (E). Remove that, too. */
4deef538 2625 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
faead9f7
AO
2626 addr = canonical_cselib_val (addr);
2627 gcc_checking_assert (v == canonical_cselib_val (v));
7101fb18
JH
2628 mem_chain = &addr->addr_list;
2629 for (;;)
2630 {
faead9f7
AO
2631 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2632
2633 if (canon == v)
7101fb18
JH
2634 {
2635 unchain_one_elt_list (mem_chain);
2636 break;
2637 }
fa49fd0f 2638
faead9f7
AO
2639 /* Record canonicalized elt. */
2640 (*mem_chain)->elt = canon;
2641
7101fb18
JH
2642 mem_chain = &(*mem_chain)->next;
2643 }
fa49fd0f 2644
7101fb18
JH
2645 unchain_one_elt_loc_list (p);
2646 }
fa49fd0f 2647
bab8d962 2648 if (had_locs && cselib_useless_value_p (v))
5847e8da
AO
2649 {
2650 if (setting_insn && DEBUG_INSN_P (setting_insn))
2651 n_useless_debug_values++;
2652 else
2653 n_useless_values++;
2654 }
fa49fd0f 2655
7101fb18
JH
2656 next = v->next_containing_mem;
2657 if (has_mem)
2658 {
2659 *vp = v;
2660 vp = &(*vp)->next_containing_mem;
2661 }
2662 else
2663 v->next_containing_mem = NULL;
2664 }
2665 *vp = &dummy_val;
fa49fd0f
RK
2666}
2667
17d184e5 2668/* Invalidate DEST. */
fa49fd0f 2669
0d87c765 2670void
17d184e5 2671cselib_invalidate_rtx (rtx dest)
fa49fd0f 2672{
46d096a3
SB
2673 while (GET_CODE (dest) == SUBREG
2674 || GET_CODE (dest) == ZERO_EXTRACT
2675 || GET_CODE (dest) == STRICT_LOW_PART)
fa49fd0f
RK
2676 dest = XEXP (dest, 0);
2677
f8cfc6aa 2678 if (REG_P (dest))
17d184e5 2679 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
3c0cb5de 2680 else if (MEM_P (dest))
fa49fd0f 2681 cselib_invalidate_mem (dest);
0d87c765
RH
2682}
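
/* Sketch (assumed helper name, not a cselib entry point): a pass that
   rewrites an insn while cselib state is live should drop any stale
   equivalences for the location it now writes.  SUBREG, ZERO_EXTRACT and
   STRICT_LOW_PART wrappers are stripped by cselib_invalidate_rtx itself.  */

static void
sketch_forget_written_location (rtx dest)
{
  cselib_invalidate_rtx (dest);
}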
2683
2684/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2685
2686static void
17d184e5 2687cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
0d87c765
RH
2688 void *data ATTRIBUTE_UNUSED)
2689{
17d184e5 2690 cselib_invalidate_rtx (dest);
fa49fd0f
RK
2691}
2692
2693/* Record the result of a SET instruction. DEST is being set; the source
2694 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2695 describes its address. */
2696
2697static void
7080f735 2698cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
fa49fd0f 2699{
fa49fd0f
RK
2700 if (src_elt == 0 || side_effects_p (dest))
2701 return;
2702
dc8afb70 2703 if (REG_P (dest))
fa49fd0f 2704 {
dc8afb70 2705 unsigned int dreg = REGNO (dest);
31825e57
DM
2706 if (dreg < FIRST_PSEUDO_REGISTER)
2707 {
dc8afb70 2708 unsigned int n = REG_NREGS (dest);
31825e57
DM
2709
2710 if (n > max_value_regs)
2711 max_value_regs = n;
2712 }
2713
60fa6660
AO
2714 if (REG_VALUES (dreg) == 0)
2715 {
6790d1ab 2716 used_regs[n_used_regs++] = dreg;
60fa6660
AO
2717 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2718 }
2719 else
2720 {
341c100f
NS
2721 /* The register should have been invalidated. */
2722 gcc_assert (REG_VALUES (dreg)->elt == 0);
2723 REG_VALUES (dreg)->elt = src_elt;
60fa6660
AO
2724 }
2725
bab8d962 2726 if (cselib_useless_value_p (src_elt))
fa49fd0f 2727 n_useless_values--;
6f2ffb4b 2728 new_elt_loc_list (src_elt, dest);
fa49fd0f 2729 }
3c0cb5de 2730 else if (MEM_P (dest) && dest_addr_elt != 0
463301c3 2731 && cselib_record_memory)
fa49fd0f 2732 {
bab8d962 2733 if (cselib_useless_value_p (src_elt))
fa49fd0f
RK
2734 n_useless_values--;
2735 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2736 }
2737}
2738
6f2ffb4b
AO
2739/* Make ELT and X's VALUE equivalent to each other at INSN. */
2740
2741void
12ea1b95 2742cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
6f2ffb4b
AO
2743{
2744 cselib_val *nelt;
12ea1b95 2745 rtx_insn *save_cselib_current_insn = cselib_current_insn;
6f2ffb4b
AO
2746
2747 gcc_checking_assert (elt);
2748 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2749 gcc_checking_assert (!side_effects_p (x));
2750
2751 cselib_current_insn = insn;
2752
2753 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2754
2755 if (nelt != elt)
2756 {
0f68ba3e
AO
2757 cselib_any_perm_equivs = true;
2758
6f2ffb4b
AO
2759 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2760 cselib_preserve_value (nelt);
2761
2762 new_elt_loc_list (nelt, elt->val_rtx);
2763 }
2764
2765 cselib_current_insn = save_cselib_current_insn;
2766}
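
/* Sketch (assumed helper name): record that whatever REG holds at INSN is
   permanently equivalent to EXPR, e.g. a constant taken from a note.  EXPR
   must be free of side effects, and the value must be preserved first, as
   the asserts above require; this is normally only useful when cselib was
   initialized with CSELIB_PRESERVE_CONSTANTS.  */

static void
sketch_note_permanent_equiv (rtx reg, rtx expr, rtx_insn *insn)
{
  cselib_val *v = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
					   VOIDmode, insn);
  if (!v)
    return;
  cselib_preserve_value (v);
  cselib_add_permanent_equiv (v, expr, insn);
}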
2767
0f68ba3e
AO
2768/* Return TRUE if any permanent equivalences have been recorded since
2769 the table was last initialized. */
2770bool
2771cselib_have_permanent_equivalences (void)
2772{
2773 return cselib_any_perm_equivs;
2774}
2775
33c45e51
JJ
2776/* Record stack_pointer_rtx to be equal to
2777 (plus:P cfa_base_preserved_val offset). Used by var-tracking
2778 at the start of basic blocks for !frame_pointer_needed functions. */
2779
2780void
2781cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
2782{
2783 rtx sp_derived_value = NULL_RTX;
2784 for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
2785 if (GET_CODE (l->loc) == VALUE
2786 && SP_DERIVED_VALUE_P (l->loc))
2787 {
2788 sp_derived_value = l->loc;
2789 break;
2790 }
2791 else if (GET_CODE (l->loc) == PLUS
2792 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2793 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2794 && CONST_INT_P (XEXP (l->loc, 1)))
2795 {
2796 sp_derived_value = XEXP (l->loc, 0);
2797 offset = offset + UINTVAL (XEXP (l->loc, 1));
2798 break;
2799 }
2800 if (sp_derived_value == NULL_RTX)
2801 return;
2802 cselib_val *val
2803 = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
2804 Pmode, 1, VOIDmode, insn);
2805 if (val != NULL)
a615ea71
JJ
2806 {
2807 PRESERVED_VALUE_P (val->val_rtx) = 1;
2808 cselib_record_set (stack_pointer_rtx, val, NULL);
2809 }
33c45e51
JJ
2810}
2811
2812/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
2813 that can be expressed using cfa_base_preserved_val + CONST_INT. */
2814
2815bool
2816cselib_sp_derived_value_p (cselib_val *v)
2817{
2818 if (!SP_DERIVED_VALUE_P (v->val_rtx))
2819 for (struct elt_loc_list *l = v->locs; l; l = l->next)
2820 if (GET_CODE (l->loc) == PLUS
2821 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2822 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2823 && CONST_INT_P (XEXP (l->loc, 1)))
2824 v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
2825 if (!SP_DERIVED_VALUE_P (v->val_rtx))
2826 return false;
2827 for (struct elt_loc_list *l = v->locs; l; l = l->next)
2828 if (l->loc == cfa_base_preserved_val->val_rtx)
2829 return true;
2830 else if (GET_CODE (l->loc) == PLUS
2831 && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
2832 && CONST_INT_P (XEXP (l->loc, 1)))
2833 return true;
2834 return false;
2835}
2836
fa49fd0f
RK
2837/* There is no good way to determine how many elements there can be
2838 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2839#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2840
4deef538
AO
2841struct cselib_record_autoinc_data
2842{
2843 struct cselib_set *sets;
2844 int n_sets;
2845};
2846
2847/* Callback for for_each_inc_dec. Records in ARG the SETs implied by
 2848 autoinc RTXs: SRC plus SRCOFF (if SRCOFF is non-NULL) is stored in DEST. */
2849
2850static int
2851cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2852 rtx dest, rtx src, rtx srcoff, void *arg)
2853{
2854 struct cselib_record_autoinc_data *data;
2855 data = (struct cselib_record_autoinc_data *)arg;
2856
2857 data->sets[data->n_sets].dest = dest;
2858
2859 if (srcoff)
2860 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2861 else
2862 data->sets[data->n_sets].src = src;
2863
2864 data->n_sets++;
2865
8d8e205b 2866 return 0;
4deef538
AO
2867}
2868
2869/* Record the effects of any sets and autoincs in INSN. */
fa49fd0f 2870static void
dd60a84c 2871cselib_record_sets (rtx_insn *insn)
fa49fd0f
RK
2872{
2873 int n_sets = 0;
2874 int i;
b5b8b0ac 2875 struct cselib_set sets[MAX_SETS];
b7933c21 2876 rtx cond = 0;
4deef538 2877 int n_sets_before_autoinc;
ff111948 2878 int n_strict_low_parts = 0;
4deef538 2879 struct cselib_record_autoinc_data data;
fa49fd0f 2880
45309d28 2881 rtx body = PATTERN (insn);
b7933c21
BS
2882 if (GET_CODE (body) == COND_EXEC)
2883 {
2884 cond = COND_EXEC_TEST (body);
2885 body = COND_EXEC_CODE (body);
2886 }
2887
fa49fd0f
RK
2888 /* Find all sets. */
2889 if (GET_CODE (body) == SET)
2890 {
2891 sets[0].src = SET_SRC (body);
2892 sets[0].dest = SET_DEST (body);
2893 n_sets = 1;
2894 }
2895 else if (GET_CODE (body) == PARALLEL)
2896 {
2897 /* Look through the PARALLEL and record the values being
2898 set, if possible. Also handle any CLOBBERs. */
2899 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2900 {
2901 rtx x = XVECEXP (body, 0, i);
2902
2903 if (GET_CODE (x) == SET)
2904 {
2905 sets[n_sets].src = SET_SRC (x);
2906 sets[n_sets].dest = SET_DEST (x);
2907 n_sets++;
2908 }
2909 }
2910 }
2911
8dd5516b
JJ
2912 if (n_sets == 1
2913 && MEM_P (sets[0].src)
2914 && !cselib_record_memory
2915 && MEM_READONLY_P (sets[0].src))
2916 {
2917 rtx note = find_reg_equal_equiv_note (insn);
2918
2919 if (note && CONSTANT_P (XEXP (note, 0)))
2920 sets[0].src = XEXP (note, 0);
2921 }
2922
4deef538
AO
2923 data.sets = sets;
2924 data.n_sets = n_sets_before_autoinc = n_sets;
8d8e205b 2925 for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
4deef538
AO
2926 n_sets = data.n_sets;
2927
fa49fd0f
RK
2928 /* Look up the values that are read. Do this before invalidating the
2929 locations that are written. */
2930 for (i = 0; i < n_sets; i++)
2931 {
2932 rtx dest = sets[i].dest;
ff111948 2933 rtx orig = dest;
fa49fd0f
RK
2934
2935 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2936 the low part after invalidating any knowledge about larger modes. */
2937 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2938 sets[i].dest = dest = XEXP (dest, 0);
2939
2940 /* We don't know how to record anything but REG or MEM. */
f8cfc6aa 2941 if (REG_P (dest)
3c0cb5de 2942 || (MEM_P (dest) && cselib_record_memory))
fa49fd0f 2943 {
b7933c21
BS
2944 rtx src = sets[i].src;
2945 if (cond)
be9ed5d5 2946 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
4deef538 2947 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
3c0cb5de 2948 if (MEM_P (dest))
d4ebfa65 2949 {
ef4bddc2 2950 machine_mode address_mode = get_address_mode (dest);
d4ebfa65
BE
2951
2952 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
4deef538
AO
2953 address_mode, 1,
2954 GET_MODE (dest));
d4ebfa65 2955 }
fa49fd0f
RK
2956 else
2957 sets[i].dest_addr_elt = 0;
2958 }
ff111948
JJ
2959
 2960 /* Improve handling of STRICT_LOW_PART: if the current value is known
 2961 to be const0_rtx, then the low bits will be set to dest and the higher
2962 bits will remain zero. Used in code like:
2963
2964 {di:SI=0;clobber flags:CC;}
2965 flags:CCNO=cmp(bx:SI,0)
2966 strict_low_part(di:QI)=flags:CCNO<=0
2967
 2968 where we can note both that di:QI=flags:CCNO<=0 and
 2969 also that, because di:SI is known to be 0 and strict_low_part(di:QI)
 2970 preserves the upper bits, di:SI=zero_extend(flags:CCNO<=0). */
2971 scalar_int_mode mode;
2972 if (dest != orig
2973 && cselib_record_sets_hook
2974 && REG_P (dest)
2975 && HARD_REGISTER_P (dest)
bbc8d04f 2976 && sets[i].src_elt
ff111948
JJ
2977 && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
2978 && n_sets + n_strict_low_parts < MAX_SETS)
2979 {
2980 opt_scalar_int_mode wider_mode_iter;
2981 FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
2982 {
2983 scalar_int_mode wider_mode = wider_mode_iter.require ();
2984 if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
2985 break;
2986
2987 rtx reg = gen_lowpart (wider_mode, dest);
2988 if (!REG_P (reg))
2989 break;
2990
2991 cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
2992 if (!v)
2993 continue;
2994
2995 struct elt_loc_list *l;
2996 for (l = v->locs; l; l = l->next)
2997 if (l->loc == const0_rtx)
2998 break;
2999
3000 if (!l)
3001 continue;
3002
3003 sets[n_sets + n_strict_low_parts].dest = reg;
3004 sets[n_sets + n_strict_low_parts].src = dest;
3005 sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
3006 break;
3007 }
3008 }
fa49fd0f
RK
3009 }
3010
b5b8b0ac
AO
3011 if (cselib_record_sets_hook)
3012 cselib_record_sets_hook (insn, sets, n_sets);
3013
fa49fd0f
RK
3014 /* Invalidate all locations written by this insn. Note that the elts we
3015 looked up in the previous loop aren't affected, just some of their
3016 locations may go away. */
e8448ba5 3017 note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
fa49fd0f 3018
4deef538
AO
3019 for (i = n_sets_before_autoinc; i < n_sets; i++)
3020 cselib_invalidate_rtx (sets[i].dest);
3021
b7048ab7
RH
3022 /* If this is an asm, look for duplicate sets. This can happen when the
3023 user uses the same value as an output multiple times. This is valid
3024 if the outputs are not actually used thereafter. Treat this case as
3025 if the value isn't actually set. We do this by smashing the destination
3026 to pc_rtx, so that we won't record the value later. */
3027 if (n_sets >= 2 && asm_noperands (body) >= 0)
3028 {
3029 for (i = 0; i < n_sets; i++)
3030 {
3031 rtx dest = sets[i].dest;
3c0cb5de 3032 if (REG_P (dest) || MEM_P (dest))
b7048ab7
RH
3033 {
3034 int j;
3035 for (j = i + 1; j < n_sets; j++)
3036 if (rtx_equal_p (dest, sets[j].dest))
3037 {
3038 sets[i].dest = pc_rtx;
3039 sets[j].dest = pc_rtx;
3040 }
3041 }
3042 }
3043 }
3044
fa49fd0f
RK
3045 /* Now enter the equivalences in our tables. */
3046 for (i = 0; i < n_sets; i++)
3047 {
3048 rtx dest = sets[i].dest;
f8cfc6aa 3049 if (REG_P (dest)
3c0cb5de 3050 || (MEM_P (dest) && cselib_record_memory))
fa49fd0f
RK
3051 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
3052 }
ff111948
JJ
3053
3054 /* And deal with STRICT_LOW_PART. */
3055 for (i = 0; i < n_strict_low_parts; i++)
3056 {
3057 if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
3058 continue;
3059 machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
3060 cselib_val *v
3061 = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
3062 cselib_preserve_value (v);
3063 rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
3064 sets[n_sets + i].src_elt->val_rtx);
3065 cselib_add_permanent_equiv (v, r, insn);
3066 }
fa49fd0f
RK
3067}
3068
40155239
JJ
3069/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
3070
3071bool
8df68a82 3072fp_setter_insn (rtx_insn *insn)
40155239
JJ
3073{
3074 rtx expr, pat = NULL_RTX;
3075
3076 if (!RTX_FRAME_RELATED_P (insn))
3077 return false;
3078
3079 expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
3080 if (expr)
3081 pat = XEXP (expr, 0);
3082 if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
3083 return false;
3084
3085 /* Don't return true for frame pointer restores in the epilogue. */
3086 if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
3087 return false;
3088 return true;
3089}
3090
150760dd
RS
3091/* V is one of the values in REG_VALUES (REGNO). Return true if it
3092 would be invalidated by CALLEE_ABI. */
3093
3094static bool
3095cselib_invalidated_by_call_p (const function_abi &callee_abi,
3096 unsigned int regno, cselib_val *v)
3097{
3098 machine_mode mode = GET_MODE (v->val_rtx);
3099 if (mode == VOIDmode)
3100 {
3101 v = REG_VALUES (regno)->elt;
3102 if (!v)
3103 /* If we don't know what the mode of the constant value is, and we
3104 don't know what mode the register was set in, conservatively
3105 assume that the register is clobbered. The value's going to be
3106 essentially useless in this case anyway. */
3107 return true;
3108 mode = GET_MODE (v->val_rtx);
3109 }
3110 return callee_abi.clobbers_reg_p (mode, regno);
3111}
3112
fa49fd0f
RK
3113/* Record the effects of INSN. */
3114
3115void
dd60a84c 3116cselib_process_insn (rtx_insn *insn)
fa49fd0f
RK
3117{
3118 int i;
3119 rtx x;
3120
3121 cselib_current_insn = insn;
3122
f1257268 3123 /* Forget everything at a CODE_LABEL or a setjmp. */
f5d30aa6
JJ
3124 if ((LABEL_P (insn)
3125 || (CALL_P (insn)
f1257268 3126 && find_reg_note (insn, REG_SETJMP, NULL)))
f5d30aa6 3127 && !cselib_preserve_constants)
fa49fd0f 3128 {
5440c0e7 3129 cselib_reset_table (next_uid);
12ea1b95 3130 cselib_current_insn = NULL;
fa49fd0f
RK
3131 return;
3132 }
3133
3134 if (! INSN_P (insn))
3135 {
12ea1b95 3136 cselib_current_insn = NULL;
fa49fd0f
RK
3137 return;
3138 }
3139
3140 /* If this is a call instruction, forget anything stored in a
3141 call clobbered register, or, if this is not a const call, in
3142 memory. */
4b4bf941 3143 if (CALL_P (insn))
fa49fd0f 3144 {
6ee2cc70 3145 function_abi callee_abi = insn_callee_abi (insn);
fa49fd0f 3146 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
150760dd
RS
3147 {
3148 elt_list **l = &REG_VALUES (i);
3149 while (*l)
3150 {
3151 cselib_val *v = (*l)->elt;
3152 if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
3153 cselib_invalidate_regno_val (i, l);
3154 else
3155 l = &(*l)->next;
3156 }
3157 }
fa49fd0f 3158
becfd6e5
KZ
3159 /* Since it is not clear how cselib is going to be used, be
3160 conservative here and treat looping pure or const functions
3161 as if they were regular functions. */
3162 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
3163 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
fa49fd0f 3164 cselib_invalidate_mem (callmem);
007b405b
JDA
3165 else
3166 /* For const/pure calls, invalidate any argument slots because
3167 they are owned by the callee. */
3168 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3169 if (GET_CODE (XEXP (x, 0)) == USE
3170 && MEM_P (XEXP (XEXP (x, 0), 0)))
3171 cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
fa49fd0f
RK
3172 }
3173
3174 cselib_record_sets (insn);
3175
fa49fd0f
RK
3176 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
3177 after we have processed the insn. */
4b4bf941 3178 if (CALL_P (insn))
f5d30aa6
JJ
3179 {
3180 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
17d184e5
RS
3181 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3182 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
3183
99788e06 3184 /* Flush everything on setjmp. */
f5d30aa6
JJ
3185 if (cselib_preserve_constants
3186 && find_reg_note (insn, REG_SETJMP, NULL))
3187 {
3188 cselib_preserve_only_values ();
3189 cselib_reset_table (next_uid);
3190 }
3191 }
fa49fd0f 3192
40155239
JJ
3193 /* On setter of the hard frame pointer if frame_pointer_needed,
3194 invalidate stack_pointer_rtx, so that sp and {,h}fp based
3195 VALUEs are distinct. */
3196 if (reload_completed
3197 && frame_pointer_needed
3198 && fp_setter_insn (insn))
3199 cselib_invalidate_rtx (stack_pointer_rtx);
3200
12ea1b95 3201 cselib_current_insn = NULL;
fa49fd0f 3202
80662856
AO
3203 if (n_useless_values > MAX_USELESS_VALUES
3204 /* remove_useless_values is linear in the hash table size. Avoid
3205 quadratic behavior for very large hashtables with very few
3206 useless elements. */
3207 && ((unsigned int)n_useless_values
c203e8a7 3208 > (cselib_hash_table->elements () - n_debug_values) / 4))
80662856 3209 remove_useless_values ();
fa49fd0f
RK
3210}
3211
fa49fd0f
RK
3212/* Initialize cselib for one pass. The caller must also call
3213 init_alias_analysis. */
3214
3215void
457eeaae 3216cselib_init (int record_what)
fa49fd0f 3217{
457eeaae
JJ
3218 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
3219 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
0f68ba3e 3220 cselib_any_perm_equivs = false;
ac3768f6
SB
3221
 3222 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything;
 3223 see canon_true_dependence. This is only created once. */
fa49fd0f 3224 if (! callmem)
ac3768f6 3225 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
fa49fd0f
RK
3226
3227 cselib_nregs = max_reg_num ();
6790d1ab
JH
3228
3229 /* We preserve reg_values to allow expensive clearing of the whole thing.
 3230 Reallocate it, however, if it happens to be too large. */
3231 if (!reg_values || reg_values_size < cselib_nregs
3232 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
e2500fed 3233 {
04695783 3234 free (reg_values);
6790d1ab
JH
 3235 /* Some space for newly emitted instructions so we don't end up
3236 reallocating in between passes. */
3237 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
5ed6ace5 3238 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
e2500fed 3239 }
5ed6ace5 3240 used_regs = XNEWVEC (unsigned int, cselib_nregs);
6790d1ab 3241 n_used_regs = 0;
510c9192
ML
3242 /* FIXME: enable sanitization (PR87845) */
3243 cselib_hash_table
3244 = new hash_table<cselib_hasher> (31, /* ggc */ false,
3245 /* sanitize_eq_and_hash */ false);
0618dee5 3246 if (cselib_preserve_constants)
510c9192
ML
3247 cselib_preserved_hash_table
3248 = new hash_table<cselib_hasher> (31, /* ggc */ false,
3249 /* sanitize_eq_and_hash */ false);
5440c0e7 3250 next_uid = 1;
fa49fd0f
RK
3251}
3252
3253/* Called when the current user is done with cselib. */
3254
3255void
7080f735 3256cselib_finish (void)
fa49fd0f 3257{
0618dee5 3258 bool preserved = cselib_preserve_constants;
6fb5fa3c 3259 cselib_discard_hook = NULL;
457eeaae 3260 cselib_preserve_constants = false;
0f68ba3e 3261 cselib_any_perm_equivs = false;
457eeaae 3262 cfa_base_preserved_val = NULL;
9de9cbaf 3263 cfa_base_preserved_regno = INVALID_REGNUM;
fb0b2914
ML
3264 elt_list_pool.release ();
3265 elt_loc_list_pool.release ();
3266 cselib_val_pool.release ();
a78a26f1 3267 value_pool.release ();
eb232f4e 3268 cselib_clear_table ();
c203e8a7
TS
3269 delete cselib_hash_table;
3270 cselib_hash_table = NULL;
0618dee5 3271 if (preserved)
c203e8a7
TS
3272 delete cselib_preserved_hash_table;
3273 cselib_preserved_hash_table = NULL;
0fc0c4c9 3274 free (used_regs);
e2500fed 3275 used_regs = 0;
e2500fed 3276 n_useless_values = 0;
5847e8da
AO
3277 n_useless_debug_values = 0;
3278 n_debug_values = 0;
5440c0e7 3279 next_uid = 0;
fa49fd0f 3280}
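
/* Sketch of the usual driver shape (the helper name is an assumption):
   initialize, feed every insn in order, query as needed, and tear down.
   Alias analysis must bracket the cselib session, as noted above
   cselib_init.  */

static void
sketch_run_cselib_over_function (void)
{
  basic_block bb;
  rtx_insn *insn;

  init_alias_analysis ();
  cselib_init (CSELIB_RECORD_MEMORY);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      cselib_process_insn (insn);	/* Handles labels and notes itself.  */
  cselib_finish ();
  end_alias_analysis ();
}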
e2500fed 3281
4a8fb1a1 3282/* Dump the cselib_val *X to FILE *OUT. */
b5b8b0ac 3283
4a8fb1a1
LC
3284int
3285dump_cselib_val (cselib_val **x, FILE *out)
b5b8b0ac 3286{
4a8fb1a1 3287 cselib_val *v = *x;
b5b8b0ac
AO
3288 bool need_lf = true;
3289
3290 print_inline_rtx (out, v->val_rtx, 0);
3291
3292 if (v->locs)
3293 {
3294 struct elt_loc_list *l = v->locs;
3295 if (need_lf)
3296 {
3297 fputc ('\n', out);
3298 need_lf = false;
3299 }
3300 fputs (" locs:", out);
3301 do
3302 {
42286976
JJ
3303 if (l->setting_insn)
3304 fprintf (out, "\n from insn %i ",
3305 INSN_UID (l->setting_insn));
3306 else
3307 fprintf (out, "\n ");
b5b8b0ac
AO
3308 print_inline_rtx (out, l->loc, 4);
3309 }
3310 while ((l = l->next));
3311 fputc ('\n', out);
3312 }
3313 else
3314 {
3315 fputs (" no locs", out);
3316 need_lf = true;
3317 }
3318
3319 if (v->addr_list)
3320 {
3321 struct elt_list *e = v->addr_list;
3322 if (need_lf)
3323 {
3324 fputc ('\n', out);
3325 need_lf = false;
3326 }
3327 fputs (" addr list:", out);
3328 do
3329 {
3330 fputs ("\n ", out);
3331 print_inline_rtx (out, e->elt->val_rtx, 2);
3332 }
3333 while ((e = e->next));
3334 fputc ('\n', out);
3335 }
3336 else
3337 {
3338 fputs (" no addrs", out);
3339 need_lf = true;
3340 }
3341
3342 if (v->next_containing_mem == &dummy_val)
3343 fputs (" last mem\n", out);
3344 else if (v->next_containing_mem)
3345 {
3346 fputs (" next mem ", out);
3347 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
3348 fputc ('\n', out);
3349 }
3350 else if (need_lf)
3351 fputc ('\n', out);
3352
3353 return 1;
3354}
3355
3356/* Dump to OUT everything in the CSELIB table. */
3357
3358void
3359dump_cselib_table (FILE *out)
3360{
3361 fprintf (out, "cselib hash table:\n");
c203e8a7 3362 cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
0618dee5 3363 fprintf (out, "cselib preserved hash table:\n");
c203e8a7 3364 cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
b5b8b0ac
AO
3365 if (first_containing_mem != &dummy_val)
3366 {
3367 fputs ("first mem ", out);
3368 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
3369 fputc ('\n', out);
3370 }
5440c0e7 3371 fprintf (out, "next uid %i\n", next_uid);
b5b8b0ac
AO
3372}
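
/* Sketch (assumed helper name): snapshot the tables when diagnosing
   value-tracking problems; guarded the same way as the TDF_CSELIB dumps
   above.  Calling dump_cselib_table (stderr) directly from a debugger works
   as well.  */

static void
sketch_dump_cselib_state (void)
{
  if (dump_file && (dump_flags & TDF_CSELIB))
    dump_cselib_table (dump_file);
}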
3373
e2500fed 3374#include "gt-cselib.h"