/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "params.h"
#include "function-abi.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode,
                                     const_rtx = NULL);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

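/* As a rough illustration of the MEM case: looking up
   (mem:SI (plus:SI (reg:SI 1) (const_int 8))) first resolves the
   address -- the inner REG through REG_VALUES, the PLUS through the
   hash table -- and then walks the addr_list of the resulting address
   VALUE for an entry of mode SI.  */
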
struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
        promote_debug_loc (l);
        return true;
      }

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as the stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

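/* Note that the two macros above reuse RTL flag bits that are otherwise
   unused on VALUE rtxes: PRESERVED_VALUE_P is kept in the "unchanging"
   bit and SP_BASED_VALUE_P in the "jump" bit.  */
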

/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring addr_list into the canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it was constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

/* Remove from the hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs (regno,
                            GET_MODE (cfa_base_preserved_val->locs->loc));
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
      (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
          (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hash table.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on the stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on the stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      return XEXP (x, 0);

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}

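/* As a rough example of the above: with MEMMODE == SImode on a 32-bit
   target, (pre_dec:SI (reg:SI sp)) splits into the base (reg:SI sp)
   with *OFF set to (const_int -4), so pre-decrement addresses can be
   compared and hashed like the equivalent (plus (reg) (const_int))
   form.  */
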
/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
        return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
        return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        return rtx_equal_for_cselib_1 (x, y, memmode, depth);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, so it is undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

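/* For instance, (plus:SI (reg:SI 1) (reg:SI 2)) and
   (plus:SI (reg:SI 2) (reg:SI 1)) hash identically here, since the
   operand hashes are simply summed; colliding entries are then told
   apart by rtx_equal_for_cselib_1 in the hash table's equality
   test.  */
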
static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned)code
        + cselib_hash_rtx (XEXP (x, 0), create, memmode)
        + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
                           create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
        && MEM_ADDR_SPACE (l->loc) == as)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
          if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
            {
              promote_debug_loc (l->elt->locs);
              return l->elt;
            }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results, we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}

/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression.  Because local value
   numbering is such a weak optimization, the expanded expression is
   pretty much unique (not from a pointer equality point of view but
   from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

0b7e34d7
AO
1584/* Internal implementation of cselib_expand_value_rtx and
1585 cselib_expand_value_rtx_cb. */
1586
b5b8b0ac
AO
1587static rtx
1588cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1589 int max_depth)
6fb5fa3c
DB
1590{
1591 rtx copy, scopy;
1592 int i, j;
1593 RTX_CODE code;
1594 const char *format_ptr;
ef4bddc2 1595 machine_mode mode;
6fb5fa3c
DB
1596
1597 code = GET_CODE (orig);
1598
1599 /* For the context of dse, if we end up expand into a huge tree, we
1600 will not have a useful address, so we might as well just give up
1601 quickly. */
1602 if (max_depth <= 0)
1603 return NULL;
1604
1605 switch (code)
1606 {
1607 case REG:
1608 {
1609 struct elt_list *l = REG_VALUES (REGNO (orig));
1610
1611 if (l && l->elt == NULL)
1612 l = l->next;
1613 for (; l; l = l->next)
1614 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1615 {
1616 rtx result;
5a9fbcf1 1617 unsigned regno = REGNO (orig);
b8698a0f 1618
6fb5fa3c 1619 /* The only thing that we are not willing to do (this
6ed3da00 1620 is requirement of dse and if others potential uses
6fb5fa3c
DB
1621 need this function we should add a parm to control
1622 it) is that we will not substitute the
1623 STACK_POINTER_REGNUM, FRAME_POINTER or the
1624 HARD_FRAME_POINTER.
1625
cea618ac 1626 These expansions confuses the code that notices that
6fb5fa3c
DB
1627 stores into the frame go dead at the end of the
1628 function and that the frame is not effected by calls
1629 to subroutines. If you allow the
1630 STACK_POINTER_REGNUM substitution, then dse will
1631 think that parameter pushing also goes dead which is
1632 wrong. If you allow the FRAME_POINTER or the
1633 HARD_FRAME_POINTER then you lose the opportunity to
1634 make the frame assumptions. */
1635 if (regno == STACK_POINTER_REGNUM
1636 || regno == FRAME_POINTER_REGNUM
5a9fbcf1
AO
1637 || regno == HARD_FRAME_POINTER_REGNUM
1638 || regno == cfa_base_preserved_regno)
6fb5fa3c
DB
1639 return orig;
1640
b5b8b0ac 1641 bitmap_set_bit (evd->regs_active, regno);
6fb5fa3c 1642
4a3c9687 1643 if (dump_file && (dump_flags & TDF_CSELIB))
6fb5fa3c
DB
1644 fprintf (dump_file, "expanding: r%d into: ", regno);
1645
b5b8b0ac
AO
1646 result = expand_loc (l->elt->locs, evd, max_depth);
1647 bitmap_clear_bit (evd->regs_active, regno);
6fb5fa3c
DB
1648
1649 if (result)
1650 return result;
b8698a0f 1651 else
6fb5fa3c
DB
1652 return orig;
1653 }
f0bc3323 1654 return orig;
6fb5fa3c 1655 }
b8698a0f 1656
d8116890 1657 CASE_CONST_ANY:
6fb5fa3c
DB
1658 case SYMBOL_REF:
1659 case CODE_LABEL:
1660 case PC:
1661 case CC0:
1662 case SCRATCH:
1663 /* SCRATCH must be shared because they represent distinct values. */
1664 return orig;
1665 case CLOBBER:
99788e06 1666 case CLOBBER_HIGH:
6fb5fa3c
DB
1667 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1668 return orig;
1669 break;
1670
1671 case CONST:
1672 if (shared_const_p (orig))
1673 return orig;
1674 break;
1675
8dd5516b 1676 case SUBREG:
6fb5fa3c 1677 {
0b7e34d7
AO
1678 rtx subreg;
1679
1680 if (evd->callback)
1681 {
1682 subreg = evd->callback (orig, evd->regs_active, max_depth,
1683 evd->callback_arg);
1684 if (subreg != orig)
1685 return subreg;
1686 }
1687
1688 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1689 max_depth - 1);
8dd5516b
JJ
1690 if (!subreg)
1691 return NULL;
1692 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1693 GET_MODE (SUBREG_REG (orig)),
1694 SUBREG_BYTE (orig));
0b7e34d7
AO
1695 if (scopy == NULL
1696 || (GET_CODE (scopy) == SUBREG
1697 && !REG_P (SUBREG_REG (scopy))
1698 && !MEM_P (SUBREG_REG (scopy))))
1699 return NULL;
1700
8dd5516b 1701 return scopy;
6fb5fa3c 1702 }
1703
1704 case VALUE:
1705 {
1706 rtx result;
0b7e34d7 1707
4a3c9687 1708 if (dump_file && (dump_flags & TDF_CSELIB))
1709 {
1710 fputs ("\nexpanding ", dump_file);
1711 print_rtl_single (dump_file, orig);
1712 fputs (" into...", dump_file);
1713 }
8dd5516b 1714
0b7e34d7 1715 if (evd->callback)
1716 {
1717 result = evd->callback (orig, evd->regs_active, max_depth,
1718 evd->callback_arg);
1719
1720 if (result != orig)
1721 return result;
b5b8b0ac 1722 }
8dd5516b 1723
0b7e34d7 1724 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1725 return result;
1726 }
1727
1728 case DEBUG_EXPR:
1729 if (evd->callback)
1730 return evd->callback (orig, evd->regs_active, max_depth,
1731 evd->callback_arg);
1732 return orig;
1733
1734 default:
1735 break;
1736 }
1737
1738 /* Copy the various flags, fields, and other information. We assume
1739 that all fields need copying, and then clear the fields that should
1740 not be copied. That is the sensible default behavior, and forces
1741 us to explicitly document why we are *not* copying a flag. */
1742 if (evd->dummy)
1743 copy = NULL;
1744 else
1745 copy = shallow_copy_rtx (orig);
6fb5fa3c 1746
8dd5516b 1747 format_ptr = GET_RTX_FORMAT (code);
6fb5fa3c 1748
8dd5516b 1749 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1750 switch (*format_ptr++)
1751 {
1752 case 'e':
1753 if (XEXP (orig, i) != NULL)
1754 {
1755 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1756 max_depth - 1);
1757 if (!result)
1758 return NULL;
1759 if (copy)
1760 XEXP (copy, i) = result;
1761 }
1762 break;
1763
1764 case 'E':
1765 case 'V':
1766 if (XVEC (orig, i) != NULL)
1767 {
1768 if (copy)
1769 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1770 for (j = 0; j < XVECLEN (orig, i); j++)
6fb5fa3c 1771 {
1772 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1773 evd, max_depth - 1);
1774 if (!result)
1775 return NULL;
1776 if (copy)
1777 XVECEXP (copy, i, j) = result;
1778 }
1779 }
1780 break;
1781
1782 case 't':
1783 case 'w':
1784 case 'i':
1785 case 's':
1786 case 'S':
1787 case 'T':
1788 case 'u':
1789 case 'B':
1790 case '0':
1791 /* These are left unchanged. */
1792 break;
1793
1794 default:
1795 gcc_unreachable ();
1796 }
1797
1798 if (evd->dummy)
1799 return orig;
1800
1801 mode = GET_MODE (copy);
1802 /* If an operand has been simplified into a CONST_INT, which doesn't
1803 have a mode, and the mode isn't derivable from the whole rtx's mode,
1804 try simplify_*_operation first with the mode of the original operand,
1805 and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
1806 scopy = copy;
1807 switch (GET_RTX_CLASS (code))
1808 {
1809 case RTX_UNARY:
1810 if (CONST_INT_P (XEXP (copy, 0))
1811 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1812 {
1813 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1814 GET_MODE (XEXP (orig, 0)));
1815 if (scopy)
1816 return scopy;
1817 }
1818 break;
1819 case RTX_COMM_ARITH:
1820 case RTX_BIN_ARITH:
1821 /* These expressions can derive operand modes from the whole rtx's mode. */
1822 break;
1823 case RTX_TERNARY:
1824 case RTX_BITFIELD_OPS:
1825 if (CONST_INT_P (XEXP (copy, 0))
1826 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1827 {
1828 scopy = simplify_ternary_operation (code, mode,
1829 GET_MODE (XEXP (orig, 0)),
1830 XEXP (copy, 0), XEXP (copy, 1),
1831 XEXP (copy, 2));
1832 if (scopy)
1833 return scopy;
1834 }
1835 break;
1836 case RTX_COMPARE:
1837 case RTX_COMM_COMPARE:
1838 if (CONST_INT_P (XEXP (copy, 0))
1839 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1840 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1841 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1842 {
1843 scopy = simplify_relational_operation (code, mode,
1844 (GET_MODE (XEXP (orig, 0))
1845 != VOIDmode)
1846 ? GET_MODE (XEXP (orig, 0))
1847 : GET_MODE (XEXP (orig, 1)),
1848 XEXP (copy, 0),
1849 XEXP (copy, 1));
1850 if (scopy)
1851 return scopy;
1852 }
1853 break;
1854 default:
1855 break;
1856 }
1857 scopy = simplify_rtx (copy);
1858 if (scopy)
3af4ba41 1859 return scopy;
1860 return copy;
1861}
1862
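/* Illustrative example only (the VALUE number and registers below are
   made up, not taken from any real dump): if VALUE 17 has the locations
   (reg:SI 6) and (plus:SI (reg:SI 7) (const_int 16)), and r6 may not be
   substituted, then expanding (mem:SI (value:SI 17)) with a sufficient
   MAX_DEPTH yields

	(mem:SI (plus:SI (reg:SI 7) (const_int 16)))

   whereas running out of MAX_DEPTH, or finding no usable location,
   makes the walk give up and return NULL.  */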
1863/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1864 with VALUE expressions. This way, it becomes independent of changes
1865 to registers and memory.
1866 X isn't actually modified; if modifications are needed, new rtl is
1867 allocated. However, the return value can share rtl with X.
1868 If X is within a MEM, MEMMODE must be the mode of the MEM. */
fa49fd0f 1869
91700444 1870rtx
ef4bddc2 1871cselib_subst_to_values (rtx x, machine_mode memmode)
1872{
1873 enum rtx_code code = GET_CODE (x);
1874 const char *fmt = GET_RTX_FORMAT (code);
1875 cselib_val *e;
1876 struct elt_list *l;
1877 rtx copy = x;
1878 int i;
cf098191 1879 poly_int64 offset;
1880
1881 switch (code)
1882 {
1883 case REG:
1884 l = REG_VALUES (REGNO (x));
1885 if (l && l->elt == NULL)
1886 l = l->next;
1887 for (; l; l = l->next)
1888 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1889 return l->elt->val_rtx;
fa49fd0f 1890
341c100f 1891 gcc_unreachable ();
1892
1893 case MEM:
1894 e = cselib_lookup_mem (x, 0);
1895 /* This used to happen for autoincrements, but we deal with them
1896 properly now. Remove the if stmt for the next release. */
fa49fd0f 1897 if (! e)
91700444 1898 {
4deef538 1899 /* Assign a value that doesn't match any other. */
5440c0e7 1900 e = new_cselib_val (next_uid, GET_MODE (x), x);
91700444 1901 }
757bbef8 1902 return e->val_rtx;
fa49fd0f 1903
1904 case ENTRY_VALUE:
1905 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1906 if (! e)
1907 break;
1908 return e->val_rtx;
1909
d8116890 1910 CASE_CONST_ANY:
1911 return x;
1912
4deef538 1913 case PRE_DEC:
91700444 1914 case PRE_INC:
4deef538 1915 gcc_assert (memmode != VOIDmode);
cf098191 1916 offset = GET_MODE_SIZE (memmode);
4deef538 1917 if (code == PRE_DEC)
cf098191 1918 offset = -offset;
0a81f074 1919 return cselib_subst_to_values (plus_constant (GET_MODE (x),
cf098191 1920 XEXP (x, 0), offset),
1921 memmode);
1922
1923 case PRE_MODIFY:
1924 gcc_assert (memmode != VOIDmode);
1925 return cselib_subst_to_values (XEXP (x, 1), memmode);
1926
91700444 1927 case POST_DEC:
4deef538 1928 case POST_INC:
91700444 1929 case POST_MODIFY:
1930 gcc_assert (memmode != VOIDmode);
1931 return cselib_subst_to_values (XEXP (x, 0), memmode);
7080f735 1932
1933 default:
1934 break;
1935 }
1936
1937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1938 {
1939 if (fmt[i] == 'e')
1940 {
4deef538 1941 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
fa49fd0f 1942
1943 if (t != XEXP (x, i))
1944 {
1945 if (x == copy)
1946 copy = shallow_copy_rtx (x);
1947 XEXP (copy, i) = t;
1948 }
1949 }
1950 else if (fmt[i] == 'E')
1951 {
bd7960b1 1952 int j;
1953
1954 for (j = 0; j < XVECLEN (x, i); j++)
1955 {
4deef538 1956 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
fa49fd0f 1957
bd7960b1 1958 if (t != XVECEXP (x, i, j))
fa49fd0f 1959 {
1960 if (XVEC (x, i) == XVEC (copy, i))
1961 {
1962 if (x == copy)
1963 copy = shallow_copy_rtx (x);
1964 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1965 }
1966 XVECEXP (copy, i, j) = t;
fa49fd0f 1967 }
1968 }
1969 }
1970 }
1971
1972 return copy;
1973}
1974
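/* Illustrative example only (the VALUE number is made up): assuming
   (reg:SI 10) has already been given a VALUE with uid 23, substituting

	(plus:SI (reg:SI 10) (const_int 4))

   yields

	(plus:SI (value:SI 23) (const_int 4))

   so the returned rtx stays meaningful even after r10 is invalidated.  */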
1975/* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1976
1977rtx
ef4bddc2 1978cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
1979{
1980 rtx ret;
1981 gcc_assert (!cselib_current_insn);
1982 cselib_current_insn = insn;
1983 ret = cselib_subst_to_values (x, memmode);
1984 cselib_current_insn = NULL;
1985 return ret;
1986}
1987
1988/* Look up the rtl expression X in our tables and return the value it
1989 has. If CREATE is zero, we return NULL if we don't know the value.
1990 Otherwise, we create a new one if possible, using mode MODE if X
1991 doesn't have a mode (i.e. because it's a constant). When X is part
1992 of an address, MEMMODE should be the mode of the enclosing MEM if
1993 we're tracking autoinc expressions. */
fa49fd0f 1994
5847e8da 1995static cselib_val *
1996cselib_lookup_1 (rtx x, machine_mode mode,
1997 int create, machine_mode memmode)
fa49fd0f 1998{
4a8fb1a1 1999 cselib_val **slot;
2000 cselib_val *e;
2001 unsigned int hashval;
2002
2003 if (GET_MODE (x) != VOIDmode)
2004 mode = GET_MODE (x);
2005
2006 if (GET_CODE (x) == VALUE)
2007 return CSELIB_VAL_PTR (x);
2008
f8cfc6aa 2009 if (REG_P (x))
2010 {
2011 struct elt_list *l;
2012 unsigned int i = REGNO (x);
2013
2014 l = REG_VALUES (i);
2015 if (l && l->elt == NULL)
2016 l = l->next;
2017 for (; l; l = l->next)
757bbef8 2018 if (mode == GET_MODE (l->elt->val_rtx))
2019 {
2020 promote_debug_loc (l->elt->locs);
2021 return l->elt;
2022 }
2023
2024 if (! create)
5847e8da 2025 return 0;
fa49fd0f 2026
2027 if (i < FIRST_PSEUDO_REGISTER)
2028 {
ad474626 2029 unsigned int n = hard_regno_nregs (i, mode);
2030
2031 if (n > max_value_regs)
2032 max_value_regs = n;
2033 }
2034
5440c0e7 2035 e = new_cselib_val (next_uid, GET_MODE (x), x);
6f2ffb4b 2036 new_elt_loc_list (e, x);
2037
2038 scalar_int_mode int_mode;
fa49fd0f 2039 if (REG_VALUES (i) == 0)
2040 {
2041 /* Maintain the invariant that the first entry of
2042 REG_VALUES, if present, must be the value used to set the
2043 register, or NULL. */
6790d1ab 2044 used_regs[n_used_regs++] = i;
2045 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2046 }
509f4495 2047 else if (cselib_preserve_constants
b4206259 2048 && is_int_mode (mode, &int_mode))
2049 {
2050 /* During var-tracking, try harder to find equivalences
2051 for SUBREGs. If a setter sets, say, a DImode register
2052 and the user uses that register only in SImode, add a lowpart
2053 subreg location. */
2054 struct elt_list *lwider = NULL;
b4206259 2055 scalar_int_mode lmode;
2056 l = REG_VALUES (i);
2057 if (l && l->elt == NULL)
2058 l = l->next;
2059 for (; l; l = l->next)
2060 if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2061 && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
509f4495 2062 && (lwider == NULL
2063 || partial_subreg_p (lmode,
2064 GET_MODE (lwider->elt->val_rtx))))
2065 {
2066 struct elt_loc_list *el;
2067 if (i < FIRST_PSEUDO_REGISTER
ad474626 2068 && hard_regno_nregs (i, lmode) != 1)
2069 continue;
2070 for (el = l->elt->locs; el; el = el->next)
2071 if (!REG_P (el->loc))
2072 break;
2073 if (el)
2074 lwider = l;
2075 }
2076 if (lwider)
2077 {
b4206259 2078 rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
2079 GET_MODE (lwider->elt->val_rtx));
2080 if (sub)
6f2ffb4b 2081 new_elt_loc_list (e, sub);
2082 }
2083 }
60fa6660 2084 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
f956adb9 2085 slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
fa49fd0f 2086 *slot = e;
5847e8da 2087 return e;
2088 }
2089
3c0cb5de 2090 if (MEM_P (x))
5847e8da 2091 return cselib_lookup_mem (x, create);
fa49fd0f 2092
4deef538 2093 hashval = cselib_hash_rtx (x, create, memmode);
2094 /* Can't even create if hashing is not possible. */
2095 if (! hashval)
5847e8da 2096 return 0;
fa49fd0f 2097
f956adb9 2098 slot = cselib_find_slot (mode, x, hashval,
4deef538 2099 create ? INSERT : NO_INSERT, memmode);
fa49fd0f 2100 if (slot == 0)
5847e8da 2101 return 0;
2102
2103 e = (cselib_val *) *slot;
2104 if (e)
5847e8da 2105 return e;
fa49fd0f 2106
b5b8b0ac 2107 e = new_cselib_val (hashval, mode, x);
2108
2109 /* We have to fill the slot before calling cselib_subst_to_values:
2110 the hash table is inconsistent until we do so, and
2111 cselib_subst_to_values will need to do lookups. */
4a8fb1a1 2112 *slot = e;
6f2ffb4b 2113 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2114 return e;
2115}
2116
2117/* Wrapper for cselib_lookup, that indicates X is in INSN. */
2118
2119cselib_val *
2120cselib_lookup_from_insn (rtx x, machine_mode mode,
2121 int create, machine_mode memmode, rtx_insn *insn)
2122{
2123 cselib_val *ret;
2124
2125 gcc_assert (!cselib_current_insn);
2126 cselib_current_insn = insn;
2127
4deef538 2128 ret = cselib_lookup (x, mode, create, memmode);
2129
2130 cselib_current_insn = NULL;
2131
2132 return ret;
2133}
2134
2135/* Wrapper for cselib_lookup_1, that logs the lookup result and
2136 maintains invariants related with debug insns. */
2137
2138cselib_val *
2139cselib_lookup (rtx x, machine_mode mode,
2140 int create, machine_mode memmode)
5847e8da 2141{
4deef538 2142 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2143
2144 /* ??? Should we return NULL if we're not to create an entry, the
2145 found loc is a debug loc and cselib_current_insn is not DEBUG?
2146 If so, we should also avoid converting val to non-DEBUG; that is
2147 probably easiest done by setting cselib_current_insn to NULL before
2148 the call above. */
2149
4a3c9687 2150 if (dump_file && (dump_flags & TDF_CSELIB))
2151 {
2152 fputs ("cselib lookup ", dump_file);
2153 print_inline_rtx (dump_file, x, 2);
2154 fprintf (dump_file, " => %u:%u\n",
2155 ret ? ret->uid : 0,
2156 ret ? ret->hash : 0);
2157 }
2158
2159 return ret;
2160}
2161
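/* Minimal usage sketch, not itself part of the cselib API: a client can
   test two rtxs for a known equivalence by comparing the canonical
   VALUEs of non-creating lookups.  The variable names are only for
   illustration.

	cselib_val *vx = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	cselib_val *vy = cselib_lookup (y, GET_MODE (y), 0, VOIDmode);
	bool equiv = (vx && vy
		      && canonical_cselib_val (vx) == canonical_cselib_val (vy));
*/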
2162/* Invalidate any entries in reg_values that overlap REGNO. This is called
2163 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2164 is used to determine how many hard registers are being changed. If MODE
2165 is VOIDmode, then only REGNO is being changed; this is used when
2166 invalidating call clobbered registers across a call. */
2167
2168static void
2169cselib_invalidate_regno (unsigned int regno, machine_mode mode,
2170 const_rtx setter)
2171{
2172 unsigned int endregno;
2173 unsigned int i;
2174
2175 /* If we see pseudos after reload, something is _wrong_. */
2176 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2177 || reg_renumber[regno] < 0);
2178
2179 /* Determine the range of registers that must be invalidated. For
2180 pseudos, only REGNO is affected. For hard regs, we must take MODE
2181 into account, and we must also invalidate lower register numbers
2182 if they contain values that overlap REGNO. */
291aac59 2183 if (regno < FIRST_PSEUDO_REGISTER)
31825e57 2184 {
341c100f 2185 gcc_assert (mode != VOIDmode);
7080f735 2186
2187 if (regno < max_value_regs)
2188 i = 0;
2189 else
2190 i = regno - max_value_regs;
fa49fd0f 2191
09e18274 2192 endregno = end_hard_regno (mode, regno);
2193
2194 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2195 gcc_assert (endregno == regno + 1);
2196 }
2197 else
2198 {
2199 i = regno;
2200 endregno = regno + 1;
2201 }
2202
2203 for (; i < endregno; i++)
2204 {
2205 struct elt_list **l = &REG_VALUES (i);
2206
2207 /* Go through all known values for this reg; if it overlaps the range
2208 we're invalidating, remove the value. */
2209 while (*l)
2210 {
2211 cselib_val *v = (*l)->elt;
5847e8da 2212 bool had_locs;
21afc57d 2213 rtx_insn *setting_insn;
2214 struct elt_loc_list **p;
2215 unsigned int this_last = i;
2216
60fa6660 2217 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
09e18274 2218 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
fa49fd0f 2219
2220 if (this_last < regno || v == NULL
2221 || (v == cfa_base_preserved_val
2222 && i == cfa_base_preserved_regno))
2223 {
2224 l = &(*l)->next;
2225 continue;
2226 }
2227
2228 /* Ignore if clobber high and the register isn't clobbered. */
2229 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2230 {
2231 gcc_assert (endregno == regno + 1);
2232 const_rtx x = XEXP (setter, 0);
2233 if (!reg_is_clobbered_by_clobber_high (i, GET_MODE (v->val_rtx),
2234 x))
2235 {
2236 l = &(*l)->next;
2237 continue;
2238 }
2239 }
2240
fa49fd0f 2241 /* We have an overlap. */
2242 if (*l == REG_VALUES (i))
2243 {
2244 /* Maintain the invariant that the first entry of
2245 REG_VALUES, if present, must be the value used to set
2246 the register, or NULL. This is also nice because
2247 then we won't push the same regno onto user_regs
2248 multiple times. */
2249 (*l)->elt = NULL;
2250 l = &(*l)->next;
2251 }
2252 else
2253 unchain_one_elt_list (l);
fa49fd0f 2254
2255 v = canonical_cselib_val (v);
2256
2257 had_locs = v->locs != NULL;
2258 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2259
2260 /* Now, we clear the mapping from value to reg. It must exist, so
2261 this code will crash intentionally if it doesn't. */
2262 for (p = &v->locs; ; p = &(*p)->next)
2263 {
2264 rtx x = (*p)->loc;
2265
f8cfc6aa 2266 if (REG_P (x) && REGNO (x) == i)
2267 {
2268 unchain_one_elt_loc_list (p);
2269 break;
2270 }
2271 }
2272
2273 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2274 {
2275 if (setting_insn && DEBUG_INSN_P (setting_insn))
2276 n_useless_debug_values++;
2277 else
2278 n_useless_values++;
2279 }
2280 }
2281 }
2282}
9ddb66ca 2283\f
2284/* Invalidate any locations in the table which are changed because of a
2285 store to MEM_RTX. If this is called because of a non-const call
2286 instruction, MEM_RTX is (mem:BLK (scratch)); see cselib_init. */
fa49fd0f 2287
7101fb18 2288static void
7080f735 2289cselib_invalidate_mem (rtx mem_rtx)
fa49fd0f 2290{
7101fb18 2291 cselib_val **vp, *v, *next;
c65ecebc 2292 int num_mems = 0;
2293 rtx mem_addr;
2294
2295 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2296 mem_rtx = canon_rtx (mem_rtx);
fa49fd0f 2297
2298 vp = &first_containing_mem;
2299 for (v = *vp; v != &dummy_val; v = next)
fa49fd0f 2300 {
2301 bool has_mem = false;
2302 struct elt_loc_list **p = &v->locs;
5847e8da 2303 bool had_locs = v->locs != NULL;
21afc57d 2304 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
fa49fd0f 2305
7101fb18 2306 while (*p)
fa49fd0f 2307 {
2308 rtx x = (*p)->loc;
2309 cselib_val *addr;
2310 struct elt_list **mem_chain;
2311
2312 /* MEMs may occur in locations only at the top level; below
2313 that every MEM or REG is substituted by its VALUE. */
3c0cb5de 2314 if (!MEM_P (x))
fa49fd0f 2315 {
2316 p = &(*p)->next;
2317 continue;
2318 }
c65ecebc 2319 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2320 && ! canon_anti_dependence (x, false, mem_rtx,
2321 GET_MODE (mem_rtx), mem_addr))
2322 {
2323 has_mem = true;
c65ecebc 2324 num_mems++;
7101fb18
JH
2325 p = &(*p)->next;
2326 continue;
2327 }
2328
2329 /* This one overlaps. */
2330 /* We must have a mapping from this MEM's address to the
2331 value (E). Remove that, too. */
4deef538 2332 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2333 addr = canonical_cselib_val (addr);
2334 gcc_checking_assert (v == canonical_cselib_val (v));
2335 mem_chain = &addr->addr_list;
2336 for (;;)
2337 {
2338 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2339
2340 if (canon == v)
2341 {
2342 unchain_one_elt_list (mem_chain);
2343 break;
2344 }
fa49fd0f 2345
2346 /* Record canonicalized elt. */
2347 (*mem_chain)->elt = canon;
2348
2349 mem_chain = &(*mem_chain)->next;
2350 }
fa49fd0f 2351
2352 unchain_one_elt_loc_list (p);
2353 }
fa49fd0f 2354
b5b8b0ac 2355 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2356 {
2357 if (setting_insn && DEBUG_INSN_P (setting_insn))
2358 n_useless_debug_values++;
2359 else
2360 n_useless_values++;
2361 }
fa49fd0f 2362
2363 next = v->next_containing_mem;
2364 if (has_mem)
2365 {
2366 *vp = v;
2367 vp = &(*vp)->next_containing_mem;
2368 }
2369 else
2370 v->next_containing_mem = NULL;
2371 }
2372 *vp = &dummy_val;
2373}
2374
99788e06 2375/* Invalidate DEST, which is being assigned to or clobbered by SETTER. */
fa49fd0f 2376
0d87c765 2377void
99788e06 2378cselib_invalidate_rtx (rtx dest, const_rtx setter)
fa49fd0f 2379{
2380 while (GET_CODE (dest) == SUBREG
2381 || GET_CODE (dest) == ZERO_EXTRACT
2382 || GET_CODE (dest) == STRICT_LOW_PART)
2383 dest = XEXP (dest, 0);
2384
f8cfc6aa 2385 if (REG_P (dest))
99788e06 2386 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest), setter);
3c0cb5de 2387 else if (MEM_P (dest))
fa49fd0f 2388 cselib_invalidate_mem (dest);
2389}
2390
2391/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2392
2393static void
99788e06 2394cselib_invalidate_rtx_note_stores (rtx dest, const_rtx setter,
2395 void *data ATTRIBUTE_UNUSED)
2396{
99788e06 2397 cselib_invalidate_rtx (dest, setter);
2398}
2399
2400/* Record the result of a SET instruction. DEST is being set; the source
2401 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2402 describes its address. */
2403
2404static void
7080f735 2405cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
fa49fd0f 2406{
2407 if (src_elt == 0 || side_effects_p (dest))
2408 return;
2409
dc8afb70 2410 if (REG_P (dest))
fa49fd0f 2411 {
dc8afb70 2412 unsigned int dreg = REGNO (dest);
2413 if (dreg < FIRST_PSEUDO_REGISTER)
2414 {
dc8afb70 2415 unsigned int n = REG_NREGS (dest);
2416
2417 if (n > max_value_regs)
2418 max_value_regs = n;
2419 }
2420
2421 if (REG_VALUES (dreg) == 0)
2422 {
6790d1ab 2423 used_regs[n_used_regs++] = dreg;
2424 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2425 }
2426 else
2427 {
2428 /* The register should have been invalidated. */
2429 gcc_assert (REG_VALUES (dreg)->elt == 0);
2430 REG_VALUES (dreg)->elt = src_elt;
2431 }
2432
b5b8b0ac 2433 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
fa49fd0f 2434 n_useless_values--;
6f2ffb4b 2435 new_elt_loc_list (src_elt, dest);
fa49fd0f 2436 }
3c0cb5de 2437 else if (MEM_P (dest) && dest_addr_elt != 0
463301c3 2438 && cselib_record_memory)
fa49fd0f 2439 {
b5b8b0ac 2440 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2441 n_useless_values--;
2442 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2443 }
2444}
2445
2446/* Make ELT and X's VALUE equivalent to each other at INSN. */
2447
2448void
12ea1b95 2449cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2450{
2451 cselib_val *nelt;
12ea1b95 2452 rtx_insn *save_cselib_current_insn = cselib_current_insn;
2453
2454 gcc_checking_assert (elt);
2455 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2456 gcc_checking_assert (!side_effects_p (x));
2457
2458 cselib_current_insn = insn;
2459
2460 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2461
2462 if (nelt != elt)
2463 {
2464 cselib_any_perm_equivs = true;
2465
2466 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2467 cselib_preserve_value (nelt);
2468
2469 new_elt_loc_list (nelt, elt->val_rtx);
2470 }
2471
2472 cselib_current_insn = save_cselib_current_insn;
2473}
2474
2475/* Return TRUE if any permanent equivalences have been recorded since
2476 the table was last initialized. */
2477bool
2478cselib_have_permanent_equivalences (void)
2479{
2480 return cselib_any_perm_equivs;
2481}
2482
2483/* There is no good way to determine how many elements there can be
2484 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2485#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2486
2487struct cselib_record_autoinc_data
2488{
2489 struct cselib_set *sets;
2490 int n_sets;
2491};
2492
2493/* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2494 autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
2495
2496static int
2497cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2498 rtx dest, rtx src, rtx srcoff, void *arg)
2499{
2500 struct cselib_record_autoinc_data *data;
2501 data = (struct cselib_record_autoinc_data *)arg;
2502
2503 data->sets[data->n_sets].dest = dest;
2504
2505 if (srcoff)
2506 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2507 else
2508 data->sets[data->n_sets].src = src;
2509
2510 data->n_sets++;
2511
8d8e205b 2512 return 0;
2513}
2514
2515/* Record the effects of any sets and autoincs in INSN. */
fa49fd0f 2516static void
dd60a84c 2517cselib_record_sets (rtx_insn *insn)
2518{
2519 int n_sets = 0;
2520 int i;
b5b8b0ac 2521 struct cselib_set sets[MAX_SETS];
b7933c21 2522 rtx cond = 0;
4deef538 2523 int n_sets_before_autoinc;
ff111948 2524 int n_strict_low_parts = 0;
4deef538 2525 struct cselib_record_autoinc_data data;
fa49fd0f 2526
45309d28 2527 rtx body = PATTERN (insn);
2528 if (GET_CODE (body) == COND_EXEC)
2529 {
2530 cond = COND_EXEC_TEST (body);
2531 body = COND_EXEC_CODE (body);
2532 }
2533
2534 /* Find all sets. */
2535 if (GET_CODE (body) == SET)
2536 {
2537 sets[0].src = SET_SRC (body);
2538 sets[0].dest = SET_DEST (body);
2539 n_sets = 1;
2540 }
2541 else if (GET_CODE (body) == PARALLEL)
2542 {
2543 /* Look through the PARALLEL and record the values being
2544 set, if possible. Also handle any CLOBBERs. */
2545 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2546 {
2547 rtx x = XVECEXP (body, 0, i);
2548
2549 if (GET_CODE (x) == SET)
2550 {
2551 sets[n_sets].src = SET_SRC (x);
2552 sets[n_sets].dest = SET_DEST (x);
2553 n_sets++;
2554 }
2555 }
2556 }
2557
2558 if (n_sets == 1
2559 && MEM_P (sets[0].src)
2560 && !cselib_record_memory
2561 && MEM_READONLY_P (sets[0].src))
2562 {
2563 rtx note = find_reg_equal_equiv_note (insn);
2564
2565 if (note && CONSTANT_P (XEXP (note, 0)))
2566 sets[0].src = XEXP (note, 0);
2567 }
2568
2569 data.sets = sets;
2570 data.n_sets = n_sets_before_autoinc = n_sets;
8d8e205b 2571 for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
2572 n_sets = data.n_sets;
2573
2574 /* Look up the values that are read. Do this before invalidating the
2575 locations that are written. */
2576 for (i = 0; i < n_sets; i++)
2577 {
2578 rtx dest = sets[i].dest;
ff111948 2579 rtx orig = dest;
2580
2581 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2582 the low part after invalidating any knowledge about larger modes. */
2583 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2584 sets[i].dest = dest = XEXP (dest, 0);
2585
2586 /* We don't know how to record anything but REG or MEM. */
f8cfc6aa 2587 if (REG_P (dest)
3c0cb5de 2588 || (MEM_P (dest) && cselib_record_memory))
fa49fd0f 2589 {
2590 rtx src = sets[i].src;
2591 if (cond)
be9ed5d5 2592 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
4deef538 2593 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
3c0cb5de 2594 if (MEM_P (dest))
d4ebfa65 2595 {
ef4bddc2 2596 machine_mode address_mode = get_address_mode (dest);
2597
2598 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2599 address_mode, 1,
2600 GET_MODE (dest));
d4ebfa65 2601 }
2602 else
2603 sets[i].dest_addr_elt = 0;
2604 }
2605
2606 /* Improve handling of STRICT_LOW_PART: if the current value is known
2607 to be const0_rtx, then the low bits will be set to DEST and the
2608 higher bits will remain zero. Used in code like:
2609
2610 {di:SI=0;clobber flags:CC;}
2611 flags:CCNO=cmp(bx:SI,0)
2612 strict_low_part(di:QI)=flags:CCNO<=0
2613
2614 where we can note both that di:QI=flags:CCNO<=0 and
2615 also that because di:SI is known to be 0 and strict_low_part(di:QI)
2616 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0). */
2617 scalar_int_mode mode;
2618 if (dest != orig
2619 && cselib_record_sets_hook
2620 && REG_P (dest)
2621 && HARD_REGISTER_P (dest)
bbc8d04f 2622 && sets[i].src_elt
2623 && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
2624 && n_sets + n_strict_low_parts < MAX_SETS)
2625 {
2626 opt_scalar_int_mode wider_mode_iter;
2627 FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
2628 {
2629 scalar_int_mode wider_mode = wider_mode_iter.require ();
2630 if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
2631 break;
2632
2633 rtx reg = gen_lowpart (wider_mode, dest);
2634 if (!REG_P (reg))
2635 break;
2636
2637 cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
2638 if (!v)
2639 continue;
2640
2641 struct elt_loc_list *l;
2642 for (l = v->locs; l; l = l->next)
2643 if (l->loc == const0_rtx)
2644 break;
2645
2646 if (!l)
2647 continue;
2648
2649 sets[n_sets + n_strict_low_parts].dest = reg;
2650 sets[n_sets + n_strict_low_parts].src = dest;
2651 sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
2652 break;
2653 }
2654 }
2655 }
2656
2657 if (cselib_record_sets_hook)
2658 cselib_record_sets_hook (insn, sets, n_sets);
2659
2660 /* Invalidate all locations written by this insn. Note that the elts we
2661 looked up in the previous loop aren't affected, just some of their
2662 locations may go away. */
e8448ba5 2663 note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
fa49fd0f 2664
2665 for (i = n_sets_before_autoinc; i < n_sets; i++)
2666 cselib_invalidate_rtx (sets[i].dest);
2667
2668 /* If this is an asm, look for duplicate sets. This can happen when the
2669 user uses the same value as an output multiple times. This is valid
2670 if the outputs are not actually used thereafter. Treat this case as
2671 if the value isn't actually set. We do this by smashing the destination
2672 to pc_rtx, so that we won't record the value later. */
2673 if (n_sets >= 2 && asm_noperands (body) >= 0)
2674 {
2675 for (i = 0; i < n_sets; i++)
2676 {
2677 rtx dest = sets[i].dest;
3c0cb5de 2678 if (REG_P (dest) || MEM_P (dest))
2679 {
2680 int j;
2681 for (j = i + 1; j < n_sets; j++)
2682 if (rtx_equal_p (dest, sets[j].dest))
2683 {
2684 sets[i].dest = pc_rtx;
2685 sets[j].dest = pc_rtx;
2686 }
2687 }
2688 }
2689 }
2690
2691 /* Now enter the equivalences in our tables. */
2692 for (i = 0; i < n_sets; i++)
2693 {
2694 rtx dest = sets[i].dest;
f8cfc6aa 2695 if (REG_P (dest)
3c0cb5de 2696 || (MEM_P (dest) && cselib_record_memory))
2697 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2698 }
2699
2700 /* And deal with STRICT_LOW_PART. */
2701 for (i = 0; i < n_strict_low_parts; i++)
2702 {
2703 if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
2704 continue;
2705 machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
2706 cselib_val *v
2707 = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
2708 cselib_preserve_value (v);
2709 rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
2710 sets[n_sets + i].src_elt->val_rtx);
2711 cselib_add_permanent_equiv (v, r, insn);
2712 }
2713}
2714
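/* Worked example (a made-up insn, not taken from any target): for

	(parallel [(set (reg:SI 3) (plus:SI (reg:SI 4) (const_int 1)))
		   (clobber (reg:CC 17))])

   only the SET is recorded: sets[0].dest is (reg:SI 3) and
   sets[0].src_elt is the VALUE looked up for the PLUS, while the
   CLOBBER is handled by the invalidation walk over the pattern via
   note_pattern_stores.  */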
2715/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
2716
2717bool
8df68a82 2718fp_setter_insn (rtx_insn *insn)
2719{
2720 rtx expr, pat = NULL_RTX;
2721
2722 if (!RTX_FRAME_RELATED_P (insn))
2723 return false;
2724
2725 expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
2726 if (expr)
2727 pat = XEXP (expr, 0);
2728 if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
2729 return false;
2730
2731 /* Don't return true for frame pointer restores in the epilogue. */
2732 if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
2733 return false;
2734 return true;
2735}
2736
2737/* Record the effects of INSN. */
2738
2739void
dd60a84c 2740cselib_process_insn (rtx_insn *insn)
2741{
2742 int i;
2743 rtx x;
2744
2745 cselib_current_insn = insn;
2746
f1257268 2747 /* Forget everything at a CODE_LABEL or a setjmp. */
2748 if ((LABEL_P (insn)
2749 || (CALL_P (insn)
f1257268 2750 && find_reg_note (insn, REG_SETJMP, NULL)))
f5d30aa6 2751 && !cselib_preserve_constants)
fa49fd0f 2752 {
5440c0e7 2753 cselib_reset_table (next_uid);
12ea1b95 2754 cselib_current_insn = NULL;
2755 return;
2756 }
2757
2758 if (! INSN_P (insn))
2759 {
12ea1b95 2760 cselib_current_insn = NULL;
2761 return;
2762 }
2763
2764 /* If this is a call instruction, forget anything stored in a
2765 call clobbered register, or, if this is not a const call, in
2766 memory. */
4b4bf941 2767 if (CALL_P (insn))
fa49fd0f 2768 {
6ee2cc70 2769 function_abi callee_abi = insn_callee_abi (insn);
fa49fd0f 2770 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2771 if (elt_list *values = REG_VALUES (i))
2772 {
2773 /* If we know what mode the value was set in, check whether
2774 it is still available after the call in that mode. If we
2775 don't know the mode, we have to check for the worst-case
2776 scenario instead. */
2777 if (values->elt)
2778 {
2779 machine_mode mode = GET_MODE (values->elt->val_rtx);
2780 if (callee_abi.clobbers_reg_p (mode, i))
2781 cselib_invalidate_regno (i, mode);
2782 }
2783 else
2784 {
2785 if (callee_abi.clobbers_at_least_part_of_reg_p (i))
2786 cselib_invalidate_regno (i, reg_raw_mode[i]);
2787 }
2788 }
fa49fd0f 2789
2790 /* Since it is not clear how cselib is going to be used, be
2791 conservative here and treat looping pure or const functions
2792 as if they were regular functions. */
2793 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2794 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
fa49fd0f 2795 cselib_invalidate_mem (callmem);
2796 else
2797 /* For const/pure calls, invalidate any argument slots because
2798 they are owned by the callee. */
2799 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2800 if (GET_CODE (XEXP (x, 0)) == USE
2801 && MEM_P (XEXP (XEXP (x, 0), 0)))
2802 cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
2803 }
2804
2805 cselib_record_sets (insn);
2806
2807 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2808 after we have processed the insn. */
4b4bf941 2809 if (CALL_P (insn))
2810 {
2811 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2812 {
2813 gcc_assert (GET_CODE (XEXP (x, 0)) != CLOBBER_HIGH);
2814 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2815 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2816 }
2817 /* Flush everything on setjmp. */
2818 if (cselib_preserve_constants
2819 && find_reg_note (insn, REG_SETJMP, NULL))
2820 {
2821 cselib_preserve_only_values ();
2822 cselib_reset_table (next_uid);
2823 }
2824 }
fa49fd0f 2825
2826 /* On a setter of the hard frame pointer, if frame_pointer_needed,
2827 invalidate stack_pointer_rtx, so that sp and {,h}fp based
2828 VALUEs are distinct. */
2829 if (reload_completed
2830 && frame_pointer_needed
2831 && fp_setter_insn (insn))
2832 cselib_invalidate_rtx (stack_pointer_rtx);
2833
12ea1b95 2834 cselib_current_insn = NULL;
fa49fd0f 2835
2836 if (n_useless_values > MAX_USELESS_VALUES
2837 /* remove_useless_values is linear in the hash table size. Avoid
2838 quadratic behavior for very large hashtables with very few
2839 useless elements. */
2840 && ((unsigned int)n_useless_values
c203e8a7 2841 > (cselib_hash_table->elements () - n_debug_values) / 4))
80662856 2842 remove_useless_values ();
2843}
2844
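/* Minimal usage sketch, assuming a typical client pass; the basic-block
   and insn iteration below belongs to the caller and is not provided by
   cselib itself:

	cselib_init (CSELIB_RECORD_MEMORY);
	FOR_EACH_BB_FN (bb, cfun)
	  FOR_BB_INSNS (bb, insn)
	    if (INSN_P (insn))
	      cselib_process_insn (insn);
	cselib_finish ();
*/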
2845/* Initialize cselib for one pass. The caller must also call
2846 init_alias_analysis. */
2847
2848void
457eeaae 2849cselib_init (int record_what)
fa49fd0f 2850{
2851 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2852 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
0f68ba3e 2853 cselib_any_perm_equivs = false;
2854
2855 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2856 see canon_true_dependence. This is only created once. */
fa49fd0f 2857 if (! callmem)
ac3768f6 2858 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2859
2860 cselib_nregs = max_reg_num ();
2861
2862 /* We preserve reg_values to allow expensive clearing of the whole thing.
2863 Reallocate it however if it happens to be too large. */
2864 if (!reg_values || reg_values_size < cselib_nregs
2865 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
e2500fed 2866 {
04695783 2867 free (reg_values);
2868 /* Leave some space for newly emitted instructions so we don't end up
2869 reallocating in between passes. */
2870 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
5ed6ace5 2871 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
e2500fed 2872 }
5ed6ace5 2873 used_regs = XNEWVEC (unsigned int, cselib_nregs);
6790d1ab 2874 n_used_regs = 0;
2875 /* FIXME: enable sanitization (PR87845) */
2876 cselib_hash_table
2877 = new hash_table<cselib_hasher> (31, /* ggc */ false,
2878 /* sanitize_eq_and_hash */ false);
0618dee5 2879 if (cselib_preserve_constants)
2880 cselib_preserved_hash_table
2881 = new hash_table<cselib_hasher> (31, /* ggc */ false,
2882 /* sanitize_eq_and_hash */ false);
5440c0e7 2883 next_uid = 1;
2884}
2885
2886/* Called when the current user is done with cselib. */
2887
2888void
7080f735 2889cselib_finish (void)
fa49fd0f 2890{
0618dee5 2891 bool preserved = cselib_preserve_constants;
6fb5fa3c 2892 cselib_discard_hook = NULL;
457eeaae 2893 cselib_preserve_constants = false;
0f68ba3e 2894 cselib_any_perm_equivs = false;
457eeaae 2895 cfa_base_preserved_val = NULL;
9de9cbaf 2896 cfa_base_preserved_regno = INVALID_REGNUM;
2897 elt_list_pool.release ();
2898 elt_loc_list_pool.release ();
2899 cselib_val_pool.release ();
a78a26f1 2900 value_pool.release ();
eb232f4e 2901 cselib_clear_table ();
2902 delete cselib_hash_table;
2903 cselib_hash_table = NULL;
0618dee5 2904 if (preserved)
2905 delete cselib_preserved_hash_table;
2906 cselib_preserved_hash_table = NULL;
0fc0c4c9 2907 free (used_regs);
e2500fed 2908 used_regs = 0;
e2500fed 2909 n_useless_values = 0;
2910 n_useless_debug_values = 0;
2911 n_debug_values = 0;
5440c0e7 2912 next_uid = 0;
fa49fd0f 2913}
e2500fed 2914
4a8fb1a1 2915/* Dump the cselib_val *X to FILE *OUT. */
b5b8b0ac 2916
2917int
2918dump_cselib_val (cselib_val **x, FILE *out)
b5b8b0ac 2919{
4a8fb1a1 2920 cselib_val *v = *x;
2921 bool need_lf = true;
2922
2923 print_inline_rtx (out, v->val_rtx, 0);
2924
2925 if (v->locs)
2926 {
2927 struct elt_loc_list *l = v->locs;
2928 if (need_lf)
2929 {
2930 fputc ('\n', out);
2931 need_lf = false;
2932 }
2933 fputs (" locs:", out);
2934 do
2935 {
2936 if (l->setting_insn)
2937 fprintf (out, "\n from insn %i ",
2938 INSN_UID (l->setting_insn));
2939 else
2940 fprintf (out, "\n ");
2941 print_inline_rtx (out, l->loc, 4);
2942 }
2943 while ((l = l->next));
2944 fputc ('\n', out);
2945 }
2946 else
2947 {
2948 fputs (" no locs", out);
2949 need_lf = true;
2950 }
2951
2952 if (v->addr_list)
2953 {
2954 struct elt_list *e = v->addr_list;
2955 if (need_lf)
2956 {
2957 fputc ('\n', out);
2958 need_lf = false;
2959 }
2960 fputs (" addr list:", out);
2961 do
2962 {
2963 fputs ("\n ", out);
2964 print_inline_rtx (out, e->elt->val_rtx, 2);
2965 }
2966 while ((e = e->next));
2967 fputc ('\n', out);
2968 }
2969 else
2970 {
2971 fputs (" no addrs", out);
2972 need_lf = true;
2973 }
2974
2975 if (v->next_containing_mem == &dummy_val)
2976 fputs (" last mem\n", out);
2977 else if (v->next_containing_mem)
2978 {
2979 fputs (" next mem ", out);
2980 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2981 fputc ('\n', out);
2982 }
2983 else if (need_lf)
2984 fputc ('\n', out);
2985
2986 return 1;
2987}
2988
2989/* Dump to OUT everything in the CSELIB table. */
2990
2991void
2992dump_cselib_table (FILE *out)
2993{
2994 fprintf (out, "cselib hash table:\n");
c203e8a7 2995 cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
0618dee5 2996 fprintf (out, "cselib preserved hash table:\n");
c203e8a7 2997 cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
2998 if (first_containing_mem != &dummy_val)
2999 {
3000 fputs ("first mem ", out);
3001 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
3002 fputc ('\n', out);
3003 }
5440c0e7 3004 fprintf (out, "next uid %i\n", next_uid);
3005}
3006
e2500fed 3007#include "gt-cselib.h"