/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "function-abi.h"
#include "alias.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

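/* Illustrative sketch (an editorial addition, not original GCC text):
   a pass holding a pseudo register takes the REG path above, e.g.

     rtx reg = gen_rtx_REG (SImode, 100);
     cselib_val *v = cselib_lookup (reg, SImode, 0, VOIDmode);

   A MEM first resolves its address recursively; everything else is
   hashed with cselib_hash_rtx and matched slot by slot using
   rtx_equal_for_cselib_1.  */
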
struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
        return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
        promote_debug_loc (l);
        return true;
      }

  return false;
}

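/* Illustrative note (an addition): hash equality is necessary but not
   sufficient, so two distinct expressions such as
   (plus (reg 1) (reg 2)) and (plus (reg 2) (reg 1)) can land in the
   same bucket; the loop above then decides true equality by walking
   the recorded locations with rtx_equal_for_cselib_1.  */
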
/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

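/* Concrete scenario (an illustrative addition): if a VALUE's only loc
   was recorded by a debug insn and later becomes useless, the counters
   above make us bump n_useless_debug_values rather than
   n_useless_values, so compilations with and without -g agree on
   n_useless_values and the MAX_USELESS_VALUES trigger below fires at
   the same points.  */
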
/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);


/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring addr_list into the canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

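/* Worked example (added for exposition; the uids are hypothetical):
   when (value:SI 5) is recorded as a location of (value:SI 3), the
   code above merges the two equivalence classes, keeping the value
   with the smaller uid (3) canonical: all locs of value 5 are spliced
   into value 3's list, and value 5 is left with a single loc pointing
   back at value 3.  */
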
/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it were constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs (regno,
                            GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, we also need to preserve the
         SP_DERIVED_VALUE_P value.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
           l; l = l->next)
        if (GET_CODE (l->loc) == PLUS
            && GET_CODE (XEXP (l->loc, 0)) == VALUE
            && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
            && CONST_INT_P (XEXP (l->loc, 1)))
          {
            if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
              {
                rtx val = cfa_base_preserved_val->val_rtx;
                rtx_insn *save_cselib_current_insn = cselib_current_insn;
                cselib_current_insn = l->setting_insn;
                new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
                                  plus_constant (Pmode, val,
                                                 -UINTVAL (XEXP (l->loc, 1))));
                cselib_current_insn = save_cselib_current_insn;
              }
            break;
          }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
          || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
          && !PRESERVED_VALUE_P (v->val_rtx)
          && !SP_DERIVED_VALUE_P (v->val_rtx));
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
        e = CSELIB_VAL_PTR (x);
      else
        e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
        {
          if (SP_DERIVED_VALUE_P (e->val_rtx)
              && (*off == NULL_RTX || *off == const0_rtx))
            {
              *off = NULL_RTX;
              return e->val_rtx;
            }
          for (struct elt_loc_list *l = e->locs; l; l = l->next)
            if (GET_CODE (l->loc) == PLUS
                && GET_CODE (XEXP (l->loc, 0)) == VALUE
                && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                && CONST_INT_P (XEXP (l->loc, 1)))
              {
                if (*off == NULL_RTX)
                  *off = XEXP (l->loc, 1);
                else
                  *off = plus_constant (Pmode, *off,
                                        INTVAL (XEXP (l->loc, 1)));
                if (*off == const0_rtx)
                  *off = NULL_RTX;
                return XEXP (l->loc, 0);
              }
        }
    }
  return x;
}

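/* Illustrative example (an addition; the mode is hypothetical): for
   the address inside (mem:SI (pre_dec:SI (reg sp))), autoinc_split
   peels the pre-decrement, storing (const_int -4) in *OFF (assuming
   4-byte SImode) and returning the inner address, possibly
   canonicalized to its VALUE, so the expression compares and hashes
   like (plus (reg sp) (const_int -4)).  */
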
/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (y) == Pmode)
        {
          rtx yoff = NULL;
          rtx yr = autoinc_split (y, &yoff, memmode);
          if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
            return 1;
        }

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (x) == Pmode)
        {
          rtx xoff = NULL;
          rtx xr = autoinc_split (x, &xoff, memmode);
          if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
            return 1;
        }

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
          && GET_MODE (x) == Pmode
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        {
          if (!xoff != !yoff)
            return 0;

          if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
            return 0;

          return rtx_equal_for_cselib_1 (x, y, memmode, depth);
        }

      if (GET_CODE (xorig) != GET_CODE (yorig))
        return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
        return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, so it is undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

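/* Illustrative note (an addition): because the 'e' case above also
   tries the swapped operand order for commutative codes, a query such
   as

     rtx_equal_for_cselib_1 (a, b, VOIDmode, 0)

   with a = (plus:SI (reg:SI 1) (reg:SI 2)) and
   b = (plus:SI (reg:SI 2) (reg:SI 1)) returns 1, matching the
   commutativity-insensitive hash computed by cselib_hash_rtx below.  */
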
/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.  */
bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)))
    return false;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
                                       GET_MODE (SET_DEST (set)),
                                       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
         the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
        {
          rtx src_equiv = l->loc;
          while (GET_CODE (src_equiv) == SUBREG
                 || GET_CODE (src_equiv) == ZERO_EXTRACT
                 || GET_CODE (src_equiv) == STRICT_LOW_PART)
            src_equiv = XEXP (src_equiv, 0);

          if (MEM_P (src_equiv))
            {
              /* Match the MEMs by comparing the addresses.  We can
                 only remove the later store if the earlier aliases at
                 least all the accesses of the later one.  */
              if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
                                          GET_MODE (dest), 0))
                return mems_same_for_tbaa_p (src_equiv, dest);
            }
        }
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  rtx src = SET_SRC (set);
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}

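/* Worked example (an illustrative addition): for an insn pattern like
   (set (mem:SI (reg p)) (mem:SI (reg q))) where cselib has proved
   (reg p) and (reg q) equal, the fallback at the end of
   cselib_redundant_set_p recognizes the no-op store *p = *q, provided
   mems_same_for_tbaa_p confirms the two accesses have compatible
   alias sets.  */
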
/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
                            machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
          && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        {
          e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
          c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
          break;
        }
  if (c == 0)
    return e->hash;

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a
   different strategy to avoid returning spurious 0, e.g. return
   ~(~0U >> 1).
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
     (set (reg:SI 1) (const_int))
     (plus:SI (reg:SI 2) (reg:SI 1))
   and
     (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE))
        {
          HOST_WIDE_INT c;
          if (offset.is_constant (&c))
            return cselib_hash_plus_const_int (XEXP (x, 0),
                                               trunc_int_for_mode (c, Pmode),
                                               create, memmode);
        }
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
              + cselib_hash_rtx (XEXP (x, 0), create, memmode)
              + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
                                 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE)
          && CONST_INT_P (XEXP (x, 1)))
        return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
                                           create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

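/* Illustrative property (an addition, following the comment before
   cselib_hash_rtx): operand order does not affect the result because
   each operand's hash is simply added into the total, so
     (plus:SI (reg:SI 1) (reg:SI 2))
     (plus:SI (reg:SI 2) (reg:SI 1))
   hash identically, while a return value of 0 is reserved to mean
   "couldn't hash".  */
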
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
        {
          PUT_MODE_RAW (copy, narrow_mode_iter);
          cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
          if (v)
            {
              rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
              if (sub)
                new_elt_loc_list (v, sub);
            }
        }
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as an address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
        && MEM_ADDR_SPACE (l->loc) == as)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
          if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
            {
              promote_debug_loc (l->elt->locs);
              return l->elt;
            }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

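/* Hedged usage sketch (an addition; the RTL is hypothetical): looking
   up (mem:SI (reg:DI 100)) first records a value for the address
   (reg:DI 100), then scans that value's addr_list for an SImode MEM
   in the same address space; only if none exists and CREATE is
   nonzero is a fresh cselib_val made and linked in via
   add_mem_for_addr.  */
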
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer equals point of
   view but from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

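/* Hedged usage sketch (an addition, not from the original file): a
   pass such as DSE can expand an address VALUE back toward its roots
   roughly like so:

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx expanded = cselib_expand_value_rtx (addr, regs_active, 5);
     BITMAP_FREE (regs_active);

   where 5 bounds the recursion depth; a NULL result means some
   operand had no usable expansion.  */
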
/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse and if other potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM
                  || regno == cfa_base_preserved_regno)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_CSELIB))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
        return orig;
      }

1916 CASE_CONST_ANY:
1917 case SYMBOL_REF:
1918 case CODE_LABEL:
1919 case PC:
1920 case SCRATCH:
1921 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
1922 return orig;
1923 case CLOBBER:
1924 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1925 return orig;
1926 break;
1927
1928 case CONST:
1929 if (shared_const_p (orig))
1930 return orig;
1931 break;
1932
1933 case SUBREG:
1934 {
1935 rtx subreg;
1936
1937 if (evd->callback)
1938 {
1939 subreg = evd->callback (orig, evd->regs_active, max_depth,
1940 evd->callback_arg);
1941 if (subreg != orig)
1942 return subreg;
1943 }
1944
1945 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1946 max_depth - 1);
1947 if (!subreg)
1948 return NULL;
1949 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1950 GET_MODE (SUBREG_REG (orig)),
1951 SUBREG_BYTE (orig));
1952 if (scopy == NULL
1953 || (GET_CODE (scopy) == SUBREG
1954 && !REG_P (SUBREG_REG (scopy))
1955 && !MEM_P (SUBREG_REG (scopy))))
1956 return NULL;
1957
1958 return scopy;
1959 }
1960
1961 case VALUE:
1962 {
1963 rtx result;
1964
1965 if (dump_file && (dump_flags & TDF_CSELIB))
1966 {
1967 fputs ("\nexpanding ", dump_file);
1968 print_rtl_single (dump_file, orig);
1969 fputs (" into...", dump_file);
1970 }
1971
1972 if (evd->callback)
1973 {
1974 result = evd->callback (orig, evd->regs_active, max_depth,
1975 evd->callback_arg);
1976
1977 if (result != orig)
1978 return result;
1979 }
1980
1981 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1982 return result;
1983 }
1984
1985 case DEBUG_EXPR:
1986 if (evd->callback)
1987 return evd->callback (orig, evd->regs_active, max_depth,
1988 evd->callback_arg);
1989 return orig;
1990
1991 default:
1992 break;
1993 }
1994
1995 /* Copy the various flags, fields, and other information. We assume
1996 that all fields need copying, and then clear the fields that should
1997 not be copied. That is the sensible default behavior, and forces
1998 us to explicitly document why we are *not* copying a flag. */
1999 if (evd->dummy)
2000 copy = NULL;
2001 else
2002 copy = shallow_copy_rtx (orig);
2003
2004 format_ptr = GET_RTX_FORMAT (code);
2005
2006 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2007 switch (*format_ptr++)
2008 {
2009 case 'e':
2010 if (XEXP (orig, i) != NULL)
2011 {
2012 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
2013 max_depth - 1);
2014 if (!result)
2015 return NULL;
2016 if (copy)
2017 XEXP (copy, i) = result;
2018 }
2019 break;
2020
2021 case 'E':
2022 case 'V':
2023 if (XVEC (orig, i) != NULL)
2024 {
2025 if (copy)
2026 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2027 for (j = 0; j < XVECLEN (orig, i); j++)
2028 {
2029 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
2030 evd, max_depth - 1);
2031 if (!result)
2032 return NULL;
2033 if (copy)
2034 XVECEXP (copy, i, j) = result;
2035 }
2036 }
2037 break;
2038
2039 case 't':
2040 case 'w':
2041 case 'i':
2042 case 's':
2043 case 'S':
2044 case 'T':
2045 case 'u':
2046 case 'B':
2047 case '0':
2048 /* These are left unchanged. */
2049 break;
2050
2051 default:
2052 gcc_unreachable ();
2053 }
2054
2055 if (evd->dummy)
2056 return orig;
2057
2058 mode = GET_MODE (copy);
2059 /* If an operand has been simplified into a CONST_INT, which doesn't
2060 have a mode, and the mode isn't derivable from the whole rtx's mode,
2061 try simplify_*_operation first with the mode of the original operand,
2062 and as a fallback wrap the CONST_INT in gen_rtx_CONST. */
2063 scopy = copy;
2064 switch (GET_RTX_CLASS (code))
2065 {
2066 case RTX_UNARY:
2067 if (CONST_INT_P (XEXP (copy, 0))
2068 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2069 {
2070 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
2071 GET_MODE (XEXP (orig, 0)));
2072 if (scopy)
2073 return scopy;
2074 }
2075 break;
2076 case RTX_COMM_ARITH:
2077 case RTX_BIN_ARITH:
2078 /* These expressions can derive operand modes from the whole rtx's mode. */
2079 break;
2080 case RTX_TERNARY:
2081 case RTX_BITFIELD_OPS:
2082 if (CONST_INT_P (XEXP (copy, 0))
2083 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2084 {
2085 scopy = simplify_ternary_operation (code, mode,
2086 GET_MODE (XEXP (orig, 0)),
2087 XEXP (copy, 0), XEXP (copy, 1),
2088 XEXP (copy, 2));
2089 if (scopy)
2090 return scopy;
2091 }
2092 break;
2093 case RTX_COMPARE:
2094 case RTX_COMM_COMPARE:
2095 if (CONST_INT_P (XEXP (copy, 0))
2096 && GET_MODE (XEXP (copy, 1)) == VOIDmode
2097 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
2098 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
2099 {
2100 scopy = simplify_relational_operation (code, mode,
2101 (GET_MODE (XEXP (orig, 0))
2102 != VOIDmode)
2103 ? GET_MODE (XEXP (orig, 0))
2104 : GET_MODE (XEXP (orig, 1)),
2105 XEXP (copy, 0),
2106 XEXP (copy, 1));
2107 if (scopy)
2108 return scopy;
2109 }
2110 break;
2111 default:
2112 break;
2113 }
2114 scopy = simplify_rtx (copy);
2115 if (scopy)
2116 return scopy;
2117 return copy;
2118 }
2119
2120 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
2121 with VALUE expressions. This way, it becomes independent of changes
2122 to registers and memory.
2123 X isn't actually modified; if modifications are needed, new rtl is
2124 allocated. However, the return value can share rtl with X.
2125 If X is within a MEM, MEMMODE must be the mode of the MEM. */
2126
2127 rtx
2128 cselib_subst_to_values (rtx x, machine_mode memmode)
2129 {
2130 enum rtx_code code = GET_CODE (x);
2131 const char *fmt = GET_RTX_FORMAT (code);
2132 cselib_val *e;
2133 struct elt_list *l;
2134 rtx copy = x;
2135 int i;
2136 poly_int64 offset;
2137
2138 switch (code)
2139 {
2140 case REG:
2141 l = REG_VALUES (REGNO (x));
2142 if (l && l->elt == NULL)
2143 l = l->next;
2144 for (; l; l = l->next)
2145 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
2146 return l->elt->val_rtx;
2147
2148 gcc_unreachable ();
2149
2150 case MEM:
2151 e = cselib_lookup_mem (x, 0);
2152 /* This used to happen for autoincrements, but we deal with them
2153 properly now. Remove the if stmt for the next release. */
2154 if (! e)
2155 {
2156 /* Assign a value that doesn't match any other. */
2157 e = new_cselib_val (next_uid, GET_MODE (x), x);
2158 }
2159 return e->val_rtx;
2160
2161 case ENTRY_VALUE:
2162 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
2163 if (! e)
2164 break;
2165 return e->val_rtx;
2166
2167 CASE_CONST_ANY:
2168 return x;
2169
2170 case PRE_DEC:
2171 case PRE_INC:
2172 gcc_assert (memmode != VOIDmode);
2173 offset = GET_MODE_SIZE (memmode);
2174 if (code == PRE_DEC)
2175 offset = -offset;
2176 return cselib_subst_to_values (plus_constant (GET_MODE (x),
2177 XEXP (x, 0), offset),
2178 memmode);
2179
2180 case PRE_MODIFY:
2181 gcc_assert (memmode != VOIDmode);
2182 return cselib_subst_to_values (XEXP (x, 1), memmode);
2183
2184 case POST_DEC:
2185 case POST_INC:
2186 case POST_MODIFY:
2187 gcc_assert (memmode != VOIDmode);
2188 return cselib_subst_to_values (XEXP (x, 0), memmode);
2189
2190 case PLUS:
2191 if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
2192 {
2193 rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
2194 if (GET_CODE (t) == VALUE)
2195 {
2196 if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
2197 return t;
2198 for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
2199 l; l = l->next)
2200 if (GET_CODE (l->loc) == PLUS
2201 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2202 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2203 && CONST_INT_P (XEXP (l->loc, 1)))
2204 return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
2205 }
2206 if (t != XEXP (x, 0))
2207 {
2208 copy = shallow_copy_rtx (x);
2209 XEXP (copy, 0) = t;
2210 }
2211 return copy;
2212 }
2213
2214 default:
2215 break;
2216 }
2217
2218 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2219 {
2220 if (fmt[i] == 'e')
2221 {
2222 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
2223
2224 if (t != XEXP (x, i))
2225 {
2226 if (x == copy)
2227 copy = shallow_copy_rtx (x);
2228 XEXP (copy, i) = t;
2229 }
2230 }
2231 else if (fmt[i] == 'E')
2232 {
2233 int j;
2234
2235 for (j = 0; j < XVECLEN (x, i); j++)
2236 {
2237 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
2238
2239 if (t != XVECEXP (x, i, j))
2240 {
2241 if (XVEC (x, i) == XVEC (copy, i))
2242 {
2243 if (x == copy)
2244 copy = shallow_copy_rtx (x);
2245 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
2246 }
2247 XVECEXP (copy, i, j) = t;
2248 }
2249 }
2250 }
2251 }
2252
2253 return copy;
2254 }
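
/* For example (illustrative), with MEMMODE == SImode the address
   (pre_dec:P (reg sp)) is substituted exactly as
   (plus:P (reg sp) (const_int -4)) would be: the PRE_DEC case above
   rewrites it with plus_constant before recursing.  */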
2255
2256 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
2257
2258 rtx
2259 cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
2260 {
2261 rtx ret;
2262 gcc_assert (!cselib_current_insn);
2263 cselib_current_insn = insn;
2264 ret = cselib_subst_to_values (x, memmode);
2265 cselib_current_insn = NULL;
2266 return ret;
2267 }
2268
2269 /* Look up the rtl expression X in our tables and return the value it
2270 has. If CREATE is zero, we return NULL if we don't know the value.
2271 Otherwise, we create a new one if possible, using mode MODE if X
2272 doesn't have a mode (i.e. because it's a constant). When X is part
2273 of an address, MEMMODE should be the mode of the enclosing MEM if
2274 we're tracking autoinc expressions. */
2275
2276 static cselib_val *
2277 cselib_lookup_1 (rtx x, machine_mode mode,
2278 int create, machine_mode memmode)
2279 {
2280 cselib_val **slot;
2281 cselib_val *e;
2282 unsigned int hashval;
2283
2284 if (GET_MODE (x) != VOIDmode)
2285 mode = GET_MODE (x);
2286
2287 if (GET_CODE (x) == VALUE)
2288 return CSELIB_VAL_PTR (x);
2289
2290 if (REG_P (x))
2291 {
2292 struct elt_list *l;
2293 unsigned int i = REGNO (x);
2294
2295 l = REG_VALUES (i);
2296 if (l && l->elt == NULL)
2297 l = l->next;
2298 for (; l; l = l->next)
2299 if (mode == GET_MODE (l->elt->val_rtx))
2300 {
2301 promote_debug_loc (l->elt->locs);
2302 return l->elt;
2303 }
2304
2305 if (! create)
2306 return 0;
2307
2308 if (i < FIRST_PSEUDO_REGISTER)
2309 {
2310 unsigned int n = hard_regno_nregs (i, mode);
2311
2312 if (n > max_value_regs)
2313 max_value_regs = n;
2314 }
2315
2316 e = new_cselib_val (next_uid, GET_MODE (x), x);
2317 if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
2318 SP_DERIVED_VALUE_P (e->val_rtx) = 1;
2319 new_elt_loc_list (e, x);
2320
2321 scalar_int_mode int_mode;
2322 if (REG_VALUES (i) == 0)
2323 {
2324 /* Maintain the invariant that the first entry of
2325 REG_VALUES, if present, must be the value used to set the
2326 register, or NULL. */
2327 used_regs[n_used_regs++] = i;
2328 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2329 }
2330 else if (cselib_preserve_constants
2331 && is_int_mode (mode, &int_mode))
2332 {
2333 /* During var-tracking, try harder to find equivalences
2334 for SUBREGs. If a setter sets, say, a DImode register
2335 and the register is then used only in SImode, add a lowpart
2336 subreg location. */
2337 struct elt_list *lwider = NULL;
2338 scalar_int_mode lmode;
2339 l = REG_VALUES (i);
2340 if (l && l->elt == NULL)
2341 l = l->next;
2342 for (; l; l = l->next)
2343 if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2344 && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
2345 && (lwider == NULL
2346 || partial_subreg_p (lmode,
2347 GET_MODE (lwider->elt->val_rtx))))
2348 {
2349 struct elt_loc_list *el;
2350 if (i < FIRST_PSEUDO_REGISTER
2351 && hard_regno_nregs (i, lmode) != 1)
2352 continue;
2353 for (el = l->elt->locs; el; el = el->next)
2354 if (!REG_P (el->loc))
2355 break;
2356 if (el)
2357 lwider = l;
2358 }
2359 if (lwider)
2360 {
2361 rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
2362 GET_MODE (lwider->elt->val_rtx));
2363 if (sub)
2364 new_elt_loc_list (e, sub);
2365 }
2366 }
2367 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2368 slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2369 *slot = e;
2370 return e;
2371 }
2372
2373 if (MEM_P (x))
2374 return cselib_lookup_mem (x, create);
2375
2376 hashval = cselib_hash_rtx (x, create, memmode);
2377 /* Can't even create if hashing is not possible. */
2378 if (! hashval)
2379 return 0;
2380
2381 slot = cselib_find_slot (mode, x, hashval,
2382 create ? INSERT : NO_INSERT, memmode);
2383 if (slot == 0)
2384 return 0;
2385
2386 e = (cselib_val *) *slot;
2387 if (e)
2388 return e;
2389
2390 e = new_cselib_val (hashval, mode, x);
2391
2392 /* We have to fill the slot before calling cselib_subst_to_values:
2393 the hash table is inconsistent until we do so, and
2394 cselib_subst_to_values will need to do lookups. */
2395 *slot = e;
2396 rtx v = cselib_subst_to_values (x, memmode);
2397
2398 /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
2399 VALUE that isn't in the hash tables anymore. */
2400 if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
2401 PRESERVED_VALUE_P (e->val_rtx) = 1;
2402
2403 new_elt_loc_list (e, v);
2404 return e;
2405 }
2406
2407 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2408
2409 cselib_val *
2410 cselib_lookup_from_insn (rtx x, machine_mode mode,
2411 int create, machine_mode memmode, rtx_insn *insn)
2412 {
2413 cselib_val *ret;
2414
2415 gcc_assert (!cselib_current_insn);
2416 cselib_current_insn = insn;
2417
2418 ret = cselib_lookup (x, mode, create, memmode);
2419
2420 cselib_current_insn = NULL;
2421
2422 return ret;
2423 }
2424
2425 /* Wrapper for cselib_lookup_1, that logs the lookup result and
2426 maintains invariants related with debug insns. */
2427
2428 cselib_val *
2429 cselib_lookup (rtx x, machine_mode mode,
2430 int create, machine_mode memmode)
2431 {
2432 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2433
2434 /* ??? Should we return NULL if we're not to create an entry, the
2435 found loc is a debug loc and cselib_current_insn is not DEBUG?
2436 If so, we should also avoid converting val to non-DEBUG; probably
2437 easiest by setting cselib_current_insn to NULL before the call
2438 above. */
2439
2440 if (dump_file && (dump_flags & TDF_CSELIB))
2441 {
2442 fputs ("cselib lookup ", dump_file);
2443 print_inline_rtx (dump_file, x, 2);
2444 fprintf (dump_file, " => %u:%u\n",
2445 ret ? ret->uid : 0,
2446 ret ? ret->hash : 0);
2447 }
2448
2449 return ret;
2450 }
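
/* Usage sketch (illustrative): a pass can ask whether two rtxs are
   known to hold the same value, without creating new entries, by
   passing CREATE == 0 and comparing canonical values:

     cselib_val *v1 = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
     cselib_val *v2 = cselib_lookup (y, GET_MODE (y), 0, VOIDmode);
     bool same = (v1 && v2
                  && canonical_cselib_val (v1) == canonical_cselib_val (v2));  */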
2451
2452 /* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
2453
2454 static void
2455 cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
2456 {
2457 cselib_val *v = (*l)->elt;
2458 if (*l == REG_VALUES (regno))
2459 {
2460 /* Maintain the invariant that the first entry of
2461 REG_VALUES, if present, must be the value used to set
2462 the register, or NULL. This is also nice because
2463 then we won't push the same regno onto used_regs
2464 multiple times. */
2465 (*l)->elt = NULL;
2466 l = &(*l)->next;
2467 }
2468 else
2469 unchain_one_elt_list (l);
2470
2471 v = canonical_cselib_val (v);
2472
2473 bool had_locs = v->locs != NULL;
2474 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2475
2476 /* Now, we clear the mapping from value to reg. It must exist, so
2477 this code will crash intentionally if it doesn't. */
2478 for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
2479 {
2480 rtx x = (*p)->loc;
2481
2482 if (REG_P (x) && REGNO (x) == regno)
2483 {
2484 unchain_one_elt_loc_list (p);
2485 break;
2486 }
2487 }
2488
2489 if (had_locs && cselib_useless_value_p (v))
2490 {
2491 if (setting_insn && DEBUG_INSN_P (setting_insn))
2492 n_useless_debug_values++;
2493 else
2494 n_useless_values++;
2495 }
2496 }
2497
2498 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2499 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2500 is used to determine how many hard registers are being changed. If MODE
2501 is VOIDmode, then only REGNO is being changed; this is used when
2502 invalidating call clobbered registers across a call. */
2503
2504 static void
2505 cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2506 {
2507 unsigned int endregno;
2508 unsigned int i;
2509
2510 /* If we see pseudos after reload, something is _wrong_. */
2511 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2512 || reg_renumber[regno] < 0);
2513
2514 /* Determine the range of registers that must be invalidated. For
2515 pseudos, only REGNO is affected. For hard regs, we must take MODE
2516 into account, and we must also invalidate lower register numbers
2517 if they contain values that overlap REGNO. */
2518 if (regno < FIRST_PSEUDO_REGISTER)
2519 {
2520 gcc_assert (mode != VOIDmode);
2521
2522 if (regno < max_value_regs)
2523 i = 0;
2524 else
2525 i = regno - max_value_regs;
2526
2527 endregno = end_hard_regno (mode, regno);
2528 }
2529 else
2530 {
2531 i = regno;
2532 endregno = regno + 1;
2533 }
2534
2535 for (; i < endregno; i++)
2536 {
2537 struct elt_list **l = &REG_VALUES (i);
2538
2539 /* Go through all known values for this reg; if it overlaps the range
2540 we're invalidating, remove the value. */
2541 while (*l)
2542 {
2543 cselib_val *v = (*l)->elt;
2544 unsigned int this_last = i;
2545
2546 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2547 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2548
2549 if (this_last < regno || v == NULL
2550 || (v == cfa_base_preserved_val
2551 && i == cfa_base_preserved_regno))
2552 {
2553 l = &(*l)->next;
2554 continue;
2555 }
2556
2557 /* We have an overlap. */
2558 cselib_invalidate_regno_val (i, l);
2559 }
2560 }
2561 }
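
/* For instance (illustrative), invalidating (reg:SI 1) on a target
   where a DImode value recorded for (reg:DI 0) spans hard registers 0
   and 1 must remove that value as well: the loop above therefore
   starts at REGNO - max_value_regs and checks each value's last
   register against REGNO.  */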
2562 \f
2563 /* Invalidate any locations in the table which are changed because of a
2564 store to MEM_RTX. If this is called because of a non-const call
2565 instruction, MEM_RTX is callmem, i.e. (mem:BLK (scratch)). */
2566
2567 static void
2568 cselib_invalidate_mem (rtx mem_rtx)
2569 {
2570 cselib_val **vp, *v, *next;
2571 int num_mems = 0;
2572 rtx mem_addr;
2573
2574 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2575 mem_rtx = canon_rtx (mem_rtx);
2576
2577 vp = &first_containing_mem;
2578 for (v = *vp; v != &dummy_val; v = next)
2579 {
2580 bool has_mem = false;
2581 struct elt_loc_list **p = &v->locs;
2582 bool had_locs = v->locs != NULL;
2583 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2584
2585 while (*p)
2586 {
2587 rtx x = (*p)->loc;
2588 cselib_val *addr;
2589 struct elt_list **mem_chain;
2590
2591 /* MEMs may occur in locations only at the top level; below
2592 that every MEM or REG is substituted by its VALUE. */
2593 if (!MEM_P (x))
2594 {
2595 p = &(*p)->next;
2596 continue;
2597 }
2598 if (num_mems < param_max_cselib_memory_locations
2599 && ! canon_anti_dependence (x, false, mem_rtx,
2600 GET_MODE (mem_rtx), mem_addr))
2601 {
2602 has_mem = true;
2603 num_mems++;
2604 p = &(*p)->next;
2605 continue;
2606 }
2607
2608 /* This one overlaps. */
2609 /* We must have a mapping from this MEM's address to the
2610 value (E). Remove that, too. */
2611 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2612 addr = canonical_cselib_val (addr);
2613 gcc_checking_assert (v == canonical_cselib_val (v));
2614 mem_chain = &addr->addr_list;
2615 for (;;)
2616 {
2617 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2618
2619 if (canon == v)
2620 {
2621 unchain_one_elt_list (mem_chain);
2622 break;
2623 }
2624
2625 /* Record canonicalized elt. */
2626 (*mem_chain)->elt = canon;
2627
2628 mem_chain = &(*mem_chain)->next;
2629 }
2630
2631 unchain_one_elt_loc_list (p);
2632 }
2633
2634 if (had_locs && cselib_useless_value_p (v))
2635 {
2636 if (setting_insn && DEBUG_INSN_P (setting_insn))
2637 n_useless_debug_values++;
2638 else
2639 n_useless_values++;
2640 }
2641
2642 next = v->next_containing_mem;
2643 if (has_mem)
2644 {
2645 *vp = v;
2646 vp = &(*vp)->next_containing_mem;
2647 }
2648 else
2649 v->next_containing_mem = NULL;
2650 }
2651 *vp = &dummy_val;
2652 }
2653
2654 /* Invalidate DEST. */
2655
2656 void
2657 cselib_invalidate_rtx (rtx dest)
2658 {
2659 while (GET_CODE (dest) == SUBREG
2660 || GET_CODE (dest) == ZERO_EXTRACT
2661 || GET_CODE (dest) == STRICT_LOW_PART)
2662 dest = XEXP (dest, 0);
2663
2664 if (REG_P (dest))
2665 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2666 else if (MEM_P (dest))
2667 cselib_invalidate_mem (dest);
2668 }
2669
2670 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2671
2672 static void
2673 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
2674 void *data ATTRIBUTE_UNUSED)
2675 {
2676 cselib_invalidate_rtx (dest);
2677 }
2678
2679 /* Record the result of a SET instruction. DEST is being set; the source
2680 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2681 describes its address. */
2682
2683 static void
2684 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2685 {
2686 if (src_elt == 0 || side_effects_p (dest))
2687 return;
2688
2689 if (REG_P (dest))
2690 {
2691 unsigned int dreg = REGNO (dest);
2692 if (dreg < FIRST_PSEUDO_REGISTER)
2693 {
2694 unsigned int n = REG_NREGS (dest);
2695
2696 if (n > max_value_regs)
2697 max_value_regs = n;
2698 }
2699
2700 if (REG_VALUES (dreg) == 0)
2701 {
2702 used_regs[n_used_regs++] = dreg;
2703 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2704 }
2705 else
2706 {
2707 /* The register should have been invalidated. */
2708 gcc_assert (REG_VALUES (dreg)->elt == 0);
2709 REG_VALUES (dreg)->elt = src_elt;
2710 }
2711
2712 if (cselib_useless_value_p (src_elt))
2713 n_useless_values--;
2714 new_elt_loc_list (src_elt, dest);
2715 }
2716 else if (MEM_P (dest) && dest_addr_elt != 0
2717 && cselib_record_memory)
2718 {
2719 if (cselib_useless_value_p (src_elt))
2720 n_useless_values--;
2721 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2722 }
2723 }
2724
2725 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2726
2727 void
2728 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2729 {
2730 cselib_val *nelt;
2731 rtx_insn *save_cselib_current_insn = cselib_current_insn;
2732
2733 gcc_checking_assert (elt);
2734 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2735 gcc_checking_assert (!side_effects_p (x));
2736
2737 cselib_current_insn = insn;
2738
2739 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2740
2741 if (nelt != elt)
2742 {
2743 cselib_any_perm_equivs = true;
2744
2745 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2746 cselib_preserve_value (nelt);
2747
2748 new_elt_loc_list (nelt, elt->val_rtx);
2749 }
2750
2751 cselib_current_insn = save_cselib_current_insn;
2752 }
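
/* Usage sketch, modeled loosely on how var-tracking ties an incoming
   argument's register to its value on function entry (treat the
   specific calls as assumptions, not a quote from var-tracking):

     cselib_val *v = cselib_lookup (reg, GET_MODE (reg), 1, VOIDmode);
     cselib_preserve_value (v);
     rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (reg));
     ENTRY_VALUE_EXP (ev) = reg;
     cselib_add_permanent_equiv (v, ev, insn);

   The value must be preserved first; cselib_add_permanent_equiv
   asserts PRESERVED_VALUE_P on its ELT argument.  */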
2753
2754 /* Return TRUE if any permanent equivalences have been recorded since
2755 the table was last initialized. */
2756 bool
2757 cselib_have_permanent_equivalences (void)
2758 {
2759 return cselib_any_perm_equivs;
2760 }
2761
2762 /* Record stack_pointer_rtx to be equal to
2763 (plus:P cfa_base_preserved_val offset). Used by var-tracking
2764 at the start of basic blocks for !frame_pointer_needed functions. */
2765
2766 void
2767 cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
2768 {
2769 rtx sp_derived_value = NULL_RTX;
2770 for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
2771 if (GET_CODE (l->loc) == VALUE
2772 && SP_DERIVED_VALUE_P (l->loc))
2773 {
2774 sp_derived_value = l->loc;
2775 break;
2776 }
2777 else if (GET_CODE (l->loc) == PLUS
2778 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2779 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2780 && CONST_INT_P (XEXP (l->loc, 1)))
2781 {
2782 sp_derived_value = XEXP (l->loc, 0);
2783 offset = offset + UINTVAL (XEXP (l->loc, 1));
2784 break;
2785 }
2786 if (sp_derived_value == NULL_RTX)
2787 return;
2788 cselib_val *val
2789 = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
2790 Pmode, 1, VOIDmode, insn);
2791 if (val != NULL)
2792 {
2793 PRESERVED_VALUE_P (val->val_rtx) = 1;
2794 cselib_record_set (stack_pointer_rtx, val, NULL);
2795 }
2796 }
2797
2798 /* Return true if V is an SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
2799 value that can be expressed using cfa_base_preserved_val + CONST_INT. */
2800
2801 bool
2802 cselib_sp_derived_value_p (cselib_val *v)
2803 {
2804 if (!SP_DERIVED_VALUE_P (v->val_rtx))
2805 for (struct elt_loc_list *l = v->locs; l; l = l->next)
2806 if (GET_CODE (l->loc) == PLUS
2807 && GET_CODE (XEXP (l->loc, 0)) == VALUE
2808 && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2809 && CONST_INT_P (XEXP (l->loc, 1)))
2810 v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
2811 if (!SP_DERIVED_VALUE_P (v->val_rtx))
2812 return false;
2813 for (struct elt_loc_list *l = v->locs; l; l = l->next)
2814 if (l->loc == cfa_base_preserved_val->val_rtx)
2815 return true;
2816 else if (GET_CODE (l->loc) == PLUS
2817 && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
2818 && CONST_INT_P (XEXP (l->loc, 1)))
2819 return true;
2820 return false;
2821 }
2822
2823 /* There is no good way to determine how many elements there can be
2824 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2825 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2826
2827 struct cselib_record_autoinc_data
2828 {
2829 struct cselib_set *sets;
2830 int n_sets;
2831 };
2832
2833 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2834 autoinc RTXs: SRC plus SRCOFF (when SRCOFF is non-NULL) is stored in DEST. */
2835
2836 static int
2837 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2838 rtx dest, rtx src, rtx srcoff, void *arg)
2839 {
2840 struct cselib_record_autoinc_data *data;
2841 data = (struct cselib_record_autoinc_data *)arg;
2842
2843 data->sets[data->n_sets].dest = dest;
2844
2845 if (srcoff)
2846 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2847 else
2848 data->sets[data->n_sets].src = src;
2849
2850 data->n_sets++;
2851
2852 return 0;
2853 }
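
/* For example (illustrative): for (mem:SI (post_inc:P (reg R))),
   for_each_inc_dec reports DEST = (reg R), SRC = (reg R) and
   SRCOFF = (const_int 4), so the callback above records the implied
   set R = R + 4.  */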
2854
2855 /* Record the effects of any sets and autoincs in INSN. */
2856 static void
2857 cselib_record_sets (rtx_insn *insn)
2858 {
2859 int n_sets = 0;
2860 int i;
2861 struct cselib_set sets[MAX_SETS];
2862 rtx cond = 0;
2863 int n_sets_before_autoinc;
2864 int n_strict_low_parts = 0;
2865 struct cselib_record_autoinc_data data;
2866
2867 rtx body = PATTERN (insn);
2868 if (GET_CODE (body) == COND_EXEC)
2869 {
2870 cond = COND_EXEC_TEST (body);
2871 body = COND_EXEC_CODE (body);
2872 }
2873
2874 /* Find all sets. */
2875 if (GET_CODE (body) == SET)
2876 {
2877 sets[0].src = SET_SRC (body);
2878 sets[0].dest = SET_DEST (body);
2879 n_sets = 1;
2880 }
2881 else if (GET_CODE (body) == PARALLEL)
2882 {
2883 /* Look through the PARALLEL and record the values being
2884 set, if possible. Also handle any CLOBBERs. */
2885 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2886 {
2887 rtx x = XVECEXP (body, 0, i);
2888
2889 if (GET_CODE (x) == SET)
2890 {
2891 sets[n_sets].src = SET_SRC (x);
2892 sets[n_sets].dest = SET_DEST (x);
2893 n_sets++;
2894 }
2895 }
2896 }
2897
2898 if (n_sets == 1
2899 && MEM_P (sets[0].src)
2900 && !cselib_record_memory
2901 && MEM_READONLY_P (sets[0].src))
2902 {
2903 rtx note = find_reg_equal_equiv_note (insn);
2904
2905 if (note && CONSTANT_P (XEXP (note, 0)))
2906 sets[0].src = XEXP (note, 0);
2907 }
2908
2909 data.sets = sets;
2910 data.n_sets = n_sets_before_autoinc = n_sets;
2911 for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
2912 n_sets = data.n_sets;
2913
2914 /* Look up the values that are read. Do this before invalidating the
2915 locations that are written. */
2916 for (i = 0; i < n_sets; i++)
2917 {
2918 rtx dest = sets[i].dest;
2919 rtx orig = dest;
2920
2921 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2922 the low part after invalidating any knowledge about larger modes. */
2923 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2924 sets[i].dest = dest = XEXP (dest, 0);
2925
2926 /* We don't know how to record anything but REG or MEM. */
2927 if (REG_P (dest)
2928 || (MEM_P (dest) && cselib_record_memory))
2929 {
2930 rtx src = sets[i].src;
2931 if (cond)
2932 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2933 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2934 if (MEM_P (dest))
2935 {
2936 machine_mode address_mode = get_address_mode (dest);
2937
2938 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2939 address_mode, 1,
2940 GET_MODE (dest));
2941 }
2942 else
2943 sets[i].dest_addr_elt = 0;
2944 }
2945
2946 /* Improve handling of STRICT_LOW_PART: if the current value is known
2947 to be const0_rtx, then the low bits will be set to DEST and the higher
2948 bits will remain zero. Used in code like:
2949
2950 {di:SI=0;clobber flags:CC;}
2951 flags:CCNO=cmp(bx:SI,0)
2952 strict_low_part(di:QI)=flags:CCNO<=0
2953
2954 where we can note both that di:QI=flags:CCNO<=0 and,
2955 because di:SI is known to be 0 and strict_low_part(di:QI)
2956 preserves the upper bits, that di:SI=zero_extend(flags:CCNO<=0). */
2957 scalar_int_mode mode;
2958 if (dest != orig
2959 && cselib_record_sets_hook
2960 && REG_P (dest)
2961 && HARD_REGISTER_P (dest)
2962 && sets[i].src_elt
2963 && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
2964 && n_sets + n_strict_low_parts < MAX_SETS)
2965 {
2966 opt_scalar_int_mode wider_mode_iter;
2967 FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
2968 {
2969 scalar_int_mode wider_mode = wider_mode_iter.require ();
2970 if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
2971 break;
2972
2973 rtx reg = gen_lowpart (wider_mode, dest);
2974 if (!REG_P (reg))
2975 break;
2976
2977 cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
2978 if (!v)
2979 continue;
2980
2981 struct elt_loc_list *l;
2982 for (l = v->locs; l; l = l->next)
2983 if (l->loc == const0_rtx)
2984 break;
2985
2986 if (!l)
2987 continue;
2988
2989 sets[n_sets + n_strict_low_parts].dest = reg;
2990 sets[n_sets + n_strict_low_parts].src = dest;
2991 sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
2992 break;
2993 }
2994 }
2995 }
2996
2997 if (cselib_record_sets_hook)
2998 cselib_record_sets_hook (insn, sets, n_sets);
2999
3000 /* Invalidate all locations written by this insn. Note that the elts we
3001 looked up in the previous loop aren't affected, just some of their
3002 locations may go away. */
3003 note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
3004
3005 for (i = n_sets_before_autoinc; i < n_sets; i++)
3006 cselib_invalidate_rtx (sets[i].dest);
3007
3008 /* If this is an asm, look for duplicate sets. This can happen when the
3009 user uses the same value as an output multiple times. This is valid
3010 if the outputs are not actually used thereafter. Treat this case as
3011 if the value isn't actually set. We do this by smashing the destination
3012 to pc_rtx, so that we won't record the value later. */
3013 if (n_sets >= 2 && asm_noperands (body) >= 0)
3014 {
3015 for (i = 0; i < n_sets; i++)
3016 {
3017 rtx dest = sets[i].dest;
3018 if (REG_P (dest) || MEM_P (dest))
3019 {
3020 int j;
3021 for (j = i + 1; j < n_sets; j++)
3022 if (rtx_equal_p (dest, sets[j].dest))
3023 {
3024 sets[i].dest = pc_rtx;
3025 sets[j].dest = pc_rtx;
3026 }
3027 }
3028 }
3029 }
3030
3031 /* Now enter the equivalences in our tables. */
3032 for (i = 0; i < n_sets; i++)
3033 {
3034 rtx dest = sets[i].dest;
3035 if (REG_P (dest)
3036 || (MEM_P (dest) && cselib_record_memory))
3037 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
3038 }
3039
3040 /* And deal with STRICT_LOW_PART. */
3041 for (i = 0; i < n_strict_low_parts; i++)
3042 {
3043 if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
3044 continue;
3045 machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
3046 cselib_val *v
3047 = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
3048 cselib_preserve_value (v);
3049 rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
3050 sets[n_sets + i].src_elt->val_rtx);
3051 cselib_add_permanent_equiv (v, r, insn);
3052 }
3053 }
3054
3055 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
3056
3057 bool
3058 fp_setter_insn (rtx_insn *insn)
3059 {
3060 rtx expr, pat = NULL_RTX;
3061
3062 if (!RTX_FRAME_RELATED_P (insn))
3063 return false;
3064
3065 expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
3066 if (expr)
3067 pat = XEXP (expr, 0);
3068 if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
3069 return false;
3070
3071 /* Don't return true for frame pointer restores in the epilogue. */
3072 if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
3073 return false;
3074 return true;
3075 }
3076
3077 /* V is one of the values in REG_VALUES (REGNO). Return true if it
3078 would be invalidated by CALLEE_ABI. */
3079
3080 static bool
3081 cselib_invalidated_by_call_p (const function_abi &callee_abi,
3082 unsigned int regno, cselib_val *v)
3083 {
3084 machine_mode mode = GET_MODE (v->val_rtx);
3085 if (mode == VOIDmode)
3086 {
3087 v = REG_VALUES (regno)->elt;
3088 if (!v)
3089 /* If we don't know what the mode of the constant value is, and we
3090 don't know what mode the register was set in, conservatively
3091 assume that the register is clobbered. The value's going to be
3092 essentially useless in this case anyway. */
3093 return true;
3094 mode = GET_MODE (v->val_rtx);
3095 }
3096 return callee_abi.clobbers_reg_p (mode, regno);
3097 }
3098
3099 /* Record the effects of INSN. */
3100
3101 void
3102 cselib_process_insn (rtx_insn *insn)
3103 {
3104 int i;
3105 rtx x;
3106
3107 cselib_current_insn = insn;
3108
3109 /* Forget everything at a CODE_LABEL or a setjmp. */
3110 if ((LABEL_P (insn)
3111 || (CALL_P (insn)
3112 && find_reg_note (insn, REG_SETJMP, NULL)))
3113 && !cselib_preserve_constants)
3114 {
3115 cselib_reset_table (next_uid);
3116 cselib_current_insn = NULL;
3117 return;
3118 }
3119
3120 if (! INSN_P (insn))
3121 {
3122 cselib_current_insn = NULL;
3123 return;
3124 }
3125
3126 /* If this is a call instruction, forget anything stored in a
3127 call clobbered register, or, if this is not a const call, in
3128 memory. */
3129 if (CALL_P (insn))
3130 {
3131 function_abi callee_abi = insn_callee_abi (insn);
3132 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3133 {
3134 elt_list **l = &REG_VALUES (i);
3135 while (*l)
3136 {
3137 cselib_val *v = (*l)->elt;
3138 if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
3139 cselib_invalidate_regno_val (i, l);
3140 else
3141 l = &(*l)->next;
3142 }
3143 }
3144
3145 /* Since it is not clear how cselib is going to be used, be
3146 conservative here and treat looping pure or const functions
3147 as if they were regular functions. */
3148 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
3149 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
3150 cselib_invalidate_mem (callmem);
3151 else
3152 /* For const/pure calls, invalidate any argument slots because
3153 they are owned by the callee. */
3154 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3155 if (GET_CODE (XEXP (x, 0)) == USE
3156 && MEM_P (XEXP (XEXP (x, 0), 0)))
3157 cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
3158 }
3159
3160 cselib_record_sets (insn);
3161
3162 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
3163 after we have processed the insn. */
3164 if (CALL_P (insn))
3165 {
3166 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3167 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3168 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
3169
3170 /* Flush everything on setjmp. */
3171 if (cselib_preserve_constants
3172 && find_reg_note (insn, REG_SETJMP, NULL))
3173 {
3174 cselib_preserve_only_values ();
3175 cselib_reset_table (next_uid);
3176 }
3177 }
3178
3179 /* On a setter of the hard frame pointer, if frame_pointer_needed,
3180 invalidate stack_pointer_rtx, so that sp and {,h}fp based
3181 VALUEs are distinct. */
3182 if (reload_completed
3183 && frame_pointer_needed
3184 && fp_setter_insn (insn))
3185 cselib_invalidate_rtx (stack_pointer_rtx);
3186
3187 cselib_current_insn = NULL;
3188
3189 if (n_useless_values > MAX_USELESS_VALUES
3190 /* remove_useless_values is linear in the hash table size. Avoid
3191 quadratic behavior for very large hashtables with very few
3192 useless elements. */
3193 && ((unsigned int)n_useless_values
3194 > (cselib_hash_table->elements () - n_debug_values) / 4))
3195 remove_useless_values ();
3196 }
3197
3198 /* Initialize cselib for one pass. The caller must also call
3199 init_alias_analysis. */
3200
3201 void
3202 cselib_init (int record_what)
3203 {
3204 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
3205 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
3206 cselib_any_perm_equivs = false;
3207
3208 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
3209 see canon_true_dependence. This is only created once. */
3210 if (! callmem)
3211 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3212
3213 cselib_nregs = max_reg_num ();
3214
3215 /* We preserve reg_values across passes to avoid expensive clearing of the
3216 whole thing. Reallocate it, however, if it happens to be too large. */
3217 if (!reg_values || reg_values_size < cselib_nregs
3218 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
3219 {
3220 free (reg_values);
3221 /* Reserve some space for newly emitted instructions so we don't end up
3222 reallocating in between passes. */
3223 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
3224 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
3225 }
3226 used_regs = XNEWVEC (unsigned int, cselib_nregs);
3227 n_used_regs = 0;
3228 /* FIXME: enable sanitization (PR87845) */
3229 cselib_hash_table
3230 = new hash_table<cselib_hasher> (31, /* ggc */ false,
3231 /* sanitize_eq_and_hash */ false);
3232 if (cselib_preserve_constants)
3233 cselib_preserved_hash_table
3234 = new hash_table<cselib_hasher> (31, /* ggc */ false,
3235 /* sanitize_eq_and_hash */ false);
3236 next_uid = 1;
3237 }
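
/* A minimal driver sketch (illustrative; real users such as dse and
   postreload do pass-specific work around each insn).  Clearing the
   table between blocks keeps value numbers block-local:

     cselib_init (CSELIB_RECORD_MEMORY);
     init_alias_analysis ();
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       {
         rtx_insn *insn;
         FOR_BB_INSNS (bb, insn)
           cselib_process_insn (insn);
         cselib_clear_table ();
       }
     end_alias_analysis ();
     cselib_finish ();  */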
3238
3239 /* Called when the current user is done with cselib. */
3240
3241 void
3242 cselib_finish (void)
3243 {
3244 bool preserved = cselib_preserve_constants;
3245 cselib_discard_hook = NULL;
3246 cselib_preserve_constants = false;
3247 cselib_any_perm_equivs = false;
3248 cfa_base_preserved_val = NULL;
3249 cfa_base_preserved_regno = INVALID_REGNUM;
3250 elt_list_pool.release ();
3251 elt_loc_list_pool.release ();
3252 cselib_val_pool.release ();
3253 value_pool.release ();
3254 cselib_clear_table ();
3255 delete cselib_hash_table;
3256 cselib_hash_table = NULL;
3257 if (preserved)
3258 delete cselib_preserved_hash_table;
3259 cselib_preserved_hash_table = NULL;
3260 free (used_regs);
3261 used_regs = 0;
3262 n_useless_values = 0;
3263 n_useless_debug_values = 0;
3264 n_debug_values = 0;
3265 next_uid = 0;
3266 }
3267
3268 /* Dump the cselib_val *X to FILE *OUT. */
3269
3270 int
3271 dump_cselib_val (cselib_val **x, FILE *out)
3272 {
3273 cselib_val *v = *x;
3274 bool need_lf = true;
3275
3276 print_inline_rtx (out, v->val_rtx, 0);
3277
3278 if (v->locs)
3279 {
3280 struct elt_loc_list *l = v->locs;
3281 if (need_lf)
3282 {
3283 fputc ('\n', out);
3284 need_lf = false;
3285 }
3286 fputs (" locs:", out);
3287 do
3288 {
3289 if (l->setting_insn)
3290 fprintf (out, "\n from insn %i ",
3291 INSN_UID (l->setting_insn));
3292 else
3293 fprintf (out, "\n ");
3294 print_inline_rtx (out, l->loc, 4);
3295 }
3296 while ((l = l->next));
3297 fputc ('\n', out);
3298 }
3299 else
3300 {
3301 fputs (" no locs", out);
3302 need_lf = true;
3303 }
3304
3305 if (v->addr_list)
3306 {
3307 struct elt_list *e = v->addr_list;
3308 if (need_lf)
3309 {
3310 fputc ('\n', out);
3311 need_lf = false;
3312 }
3313 fputs (" addr list:", out);
3314 do
3315 {
3316 fputs ("\n ", out);
3317 print_inline_rtx (out, e->elt->val_rtx, 2);
3318 }
3319 while ((e = e->next));
3320 fputc ('\n', out);
3321 }
3322 else
3323 {
3324 fputs (" no addrs", out);
3325 need_lf = true;
3326 }
3327
3328 if (v->next_containing_mem == &dummy_val)
3329 fputs (" last mem\n", out);
3330 else if (v->next_containing_mem)
3331 {
3332 fputs (" next mem ", out);
3333 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
3334 fputc ('\n', out);
3335 }
3336 else if (need_lf)
3337 fputc ('\n', out);
3338
3339 return 1;
3340 }
3341
3342 /* Dump to OUT everything in the CSELIB table. */
3343
3344 void
3345 dump_cselib_table (FILE *out)
3346 {
3347 fprintf (out, "cselib hash table:\n");
3348 cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
3349 fprintf (out, "cselib preserved hash table:\n");
3350 cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
3351 if (first_containing_mem != &dummy_val)
3352 {
3353 fputs ("first mem ", out);
3354 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
3355 fputc ('\n', out);
3356 }
3357 fprintf (out, "next uid %i\n", next_uid);
3358 }
3359
3360 #include "gt-cselib.h"