1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* Allocation of hard register numbers to pseudo registers is done in
23 two passes. In this pass we consider only regs that are born and
24 die once within one basic block. We do this one basic block at a
25 time. Then the next pass allocates the registers that remain.
26 Two passes are used because this pass uses methods that work only
27 on linear code, but that do a better job than the general methods
28 used in global_alloc, and more quickly too.
29
30 The assignments made are recorded in the vector reg_renumber
31 whose space is allocated here. The rtl code itself is not altered.
32
33 We assign each instruction in the basic block a number
34 which is its order from the beginning of the block.
35 Then we can represent the lifetime of a pseudo register with
36 a pair of numbers, and check for conflicts easily.
37 We can record the availability of hard registers with a
38 HARD_REG_SET for each instruction. The HARD_REG_SET
39 contains 0 or 1 for each hard reg.
40
41 To avoid register shuffling, we tie registers together when one
42 dies by being copied into another, or dies in an instruction that
43 does arithmetic to produce another. The tied registers are
44 allocated as one. Registers with different reg class preferences
45 can never be tied unless the class preferred by one is a subclass
46 of the one preferred by the other.
47
48 Tying is represented with "quantity numbers".
49 A non-tied register is given a new quantity number.
50 Tied registers have the same quantity number.
51
52 We have provision to exempt registers, even when they are contained
53 within the block, that can be tied to others that are not contained in it.
54 This is so that global_alloc could process them both and tie them then.
55 But this is currently disabled since tying in global_alloc is not
56 yet implemented. */
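
/* As a rough illustration of the scheme above (hypothetical pseudo
   registers and insns, not real RTL):

       insn 1:   r100 = r200 + 4      r100 is born here
       insn 2:   r101 = r100          r100 dies in a copy; tie r100/r101
       insn 3:   ... use r101 ...     r101 dies here

   r100 and r101 get one quantity number, that quantity lives over the
   insn range [1, 3], and a single hard register that is free over that
   whole range is sought for both pseudos.  */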
57
58 /* Pseudos allocated here can be reallocated by global.c if the hard register
59 is used as a spill register. Currently we don't allocate such pseudos
60 here if their preferred class is likely to be used by spills. */
61
62 #include "config.h"
63 #include "system.h"
64 #include "coretypes.h"
65 #include "tm.h"
66 #include "hard-reg-set.h"
67 #include "rtl.h"
68 #include "tm_p.h"
69 #include "flags.h"
70 #include "basic-block.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 \f
81 /* Next quantity number available for allocation. */
82
83 static int next_qty;
84
85 /* Information we maintain about each quantity. */
86 struct qty
87 {
88 /* The number of refs to quantity Q. */
89
90 int n_refs;
91
92 /* The frequency of uses of quantity Q. */
93
94 int freq;
95
96 /* Insn number (counting from head of basic block)
97 where quantity Q was born. -1 if birth has not been recorded. */
98
99 int birth;
100
101 /* Insn number (counting from head of basic block)
102 where given quantity died. Due to the way tying is done,
103 and the fact that we consider in this pass only regs that die but once,
104 a quantity can die only once. Each quantity's life span
105 is a set of consecutive insns. -1 if death has not been recorded. */
106
107 int death;
108
109 /* Number of words needed to hold the data in given quantity.
110 This depends on its machine mode. It is used for these purposes:
111 1. It is used in computing the relative importances of qtys,
112 which determines the order in which we look for regs for them.
113 2. It is used in rules that prevent tying several registers of
114 different sizes in a way that is geometrically impossible
115 (see combine_regs). */
116
117 int size;
118
119 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
120
121 int n_calls_crossed;
122
123 /* The register number of one pseudo register whose reg_qty value is Q.
124 This register should be the head of the chain
125 maintained in reg_next_in_qty. */
126
127 int first_reg;
128
129 /* Reg class contained in (smaller than) the preferred classes of all
130 the pseudo regs that are tied in given quantity.
131 This is the preferred class for allocating that quantity. */
132
133 enum reg_class min_class;
134
135 /* Register class within which we allocate given qty if we can't get
136 its preferred class. */
137
138 enum reg_class alternate_class;
139
140 /* This holds the mode of the registers that are tied to given qty,
141 or VOIDmode if registers with differing modes are tied together. */
142
143 enum machine_mode mode;
144
145 /* The hard reg number chosen for given quantity,
146 or -1 if none was found. */
147
148 short phys_reg;
149 };
150
151 static struct qty *qty;
152
153 /* These fields are kept separately to speed up their clearing. */
154
155 /* We maintain two hard register sets that indicate suggested hard registers
156 for each quantity. The first, phys_copy_sugg, contains hard registers
157 that are tied to the quantity by a simple copy. The second contains all
158 hard registers that are tied to the quantity via an arithmetic operation.
159
160 The former register set is given priority for allocation. This tends to
161 eliminate copy insns. */
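
/* For example (hypothetical): if the block contains a copy `r100 = r2'
   of hard register 2 into pseudo r100, hard reg 2 is recorded in
   qty_phys_copy_sugg for r100's quantity; giving such suggestions
   priority tends to turn the copy into a no-op.  */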
162
163 /* Element Q is a set of hard registers that are suggested for quantity Q by
164 copy insns. */
165
166 static HARD_REG_SET *qty_phys_copy_sugg;
167
168 /* Element Q is a set of hard registers that are suggested for quantity Q by
169 arithmetic insns. */
170
171 static HARD_REG_SET *qty_phys_sugg;
172
173 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
174
175 static short *qty_phys_num_copy_sugg;
176
177 /* Element Q is the number of suggested registers in qty_phys_sugg. */
178
179 static short *qty_phys_num_sugg;
180
181 /* If (REG N) has been assigned a quantity number, element N is a register
182 number of another register assigned the same quantity number, or -1 for
183 the end of the chain. qty->first_reg points to the head of this chain. */
184
185 static int *reg_next_in_qty;
186
187 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
188 if it is >= 0,
189 or -1 if this register cannot be allocated by local-alloc,
190 or -2 if not known yet.
191
192 Note that if we see a use or death of pseudo register N with
193 reg_qty[N] == -2, register N must be local to the current block. If
194 it were used in more than one block, we would have reg_qty[N] == -1.
195 This relies on the fact that if reg_basic_block[N] is >= 0, register N
196 will not appear in any other block. We save a considerable number of
197 tests by exploiting this.
198
199 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
200 be referenced. */
201
202 static int *reg_qty;
203
204 /* The offset (in words) of register N within its quantity.
205 This can be nonzero if register N is SImode, and has been tied
206 to a subreg of a DImode register. */
207
208 static char *reg_offset;
209
210 /* Vector of substitutions of register numbers,
211 used to map pseudo regs into hardware regs.
212 This is set up as a result of register allocation.
213 Element N is the hard reg assigned to pseudo reg N,
214 or is -1 if no hard reg was assigned.
215 If N is a hard reg number, element N is N. */
216
217 short *reg_renumber;
218
219 /* Set of hard registers live at the current point in the scan
220 of the instructions in a basic block. */
221
222 static HARD_REG_SET regs_live;
223
224 /* Each set of hard registers indicates registers live at a particular
225 point in the basic block. For N even, regs_live_at[N] says which
226 hard registers are needed *after* insn N/2 (i.e., they may not
227 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
228
229 If an object is to conflict with the inputs of insn J but not the
230 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
231 if it is to conflict with the outputs of insn J but not the inputs of
232 insn J + 1, it is said to die at index J*2 + 1. */
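
/* Concretely (illustrative numbers only): regs_live_at[6] describes the
   point after insn 3; an object born at index 5 (3*2 - 1) conflicts with
   the inputs of insn 3, and one dying at index 7 (3*2 + 1) conflicts
   with the outputs of insn 3.  */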
233
234 static HARD_REG_SET *regs_live_at;
235
236 /* Communicate local vars `insn_number' and `insn'
237 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
238 static int this_insn_number;
239 static rtx this_insn;
240
241 struct equivalence
242 {
243 /* Set when an attempt should be made to replace a register
244 with the associated src_p entry. */
245
246 char replace;
247
248 /* Set when a REG_EQUIV note is found or created. Use to
249 keep track of what memory accesses might be created later,
250 e.g. by reload. */
251
252 rtx replacement;
253
254 rtx *src_p;
255
256 /* Loop depth is used to recognize equivalences which appear
257 to be present within the same loop (or in an inner loop). */
258
259 int loop_depth;
260
261 /* The list of each instruction which initializes this register. */
262
263 rtx init_insns;
264 };
265
266 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
267 structure for that register. */
268
269 static struct equivalence *reg_equiv;
270
271 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
272 static int recorded_label_ref;
273
274 static void alloc_qty PARAMS ((int, enum machine_mode, int, int));
275 static void validate_equiv_mem_from_store PARAMS ((rtx, rtx, void *));
276 static int validate_equiv_mem PARAMS ((rtx, rtx, rtx));
277 static int equiv_init_varies_p PARAMS ((rtx));
278 static int equiv_init_movable_p PARAMS ((rtx, int));
279 static int contains_replace_regs PARAMS ((rtx));
280 static int memref_referenced_p PARAMS ((rtx, rtx));
281 static int memref_used_between_p PARAMS ((rtx, rtx, rtx));
282 static void update_equiv_regs PARAMS ((void));
283 static void no_equiv PARAMS ((rtx, rtx, void *));
284 static void block_alloc PARAMS ((int));
285 static int qty_sugg_compare PARAMS ((int, int));
286 static int qty_sugg_compare_1 PARAMS ((const PTR, const PTR));
287 static int qty_compare PARAMS ((int, int));
288 static int qty_compare_1 PARAMS ((const PTR, const PTR));
289 static int combine_regs PARAMS ((rtx, rtx, int, int, rtx, int));
290 static int reg_meets_class_p PARAMS ((int, enum reg_class));
291 static void update_qty_class PARAMS ((int, int));
292 static void reg_is_set PARAMS ((rtx, rtx, void *));
293 static void reg_is_born PARAMS ((rtx, int));
294 static void wipe_dead_reg PARAMS ((rtx, int));
295 static int find_free_reg PARAMS ((enum reg_class, enum machine_mode,
296 int, int, int, int, int));
297 static void mark_life PARAMS ((int, enum machine_mode, int));
298 static void post_mark_life PARAMS ((int, enum machine_mode, int, int, int));
299 static int no_conflict_p PARAMS ((rtx, rtx, rtx));
300 static int requires_inout PARAMS ((const char *));
301 \f
302 /* Allocate a new quantity (new within current basic block)
303 for register number REGNO which is born at index BIRTH
304 within the block. MODE and SIZE are info on reg REGNO. */
305
306 static void
307 alloc_qty (regno, mode, size, birth)
308 int regno;
309 enum machine_mode mode;
310 int size, birth;
311 {
312 int qtyno = next_qty++;
313
314 reg_qty[regno] = qtyno;
315 reg_offset[regno] = 0;
316 reg_next_in_qty[regno] = -1;
317
318 qty[qtyno].first_reg = regno;
319 qty[qtyno].size = size;
320 qty[qtyno].mode = mode;
321 qty[qtyno].birth = birth;
322 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
323 qty[qtyno].min_class = reg_preferred_class (regno);
324 qty[qtyno].alternate_class = reg_alternate_class (regno);
325 qty[qtyno].n_refs = REG_N_REFS (regno);
326 qty[qtyno].freq = REG_FREQ (regno);
327 }
328 \f
329 /* Main entry point of this file. */
330
331 int
332 local_alloc ()
333 {
334 int i;
335 int max_qty;
336 basic_block b;
337
338 /* We need to keep track of whether or not we recorded a LABEL_REF so
339 that we know if the jump optimizer needs to be rerun. */
340 recorded_label_ref = 0;
341
342 /* Leaf functions and non-leaf functions have different needs.
343 If defined, let the machine say what kind of ordering we
344 should use. */
345 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
346 ORDER_REGS_FOR_LOCAL_ALLOC;
347 #endif
348
349 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
350 registers. */
351 if (optimize)
352 update_equiv_regs ();
353
354 /* This sets the maximum number of quantities we can have. Quantity
355 numbers start at zero and we can have one for each pseudo. */
356 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
357
358 /* Allocate vectors of temporary data.
359 See the declarations of these variables, above,
360 for what they mean. */
361
362 qty = (struct qty *) xmalloc (max_qty * sizeof (struct qty));
363 qty_phys_copy_sugg
364 = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
365 qty_phys_num_copy_sugg = (short *) xmalloc (max_qty * sizeof (short));
366 qty_phys_sugg = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
367 qty_phys_num_sugg = (short *) xmalloc (max_qty * sizeof (short));
368
369 reg_qty = (int *) xmalloc (max_regno * sizeof (int));
370 reg_offset = (char *) xmalloc (max_regno * sizeof (char));
371 reg_next_in_qty = (int *) xmalloc (max_regno * sizeof (int));
372
373 /* Determine which pseudo-registers can be allocated by local-alloc.
374 In general, these are the registers used only in a single block and
375 which only die once.
376
377 We need not be concerned with which block actually uses the register
378 since we will never see it outside that block. */
379
380 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
381 {
382 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
383 reg_qty[i] = -2;
384 else
385 reg_qty[i] = -1;
386 }
387
388 /* Force loop below to initialize entire quantity array. */
389 next_qty = max_qty;
390
391 /* Allocate each block's local registers, block by block. */
392
393 FOR_EACH_BB (b)
394 {
395 /* NEXT_QTY indicates which elements of the `qty_...'
396 vectors might need to be initialized because they were used
397 for the previous block; it is set to the entire array before
398 block 0. Initialize those, with an explicit loop if there are few,
399 else with memset. Do not initialize vectors that are
400 explicitly set by `alloc_qty'. */
401
402 if (next_qty < 6)
403 {
404 for (i = 0; i < next_qty; i++)
405 {
406 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
407 qty_phys_num_copy_sugg[i] = 0;
408 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
409 qty_phys_num_sugg[i] = 0;
410 }
411 }
412 else
413 {
414 #define CLEAR(vector) \
415 memset ((char *) (vector), 0, (sizeof (*(vector))) * next_qty);
416
417 CLEAR (qty_phys_copy_sugg);
418 CLEAR (qty_phys_num_copy_sugg);
419 CLEAR (qty_phys_sugg);
420 CLEAR (qty_phys_num_sugg);
421 }
422
423 next_qty = 0;
424
425 block_alloc (b->index);
426 }
427
428 free (qty);
429 free (qty_phys_copy_sugg);
430 free (qty_phys_num_copy_sugg);
431 free (qty_phys_sugg);
432 free (qty_phys_num_sugg);
433
434 free (reg_qty);
435 free (reg_offset);
436 free (reg_next_in_qty);
437
438 return recorded_label_ref;
439 }
440 \f
441 /* Used for communication between the following two functions: contains
442 a MEM that we wish to ensure remains unchanged. */
443 static rtx equiv_mem;
444
445 /* Set nonzero if EQUIV_MEM is modified. */
446 static int equiv_mem_modified;
447
448 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
449 Called via note_stores. */
450
451 static void
452 validate_equiv_mem_from_store (dest, set, data)
453 rtx dest;
454 rtx set ATTRIBUTE_UNUSED;
455 void *data ATTRIBUTE_UNUSED;
456 {
457 if ((GET_CODE (dest) == REG
458 && reg_overlap_mentioned_p (dest, equiv_mem))
459 || (GET_CODE (dest) == MEM
460 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
461 equiv_mem_modified = 1;
462 }
463
464 /* Verify that no store between START and the death of REG invalidates
465 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
466 by storing into an overlapping memory location, or with a non-const
467 CALL_INSN.
468
469 Return 1 if MEMREF remains valid. */
470
471 static int
472 validate_equiv_mem (start, reg, memref)
473 rtx start;
474 rtx reg;
475 rtx memref;
476 {
477 rtx insn;
478 rtx note;
479
480 equiv_mem = memref;
481 equiv_mem_modified = 0;
482
483 /* If the memory reference has side effects or is volatile, it isn't a
484 valid equivalence. */
485 if (side_effects_p (memref))
486 return 0;
487
488 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
489 {
490 if (! INSN_P (insn))
491 continue;
492
493 if (find_reg_note (insn, REG_DEAD, reg))
494 return 1;
495
496 if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
497 && ! CONST_OR_PURE_CALL_P (insn))
498 return 0;
499
500 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
501
502 /* If a register mentioned in MEMREF is modified via an
503 auto-increment, we lose the equivalence. Do the same if one
504 dies; although we could extend the life, it doesn't seem worth
505 the trouble. */
506
507 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
508 if ((REG_NOTE_KIND (note) == REG_INC
509 || REG_NOTE_KIND (note) == REG_DEAD)
510 && GET_CODE (XEXP (note, 0)) == REG
511 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
512 return 0;
513 }
514
515 return 0;
516 }
517
518 /* Returns zero if X is known to be invariant. */
519
520 static int
521 equiv_init_varies_p (x)
522 rtx x;
523 {
524 RTX_CODE code = GET_CODE (x);
525 int i;
526 const char *fmt;
527
528 switch (code)
529 {
530 case MEM:
531 return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
532
533 case QUEUED:
534 return 1;
535
536 case CONST:
537 case CONST_INT:
538 case CONST_DOUBLE:
539 case CONST_VECTOR:
540 case SYMBOL_REF:
541 case LABEL_REF:
542 return 0;
543
544 case REG:
545 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
546
547 case ASM_OPERANDS:
548 if (MEM_VOLATILE_P (x))
549 return 1;
550
551 /* FALLTHROUGH */
552
553 default:
554 break;
555 }
556
557 fmt = GET_RTX_FORMAT (code);
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559 if (fmt[i] == 'e')
560 {
561 if (equiv_init_varies_p (XEXP (x, i)))
562 return 1;
563 }
564 else if (fmt[i] == 'E')
565 {
566 int j;
567 for (j = 0; j < XVECLEN (x, i); j++)
568 if (equiv_init_varies_p (XVECEXP (x, i, j)))
569 return 1;
570 }
571
572 return 0;
573 }
574
575 /* Returns nonzero if X (used to initialize register REGNO) is movable.
576 X is only movable if the registers it uses have equivalent initializations
577 which appear to be within the same loop (or in an inner loop) and movable
578 or if they are not candidates for local_alloc and don't vary. */
579
580 static int
581 equiv_init_movable_p (x, regno)
582 rtx x;
583 int regno;
584 {
585 int i, j;
586 const char *fmt;
587 enum rtx_code code = GET_CODE (x);
588
589 switch (code)
590 {
591 case SET:
592 return equiv_init_movable_p (SET_SRC (x), regno);
593
594 case CC0:
595 case CLOBBER:
596 return 0;
597
598 case PRE_INC:
599 case PRE_DEC:
600 case POST_INC:
601 case POST_DEC:
602 case PRE_MODIFY:
603 case POST_MODIFY:
604 return 0;
605
606 case REG:
607 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
608 && reg_equiv[REGNO (x)].replace)
609 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
610
611 case UNSPEC_VOLATILE:
612 return 0;
613
614 case ASM_OPERANDS:
615 if (MEM_VOLATILE_P (x))
616 return 0;
617
618 /* FALLTHROUGH */
619
620 default:
621 break;
622 }
623
624 fmt = GET_RTX_FORMAT (code);
625 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
626 switch (fmt[i])
627 {
628 case 'e':
629 if (! equiv_init_movable_p (XEXP (x, i), regno))
630 return 0;
631 break;
632 case 'E':
633 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
634 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
635 return 0;
636 break;
637 }
638
639 return 1;
640 }
641
642 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
643
644 static int
645 contains_replace_regs (x)
646 rtx x;
647 {
648 int i, j;
649 const char *fmt;
650 enum rtx_code code = GET_CODE (x);
651
652 switch (code)
653 {
654 case CONST_INT:
655 case CONST:
656 case LABEL_REF:
657 case SYMBOL_REF:
658 case CONST_DOUBLE:
659 case CONST_VECTOR:
660 case PC:
661 case CC0:
662 case HIGH:
663 return 0;
664
665 case REG:
666 return reg_equiv[REGNO (x)].replace;
667
668 default:
669 break;
670 }
671
672 fmt = GET_RTX_FORMAT (code);
673 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
674 switch (fmt[i])
675 {
676 case 'e':
677 if (contains_replace_regs (XEXP (x, i)))
678 return 1;
679 break;
680 case 'E':
681 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
682 if (contains_replace_regs (XVECEXP (x, i, j)))
683 return 1;
684 break;
685 }
686
687 return 0;
688 }
689 \f
690 /* TRUE if X references a memory location that would be affected by a store
691 to MEMREF. */
692
693 static int
694 memref_referenced_p (memref, x)
695 rtx x;
696 rtx memref;
697 {
698 int i, j;
699 const char *fmt;
700 enum rtx_code code = GET_CODE (x);
701
702 switch (code)
703 {
704 case CONST_INT:
705 case CONST:
706 case LABEL_REF:
707 case SYMBOL_REF:
708 case CONST_DOUBLE:
709 case CONST_VECTOR:
710 case PC:
711 case CC0:
712 case HIGH:
713 case LO_SUM:
714 return 0;
715
716 case REG:
717 return (reg_equiv[REGNO (x)].replacement
718 && memref_referenced_p (memref,
719 reg_equiv[REGNO (x)].replacement));
720
721 case MEM:
722 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
723 return 1;
724 break;
725
726 case SET:
727 /* If we are setting a MEM, it doesn't count (its address does), but any
728 other SET_DEST that has a MEM in it is referencing the MEM. */
729 if (GET_CODE (SET_DEST (x)) == MEM)
730 {
731 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
732 return 1;
733 }
734 else if (memref_referenced_p (memref, SET_DEST (x)))
735 return 1;
736
737 return memref_referenced_p (memref, SET_SRC (x));
738
739 default:
740 break;
741 }
742
743 fmt = GET_RTX_FORMAT (code);
744 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
745 switch (fmt[i])
746 {
747 case 'e':
748 if (memref_referenced_p (memref, XEXP (x, i)))
749 return 1;
750 break;
751 case 'E':
752 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
753 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
754 return 1;
755 break;
756 }
757
758 return 0;
759 }
760
761 /* TRUE if some insn in the range (START, END] references a memory location
762 that would be affected by a store to MEMREF. */
763
764 static int
765 memref_used_between_p (memref, start, end)
766 rtx memref;
767 rtx start;
768 rtx end;
769 {
770 rtx insn;
771
772 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
773 insn = NEXT_INSN (insn))
774 if (INSN_P (insn) && memref_referenced_p (memref, PATTERN (insn)))
775 return 1;
776
777 return 0;
778 }
779 \f
780 /* Return nonzero if the rtx X is invariant over the current function. */
781 /* ??? Actually, the places this is used in reload expect exactly what
782 is tested here, and not everything that is function invariant. In
783 particular, the frame pointer and arg pointer are special cased;
784 pic_offset_table_rtx is not, and this will cause aborts when we
785 go to spill these things to memory. */
786
787 int
788 function_invariant_p (x)
789 rtx x;
790 {
791 if (CONSTANT_P (x))
792 return 1;
793 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
794 return 1;
795 if (GET_CODE (x) == PLUS
796 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
797 && CONSTANT_P (XEXP (x, 1)))
798 return 1;
799 return 0;
800 }
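
/* For instance, frame_pointer_rtx plus a constant offset counts as
   invariant here, as does any CONSTANT_P rtx, while pic_offset_table_rtx
   does not (see the caveat above).  */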
801
802 /* Find registers that are equivalent to a single value throughout the
803 compilation (either because they can be referenced in memory or are set once
804 from a single constant). Lower their priority for a register.
805
806 If such a register is only referenced once, try substituting its value
807 into the using insn. If it succeeds, we can eliminate the register
808 completely. */
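
/* A simplified example of what this pass records (hypothetical RTL):

       (set (reg 100) (mem (symbol_ref "x")))    single set, loads memory

   If the memory location stays unchanged for the life of reg 100, the
   insn gets a REG_EQUIV note for that MEM; and if reg 100 is set once
   and used once, the later scan may substitute the MEM into the using
   insn and delete the load entirely.  */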
809
810 static void
811 update_equiv_regs ()
812 {
813 rtx insn;
814 basic_block bb;
815 int loop_depth;
816 regset_head cleared_regs;
817 int clear_regnos = 0;
818
819 reg_equiv = (struct equivalence *) xcalloc (max_regno, sizeof *reg_equiv);
820 INIT_REG_SET (&cleared_regs);
821
822 init_alias_analysis ();
823
824 /* Scan the insns and find which registers have equivalences. Do this
825 in a separate scan of the insns because (due to -fcse-follow-jumps)
826 a register can be set below its use. */
827 FOR_EACH_BB (bb)
828 {
829 loop_depth = bb->loop_depth;
830
831 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
832 {
833 rtx note;
834 rtx set;
835 rtx dest, src;
836 int regno;
837
838 if (! INSN_P (insn))
839 continue;
840
841 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
842 if (REG_NOTE_KIND (note) == REG_INC)
843 no_equiv (XEXP (note, 0), note, NULL);
844
845 set = single_set (insn);
846
847 /* If this insn contains more (or less) than a single SET,
848 only mark all destinations as having no known equivalence. */
849 if (set == 0)
850 {
851 note_stores (PATTERN (insn), no_equiv, NULL);
852 continue;
853 }
854 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
855 {
856 int i;
857
858 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
859 {
860 rtx part = XVECEXP (PATTERN (insn), 0, i);
861 if (part != set)
862 note_stores (part, no_equiv, NULL);
863 }
864 }
865
866 dest = SET_DEST (set);
867 src = SET_SRC (set);
868
869 /* If this sets a MEM to the contents of a REG that is only used
870 in a single basic block, see if the register is always equivalent
871 to that memory location and if moving the store from INSN to the
872 insn that set REG is safe. If so, put a REG_EQUIV note on the
873 initializing insn.
874
875 Don't add a REG_EQUIV note if the insn already has one. The existing
876 REG_EQUIV is likely more useful than the one we are adding.
877
878 If one of the regs in the address has reg_equiv[REGNO].replace set,
879 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
880 optimization may move the set of this register immediately before
881 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
882 the mention in the REG_EQUIV note would be to an uninitialized
883 pseudo. */
884 /* ??? This test isn't good enough; we might see a MEM with a use of
885 a pseudo register before we see its setting insn that will cause
886 reg_equiv[].replace for that pseudo to be set.
887 Equivalences to MEMs should be made in another pass, after the
888 reg_equiv[].replace information has been gathered. */
889
890 if (GET_CODE (dest) == MEM && GET_CODE (src) == REG
891 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
892 && REG_BASIC_BLOCK (regno) >= 0
893 && REG_N_SETS (regno) == 1
894 && reg_equiv[regno].init_insns != 0
895 && reg_equiv[regno].init_insns != const0_rtx
896 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
897 REG_EQUIV, NULL_RTX)
898 && ! contains_replace_regs (XEXP (dest, 0)))
899 {
900 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
901 if (validate_equiv_mem (init_insn, src, dest)
902 && ! memref_used_between_p (dest, init_insn, insn))
903 REG_NOTES (init_insn)
904 = gen_rtx_EXPR_LIST (REG_EQUIV, dest, REG_NOTES (init_insn));
905 }
906
907 /* We only handle the case of a pseudo register being set
908 once, or always to the same value. */
909 /* ??? The mn10200 port breaks if we add equivalences for
910 values that need an ADDRESS_REGS register and set them equivalent
911 to a MEM of a pseudo. The actual problem is in the over-conservative
912 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
913 calculate_needs, but we traditionally work around this problem
914 here by rejecting equivalences when the destination is in a register
915 that's likely spilled. This is fragile, of course, since the
916 preferred class of a pseudo depends on all instructions that set
917 or use it. */
918
919 if (GET_CODE (dest) != REG
920 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
921 || reg_equiv[regno].init_insns == const0_rtx
922 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
923 && GET_CODE (src) == MEM))
924 {
925 /* This might be setting a SUBREG of a pseudo, a pseudo that is
926 also set somewhere else to a constant. */
927 note_stores (set, no_equiv, NULL);
928 continue;
929 }
930
931 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
932
933 /* cse sometimes generates function invariants, but doesn't put a
934 REG_EQUAL note on the insn. Since this note would be redundant,
935 there's no point creating it earlier than here. */
936 if (! note && ! rtx_varies_p (src, 0))
937 note = set_unique_reg_note (insn, REG_EQUAL, src);
938
939 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
940 since it represents a function call. */
941 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
942 note = NULL_RTX;
943
944 if (REG_N_SETS (regno) != 1
945 && (! note
946 || rtx_varies_p (XEXP (note, 0), 0)
947 || (reg_equiv[regno].replacement
948 && ! rtx_equal_p (XEXP (note, 0),
949 reg_equiv[regno].replacement))))
950 {
951 no_equiv (dest, set, NULL);
952 continue;
953 }
954 /* Record this insn as initializing this register. */
955 reg_equiv[regno].init_insns
956 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
957
958 /* If this register is known to be equal to a constant, record that
959 it is always equivalent to the constant. */
960 if (note && ! rtx_varies_p (XEXP (note, 0), 0))
961 PUT_MODE (note, (enum machine_mode) REG_EQUIV);
962
963 /* If this insn introduces a "constant" register, decrease the priority
964 of that register. Record this insn if the register is only used once
965 more and the equivalence value is the same as our source.
966
967 The latter condition is checked for two reasons: First, it is an
968 indication that it may be more efficient to actually emit the insn
969 as written (if no registers are available, reload will substitute
970 the equivalence). Secondly, it avoids problems with any registers
971 dying in this insn whose death notes would be missed.
972
973 If we don't have a REG_EQUIV note, see if this insn is loading
974 a register used only in one basic block from a MEM. If so, and the
975 MEM remains unchanged for the life of the register, add a REG_EQUIV
976 note. */
977
978 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
979
980 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
981 && GET_CODE (SET_SRC (set)) == MEM
982 && validate_equiv_mem (insn, dest, SET_SRC (set)))
983 REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
984 REG_NOTES (insn));
985
986 if (note)
987 {
988 int regno = REGNO (dest);
989
990 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
991 We might end up substituting the LABEL_REF for uses of the
992 pseudo here or later. That kind of transformation may turn an
993 indirect jump into a direct jump, in which case we must rerun the
994 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
995 if (GET_CODE (XEXP (note, 0)) == LABEL_REF
996 || (GET_CODE (XEXP (note, 0)) == CONST
997 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
998 && (GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0))
999 == LABEL_REF)))
1000 recorded_label_ref = 1;
1001
1002 reg_equiv[regno].replacement = XEXP (note, 0);
1003 reg_equiv[regno].src_p = &SET_SRC (set);
1004 reg_equiv[regno].loop_depth = loop_depth;
1005
1006 /* Don't mess with things live during setjmp. */
1007 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
1008 {
1009 /* Note that the statement below does not affect the priority
1010 in local-alloc! */
1011 REG_LIVE_LENGTH (regno) *= 2;
1012
1013
1014 /* If the register is referenced exactly twice, meaning it is
1015 set once and used once, indicate that the reference may be
1016 replaced by the equivalence we computed above. Do this
1017 even if the register is only used in one block so that
1018 dependencies can be handled where the last register is
1019 used in a different block (i.e. HIGH / LO_SUM sequences)
1020 and to reduce the number of registers alive across
1021 calls. */
1022
1023 if (REG_N_REFS (regno) == 2
1024 && (rtx_equal_p (XEXP (note, 0), src)
1025 || ! equiv_init_varies_p (src))
1026 && GET_CODE (insn) == INSN
1027 && equiv_init_movable_p (PATTERN (insn), regno))
1028 reg_equiv[regno].replace = 1;
1029 }
1030 }
1031 }
1032 }
1033
1034 /* Now scan all regs killed in an insn to see if any of them are
1035 registers used only that once. If so, see if we can replace the
1036 reference with the equivalent form. If we can, delete the
1037 initializing reference and this register will go away. If we
1038 can't replace the reference, and the initializing reference is
1039 within the same loop (or in an inner loop), then move the register
1040 initialization just before the use, so that they are in the same
1041 basic block. */
1042 FOR_EACH_BB_REVERSE (bb)
1043 {
1044 loop_depth = bb->loop_depth;
1045 for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
1046 {
1047 rtx link;
1048
1049 if (! INSN_P (insn))
1050 continue;
1051
1052 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1053 {
1054 if (REG_NOTE_KIND (link) == REG_DEAD
1055 /* Make sure this insn still refers to the register. */
1056 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1057 {
1058 int regno = REGNO (XEXP (link, 0));
1059 rtx equiv_insn;
1060
1061 if (! reg_equiv[regno].replace
1062 || reg_equiv[regno].loop_depth < loop_depth)
1063 continue;
1064
1065 /* reg_equiv[REGNO].replace gets set only when
1066 REG_N_REFS[REGNO] is 2, i.e. the register is set
1067 once and used once. (If it were only set, but not used,
1068 flow would have deleted the setting insns.) Hence
1069 there can only be one insn in reg_equiv[REGNO].init_insns. */
1070 if (reg_equiv[regno].init_insns == NULL_RTX
1071 || XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
1072 abort ();
1073 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1074
1075 /* We may not move instructions that can throw, since
1076 that changes basic block boundaries and we are not
1077 prepared to adjust the CFG to match. */
1078 if (can_throw_internal (equiv_insn))
1079 continue;
1080
1081 if (asm_noperands (PATTERN (equiv_insn)) < 0
1082 && validate_replace_rtx (regno_reg_rtx[regno],
1083 *(reg_equiv[regno].src_p), insn))
1084 {
1085 rtx equiv_link;
1086 rtx last_link;
1087 rtx note;
1088
1089 /* Find the last note. */
1090 for (last_link = link; XEXP (last_link, 1);
1091 last_link = XEXP (last_link, 1))
1092 ;
1093
1094 /* Append the REG_DEAD notes from equiv_insn. */
1095 equiv_link = REG_NOTES (equiv_insn);
1096 while (equiv_link)
1097 {
1098 note = equiv_link;
1099 equiv_link = XEXP (equiv_link, 1);
1100 if (REG_NOTE_KIND (note) == REG_DEAD)
1101 {
1102 remove_note (equiv_insn, note);
1103 XEXP (last_link, 1) = note;
1104 XEXP (note, 1) = NULL_RTX;
1105 last_link = note;
1106 }
1107 }
1108
1109 remove_death (regno, insn);
1110 REG_N_REFS (regno) = 0;
1111 REG_FREQ (regno) = 0;
1112 delete_insn (equiv_insn);
1113
1114 reg_equiv[regno].init_insns
1115 = XEXP (reg_equiv[regno].init_insns, 1);
1116 }
1117 /* Move the initialization of the register to just before
1118 INSN. Update the flow information. */
1119 else if (PREV_INSN (insn) != equiv_insn)
1120 {
1121 rtx new_insn;
1122
1123 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1124 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1125 REG_NOTES (equiv_insn) = 0;
1126
1127 /* Make sure this insn is recognized before reload begins,
1128 otherwise eliminate_regs_in_insn will abort. */
1129 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1130
1131 delete_insn (equiv_insn);
1132
1133 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1134
1135 REG_BASIC_BLOCK (regno) = bb->index;
1136 REG_N_CALLS_CROSSED (regno) = 0;
1137 REG_LIVE_LENGTH (regno) = 2;
1138
1139 if (insn == bb->head)
1140 bb->head = PREV_INSN (insn);
1141
1142 /* Remember to clear REGNO from all basic blocks' live
1143 info. */
1144 SET_REGNO_REG_SET (&cleared_regs, regno);
1145 clear_regnos++;
1146 }
1147 }
1148 }
1149 }
1150 }
1151
1152 /* Clear all dead REGNOs from all basic blocks' live info. */
1153 if (clear_regnos)
1154 {
1155 int j;
1156 if (clear_regnos > 8)
1157 {
1158 FOR_EACH_BB (bb)
1159 {
1160 AND_COMPL_REG_SET (bb->global_live_at_start, &cleared_regs);
1161 AND_COMPL_REG_SET (bb->global_live_at_end, &cleared_regs);
1162 }
1163 }
1164 else
1165 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
1166 {
1167 FOR_EACH_BB (bb)
1168 {
1169 CLEAR_REGNO_REG_SET (bb->global_live_at_start, j);
1170 CLEAR_REGNO_REG_SET (bb->global_live_at_end, j);
1171 }
1172 });
1173 }
1174
1175 /* Clean up. */
1176 end_alias_analysis ();
1177 CLEAR_REG_SET (&cleared_regs);
1178 free (reg_equiv);
1179 }
1180
1181 /* Mark REG as having no known equivalence.
1182 Some instructions might have been processed before and furnished
1183 with REG_EQUIV notes for this register; these notes will have to be
1184 removed.
1185 STORE is the piece of RTL that does the non-constant / conflicting
1186 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1187 but needs to be there because this function is called from note_stores. */
1188 static void
1189 no_equiv (reg, store, data)
1190 rtx reg, store ATTRIBUTE_UNUSED;
1191 void *data ATTRIBUTE_UNUSED;
1192 {
1193 int regno;
1194 rtx list;
1195
1196 if (GET_CODE (reg) != REG)
1197 return;
1198 regno = REGNO (reg);
1199 list = reg_equiv[regno].init_insns;
1200 if (list == const0_rtx)
1201 return;
1202 for (; list; list = XEXP (list, 1))
1203 {
1204 rtx insn = XEXP (list, 0);
1205 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1206 }
1207 reg_equiv[regno].init_insns = const0_rtx;
1208 reg_equiv[regno].replacement = NULL_RTX;
1209 }
1210 \f
1211 /* Allocate hard regs to the pseudo regs used only within block number B.
1212 Only the pseudos that die but once can be handled. */
1213
1214 static void
1215 block_alloc (b)
1216 int b;
1217 {
1218 int i, q;
1219 rtx insn;
1220 rtx note, hard_reg;
1221 int insn_number = 0;
1222 int insn_count = 0;
1223 int max_uid = get_max_uid ();
1224 int *qty_order;
1225 int no_conflict_combined_regno = -1;
1226
1227 /* Count the instructions in the basic block. */
1228
1229 insn = BLOCK_END (b);
1230 while (1)
1231 {
1232 if (GET_CODE (insn) != NOTE)
1233 if (++insn_count > max_uid)
1234 abort ();
1235 if (insn == BLOCK_HEAD (b))
1236 break;
1237 insn = PREV_INSN (insn);
1238 }
1239
1240 /* +2 to leave room for a post_mark_life at the last insn and for
1241 the birth of a CLOBBER in the first insn. */
1242 regs_live_at = (HARD_REG_SET *) xcalloc ((2 * insn_count + 2),
1243 sizeof (HARD_REG_SET));
1244
1245 /* Initialize table of hardware registers currently live. */
1246
1247 REG_SET_TO_HARD_REG_SET (regs_live, BASIC_BLOCK (b)->global_live_at_start);
1248
1249 /* This loop scans the instructions of the basic block
1250 and assigns quantities to registers.
1251 It computes which registers to tie. */
1252
1253 insn = BLOCK_HEAD (b);
1254 while (1)
1255 {
1256 if (GET_CODE (insn) != NOTE)
1257 insn_number++;
1258
1259 if (INSN_P (insn))
1260 {
1261 rtx link, set;
1262 int win = 0;
1263 rtx r0, r1 = NULL_RTX;
1264 int combined_regno = -1;
1265 int i;
1266
1267 this_insn_number = insn_number;
1268 this_insn = insn;
1269
1270 extract_insn (insn);
1271 which_alternative = -1;
1272
1273 /* Is this insn suitable for tying two registers?
1274 If so, try doing that.
1275 Suitable insns are those with at least two operands and where
1276 operand 0 is an output that is a register that is not
1277 earlyclobber.
1278
1279 We can tie operand 0 with some operand that dies in this insn.
1280 First look for operands that are required to be in the same
1281 register as operand 0. If we find such, only try tying that
1282 operand or one that can be put into that operand if the
1283 operation is commutative. If we don't find an operand
1284 that is required to be in the same register as operand 0,
1285 we can tie with any operand.
1286
1287 Subregs in place of regs are also ok.
1288
1289 If tying is done, WIN is set nonzero. */
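
/* For example (hypothetical constraints): a two-address add whose
   operand constraints are roughly

       operand 0: "=r"     register output, not earlyclobber
       operand 1: "0"      must match operand 0
       operand 2: "rI"

   makes us try to tie operand 1 to operand 0 first, whereas a plain
   move (no matching constraint) may tie operand 0 with any input that
   dies here.  */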
1290
1291 if (optimize
1292 && recog_data.n_operands > 1
1293 && recog_data.constraints[0][0] == '='
1294 && recog_data.constraints[0][1] != '&')
1295 {
1296 /* If non-negative, is an operand that must match operand 0. */
1297 int must_match_0 = -1;
1298 /* Counts number of alternatives that require a match with
1299 operand 0. */
1300 int n_matching_alts = 0;
1301
1302 for (i = 1; i < recog_data.n_operands; i++)
1303 {
1304 const char *p = recog_data.constraints[i];
1305 int this_match = requires_inout (p);
1306
1307 n_matching_alts += this_match;
1308 if (this_match == recog_data.n_alternatives)
1309 must_match_0 = i;
1310 }
1311
1312 r0 = recog_data.operand[0];
1313 for (i = 1; i < recog_data.n_operands; i++)
1314 {
1315 /* Skip this operand if we found an operand that
1316 must match operand 0 and this operand isn't it
1317 and can't be made to be it by commutativity. */
1318
1319 if (must_match_0 >= 0 && i != must_match_0
1320 && ! (i == must_match_0 + 1
1321 && recog_data.constraints[i-1][0] == '%')
1322 && ! (i == must_match_0 - 1
1323 && recog_data.constraints[i][0] == '%'))
1324 continue;
1325
1326 /* Likewise if each alternative has some operand that
1327 must match operand zero. In that case, skip any
1328 operand that doesn't list operand 0 since we know that
1329 the operand always conflicts with operand 0. We
1330 ignore commutativity in this case to keep things simple. */
1331 if (n_matching_alts == recog_data.n_alternatives
1332 && 0 == requires_inout (recog_data.constraints[i]))
1333 continue;
1334
1335 r1 = recog_data.operand[i];
1336
1337 /* If the operand is an address, find a register in it.
1338 There may be more than one register, but we only try one
1339 of them. */
1340 if (recog_data.constraints[i][0] == 'p'
1341 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0]))
1342 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1343 r1 = XEXP (r1, 0);
1344
1345 /* Avoid making a call-saved register unnecessarily
1346 clobbered. */
1347 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1348 if (hard_reg != NULL_RTX)
1349 {
1350 if (GET_CODE (hard_reg) == REG
1351 && IN_RANGE (REGNO (hard_reg),
1352 0, FIRST_PSEUDO_REGISTER - 1)
1353 && ! call_used_regs[REGNO (hard_reg)])
1354 continue;
1355 }
1356
1357 if (GET_CODE (r0) == REG || GET_CODE (r0) == SUBREG)
1358 {
1359 /* We have two priorities for hard register preferences.
1360 If we have a move insn or an insn whose first input
1361 can only be in the same register as the output, give
1362 priority to an equivalence found from that insn. */
1363 int may_save_copy
1364 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1365
1366 if (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1367 win = combine_regs (r1, r0, may_save_copy,
1368 insn_number, insn, 0);
1369 }
1370 if (win)
1371 break;
1372 }
1373 }
1374
1375 /* Recognize an insn sequence with an ultimate result
1376 which can safely overlap one of the inputs.
1377 The sequence begins with a CLOBBER of its result,
1378 and ends with an insn that copies the result to itself
1379 and has a REG_EQUAL note for an equivalent formula.
1380 That note indicates what the inputs are.
1381 The result and the input can overlap if each insn in
1382 the sequence either doesn't mention the input
1383 or has a REG_NO_CONFLICT note to inhibit the conflict.
1384
1385 We do the combining test at the CLOBBER so that the
1386 destination register won't have had a quantity number
1387 assigned, since that would prevent combining. */
1388
1389 if (optimize
1390 && GET_CODE (PATTERN (insn)) == CLOBBER
1391 && (r0 = XEXP (PATTERN (insn), 0),
1392 GET_CODE (r0) == REG)
1393 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1394 && XEXP (link, 0) != 0
1395 && GET_CODE (XEXP (link, 0)) == INSN
1396 && (set = single_set (XEXP (link, 0))) != 0
1397 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1398 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1399 NULL_RTX)) != 0)
1400 {
1401 if (r1 = XEXP (note, 0), GET_CODE (r1) == REG
1402 /* Check that we have such a sequence. */
1403 && no_conflict_p (insn, r0, r1))
1404 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1405 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1406 && (r1 = XEXP (XEXP (note, 0), 0),
1407 GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1408 && no_conflict_p (insn, r0, r1))
1409 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1410
1411 /* Here we care if the operation to be computed is
1412 commutative. */
1413 else if ((GET_CODE (XEXP (note, 0)) == EQ
1414 || GET_CODE (XEXP (note, 0)) == NE
1415 || GET_RTX_CLASS (GET_CODE (XEXP (note, 0))) == 'c')
1416 && (r1 = XEXP (XEXP (note, 0), 1),
1417 (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG))
1418 && no_conflict_p (insn, r0, r1))
1419 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1420
1421 /* If we did combine something, show the register number
1422 in question so that we know to ignore its death. */
1423 if (win)
1424 no_conflict_combined_regno = REGNO (r1);
1425 }
1426
1427 /* If registers were just tied, set COMBINED_REGNO
1428 to the number of the register used in this insn
1429 that was tied to the register set in this insn.
1430 This register's qty should not be "killed". */
1431
1432 if (win)
1433 {
1434 while (GET_CODE (r1) == SUBREG)
1435 r1 = SUBREG_REG (r1);
1436 combined_regno = REGNO (r1);
1437 }
1438
1439 /* Mark the death of everything that dies in this instruction,
1440 except for anything that was just combined. */
1441
1442 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1443 if (REG_NOTE_KIND (link) == REG_DEAD
1444 && GET_CODE (XEXP (link, 0)) == REG
1445 && combined_regno != (int) REGNO (XEXP (link, 0))
1446 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1447 || ! find_reg_note (insn, REG_NO_CONFLICT,
1448 XEXP (link, 0))))
1449 wipe_dead_reg (XEXP (link, 0), 0);
1450
1451 /* Allocate qty numbers for all registers local to this block
1452 that are born (set) in this instruction.
1453 A pseudo that already has a qty is not changed. */
1454
1455 note_stores (PATTERN (insn), reg_is_set, NULL);
1456
1457 /* If anything is set in this insn and then unused, mark it as dying
1458 after this insn, so it will conflict with our outputs. This
1459 can't match with something that combined, and it doesn't matter
1460 if it did. Do this after the calls to reg_is_set since these
1461 die after, not during, the current insn. */
1462
1463 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1464 if (REG_NOTE_KIND (link) == REG_UNUSED
1465 && GET_CODE (XEXP (link, 0)) == REG)
1466 wipe_dead_reg (XEXP (link, 0), 1);
1467
1468 /* If this is an insn that has a REG_RETVAL note pointing at a
1469 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1470 block, so clear any register number that combined within it. */
1471 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1472 && GET_CODE (XEXP (note, 0)) == INSN
1473 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1474 no_conflict_combined_regno = -1;
1475 }
1476
1477 /* Set the registers live after INSN_NUMBER. Note that we never
1478 record the registers live before the block's first insn, since no
1479 pseudos we care about are live before that insn. */
1480
1481 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1482 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1483
1484 if (insn == BLOCK_END (b))
1485 break;
1486
1487 insn = NEXT_INSN (insn);
1488 }
1489
1490 /* Now every register that is local to this basic block
1491 should have been given a quantity, or else -1 meaning ignore it.
1492 Every quantity should have a known birth and death.
1493
1494 Order the qtys so we assign them registers in order of the
1495 number of suggested registers they need so we allocate those with
1496 the most restrictive needs first. */
1497
1498 qty_order = (int *) xmalloc (next_qty * sizeof (int));
1499 for (i = 0; i < next_qty; i++)
1500 qty_order[i] = i;
1501
1502 #define EXCHANGE(I1, I2) \
1503 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1504
1505 switch (next_qty)
1506 {
1507 case 3:
1508 /* Make qty_order[2] be the one to allocate last. */
1509 if (qty_sugg_compare (0, 1) > 0)
1510 EXCHANGE (0, 1);
1511 if (qty_sugg_compare (1, 2) > 0)
1512 EXCHANGE (2, 1);
1513
1514 /* ... Fall through ... */
1515 case 2:
1516 /* Put the best one to allocate in qty_order[0]. */
1517 if (qty_sugg_compare (0, 1) > 0)
1518 EXCHANGE (0, 1);
1519
1520 /* ... Fall through ... */
1521
1522 case 1:
1523 case 0:
1524 /* Nothing to do here. */
1525 break;
1526
1527 default:
1528 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1529 }
1530
1531 /* Try to put each quantity in a suggested physical register, if it has one.
1532 This may cause registers to be allocated that otherwise wouldn't be, but
1533 this seems acceptable in local allocation (unlike global allocation). */
1534 for (i = 0; i < next_qty; i++)
1535 {
1536 q = qty_order[i];
1537 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1538 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1539 0, 1, qty[q].birth, qty[q].death);
1540 else
1541 qty[q].phys_reg = -1;
1542 }
1543
1544 /* Order the qtys so we assign them registers in order of
1545 decreasing length of life. Normally call qsort, but if we
1546 have only a very small number of quantities, sort them ourselves. */
1547
1548 for (i = 0; i < next_qty; i++)
1549 qty_order[i] = i;
1550
1551 #define EXCHANGE(I1, I2) \
1552 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1553
1554 switch (next_qty)
1555 {
1556 case 3:
1557 /* Make qty_order[2] be the one to allocate last. */
1558 if (qty_compare (0, 1) > 0)
1559 EXCHANGE (0, 1);
1560 if (qty_compare (1, 2) > 0)
1561 EXCHANGE (2, 1);
1562
1563 /* ... Fall through ... */
1564 case 2:
1565 /* Put the best one to allocate in qty_order[0]. */
1566 if (qty_compare (0, 1) > 0)
1567 EXCHANGE (0, 1);
1568
1569 /* ... Fall through ... */
1570
1571 case 1:
1572 case 0:
1573 /* Nothing to do here. */
1574 break;
1575
1576 default:
1577 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1578 }
1579
1580 /* Now for each qty that is not a hardware register,
1581 look for a hardware register to put it in.
1582 First try the register class that is cheapest for this qty,
1583 if there is more than one class. */
1584
1585 for (i = 0; i < next_qty; i++)
1586 {
1587 q = qty_order[i];
1588 if (qty[q].phys_reg < 0)
1589 {
1590 #ifdef INSN_SCHEDULING
1591 /* These values represent the adjusted lifetime of a qty so
1592 that it conflicts with qtys which appear near the start/end
1593 of this qty's lifetime.
1594
1595 The purpose behind extending the lifetime of this qty is to
1596 discourage the register allocator from creating false
1597 dependencies.
1598
1599 The adjustment value is chosen to indicate that this qty
1600 conflicts with all the qtys in the instructions immediately
1601 before and after the lifetime of this qty.
1602
1603 Experiments have shown that higher values tend to hurt
1604 overall code performance.
1605
1606 If allocation using the extended lifetime fails we will try
1607 again with the qty's unadjusted lifetime. */
1608 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1609 int fake_death = MIN (insn_number * 2 + 1,
1610 qty[q].death + 2 - qty[q].death % 2);
1611 #endif
1612
1613 if (N_REG_CLASSES > 1)
1614 {
1615 #ifdef INSN_SCHEDULING
1616 /* We try to avoid using hard registers allocated to qtys which
1617 are born immediately after this qty or die immediately before
1618 this qty.
1619
1620 This optimization is only appropriate when we will run
1621 a scheduling pass after reload and we are not optimizing
1622 for code size. */
1623 if (flag_schedule_insns_after_reload
1624 && !optimize_size
1625 && !SMALL_REGISTER_CLASSES)
1626 {
1627 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1628 qty[q].mode, q, 0, 0,
1629 fake_birth, fake_death);
1630 if (qty[q].phys_reg >= 0)
1631 continue;
1632 }
1633 #endif
1634 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1635 qty[q].mode, q, 0, 0,
1636 qty[q].birth, qty[q].death);
1637 if (qty[q].phys_reg >= 0)
1638 continue;
1639 }
1640
1641 #ifdef INSN_SCHEDULING
1642 /* Similarly, avoid false dependencies. */
1643 if (flag_schedule_insns_after_reload
1644 && !optimize_size
1645 && !SMALL_REGISTER_CLASSES
1646 && qty[q].alternate_class != NO_REGS)
1647 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1648 qty[q].mode, q, 0, 0,
1649 fake_birth, fake_death);
1650 #endif
1651 if (qty[q].alternate_class != NO_REGS)
1652 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1653 qty[q].mode, q, 0, 0,
1654 qty[q].birth, qty[q].death);
1655 }
1656 }
1657
1658 /* Now propagate the register assignments
1659 to the pseudo regs belonging to the qtys. */
1660
1661 for (q = 0; q < next_qty; q++)
1662 if (qty[q].phys_reg >= 0)
1663 {
1664 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1665 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1666 }
1667
1668 /* Clean up. */
1669 free (regs_live_at);
1670 free (qty_order);
1671 }
1672 \f
1673 /* Compare two quantities' priority for getting real registers.
1674 We give shorter-lived quantities higher priority.
1675 Quantities with more references are also preferred, as are quantities that
1676 require multiple registers. This is identical to the prioritization
1677 done by global-alloc.
1678
1679 We used to give preference to registers with *longer* lives, but using
1680 the same algorithm in both local- and global-alloc can speed up execution
1681 of some programs by as much as a factor of three! */
1682
1683 /* Note that the quotient will never be bigger than
1684 the value of floor_log2 times the maximum number of
1685 times a register can occur in one insn (surely less than 100)
1686 weighted by frequency (max REG_FREQ_MAX).
1687 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1688 QTY_CMP_PRI is also used by qty_sugg_compare. */
1689
1690 #define QTY_CMP_PRI(q) \
1691 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1692 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
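
/* A rough worked example (ignoring the integer truncations): a quantity
   with n_refs = 4 (floor_log2 = 2), size = 1, freq = REG_FREQ_MAX and a
   life span of 10 insn indices scores about 2 * 1 * 10000 / 10 = 2000;
   halving the life span doubles its priority.  */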
1693
1694 static int
1695 qty_compare (q1, q2)
1696 int q1, q2;
1697 {
1698 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1699 }
1700
1701 static int
1702 qty_compare_1 (q1p, q2p)
1703 const PTR q1p;
1704 const PTR q2p;
1705 {
1706 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1707 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1708
1709 if (tem != 0)
1710 return tem;
1711
1712 /* If qtys are equally good, sort by qty number,
1713 so that the results of qsort leave nothing to chance. */
1714 return q1 - q2;
1715 }
1716 \f
1717 /* Compare two quantities' priority for getting real registers. This version
1718 is called for quantities that have suggested hard registers. First priority
1719 goes to quantities that have copy preferences, then to those that have
1720 normal preferences. Within those groups, quantities with the lower
1721 number of preferences have the highest priority. Of those, we use the same
1722 algorithm as above. */
1723
1724 #define QTY_CMP_SUGG(q) \
1725 (qty_phys_num_copy_sugg[q] \
1726 ? qty_phys_num_copy_sugg[q] \
1727 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
1728
1729 static int
1730 qty_sugg_compare (q1, q2)
1731 int q1, q2;
1732 {
1733 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1734
1735 if (tem != 0)
1736 return tem;
1737
1738 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1739 }
1740
1741 static int
1742 qty_sugg_compare_1 (q1p, q2p)
1743 const PTR q1p;
1744 const PTR q2p;
1745 {
1746 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1747 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1748
1749 if (tem != 0)
1750 return tem;
1751
1752 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1753 if (tem != 0)
1754 return tem;
1755
1756 /* If qtys are equally good, sort by qty number,
1757 so that the results of qsort leave nothing to chance. */
1758 return q1 - q2;
1759 }
1760
1761 #undef QTY_CMP_SUGG
1762 #undef QTY_CMP_PRI
1763 \f
1764 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1765 Returns 1 if we have done so, or 0 if we cannot.
1766
1767 Combining registers means marking them as having the same quantity
1768 and adjusting the offsets within the quantity (if either of
1769 them is a SUBREG).
1770
1771 We don't actually combine a hard reg with a pseudo; instead
1772 we just record the hard reg as the suggestion for the pseudo's quantity.
1773 If we really combined them, we could lose if the pseudo lives
1774 across an insn that clobbers the hard reg (e.g., movstr).
1775
1776 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1777 there is no REG_DEAD note on INSN. This occurs during the processing
1778 of REG_NO_CONFLICT blocks.
1779
1780 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1781 SETREG or if the input and output must share a register.
1782 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1783
1784 There are elaborate checks for the validity of combining. */
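/* Illustrative case (hypothetical pseudo numbers): for the copy insn
   (set (reg:SI 105) (reg:SI 104)) in which pseudo 104 dies, USEDREG is
   (reg:SI 104) and SETREG is (reg:SI 105); if their classes are
   compatible, 105 joins 104's quantity so that both pseudos can end up
   in the same hard register and the copy becomes a no-op move.  */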
1785
1786 static int
1787 combine_regs (usedreg, setreg, may_save_copy, insn_number, insn, already_dead)
1788 rtx usedreg, setreg;
1789 int may_save_copy;
1790 int insn_number;
1791 rtx insn;
1792 int already_dead;
1793 {
1794 int ureg, sreg;
1795 int offset = 0;
1796 int usize, ssize;
1797 int sqty;
1798
1799 /* Determine the numbers and sizes of registers being used. If a subreg
1800 is present that does not change the entire register, don't consider
1801 this a copy insn. */
1802
1803 while (GET_CODE (usedreg) == SUBREG)
1804 {
1805 rtx subreg = SUBREG_REG (usedreg);
1806
1807 if (GET_CODE (subreg) == REG)
1808 {
1809 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1810 may_save_copy = 0;
1811
1812 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1813 offset += subreg_regno_offset (REGNO (subreg),
1814 GET_MODE (subreg),
1815 SUBREG_BYTE (usedreg),
1816 GET_MODE (usedreg));
1817 else
1818 offset += (SUBREG_BYTE (usedreg)
1819 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1820 }
1821
1822 usedreg = subreg;
1823 }
1824
1825 if (GET_CODE (usedreg) != REG)
1826 return 0;
1827
1828 ureg = REGNO (usedreg);
1829 if (ureg < FIRST_PSEUDO_REGISTER)
1830 usize = HARD_REGNO_NREGS (ureg, GET_MODE (usedreg));
1831 else
1832 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1833 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1834 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1835
1836 while (GET_CODE (setreg) == SUBREG)
1837 {
1838 rtx subreg = SUBREG_REG (setreg);
1839
1840 if (GET_CODE (subreg) == REG)
1841 {
1842 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1843 may_save_copy = 0;
1844
1845 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1846 offset -= subreg_regno_offset (REGNO (subreg),
1847 GET_MODE (subreg),
1848 SUBREG_BYTE (setreg),
1849 GET_MODE (setreg));
1850 else
1851 offset -= (SUBREG_BYTE (setreg)
1852 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1853 }
1854
1855 setreg = subreg;
1856 }
1857
1858 if (GET_CODE (setreg) != REG)
1859 return 0;
1860
1861 sreg = REGNO (setreg);
1862 if (sreg < FIRST_PSEUDO_REGISTER)
1863 ssize = HARD_REGNO_NREGS (sreg, GET_MODE (setreg));
1864 else
1865 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1866 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1867 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1868
1869 /* If UREG is a pseudo-register that hasn't already been assigned a
1870 quantity number, it means that it is not local to this block or dies
1871 more than once. In either event, we can't do anything with it. */
1872 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1873 /* Do not combine registers unless one fits within the other. */
1874 || (offset > 0 && usize + offset > ssize)
1875 || (offset < 0 && usize + offset < ssize)
1876 /* Do not combine with a smaller already-assigned object
1877 if that smaller object is already combined with something bigger. */
1878 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1879 && usize < qty[reg_qty[ureg]].size)
1880 /* Can't combine if SREG is not a register we can allocate. */
1881 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1882 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1883 These have already been taken care of. This probably wouldn't
1884 combine anyway, but don't take any chances. */
1885 || (ureg >= FIRST_PSEUDO_REGISTER
1886 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1887 /* Don't tie something to itself. In most cases it would make no
1888 difference, but it would screw up if the reg being tied to itself
1889 also dies in this insn. */
1890 || ureg == sreg
1891 /* Don't try to connect two different hardware registers. */
1892 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1893 /* Don't connect two different machine modes if they have different
1894 implications as to which registers may be used. */
1895 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1896 return 0;
1897
1898 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1899 qty_phys_sugg for the pseudo instead of tying them.
1900
1901 Return "failure" so that the lifespan of UREG is terminated here;
1902 that way the two lifespans will be disjoint and nothing will prevent
1903 the pseudo reg from being given this hard reg. */
1904
1905 if (ureg < FIRST_PSEUDO_REGISTER)
1906 {
1907 /* Allocate a quantity number so we have a place to put our
1908 suggestions. */
1909 if (reg_qty[sreg] == -2)
1910 reg_is_born (setreg, 2 * insn_number);
1911
1912 if (reg_qty[sreg] >= 0)
1913 {
1914 if (may_save_copy
1915 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1916 {
1917 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1918 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1919 }
1920 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1921 {
1922 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1923 qty_phys_num_sugg[reg_qty[sreg]]++;
1924 }
1925 }
1926 return 0;
1927 }
1928
1929 /* Similarly for SREG a hard register and UREG a pseudo register. */
1930
1931 if (sreg < FIRST_PSEUDO_REGISTER)
1932 {
1933 if (may_save_copy
1934 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1935 {
1936 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1937 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1938 }
1939 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1940 {
1941 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1942 qty_phys_num_sugg[reg_qty[ureg]]++;
1943 }
1944 return 0;
1945 }
1946
1947 /* At this point we know that SREG and UREG are both pseudos.
1948 Do nothing if SREG already has a quantity or is a register that we
1949 don't allocate. */
1950 if (reg_qty[sreg] >= -1
1951 /* If we are not going to let any regs live across calls,
1952 don't tie a call-crossing reg to a non-call-crossing reg. */
1953 || (current_function_has_nonlocal_label
1954 && ((REG_N_CALLS_CROSSED (ureg) > 0)
1955 != (REG_N_CALLS_CROSSED (sreg) > 0))))
1956 return 0;
1957
1958 /* We don't already know about SREG, so tie it to UREG
1959 if this is the last use of UREG, provided the classes they want
1960 are compatible. */
1961
1962 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
1963 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
1964 {
1965 /* Add SREG to UREG's quantity. */
1966 sqty = reg_qty[ureg];
1967 reg_qty[sreg] = sqty;
1968 reg_offset[sreg] = reg_offset[ureg] + offset;
1969 reg_next_in_qty[sreg] = qty[sqty].first_reg;
1970 qty[sqty].first_reg = sreg;
1971
1972 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
1973 update_qty_class (sqty, sreg);
1974
1975 /* Update info about quantity SQTY. */
1976 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
1977 qty[sqty].n_refs += REG_N_REFS (sreg);
1978 qty[sqty].freq += REG_FREQ (sreg);
1979 if (usize < ssize)
1980 {
1981 int i;
1982
1983 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
1984 reg_offset[i] -= offset;
1985
1986 qty[sqty].size = ssize;
1987 qty[sqty].mode = GET_MODE (setreg);
1988 }
1989 }
1990 else
1991 return 0;
1992
1993 return 1;
1994 }
1995 \f
1996 /* Return 1 if the preferred class of REG allows it to be tied
1997 to a quantity or register whose class is CLASS.
1998 True if REG's reg class either contains or is contained in CLASS. */
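/* For example, using i386-style class names as an illustration: a REG
   whose preferred class is AREG meets GENERAL_REGS, since AREG is a
   subclass of GENERAL_REGS, but it does not meet FLOAT_REGS, which
   neither contains nor is contained in AREG.  */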
1999
2000 static int
2001 reg_meets_class_p (reg, class)
2002 int reg;
2003 enum reg_class class;
2004 {
2005 enum reg_class rclass = reg_preferred_class (reg);
2006 return (reg_class_subset_p (rclass, class)
2007 || reg_class_subset_p (class, rclass));
2008 }
2009
2010 /* Update the class of QTYNO assuming that REG is being tied to it. */
2011
2012 static void
2013 update_qty_class (qtyno, reg)
2014 int qtyno;
2015 int reg;
2016 {
2017 enum reg_class rclass = reg_preferred_class (reg);
2018 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2019 qty[qtyno].min_class = rclass;
2020
2021 rclass = reg_alternate_class (reg);
2022 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2023 qty[qtyno].alternate_class = rclass;
2024 }
2025 \f
2026 /* Handle something which alters the value of an rtx REG.
2027
2028 REG is whatever is set or clobbered. SETTER is the rtx that
2029 is modifying the register.
2030
2031 If it is not really a register, we do nothing.
2032 The file-global variables `this_insn' and `this_insn_number'
2033 carry info from `block_alloc'. */
2034
2035 static void
2036 reg_is_set (reg, setter, data)
2037 rtx reg;
2038 rtx setter;
2039 void *data ATTRIBUTE_UNUSED;
2040 {
2041 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2042 a hard register. These may actually not exist any more. */
2043
2044 if (GET_CODE (reg) != SUBREG
2045 && GET_CODE (reg) != REG)
2046 return;
2047
2048 /* Mark this register as being born. If it is used in a CLOBBER, mark
2049 it as being born halfway between the previous insn and this insn so that
2050 it conflicts with our inputs but not the outputs of the previous insn. */
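   /* For instance, when this_insn_number is 7, a SET is born at position
      2 * 7 == 14 while a CLOBBER is born at position 13, halfway between
      insn 6 and insn 7.  */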
2051
2052 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2053 }
2054 \f
2055 /* Handle beginning of the life of register REG.
2056 BIRTH is the index at which this is happening. */
2057
2058 static void
2059 reg_is_born (reg, birth)
2060 rtx reg;
2061 int birth;
2062 {
2063 int regno;
2064
2065 if (GET_CODE (reg) == SUBREG)
2066 {
2067 regno = REGNO (SUBREG_REG (reg));
2068 if (regno < FIRST_PSEUDO_REGISTER)
2069 regno = subreg_hard_regno (reg, 1);
2070 }
2071 else
2072 regno = REGNO (reg);
2073
2074 if (regno < FIRST_PSEUDO_REGISTER)
2075 {
2076 mark_life (regno, GET_MODE (reg), 1);
2077
2078 /* If the register was to have been born earlier than the present
2079 insn, mark it as live where it is actually born. */
2080 if (birth < 2 * this_insn_number)
2081 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2082 }
2083 else
2084 {
2085 if (reg_qty[regno] == -2)
2086 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2087
2088 /* If this register has a quantity number, show that it isn't dead. */
2089 if (reg_qty[regno] >= 0)
2090 qty[reg_qty[regno]].death = -1;
2091 }
2092 }
2093
2094 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2095 REG is an output that is dying (i.e., it is never used), otherwise it
2096 is an input (the normal case).
2097 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2098
2099 static void
2100 wipe_dead_reg (reg, output_p)
2101 rtx reg;
2102 int output_p;
2103 {
2104 int regno = REGNO (reg);
2105
2106 /* If this insn has multiple results,
2107 and the dead reg is used in one of the results,
2108 extend its life to after this insn,
2109 so it won't get allocated together with any other result of this insn.
2110
2111 It is unsafe to use !single_set here since it will ignore an unused
2112 output. Just because an output is unused does not mean the compiler
2113 can assume the side effect will not occur. Consider if REG appears
2114 in the address of an output and we reload the output. If we allocate
2115 REG to the same hard register as an unused output we could set the hard
2116 register before the output reload insn. */
2117 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2118 && multiple_sets (this_insn))
2119 {
2120 int i;
2121 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2122 {
2123 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2124 if (GET_CODE (set) == SET
2125 && GET_CODE (SET_DEST (set)) != REG
2126 && !rtx_equal_p (reg, SET_DEST (set))
2127 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2128 output_p = 1;
2129 }
2130 }
2131
2132 /* If this register is used in an auto-increment address, then extend its
2133 life to after this insn, so that it won't get allocated together with
2134 the result of this insn. */
2135 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2136 output_p = 1;
2137
2138 if (regno < FIRST_PSEUDO_REGISTER)
2139 {
2140 mark_life (regno, GET_MODE (reg), 0);
2141
2142 /* If a hard register is dying as an output, mark it as in use at
2143 the beginning of this insn (the above statement would cause this
2144 not to happen). */
2145 if (output_p)
2146 post_mark_life (regno, GET_MODE (reg), 1,
2147 2 * this_insn_number, 2 * this_insn_number + 1);
2148 }
2149
2150 else if (reg_qty[regno] >= 0)
2151 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2152 }
2153 \f
2154 /* Find a block of SIZE words of hard regs in reg_class CLASS
2155 that can hold something of machine-mode MODE
2156 (but actually we test only the first of the block for holding MODE)
2157 and that is still free between insn BORN_INDEX and insn DEAD_INDEX,
2158 and return the number of the first of them.
2159 Return -1 if such a block cannot be found.
2160 If QTYNO crosses calls, insist on a register preserved by calls,
2161 unless ACCEPT_CALL_CLOBBERED is nonzero.
2162
2163 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2164 register is available. If not, return -1. */
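/* On its failure path this function may call itself: once to fall back
   from copy-suggested to arithmetic-suggested registers, and once to
   retry with call-clobbered registers accepted when caller saving looks
   profitable.  */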
2165
2166 static int
2167 find_free_reg (class, mode, qtyno, accept_call_clobbered, just_try_suggested,
2168 born_index, dead_index)
2169 enum reg_class class;
2170 enum machine_mode mode;
2171 int qtyno;
2172 int accept_call_clobbered;
2173 int just_try_suggested;
2174 int born_index, dead_index;
2175 {
2176 int i, ins;
2177 HARD_REG_SET first_used, used;
2178 #ifdef ELIMINABLE_REGS
2179 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2180 #endif
2181
2182 /* Validate our parameters. */
2183 if (born_index < 0 || born_index > dead_index)
2184 abort ();
2185
2186 /* Don't let a pseudo live in a reg across a function call
2187 if we might get a nonlocal goto. */
2188 if (current_function_has_nonlocal_label
2189 && qty[qtyno].n_calls_crossed > 0)
2190 return -1;
2191
2192 if (accept_call_clobbered)
2193 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2194 else if (qty[qtyno].n_calls_crossed == 0)
2195 COPY_HARD_REG_SET (used, fixed_reg_set);
2196 else
2197 COPY_HARD_REG_SET (used, call_used_reg_set);
2198
2199 if (accept_call_clobbered)
2200 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2201
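  /* Accumulate every hard reg that is live at any position in the range
     [BORN_INDEX, DEAD_INDEX).  */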
2202 for (ins = born_index; ins < dead_index; ins++)
2203 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2204
2205 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2206
2207 /* Don't use the frame pointer reg in local-alloc even if
2208 we may omit the frame pointer, because if we do that and then we
2209 need a frame pointer, reload won't know how to move the pseudo
2210 to another hard reg. It can move only regs made by global-alloc.
2211
2212 This is true of any register that can be eliminated. */
2213 #ifdef ELIMINABLE_REGS
2214 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2215 SET_HARD_REG_BIT (used, eliminables[i].from);
2216 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2217 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2218 that it might be eliminated into. */
2219 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2220 #endif
2221 #else
2222 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2223 #endif
2224
2225 #ifdef CANNOT_CHANGE_MODE_CLASS
2226 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2227 #endif
2228
2229 /* Normally, the registers that can be used for the first register in
2230 a multi-register quantity are the same as those that can be used for
2231 subsequent registers. However, if just trying suggested registers,
2232 restrict our consideration to them. If there are copy-suggested
2233 registers, try them. Otherwise, try the arithmetic-suggested
2234 registers. */
2235 COPY_HARD_REG_SET (first_used, used);
2236
2237 if (just_try_suggested)
2238 {
2239 if (qty_phys_num_copy_sugg[qtyno] != 0)
2240 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2241 else
2242 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2243 }
2244
2245 /* If all registers are excluded, we can't do anything. */
2246 GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2247
2248 /* If at least one would be suitable, test each hard reg. */
2249
2250 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2251 {
2252 #ifdef REG_ALLOC_ORDER
2253 int regno = reg_alloc_order[i];
2254 #else
2255 int regno = i;
2256 #endif
2257 if (! TEST_HARD_REG_BIT (first_used, regno)
2258 && HARD_REGNO_MODE_OK (regno, mode)
2259 && (qty[qtyno].n_calls_crossed == 0
2260 || accept_call_clobbered
2261 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2262 {
2263 int j;
2264 int size1 = HARD_REGNO_NREGS (regno, mode);
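	  /* Check that the remaining size1 - 1 registers of the block are
	     also free; the loop body is intentionally empty.  */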
2265 for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2266 if (j == size1)
2267 {
2268 /* Mark that this register is in use between its birth and death
2269 insns. */
2270 post_mark_life (regno, mode, 1, born_index, dead_index);
2271 return regno;
2272 }
2273 #ifndef REG_ALLOC_ORDER
2274 /* Skip starting points we know will lose. */
2275 i += j;
2276 #endif
2277 }
2278 }
2279
2280 fail:
2281 /* If we are just trying suggested registers, we have just tried the
2282 copy-suggested registers; if there are also arithmetic-suggested
2283 registers, try them. */
2284
2285 /* Failing that, if it would be profitable to allocate a call-clobbered
2286 register and save and restore it around calls, do that below. */
2287 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2288 && qty_phys_num_sugg[qtyno] != 0)
2289 {
2290 /* Don't try the copy-suggested regs again. */
2291 qty_phys_num_copy_sugg[qtyno] = 0;
2292 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2293 born_index, dead_index);
2294 }
2295
2296 /* We need not check to see if the current function has nonlocal
2297 labels because we don't put any pseudos that are live over calls in
2298 registers in that case. */
2299
2300 if (! accept_call_clobbered
2301 && flag_caller_saves
2302 && ! just_try_suggested
2303 && qty[qtyno].n_calls_crossed != 0
2304 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2305 qty[qtyno].n_calls_crossed))
2306 {
2307 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2308 if (i >= 0)
2309 caller_save_needed = 1;
2310 return i;
2311 }
2312 return -1;
2313 }
2314 \f
2315 /* Mark that REGNO with machine-mode MODE is live starting from the current
2316 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2317 is zero). */
2318
2319 static void
2320 mark_life (regno, mode, life)
2321 int regno;
2322 enum machine_mode mode;
2323 int life;
2324 {
2325 int j = HARD_REGNO_NREGS (regno, mode);
2326 if (life)
2327 while (--j >= 0)
2328 SET_HARD_REG_BIT (regs_live, regno + j);
2329 else
2330 while (--j >= 0)
2331 CLEAR_HARD_REG_BIT (regs_live, regno + j);
2332 }
2333
2334 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2335 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2336 to insn number DEATH (exclusive). */
2337
2338 static void
2339 post_mark_life (regno, mode, life, birth, death)
2340 int regno;
2341 enum machine_mode mode;
2342 int life, birth, death;
2343 {
2344 int j = HARD_REGNO_NREGS (regno, mode);
2345 #ifdef HARD_REG_SET
2346 /* Declare it register if it's a scalar. */
2347 register
2348 #endif
2349 HARD_REG_SET this_reg;
2350
2351 CLEAR_HARD_REG_SET (this_reg);
2352 while (--j >= 0)
2353 SET_HARD_REG_BIT (this_reg, regno + j);
2354
2355 if (life)
2356 while (birth < death)
2357 {
2358 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2359 birth++;
2360 }
2361 else
2362 while (birth < death)
2363 {
2364 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2365 birth++;
2366 }
2367 }
2368 \f
2369 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2370 is the register being clobbered, and R1 is a register being used in
2371 the equivalent expression.
2372
2373 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2374 in which it is used, return 1.
2375
2376 Otherwise, return 0. */
2377
2378 static int
2379 no_conflict_p (insn, r0, r1)
2380 rtx insn, r0 ATTRIBUTE_UNUSED, r1;
2381 {
2382 int ok = 0;
2383 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2384 rtx p, last;
2385
2386 /* If R1 is a hard register, return 0 since we handle this case
2387 when we scan the insns that actually use it. */
2388
2389 if (note == 0
2390 || (GET_CODE (r1) == REG && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2391 || (GET_CODE (r1) == SUBREG && GET_CODE (SUBREG_REG (r1)) == REG
2392 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2393 return 0;
2394
2395 last = XEXP (note, 0);
2396
2397 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2398 if (INSN_P (p))
2399 {
2400 if (find_reg_note (p, REG_DEAD, r1))
2401 ok = 1;
2402
2403 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2404 some earlier optimization pass has inserted instructions into
2405 the sequence, and it is not safe to perform this optimization.
2406 Note that emit_no_conflict_block always ensures that this is
2407 true when these sequences are created. */
2408 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2409 return 0;
2410 }
2411
2412 return ok;
2413 }
2414 \f
2415 /* Return the number of alternatives for which the constraint string P
2416 indicates that the operand must be equal to operand 0 and that no register
2417 is acceptable. */
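/* Given the parsing below, for example, requires_inout ("0") returns 1,
   requires_inout ("0r") returns 0 since a register is allowed, and
   requires_inout ("0,0r") returns 1 because only the first alternative
   qualifies.  */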
2418
2419 static int
2420 requires_inout (p)
2421 const char *p;
2422 {
2423 char c;
2424 int found_zero = 0;
2425 int reg_allowed = 0;
2426 int num_matching_alts = 0;
2427
2428 while ((c = *p++))
2429 switch (c)
2430 {
2431 case '=': case '+': case '?':
2432 case '#': case '&': case '!':
2433 case '*': case '%':
2434 case 'm': case '<': case '>': case 'V': case 'o':
2435 case 'E': case 'F': case 'G': case 'H':
2436 case 's': case 'i': case 'n':
2437 case 'I': case 'J': case 'K': case 'L':
2438 case 'M': case 'N': case 'O': case 'P':
2439 case 'X':
2440 /* These don't say anything we care about. */
2441 break;
2442
2443 case ',':
2444 if (found_zero && ! reg_allowed)
2445 num_matching_alts++;
2446
2447 found_zero = reg_allowed = 0;
2448 break;
2449
2450 case '0':
2451 found_zero = 1;
2452 break;
2453
2454 case '1': case '2': case '3': case '4': case '5':
2455 case '6': case '7': case '8': case '9':
2456 /* Skip the balance of the matching constraint. */
2457 while (ISDIGIT (*p))
2458 p++;
2459 break;
2460
2461 default:
2462 if (REG_CLASS_FROM_LETTER (c) == NO_REGS
2463 && !EXTRA_ADDRESS_CONSTRAINT (c))
2464 break;
2465 /* FALLTHRU */
2466 case 'p':
2467 case 'g': case 'r':
2468 reg_allowed = 1;
2469 break;
2470 }
2471
2472 if (found_zero && ! reg_allowed)
2473 num_matching_alts++;
2474
2475 return num_matching_alts;
2476 }
2477 \f
2478 void
2479 dump_local_alloc (file)
2480 FILE *file;
2481 {
2482 int i;
2483 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2484 if (reg_renumber[i] != -1)
2485 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2486 }