/* IRA processing allocno lives to build allocno live ranges.
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "sparseset.h"
#include "ira-int.h"

/* The code in this file is similar to the code in global.c, but it
   works on the allocno basis and creates live ranges instead of
   pseudo-register conflicts.  */

/* Program points are enumerated by numbers from the range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can change.  In the most general case (there are more
   complicated cases too), some program points correspond to places
   where an input operand dies and others correspond to places where
   output operands are born.  */
int ira_max_point;
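
/* For illustration only: a simple insn such as "r1 = r2 + r3"
   typically contributes two program points, one where its inputs (r2
   and r3) may die and one where its output (r1) is born.  The scan in
   process_bb_node_lives below advances curr_point twice per insn,
   which is why there are roughly twice as many program points as
   insns.  */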

/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
allocno_live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;

/* Number of the current program point.  */
static int curr_point;

/* Point where register pressure excess started or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that there are more allocnos of the given register
   class living at the point than the number of hard registers of the
   class available for allocation.  It is defined only for cover
   classes.  */
static int high_pressure_start_point[N_REG_CLASSES];
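
/* Illustration only, with made-up numbers: if five allocnos of some
   cover class are live at a point but ira_available_class_regs for
   that class is four, the point is inside a pressure excess region;
   the region ends when the pressure drops back to four or below.  */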

/* Allocnos live at current point in the scan.  */
static sparseset allocnos_live;

/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;

/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;

/* Process the birth of register REGNO.  Update the set of living hard
   regs and the conflict hard regs of living allocnos, or start a new
   live range for the allocno corresponding to REGNO if necessary.  */
static void
make_regno_born (int regno)
{
  unsigned int i;
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (ira_allocnos[i]),
                            regno);
          SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (ira_allocnos[i]),
                            regno);
        }
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = ira_create_allocno_live_range (a, curr_point, -1,
                                       ALLOCNO_LIVE_RANGES (a));
}
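
/* Note (descriptive only): if the allocno's most recent live range
   already ends at the current point or at the point just before it,
   make_regno_born does not start a new range; the existing range is
   simply continued and its finish point is updated later by
   make_regno_dead.  */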

/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for allocno A.  */
static void
update_allocno_pressure_excess_length (ira_allocno_t a)
{
  int start;
  enum reg_class cover_class;
  allocno_live_range_t p;

  cover_class = ALLOCNO_COVER_CLASS (a);
  if (high_pressure_start_point[cover_class] < 0)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  start = (high_pressure_start_point[cover_class] > p->start
           ? high_pressure_start_point[cover_class] : p->start);
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
}

/* Process the death of register REGNO.  This updates hard_regs_live
   or finishes the current live range for the allocno corresponding to
   REGNO.  */
static void
make_regno_dead (int regno)
{
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
  update_allocno_pressure_excess_length (a);
}

/* Process the birth and, right after that, the death of register
   REGNO.  */
static void
make_regno_born_and_dead (int regno)
{
  make_regno_born (regno);
  make_regno_dead (regno);
}

/* The current register pressure for each cover class in the current
   basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];

/* Mark allocno A as currently living and update current register
   pressure, maximal register pressure for the current BB, start point
   of the register pressure excess, and conflicting hard registers of
   A.  */
static void
set_allocno_live (ira_allocno_t a)
{
  int nregs;
  enum reg_class cover_class;

  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    return;
  sparseset_set_bit (allocnos_live, ALLOCNO_NUM (a));
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  cover_class = ALLOCNO_COVER_CLASS (a);
  nregs = ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
  curr_reg_pressure[cover_class] += nregs;
  if (high_pressure_start_point[cover_class] < 0
      && (curr_reg_pressure[cover_class]
          > ira_available_class_regs[cover_class]))
    high_pressure_start_point[cover_class] = curr_point;
  if (curr_bb_node->reg_pressure[cover_class]
      < curr_reg_pressure[cover_class])
    curr_bb_node->reg_pressure[cover_class] = curr_reg_pressure[cover_class];
}
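
/* Note (descriptive only): an allocno contributes ira_reg_class_nregs
   for its mode to the pressure of its cover class, i.e. the number of
   hard registers needed to hold one value of that mode, not simply 1;
   clear_allocno_live below subtracts the same amount.  */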

/* Mark allocno A as currently not living and update current register
   pressure, start point of the register pressure excess, and register
   pressure excess length for living allocnos.  */
static void
clear_allocno_live (ira_allocno_t a)
{
  unsigned int i;
  enum reg_class cover_class;

  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      curr_reg_pressure[cover_class]
        -= ira_reg_class_nregs[cover_class][ALLOCNO_MODE (a)];
      ira_assert (curr_reg_pressure[cover_class] >= 0);
      if (high_pressure_start_point[cover_class] >= 0
          && (curr_reg_pressure[cover_class]
              <= ira_available_class_regs[cover_class]))
        {
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
            {
              update_allocno_pressure_excess_length (ira_allocnos[i]);
            }
          high_pressure_start_point[cover_class] = -1;
        }
    }
  sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (a));
}

/* Record all regs that are set in any one insn.  Communication from
   mark_reg_{store,clobber} to the insn scan in
   process_bb_node_lives.  */
static VEC(rtx, heap) *regs_set;

/* Handle the case where REG is set by the insn being scanned, during
   the scan to build live ranges and calculate reg pressure info.
   Store a 1 in hard_regs_live or allocnos_live for this register or
   the corresponding allocno, and record how many consecutive hardware
   registers it actually needs.

   Note that even if REG does not remain alive after this insn, we
   must mark it here as live, to ensure a conflict between REG and any
   other reg allocnos set in this insn that really do live.  This is
   because those other allocnos could be considered after this.

   REG might actually be something other than a register; if so, we do
   nothing.

   SETTER is 0 if this register was modified by an auto-increment
   (i.e., a REG_INC note was found for it).  */
static void
mark_reg_store (rtx reg, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  VEC_safe_push (rtx, heap, regs_set, reg);

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            return;
          set_allocno_live (a);
        }
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
            {
              cover_class = ira_class_translate[REGNO_REG_CLASS (regno)];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]++;
                  if (high_pressure_start_point[cover_class] < 0
                      && (curr_reg_pressure[cover_class]
                          > ira_available_class_regs[cover_class]))
                    high_pressure_start_point[cover_class] = curr_point;
                }
              make_regno_born (regno);
              if (cover_class != NO_REGS
                  && (curr_bb_node->reg_pressure[cover_class]
                      < curr_reg_pressure[cover_class]))
                curr_bb_node->reg_pressure[cover_class]
                  = curr_reg_pressure[cover_class];
            }
          regno++;
        }
    }
}

/* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
static void
mark_reg_clobber (rtx reg, const_rtx setter, void *data)
{
  if (GET_CODE (setter) == CLOBBER)
    mark_reg_store (reg, setter, data);
}

/* If REG is a hard register, record that it conflicts with all the
   allocnos currently live; otherwise record that the corresponding
   allocno lives at just the current program point.  Do not mark REG
   (or the allocno) itself as live.  */
static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    make_regno_born_and_dead (regno);
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];

      while (regno < last)
        {
          make_regno_born_and_dead (regno);
          regno++;
        }
    }
}

/* Mark REG (or the corresponding allocno) as being dead (following
   the insn being scanned now).  Store a 0 in hard_regs_live or
   allocnos_live for the register.  */
static void
mark_reg_death (rtx reg)
{
  unsigned int i;
  int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
        {
          if (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
            return;
          clear_allocno_live (a);
        }
      make_regno_dead (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class;

      while (regno < last)
        {
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
            {
              cover_class = ira_class_translate[REGNO_REG_CLASS (regno)];
              if (cover_class != NO_REGS)
                {
                  curr_reg_pressure[cover_class]--;
                  if (high_pressure_start_point[cover_class] >= 0
                      && (curr_reg_pressure[cover_class]
                          <= ira_available_class_regs[cover_class]))
                    {
                      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                        {
                          update_allocno_pressure_excess_length
                            (ira_allocnos[i]);
                        }
                      high_pressure_start_point[cover_class] = -1;
                    }
                  ira_assert (curr_reg_pressure[cover_class] >= 0);
                }
              make_regno_dead (regno);
            }
          regno++;
        }
    }
}

/* Check whether CONSTRAINTS permits the use of only one hard
   register.  If so, return the class of that hard register; otherwise
   return NO_REGS.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  for (ignore_p = false;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = true;
    else if (c == ',')
      ignore_p = false;
    else if (! ignore_p)
      switch (c)
        {
        case ' ':
        case '\t':
        case '=':
        case '+':
        case '*':
        case '&':
        case '%':
        case '!':
        case '?':
          break;
        case 'i':
          if (CONSTANT_P (op)
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
            return NO_REGS;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_INT
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
                          && GET_MODE (equiv_const) == VOIDmode))))
            return NO_REGS;
          break;

        case 's':
          if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
              || (equiv_const != NULL_RTX
                  && CONSTANT_P (equiv_const)
                  && GET_CODE (equiv_const) != CONST_INT
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
                      || GET_MODE (equiv_const) != VOIDmode)))
            return NO_REGS;
          break;

        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
          if ((GET_CODE (op) == CONST_INT
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_INT
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
                                                c, constraints)))
            return NO_REGS;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
              || (equiv_const != NULL_RTX
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
                      || (GET_CODE (equiv_const) == CONST_VECTOR
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
                              == MODE_VECTOR_FLOAT)))))
            return NO_REGS;
          break;

        case 'G':
        case 'H':
          if ((GET_CODE (op) == CONST_DOUBLE
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
              || (equiv_const != NULL_RTX
                  && GET_CODE (equiv_const) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
                                                       c, constraints)))
            return NO_REGS;
          /* ??? what about memory */
        case 'r':
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
        case 'h': case 'j': case 'k': case 'l':
        case 'q': case 't': case 'u':
        case 'v': case 'w': case 'x': case 'y': case 'z':
        case 'A': case 'B': case 'C': case 'D':
        case 'Q': case 'R': case 'S': case 'T': case 'U':
        case 'W': case 'Y': case 'Z':
          next_cl = (c == 'r'
                     ? GENERAL_REGS
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
          if ((cl != NO_REGS && next_cl != cl)
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          next_cl
            = single_reg_class (recog_data.constraints[c - '0'],
                                recog_data.operand[c - '0'], NULL_RTX);
          if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
              || ira_available_class_regs[next_cl] > 1)
            return NO_REGS;
          cl = next_cl;
          break;

        default:
          return NO_REGS;
        }
  return cl;
}
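
/* Example for illustration only (assuming an i386-like target where
   constraint 'a' maps to a class containing the single allocatable
   register %eax): single_reg_class ("=a", op, NULL_RTX) skips the '='
   modifier, sees that the class for 'a' has only one available hard
   register, and returns that class; a constraint like "=r" would
   instead return NO_REGS because GENERAL_REGS has more than one
   available register.  */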

/* Check whether operand OP_NUM of the current insn can use only one
   hard register.  If so, return the class of that hard register;
   otherwise return NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
                           recog_data.operand[op_num], NULL_RTX);
}

/* Process the input operands (if IN_P) or the output operands
   (otherwise) of the current insn with frequency FREQ, looking for
   operands whose allocno can use only one hard register, and make the
   other currently living allocnos conflict with that hard
   register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno, cost;
  unsigned int px;
  enum reg_class cl, cover_class;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
          && recog_data.operand_type[i] != OP_INOUT)
        continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
        continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
        operand = SUBREG_REG (operand);

      if (REG_P (operand)
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
        {
          enum machine_mode mode;
          enum reg_class cover_class;

          operand_a = ira_curr_regno_allocno_map[regno];
          mode = ALLOCNO_MODE (operand_a);
          cover_class = ALLOCNO_COVER_CLASS (operand_a);
          if (ira_class_subset_p[cl][cover_class]
              && ira_class_hard_regs_num[cl] != 0
              && (ira_class_hard_reg_index[cover_class]
                  [ira_class_hard_regs[cl][0]]) >= 0
              && reg_class_size[cl] <= (unsigned) CLASS_MAX_NREGS (cl, mode))
            {
              /* ??? FREQ */
              cost = freq * (in_p
                             ? ira_register_move_cost[mode][cover_class][cl]
                             : ira_register_move_cost[mode][cl][cover_class]);
              ira_allocate_and_set_costs
                (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), cover_class, 0);
              ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
                [ira_class_hard_reg_index
                 [cover_class][ira_class_hard_regs[cl][0]]]
                -= cost;
            }
        }

      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
        {
          a = ira_allocnos[px];
          cover_class = ALLOCNO_COVER_CLASS (a);
          if (a != operand_a)
            {
              /* We could increase costs of A instead of making it
                 conflict with the hard register.  But it works worse
                 because it will be spilled by reload anyway.  */
              IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
              IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
                                reg_class_contents[cl]);
            }
        }
    }
}
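
/* Note (descriptive only): process_bb_node_lives below calls this
   function twice per insn, once with IN_P true right after
   extract_insn, before the input operands die, and once with IN_P
   false after the stores of the insn have been processed.  */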

/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos for the
   basic block and for the regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i;
  unsigned int j;
  basic_block bb;
  rtx insn;
  edge e;
  edge_iterator ei;
  bitmap_iterator bi;
  bitmap reg_live_in;
  unsigned int px;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < ira_reg_class_cover_size; i++)
        {
          curr_reg_pressure[ira_reg_class_cover[i]] = 0;
          high_pressure_start_point[ira_reg_class_cover[i]] = -1;
        }
      curr_bb_node = loop_tree_node;
      reg_live_in = DF_LR_IN (bb);
      sparseset_clear (allocnos_live);
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_in);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
          {
            enum reg_class cover_class;

            cover_class = REGNO_REG_CLASS (i);
            if (cover_class == NO_REGS)
              continue;
            cover_class = ira_class_translate[cover_class];
            curr_reg_pressure[cover_class]++;
            if (curr_bb_node->reg_pressure[cover_class]
                < curr_reg_pressure[cover_class])
              curr_bb_node->reg_pressure[cover_class]
                = curr_reg_pressure[cover_class];
            ira_assert (curr_reg_pressure[cover_class]
                        <= ira_available_class_regs[cover_class]);
          }
      EXECUTE_IF_SET_IN_BITMAP (reg_live_in, FIRST_PSEUDO_REGISTER, j, bi)
        {
          ira_allocno_t a = ira_curr_regno_allocno_map[j];

          if (a == NULL)
            continue;
          ira_assert (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)));
          set_allocno_live (a);
          make_regno_born (j);
        }

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
        {
          for (j = 0; ; ++j)
            {
              unsigned int regno = EH_RETURN_DATA_REGNO (j);

              if (regno == INVALID_REGNUM)
                break;
              make_regno_born_and_dead (regno);
            }
        }
#endif

      /* Allocnos can't go in stack regs at the start of a basic block
         that is reached by an abnormal edge.  Likewise for call
         clobbered regs, because caller-save, fixup_abnormal_edges and
         possibly the table driven EH machinery are not quite ready to
         handle such allocnos live across such edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          break;

      if (e != NULL)
        {
#ifdef STACK_REGS
          EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
            {
              ALLOCNO_NO_STACK_REG_P (ira_allocnos[px]) = true;
              ALLOCNO_TOTAL_NO_STACK_REG_P (ira_allocnos[px]) = true;
            }
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
            make_regno_born_and_dead (px);
#endif
          /* No need to record conflicts for call clobbered regs if we
             have nonlocal labels around, as we don't ever try to
             allocate such regs in this case.  */
          if (!cfun->has_nonlocal_label)
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
              if (call_used_regs[px])
                make_regno_born_and_dead (px);
        }

      /* Scan the code of this basic block, noting which allocnos and
         hard regs are born or die.  */
      FOR_BB_INSNS (bb, insn)
        {
          rtx link;
          int freq;

          if (! INSN_P (insn))
            continue;

          freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
          if (freq == 0)
            freq = 1;

          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
            fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n",
                     INSN_UID (insn), loop_tree_node->parent->loop->num,
                     curr_point);

          /* Check regs_set is an empty set.  */
          gcc_assert (VEC_empty (rtx, regs_set));

          /* Mark any allocnos clobbered by INSN as live, so they
             conflict with the inputs.  */
          note_stores (PATTERN (insn), mark_reg_clobber, NULL);

          extract_insn (insn);
          process_single_reg_class_operands (true, freq);

          /* Mark any allocnos dead after INSN as dead now.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_DEAD)
              mark_reg_death (XEXP (link, 0));

          curr_point++;

          if (CALL_P (insn))
            {
              EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
                {
                  ira_allocno_t a = ira_allocnos[i];

                  ALLOCNO_CALL_FREQ (a) += freq;
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
                  /* Don't allocate allocnos that cross calls, if this
                     function receives a nonlocal goto.  */
                  if (cfun->has_nonlocal_label)
                    {
                      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
                      SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
                    }
                }
            }

          /* Mark any allocnos set in INSN as live.  Clobbers are
             processed again, so they will conflict with the reg
             allocnos that are set.  */
          note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) == REG_INC)
              mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

          /* If INSN has multiple outputs, then any allocno that dies
             here and is used inside of an output must conflict with
             the other outputs.

             It is unsafe to use !single_set here since it will ignore
             an unused output.  Just because an output is unused does
             not mean the compiler can assume the side effect will not
             occur.  Consider if ALLOCNO appears in the address of an
             output and we reload the output.  If we allocate ALLOCNO
             to the same hard register as an unused output we could
             set the hard register before the output reload insn.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
            for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_DEAD)
                {
                  int i;
                  int used_in_output = 0;
                  rtx reg = XEXP (link, 0);

                  for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
                    {
                      rtx set = XVECEXP (PATTERN (insn), 0, i);

                      if (GET_CODE (set) == SET
                          && ! REG_P (SET_DEST (set))
                          && ! rtx_equal_p (reg, SET_DEST (set))
                          && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                        used_in_output = 1;
                    }
                  if (used_in_output)
                    mark_reg_conflicts (reg);
                }

          process_single_reg_class_operands (false, freq);

          /* Mark any allocnos set in INSN and then never used.  */
          while (! VEC_empty (rtx, regs_set))
            {
              rtx reg = VEC_pop (rtx, regs_set);
              rtx note = find_regno_note (insn, REG_UNUSED, REGNO (reg));

              if (note)
                mark_reg_death (XEXP (note, 0));
            }
          curr_point++;
        }
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
        {
          make_regno_dead (ALLOCNO_REGNO (ira_allocnos[i]));
        }

      curr_point++;

    }
  /* Propagate register pressure to upper loop tree nodes: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_reg_class_cover_size; i++)
      {
        enum reg_class cover_class;

        cover_class = ira_reg_class_cover[i];
        if (loop_tree_node->reg_pressure[cover_class]
            > loop_tree_node->parent->reg_pressure[cover_class])
          loop_tree_node->parent->reg_pressure[cover_class]
            = loop_tree_node->reg_pressure[cover_class];
      }
}

/* Create and set up IRA_START_POINT_RANGES and
   IRA_FINISH_POINT_RANGES.  */
static void
create_start_finish_chains (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  allocno_live_range_t r;

  ira_start_point_ranges
    = (allocno_live_range_t *) ira_allocate (ira_max_point
                                             * sizeof (allocno_live_range_t));
  memset (ira_start_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  ira_finish_point_ranges
    = (allocno_live_range_t *) ira_allocate (ira_max_point
                                             * sizeof (allocno_live_range_t));
  memset (ira_finish_point_ranges, 0,
          ira_max_point * sizeof (allocno_live_range_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
        {
          r->start_next = ira_start_point_ranges[r->start];
          ira_start_point_ranges[r->start] = r;
          r->finish_next = ira_finish_point_ranges[r->finish];
          ira_finish_point_ranges[r->finish] = r;
        }
    }
}

/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
void
ira_rebuild_start_finish_chains (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
  create_start_finish_chains ();
}

/* Print live ranges R to file F.  */
void
ira_print_live_range_list (FILE *f, allocno_live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

/* Print live ranges R to stderr.  */
void
ira_debug_live_range_list (allocno_live_range_t r)
{
  ira_print_live_range_list (stderr, r);
}

/* Print live ranges of allocno A to file F.  */
static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
{
  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  ira_print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
}

/* Print live ranges of allocno A to stderr.  */
void
ira_debug_allocno_live_ranges (ira_allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

/* Print live ranges of all allocnos to file F.  */
static void
print_live_ranges (FILE *f)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    print_allocno_live_ranges (f, a);
}

/* Print live ranges of all allocnos to stderr.  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}

/* The main entry function creates live ranges, sets up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for allocnos, and
   calculates register pressure info.  */
void
ira_create_allocno_live_ranges (void)
{
  allocnos_live = sparseset_alloc (ira_allocnos_num);
  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  if (!regs_set)
    regs_set = VEC_alloc (rtx, heap, 10);
  curr_point = 0;
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
                          process_bb_node_lives);
  ira_max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  sparseset_free (allocnos_live);
}

/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
void
ira_finish_allocno_live_ranges (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
}