]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ira-build.c
i386.md: Check cmp/branch fuse for cmp peephole optimization.
[thirdparty/gcc.git] / gcc / ira-build.c
CommitLineData
058e97ec
VM
1/* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "target.h"
29#include "regs.h"
30#include "flags.h"
31#include "hard-reg-set.h"
32#include "basic-block.h"
33#include "insn-config.h"
34#include "recog.h"
35#include "toplev.h"
36#include "params.h"
37#include "df.h"
38#include "output.h"
39#include "reload.h"
40#include "sparseset.h"
41#include "ira-int.h"
42
static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
				     ira_loop_tree_node_t);

/* The root of the loop tree corresponding to the whole function.  */
ira_loop_tree_node_t ira_loop_tree_root;

/* Height of the loop tree.  */
int ira_loop_tree_height;

/* All nodes representing basic blocks are referred through the
   following array.  We can not use basic block member `aux' for this
   because it is used for insertion of insns on edges.  */
ira_loop_tree_node_t ira_bb_nodes;

/* All nodes representing loops are referred through the following
   array.  */
ira_loop_tree_node_t ira_loop_nodes;

/* Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  */
ira_allocno_t *ira_regno_allocno_map;

/* Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  */
ira_allocno_t *ira_allocnos;

/* Size of the previous array.  */
int ira_allocnos_num;

/* Map conflict id -> allocno with given conflict id (see comments for
   allocno member `conflict_id').  */
ira_allocno_t *ira_conflict_id_allocno_map;

/* Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  */
ira_copy_t *ira_copies;

/* Size of the previous array.  */
int ira_copies_num;
84
85\f
86
/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;

/* The following function allocates the loop tree nodes.  If LOOPS_P
   is FALSE, the nodes corresponding to the loops (except the root
   which corresponds to the whole function) will be not allocated but
   nodes will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (bool loops_p)
{
  unsigned int i, j;
  int max_regno;
  bool skip_p;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  loop_p loop;

  /* Allocate and clear one node per basic block.  The bitmaps of
     bb nodes are left NULL here; only loop nodes get bitmaps.  */
  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
  last_basic_block_before_change = last_basic_block;
  for (i = 0; i < (unsigned int) last_basic_block; i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
	      sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  ira_loop_nodes = ((struct ira_loop_tree_node *)
		    ira_allocate (sizeof (struct ira_loop_tree_node)
				  * VEC_length (loop_p, ira_loops.larray)));
  max_regno = max_reg_num ();
  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    {
      if (loop != ira_loops.tree_root)
	{
	  ira_loop_nodes[i].regno_allocno_map = NULL;
	  if (! loops_p)
	    continue;
	  /* Skip loops whose header has an incoming abnormal critical
	     edge (other than the latch edge): we could not split such
	     an edge to insert fix-up code on the region border.  */
	  skip_p = false;
	  FOR_EACH_EDGE (e, ei, loop->header->preds)
	    if (e->src != loop->latch
		&& (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  if (skip_p)
	    continue;
	  /* Likewise skip loops with an abnormal critical exit
	     edge.  */
	  edges = get_loop_exit_edges (loop);
	  for (j = 0; VEC_iterate (edge, edges, j, e); j++)
	    if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  VEC_free (edge, heap, edges);
	  if (skip_p)
	    continue;
	}
      /* A non-NULL regno_allocno_map marks the node as forming an
	 allocation region.  */
      ira_loop_nodes[i].regno_allocno_map
	= (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
      memset (ira_loop_nodes[i].regno_allocno_map, 0,
	      sizeof (ira_allocno_t) * max_regno);
      memset (ira_loop_nodes[i].reg_pressure, 0,
	      sizeof (ira_loop_nodes[i].reg_pressure));
      ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
    }
}
165
/* The function returns TRUE if there is more than one allocation
   region.  */
static bool
more_one_region_p (void)
{
  unsigned int i;
  loop_p loop;

  /* Any non-root loop node that got a regno_allocno_map (see
     create_loop_tree_nodes) is a separate allocation region.  */
  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    if (ira_loop_nodes[i].regno_allocno_map != NULL
	&& ira_loop_tree_root != &ira_loop_nodes[i])
      return true;
  return false;
}
180
/* Free the loop tree node of a loop.  A NULL regno_allocno_map means
   the node never formed a region and owns nothing.  */
static void
finish_loop_tree_node (ira_loop_tree_node_t loop)
{
  if (loop->regno_allocno_map != NULL)
    {
      /* Only loop (not bb) nodes own these resources.  */
      ira_assert (loop->bb == NULL);
      ira_free_bitmap (loop->local_copies);
      ira_free_bitmap (loop->border_allocnos);
      ira_free_bitmap (loop->modified_regnos);
      ira_free_bitmap (loop->all_allocnos);
      ira_free (loop->regno_allocno_map);
      loop->regno_allocno_map = NULL;
    }
}
196
/* Free the loop tree nodes (both the loop node array and the basic
   block node array with everything they own).  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;
  loop_p loop;

  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  /* Use the bb count recorded at allocation time: live range
     splitting may have created new basic blocks since then.  */
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
	ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
	ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
222
223\f
224
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  */
static void
add_loop_to_tree (struct loop *loop)
{
  struct loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We can not use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  /* Add ancestors first so the parent node is linked before its
     children.  */
  if (loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
      && ira_loop_nodes[loop->num].children == NULL)
    {
      /* We have not added loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop->num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      /* Find the nearest ancestor loop that forms a region (some
	 loops may have been skipped in create_loop_tree_nodes).  */
      for (parent = loop_outer (loop);
	   parent != NULL;
	   parent = loop_outer (parent))
	if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	  break;
      if (parent == NULL)
	{
	  loop_node->next = NULL;
	  loop_node->subloop_next = NULL;
	  loop_node->parent = NULL;
	}
      else
	{
	  /* Prepend to both the general children chain and the
	     loop-only subloops chain of the parent.  */
	  parent_node = &ira_loop_nodes[parent->num];
	  loop_node->next = parent_node->children;
	  parent_node->children = loop_node;
	  loop_node->subloop_next = parent_node->subloops;
	  parent_node->subloops = loop_node;
	  loop_node->parent = parent_node;
	}
    }
}
267
268/* The following recursive function sets up levels of nodes of the
269 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
270 The function returns maximal value of level in the tree + 1. */
271static int
272setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
273{
274 int height, max_height;
275 ira_loop_tree_node_t subloop_node;
276
277 ira_assert (loop_node->bb == NULL);
278 loop_node->level = level;
279 max_height = level + 1;
280 for (subloop_node = loop_node->subloops;
281 subloop_node != NULL;
282 subloop_node = subloop_node->subloop_next)
283 {
284 ira_assert (subloop_node->bb == NULL);
285 height = setup_loop_tree_level (subloop_node, level + 1);
286 if (height > max_height)
287 max_height = height;
288 }
289 return max_height;
290}
291
/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  unsigned int i;
  basic_block bb;
  struct loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;
  loop_p loop;

  /* We can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  /* Reset the child chains of all region-forming loop nodes before
     rebuilding them below.  */
  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
	ira_loop_nodes[i].children = NULL;
	ira_loop_nodes[i].subloops = NULL;
      }
  FOR_EACH_BB (bb)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      /* Attach the bb to the nearest enclosing loop that forms a
	 region (skipped loops have a NULL regno_allocno_map).  */
      for (parent = bb->loop_father;
	   parent != NULL;
	   parent = loop_outer (parent))
	if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	  break;
      add_loop_to_tree (parent);
      loop_node = &ira_loop_nodes[parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
337
338\f
339
/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  Needed when the number of pseudos has grown (the maps
   are indexed by regno and must be reallocated to the new size).  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  max_regno = max_reg_num ();
  /* Reallocate per-loop maps with the new regno bound.  */
  for (l = 0; VEC_iterate (loop_p, ira_loops.larray, l, loop); l++)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
	ira_free (ira_loop_nodes[l].regno_allocno_map);
	ira_loop_nodes[l].regno_allocno_map
	  = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
					    * max_regno);
	memset (ira_loop_nodes[l].regno_allocno_map, 0,
		sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  /* Re-thread every allocno onto the per-regno chain and register it
     in its loop node's map.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps.  */
	continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle.  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
}
382
383\f
384
/* Pools for allocnos and allocno live ranges.  */
static alloc_pool allocno_pool, allocno_live_range_pool;

/* Vec containing references to all created allocnos.  It is a
   container of array ira_allocnos.  */
static VEC(ira_allocno_t,heap) *allocno_vec;

/* Vec containing references to all created allocnos.  It is a
   container of ira_conflict_id_allocno_map.  */
static VEC(ira_allocno_t,heap) *ira_conflict_id_allocno_map_vec;
395
/* Initialize data concerning allocnos: the allocation pools, the
   container vectors, and the regno -> allocno map.  */
static void
initiate_allocnos (void)
{
  allocno_live_range_pool
    = create_alloc_pool ("allocno live ranges",
			 sizeof (struct ira_allocno_live_range), 100);
  allocno_pool
    = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
  /* Reserve twice the pseudo count as an initial size guess.  */
  allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
  ira_allocnos = NULL;
  ira_allocnos_num = 0;
  ira_conflict_id_allocno_map_vec
    = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
  ira_conflict_id_allocno_map = NULL;
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
}
415
/* Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  */
ira_allocno_t
ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
{
  ira_allocno_t a;

  a = (ira_allocno_t) pool_alloc (allocno_pool);
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      /* Thread the new allocno onto the per-regno chain and register
	 it in the loop node's regno map.  */
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle on region borders (see
	   ira-emit.c).  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
  /* Initialize all remaining members to their neutral values.  */
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = ira_allocnos_num;
  bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
  ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = NULL;
  ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
  /* Start out conflicting with everything not available for
     allocation.  */
  COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
  COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
  ALLOCNO_NREFS (a) = 0;
  ALLOCNO_FREQ (a) = 0;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = false;
  ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
#endif
  ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
  ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
  ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
  ALLOCNO_CHILD_RENAMED_P (a) = false;
  ALLOCNO_DONT_REASSIGN_P (a) = false;
  ALLOCNO_IN_GRAPH_P (a) = false;
  ALLOCNO_ASSIGNED_P (a) = false;
  ALLOCNO_MAY_BE_SPILLED_P (a) = false;
  ALLOCNO_SPLAY_REMOVED_P (a) = false;
  ALLOCNO_CONFLICT_VEC_P (a) = false;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_LEFT_CONFLICTS_NUM (a) = -1;
  ALLOCNO_COVER_CLASS (a) = NO_REGS;
  ALLOCNO_COVER_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
  ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
  ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
  /* Each allocno starts as its own singleton coalesce group.  */
  ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
  ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
  ALLOCNO_LIVE_RANGES (a) = NULL;
  /* MIN > MAX marks an empty conflict id range.  */
  ALLOCNO_MIN (a) = INT_MAX;
  ALLOCNO_MAX (a) = -1;
  ALLOCNO_CONFLICT_ID (a) = ira_allocnos_num;
  /* Register the allocno in both containers and refresh the exported
     array views.  */
  VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
  ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
  ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
  VEC_safe_push (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec, a);
  ira_conflict_id_allocno_map
    = VEC_address (ira_allocno_t, ira_conflict_id_allocno_map_vec);
  return a;
}
492
/* Set up cover class for A and update its conflict hard registers:
   every hard register outside COVER_CLASS becomes a conflict.  */
void
ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
{
  ALLOCNO_COVER_CLASS (a) = cover_class;
  IOR_COMPL_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
			  reg_class_contents[cover_class]);
  IOR_COMPL_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
			  reg_class_contents[cover_class]);
}
503
504/* Return TRUE if the conflict vector with NUM elements is more
505 profitable than conflict bit vector for A. */
506bool
507ira_conflict_vector_profitable_p (ira_allocno_t a, int num)
508{
509 int nw;
510
511 if (ALLOCNO_MAX (a) < ALLOCNO_MIN (a))
512 /* We prefer bit vector in such case because it does not result in
513 allocation. */
514 return false;
515
516 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS) / IRA_INT_BITS;
517 return (2 * sizeof (ira_allocno_t) * (num + 1)
518 < 3 * nw * sizeof (IRA_INT_TYPE));
519}
520
521/* Allocates and initialize the conflict vector of A for NUM
522 conflicting allocnos. */
523void
524ira_allocate_allocno_conflict_vec (ira_allocno_t a, int num)
525{
526 int size;
527 ira_allocno_t *vec;
528
529 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
530 num++; /* for NULL end marker */
531 size = sizeof (ira_allocno_t) * num;
532 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
533 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
534 vec[0] = NULL;
535 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
536 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
537 ALLOCNO_CONFLICT_VEC_P (a) = true;
538}
539
/* Allocate and initialize the conflict bit vector of A.  The vector
   covers conflict ids in the range [ALLOCNO_MIN (a),
   ALLOCNO_MAX (a)].  */
static void
allocate_allocno_conflict_bit_vec (ira_allocno_t a)
{
  unsigned int size;

  ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
  /* Round the id range up to whole IRA_INT_TYPE words.  */
  size = ((ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS)
	  / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
  ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
  memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0, size);
  ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
  ALLOCNO_CONFLICT_VEC_P (a) = false;
}
554
555/* Allocate and initialize the conflict vector or conflict bit vector
556 of A for NUM conflicting allocnos whatever is more profitable. */
557void
558ira_allocate_allocno_conflicts (ira_allocno_t a, int num)
559{
560 if (ira_conflict_vector_profitable_p (a, num))
561 ira_allocate_allocno_conflict_vec (a, num);
562 else
563 allocate_allocno_conflict_bit_vec (a);
564}
565
/* Add A2 to the conflicts of A1, growing A1's conflict representation
   (pointer vector or bit vector) as needed.  */
static void
add_to_allocno_conflicts (ira_allocno_t a1, ira_allocno_t a2)
{
  int num;
  unsigned int size;

  if (ALLOCNO_CONFLICT_VEC_P (a1))
    {
      ira_allocno_t *vec;

      /* NUM is the element count after adding A2, plus one slot for
	 the NULL end marker.  */
      num = ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1) + 2;
      if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1)
	  >= num * sizeof (ira_allocno_t))
	vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
      else
	{
	  /* Grow by 1.5x to amortize repeated insertions.  */
	  size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
	  vec = (ira_allocno_t *) ira_allocate (size);
	  memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
		  sizeof (ira_allocno_t) * ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1));
	  ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
	  ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
	  ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
	}
      vec[num - 2] = a2;
      vec[num - 1] = NULL;
      ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1)++;
    }
  else
    {
      int nw, added_head_nw, id;
      IRA_INT_TYPE *vec;

      id = ALLOCNO_CONFLICT_ID (a2);
      vec = (IRA_INT_TYPE *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
      if (ALLOCNO_MIN (a1) > id)
	{
	  /* Expand head of the bit vector.  */
	  added_head_nw = (ALLOCNO_MIN (a1) - id - 1) / IRA_INT_BITS + 1;
	  nw = (ALLOCNO_MAX (a1) - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
	  size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
	  if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) >= size)
	    {
	      /* Enough spare room: shift the existing words up and
		 zero the new head in place.  */
	      memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
		       vec, nw * sizeof (IRA_INT_TYPE));
	      memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
	    }
	  else
	    {
	      /* Reallocate with 1.5x slack, copying the old words
		 after the new zeroed head.  */
	      size
		= (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
	      vec = (IRA_INT_TYPE *) ira_allocate (size);
	      memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
		      ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
		      nw * sizeof (IRA_INT_TYPE));
	      memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
	      memset ((char *) vec
		      + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
		      0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
	      ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
	      ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
	      ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
	    }
	  /* MIN moves down by whole words so existing bit positions
	     stay valid after the shift.  */
	  ALLOCNO_MIN (a1) -= added_head_nw * IRA_INT_BITS;
	}
      else if (ALLOCNO_MAX (a1) < id)
	{
	  nw = (id - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
	  size = nw * sizeof (IRA_INT_TYPE);
	  if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) < size)
	    {
	      /* Expand tail of the bit vector.  */
	      size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
	      vec = (IRA_INT_TYPE *) ira_allocate (size);
	      memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
		      ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
	      memset ((char *) vec + ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1),
		      0, size - ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
	      ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
	      ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
	      ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
	    }
	  ALLOCNO_MAX (a1) = id;
	}
      SET_ALLOCNO_SET_BIT (vec, id, ALLOCNO_MIN (a1), ALLOCNO_MAX (a1));
    }
}
654
/* Add A1 to the conflicts of A2 and vice versa (conflicts are
   symmetric, so both directions are recorded).  */
void
ira_add_allocno_conflict (ira_allocno_t a1, ira_allocno_t a2)
{
  add_to_allocno_conflicts (a1, a2);
  add_to_allocno_conflicts (a2, a1);
}
662
/* Clear all conflicts of allocno A without freeing the underlying
   storage.  */
static void
clear_allocno_conflicts (ira_allocno_t a)
{
  if (ALLOCNO_CONFLICT_VEC_P (a))
    {
      /* Pointer vector: reset the count and the NULL end marker.  */
      ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
      ((ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a))[0] = NULL;
    }
  else if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) != 0)
    {
      int nw;

      /* Bit vector: zero the words covering the id range.  */
      nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a)) / IRA_INT_BITS + 1;
      memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0,
	      nw * sizeof (IRA_INT_TYPE));
    }
}
681
/* The array used to find duplications in conflict vectors of
   allocnos (indexed by allocno number; holds the tick at which the
   allocno was last seen).  */
static int *allocno_conflict_check;

/* The value used to mark allocno presence in conflict vector of
   the current allocno.  */
static int curr_allocno_conflict_check_tick;
689
/* Remove duplications in conflict vector of A.  Uses the tick scheme
   of allocno_conflict_check so the scratch array need not be cleared
   between allocnos.  */
static void
compress_allocno_conflict_vec (ira_allocno_t a)
{
  ira_allocno_t *vec, conflict_a;
  int i, j;

  ira_assert (ALLOCNO_CONFLICT_VEC_P (a));
  vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
  /* A fresh tick invalidates all marks from previous allocnos.  */
  curr_allocno_conflict_check_tick++;
  /* Compact in place, keeping only the first occurrence of each
     conflicting allocno.  */
  for (i = j = 0; (conflict_a = vec[i]) != NULL; i++)
    {
      if (allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
	  != curr_allocno_conflict_check_tick)
	{
	  allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
	    = curr_allocno_conflict_check_tick;
	  vec[j++] = conflict_a;
	}
    }
  ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = j;
  vec[j] = NULL;
}
713
714/* Remove duplications in conflict vectors of all allocnos. */
715static void
716compress_conflict_vecs (void)
717{
718 ira_allocno_t a;
719 ira_allocno_iterator ai;
720
721 allocno_conflict_check
722 = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
723 memset (allocno_conflict_check, 0, sizeof (int) * ira_allocnos_num);
724 curr_allocno_conflict_check_tick = 0;
725 FOR_EACH_ALLOCNO (a, ai)
726 if (ALLOCNO_CONFLICT_VEC_P (a))
727 compress_allocno_conflict_vec (a);
728 ira_free (allocno_conflict_check);
729}
730
/* This recursive function outputs allocno A to IRA_DUMP_FILE and, if
   it is a cap, the function outputs its members.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  /* Show which basic block or loop the allocno belongs to.  */
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
750
/* Create and return the cap representing allocno A in the
   parent loop.  The cap inherits A's mode, cover class, costs and
   conflict hard registers.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class cover_class;

  /* Caps are only created for allocnos that are not coalesced with
     anything.  */
  ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
	      && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  cover_class = ALLOCNO_COVER_CLASS (a);
  ira_set_allocno_cover_class (cap, cover_class);
  ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
  /* Link cap and member in both directions.  */
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ALLOCNO_UPDATED_MEMORY_COST (cap) = ALLOCNO_UPDATED_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (cap),
		    ALLOCNO_CONFLICT_HARD_REGS (a));
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (cap),
		    ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (cap) = ALLOCNO_NO_STACK_REG_P (a);
  ALLOCNO_TOTAL_NO_STACK_REG_P (cap) = ALLOCNO_TOTAL_NO_STACK_REG_P (a);
#endif
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "    Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
798
799/* Create and return allocno live range with given attributes. */
800allocno_live_range_t
801ira_create_allocno_live_range (ira_allocno_t a, int start, int finish,
802 allocno_live_range_t next)
803{
804 allocno_live_range_t p;
805
806 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
807 p->allocno = a;
808 p->start = start;
809 p->finish = finish;
810 p->next = next;
811 return p;
812}
813
814/* Copy allocno live range R and return the result. */
815static allocno_live_range_t
816copy_allocno_live_range (allocno_live_range_t r)
817{
818 allocno_live_range_t p;
819
820 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
821 *p = *r;
822 return p;
823}
824
825/* Copy allocno live range list given by its head R and return the
826 result. */
827static allocno_live_range_t
828copy_allocno_live_range_list (allocno_live_range_t r)
829{
830 allocno_live_range_t p, first, last;
831
832 if (r == NULL)
833 return NULL;
834 for (first = last = NULL; r != NULL; r = r->next)
835 {
836 p = copy_allocno_live_range (r);
837 if (first == NULL)
838 first = p;
839 else
840 last->next = p;
841 last = p;
842 }
843 return first;
844}
845
/* Free allocno live range R back to its allocation pool.  */
void
ira_finish_allocno_live_range (allocno_live_range_t r)
{
  pool_free (allocno_live_range_pool, r);
}
852
/* Free updated register costs of allocno A and reset the pointers so
   the vectors are not freed twice.  */
void
ira_free_allocno_updated_costs (ira_allocno_t a)
{
  enum reg_class cover_class;

  cover_class = ALLOCNO_COVER_CLASS (a);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
			  cover_class);
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}
868
/* Free the memory allocated for allocno A: its conflict array, cost
   vectors, and live range list, then the allocno itself.  Its slots
   in the global maps are cleared to NULL.  */
static void
finish_allocno (ira_allocno_t a)
{
  allocno_live_range_t r, next_r;
  enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);

  ira_allocnos[ALLOCNO_NUM (a)] = NULL;
  ira_conflict_id_allocno_map[ALLOCNO_CONFLICT_ID (a)] = NULL;
  if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) != NULL)
    ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a));
  if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
			  cover_class);
  /* Walk the live range list, saving each next pointer before the
     node is freed.  */
  for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = next_r)
    {
      next_r = r->next;
      ira_finish_allocno_live_range (r);
    }
  pool_free (allocno_pool, a);
}
896
/* Free the memory allocated for all allocnos, the container vectors,
   the regno map, and the allocation pools.  */
static void
finish_allocnos (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    finish_allocno (a);
  ira_free (ira_regno_allocno_map);
  VEC_free (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec);
  VEC_free (ira_allocno_t, heap, allocno_vec);
  free_alloc_pool (allocno_pool);
  free_alloc_pool (allocno_live_range_pool);
}
912
913\f
914
/* Pool for copies.  */
static alloc_pool copy_pool;

/* Vec containing references to all created copies.  It is a
   container of array ira_copies.  */
static VEC(ira_copy_t,heap) *copy_vec;
921
/* The function initializes data concerning allocno copies: the copy
   pool and the container vector.  */
static void
initiate_copies (void)
{
  copy_pool
    = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
  /* Use the insn uid bound as an initial size guess.  */
  copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
  ira_copies = NULL;
  ira_copies_num = 0;
}
932
/* Return copy connecting A1 and A2 and originated from INSN of
   LOOP_TREE_NODE if any, or NULL.  */
static ira_copy_t
find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
		   ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp, next_cp;
  ira_allocno_t another_a;

  /* Each copy is threaded on two chains, one per endpoint; follow the
     chain link that corresponds to A1's role in the copy.  */
  for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
    {
      if (cp->first == a1)
	{
	  next_cp = cp->next_first_allocno_copy;
	  another_a = cp->second;
	}
      else if (cp->second == a1)
	{
	  next_cp = cp->next_second_allocno_copy;
	  another_a = cp->first;
	}
      else
	/* A copy on A1's list must reference A1 as one endpoint.  */
	gcc_unreachable ();
      if (another_a == a2 && cp->insn == insn
	  && cp->loop_tree_node == loop_tree_node)
	return cp;
    }
  return NULL;
}
962
963/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
964 SECOND, FREQ, and INSN. */
965ira_copy_t
966ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq, rtx insn,
967 ira_loop_tree_node_t loop_tree_node)
968{
969 ira_copy_t cp;
970
971 cp = (ira_copy_t) pool_alloc (copy_pool);
972 cp->num = ira_copies_num;
973 cp->first = first;
974 cp->second = second;
975 cp->freq = freq;
976 cp->insn = insn;
977 cp->loop_tree_node = loop_tree_node;
978 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
979 ira_copies = VEC_address (ira_copy_t, copy_vec);
980 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
981 return cp;
982}
983
984/* Attach a copy CP to allocnos involved into the copy. */
985void
986ira_add_allocno_copy_to_list (ira_copy_t cp)
987{
988 ira_allocno_t first = cp->first, second = cp->second;
989
990 cp->prev_first_allocno_copy = NULL;
991 cp->prev_second_allocno_copy = NULL;
992 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
993 if (cp->next_first_allocno_copy != NULL)
994 {
995 if (cp->next_first_allocno_copy->first == first)
996 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
997 else
998 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
999 }
1000 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1001 if (cp->next_second_allocno_copy != NULL)
1002 {
1003 if (cp->next_second_allocno_copy->second == second)
1004 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1005 else
1006 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1007 }
1008 ALLOCNO_COPIES (first) = cp;
1009 ALLOCNO_COPIES (second) = cp;
1010}
1011
1012/* Detach a copy CP from allocnos involved into the copy. */
1013void
1014ira_remove_allocno_copy_from_list (ira_copy_t cp)
1015{
1016 ira_allocno_t first = cp->first, second = cp->second;
1017 ira_copy_t prev, next;
1018
1019 next = cp->next_first_allocno_copy;
1020 prev = cp->prev_first_allocno_copy;
1021 if (prev == NULL)
1022 ALLOCNO_COPIES (first) = next;
1023 else if (prev->first == first)
1024 prev->next_first_allocno_copy = next;
1025 else
1026 prev->next_second_allocno_copy = next;
1027 if (next != NULL)
1028 {
1029 if (next->first == first)
1030 next->prev_first_allocno_copy = prev;
1031 else
1032 next->prev_second_allocno_copy = prev;
1033 }
1034 cp->prev_first_allocno_copy = cp->next_first_allocno_copy = NULL;
1035
1036 next = cp->next_second_allocno_copy;
1037 prev = cp->prev_second_allocno_copy;
1038 if (prev == NULL)
1039 ALLOCNO_COPIES (second) = next;
1040 else if (prev->second == second)
1041 prev->next_second_allocno_copy = next;
1042 else
1043 prev->next_first_allocno_copy = next;
1044 if (next != NULL)
1045 {
1046 if (next->second == second)
1047 next->prev_second_allocno_copy = prev;
1048 else
1049 next->prev_first_allocno_copy = prev;
1050 }
1051 cp->prev_second_allocno_copy = cp->next_second_allocno_copy = NULL;
1052}
1053
1054/* Make a copy CP a canonical copy where number of the
1055 first allocno is less than the second one. */
1056void
1057ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1058{
1059 ira_allocno_t temp;
1060 ira_copy_t temp_cp;
1061
1062 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1063 return;
1064
1065 temp = cp->first;
1066 cp->first = cp->second;
1067 cp->second = temp;
1068
1069 temp_cp = cp->prev_first_allocno_copy;
1070 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1071 cp->prev_second_allocno_copy = temp_cp;
1072
1073 temp_cp = cp->next_first_allocno_copy;
1074 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1075 cp->next_second_allocno_copy = temp_cp;
1076}
1077
1078/* Create (or update frequency if the copy already exists) and return
1079 the copy of allocnos FIRST and SECOND with frequency FREQ
1080 corresponding to move insn INSN (if any) and originated from
1081 LOOP_TREE_NODE. */
1082ira_copy_t
1083ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1084 rtx insn, ira_loop_tree_node_t loop_tree_node)
1085{
1086 ira_copy_t cp;
1087
1088 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1089 {
1090 cp->freq += freq;
1091 return cp;
1092 }
1093 cp = ira_create_copy (first, second, freq, insn, loop_tree_node);
1094 ira_assert (first != NULL && second != NULL);
1095 ira_add_allocno_copy_to_list (cp);
1096 ira_swap_allocno_copy_ends_if_necessary (cp);
1097 return cp;
1098}
1099
1100/* Print info about copy CP into file F. */
1101static void
1102print_copy (FILE *f, ira_copy_t cp)
1103{
1104 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d\n", cp->num,
1105 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1106 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq);
1107}
1108
1109/* Print info about copy CP into stderr. */
1110void
1111ira_debug_copy (ira_copy_t cp)
1112{
1113 print_copy (stderr, cp);
1114}
1115
1116/* Print info about all copies into file F. */
1117static void
1118print_copies (FILE *f)
1119{
1120 ira_copy_t cp;
1121 ira_copy_iterator ci;
1122
1123 FOR_EACH_COPY (cp, ci)
1124 print_copy (f, cp);
1125}
1126
/* Dump descriptions of all copies to stderr (for use from a
   debugger).  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}
1133
1134/* Print info about copies involving allocno A into file F. */
1135static void
1136print_allocno_copies (FILE *f, ira_allocno_t a)
1137{
1138 ira_allocno_t another_a;
1139 ira_copy_t cp, next_cp;
1140
1141 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1142 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1143 {
1144 if (cp->first == a)
1145 {
1146 next_cp = cp->next_first_allocno_copy;
1147 another_a = cp->second;
1148 }
1149 else if (cp->second == a)
1150 {
1151 next_cp = cp->next_second_allocno_copy;
1152 another_a = cp->first;
1153 }
1154 else
1155 gcc_unreachable ();
1156 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1157 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1158 }
1159 fprintf (f, "\n");
1160}
1161
1162/* Print info about copies involving allocno A into stderr. */
1163void
1164ira_debug_allocno_copies (ira_allocno_t a)
1165{
1166 print_allocno_copies (stderr, a);
1167}
1168
1169/* The function frees memory allocated for copy CP. */
1170static void
1171finish_copy (ira_copy_t cp)
1172{
1173 pool_free (copy_pool, cp);
1174}
1175
1176
1177/* Free memory allocated for all copies. */
1178static void
1179finish_copies (void)
1180{
1181 ira_copy_t cp;
1182 ira_copy_iterator ci;
1183
1184 FOR_EACH_COPY (cp, ci)
1185 finish_copy (cp);
1186 VEC_free (ira_copy_t, heap, copy_vec);
1187 free_alloc_pool (copy_pool);
1188}
1189
1190\f
1191
1192/* Pools for cost vectors. It is defined only for cover classes. */
1193static alloc_pool cost_vector_pool[N_REG_CLASSES];
1194
1195/* The function initiates work with hard register cost vectors. It
1196 creates allocation pool for each cover class. */
1197static void
1198initiate_cost_vectors (void)
1199{
1200 int i;
1201 enum reg_class cover_class;
1202
1203 for (i = 0; i < ira_reg_class_cover_size; i++)
1204 {
1205 cover_class = ira_reg_class_cover[i];
1206 cost_vector_pool[cover_class]
1207 = create_alloc_pool ("cost vectors",
1208 sizeof (int)
1209 * ira_class_hard_regs_num[cover_class],
1210 100);
1211 }
1212}
1213
1214/* Allocate and return a cost vector VEC for COVER_CLASS. */
1215int *
1216ira_allocate_cost_vector (enum reg_class cover_class)
1217{
1218 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1219}
1220
1221/* Free a cost vector VEC for COVER_CLASS. */
1222void
1223ira_free_cost_vector (int *vec, enum reg_class cover_class)
1224{
1225 ira_assert (vec != NULL);
1226 pool_free (cost_vector_pool[cover_class], vec);
1227}
1228
1229/* Finish work with hard register cost vectors. Release allocation
1230 pool for each cover class. */
1231static void
1232finish_cost_vectors (void)
1233{
1234 int i;
1235 enum reg_class cover_class;
1236
1237 for (i = 0; i < ira_reg_class_cover_size; i++)
1238 {
1239 cover_class = ira_reg_class_cover[i];
1240 free_alloc_pool (cost_vector_pool[cover_class]);
1241 }
1242}
1243
1244\f
1245
1246/* The current loop tree node and its regno allocno map. */
1247ira_loop_tree_node_t ira_curr_loop_tree_node;
1248ira_allocno_t *ira_curr_regno_allocno_map;
1249
1250/* This recursive function traverses loop tree with root LOOP_NODE
1251 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1252 correspondingly in preorder and postorder. The function sets up
1253 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1254 basic block nodes of LOOP_NODE is also processed (before its
1255 subloop nodes). */
1256void
1257ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1258 void (*preorder_func) (ira_loop_tree_node_t),
1259 void (*postorder_func) (ira_loop_tree_node_t))
1260{
1261 ira_loop_tree_node_t subloop_node;
1262
1263 ira_assert (loop_node->bb == NULL);
1264 ira_curr_loop_tree_node = loop_node;
1265 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1266
1267 if (preorder_func != NULL)
1268 (*preorder_func) (loop_node);
1269
1270 if (bb_p)
1271 for (subloop_node = loop_node->children;
1272 subloop_node != NULL;
1273 subloop_node = subloop_node->next)
1274 if (subloop_node->bb != NULL)
1275 {
1276 if (preorder_func != NULL)
1277 (*preorder_func) (subloop_node);
1278
1279 if (postorder_func != NULL)
1280 (*postorder_func) (subloop_node);
1281 }
1282
1283 for (subloop_node = loop_node->subloops;
1284 subloop_node != NULL;
1285 subloop_node = subloop_node->subloop_next)
1286 {
1287 ira_assert (subloop_node->bb == NULL);
1288 ira_traverse_loop_tree (bb_p, subloop_node,
1289 preorder_func, postorder_func);
1290 }
1291
1292 ira_curr_loop_tree_node = loop_node;
1293 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1294
1295 if (postorder_func != NULL)
1296 (*postorder_func) (loop_node);
1297}
1298
1299\f
1300
1301/* The basic block currently being processed. */
1302static basic_block curr_bb;
1303
1304/* This recursive function creates allocnos corresponding to
1305 pseudo-registers containing in X. True OUTPUT_P means that X is
1306 a lvalue. */
1307static void
1308create_insn_allocnos (rtx x, bool output_p)
1309{
1310 int i, j;
1311 const char *fmt;
1312 enum rtx_code code = GET_CODE (x);
1313
1314 if (code == REG)
1315 {
1316 int regno;
1317
1318 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1319 {
1320 ira_allocno_t a;
1321
1322 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1323 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1324
1325 ALLOCNO_NREFS (a)++;
1326 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
058e97ec
VM
1327 if (output_p)
1328 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1329 }
1330 return;
1331 }
1332 else if (code == SET)
1333 {
1334 create_insn_allocnos (SET_DEST (x), true);
1335 create_insn_allocnos (SET_SRC (x), false);
1336 return;
1337 }
1338 else if (code == CLOBBER)
1339 {
1340 create_insn_allocnos (XEXP (x, 0), true);
1341 return;
1342 }
1343 else if (code == MEM)
1344 {
1345 create_insn_allocnos (XEXP (x, 0), false);
1346 return;
1347 }
1348 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1349 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1350 {
1351 create_insn_allocnos (XEXP (x, 0), true);
1352 create_insn_allocnos (XEXP (x, 0), false);
1353 return;
1354 }
1355
1356 fmt = GET_RTX_FORMAT (code);
1357 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1358 {
1359 if (fmt[i] == 'e')
1360 create_insn_allocnos (XEXP (x, i), output_p);
1361 else if (fmt[i] == 'E')
1362 for (j = 0; j < XVECLEN (x, i); j++)
1363 create_insn_allocnos (XVECEXP (x, i, j), output_p);
1364 }
1365}
1366
1367/* Create allocnos corresponding to pseudo-registers living in the
1368 basic block represented by the corresponding loop tree node
1369 BB_NODE. */
1370static void
1371create_bb_allocnos (ira_loop_tree_node_t bb_node)
1372{
1373 basic_block bb;
1374 rtx insn;
1375 unsigned int i;
1376 bitmap_iterator bi;
1377
1378 curr_bb = bb = bb_node->bb;
1379 ira_assert (bb != NULL);
acb37d29 1380 FOR_BB_INSNS_REVERSE (bb, insn)
058e97ec
VM
1381 if (INSN_P (insn))
1382 create_insn_allocnos (PATTERN (insn), false);
1383 /* It might be a allocno living through from one subloop to
1384 another. */
1385 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1386 if (ira_curr_regno_allocno_map[i] == NULL)
1387 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1388}
1389
1390/* Create allocnos corresponding to pseudo-registers living on edge E
1391 (a loop entry or exit). Also mark the allocnos as living on the
1392 loop border. */
1393static void
1394create_loop_allocnos (edge e)
1395{
1396 unsigned int i;
1397 bitmap live_in_regs, border_allocnos;
1398 bitmap_iterator bi;
1399 ira_loop_tree_node_t parent;
1400
1401 live_in_regs = DF_LR_IN (e->dest);
1402 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
1403 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1404 FIRST_PSEUDO_REGISTER, i, bi)
1405 if (bitmap_bit_p (live_in_regs, i))
1406 {
1407 if (ira_curr_regno_allocno_map[i] == NULL)
1408 {
1409 /* The order of creations is important for right
1410 ira_regno_allocno_map. */
1411 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1412 && parent->regno_allocno_map[i] == NULL)
1413 ira_create_allocno (i, false, parent);
1414 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1415 }
1416 bitmap_set_bit (border_allocnos,
1417 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1418 }
1419}
1420
1421/* Create allocnos corresponding to pseudo-registers living in loop
1422 represented by the corresponding loop tree node LOOP_NODE. This
1423 function is called by ira_traverse_loop_tree. */
1424static void
1425create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1426{
1427 if (loop_node->bb != NULL)
1428 create_bb_allocnos (loop_node);
1429 else if (loop_node != ira_loop_tree_root)
1430 {
1431 int i;
1432 edge_iterator ei;
1433 edge e;
1434 VEC (edge, heap) *edges;
1435
1436 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1437 if (e->src != loop_node->loop->latch)
1438 create_loop_allocnos (e);
1439
1440 edges = get_loop_exit_edges (loop_node->loop);
1441 for (i = 0; VEC_iterate (edge, edges, i, e); i++)
1442 create_loop_allocnos (e);
1443 VEC_free (edge, heap, edges);
1444 }
1445}
1446
1447/* Propagate information about allocnos modified inside the loop given
1448 by its LOOP_TREE_NODE to its parent. */
1449static void
1450propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1451{
1452 if (loop_tree_node == ira_loop_tree_root)
1453 return;
1454 ira_assert (loop_tree_node->bb == NULL);
1455 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1456 loop_tree_node->modified_regnos);
1457}
1458
1459/* Propagate new info about allocno A (see comments about accumulated
1460 info in allocno definition) to the corresponding allocno on upper
1461 loop tree level. So allocnos on upper levels accumulate
1462 information about the corresponding allocnos in nested regions.
1463 The new info means allocno info finally calculated in this
1464 file. */
1465static void
1466propagate_allocno_info (void)
1467{
1468 int i;
1469 ira_allocno_t a, parent_a;
1470 ira_loop_tree_node_t parent;
1471 enum reg_class cover_class;
1472
1473 if (flag_ira_algorithm != IRA_ALGORITHM_REGIONAL
1474 && flag_ira_algorithm != IRA_ALGORITHM_MIXED)
1475 return;
1476 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1477 for (a = ira_regno_allocno_map[i];
1478 a != NULL;
1479 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1480 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1481 && (parent_a = parent->regno_allocno_map[i]) != NULL
1482 /* There are no caps yet at this point. So use
1483 border_allocnos to find allocnos for the propagation. */
1484 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1485 ALLOCNO_NUM (a)))
1486 {
1487 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1488 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1489 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1490#ifdef STACK_REGS
1491 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
1492 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
1493#endif
1494 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
1495 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
1496 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1497 += ALLOCNO_CALLS_CROSSED_NUM (a);
1498 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1499 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1500 cover_class = ALLOCNO_COVER_CLASS (a);
1501 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1502 ira_allocate_and_accumulate_costs
1503 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1504 ALLOCNO_HARD_REG_COSTS (a));
1505 ira_allocate_and_accumulate_costs
1506 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1507 cover_class,
1508 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1509 ALLOCNO_COVER_CLASS_COST (parent_a)
1510 += ALLOCNO_COVER_CLASS_COST (a);
1511 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1512 ALLOCNO_UPDATED_MEMORY_COST (parent_a)
1513 += ALLOCNO_UPDATED_MEMORY_COST (a);
1514 }
1515}
1516
1517/* Create allocnos corresponding to pseudo-registers in the current
1518 function. Traverse the loop tree for this. */
1519static void
1520create_allocnos (void)
1521{
1522 /* We need to process BB first to correctly link allocnos by member
1523 next_regno_allocno. */
1524 ira_traverse_loop_tree (true, ira_loop_tree_root,
1525 create_loop_tree_node_allocnos, NULL);
1526 if (optimize)
1527 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1528 propagate_modified_regnos);
1529}
1530
1531\f
1532
1533/* The page contains function to remove some regions from a separate
1534 register allocation. We remove regions whose separate allocation
1535 will hardly improve the result. As a result we speed up regional
1536 register allocation. */
1537
1538/* Merge ranges R1 and R2 and returns the result. The function
1539 maintains the order of ranges and tries to minimize number of the
1540 result ranges. */
1541static allocno_live_range_t
1542merge_ranges (allocno_live_range_t r1, allocno_live_range_t r2)
1543{
1544 allocno_live_range_t first, last, temp;
1545
1546 if (r1 == NULL)
1547 return r2;
1548 if (r2 == NULL)
1549 return r1;
1550 for (first = last = NULL; r1 != NULL && r2 != NULL;)
1551 {
1552 if (r1->start < r2->start)
1553 {
1554 temp = r1;
1555 r1 = r2;
1556 r2 = temp;
1557 }
1558 if (r1->start <= r2->finish + 1)
1559 {
1560 /* Intersected ranges: merge r1 and r2 into r1. */
1561 r1->start = r2->start;
1562 if (r1->finish < r2->finish)
1563 r1->finish = r2->finish;
1564 temp = r2;
1565 r2 = r2->next;
1566 ira_finish_allocno_live_range (temp);
1567 if (r2 == NULL)
1568 {
1569 /* To try to merge with subsequent ranges in r1. */
1570 r2 = r1->next;
1571 r1->next = NULL;
1572 }
1573 }
1574 else
1575 {
1576 /* Add r1 to the result. */
1577 if (first == NULL)
1578 first = last = r1;
1579 else
1580 {
1581 last->next = r1;
1582 last = r1;
1583 }
1584 r1 = r1->next;
1585 if (r1 == NULL)
1586 {
1587 /* To try to merge with subsequent ranges in r2. */
1588 r1 = r2->next;
1589 r2->next = NULL;
1590 }
1591 }
1592 }
1593 if (r1 != NULL)
1594 {
1595 if (first == NULL)
1596 first = r1;
1597 else
1598 last->next = r1;
1599 ira_assert (r1->next == NULL);
1600 }
1601 else if (r2 != NULL)
1602 {
1603 if (first == NULL)
1604 first = r2;
1605 else
1606 last->next = r2;
1607 ira_assert (r2->next == NULL);
1608 }
1609 else
1610 {
1611 ira_assert (last->next == NULL);
1612 }
1613 return first;
1614}
1615
1616/* The function changes allocno in range list given by R onto A. */
1617static void
1618change_allocno_in_range_list (allocno_live_range_t r, ira_allocno_t a)
1619{
1620 for (; r != NULL; r = r->next)
1621 r->allocno = a;
1622}
1623
1624/* Return TRUE if NODE represents a loop with low register
1625 pressure. */
1626static bool
1627low_pressure_loop_node_p (ira_loop_tree_node_t node)
1628{
1629 int i;
1630 enum reg_class cover_class;
1631
1632 if (node->bb != NULL)
1633 return false;
1634
1635 for (i = 0; i < ira_reg_class_cover_size; i++)
1636 {
1637 cover_class = ira_reg_class_cover[i];
1638 if (node->reg_pressure[cover_class]
1639 > ira_available_class_regs[cover_class])
1640 return false;
1641 }
1642 return true;
1643}
1644
1645/* Return TRUE if NODE represents a loop with should be removed from
1646 regional allocation. We remove a loop with low register pressure
1647 inside another loop with register pressure. In this case a
1648 separate allocation of the loop hardly helps (for irregular
1649 register file architecture it could help by choosing a better hard
1650 register in the loop but we prefer faster allocation even in this
1651 case). */
1652static bool
1653loop_node_to_be_removed_p (ira_loop_tree_node_t node)
1654{
1655 return (node->parent != NULL && low_pressure_loop_node_p (node->parent)
1656 && low_pressure_loop_node_p (node));
1657}
1658
1659/* Definition of vector of loop tree nodes. */
1660DEF_VEC_P(ira_loop_tree_node_t);
1661DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1662
1663/* Vec containing references to all removed loop tree nodes. */
1664static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1665
1666/* Vec containing references to all children of loop tree nodes. */
1667static VEC(ira_loop_tree_node_t,heap) *children_vec;
1668
1669/* Remove subregions of NODE if their separate allocation will not
1670 improve the result. */
1671static void
1672remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1673{
1674 unsigned int start;
1675 bool remove_p;
1676 ira_loop_tree_node_t subnode;
1677
1678 remove_p = loop_node_to_be_removed_p (node);
1679 if (! remove_p)
1680 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
1681 start = VEC_length (ira_loop_tree_node_t, children_vec);
1682 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1683 if (subnode->bb == NULL)
1684 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
1685 else
1686 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
1687 node->children = node->subloops = NULL;
1688 if (remove_p)
1689 {
1690 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
1691 return;
1692 }
1693 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
1694 {
1695 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
1696 subnode->parent = node;
1697 subnode->next = node->children;
1698 node->children = subnode;
1699 if (subnode->bb == NULL)
1700 {
1701 subnode->subloop_next = node->subloops;
1702 node->subloops = subnode;
1703 }
1704 }
1705}
1706
1707/* Remove allocnos from loops removed from the allocation
1708 consideration. */
1709static void
1710remove_unnecessary_allocnos (void)
1711{
1712 int regno;
1713 bool merged_p;
1714 enum reg_class cover_class;
1715 ira_allocno_t a, prev_a, next_a, parent_a;
1716 ira_loop_tree_node_t a_node, parent;
1717 allocno_live_range_t r;
1718
1719 merged_p = false;
1720 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
1721 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
1722 a != NULL;
1723 a = next_a)
1724 {
1725 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
1726 a_node = ALLOCNO_LOOP_TREE_NODE (a);
1727 if (! loop_node_to_be_removed_p (a_node))
1728 prev_a = a;
1729 else
1730 {
1731 for (parent = a_node->parent;
1732 (parent_a = parent->regno_allocno_map[regno]) == NULL
1733 && loop_node_to_be_removed_p (parent);
1734 parent = parent->parent)
1735 ;
1736 if (parent_a == NULL)
1737 {
1738 /* There are no allocnos with the same regno in upper
1739 region -- just move the allocno to the upper
1740 region. */
1741 prev_a = a;
1742 ALLOCNO_LOOP_TREE_NODE (a) = parent;
1743 parent->regno_allocno_map[regno] = a;
49d988e7 1744 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
058e97ec
VM
1745 }
1746 else
1747 {
1748 /* Remove the allocno and update info of allocno in
1749 the upper region. */
1750 if (prev_a == NULL)
1751 ira_regno_allocno_map[regno] = next_a;
1752 else
1753 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
1754 r = ALLOCNO_LIVE_RANGES (a);
1755 change_allocno_in_range_list (r, parent_a);
1756 ALLOCNO_LIVE_RANGES (parent_a)
1757 = merge_ranges (r, ALLOCNO_LIVE_RANGES (parent_a));
1758 merged_p = true;
1759 ALLOCNO_LIVE_RANGES (a) = NULL;
1760 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (parent_a),
1761 ALLOCNO_CONFLICT_HARD_REGS (a));
1762#ifdef STACK_REGS
1763 if (ALLOCNO_NO_STACK_REG_P (a))
1764 ALLOCNO_NO_STACK_REG_P (parent_a) = true;
1765#endif
1766 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1767 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1768 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1769 IOR_HARD_REG_SET
1770 (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
1771 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
1772 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1773 += ALLOCNO_CALLS_CROSSED_NUM (a);
1774 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1775 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1776#ifdef STACK_REGS
1777 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
1778 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
1779#endif
1780 cover_class = ALLOCNO_COVER_CLASS (a);
1781 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1782 ira_allocate_and_accumulate_costs
1783 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1784 ALLOCNO_HARD_REG_COSTS (a));
1785 ira_allocate_and_accumulate_costs
1786 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1787 cover_class,
1788 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1789 ALLOCNO_COVER_CLASS_COST (parent_a)
1790 += ALLOCNO_COVER_CLASS_COST (a);
1791 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1792 ALLOCNO_UPDATED_MEMORY_COST (parent_a)
1793 += ALLOCNO_UPDATED_MEMORY_COST (a);
1794 finish_allocno (a);
1795 }
1796 }
1797 }
1798 if (merged_p)
1799 ira_rebuild_start_finish_chains ();
1800}
1801
1802/* Remove loops from consideration. We remove loops for which a
1803 separate allocation will not improve the result. We have to do
1804 this after allocno creation and their costs and cover class
1805 evaluation because only after that the register pressure can be
1806 known and is calculated. */
1807static void
1808remove_unnecessary_regions (void)
1809{
1810 children_vec
1811 = VEC_alloc (ira_loop_tree_node_t, heap,
1812 last_basic_block + VEC_length (loop_p, ira_loops.larray));
1813 removed_loop_vec
1814 = VEC_alloc (ira_loop_tree_node_t, heap,
1815 last_basic_block + VEC_length (loop_p, ira_loops.larray));
1816 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
1817 VEC_free (ira_loop_tree_node_t, heap, children_vec);
1818 remove_unnecessary_allocnos ();
1819 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
1820 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
1821 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
1822}
1823
1824\f
1825
1826/* Set up minimal and maximal live range points for allocnos. */
1827static void
1828setup_min_max_allocno_live_range_point (void)
1829{
1830 int i;
1831 ira_allocno_t a, parent_a, cap;
1832 ira_allocno_iterator ai;
1833 allocno_live_range_t r;
1834 ira_loop_tree_node_t parent;
1835
1836 FOR_EACH_ALLOCNO (a, ai)
1837 {
1838 r = ALLOCNO_LIVE_RANGES (a);
1839 if (r == NULL)
1840 continue;
1841 ALLOCNO_MAX (a) = r->finish;
1842 for (; r->next != NULL; r = r->next)
1843 ;
1844 ALLOCNO_MIN (a) = r->start;
1845 }
1846 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1847 for (a = ira_regno_allocno_map[i];
1848 a != NULL;
1849 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1850 {
1851 if (ALLOCNO_MAX (a) < 0)
1852 continue;
1853 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
1854 /* Accumulation of range info. */
1855 if (ALLOCNO_CAP (a) != NULL)
1856 {
1857 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
1858 {
1859 if (ALLOCNO_MAX (cap) < ALLOCNO_MAX (a))
1860 ALLOCNO_MAX (cap) = ALLOCNO_MAX (a);
1861 if (ALLOCNO_MIN (cap) > ALLOCNO_MIN (a))
1862 ALLOCNO_MIN (cap) = ALLOCNO_MIN (a);
1863 }
1864 continue;
1865 }
1866 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
1867 continue;
1868 parent_a = parent->regno_allocno_map[i];
1869 if (ALLOCNO_MAX (parent_a) < ALLOCNO_MAX (a))
1870 ALLOCNO_MAX (parent_a) = ALLOCNO_MAX (a);
1871 if (ALLOCNO_MIN (parent_a) > ALLOCNO_MIN (a))
1872 ALLOCNO_MIN (parent_a) = ALLOCNO_MIN (a);
1873 }
1874#ifdef ENABLE_IRA_CHECKING
1875 FOR_EACH_ALLOCNO (a, ai)
1876 {
1877 if ((0 <= ALLOCNO_MIN (a) && ALLOCNO_MIN (a) <= ira_max_point)
1878 && (0 <= ALLOCNO_MAX (a) && ALLOCNO_MAX (a) <= ira_max_point))
1879 continue;
1880 gcc_unreachable ();
1881 }
1882#endif
1883}
1884
1885/* Sort allocnos according to their live ranges. Allocnos with
1886 smaller cover class are put first. Allocnos with the same cove
1887 class are ordered according their start (min). Allocnos with the
1888 same start are ordered according their finish (max). */
1889static int
1890allocno_range_compare_func (const void *v1p, const void *v2p)
1891{
1892 int diff;
1893 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1894 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1895
1896 if ((diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
1897 return diff;
1898 if ((diff = ALLOCNO_MIN (a1) - ALLOCNO_MIN (a2)) != 0)
1899 return diff;
1900 if ((diff = ALLOCNO_MAX (a1) - ALLOCNO_MAX (a2)) != 0)
1901 return diff;
1902 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
1903}
1904
1905/* Sort ira_conflict_id_allocno_map and set up conflict id of
1906 allocnos. */
1907static void
1908sort_conflict_id_allocno_map (void)
1909{
1910 int i, num;
1911 ira_allocno_t a;
1912 ira_allocno_iterator ai;
1913
1914 num = 0;
1915 FOR_EACH_ALLOCNO (a, ai)
1916 ira_conflict_id_allocno_map[num++] = a;
1917 qsort (ira_conflict_id_allocno_map, num, sizeof (ira_allocno_t),
1918 allocno_range_compare_func);
1919 for (i = 0; i < num; i++)
1920 if ((a = ira_conflict_id_allocno_map[i]) != NULL)
1921 ALLOCNO_CONFLICT_ID (a) = i;
1922 for (i = num; i < ira_allocnos_num; i++)
1923 ira_conflict_id_allocno_map[i] = NULL;
1924}
1925
/* Set up minimal and maximal conflict ids of allocnos with which
   given allocno can conflict.

   The function works in two passes over IRA_CONFLICT_ID_ALLOCNO_MAP,
   which is sorted by cover class, range start, and (secondarily)
   range finish -- see allocno_range_compare_func:

   o A forward pass tightens ALLOCNO_MIN: an allocno cannot conflict
     with any allocno of the same cover class whose range has already
     finished before this allocno's range starts.

   o A backward pass tightens ALLOCNO_MAX using LAST_LIVED, which maps
     a program point to the smallest-index allocno (seen so far in the
     backward walk) whose range covers that point.

   The original ALLOCNO_MIN values are saved in LIVE_RANGE_MIN during
   the first pass because the second pass still needs them.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  enum reg_class cover_class;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  ira_allocno_t a;

  live_range_min = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
  cover_class = -1;
  first_not_finished = -1;
  /* Forward pass: compute the minimal possible conflict id for each
     allocno.  */
  for (i = 0; i < ira_allocnos_num; i++)
    {
      a = ira_conflict_id_allocno_map[i];
      if (a == NULL)
	continue;
      if (cover_class != ALLOCNO_COVER_CLASS (a))
	{
	  /* A new cover class starts; allocnos of different cover
	     classes never conflict, so restart the window.  */
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  min = i;
	  first_not_finished = i;
	}
      else
	{
	  start = ALLOCNO_MIN (a);
	  /* If we skip an allocno, the allocno with smaller ids will
	     be also skipped because of the secondary sorting by the
	     range finishes (see function
	     allocno_range_compare_func).  */
	  while (first_not_finished < i
		 && start > ALLOCNO_MAX (ira_conflict_id_allocno_map
					 [first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	}
      if (min == i)
	/* We could increase min further in this case but it is good
	   enough.  */
	min++;
      live_range_min[i] = ALLOCNO_MIN (a);
      ALLOCNO_MIN (a) = min;
    }
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  cover_class = -1;
  filled_area_start = -1;
  /* Backward pass: compute the maximal possible conflict id for each
     allocno.  */
  for (i = ira_allocnos_num - 1; i >= 0; i--)
    {
      a = ira_conflict_id_allocno_map[i];
      if (a == NULL)
	continue;
      if (cover_class != ALLOCNO_COVER_CLASS (a))
	{
	  /* New cover class: reset the point -> allocno-index map.  */
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	    last_lived[j] = -1;
	  filled_area_start = ira_max_point;
	}
      min = live_range_min[i];
      finish = ALLOCNO_MAX (a);
      max = last_lived[finish];
      if (max < 0)
	/* We could decrease max further in this case but it is good
	   enough.  */
	max = ALLOCNO_CONFLICT_ID (a) - 1;
      ALLOCNO_MAX (a) = max;
      /* In filling LAST_LIVED, we can go beyond A's range finish to
	 recognize intersection quickly: if the finish of a
	 subsequently processed allocno (it has a smaller conflict id)
	 lies beyond A's range finish, the two ranges definitely
	 intersect, because allocnos with bigger conflict ids have
	 their range starts not smaller than allocnos with smaller
	 ids.  */
      for (j = min; j < filled_area_start; j++)
	last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);
}
2007
2008\f
2009
2010static void
2011create_caps (void)
2012{
2013 ira_allocno_t a;
2014 ira_allocno_iterator ai;
2015 ira_loop_tree_node_t loop_tree_node;
2016
2017 FOR_EACH_ALLOCNO (a, ai)
2018 {
2019 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2020 continue;
2021 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2022 create_cap_allocno (a);
2023 else if (ALLOCNO_CAP (a) == NULL)
2024 {
2025 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2026 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2027 create_cap_allocno (a);
2028 }
2029 }
2030}
2031
2032\f
2033
/* The page contains code transforming more than one region internal
   representation (IR) to one region IR which is necessary for reload.
   This transformation is called IR flattening.  We might just rebuild
   the IR for one region but we don't do it because it takes a lot of
   time.  */

/* Map: regno -> the allocno which will finally represent the regno
   for IR with one region.  Allocated and freed by ira_flattening.  */
static ira_allocno_t *regno_top_level_allocno_map;
2043
2044/* Process all allocnos originated from pseudo REGNO and copy live
2045 ranges from low level allocnos to final allocnos which are
2046 destinations of removed stores at a loop exit. Return true if we
2047 copied live ranges. */
2048static bool
2049copy_live_ranges_to_removed_store_destinations (int regno)
2050{
2051 ira_allocno_t a, parent_a;
2052 ira_loop_tree_node_t parent;
2053 allocno_live_range_t r;
2054 bool merged_p;
2055
2056 merged_p = false;
2057 for (a = ira_regno_allocno_map[regno];
2058 a != NULL;
2059 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2060 {
2061 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
2062 /* This allocno will be removed. */
2063 continue;
2064 /* Caps will be removed. */
2065 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2066 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2067 parent != NULL;
2068 parent = parent->parent)
2069 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
2070 || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
2071 (parent_a))]
2072 && ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
2073 break;
2074 if (parent == NULL || parent_a == NULL)
2075 continue;
2076 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2077 {
2078 fprintf
2079 (ira_dump_file,
2080 " Coping ranges of a%dr%d to a%dr%d: ",
2081 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)),
2082 ALLOCNO_NUM (parent_a), REGNO (ALLOCNO_REG (parent_a)));
2083 ira_print_live_range_list (ira_dump_file,
2084 ALLOCNO_LIVE_RANGES (a));
2085 }
2086 r = copy_allocno_live_range_list (ALLOCNO_LIVE_RANGES (a));
2087 change_allocno_in_range_list (r, parent_a);
2088 ALLOCNO_LIVE_RANGES (parent_a)
2089 = merge_ranges (r, ALLOCNO_LIVE_RANGES (parent_a));
2090 merged_p = true;
2091 }
2092 return merged_p;
058e97ec
VM
2093}
2094
/* Flatten the IR.  In other words, this function transforms IR as if
   it were built with one region (without loops).  We could make it
   much simpler by rebuilding IR with one region, but unfortunately it
   takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.

   The function proceeds in phases:
   1. Fix final allocno attributes: for every pseudo, pick the allocno
      that survives as the top-level representative, subtract the
      statistics of removed subtree allocnos from their ancestors, and
      merge live ranges upward where the pseudo was not renamed.
   2. If new pseudos appeared, rebuild the conflict information from
      the (merged) live ranges.
   3. Retarget or mark-for-removal all copies, remove dead allocnos,
      remove dead copies, and rebuild the regno -> allocno maps.  */
void
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
{
  int i, j, num;
  bool propagate_p, stop_p, keep_p;
  int hard_regs_num;
  bool new_pseudos_p, merged_p, mem_dest_p;
  unsigned int n;
  enum reg_class cover_class;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_copy_t cp;
  ira_loop_tree_node_t parent, node;
  allocno_live_range_t r;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;
  sparseset allocnos_live;
  /* Per-allocno flag: total conflict info was already initialized
     from the non-total info during this pass.  */
  bool *allocno_propagated_p;

  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
	  max_reg_num () * sizeof (ira_allocno_t));
  allocno_propagated_p
    = (bool *) ira_allocate (ira_allocnos_num * sizeof (bool));
  memset (allocno_propagated_p, 0, ira_allocnos_num * sizeof (bool));
  new_pseudos_p = merged_p = false;
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    {
      mem_dest_p = propagate_p = false;
      for (a = ira_regno_allocno_map[i];
	   a != NULL;
	   a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
	{
	  ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	  if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
	    new_pseudos_p = true;
	  /* An allocno with a cap, or without a parent allocno for
	     the same regno, becomes the top-level representative of
	     its pseudo.  */
	  if (ALLOCNO_CAP (a) != NULL
	      || (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL
	      || ((parent_a = parent->regno_allocno_map[ALLOCNO_REGNO (a)])
		  == NULL))
	    {
	      ALLOCNO_COPIES (a) = NULL;
	      regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
	      continue;
	    }
	  ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
	  if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
	    mem_dest_p = true;
	  if (propagate_p)
	    {
	      /* Lazily initialize the parent's total conflict info
		 from its own conflict info before accumulating.  */
	      if (!allocno_propagated_p [ALLOCNO_NUM (parent_a)])
		COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
				   ALLOCNO_CONFLICT_HARD_REGS (parent_a));
	      IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
				ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
#ifdef STACK_REGS
	      if (!allocno_propagated_p [ALLOCNO_NUM (parent_a)])
		ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a)
		  = ALLOCNO_NO_STACK_REG_P (parent_a);
	      ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a)
		= (ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a)
		   || ALLOCNO_TOTAL_NO_STACK_REG_P (a));
#endif
	      allocno_propagated_p [ALLOCNO_NUM (parent_a)] = true;
	    }
	  if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
	    {
	      /* Same pseudo in child and parent: the child allocno
		 will go away, so move its live ranges up.  */
	      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
		{
		  fprintf (ira_dump_file,
			   "      Moving ranges of a%dr%d to a%dr%d: ",
			   ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)),
			   ALLOCNO_NUM (parent_a),
			   REGNO (ALLOCNO_REG (parent_a)));
		  ira_print_live_range_list (ira_dump_file,
					     ALLOCNO_LIVE_RANGES (a));
		}
	      change_allocno_in_range_list (ALLOCNO_LIVE_RANGES (a), parent_a);
	      ALLOCNO_LIVE_RANGES (parent_a)
		= merge_ranges (ALLOCNO_LIVE_RANGES (a),
				ALLOCNO_LIVE_RANGES (parent_a));
	      merged_p = true;
	      ALLOCNO_LIVE_RANGES (a) = NULL;
	      ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
		= (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
		   || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
	      continue;
	    }
	  /* Different pseudo: A survives on its own; subtract its
	     statistics from every ancestor so ancestor costs describe
	     only the code outside A's subtree.  */
	  new_pseudos_p = true;
	  propagate_p = true;
	  first = ALLOCNO_MEM_OPTIMIZED_DEST (a) == NULL ? NULL : a;
	  stop_p = false;
	  for (;;)
	    {
	      if (first == NULL
		  && ALLOCNO_MEM_OPTIMIZED_DEST (parent_a) != NULL)
		first = parent_a;
	      ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
	      ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
	      /* Stop subtracting call-related attributes once we
		 reach the memory-optimized destination of FIRST
		 (presumably because those attributes still apply
		 there -- NOTE(review): confirm against ira-emit.c).  */
	      if (first != NULL
		  && ALLOCNO_MEM_OPTIMIZED_DEST (first) == parent_a)
		stop_p = true;
	      else if (!stop_p)
		{
		  ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
		  ALLOCNO_CALLS_CROSSED_NUM (parent_a)
		    -= ALLOCNO_CALLS_CROSSED_NUM (a);
		  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
		    -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
		}
	      ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
			  && ALLOCNO_NREFS (parent_a) >= 0
			  && ALLOCNO_FREQ (parent_a) >= 0);
	      cover_class = ALLOCNO_COVER_CLASS (parent_a);
	      hard_regs_num = ira_class_hard_regs_num[cover_class];
	      if (ALLOCNO_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_HARD_REG_COSTS (a)[j];
	      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
	      ALLOCNO_COVER_CLASS_COST (parent_a)
		-= ALLOCNO_COVER_CLASS_COST (a);
	      ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
	      if (ALLOCNO_CAP (parent_a) != NULL
		  || (parent
		      = ALLOCNO_LOOP_TREE_NODE (parent_a)->parent) == NULL
		  || (parent_a = (parent->regno_allocno_map
				  [ALLOCNO_REGNO (parent_a)])) == NULL)
		break;
	    }
	  ALLOCNO_COPIES (a) = NULL;
	  regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
	}
      if (mem_dest_p && copy_live_ranges_to_removed_store_destinations (i))
	merged_p = true;
    }
  ira_free (allocno_propagated_p);
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  if (new_pseudos_p)
    {
      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
	{
	  if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
	      || ALLOCNO_CAP_MEMBER (a) != NULL)
	    continue;
	  for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
	    ira_assert (r->allocno == a);
	  clear_allocno_conflicts (a);
	}
      /* Sweep over all program points; allocnos live at the same
	 point with the same cover class conflict.  */
      allocnos_live = sparseset_alloc (ira_allocnos_num);
      for (i = 0; i < ira_max_point; i++)
	{
	  for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
	    {
	      a = r->allocno;
	      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
		  || ALLOCNO_CAP_MEMBER (a) != NULL)
		continue;
	      num = ALLOCNO_NUM (a);
	      cover_class = ALLOCNO_COVER_CLASS (a);
	      sparseset_set_bit (allocnos_live, num);
	      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, n)
		{
		  ira_allocno_t live_a = ira_allocnos[n];

		  if (cover_class == ALLOCNO_COVER_CLASS (live_a)
		      /* Don't set up conflict for the allocno with itself.  */
		      && num != (int) n)
		    ira_add_allocno_conflict (a, live_a);
		}
	    }

	  for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
	    sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (r->allocno));
	}
      sparseset_free (allocnos_live);
      compress_conflict_vecs ();
    }
  /* Mark some copies for removing and change allocnos in the rest
     copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
	  || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
	{
	  /* Copies involving caps are always removed.  */
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf
	      (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
	       cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
	       ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
	  cp->loop_tree_node = NULL;
	  continue;
	}
      first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
      second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
      node = cp->loop_tree_node;
      if (node == NULL)
	keep_p = true; /* The copy was generated in ira-emit.c.  */
      else
	{
	  /* Check that the copy was not propagated from a level on
	     which we will have different pseudos.  */
	  node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
	  node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
	  keep_p = ((REGNO (ALLOCNO_REG (first))
		     == REGNO (ALLOCNO_REG (node_first)))
		    && (REGNO (ALLOCNO_REG (second))
			== REGNO (ALLOCNO_REG (node_second))));
	}
      if (keep_p)
	{
	  /* Retarget the copy at the surviving top-level allocnos.  */
	  cp->loop_tree_node = ira_loop_tree_root;
	  cp->first = first;
	  cp->second = second;
	}
      else
	{
	  cp->loop_tree_node = NULL;
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
		     cp->num, ALLOCNO_NUM (cp->first),
		     REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
		     REGNO (ALLOCNO_REG (cp->second)));
	}
    }
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
	  || ALLOCNO_CAP_MEMBER (a) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove a%dr%d\n",
		     ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
	  finish_allocno (a);
	  continue;
	}
      /* Reattach the surviving allocno to the root node.  */
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
      ALLOCNO_CAP (a) = NULL;
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
	ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
    }
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (cp->loop_tree_node == NULL)
	{
	  ira_copies[cp->num] = NULL;
	  finish_copy (cp);
	  continue;
	}
      ira_assert
	(ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
	 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      ira_add_allocno_copy_to_list (cp);
      ira_swap_allocno_copy_ends_if_necessary (cp);
    }
  rebuild_regno_allocno_maps ();
  ira_free (regno_top_level_allocno_map);
}
2376
2377\f
2378
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      node = ALLOCNO_LOOP_TREE_NODE (a);
      /* Every allocno must be registered in its own node.  */
      ira_assert (bitmap_bit_p (node->all_allocnos, ALLOCNO_NUM (a)));
      if (node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	{
	  /* A cap member below the root must itself be capped.  */
	  ira_assert (ALLOCNO_CAP (a) != NULL);
	  continue;
	}
      if (ALLOCNO_CAP (a) != NULL)
	continue;
      /* An uncapped allocno must have a parent allocno for the same
	 regno and must live on the loop border.  */
      ira_assert (node->parent->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		  && bitmap_bit_p (node->border_allocnos,
				   ALLOCNO_NUM (a)));
    }
}
#endif
2406
2407/* Create a internal representation (IR) for IRA (allocnos, copies,
2408 loop tree nodes). If LOOPS_P is FALSE the nodes corresponding to
2409 the loops (except the root which corresponds the all function) and
2410 correspondingly allocnos for the loops will be not created. Such
2411 parameter value is used for Chaitin-Briggs coloring. The function
2412 returns TRUE if we generate loop structure (besides nodes
2413 representing all function and the basic blocks) for regional
2414 allocation. A true return means that we really need to flatten IR
2415 before the reload. */
2416bool
2417ira_build (bool loops_p)
2418{
2419 df_analyze ();
2420
2421 initiate_cost_vectors ();
2422 initiate_allocnos ();
2423 initiate_copies ();
2424 create_loop_tree_nodes (loops_p);
2425 form_loop_tree ();
2426 create_allocnos ();
2427 ira_costs ();
2428 ira_create_allocno_live_ranges ();
2429 remove_unnecessary_regions ();
2430 loops_p = more_one_region_p ();
2431 if (loops_p)
2432 {
2433 propagate_allocno_info ();
2434 create_caps ();
2435 }
2436 ira_tune_allocno_costs_and_cover_classes ();
2437#ifdef ENABLE_IRA_CHECKING
2438 check_allocno_creation ();
2439#endif
2440 setup_min_max_allocno_live_range_point ();
2441 sort_conflict_id_allocno_map ();
2442 setup_min_max_conflict_allocno_ids ();
2443 ira_build_conflicts ();
4cda38d5
VM
2444 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
2445 print_copies (ira_dump_file);
058e97ec
VM
2446 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
2447 {
2448 int n, nr;
2449 ira_allocno_t a;
2450 allocno_live_range_t r;
2451 ira_allocno_iterator ai;
2452
2453 n = 0;
2454 FOR_EACH_ALLOCNO (a, ai)
2455 n += ALLOCNO_CONFLICT_ALLOCNOS_NUM (a);
2456 nr = 0;
2457 FOR_EACH_ALLOCNO (a, ai)
2458 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2459 nr++;
2460 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
2461 VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
2462 ira_max_point);
2463 fprintf (ira_dump_file,
2464 " allocnos=%d, copies=%d, conflicts=%d, ranges=%d\n",
2465 ira_allocnos_num, ira_copies_num, n, nr);
2466 }
2467 return loops_p;
2468}
2469
/* Release the data created by function ira_build.  Frees the loop
   tree, copies, allocnos, cost vectors and live-range data in this
   order (NOTE(review): the order presumably matters because live
   ranges are referenced by allocnos -- confirm before reordering).  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}