/* Building internal representation for IRA.
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tm_p.h"
27#include "target.h"
28#include "regs.h"
29#include "flags.h"
30#include "hard-reg-set.h"
60393bbc
AM
31#include "predict.h"
32#include "vec.h"
33#include "hashtab.h"
34#include "hash-set.h"
35#include "machmode.h"
36#include "input.h"
37#include "function.h"
38#include "dominance.h"
39#include "cfg.h"
058e97ec
VM
40#include "basic-block.h"
41#include "insn-config.h"
42#include "recog.h"
718f9c0f 43#include "diagnostic-core.h"
058e97ec
VM
44#include "params.h"
45#include "df.h"
058e97ec
VM
46#include "reload.h"
47#include "sparseset.h"
48#include "ira-int.h"
5936d944 49#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
058e97ec 50
static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx_insn *,
				     ira_loop_tree_node_t);

/* The root of the loop tree corresponding to the all function.  */
ira_loop_tree_node_t ira_loop_tree_root;

/* Height of the loop tree.  */
int ira_loop_tree_height;

/* All nodes representing basic blocks are referred through the
   following array.  We can not use basic block member `aux' for this
   because it is used for insertion of insns on edges.  */
ira_loop_tree_node_t ira_bb_nodes;

/* All nodes representing loops are referred through the following
   array.  */
ira_loop_tree_node_t ira_loop_nodes;

/* And size of the ira_loop_nodes array.  */
unsigned int ira_loop_nodes_count;

/* Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  */
ira_allocno_t *ira_regno_allocno_map;

/* Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  */
ira_allocno_t *ira_allocnos;

/* Size of the previous array.  */
int ira_allocnos_num;

/* Count of conflict record structures we've created, used when creating
   a new conflict id.  */
int ira_objects_num;

/* Map a conflict id to its conflict record.  */
ira_object_t *ira_object_id_map;

/* Array of references to all allocno preferences.  The order number
   of the preference corresponds to the index in the array.  */
ira_pref_t *ira_prefs;

/* Size of the previous array.  */
int ira_prefs_num;

/* Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  */
ira_copy_t *ira_copies;

/* Size of the previous array.  */
int ira_copies_num;

 

/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;
112
2608d841
VM
/* Initialize some members in loop tree node NODE.  Use LOOP_NUM for
   the member loop_num.  Allocates the node's regno -> allocno map
   (sized to the current number of pseudos) and its four bitmaps.  */
static void
init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
{
  int max_regno = max_reg_num ();

  node->regno_allocno_map
    = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
  memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
  memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
  node->all_allocnos = ira_allocate_bitmap ();
  node->modified_regnos = ira_allocate_bitmap ();
  node->border_allocnos = ira_allocate_bitmap ();
  node->local_copies = ira_allocate_bitmap ();
  node->loop_num = loop_num;
  /* Children/subloop chains are filled in later by add_loop_to_tree
     and form_loop_tree.  */
  node->children = NULL;
  node->subloops = NULL;
}
132
133
/* The following function allocates the loop tree nodes.  If
   CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
   the root which corresponds the all function) will be not allocated
   but nodes will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (void)
{
  unsigned int i, j;
  bool skip_p;
  edge_iterator ei;
  edge e;
  vec<edge> edges;
  loop_p loop;

  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node)
		     * last_basic_block_for_fn (cfun)));
  last_basic_block_before_change = last_basic_block_for_fn (cfun);
  for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
	      sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  if (current_loops == NULL)
    {
      /* No CFG loop info: a single region covering the whole
	 function.  */
      ira_loop_nodes_count = 1;
      ira_loop_nodes = ((struct ira_loop_tree_node *)
			ira_allocate (sizeof (struct ira_loop_tree_node)));
      init_loop_tree_node (ira_loop_nodes, 0);
      return;
    }
  ira_loop_nodes_count = number_of_loops (cfun);
  ira_loop_nodes = ((struct ira_loop_tree_node *)
		    ira_allocate (sizeof (struct ira_loop_tree_node)
				  * ira_loop_nodes_count));
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
    {
      if (loop_outer (loop) != NULL)
	{
	  ira_loop_nodes[i].regno_allocno_map = NULL;
	  /* Skip (mark as a non-region) any loop entered or exited by
	     an abnormal critical edge: we could not split such edges
	     to emit fix-up insns on the region border.  */
	  skip_p = false;
	  FOR_EACH_EDGE (e, ei, loop->header->preds)
	    if (e->src != loop->latch
		&& (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  if (skip_p)
	    continue;
	  edges = get_loop_exit_edges (loop);
	  FOR_EACH_VEC_ELT (edges, j, e)
	    if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  edges.release ();
	  if (skip_p)
	    continue;
	}
      init_loop_tree_node (&ira_loop_nodes[i], loop->num);
    }
}
204
/* The function returns TRUE if there is more than one allocation
   region, i.e. some loop besides the root node got its own
   regno_allocno_map in create_loop_tree_nodes.  */
static bool
more_one_region_p (void)
{
  unsigned int i;
  loop_p loop;

  if (current_loops != NULL)
    FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
      if (ira_loop_nodes[i].regno_allocno_map != NULL
	  && ira_loop_tree_root != &ira_loop_nodes[i])
	return true;
  return false;
}
220
/* Free the loop tree node of a loop.  A node with a NULL
   regno_allocno_map was never initialized (skipped loop) and owns
   nothing.  */
static void
finish_loop_tree_node (ira_loop_tree_node_t loop)
{
  if (loop->regno_allocno_map != NULL)
    {
      ira_assert (loop->bb == NULL);
      ira_free_bitmap (loop->local_copies);
      ira_free_bitmap (loop->border_allocnos);
      ira_free_bitmap (loop->modified_regnos);
      ira_free_bitmap (loop->all_allocnos);
      ira_free (loop->regno_allocno_map);
      loop->regno_allocno_map = NULL;
    }
}
236
/* Free the loop tree nodes.  BB nodes are freed individually because,
   unlike loop nodes, their members are allocated lazily and may be
   NULL.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;

  for (i = 0; i < ira_loop_nodes_count; i++)
    finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
	ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
	ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
261
262\f
263
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  If LOOP is NULL we are adding
   the loop designating the whole function when CFG loops are not
   built.  */
static void
add_loop_to_tree (struct loop *loop)
{
  int loop_num;
  struct loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We can not use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  if (loop != NULL && loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  loop_num = loop != NULL ? loop->num : 0;
  /* children == NULL distinguishes a node not yet linked into the
     tree; regno_allocno_map != NULL means the loop forms a region.  */
  if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
      && ira_loop_nodes[loop_num].children == NULL)
    {
      /* We have not added loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop_num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      if (loop == NULL)
	parent = NULL;
      else
	{
	  /* Find the nearest enclosing loop that is itself a region
	     (skipped loops have a NULL map).  */
	  for (parent = loop_outer (loop);
	       parent != NULL;
	       parent = loop_outer (parent))
	    if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	      break;
	}
      if (parent == NULL)
	{
	  loop_node->next = NULL;
	  loop_node->subloop_next = NULL;
	  loop_node->parent = NULL;
	}
      else
	{
	  parent_node = &ira_loop_nodes[parent->num];
	  loop_node->next = parent_node->children;
	  parent_node->children = loop_node;
	  loop_node->subloop_next = parent_node->subloops;
	  parent_node->subloops = loop_node;
	  loop_node->parent = parent_node;
	}
    }
}
315
316/* The following recursive function sets up levels of nodes of the
317 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
318 The function returns maximal value of level in the tree + 1. */
319static int
320setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
321{
322 int height, max_height;
323 ira_loop_tree_node_t subloop_node;
324
325 ira_assert (loop_node->bb == NULL);
326 loop_node->level = level;
327 max_height = level + 1;
328 for (subloop_node = loop_node->subloops;
329 subloop_node != NULL;
330 subloop_node = subloop_node->subloop_next)
331 {
332 ira_assert (subloop_node->bb == NULL);
333 height = setup_loop_tree_level (subloop_node, level + 1);
334 if (height > max_height)
335 max_height = height;
336 }
337 return max_height;
338}
339
/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  basic_block bb;
  struct loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;

  /* We can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      if (current_loops == NULL)
	parent = NULL;
      else
	{
	  /* Find the innermost enclosing loop that is a region.  */
	  for (parent = bb->loop_father;
	       parent != NULL;
	       parent = loop_outer (parent))
	    if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	      break;
	}
      add_loop_to_tree (parent);
      /* Link the BB node onto its region's child chain.  */
      loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
382
383\f
384
/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  Needed after max_reg_num may have grown: all maps are
   reallocated to the new pseudo count and repopulated from the
   allocno list.  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  ira_assert (current_loops != NULL);
  max_regno = max_reg_num ();
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), l, loop)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
	ira_free (ira_loop_nodes[l].regno_allocno_map);
	ira_loop_nodes[l].regno_allocno_map
	  = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
					    * max_regno);
	memset (ira_loop_nodes[l].regno_allocno_map, 0,
		sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps.  */
	continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle.  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
}
058e97ec
VM
428\f
429
a49ae217
BS
/* Pools for allocnos, allocno live ranges and objects.  */
static alloc_pool allocno_pool, live_range_pool, object_pool;

/* Vec containing references to all created allocnos.  It is a
   container of array allocnos.  */
static vec<ira_allocno_t> allocno_vec;

/* Vec containing references to all created ira_objects.  It is a
   container of ira_object_id_map.  */
static vec<ira_object_t> ira_object_id_map_vec;
058e97ec
VM
440
/* Initialize data concerning allocnos: create the three allocation
   pools, the two growable vectors (pre-sized to twice the current
   pseudo count) and the zeroed regno -> allocno map.  */
static void
initiate_allocnos (void)
{
  live_range_pool
    = create_alloc_pool ("live ranges",
			 sizeof (struct live_range), 100);
  allocno_pool
    = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
  object_pool
    = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
  allocno_vec.create (max_reg_num () * 2);
  ira_allocnos = NULL;
  ira_allocnos_num = 0;
  ira_objects_num = 0;
  ira_object_id_map_vec.create (max_reg_num () * 2);
  ira_object_id_map = NULL;
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
				      * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
}
463
a49ae217
BS
/* Create and return an object corresponding to a new allocno A.
   SUBWORD is the word index for multi-object allocnos (0 for the
   first/only object).  The object starts with no conflicts; its
   conflict hard regs are seeded with the non-allocatable registers
   plus everything outside A's allocno class.  */
static ira_object_t
ira_create_object (ira_allocno_t a, int subword)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj = (ira_object_t) pool_alloc (object_pool);

  OBJECT_ALLOCNO (obj) = a;
  OBJECT_SUBWORD (obj) = subword;
  OBJECT_CONFLICT_ID (obj) = ira_objects_num;
  OBJECT_CONFLICT_VEC_P (obj) = false;
  OBJECT_CONFLICT_ARRAY (obj) = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
  COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
  IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
			  reg_class_contents[aclass]);
  IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
			  reg_class_contents[aclass]);
  /* MIN > MAX marks an empty conflict-id range.  */
  OBJECT_MIN (obj) = INT_MAX;
  OBJECT_MAX (obj) = -1;
  OBJECT_LIVE_RANGES (obj) = NULL;

  ira_object_id_map_vec.safe_push (obj);
  /* The vec may have been reallocated by the push; refresh the raw
     pointer and count exported through ira_object_id_map.  */
  ira_object_id_map
    = ira_object_id_map_vec.address ();
  ira_objects_num = ira_object_id_map_vec.length ();

  return obj;
}
494
058e97ec
VM
/* Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  All cost/flag members start zeroed;
   the allocno class is NO_REGS until ira_set_allocno_class runs.  */
ira_allocno_t
ira_create_allocno (int regno, bool cap_p,
		    ira_loop_tree_node_t loop_tree_node)
{
  ira_allocno_t a;

  a = (ira_allocno_t) pool_alloc (allocno_pool);
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle on region borders (see
	   ira-emit.c).  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = ira_allocnos_num;
  bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
  ALLOCNO_NREFS (a) = 0;
  ALLOCNO_FREQ (a) = 0;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
  CLEAR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = false;
  ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
#endif
  ALLOCNO_DONT_REASSIGN_P (a) = false;
  ALLOCNO_BAD_SPILL_P (a) = false;
  ALLOCNO_ASSIGNED_P (a) = false;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_WMODE (a) = ALLOCNO_MODE (a);
  ALLOCNO_PREFS (a) = NULL;
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CLASS (a) = NO_REGS;
  ALLOCNO_UPDATED_CLASS_COST (a) = 0;
  ALLOCNO_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
  ALLOCNO_NUM_OBJECTS (a) = 0;

  ALLOCNO_ADD_DATA (a) = NULL;
  allocno_vec.safe_push (a);
  /* Refresh the exported raw array and count after a possible
     reallocation by the push.  */
  ira_allocnos = allocno_vec.address ();
  ira_allocnos_num = allocno_vec.length ();

  return a;
}
558
1756cb66
VM
/* Set up register class for A and update its conflict hard
   registers: every register outside ACLASS becomes a conflict for all
   of A's objects.  */
void
ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
{
  ira_allocno_object_iterator oi;
  ira_object_t obj;

  ALLOCNO_CLASS (a) = aclass;
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
			      reg_class_contents[aclass]);
      IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
			      reg_class_contents[aclass]);
    }
}
576
ac0ab4f7
BS
/* Determine the number of objects we should associate with allocno A
   and allocate them.  Two objects are used only for the exact case of
   a double-word value occupying two registers; everything else gets
   one object.  */
void
ira_create_allocno_objects (ira_allocno_t a)
{
  machine_mode mode = ALLOCNO_MODE (a);
  enum reg_class aclass = ALLOCNO_CLASS (a);
  int n = ira_reg_class_max_nregs[aclass][mode];
  int i;

  if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
    n = 1;

  ALLOCNO_NUM_OBJECTS (a) = n;
  for (i = 0; i < n; i++)
    ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
}
594
/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
   ALLOCNO_OBJECT structures.  This must be called after the allocno
   classes are known.  */
static void
create_allocno_objects (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    ira_create_allocno_objects (a);
}
607
ac0ab4f7
BS
/* Merge hard register conflict information for all objects associated with
   allocno FROM into the corresponding objects associated with TO.
   If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS.  */
static void
merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
			  bool total_only)
{
  int i;
  /* Objects are merged position-wise, so both allocnos must have the
     same object count.  */
  gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
  for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);

      if (!total_only)
	IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
			  OBJECT_CONFLICT_HARD_REGS (from_obj));
      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
			OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
    }
#ifdef STACK_REGS
  if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
    ALLOCNO_NO_STACK_REG_P (to) = true;
  if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
    ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
#endif
}
635
ac0ab4f7
BS
/* Update hard register conflict information for all objects associated with
   A to include the regs in SET.  */
void
ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
{
  ira_allocno_object_iterator i;
  ira_object_t obj;

  FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
    {
      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
    }
}
650
a49ae217
BS
/* Return TRUE if a conflict vector with NUM elements is more
   profitable than a conflict bit vector for OBJ.  The heuristic
   accepts the vector only when it would take less than 1.5x the
   bit-vector's storage.  */
bool
ira_conflict_vector_profitable_p (ira_object_t obj, int num)
{
  int nw;
  int max = OBJECT_MAX (obj);
  int min = OBJECT_MIN (obj);

  if (max < min)
    /* We prefer a bit vector in such case because it does not result
       in allocation.  */
    return false;

  /* Number of IRA_INT_TYPE words the bit vector would need.  */
  nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
  return (2 * sizeof (ira_object_t) * (num + 1)
	  < 3 * nw * sizeof (IRA_INT_TYPE));
}
669
a49ae217
BS
/* Allocates and initialize the conflict vector of OBJ for NUM
   conflicting objects.  The vector is kept NULL-terminated, hence the
   extra slot.  */
void
ira_allocate_conflict_vec (ira_object_t obj, int num)
{
  int size;
  ira_object_t *vec;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  num++; /* for NULL end marker  */
  size = sizeof (ira_object_t) * num;
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
  vec[0] = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = true;
}
688
/* Allocate and initialize the conflict bit vector of OBJ.  The vector
   spans conflict ids from OBJECT_MIN to OBJECT_MAX inclusive.  */
static void
allocate_conflict_bit_vec (ira_object_t obj)
{
  unsigned int size;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
	  / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = false;
}
703
704/* Allocate and initialize the conflict vector or conflict bit vector
ac0ab4f7 705 of OBJ for NUM conflicting allocnos whatever is more profitable. */
058e97ec 706void
ac0ab4f7 707ira_allocate_object_conflicts (ira_object_t obj, int num)
058e97ec 708{
ac0ab4f7
BS
709 if (ira_conflict_vector_profitable_p (obj, num))
710 ira_allocate_conflict_vec (obj, num);
058e97ec 711 else
ac0ab4f7 712 allocate_conflict_bit_vec (obj);
058e97ec
VM
713}
714
a49ae217 715/* Add OBJ2 to the conflicts of OBJ1. */
058e97ec 716static void
a49ae217 717add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
058e97ec
VM
718{
719 int num;
720 unsigned int size;
721
a49ae217 722 if (OBJECT_CONFLICT_VEC_P (obj1))
058e97ec 723 {
a49ae217
BS
724 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
725 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
726 num = curr_num + 2;
727 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
058e97ec 728 {
a49ae217 729 ira_object_t *newvec;
058e97ec 730 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
a49ae217
BS
731 newvec = (ira_object_t *) ira_allocate (size);
732 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
733 ira_free (vec);
734 vec = newvec;
735 OBJECT_CONFLICT_ARRAY (obj1) = vec;
736 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 737 }
a49ae217 738 vec[num - 2] = obj2;
058e97ec 739 vec[num - 1] = NULL;
a49ae217 740 OBJECT_NUM_CONFLICTS (obj1)++;
058e97ec
VM
741 }
742 else
743 {
744 int nw, added_head_nw, id;
a49ae217 745 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
058e97ec 746
a49ae217
BS
747 id = OBJECT_CONFLICT_ID (obj2);
748 if (OBJECT_MIN (obj1) > id)
058e97ec
VM
749 {
750 /* Expand head of the bit vector. */
a49ae217
BS
751 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
752 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 753 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
a49ae217 754 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
058e97ec
VM
755 {
756 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
757 vec, nw * sizeof (IRA_INT_TYPE));
758 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
759 }
760 else
761 {
762 size
763 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
764 vec = (IRA_INT_TYPE *) ira_allocate (size);
765 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
a49ae217 766 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
767 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
768 memset ((char *) vec
769 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
770 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
a49ae217
BS
771 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
772 OBJECT_CONFLICT_ARRAY (obj1) = vec;
773 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 774 }
a49ae217 775 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
058e97ec 776 }
a49ae217 777 else if (OBJECT_MAX (obj1) < id)
058e97ec 778 {
a49ae217 779 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 780 size = nw * sizeof (IRA_INT_TYPE);
a49ae217 781 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
058e97ec
VM
782 {
783 /* Expand tail of the bit vector. */
784 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
785 vec = (IRA_INT_TYPE *) ira_allocate (size);
a49ae217
BS
786 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
787 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
788 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
789 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
790 OBJECT_CONFLICT_ARRAY (obj1) = vec;
791 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 792 }
a49ae217 793 OBJECT_MAX (obj1) = id;
058e97ec 794 }
a49ae217 795 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
058e97ec
VM
796 }
797}
798
a49ae217
BS
/* Add OBJ1 to the conflicts of OBJ2 and vice versa, keeping the
   conflict relation symmetric.  */
static void
ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
{
  add_to_conflicts (obj1, obj2);
  add_to_conflicts (obj2, obj1);
}
806
/* Clear all conflicts of OBJ without freeing its conflict storage.  */
static void
clear_conflicts (ira_object_t obj)
{
  if (OBJECT_CONFLICT_VEC_P (obj))
    {
      /* Vector form: empty it by resetting the count and terminator.  */
      OBJECT_NUM_CONFLICTS (obj) = 0;
      OBJECT_CONFLICT_VEC (obj)[0] = NULL;
    }
  else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
    {
      int nw;

      nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
      memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
    }
}
824
/* The array used to find duplications in conflict vectors of
   allocnos.  Indexed by conflict id; compared against the tick
   below.  */
static int *conflict_check;

/* The value used to mark allocation presence in conflict vector of
   the current allocno.  */
static int curr_conflict_check_tick;
058e97ec 832
/* Remove duplications in conflict vector of OBJ.  Uses the shared
   conflict_check array with a per-call tick so the array need not be
   cleared between objects.  */
static void
compress_conflict_vec (ira_object_t obj)
{
  ira_object_t *vec, conflict_obj;
  int i, j;

  ira_assert (OBJECT_CONFLICT_VEC_P (obj));
  vec = OBJECT_CONFLICT_VEC (obj);
  curr_conflict_check_tick++;
  for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
    {
      int id = OBJECT_CONFLICT_ID (conflict_obj);
      if (conflict_check[id] != curr_conflict_check_tick)
	{
	  /* First occurrence in this vector: keep it.  */
	  conflict_check[id] = curr_conflict_check_tick;
	  vec[j++] = conflict_obj;
	}
    }
  OBJECT_NUM_CONFLICTS (obj) = j;
  vec[j] = NULL;
}
855
/* Remove duplications in conflict vectors of all allocnos.  Bit
   vectors cannot contain duplicates, so only vector-form objects are
   compressed.  */
static void
compress_conflict_vecs (void)
{
  ira_object_t obj;
  ira_object_iterator oi;

  conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  memset (conflict_check, 0, sizeof (int) * ira_objects_num);
  curr_conflict_check_tick = 0;
  FOR_EACH_OBJECT (obj, oi)
    {
      if (OBJECT_CONFLICT_VEC_P (obj))
	compress_conflict_vec (obj);
    }
  ira_free (conflict_check);
}
873
/* This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  /* A loop tree node holds either a basic block or a loop.  */
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
893
/* Create and return the cap representing allocno A in the
   parent loop.  The cap inherits A's mode, class, costs, reference
   counts and hard register conflicts.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  /* cap_p = true: caps are not entered into the regno allocno maps.  */
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  ALLOCNO_WMODE (cap) = ALLOCNO_WMODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (cap),
		    ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
938
ac0ab4f7 939/* Create and return a live range for OBJECT with given attributes. */
b14151b5 940live_range_t
9140d27b
BS
941ira_create_live_range (ira_object_t obj, int start, int finish,
942 live_range_t next)
058e97ec 943{
b14151b5 944 live_range_t p;
058e97ec 945
b14151b5 946 p = (live_range_t) pool_alloc (live_range_pool);
9140d27b 947 p->object = obj;
058e97ec
VM
948 p->start = start;
949 p->finish = finish;
950 p->next = next;
951 return p;
952}
953
ac0ab4f7
BS
954/* Create a new live range for OBJECT and queue it at the head of its
955 live range list. */
956void
957ira_add_live_range_to_object (ira_object_t object, int start, int finish)
958{
959 live_range_t p;
960 p = ira_create_live_range (object, start, finish,
961 OBJECT_LIVE_RANGES (object));
962 OBJECT_LIVE_RANGES (object) = p;
963}
964
058e97ec 965/* Copy allocno live range R and return the result. */
b14151b5 966static live_range_t
9140d27b 967copy_live_range (live_range_t r)
058e97ec 968{
b14151b5 969 live_range_t p;
058e97ec 970
b14151b5 971 p = (live_range_t) pool_alloc (live_range_pool);
058e97ec
VM
972 *p = *r;
973 return p;
974}
975
976/* Copy allocno live range list given by its head R and return the
977 result. */
b14151b5 978live_range_t
9140d27b 979ira_copy_live_range_list (live_range_t r)
058e97ec 980{
b14151b5 981 live_range_t p, first, last;
058e97ec
VM
982
983 if (r == NULL)
984 return NULL;
985 for (first = last = NULL; r != NULL; r = r->next)
986 {
9140d27b 987 p = copy_live_range (r);
058e97ec
VM
988 if (first == NULL)
989 first = p;
990 else
991 last->next = p;
992 last = p;
993 }
994 return first;
995}
996
3553f0bb
VM
997/* Merge ranges R1 and R2 and returns the result. The function
998 maintains the order of ranges and tries to minimize number of the
999 result ranges. */
b14151b5 1000live_range_t
9140d27b 1001ira_merge_live_ranges (live_range_t r1, live_range_t r2)
3553f0bb 1002{
b14151b5 1003 live_range_t first, last, temp;
3553f0bb
VM
1004
1005 if (r1 == NULL)
1006 return r2;
1007 if (r2 == NULL)
1008 return r1;
1009 for (first = last = NULL; r1 != NULL && r2 != NULL;)
1010 {
1011 if (r1->start < r2->start)
1012 {
1013 temp = r1;
1014 r1 = r2;
1015 r2 = temp;
1016 }
1017 if (r1->start <= r2->finish + 1)
1018 {
1019 /* Intersected ranges: merge r1 and r2 into r1. */
1020 r1->start = r2->start;
1021 if (r1->finish < r2->finish)
1022 r1->finish = r2->finish;
1023 temp = r2;
1024 r2 = r2->next;
9140d27b 1025 ira_finish_live_range (temp);
3553f0bb
VM
1026 if (r2 == NULL)
1027 {
1028 /* To try to merge with subsequent ranges in r1. */
1029 r2 = r1->next;
1030 r1->next = NULL;
1031 }
1032 }
1033 else
1034 {
1035 /* Add r1 to the result. */
1036 if (first == NULL)
1037 first = last = r1;
1038 else
1039 {
1040 last->next = r1;
1041 last = r1;
1042 }
1043 r1 = r1->next;
1044 if (r1 == NULL)
1045 {
1046 /* To try to merge with subsequent ranges in r2. */
1047 r1 = r2->next;
1048 r2->next = NULL;
1049 }
1050 }
1051 }
1052 if (r1 != NULL)
1053 {
1054 if (first == NULL)
1055 first = r1;
1056 else
1057 last->next = r1;
1058 ira_assert (r1->next == NULL);
1059 }
1060 else if (r2 != NULL)
1061 {
1062 if (first == NULL)
1063 first = r2;
1064 else
1065 last->next = r2;
1066 ira_assert (r2->next == NULL);
1067 }
1068 else
1069 {
1070 ira_assert (last->next == NULL);
1071 }
1072 return first;
1073}
1074
1075/* Return TRUE if live ranges R1 and R2 intersect. */
1076bool
9140d27b 1077ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
3553f0bb
VM
1078{
1079 /* Remember the live ranges are always kept ordered. */
1080 while (r1 != NULL && r2 != NULL)
1081 {
1082 if (r1->start > r2->finish)
1083 r1 = r1->next;
1084 else if (r2->start > r1->finish)
1085 r2 = r2->next;
1086 else
1087 return true;
1088 }
1089 return false;
1090}
1091
058e97ec
VM
1092/* Free allocno live range R. */
1093void
9140d27b 1094ira_finish_live_range (live_range_t r)
058e97ec 1095{
b14151b5 1096 pool_free (live_range_pool, r);
058e97ec
VM
1097}
1098
3553f0bb
VM
1099/* Free list of allocno live ranges starting with R. */
1100void
9140d27b 1101ira_finish_live_range_list (live_range_t r)
3553f0bb 1102{
b14151b5 1103 live_range_t next_r;
3553f0bb
VM
1104
1105 for (; r != NULL; r = next_r)
1106 {
1107 next_r = r->next;
9140d27b 1108 ira_finish_live_range (r);
3553f0bb
VM
1109 }
1110}
1111
058e97ec
VM
1112/* Free updated register costs of allocno A. */
1113void
1114ira_free_allocno_updated_costs (ira_allocno_t a)
1115{
1756cb66 1116 enum reg_class aclass;
058e97ec 1117
1756cb66 1118 aclass = ALLOCNO_CLASS (a);
058e97ec 1119 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1120 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1121 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1122 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1123 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66 1124 aclass);
058e97ec
VM
1125 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1126}
1127
1756cb66
VM
1128/* Free and nullify all cost vectors allocated earlier for allocno
1129 A. */
058e97ec 1130static void
1756cb66 1131ira_free_allocno_costs (ira_allocno_t a)
058e97ec 1132{
1756cb66 1133 enum reg_class aclass = ALLOCNO_CLASS (a);
ac0ab4f7
BS
1134 ira_object_t obj;
1135 ira_allocno_object_iterator oi;
058e97ec 1136
ac0ab4f7
BS
1137 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
1138 {
1139 ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
1140 ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
1141 if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
1142 ira_free (OBJECT_CONFLICT_ARRAY (obj));
1143 pool_free (object_pool, obj);
1144 }
9140d27b 1145
058e97ec 1146 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
058e97ec 1147 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1756cb66 1148 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
058e97ec 1149 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1756cb66 1150 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
058e97ec 1151 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1152 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1153 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1154 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66
VM
1155 aclass);
1156 ALLOCNO_HARD_REG_COSTS (a) = NULL;
1157 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
1158 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1159 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1160}
1161
1162/* Free the memory allocated for allocno A. */
1163static void
1164finish_allocno (ira_allocno_t a)
1165{
1166 ira_free_allocno_costs (a);
058e97ec
VM
1167 pool_free (allocno_pool, a);
1168}
1169
1170/* Free the memory allocated for all allocnos. */
1171static void
1172finish_allocnos (void)
1173{
1174 ira_allocno_t a;
1175 ira_allocno_iterator ai;
1176
1177 FOR_EACH_ALLOCNO (a, ai)
1178 finish_allocno (a);
1179 ira_free (ira_regno_allocno_map);
9771b263
DN
1180 ira_object_id_map_vec.release ();
1181 allocno_vec.release ();
058e97ec 1182 free_alloc_pool (allocno_pool);
a49ae217 1183 free_alloc_pool (object_pool);
b14151b5 1184 free_alloc_pool (live_range_pool);
058e97ec
VM
1185}
1186
1187\f
1188
3b6d1699
VM
1189/* Pools for allocno preferences. */
1190static alloc_pool pref_pool;
1191
1192/* Vec containing references to all created preferences. It is a
1193 container of array ira_prefs. */
1194static vec<ira_pref_t> pref_vec;
1195
1196/* The function initializes data concerning allocno prefs. */
1197static void
1198initiate_prefs (void)
1199{
1200 pref_pool
1201 = create_alloc_pool ("prefs", sizeof (struct ira_allocno_pref), 100);
1202 pref_vec.create (get_max_uid ());
1203 ira_prefs = NULL;
1204 ira_prefs_num = 0;
1205}
1206
1207/* Return pref for A and HARD_REGNO if any. */
1208static ira_pref_t
1209find_allocno_pref (ira_allocno_t a, int hard_regno)
1210{
1211 ira_pref_t pref;
1212
1213 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1214 if (pref->allocno == a && pref->hard_regno == hard_regno)
1215 return pref;
1216 return NULL;
1217}
1218
1219/* Create and return pref with given attributes A, HARD_REGNO, and FREQ. */
1220ira_pref_t
1221ira_create_pref (ira_allocno_t a, int hard_regno, int freq)
1222{
1223 ira_pref_t pref;
1224
1225 pref = (ira_pref_t) pool_alloc (pref_pool);
1226 pref->num = ira_prefs_num;
1227 pref->allocno = a;
1228 pref->hard_regno = hard_regno;
1229 pref->freq = freq;
1230 pref_vec.safe_push (pref);
1231 ira_prefs = pref_vec.address ();
1232 ira_prefs_num = pref_vec.length ();
1233 return pref;
1234}
1235
df3e3493 1236/* Attach a pref PREF to the corresponding allocno. */
3b6d1699
VM
1237static void
1238add_allocno_pref_to_list (ira_pref_t pref)
1239{
1240 ira_allocno_t a = pref->allocno;
1241
1242 pref->next_pref = ALLOCNO_PREFS (a);
1243 ALLOCNO_PREFS (a) = pref;
1244}
1245
1246/* Create (or update frequency if the pref already exists) the pref of
1247 allocnos A preferring HARD_REGNO with frequency FREQ. */
1248void
1249ira_add_allocno_pref (ira_allocno_t a, int hard_regno, int freq)
1250{
1251 ira_pref_t pref;
1252
1253 if (freq <= 0)
1254 return;
1255 if ((pref = find_allocno_pref (a, hard_regno)) != NULL)
1256 {
1257 pref->freq += freq;
1258 return;
1259 }
1260 pref = ira_create_pref (a, hard_regno, freq);
1261 ira_assert (a != NULL);
1262 add_allocno_pref_to_list (pref);
1263}
1264
1265/* Print info about PREF into file F. */
1266static void
1267print_pref (FILE *f, ira_pref_t pref)
1268{
1269 fprintf (f, " pref%d:a%d(r%d)<-hr%d@%d\n", pref->num,
1270 ALLOCNO_NUM (pref->allocno), ALLOCNO_REGNO (pref->allocno),
1271 pref->hard_regno, pref->freq);
1272}
1273
1274/* Print info about PREF into stderr. */
1275void
1276ira_debug_pref (ira_pref_t pref)
1277{
1278 print_pref (stderr, pref);
1279}
1280
1281/* Print info about all prefs into file F. */
1282static void
1283print_prefs (FILE *f)
1284{
1285 ira_pref_t pref;
1286 ira_pref_iterator pi;
1287
1288 FOR_EACH_PREF (pref, pi)
1289 print_pref (f, pref);
1290}
1291
1292/* Print info about all prefs into stderr. */
1293void
1294ira_debug_prefs (void)
1295{
1296 print_prefs (stderr);
1297}
1298
1299/* Print info about prefs involving allocno A into file F. */
1300static void
1301print_allocno_prefs (FILE *f, ira_allocno_t a)
1302{
1303 ira_pref_t pref;
1304
1305 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1306 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1307 fprintf (f, " pref%d:hr%d@%d", pref->num, pref->hard_regno, pref->freq);
1308 fprintf (f, "\n");
1309}
1310
1311/* Print info about prefs involving allocno A into stderr. */
1312void
1313ira_debug_allocno_prefs (ira_allocno_t a)
1314{
1315 print_allocno_prefs (stderr, a);
1316}
1317
1318/* The function frees memory allocated for PREF. */
1319static void
1320finish_pref (ira_pref_t pref)
1321{
1322 ira_prefs[pref->num] = NULL;
1323 pool_free (pref_pool, pref);
1324}
1325
1326/* Remove PREF from the list of allocno prefs and free memory for
1327 it. */
1328void
1329ira_remove_pref (ira_pref_t pref)
1330{
1331 ira_pref_t cpref, prev;
1332
1333 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1334 fprintf (ira_dump_file, " Removing pref%d:hr%d@%d\n",
1335 pref->num, pref->hard_regno, pref->freq);
1336 for (prev = NULL, cpref = ALLOCNO_PREFS (pref->allocno);
1337 cpref != NULL;
1338 prev = cpref, cpref = cpref->next_pref)
1339 if (cpref == pref)
1340 break;
1341 ira_assert (cpref != NULL);
1342 if (prev == NULL)
1343 ALLOCNO_PREFS (pref->allocno) = pref->next_pref;
1344 else
1345 prev->next_pref = pref->next_pref;
1346 finish_pref (pref);
1347}
1348
1349/* Remove all prefs of allocno A. */
1350void
1351ira_remove_allocno_prefs (ira_allocno_t a)
1352{
1353 ira_pref_t pref, next_pref;
1354
1355 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = next_pref)
1356 {
1357 next_pref = pref->next_pref;
1358 finish_pref (pref);
1359 }
1360 ALLOCNO_PREFS (a) = NULL;
1361}
1362
1363/* Free memory allocated for all prefs. */
1364static void
1365finish_prefs (void)
1366{
1367 ira_pref_t pref;
1368 ira_pref_iterator pi;
1369
1370 FOR_EACH_PREF (pref, pi)
1371 finish_pref (pref);
1372 pref_vec.release ();
1373 free_alloc_pool (pref_pool);
1374}
1375
1376\f
1377
058e97ec
VM
1378/* Pools for copies. */
1379static alloc_pool copy_pool;
1380
1381/* Vec containing references to all created copies. It is a
1382 container of array ira_copies. */
9771b263 1383static vec<ira_copy_t> copy_vec;
058e97ec
VM
1384
1385/* The function initializes data concerning allocno copies. */
1386static void
1387initiate_copies (void)
1388{
1389 copy_pool
1390 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
9771b263 1391 copy_vec.create (get_max_uid ());
058e97ec
VM
1392 ira_copies = NULL;
1393 ira_copies_num = 0;
1394}
1395
1396/* Return copy connecting A1 and A2 and originated from INSN of
1397 LOOP_TREE_NODE if any. */
1398static ira_copy_t
070a1983 1399find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx_insn *insn,
058e97ec
VM
1400 ira_loop_tree_node_t loop_tree_node)
1401{
1402 ira_copy_t cp, next_cp;
1403 ira_allocno_t another_a;
1404
1405 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1406 {
1407 if (cp->first == a1)
1408 {
1409 next_cp = cp->next_first_allocno_copy;
1410 another_a = cp->second;
1411 }
1412 else if (cp->second == a1)
1413 {
1414 next_cp = cp->next_second_allocno_copy;
1415 another_a = cp->first;
1416 }
1417 else
1418 gcc_unreachable ();
1419 if (another_a == a2 && cp->insn == insn
1420 && cp->loop_tree_node == loop_tree_node)
1421 return cp;
1422 }
1423 return NULL;
1424}
1425
1426/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
548a6322 1427 SECOND, FREQ, CONSTRAINT_P, and INSN. */
058e97ec 1428ira_copy_t
548a6322 1429ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
070a1983 1430 bool constraint_p, rtx_insn *insn,
058e97ec
VM
1431 ira_loop_tree_node_t loop_tree_node)
1432{
1433 ira_copy_t cp;
1434
1435 cp = (ira_copy_t) pool_alloc (copy_pool);
1436 cp->num = ira_copies_num;
1437 cp->first = first;
1438 cp->second = second;
1439 cp->freq = freq;
548a6322 1440 cp->constraint_p = constraint_p;
058e97ec
VM
1441 cp->insn = insn;
1442 cp->loop_tree_node = loop_tree_node;
9771b263
DN
1443 copy_vec.safe_push (cp);
1444 ira_copies = copy_vec.address ();
1445 ira_copies_num = copy_vec.length ();
058e97ec
VM
1446 return cp;
1447}
1448
1449/* Attach a copy CP to allocnos involved into the copy. */
3b6d1699
VM
1450static void
1451add_allocno_copy_to_list (ira_copy_t cp)
058e97ec
VM
1452{
1453 ira_allocno_t first = cp->first, second = cp->second;
1454
1455 cp->prev_first_allocno_copy = NULL;
1456 cp->prev_second_allocno_copy = NULL;
1457 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1458 if (cp->next_first_allocno_copy != NULL)
1459 {
1460 if (cp->next_first_allocno_copy->first == first)
1461 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1462 else
1463 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
1464 }
1465 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1466 if (cp->next_second_allocno_copy != NULL)
1467 {
1468 if (cp->next_second_allocno_copy->second == second)
1469 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1470 else
1471 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1472 }
1473 ALLOCNO_COPIES (first) = cp;
1474 ALLOCNO_COPIES (second) = cp;
1475}
1476
058e97ec
VM
1477/* Make a copy CP a canonical copy where number of the
1478 first allocno is less than the second one. */
3b6d1699
VM
1479static void
1480swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
058e97ec
VM
1481{
1482 ira_allocno_t temp;
1483 ira_copy_t temp_cp;
1484
1485 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1486 return;
1487
1488 temp = cp->first;
1489 cp->first = cp->second;
1490 cp->second = temp;
1491
1492 temp_cp = cp->prev_first_allocno_copy;
1493 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1494 cp->prev_second_allocno_copy = temp_cp;
1495
1496 temp_cp = cp->next_first_allocno_copy;
1497 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1498 cp->next_second_allocno_copy = temp_cp;
1499}
1500
1501/* Create (or update frequency if the copy already exists) and return
1502 the copy of allocnos FIRST and SECOND with frequency FREQ
1503 corresponding to move insn INSN (if any) and originated from
1504 LOOP_TREE_NODE. */
1505ira_copy_t
1506ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
070a1983 1507 bool constraint_p, rtx_insn *insn,
548a6322 1508 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
1509{
1510 ira_copy_t cp;
1511
1512 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1513 {
1514 cp->freq += freq;
1515 return cp;
1516 }
548a6322
VM
1517 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1518 loop_tree_node);
058e97ec 1519 ira_assert (first != NULL && second != NULL);
3b6d1699
VM
1520 add_allocno_copy_to_list (cp);
1521 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
1522 return cp;
1523}
1524
4cda38d5
VM
1525/* Print info about copy CP into file F. */
1526static void
1527print_copy (FILE *f, ira_copy_t cp)
1528{
548a6322 1529 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
4cda38d5 1530 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
548a6322
VM
1531 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1532 cp->insn != NULL
1533 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
4cda38d5
VM
1534}
1535
7b3b6ae4
LC
1536DEBUG_FUNCTION void
1537debug (ira_allocno_copy &ref)
1538{
1539 print_copy (stderr, &ref);
1540}
1541
1542DEBUG_FUNCTION void
1543debug (ira_allocno_copy *ptr)
1544{
1545 if (ptr)
1546 debug (*ptr);
1547 else
1548 fprintf (stderr, "<nil>\n");
1549}
1550
4cda38d5
VM
1551/* Print info about copy CP into stderr. */
1552void
1553ira_debug_copy (ira_copy_t cp)
1554{
1555 print_copy (stderr, cp);
1556}
1557
1558/* Print info about all copies into file F. */
1559static void
1560print_copies (FILE *f)
1561{
1562 ira_copy_t cp;
1563 ira_copy_iterator ci;
1564
1565 FOR_EACH_COPY (cp, ci)
1566 print_copy (f, cp);
1567}
1568
1569/* Print info about all copies into stderr. */
1570void
1571ira_debug_copies (void)
1572{
1573 print_copies (stderr);
1574}
1575
058e97ec
VM
1576/* Print info about copies involving allocno A into file F. */
1577static void
1578print_allocno_copies (FILE *f, ira_allocno_t a)
1579{
1580 ira_allocno_t another_a;
1581 ira_copy_t cp, next_cp;
1582
1583 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1584 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1585 {
1586 if (cp->first == a)
1587 {
1588 next_cp = cp->next_first_allocno_copy;
1589 another_a = cp->second;
1590 }
1591 else if (cp->second == a)
1592 {
1593 next_cp = cp->next_second_allocno_copy;
1594 another_a = cp->first;
1595 }
1596 else
1597 gcc_unreachable ();
1598 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1599 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1600 }
1601 fprintf (f, "\n");
1602}
1603
7b3b6ae4
LC
1604DEBUG_FUNCTION void
1605debug (ira_allocno &ref)
1606{
1607 print_allocno_copies (stderr, &ref);
1608}
1609
1610DEBUG_FUNCTION void
1611debug (ira_allocno *ptr)
1612{
1613 if (ptr)
1614 debug (*ptr);
1615 else
1616 fprintf (stderr, "<nil>\n");
1617}
1618
1619
058e97ec
VM
1620/* Print info about copies involving allocno A into stderr. */
1621void
1622ira_debug_allocno_copies (ira_allocno_t a)
1623{
1624 print_allocno_copies (stderr, a);
1625}
1626
1627/* The function frees memory allocated for copy CP. */
1628static void
1629finish_copy (ira_copy_t cp)
1630{
1631 pool_free (copy_pool, cp);
1632}
1633
1634
1635/* Free memory allocated for all copies. */
1636static void
1637finish_copies (void)
1638{
1639 ira_copy_t cp;
1640 ira_copy_iterator ci;
1641
1642 FOR_EACH_COPY (cp, ci)
1643 finish_copy (cp);
9771b263 1644 copy_vec.release ();
058e97ec
VM
1645 free_alloc_pool (copy_pool);
1646}
1647
1648\f
1649
1756cb66 1650/* Pools for cost vectors. It is defined only for allocno classes. */
058e97ec
VM
1651static alloc_pool cost_vector_pool[N_REG_CLASSES];
1652
1653/* The function initiates work with hard register cost vectors. It
1756cb66 1654 creates allocation pool for each allocno class. */
058e97ec
VM
1655static void
1656initiate_cost_vectors (void)
1657{
1658 int i;
1756cb66 1659 enum reg_class aclass;
058e97ec 1660
1756cb66 1661 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1662 {
1756cb66
VM
1663 aclass = ira_allocno_classes[i];
1664 cost_vector_pool[aclass]
058e97ec 1665 = create_alloc_pool ("cost vectors",
1756cb66 1666 sizeof (int) * ira_class_hard_regs_num[aclass],
058e97ec
VM
1667 100);
1668 }
1669}
1670
1756cb66 1671/* Allocate and return a cost vector VEC for ACLASS. */
058e97ec 1672int *
6f76a878 1673ira_allocate_cost_vector (reg_class_t aclass)
058e97ec 1674{
6f76a878 1675 return (int *) pool_alloc (cost_vector_pool[(int) aclass]);
058e97ec
VM
1676}
1677
1756cb66 1678/* Free a cost vector VEC for ACLASS. */
058e97ec 1679void
6f76a878 1680ira_free_cost_vector (int *vec, reg_class_t aclass)
058e97ec
VM
1681{
1682 ira_assert (vec != NULL);
6f76a878 1683 pool_free (cost_vector_pool[(int) aclass], vec);
058e97ec
VM
1684}
1685
1686/* Finish work with hard register cost vectors. Release allocation
1756cb66 1687 pool for each allocno class. */
058e97ec
VM
1688static void
1689finish_cost_vectors (void)
1690{
1691 int i;
1756cb66 1692 enum reg_class aclass;
058e97ec 1693
1756cb66 1694 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1695 {
1756cb66
VM
1696 aclass = ira_allocno_classes[i];
1697 free_alloc_pool (cost_vector_pool[aclass]);
058e97ec
VM
1698 }
1699}
1700
1701\f
1702
e6a7da82
SB
1703/* Compute a post-ordering of the reverse control flow of the loop body
1704 designated by the children nodes of LOOP_NODE, whose body nodes in
1705 pre-order are input as LOOP_PREORDER. Return a VEC with a post-order
1706 of the reverse loop body.
1707
1708 For the post-order of the reverse CFG, we visit the basic blocks in
1709 LOOP_PREORDER array in the reverse order of where they appear.
1710 This is important: We do not just want to compute a post-order of
1711 the reverse CFG, we want to make a best-guess for a visiting order that
1712 minimizes the number of chain elements per allocno live range. If the
1713 blocks would be visited in a different order, we would still compute a
1714 correct post-ordering but it would be less likely that two nodes
1715 connected by an edge in the CFG are neighbours in the topsort. */
1716
9771b263 1717static vec<ira_loop_tree_node_t>
e6a7da82 1718ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
9771b263 1719 vec<ira_loop_tree_node_t> loop_preorder)
e6a7da82 1720{
6e1aa848 1721 vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
e6a7da82
SB
1722 unsigned int n_loop_preorder;
1723
9771b263 1724 n_loop_preorder = loop_preorder.length ();
e6a7da82
SB
1725 if (n_loop_preorder != 0)
1726 {
1727 ira_loop_tree_node_t subloop_node;
1728 unsigned int i;
ef062b13 1729 auto_vec<ira_loop_tree_node_t> dfs_stack;
e6a7da82
SB
1730
1731 /* This is a bit of strange abuse of the BB_VISITED flag: We use
1732 the flag to mark blocks we still have to visit to add them to
1733 our post-order. Define an alias to avoid confusion. */
1734#define BB_TO_VISIT BB_VISITED
1735
9771b263 1736 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1737 {
1738 gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
1739 subloop_node->bb->flags |= BB_TO_VISIT;
1740 }
1741
9771b263
DN
1742 topsort_nodes.create (n_loop_preorder);
1743 dfs_stack.create (n_loop_preorder);
e6a7da82 1744
9771b263 1745 FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
e6a7da82
SB
1746 {
1747 if (! (subloop_node->bb->flags & BB_TO_VISIT))
1748 continue;
1749
1750 subloop_node->bb->flags &= ~BB_TO_VISIT;
9771b263
DN
1751 dfs_stack.quick_push (subloop_node);
1752 while (! dfs_stack.is_empty ())
e6a7da82
SB
1753 {
1754 edge e;
1755 edge_iterator ei;
1756
9771b263 1757 ira_loop_tree_node_t n = dfs_stack.last ();
e6a7da82
SB
1758 FOR_EACH_EDGE (e, ei, n->bb->preds)
1759 {
1760 ira_loop_tree_node_t pred_node;
1761 basic_block pred_bb = e->src;
1762
fefa31b5 1763 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
e6a7da82
SB
1764 continue;
1765
1766 pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
1767 if (pred_node != n
1768 && (pred_node->bb->flags & BB_TO_VISIT))
1769 {
1770 pred_node->bb->flags &= ~BB_TO_VISIT;
9771b263 1771 dfs_stack.quick_push (pred_node);
e6a7da82
SB
1772 }
1773 }
9771b263 1774 if (n == dfs_stack.last ())
e6a7da82 1775 {
9771b263
DN
1776 dfs_stack.pop ();
1777 topsort_nodes.quick_push (n);
e6a7da82
SB
1778 }
1779 }
1780 }
1781
1782#undef BB_TO_VISIT
e6a7da82
SB
1783 }
1784
9771b263 1785 gcc_assert (topsort_nodes.length () == n_loop_preorder);
e6a7da82
SB
1786 return topsort_nodes;
1787}
1788
058e97ec
VM
1789/* The current loop tree node and its regno allocno map. */
1790ira_loop_tree_node_t ira_curr_loop_tree_node;
1791ira_allocno_t *ira_curr_regno_allocno_map;
1792
1793/* This recursive function traverses loop tree with root LOOP_NODE
1794 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1795 correspondingly in preorder and postorder. The function sets up
1796 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1797 basic block nodes of LOOP_NODE is also processed (before its
e6a7da82
SB
1798 subloop nodes).
1799
1800 If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
1801 the loop are passed in the *reverse* post-order of the *reverse*
1802 CFG. This is only used by ira_create_allocno_live_ranges, which
1803 wants to visit basic blocks in this order to minimize the number
1804 of elements per live range chain.
1805 Note that the loop tree nodes are still visited in the normal,
1806 forward post-order of the loop tree. */
1807
058e97ec
VM
1808void
1809ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1810 void (*preorder_func) (ira_loop_tree_node_t),
1811 void (*postorder_func) (ira_loop_tree_node_t))
1812{
1813 ira_loop_tree_node_t subloop_node;
1814
1815 ira_assert (loop_node->bb == NULL);
1816 ira_curr_loop_tree_node = loop_node;
1817 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1818
1819 if (preorder_func != NULL)
1820 (*preorder_func) (loop_node);
b8698a0f 1821
058e97ec 1822 if (bb_p)
e6a7da82 1823 {
ef062b13 1824 auto_vec<ira_loop_tree_node_t> loop_preorder;
e6a7da82
SB
1825 unsigned int i;
1826
1827 /* Add all nodes to the set of nodes to visit. The IRA loop tree
1828 is set up such that nodes in the loop body appear in a pre-order
1829 of their place in the CFG. */
1830 for (subloop_node = loop_node->children;
1831 subloop_node != NULL;
1832 subloop_node = subloop_node->next)
1833 if (subloop_node->bb != NULL)
9771b263 1834 loop_preorder.safe_push (subloop_node);
e6a7da82
SB
1835
1836 if (preorder_func != NULL)
9771b263 1837 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1838 (*preorder_func) (subloop_node);
1839
1840 if (postorder_func != NULL)
058e97ec 1841 {
9771b263 1842 vec<ira_loop_tree_node_t> loop_rev_postorder =
e6a7da82 1843 ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
9771b263 1844 FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
058e97ec 1845 (*postorder_func) (subloop_node);
9771b263 1846 loop_rev_postorder.release ();
058e97ec 1847 }
e6a7da82
SB
1848 }
1849
058e97ec
VM
1850 for (subloop_node = loop_node->subloops;
1851 subloop_node != NULL;
1852 subloop_node = subloop_node->subloop_next)
1853 {
1854 ira_assert (subloop_node->bb == NULL);
1855 ira_traverse_loop_tree (bb_p, subloop_node,
1856 preorder_func, postorder_func);
1857 }
1858
1859 ira_curr_loop_tree_node = loop_node;
1860 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1861
1862 if (postorder_func != NULL)
1863 (*postorder_func) (loop_node);
1864}
1865
1866\f
1867
1868/* The basic block currently being processed. */
1869static basic_block curr_bb;
1870
1871/* This recursive function creates allocnos corresponding to
1872 pseudo-registers containing in X. True OUTPUT_P means that X is
d1bb282e 1873 an lvalue. PARENT corresponds to the parent expression of X. */
058e97ec 1874static void
d1bb282e 1875create_insn_allocnos (rtx x, rtx outer, bool output_p)
058e97ec
VM
1876{
1877 int i, j;
1878 const char *fmt;
1879 enum rtx_code code = GET_CODE (x);
1880
1881 if (code == REG)
1882 {
1883 int regno;
1884
1885 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1886 {
1887 ira_allocno_t a;
1888
1889 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
d1bb282e
DS
1890 {
1891 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1892 if (outer != NULL && GET_CODE (outer) == SUBREG)
1893 {
ef4bddc2 1894 machine_mode wmode = GET_MODE (outer);
d1bb282e
DS
1895 if (GET_MODE_SIZE (wmode) > GET_MODE_SIZE (ALLOCNO_WMODE (a)))
1896 ALLOCNO_WMODE (a) = wmode;
1897 }
1898 }
b8698a0f 1899
058e97ec
VM
1900 ALLOCNO_NREFS (a)++;
1901 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
058e97ec
VM
1902 if (output_p)
1903 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1904 }
1905 return;
1906 }
1907 else if (code == SET)
1908 {
d1bb282e
DS
1909 create_insn_allocnos (SET_DEST (x), NULL, true);
1910 create_insn_allocnos (SET_SRC (x), NULL, false);
058e97ec
VM
1911 return;
1912 }
1913 else if (code == CLOBBER)
1914 {
d1bb282e 1915 create_insn_allocnos (XEXP (x, 0), NULL, true);
058e97ec
VM
1916 return;
1917 }
1918 else if (code == MEM)
1919 {
d1bb282e 1920 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1921 return;
1922 }
b8698a0f 1923 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
058e97ec
VM
1924 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1925 {
d1bb282e
DS
1926 create_insn_allocnos (XEXP (x, 0), NULL, true);
1927 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1928 return;
1929 }
1930
1931 fmt = GET_RTX_FORMAT (code);
1932 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1933 {
1934 if (fmt[i] == 'e')
d1bb282e 1935 create_insn_allocnos (XEXP (x, i), x, output_p);
058e97ec
VM
1936 else if (fmt[i] == 'E')
1937 for (j = 0; j < XVECLEN (x, i); j++)
d1bb282e 1938 create_insn_allocnos (XVECEXP (x, i, j), x, output_p);
058e97ec
VM
1939 }
1940}
1941
1942/* Create allocnos corresponding to pseudo-registers living in the
1943 basic block represented by the corresponding loop tree node
1944 BB_NODE. */
1945static void
1946create_bb_allocnos (ira_loop_tree_node_t bb_node)
1947{
1948 basic_block bb;
070a1983 1949 rtx_insn *insn;
058e97ec
VM
1950 unsigned int i;
1951 bitmap_iterator bi;
1952
1953 curr_bb = bb = bb_node->bb;
1954 ira_assert (bb != NULL);
acb37d29 1955 FOR_BB_INSNS_REVERSE (bb, insn)
b5b8b0ac 1956 if (NONDEBUG_INSN_P (insn))
d1bb282e 1957 create_insn_allocnos (PATTERN (insn), NULL, false);
058e97ec
VM
1958 /* It might be a allocno living through from one subloop to
1959 another. */
bf744527 1960 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
058e97ec
VM
1961 if (ira_curr_regno_allocno_map[i] == NULL)
1962 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1963}
1964
1965/* Create allocnos corresponding to pseudo-registers living on edge E
1966 (a loop entry or exit). Also mark the allocnos as living on the
1967 loop border. */
1968static void
1969create_loop_allocnos (edge e)
1970{
1971 unsigned int i;
1972 bitmap live_in_regs, border_allocnos;
1973 bitmap_iterator bi;
1974 ira_loop_tree_node_t parent;
1975
bf744527 1976 live_in_regs = df_get_live_in (e->dest);
058e97ec 1977 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
bf744527 1978 EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
058e97ec
VM
1979 FIRST_PSEUDO_REGISTER, i, bi)
1980 if (bitmap_bit_p (live_in_regs, i))
1981 {
1982 if (ira_curr_regno_allocno_map[i] == NULL)
1983 {
1984 /* The order of creations is important for right
1985 ira_regno_allocno_map. */
1986 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1987 && parent->regno_allocno_map[i] == NULL)
1988 ira_create_allocno (i, false, parent);
1989 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1990 }
1991 bitmap_set_bit (border_allocnos,
1992 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1993 }
1994}
1995
1996/* Create allocnos corresponding to pseudo-registers living in loop
1997 represented by the corresponding loop tree node LOOP_NODE. This
1998 function is called by ira_traverse_loop_tree. */
1999static void
2000create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
2001{
2002 if (loop_node->bb != NULL)
2003 create_bb_allocnos (loop_node);
2004 else if (loop_node != ira_loop_tree_root)
2005 {
2006 int i;
2007 edge_iterator ei;
2008 edge e;
9771b263 2009 vec<edge> edges;
058e97ec 2010
2608d841 2011 ira_assert (current_loops != NULL);
058e97ec
VM
2012 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
2013 if (e->src != loop_node->loop->latch)
2014 create_loop_allocnos (e);
b8698a0f 2015
058e97ec 2016 edges = get_loop_exit_edges (loop_node->loop);
9771b263 2017 FOR_EACH_VEC_ELT (edges, i, e)
058e97ec 2018 create_loop_allocnos (e);
9771b263 2019 edges.release ();
058e97ec
VM
2020 }
2021}
2022
2023/* Propagate information about allocnos modified inside the loop given
2024 by its LOOP_TREE_NODE to its parent. */
2025static void
2026propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
2027{
2028 if (loop_tree_node == ira_loop_tree_root)
2029 return;
2030 ira_assert (loop_tree_node->bb == NULL);
2031 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
2032 loop_tree_node->modified_regnos);
2033}
2034
2035/* Propagate new info about allocno A (see comments about accumulated
2036 info in allocno definition) to the corresponding allocno on upper
2037 loop tree level. So allocnos on upper levels accumulate
2038 information about the corresponding allocnos in nested regions.
2039 The new info means allocno info finally calculated in this
2040 file. */
2041static void
2042propagate_allocno_info (void)
2043{
2044 int i;
2045 ira_allocno_t a, parent_a;
2046 ira_loop_tree_node_t parent;
1756cb66 2047 enum reg_class aclass;
058e97ec 2048
7db7ed3c
VM
2049 if (flag_ira_region != IRA_REGION_ALL
2050 && flag_ira_region != IRA_REGION_MIXED)
058e97ec
VM
2051 return;
2052 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2053 for (a = ira_regno_allocno_map[i];
2054 a != NULL;
2055 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2056 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
2057 && (parent_a = parent->regno_allocno_map[i]) != NULL
2058 /* There are no caps yet at this point. So use
2059 border_allocnos to find allocnos for the propagation. */
2060 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
2061 ALLOCNO_NUM (a)))
2062 {
927425df
VM
2063 if (! ALLOCNO_BAD_SPILL_P (a))
2064 ALLOCNO_BAD_SPILL_P (parent_a) = false;
058e97ec
VM
2065 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
2066 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
2067 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3c55880a 2068 merge_hard_reg_conflicts (a, parent_a, true);
058e97ec
VM
2069 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2070 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
2071 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
2072 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
c2ba7e7a
RO
2073 IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a),
2074 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
058e97ec
VM
2075 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2076 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1756cb66
VM
2077 aclass = ALLOCNO_CLASS (a);
2078 ira_assert (aclass == ALLOCNO_CLASS (parent_a));
058e97ec 2079 ira_allocate_and_accumulate_costs
1756cb66 2080 (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
058e97ec
VM
2081 ALLOCNO_HARD_REG_COSTS (a));
2082 ira_allocate_and_accumulate_costs
2083 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1756cb66 2084 aclass,
058e97ec 2085 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1756cb66
VM
2086 ALLOCNO_CLASS_COST (parent_a)
2087 += ALLOCNO_CLASS_COST (a);
058e97ec 2088 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
058e97ec
VM
2089 }
2090}
2091
2092/* Create allocnos corresponding to pseudo-registers in the current
2093 function. Traverse the loop tree for this. */
2094static void
2095create_allocnos (void)
2096{
2097 /* We need to process BB first to correctly link allocnos by member
2098 next_regno_allocno. */
2099 ira_traverse_loop_tree (true, ira_loop_tree_root,
2100 create_loop_tree_node_allocnos, NULL);
2101 if (optimize)
2102 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
2103 propagate_modified_regnos);
2104}
2105
2106\f
2107
2108/* The page contains function to remove some regions from a separate
2109 register allocation. We remove regions whose separate allocation
2110 will hardly improve the result. As a result we speed up regional
2111 register allocation. */
2112
9140d27b 2113/* The function changes the object in range list given by R to OBJ. */
058e97ec 2114static void
9140d27b 2115change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
2116{
2117 for (; r != NULL; r = r->next)
9140d27b 2118 r->object = obj;
058e97ec
VM
2119}
2120
3c55880a
BS
2121/* Move all live ranges associated with allocno FROM to allocno TO. */
2122static void
2123move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2124{
ac0ab4f7
BS
2125 int i;
2126 int n = ALLOCNO_NUM_OBJECTS (from);
2127
2128 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
3c55880a 2129
ac0ab4f7 2130 for (i = 0; i < n; i++)
3c55880a 2131 {
ac0ab4f7
BS
2132 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2133 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2134 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2135
2136 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2137 {
2138 fprintf (ira_dump_file,
2139 " Moving ranges of a%dr%d to a%dr%d: ",
2140 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2141 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2142 ira_print_live_range_list (ira_dump_file, lr);
2143 }
2144 change_object_in_range_list (lr, to_obj);
2145 OBJECT_LIVE_RANGES (to_obj)
2146 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
2147 OBJECT_LIVE_RANGES (from_obj) = NULL;
3c55880a 2148 }
3c55880a
BS
2149}
2150
3c55880a
BS
2151static void
2152copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2153{
ac0ab4f7
BS
2154 int i;
2155 int n = ALLOCNO_NUM_OBJECTS (from);
3c55880a 2156
ac0ab4f7
BS
2157 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
2158
2159 for (i = 0; i < n; i++)
3c55880a 2160 {
ac0ab4f7
BS
2161 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2162 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2163 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2164
2165 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2166 {
2167 fprintf (ira_dump_file, " Copying ranges of a%dr%d to a%dr%d: ",
2168 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2169 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2170 ira_print_live_range_list (ira_dump_file, lr);
2171 }
2172 lr = ira_copy_live_range_list (lr);
2173 change_object_in_range_list (lr, to_obj);
2174 OBJECT_LIVE_RANGES (to_obj)
2175 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
3c55880a 2176 }
3c55880a
BS
2177}
2178
058e97ec
VM
2179/* Return TRUE if NODE represents a loop with low register
2180 pressure. */
2181static bool
2182low_pressure_loop_node_p (ira_loop_tree_node_t node)
2183{
2184 int i;
1756cb66 2185 enum reg_class pclass;
b8698a0f 2186
058e97ec
VM
2187 if (node->bb != NULL)
2188 return false;
b8698a0f 2189
1756cb66 2190 for (i = 0; i < ira_pressure_classes_num; i++)
058e97ec 2191 {
1756cb66 2192 pclass = ira_pressure_classes[i];
f508f827
RS
2193 if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
2194 && ira_class_hard_regs_num[pclass] > 1)
058e97ec
VM
2195 return false;
2196 }
2197 return true;
2198}
2199
30a435d8
VM
2200#ifdef STACK_REGS
2201/* Return TRUE if LOOP has a complex enter or exit edge. We don't
2202 form a region from such loop if the target use stack register
2203 because reg-stack.c can not deal with such edges. */
8c5fdaae 2204static bool
30a435d8 2205loop_with_complex_edge_p (struct loop *loop)
8c5fdaae
VM
2206{
2207 int i;
2208 edge_iterator ei;
2209 edge e;
9771b263 2210 vec<edge> edges;
f5843d08 2211 bool res;
8c5fdaae
VM
2212
2213 FOR_EACH_EDGE (e, ei, loop->header->preds)
2214 if (e->flags & EDGE_EH)
2215 return true;
2216 edges = get_loop_exit_edges (loop);
f5843d08 2217 res = false;
9771b263 2218 FOR_EACH_VEC_ELT (edges, i, e)
30a435d8 2219 if (e->flags & EDGE_COMPLEX)
f5843d08
RG
2220 {
2221 res = true;
2222 break;
2223 }
9771b263 2224 edges.release ();
f5843d08 2225 return res;
8c5fdaae 2226}
30a435d8 2227#endif
8c5fdaae 2228
30ea859e
VM
2229/* Sort loops for marking them for removal. We put already marked
2230 loops first, then less frequent loops next, and then outer loops
2231 next. */
2232static int
2233loop_compare_func (const void *v1p, const void *v2p)
2234{
2235 int diff;
2236 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
2237 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
2238
2239 ira_assert (l1->parent != NULL && l2->parent != NULL);
2240 if (l1->to_remove_p && ! l2->to_remove_p)
2241 return -1;
2242 if (! l1->to_remove_p && l2->to_remove_p)
2243 return 1;
2244 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
2245 return diff;
2246 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
2247 return diff;
2248 /* Make sorting stable. */
2608d841 2249 return l1->loop_num - l2->loop_num;
30ea859e
VM
2250}
2251
30ea859e
VM
2252/* Mark loops which should be removed from regional allocation. We
2253 remove a loop with low register pressure inside another loop with
2254 register pressure. In this case a separate allocation of the loop
2255 hardly helps (for irregular register file architecture it could
2256 help by choosing a better hard register in the loop but we prefer
2257 faster allocation even in this case). We also remove cheap loops
8c5fdaae
VM
2258 if there are more than IRA_MAX_LOOPS_NUM of them. Loop with EH
2259 exit or enter edges are removed too because the allocation might
2260 require put pseudo moves on the EH edges (we could still do this
2261 for pseudos with caller saved hard registers in some cases but it
2262 is impossible to say here or during top-down allocation pass what
2263 hard register the pseudos get finally). */
30ea859e
VM
2264static void
2265mark_loops_for_removal (void)
058e97ec 2266{
30ea859e
VM
2267 int i, n;
2268 ira_loop_tree_node_t *sorted_loops;
2269 loop_p loop;
2270
2608d841 2271 ira_assert (current_loops != NULL);
30ea859e
VM
2272 sorted_loops
2273 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
0fc822d0
RB
2274 * number_of_loops (cfun));
2275 for (n = i = 0; vec_safe_iterate (get_loops (cfun), i, &loop); i++)
30ea859e
VM
2276 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2277 {
2278 if (ira_loop_nodes[i].parent == NULL)
2279 {
2280 /* Don't remove the root. */
2281 ira_loop_nodes[i].to_remove_p = false;
2282 continue;
2283 }
2284 sorted_loops[n++] = &ira_loop_nodes[i];
2285 ira_loop_nodes[i].to_remove_p
8c5fdaae
VM
2286 = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
2287 && low_pressure_loop_node_p (&ira_loop_nodes[i]))
30a435d8
VM
2288#ifdef STACK_REGS
2289 || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
2290#endif
2291 );
30ea859e
VM
2292 }
2293 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
2294 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
2295 {
2296 sorted_loops[i]->to_remove_p = true;
2297 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2298 fprintf
2299 (ira_dump_file,
2300 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
2608d841 2301 sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
30ea859e
VM
2302 sorted_loops[i]->loop->header->frequency,
2303 loop_depth (sorted_loops[i]->loop),
2304 low_pressure_loop_node_p (sorted_loops[i]->parent)
2305 && low_pressure_loop_node_p (sorted_loops[i])
2306 ? "low pressure" : "cheap loop");
2307 }
2308 ira_free (sorted_loops);
058e97ec
VM
2309}
2310
311aab06
VM
2311/* Mark all loops but root for removing. */
2312static void
2313mark_all_loops_for_removal (void)
2314{
2315 int i;
2316 loop_p loop;
2317
2608d841 2318 ira_assert (current_loops != NULL);
0fc822d0 2319 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
311aab06
VM
2320 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2321 {
2322 if (ira_loop_nodes[i].parent == NULL)
2323 {
2324 /* Don't remove the root. */
2325 ira_loop_nodes[i].to_remove_p = false;
2326 continue;
2327 }
2328 ira_loop_nodes[i].to_remove_p = true;
2329 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2330 fprintf
2331 (ira_dump_file,
2332 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
2608d841 2333 ira_loop_nodes[i].loop_num,
311aab06
VM
2334 ira_loop_nodes[i].loop->header->index,
2335 ira_loop_nodes[i].loop->header->frequency,
2336 loop_depth (ira_loop_nodes[i].loop));
2337 }
2338}
30ea859e 2339
058e97ec 2340/* Definition of vector of loop tree nodes. */
058e97ec
VM
2341
2342/* Vec containing references to all removed loop tree nodes. */
9771b263 2343static vec<ira_loop_tree_node_t> removed_loop_vec;
058e97ec
VM
2344
2345/* Vec containing references to all children of loop tree nodes. */
9771b263 2346static vec<ira_loop_tree_node_t> children_vec;
058e97ec
VM
2347
2348/* Remove subregions of NODE if their separate allocation will not
2349 improve the result. */
2350static void
2351remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
2352{
2353 unsigned int start;
2354 bool remove_p;
2355 ira_loop_tree_node_t subnode;
2356
30ea859e 2357 remove_p = node->to_remove_p;
058e97ec 2358 if (! remove_p)
9771b263
DN
2359 children_vec.safe_push (node);
2360 start = children_vec.length ();
058e97ec
VM
2361 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
2362 if (subnode->bb == NULL)
2363 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
2364 else
9771b263 2365 children_vec.safe_push (subnode);
058e97ec
VM
2366 node->children = node->subloops = NULL;
2367 if (remove_p)
2368 {
9771b263 2369 removed_loop_vec.safe_push (node);
058e97ec
VM
2370 return;
2371 }
9771b263 2372 while (children_vec.length () > start)
058e97ec 2373 {
9771b263 2374 subnode = children_vec.pop ();
058e97ec
VM
2375 subnode->parent = node;
2376 subnode->next = node->children;
2377 node->children = subnode;
2378 if (subnode->bb == NULL)
2379 {
2380 subnode->subloop_next = node->subloops;
2381 node->subloops = subnode;
2382 }
2383 }
2384}
2385
c6bb4c93
VM
2386/* Return TRUE if NODE is inside PARENT. */
2387static bool
2388loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2389{
2390 for (node = node->parent; node != NULL; node = node->parent)
2391 if (node == parent)
2392 return true;
2393 return false;
2394}
2395
2396/* Sort allocnos according to their order in regno allocno list. */
2397static int
2398regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2399{
2400 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2401 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2402 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2403 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2404
2405 if (loop_is_inside_p (n1, n2))
2406 return -1;
2407 else if (loop_is_inside_p (n2, n1))
2408 return 1;
2409 /* If allocnos are equally good, sort by allocno numbers, so that
2410 the results of qsort leave nothing to chance. We put allocnos
2411 with higher number first in the list because it is the original
2412 order for allocnos from loops on the same levels. */
2413 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2414}
2415
2416/* This array is used to sort allocnos to restore allocno order in
2417 the regno allocno list. */
2418static ira_allocno_t *regno_allocnos;
2419
2420/* Restore allocno order for REGNO in the regno allocno list. */
2421static void
2422ira_rebuild_regno_allocno_list (int regno)
2423{
2424 int i, n;
2425 ira_allocno_t a;
2426
2427 for (n = 0, a = ira_regno_allocno_map[regno];
2428 a != NULL;
2429 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2430 regno_allocnos[n++] = a;
2431 ira_assert (n > 0);
b8698a0f 2432 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
c6bb4c93
VM
2433 regno_allocno_order_compare_func);
2434 for (i = 1; i < n; i++)
2435 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
2436 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
2437 ira_regno_allocno_map[regno] = regno_allocnos[0];
2438 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2439 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
2440}
2441
311aab06
VM
2442/* Propagate info from allocno FROM_A to allocno A. */
2443static void
2444propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
2445{
1756cb66 2446 enum reg_class aclass;
311aab06 2447
3c55880a 2448 merge_hard_reg_conflicts (from_a, a, false);
311aab06
VM
2449 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
2450 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
2451 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
311aab06 2452 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
e384e6b5
BS
2453 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
2454 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
c2ba7e7a
RO
2455 IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a),
2456 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (from_a));
2457
311aab06
VM
2458 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
2459 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
2460 if (! ALLOCNO_BAD_SPILL_P (from_a))
2461 ALLOCNO_BAD_SPILL_P (a) = false;
1756cb66
VM
2462 aclass = ALLOCNO_CLASS (from_a);
2463 ira_assert (aclass == ALLOCNO_CLASS (a));
2464 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
311aab06
VM
2465 ALLOCNO_HARD_REG_COSTS (from_a));
2466 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 2467 aclass,
311aab06 2468 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
1756cb66 2469 ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
311aab06
VM
2470 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
2471}
2472
058e97ec
VM
2473/* Remove allocnos from loops removed from the allocation
2474 consideration. */
2475static void
2476remove_unnecessary_allocnos (void)
2477{
2478 int regno;
c6bb4c93 2479 bool merged_p, rebuild_p;
058e97ec
VM
2480 ira_allocno_t a, prev_a, next_a, parent_a;
2481 ira_loop_tree_node_t a_node, parent;
058e97ec
VM
2482
2483 merged_p = false;
c6bb4c93 2484 regno_allocnos = NULL;
058e97ec 2485 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
c6bb4c93
VM
2486 {
2487 rebuild_p = false;
2488 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
2489 a != NULL;
2490 a = next_a)
2491 {
2492 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
2493 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2494 if (! a_node->to_remove_p)
2495 prev_a = a;
2496 else
2497 {
2498 for (parent = a_node->parent;
2499 (parent_a = parent->regno_allocno_map[regno]) == NULL
2500 && parent->to_remove_p;
2501 parent = parent->parent)
2502 ;
2503 if (parent_a == NULL)
2504 {
311aab06
VM
2505 /* There are no allocnos with the same regno in
2506 upper region -- just move the allocno to the
2507 upper region. */
c6bb4c93
VM
2508 prev_a = a;
2509 ALLOCNO_LOOP_TREE_NODE (a) = parent;
2510 parent->regno_allocno_map[regno] = a;
2511 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2512 rebuild_p = true;
2513 }
2514 else
2515 {
2516 /* Remove the allocno and update info of allocno in
2517 the upper region. */
2518 if (prev_a == NULL)
2519 ira_regno_allocno_map[regno] = next_a;
2520 else
2521 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
3c55880a 2522 move_allocno_live_ranges (a, parent_a);
c6bb4c93 2523 merged_p = true;
311aab06 2524 propagate_some_info_from_allocno (parent_a, a);
46044dd9
L
2525 /* Remove it from the corresponding regno allocno
2526 map to avoid info propagation of subsequent
2527 allocno into this already removed allocno. */
2528 a_node->regno_allocno_map[regno] = NULL;
3b6d1699 2529 ira_remove_allocno_prefs (a);
c6bb4c93
VM
2530 finish_allocno (a);
2531 }
2532 }
2533 }
2534 if (rebuild_p)
2535 /* We need to restore the order in regno allocno list. */
2536 {
2537 if (regno_allocnos == NULL)
2538 regno_allocnos
2539 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2540 * ira_allocnos_num);
2541 ira_rebuild_regno_allocno_list (regno);
2542 }
2543 }
058e97ec
VM
2544 if (merged_p)
2545 ira_rebuild_start_finish_chains ();
c6bb4c93
VM
2546 if (regno_allocnos != NULL)
2547 ira_free (regno_allocnos);
058e97ec
VM
2548}
2549
311aab06 2550/* Remove allocnos from all loops but the root. */
058e97ec 2551static void
311aab06 2552remove_low_level_allocnos (void)
058e97ec 2553{
311aab06
VM
2554 int regno;
2555 bool merged_p, propagate_p;
2556 ira_allocno_t a, top_a;
2557 ira_loop_tree_node_t a_node, parent;
311aab06
VM
2558 ira_allocno_iterator ai;
2559
2560 merged_p = false;
2561 FOR_EACH_ALLOCNO (a, ai)
2562 {
2563 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2564 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2565 continue;
2566 regno = ALLOCNO_REGNO (a);
2567 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
2568 {
2569 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2570 ira_loop_tree_root->regno_allocno_map[regno] = a;
2571 continue;
2572 }
2573 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2574 /* Remove the allocno and update info of allocno in the upper
2575 region. */
3c55880a 2576 move_allocno_live_ranges (a, top_a);
311aab06 2577 merged_p = true;
311aab06
VM
2578 if (propagate_p)
2579 propagate_some_info_from_allocno (top_a, a);
2580 }
2581 FOR_EACH_ALLOCNO (a, ai)
2582 {
2583 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2584 if (a_node == ira_loop_tree_root)
2585 continue;
2586 parent = a_node->parent;
2587 regno = ALLOCNO_REGNO (a);
2588 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2589 ira_assert (ALLOCNO_CAP (a) != NULL);
2590 else if (ALLOCNO_CAP (a) == NULL)
2591 ira_assert (parent->regno_allocno_map[regno] != NULL);
2592 }
2593 FOR_EACH_ALLOCNO (a, ai)
2594 {
2595 regno = ALLOCNO_REGNO (a);
2596 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2597 {
ac0ab4f7
BS
2598 ira_object_t obj;
2599 ira_allocno_object_iterator oi;
a49ae217 2600
311aab06
VM
2601 ira_regno_allocno_map[regno] = a;
2602 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2603 ALLOCNO_CAP_MEMBER (a) = NULL;
ac0ab4f7
BS
2604 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2605 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
2606 OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
311aab06
VM
2607#ifdef STACK_REGS
2608 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2609 ALLOCNO_NO_STACK_REG_P (a) = true;
2610#endif
2611 }
2612 else
3b6d1699
VM
2613 {
2614 ira_remove_allocno_prefs (a);
2615 finish_allocno (a);
2616 }
311aab06
VM
2617 }
2618 if (merged_p)
2619 ira_rebuild_start_finish_chains ();
2620}
2621
2622/* Remove loops from consideration. We remove all loops except for
2623 root if ALL_P or loops for which a separate allocation will not
2624 improve the result. We have to do this after allocno creation and
1756cb66
VM
2625 their costs and allocno class evaluation because only after that
2626 the register pressure can be known and is calculated. */
311aab06
VM
2627static void
2628remove_unnecessary_regions (bool all_p)
2629{
2608d841
VM
2630 if (current_loops == NULL)
2631 return;
311aab06
VM
2632 if (all_p)
2633 mark_all_loops_for_removal ();
2634 else
2635 mark_loops_for_removal ();
8b1c6fd7
DM
2636 children_vec.create (last_basic_block_for_fn (cfun)
2637 + number_of_loops (cfun));
2638 removed_loop_vec.create (last_basic_block_for_fn (cfun)
2639 + number_of_loops (cfun));
9771b263
DN
2640 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
2641 children_vec.release ();
311aab06
VM
2642 if (all_p)
2643 remove_low_level_allocnos ();
2644 else
2645 remove_unnecessary_allocnos ();
9771b263
DN
2646 while (removed_loop_vec.length () > 0)
2647 finish_loop_tree_node (removed_loop_vec.pop ());
2648 removed_loop_vec.release ();
058e97ec
VM
2649}
2650
2651\f
2652
927425df
VM
2653/* At this point true value of allocno attribute bad_spill_p means
2654 that there is an insn where allocno occurs and where the allocno
2655 can not be used as memory. The function updates the attribute, now
2656 it can be true only for allocnos which can not be used as memory in
2657 an insn and in whose live ranges there is other allocno deaths.
2658 Spilling allocnos with true value will not improve the code because
2659 it will not make other allocnos colorable and additional reloads
2660 for the corresponding pseudo will be generated in reload pass for
2661 each insn it occurs.
2662
2663 This is a trick mentioned in one classic article of Chaitin etc
2664 which is frequently omitted in other implementations of RA based on
2665 graph coloring. */
2666static void
2667update_bad_spill_attribute (void)
2668{
2669 int i;
2670 ira_allocno_t a;
2671 ira_allocno_iterator ai;
ac0ab4f7
BS
2672 ira_allocno_object_iterator aoi;
2673 ira_object_t obj;
b14151b5 2674 live_range_t r;
1756cb66 2675 enum reg_class aclass;
927425df
VM
2676 bitmap_head dead_points[N_REG_CLASSES];
2677
1756cb66 2678 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2679 {
1756cb66
VM
2680 aclass = ira_allocno_classes[i];
2681 bitmap_initialize (&dead_points[aclass], &reg_obstack);
927425df
VM
2682 }
2683 FOR_EACH_ALLOCNO (a, ai)
2684 {
1756cb66
VM
2685 aclass = ALLOCNO_CLASS (a);
2686 if (aclass == NO_REGS)
927425df 2687 continue;
ac0ab4f7
BS
2688 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
2689 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1756cb66 2690 bitmap_set_bit (&dead_points[aclass], r->finish);
927425df
VM
2691 }
2692 FOR_EACH_ALLOCNO (a, ai)
2693 {
1756cb66
VM
2694 aclass = ALLOCNO_CLASS (a);
2695 if (aclass == NO_REGS)
927425df
VM
2696 continue;
2697 if (! ALLOCNO_BAD_SPILL_P (a))
2698 continue;
ac0ab4f7 2699 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
927425df 2700 {
ac0ab4f7
BS
2701 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2702 {
2703 for (i = r->start + 1; i < r->finish; i++)
1756cb66 2704 if (bitmap_bit_p (&dead_points[aclass], i))
ac0ab4f7
BS
2705 break;
2706 if (i < r->finish)
2707 break;
2708 }
2709 if (r != NULL)
2710 {
2711 ALLOCNO_BAD_SPILL_P (a) = false;
927425df 2712 break;
ac0ab4f7 2713 }
927425df 2714 }
927425df 2715 }
1756cb66 2716 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2717 {
1756cb66
VM
2718 aclass = ira_allocno_classes[i];
2719 bitmap_clear (&dead_points[aclass]);
927425df
VM
2720 }
2721}
2722
2723\f
2724
058e97ec
VM
2725/* Set up minimal and maximal live range points for allocnos. */
2726static void
2727setup_min_max_allocno_live_range_point (void)
2728{
2729 int i;
2730 ira_allocno_t a, parent_a, cap;
2731 ira_allocno_iterator ai;
ac0ab4f7
BS
2732#ifdef ENABLE_IRA_CHECKING
2733 ira_object_iterator oi;
2734 ira_object_t obj;
2735#endif
b14151b5 2736 live_range_t r;
058e97ec
VM
2737 ira_loop_tree_node_t parent;
2738
2739 FOR_EACH_ALLOCNO (a, ai)
2740 {
ac0ab4f7 2741 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2742
ac0ab4f7
BS
2743 for (i = 0; i < n; i++)
2744 {
2745 ira_object_t obj = ALLOCNO_OBJECT (a, i);
2746 r = OBJECT_LIVE_RANGES (obj);
2747 if (r == NULL)
2748 continue;
2749 OBJECT_MAX (obj) = r->finish;
2750 for (; r->next != NULL; r = r->next)
2751 ;
2752 OBJECT_MIN (obj) = r->start;
2753 }
058e97ec
VM
2754 }
2755 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2756 for (a = ira_regno_allocno_map[i];
2757 a != NULL;
2758 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2759 {
ac0ab4f7
BS
2760 int j;
2761 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2762
ac0ab4f7 2763 for (j = 0; j < n; j++)
058e97ec 2764 {
ac0ab4f7
BS
2765 ira_object_t obj = ALLOCNO_OBJECT (a, j);
2766 ira_object_t parent_obj;
2767
2768 if (OBJECT_MAX (obj) < 0)
2769 continue;
2770 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2771 /* Accumulation of range info. */
2772 if (ALLOCNO_CAP (a) != NULL)
058e97ec 2773 {
ac0ab4f7
BS
2774 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2775 {
2776 ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
2777 if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
2778 OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
2779 if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
2780 OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
2781 }
2782 continue;
058e97ec 2783 }
ac0ab4f7
BS
2784 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
2785 continue;
2786 parent_a = parent->regno_allocno_map[i];
2787 parent_obj = ALLOCNO_OBJECT (parent_a, j);
2788 if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
2789 OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
2790 if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
2791 OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
058e97ec 2792 }
058e97ec
VM
2793 }
2794#ifdef ENABLE_IRA_CHECKING
ac0ab4f7 2795 FOR_EACH_OBJECT (obj, oi)
058e97ec 2796 {
a49ae217
BS
2797 if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
2798 && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
058e97ec
VM
2799 continue;
2800 gcc_unreachable ();
2801 }
2802#endif
2803}
2804
2805/* Sort allocnos according to their live ranges. Allocnos with
1756cb66
VM
2806 smaller allocno class are put first unless we use priority
2807 coloring. Allocnos with the same class are ordered according
2808 their start (min). Allocnos with the same start are ordered
2809 according their finish (max). */
058e97ec 2810static int
ac0ab4f7 2811object_range_compare_func (const void *v1p, const void *v2p)
058e97ec
VM
2812{
2813 int diff;
a49ae217
BS
2814 ira_object_t obj1 = *(const ira_object_t *) v1p;
2815 ira_object_t obj2 = *(const ira_object_t *) v2p;
2816 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2817 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
058e97ec 2818
a49ae217 2819 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
058e97ec 2820 return diff;
a49ae217 2821 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
058e97ec
VM
2822 return diff;
2823 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2824}
2825
a49ae217 2826/* Sort ira_object_id_map and set up conflict id of allocnos. */
058e97ec 2827static void
a49ae217 2828sort_conflict_id_map (void)
058e97ec
VM
2829{
2830 int i, num;
2831 ira_allocno_t a;
2832 ira_allocno_iterator ai;
2833
2834 num = 0;
2835 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7
BS
2836 {
2837 ira_allocno_object_iterator oi;
2838 ira_object_t obj;
2839
2840 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2841 ira_object_id_map[num++] = obj;
2842 }
85c00e0b
JJ
2843 if (num > 1)
2844 qsort (ira_object_id_map, num, sizeof (ira_object_t),
2845 object_range_compare_func);
058e97ec 2846 for (i = 0; i < num; i++)
a49ae217
BS
2847 {
2848 ira_object_t obj = ira_object_id_map[i];
1756cb66 2849
a49ae217
BS
2850 gcc_assert (obj != NULL);
2851 OBJECT_CONFLICT_ID (obj) = i;
2852 }
2853 for (i = num; i < ira_objects_num; i++)
2854 ira_object_id_map[i] = NULL;
058e97ec
VM
2855}
2856
/* Set up minimal and maximal conflict ids of allocnos with which
   given allocno can conflict.  After this function, OBJECT_MIN and
   OBJECT_MAX no longer hold live-range points but a conservative
   [min, max] window of conflict ids; conflict bit vectors can then be
   sized to that window.  Requires ira_object_id_map to be sorted by
   sort_conflict_id_map.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  int aclass;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  int word0_min, word0_max;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  /* Forward pass: compute the minimal conflict id for each object.
     Because objects are sorted by range start, an object can only
     conflict with objects whose ranges have not finished before its
     own range starts.  */
  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  aclass = -1;
  first_not_finished = -1;
  for (i = 0; i < ira_objects_num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);

      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  min = i;
	  first_not_finished = i;
	}
      else
	{
	  start = OBJECT_MIN (obj);
	  /* If we skip an allocno, the allocno with smaller ids will
	     be also skipped because of the secondary sorting the
	     range finishes (see function
	     object_range_compare_func).  */
	  while (first_not_finished < i
		 && start > OBJECT_MAX (ira_object_id_map
					[first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	}
      if (min == i)
	/* We could increase min further in this case but it is good
	   enough.  */
	min++;
      /* Save the original range start before OBJECT_MIN is
	 repurposed to hold the conflict-id minimum.  */
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
    }
  /* Backward pass: compute the maximal conflict id.  last_lived[p]
     records the highest-id object (seen so far) whose range covers
     point P.  */
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  aclass = -1;
  filled_area_start = -1;
  for (i = ira_objects_num - 1; i >= 0; i--)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);
      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	    last_lived[j] = -1;
	  filled_area_start = ira_max_point;
	}
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      if (max < 0)
	/* We could decrease max further in this case but it is good
	   enough.  */
	max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go further A range finish to recognize
	 intersection quickly because if the finish of subsequently
	 processed allocno (it has smaller conflict id) range is
	 further A range finish than they are definitely intersected
	 (the reason for this is the allocnos with bigger conflict id
	 have their range starts not smaller than allocnos with
	 smaller ids.  */
      for (j = min; j < filled_area_start; j++)
	last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);

  /* For allocnos with more than one object, we may later record extra conflicts in
     subobject 0 that we cannot really know about here.
     For now, simply widen the min/max range of these subobjects.  */

  word0_min = INT_MAX;
  word0_max = INT_MIN;

  /* First sweep: find the overall conflict-id range of all word-0
     subobjects of multi-object allocnos.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_CONFLICT_ID (obj0) < word0_min)
	word0_min = OBJECT_CONFLICT_ID (obj0);
      if (OBJECT_CONFLICT_ID (obj0) > word0_max)
	word0_max = OBJECT_CONFLICT_ID (obj0);
    }
  /* Second sweep: widen every such subobject's window to cover that
     whole range.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_MIN (obj0) > word0_min)
	OBJECT_MIN (obj0) = word0_min;
      if (OBJECT_MAX (obj0) < word0_max)
	OBJECT_MAX (obj0) = word0_max;
    }
}
2981
2982\f
2983
2984static void
2985create_caps (void)
2986{
2987 ira_allocno_t a;
2988 ira_allocno_iterator ai;
2989 ira_loop_tree_node_t loop_tree_node;
2990
2991 FOR_EACH_ALLOCNO (a, ai)
2992 {
2993 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2994 continue;
2995 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2996 create_cap_allocno (a);
2997 else if (ALLOCNO_CAP (a) == NULL)
2998 {
2999 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
3000 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
3001 create_cap_allocno (a);
3002 }
3003 }
3004}
3005
3006\f
3007
/* This page contains code transforming the multi-region internal
   representation (IR) into a one-region IR, which is necessary for
   reload.  This transformation is called IR flattening.  We might
   just rebuild the IR for one region, but we don't do it because it
   takes a lot of time.  */
3013
82b33628
VM
/* Map: regno -> the allocno which will finally represent the regno
   in the flattened (one-region) IR.  Allocated and freed by
   ira_flattening.  */
static ira_allocno_t *regno_top_level_allocno_map;
3017
029da7d4
BS
3018/* Find the allocno that corresponds to A at a level one higher up in the
3019 loop tree. Returns NULL if A is a cap, or if it has no parent. */
3020ira_allocno_t
3021ira_parent_allocno (ira_allocno_t a)
3022{
3023 ira_loop_tree_node_t parent;
3024
3025 if (ALLOCNO_CAP (a) != NULL)
3026 return NULL;
3027
3028 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
3029 if (parent == NULL)
3030 return NULL;
3031
3032 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
3033}
3034
3035/* Find the allocno that corresponds to A at a level one higher up in the
3036 loop tree. If ALLOCNO_CAP is set for A, return that. */
3037ira_allocno_t
3038ira_parent_or_cap_allocno (ira_allocno_t a)
3039{
3040 if (ALLOCNO_CAP (a) != NULL)
3041 return ALLOCNO_CAP (a);
3042
3043 return ira_parent_allocno (a);
3044}
3045
/* Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  */
static bool
copy_info_to_removed_store_destinations (int regno)
{
  ira_allocno_t a;
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  bool merged_p;

  merged_p = false;
  for (a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    {
      /* Only surviving allocnos (the ones chosen as final
	 representatives in regno_top_level_allocno_map) are
	 processed.  */
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
	/* This allocno will be removed.  */
	continue;

      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      /* Walk up the loop tree looking for the nearest ancestor
	 allocno that both survives flattening and was the destination
	 of a removed memory store.  The walk also stops if a level
	 has no allocno for REGNO at all.  */
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
	   parent != NULL;
	   parent = parent->parent)
	if ((parent_a = parent->regno_allocno_map[regno]) == NULL
	    || (parent_a
		== regno_top_level_allocno_map[REGNO
					       (allocno_emit_reg (parent_a))]
		&& ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
	  break;
      if (parent == NULL || parent_a == NULL)
	continue;

      /* Merge A's live ranges and conflict information into the
	 found destination allocno.  */
      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);

      /* Accumulate call-related statistics as well.  */
      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
      IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a),
			ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	+= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
      merged_p = true;
    }
  return merged_p;
}
3098
/* Flatten the IR.  In other words, this function transforms IR as if
   it were built with one region (without loops).  We could make it
   much simpler by rebuilding IR with one region, but unfortunately it
   takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.  */
void
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
{
  int i, j;
  bool keep_p;
  int hard_regs_num;
  bool new_pseudos_p, merged_p, mem_dest_p;
  unsigned int n;
  enum reg_class aclass;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_copy_t cp;
  ira_loop_tree_node_t node;
  live_range_t r;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;

  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
				      * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
	  max_reg_num () * sizeof (ira_allocno_t));
  new_pseudos_p = merged_p = false;
  /* Seed the "total" conflict sets from the per-region ones; the
     merging below only grows the totals.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps and they are never
	   will be transformed into allocnos existing after IR
	   flattening.  */
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
			   OBJECT_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
#endif
    }
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    {
      mem_dest_p = false;
      for (a = ira_regno_allocno_map[i];
	   a != NULL;
	   a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
	{
	  ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);

	  ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	  if (data->somewhere_renamed_p)
	    new_pseudos_p = true;
	  parent_a = ira_parent_allocno (a);
	  if (parent_a == NULL)
	    {
	      /* Top-level allocno: it represents its regno as-is.  */
	      ALLOCNO_COPIES (a) = NULL;
	      regno_top_level_allocno_map[REGNO (data->reg)] = a;
	      continue;
	    }
	  ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);

	  if (data->mem_optimized_dest != NULL)
	    mem_dest_p = true;
	  parent_data = ALLOCNO_EMIT_DATA (parent_a);
	  if (REGNO (data->reg) == REGNO (parent_data->reg))
	    {
	      /* Same pseudo on both levels: merge A into its parent
		 and drop A.  */
	      merge_hard_reg_conflicts (a, parent_a, true);
	      move_allocno_live_ranges (a, parent_a);
	      merged_p = true;
	      parent_data->mem_optimized_dest_p
		= (parent_data->mem_optimized_dest_p
		   || data->mem_optimized_dest_p);
	      continue;
	    }
	  /* A was renamed to a new pseudo: it will survive as its own
	     top-level allocno, so subtract its statistics/costs from
	     every ancestor (they were propagated upward when the loop
	     tree was built).  */
	  new_pseudos_p = true;
	  for (;;)
	    {
	      ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
	      ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
	      ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
	      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CALLS_CROSSED_NUM (a);
	      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
	      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
		-= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	      ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
			  && ALLOCNO_NREFS (parent_a) >= 0
			  && ALLOCNO_FREQ (parent_a) >= 0);
	      aclass = ALLOCNO_CLASS (parent_a);
	      hard_regs_num = ira_class_hard_regs_num[aclass];
	      if (ALLOCNO_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_HARD_REG_COSTS (a)[j];
	      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
	      ALLOCNO_CLASS_COST (parent_a)
		-= ALLOCNO_CLASS_COST (a);
	      ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
	      parent_a = ira_parent_allocno (parent_a);
	      if (parent_a == NULL)
		break;
	    }
	  ALLOCNO_COPIES (a) = NULL;
	  regno_top_level_allocno_map[REGNO (data->reg)] = a;
	}
      if (mem_dest_p && copy_info_to_removed_store_destinations (i))
	merged_p = true;
    }
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  if (new_pseudos_p)
    {
      sparseset objects_live;

      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
	{
	  ira_allocno_object_iterator oi;
	  ira_object_t obj;

	  if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	      || ALLOCNO_CAP_MEMBER (a) != NULL)
	    continue;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    {
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		ira_assert (r->object == obj);
	      clear_conflicts (obj);
	    }
	}
      /* Sweep over all program points; at each point, every starting
	 object conflicts with every object currently live.  */
      objects_live = sparseset_alloc (ira_objects_num);
      for (i = 0; i < ira_max_point; i++)
	{
	  for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
	    {
	      ira_object_t obj = r->object;

	      a = OBJECT_ALLOCNO (obj);
	      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
		  || ALLOCNO_CAP_MEMBER (a) != NULL)
		continue;

	      aclass = ALLOCNO_CLASS (a);
	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
		{
		  ira_object_t live_obj = ira_object_id_map[n];
		  ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
		  enum reg_class live_aclass = ALLOCNO_CLASS (live_a);

		  if (ira_reg_classes_intersect_p[aclass][live_aclass]
		      /* Don't set up conflict for the allocno with itself.  */
		      && live_a != a)
		    ira_add_conflict (obj, live_obj);
		}
	      sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
	    }

	  for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
	    sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
	}
      sparseset_free (objects_live);
      compress_conflict_vecs ();
    }
  /* Mark some copies for removing and change allocnos in the rest
     copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
	  || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf
	      (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
	       cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->first),
	       REGNO (allocno_emit_reg (cp->first)),
	       ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->second),
	       REGNO (allocno_emit_reg (cp->second)));
	  cp->loop_tree_node = NULL;
	  continue;
	}
      first
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
      second
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
      node = cp->loop_tree_node;
      if (node == NULL)
	keep_p = true; /* It copy generated in ira-emit.c.  */
      else
	{
	  /* Check that the copy was not propagated from level on
	     which we will have different pseudos.  */
	  node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
	  node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
	  keep_p = ((REGNO (allocno_emit_reg (first))
		     == REGNO (allocno_emit_reg (node_first)))
		    && (REGNO (allocno_emit_reg (second))
			== REGNO (allocno_emit_reg (node_second))));
	}
      if (keep_p)
	{
	  /* Retarget the copy at the surviving top-level allocnos.  */
	  cp->loop_tree_node = ira_loop_tree_root;
	  cp->first = first;
	  cp->second = second;
	}
      else
	{
	  cp->loop_tree_node = NULL;
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
		     cp->num, ALLOCNO_NUM (cp->first),
		     REGNO (allocno_emit_reg (cp->first)),
		     ALLOCNO_NUM (cp->second),
		     REGNO (allocno_emit_reg (cp->second)));
	}
    }
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	  || ALLOCNO_CAP_MEMBER (a) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove a%dr%d\n",
		     ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
	  ira_remove_allocno_prefs (a);
	  finish_allocno (a);
	  continue;
	}
      /* Surviving allocno: reattach it to the root node under its
	 (possibly new) pseudo number.  */
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
      ALLOCNO_CAP (a) = NULL;
      /* Restore updated costs for assignments from reload.  */
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
	ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
    }
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (cp->loop_tree_node == NULL)
	{
	  ira_copies[cp->num] = NULL;
	  finish_copy (cp);
	  continue;
	}
      ira_assert
	(ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
	 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      add_allocno_copy_to_list (cp);
      swap_allocno_copy_ends_if_necessary (cp);
    }
  rebuild_regno_allocno_maps ();
  if (ira_max_point != ira_max_point_before_emit)
    ira_compress_allocno_live_ranges ();
  ira_free (regno_top_level_allocno_map);
}
3373
3374\f
3375
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_loop_tree_node_t node = ALLOCNO_LOOP_TREE_NODE (a);

      /* Every allocno must be registered in its node.  */
      ira_assert (bitmap_bit_p (node->all_allocnos, ALLOCNO_NUM (a)));
      if (node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* A cap must itself be capped on the next level up.  */
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	/* Without a cap, the pseudo must be represented in the parent
	   node and the allocno must live through the loop border.  */
	ira_assert (node->parent->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
3403
4ac293e2
VM
3404/* Identify allocnos which prefer a register class with a single hard register.
3405 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
3406 less likely to use the preferred singleton register. */
3407static void
3408update_conflict_hard_reg_costs (void)
3409{
3410 ira_allocno_t a;
3411 ira_allocno_iterator ai;
3412 int i, index, min;
3413
3414 FOR_EACH_ALLOCNO (a, ai)
3415 {
6f76a878
AS
3416 reg_class_t aclass = ALLOCNO_CLASS (a);
3417 reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
c9d74da6
RS
3418 int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
3419 if (singleton < 0)
4ac293e2 3420 continue;
c9d74da6 3421 index = ira_class_hard_reg_index[(int) aclass][singleton];
4ac293e2
VM
3422 if (index < 0)
3423 continue;
3424 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
3425 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
3426 continue;
3427 min = INT_MAX;
6f76a878 3428 for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
1756cb66 3429 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
4ac293e2
VM
3430 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
3431 min = ALLOCNO_HARD_REG_COSTS (a)[i];
3432 if (min == INT_MAX)
3433 continue;
3434 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 3435 aclass, 0);
4ac293e2 3436 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
1756cb66 3437 -= min - ALLOCNO_CLASS_COST (a);
4ac293e2
VM
3438 }
3439}
3440
/* Create a internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  The function returns TRUE if we generate loop
   structure (besides nodes representing all function and the basic
   blocks) for regional allocation.  A true return means that we
   really need to flatten IR before the reload.  */
bool
ira_build (void)
{
  bool loops_p;

  df_analyze ();
  /* Allocate the basic data structures.  */
  initiate_cost_vectors ();
  initiate_allocnos ();
  initiate_prefs ();
  initiate_copies ();
  /* Build the loop tree and populate it with allocnos and costs.  */
  create_loop_tree_nodes ();
  form_loop_tree ();
  create_allocnos ();
  ira_costs ();
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  loops_p = more_one_region_p ();
  if (loops_p)
    {
      /* Regional allocation: push info up the loop tree and create
	 cap allocnos.  */
      propagate_allocno_info ();
      create_caps ();
    }
  ira_tune_allocno_costs ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
#endif
  /* Build the conflict information.  */
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
    {
      ira_allocno_t a;
      ira_allocno_iterator ai;

      /* Remove all regions but root one.  */
      if (loops_p)
	{
	  remove_unnecessary_regions (true);
	  loops_p = false;
	}
      /* We don't save hard registers around calls for fast allocation
	 -- add caller clobbered registers as conflicting ones to
	 allocno crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
	if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
	  ior_hard_reg_conflicts (a, &call_used_reg_set);
    }
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_prefs (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    {
      /* Dump summary statistics: conflict, range, and multi-object
	 allocno counts.  */
      int n, nr, nr_big;
      ira_allocno_t a;
      live_range_t r;
      ira_allocno_iterator ai;

      n = 0;
      nr = 0;
      nr_big = 0;
      FOR_EACH_ALLOCNO (a, ai)
	{
	  int j, nobj = ALLOCNO_NUM_OBJECTS (a);

	  if (nobj > 1)
	    nr_big++;
	  for (j = 0; j < nobj; j++)
	    {
	      ira_object_t obj = ALLOCNO_OBJECT (a, j);
	      n += OBJECT_NUM_CONFLICTS (obj);
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		nr++;
	    }
	}
      fprintf (ira_dump_file, "  regions=%d, blocks=%d, points=%d\n",
	       current_loops == NULL ? 1 : number_of_loops (cfun),
	       n_basic_blocks_for_fn (cfun), ira_max_point);
      fprintf (ira_dump_file,
	       "    allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
	       ira_allocnos_num, nr_big, ira_copies_num, n, nr);
    }
  return loops_p;
}
3535
/* Release the data created by function ira_build.  Frees the loop
   tree, preferences, copies, allocnos, cost vectors, and live-range
   data, mirroring the initiate_*/create_* calls in ira_build.  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_prefs ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}
3546}