]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ira-build.c
[Ada] Improved support for aspect alignment in CCG
[thirdparty/gcc.git] / gcc / ira-build.c
CommitLineData
058e97ec 1/* Building internal representation for IRA.
8d9254fc 2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
058e97ec
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5 25#include "target.h"
058e97ec 26#include "rtl.h"
957060b5 27#include "predict.h"
c7131fb2 28#include "df.h"
058e97ec 29#include "insn-config.h"
957060b5 30#include "regs.h"
4d0cdd0c 31#include "memmodel.h"
957060b5
AM
32#include "ira.h"
33#include "ira-int.h"
058e97ec 34#include "sparseset.h"
c7131fb2 35#include "cfgloop.h"
058e97ec 36
070a1983 37static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx_insn *,
058e97ec
VM
38 ira_loop_tree_node_t);
39
40/* The root of the loop tree corresponding to the all function. */
41ira_loop_tree_node_t ira_loop_tree_root;
42
43/* Height of the loop tree. */
44int ira_loop_tree_height;
45
46/* All nodes representing basic blocks are referred through the
67914693 47 following array. We cannot use basic block member `aux' for this
058e97ec
VM
48 because it is used for insertion of insns on edges. */
49ira_loop_tree_node_t ira_bb_nodes;
50
51/* All nodes representing loops are referred through the following
52 array. */
53ira_loop_tree_node_t ira_loop_nodes;
54
caff7edf
JJ
55/* And size of the ira_loop_nodes array. */
56unsigned int ira_loop_nodes_count;
57
b8698a0f 58/* Map regno -> allocnos with given regno (see comments for
058e97ec
VM
59 allocno member `next_regno_allocno'). */
60ira_allocno_t *ira_regno_allocno_map;
61
62/* Array of references to all allocnos. The order number of the
63 allocno corresponds to the index in the array. Removed allocnos
64 have NULL element value. */
65ira_allocno_t *ira_allocnos;
66
67/* Sizes of the previous array. */
68int ira_allocnos_num;
69
a49ae217
BS
70/* Count of conflict record structures we've created, used when creating
71 a new conflict id. */
72int ira_objects_num;
73
74/* Map a conflict id to its conflict record. */
75ira_object_t *ira_object_id_map;
058e97ec 76
3b6d1699
VM
77/* Array of references to all allocno preferences. The order number
78 of the preference corresponds to the index in the array. */
79ira_pref_t *ira_prefs;
80
81/* Size of the previous array. */
82int ira_prefs_num;
83
058e97ec
VM
84/* Array of references to all copies. The order number of the copy
85 corresponds to the index in the array. Removed copies have NULL
86 element value. */
87ira_copy_t *ira_copies;
88
89/* Size of the previous array. */
90int ira_copies_num;
91
92\f
93
94/* LAST_BASIC_BLOCK before generating additional insns because of live
95 range splitting. Emitting insns on a critical edge creates a new
96 basic block. */
97static int last_basic_block_before_change;
98
/* Initialize some members in loop tree node NODE.  Use LOOP_NUM for
   the member loop_num.  The regno->allocno map is sized for the
   current max_reg_num () and zero-filled; the bitmaps are freshly
   allocated and empty.  */
static void
init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
{
  int max_regno = max_reg_num ();

  node->regno_allocno_map
    = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
  memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
  memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
  node->all_allocnos = ira_allocate_bitmap ();
  node->modified_regnos = ira_allocate_bitmap ();
  node->border_allocnos = ira_allocate_bitmap ();
  node->local_copies = ira_allocate_bitmap ();
  node->loop_num = loop_num;
  /* Tree links are built later (see add_loop_to_tree).  */
  node->children = NULL;
  node->subloops = NULL;
}
118
119
/* The following function allocates the loop tree nodes.  If
   CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
   the root which corresponds the all function) will be not allocated
   but nodes will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (void)
{
  unsigned int i, j;
  bool skip_p;
  edge_iterator ei;
  edge e;
  vec<edge> edges;
  loop_p loop;

  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node)
		     * last_basic_block_for_fn (cfun)));
  last_basic_block_before_change = last_basic_block_for_fn (cfun);
  for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
	      sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  if (current_loops == NULL)
    {
      /* No CFG loop info: a single node describes the whole
	 function.  */
      ira_loop_nodes_count = 1;
      ira_loop_nodes = ((struct ira_loop_tree_node *)
			ira_allocate (sizeof (struct ira_loop_tree_node)));
      init_loop_tree_node (ira_loop_nodes, 0);
      return;
    }
  ira_loop_nodes_count = number_of_loops (cfun);
  ira_loop_nodes = ((struct ira_loop_tree_node *)
		    ira_allocate (sizeof (struct ira_loop_tree_node)
				  * ira_loop_nodes_count));
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
    {
      if (loop_outer (loop) != NULL)
	{
	  ira_loop_nodes[i].regno_allocno_map = NULL;
	  /* Skip (leave uninitialized, map == NULL) any non-root loop
	     entered or exited through an abnormal critical edge: we
	     could not insert fix-up code on such edges.  */
	  skip_p = false;
	  FOR_EACH_EDGE (e, ei, loop->header->preds)
	    if (e->src != loop->latch
		&& (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  if (skip_p)
	    continue;
	  edges = get_loop_exit_edges (loop);
	  FOR_EACH_VEC_ELT (edges, j, e)
	    if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  edges.release ();
	  if (skip_p)
	    continue;
	}
      init_loop_tree_node (&ira_loop_nodes[i], loop->num);
    }
}
190
191/* The function returns TRUE if there are more one allocation
192 region. */
193static bool
194more_one_region_p (void)
195{
196 unsigned int i;
197 loop_p loop;
198
2608d841 199 if (current_loops != NULL)
0fc822d0 200 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
2608d841
VM
201 if (ira_loop_nodes[i].regno_allocno_map != NULL
202 && ira_loop_tree_root != &ira_loop_nodes[i])
203 return true;
058e97ec
VM
204 return false;
205}
206
/* Free the loop tree node of a loop.  A node that was skipped at
   creation time has a NULL regno_allocno_map and nothing to free.  */
static void
finish_loop_tree_node (ira_loop_tree_node_t loop)
{
  if (loop->regno_allocno_map != NULL)
    {
      /* Only loop nodes own these resources; BB nodes are freed
	 separately (see finish_loop_tree_nodes).  */
      ira_assert (loop->bb == NULL);
      ira_free_bitmap (loop->local_copies);
      ira_free_bitmap (loop->border_allocnos);
      ira_free_bitmap (loop->modified_regnos);
      ira_free_bitmap (loop->all_allocnos);
      ira_free (loop->regno_allocno_map);
      loop->regno_allocno_map = NULL;
    }
}
222
/* Free the loop tree nodes.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;

  for (i = 0; i < ira_loop_nodes_count; i++)
    finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  /* Use the BB count recorded at creation time: splitting may have
     added blocks since then, but only the original ones have nodes.  */
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
	ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
	ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
247
248\f
249
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  If LOOP is NULL we adding
   loop designating the whole function when CFG loops are not
   built.  */
static void
add_loop_to_tree (class loop *loop)
{
  int loop_num;
  class loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We cannot use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  if (loop != NULL && loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  loop_num = loop != NULL ? loop->num : 0;
  if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
      && ira_loop_nodes[loop_num].children == NULL)
    {
      /* We have not added loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop_num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      if (loop == NULL)
	parent = NULL;
      else
	{
	  /* Find the nearest ancestor loop that actually has a node
	     (some loops are skipped at creation time).  */
	  for (parent = loop_outer (loop);
	       parent != NULL;
	       parent = loop_outer (parent))
	    if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	      break;
	}
      if (parent == NULL)
	{
	  loop_node->next = NULL;
	  loop_node->subloop_next = NULL;
	  loop_node->parent = NULL;
	}
      else
	{
	  /* Push onto the parent's children and subloops lists.  */
	  parent_node = &ira_loop_nodes[parent->num];
	  loop_node->next = parent_node->children;
	  parent_node->children = loop_node;
	  loop_node->subloop_next = parent_node->subloops;
	  parent_node->subloops = loop_node;
	  loop_node->parent = parent_node;
	}
    }
}
301
302/* The following recursive function sets up levels of nodes of the
303 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
304 The function returns maximal value of level in the tree + 1. */
305static int
306setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
307{
308 int height, max_height;
309 ira_loop_tree_node_t subloop_node;
310
311 ira_assert (loop_node->bb == NULL);
312 loop_node->level = level;
313 max_height = level + 1;
314 for (subloop_node = loop_node->subloops;
315 subloop_node != NULL;
316 subloop_node = subloop_node->subloop_next)
317 {
318 ira_assert (subloop_node->bb == NULL);
319 height = setup_loop_tree_level (subloop_node, level + 1);
320 if (height > max_height)
321 max_height = height;
322 }
323 return max_height;
324}
325
/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  basic_block bb;
  class loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;

  /* We cannot use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      if (current_loops == NULL)
	parent = NULL;
      else
	{
	  /* Find the nearest enclosing loop that has a node; some
	     loops are skipped at node-creation time.  */
	  for (parent = bb->loop_father;
	       parent != NULL;
	       parent = loop_outer (parent))
	    if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	      break;
	}
      add_loop_to_tree (parent);
      /* Attach the BB node at the head of its loop's children
	 chain (index 0 is the whole-function root).  */
      loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
368
369\f
370
/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  Called when max_reg_num () may have grown; all maps
   are reallocated at the new size and repopulated from the existing
   allocnos.  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  ira_assert (current_loops != NULL);
  max_regno = max_reg_num ();
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), l, loop)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
	ira_free (ira_loop_nodes[l].regno_allocno_map);
	ira_loop_nodes[l].regno_allocno_map
	  = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
					    * max_regno);
	memset (ira_loop_nodes[l].regno_allocno_map, 0,
		sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps.  */
	continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle.  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
}
058e97ec
VM
414\f
415
a49ae217 416/* Pools for allocnos, allocno live ranges and objects. */
fcb87c50
MM
417static object_allocator<live_range> live_range_pool ("live ranges");
418static object_allocator<ira_allocno> allocno_pool ("allocnos");
419static object_allocator<ira_object> object_pool ("objects");
058e97ec
VM
420
421/* Vec containing references to all created allocnos. It is a
422 container of array allocnos. */
9771b263 423static vec<ira_allocno_t> allocno_vec;
058e97ec 424
a49ae217
BS
425/* Vec containing references to all created ira_objects. It is a
426 container of ira_object_id_map. */
9771b263 427static vec<ira_object_t> ira_object_id_map_vec;
058e97ec
VM
428
429/* Initialize data concerning allocnos. */
430static void
431initiate_allocnos (void)
432{
9771b263 433 allocno_vec.create (max_reg_num () * 2);
058e97ec
VM
434 ira_allocnos = NULL;
435 ira_allocnos_num = 0;
a49ae217 436 ira_objects_num = 0;
9771b263 437 ira_object_id_map_vec.create (max_reg_num () * 2);
a49ae217 438 ira_object_id_map = NULL;
058e97ec 439 ira_regno_allocno_map
1756cb66
VM
440 = (ira_allocno_t *) ira_allocate (max_reg_num ()
441 * sizeof (ira_allocno_t));
058e97ec
VM
442 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
443}
444
a49ae217
BS
/* Create and return an object corresponding to a new allocno A.
   SUBWORD is the word index for multi-object allocnos (0 for the
   first/only object).  The new object gets the next conflict id and
   is registered in ira_object_id_map.  */
static ira_object_t
ira_create_object (ira_allocno_t a, int subword)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj = object_pool.allocate ();

  OBJECT_ALLOCNO (obj) = a;
  OBJECT_SUBWORD (obj) = subword;
  OBJECT_CONFLICT_ID (obj) = ira_objects_num;
  OBJECT_CONFLICT_VEC_P (obj) = false;
  OBJECT_CONFLICT_ARRAY (obj) = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  /* Start with all non-allocatable regs plus everything outside the
     allocno's class as conflicting.  */
  OBJECT_CONFLICT_HARD_REGS (obj) = ira_no_alloc_regs;
  OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) = ira_no_alloc_regs;
  OBJECT_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
  OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
  /* Empty conflict-id range: MIN > MAX until conflicts are seen.  */
  OBJECT_MIN (obj) = INT_MAX;
  OBJECT_MAX (obj) = -1;
  OBJECT_LIVE_RANGES (obj) = NULL;

  ira_object_id_map_vec.safe_push (obj);
  ira_object_id_map
    = ira_object_id_map_vec.address ();
  ira_objects_num = ira_object_id_map_vec.length ();

  return obj;
}
473
058e97ec
VM
/* Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  */
ira_allocno_t
ira_create_allocno (int regno, bool cap_p,
		    ira_loop_tree_node_t loop_tree_node)
{
  ira_allocno_t a;

  a = allocno_pool.allocate ();
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      /* Link into the per-regno chain and the node's map.  */
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle on region borders (see
	   ira-emit.c).  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = ira_allocnos_num;
  bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
  ALLOCNO_NREFS (a) = 0;
  ALLOCNO_FREQ (a) = 0;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CROSSED_CALLS_ABIS (a) = 0;
  CLEAR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = false;
  ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
#endif
  ALLOCNO_DONT_REASSIGN_P (a) = false;
  ALLOCNO_BAD_SPILL_P (a) = false;
  ALLOCNO_ASSIGNED_P (a) = false;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_WMODE (a) = ALLOCNO_MODE (a);
  ALLOCNO_PREFS (a) = NULL;
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
  /* The real class is assigned later by ira_set_allocno_class.  */
  ALLOCNO_CLASS (a) = NO_REGS;
  ALLOCNO_UPDATED_CLASS_COST (a) = 0;
  ALLOCNO_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
  ALLOCNO_NUM_OBJECTS (a) = 0;

  ALLOCNO_ADD_DATA (a) = NULL;
  allocno_vec.safe_push (a);
  ira_allocnos = allocno_vec.address ();
  ira_allocnos_num = allocno_vec.length ();

  return a;
}
538
1756cb66
VM
/* Set up register class for A and update its conflict hard
   registers.  All registers outside ACLASS become conflicts for
   every object of A.  */
void
ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
{
  ira_allocno_object_iterator oi;
  ira_object_t obj;

  ALLOCNO_CLASS (a) = aclass;
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      OBJECT_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
      OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
    }
}
554
ac0ab4f7
BS
/* Determine the number of objects we should associate with allocno A
   and allocate them.  An allocno gets two objects only when its class
   needs exactly two registers and the mode spans exactly two words;
   otherwise it gets one.  */
void
ira_create_allocno_objects (ira_allocno_t a)
{
  machine_mode mode = ALLOCNO_MODE (a);
  enum reg_class aclass = ALLOCNO_CLASS (a);
  int n = ira_reg_class_max_nregs[aclass][mode];
  int i;

  if (n != 2 || maybe_ne (GET_MODE_SIZE (mode), n * UNITS_PER_WORD))
    n = 1;

  ALLOCNO_NUM_OBJECTS (a) = n;
  for (i = 0; i < n; i++)
    ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
}
572
ac0ab4f7 573/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
1756cb66 574 ALLOCNO_OBJECT structures. This must be called after the allocno
ac0ab4f7 575 classes are known. */
a49ae217
BS
576static void
577create_allocno_objects (void)
578{
579 ira_allocno_t a;
580 ira_allocno_iterator ai;
581
582 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7 583 ira_create_allocno_objects (a);
058e97ec
VM
584}
585
ac0ab4f7
BS
/* Merge hard register conflict information for all objects associated with
   allocno TO into the corresponding objects associated with FROM.
   If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS.  */
static void
merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
			  bool total_only)
{
  int i;
  /* Objects are merged pairwise by subword, so the counts must
     agree.  */
  gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
  for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);

      if (!total_only)
	OBJECT_CONFLICT_HARD_REGS (to_obj)
	  |= OBJECT_CONFLICT_HARD_REGS (from_obj);
      OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj)
	|= OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj);
    }
#ifdef STACK_REGS
  if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
    ALLOCNO_NO_STACK_REG_P (to) = true;
  if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
    ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
#endif
}
613
ac0ab4f7
BS
614/* Update hard register conflict information for all objects associated with
615 A to include the regs in SET. */
616void
75f4e3a1 617ior_hard_reg_conflicts (ira_allocno_t a, const_hard_reg_set set)
ac0ab4f7
BS
618{
619 ira_allocno_object_iterator i;
620 ira_object_t obj;
1756cb66 621
ac0ab4f7
BS
622 FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
623 {
75f4e3a1
RS
624 OBJECT_CONFLICT_HARD_REGS (obj) |= set;
625 OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= set;
ac0ab4f7
BS
626 }
627}
628
a49ae217
BS
629/* Return TRUE if a conflict vector with NUM elements is more
630 profitable than a conflict bit vector for OBJ. */
058e97ec 631bool
a49ae217 632ira_conflict_vector_profitable_p (ira_object_t obj, int num)
058e97ec
VM
633{
634 int nw;
a49ae217
BS
635 int max = OBJECT_MAX (obj);
636 int min = OBJECT_MIN (obj);
058e97ec 637
a49ae217
BS
638 if (max < min)
639 /* We prefer a bit vector in such case because it does not result
640 in allocation. */
058e97ec
VM
641 return false;
642
a49ae217
BS
643 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
644 return (2 * sizeof (ira_object_t) * (num + 1)
058e97ec
VM
645 < 3 * nw * sizeof (IRA_INT_TYPE));
646}
647
a49ae217
BS
/* Allocates and initialize the conflict vector of OBJ for NUM
   conflicting objects.  The vector is NULL-terminated, hence the
   extra slot.  */
void
ira_allocate_conflict_vec (ira_object_t obj, int num)
{
  int size;
  ira_object_t *vec;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  num++; /* for NULL end marker  */
  size = sizeof (ira_object_t) * num;
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
  vec[0] = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = true;
}
666
/* Allocate and initialize the conflict bit vector of OBJ.  The vector
   covers conflict ids in [OBJECT_MIN, OBJECT_MAX] and starts out all
   zero.  */
static void
allocate_conflict_bit_vec (ira_object_t obj)
{
  unsigned int size;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
	  / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = false;
}
681
682/* Allocate and initialize the conflict vector or conflict bit vector
ac0ab4f7 683 of OBJ for NUM conflicting allocnos whatever is more profitable. */
058e97ec 684void
ac0ab4f7 685ira_allocate_object_conflicts (ira_object_t obj, int num)
058e97ec 686{
ac0ab4f7
BS
687 if (ira_conflict_vector_profitable_p (obj, num))
688 ira_allocate_conflict_vec (obj, num);
058e97ec 689 else
ac0ab4f7 690 allocate_conflict_bit_vec (obj);
058e97ec
VM
691}
692
a49ae217 693/* Add OBJ2 to the conflicts of OBJ1. */
058e97ec 694static void
a49ae217 695add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
058e97ec
VM
696{
697 int num;
698 unsigned int size;
699
a49ae217 700 if (OBJECT_CONFLICT_VEC_P (obj1))
058e97ec 701 {
a49ae217
BS
702 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
703 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
704 num = curr_num + 2;
705 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
058e97ec 706 {
a49ae217 707 ira_object_t *newvec;
058e97ec 708 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
a49ae217
BS
709 newvec = (ira_object_t *) ira_allocate (size);
710 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
711 ira_free (vec);
712 vec = newvec;
713 OBJECT_CONFLICT_ARRAY (obj1) = vec;
714 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 715 }
a49ae217 716 vec[num - 2] = obj2;
058e97ec 717 vec[num - 1] = NULL;
a49ae217 718 OBJECT_NUM_CONFLICTS (obj1)++;
058e97ec
VM
719 }
720 else
721 {
722 int nw, added_head_nw, id;
a49ae217 723 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
058e97ec 724
a49ae217
BS
725 id = OBJECT_CONFLICT_ID (obj2);
726 if (OBJECT_MIN (obj1) > id)
058e97ec
VM
727 {
728 /* Expand head of the bit vector. */
a49ae217
BS
729 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
730 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 731 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
a49ae217 732 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
058e97ec
VM
733 {
734 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
735 vec, nw * sizeof (IRA_INT_TYPE));
736 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
737 }
738 else
739 {
740 size
741 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
742 vec = (IRA_INT_TYPE *) ira_allocate (size);
743 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
a49ae217 744 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
745 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
746 memset ((char *) vec
747 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
748 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
a49ae217
BS
749 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
750 OBJECT_CONFLICT_ARRAY (obj1) = vec;
751 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 752 }
a49ae217 753 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
058e97ec 754 }
a49ae217 755 else if (OBJECT_MAX (obj1) < id)
058e97ec 756 {
a49ae217 757 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 758 size = nw * sizeof (IRA_INT_TYPE);
a49ae217 759 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
058e97ec
VM
760 {
761 /* Expand tail of the bit vector. */
762 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
763 vec = (IRA_INT_TYPE *) ira_allocate (size);
a49ae217
BS
764 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
765 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
766 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
767 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
768 OBJECT_CONFLICT_ARRAY (obj1) = vec;
769 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 770 }
a49ae217 771 OBJECT_MAX (obj1) = id;
058e97ec 772 }
a49ae217 773 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
058e97ec
VM
774 }
775}
776
a49ae217
BS
/* Add OBJ1 to the conflicts of OBJ2 and vice versa.  Conflicts are
   kept symmetric.  */
static void
ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
{
  add_to_conflicts (obj1, obj2);
  add_to_conflicts (obj2, obj1);
}
784
a49ae217 785/* Clear all conflicts of OBJ. */
058e97ec 786static void
a49ae217 787clear_conflicts (ira_object_t obj)
058e97ec 788{
a49ae217 789 if (OBJECT_CONFLICT_VEC_P (obj))
058e97ec 790 {
a49ae217
BS
791 OBJECT_NUM_CONFLICTS (obj) = 0;
792 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
058e97ec 793 }
a49ae217 794 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
058e97ec
VM
795 {
796 int nw;
797
a49ae217
BS
798 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
799 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
800 }
801}
802
/* The array used to find duplications in conflict vectors of
   allocnos.  Indexed by conflict id; holds the tick at which the id
   was last seen.  */
static int *conflict_check;

/* The value used to mark allocation presence in conflict vector of
   the current allocno.  */
static int curr_conflict_check_tick;

/* Remove duplications in conflict vector of OBJ.  Incrementing the
   tick makes all previous marks stale, so no per-object clearing of
   conflict_check is needed.  */
static void
compress_conflict_vec (ira_object_t obj)
{
  ira_object_t *vec, conflict_obj;
  int i, j;

  ira_assert (OBJECT_CONFLICT_VEC_P (obj));
  vec = OBJECT_CONFLICT_VEC (obj);
  curr_conflict_check_tick++;
  /* In-place compaction: j trails i, keeping only first
     occurrences.  */
  for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
    {
      int id = OBJECT_CONFLICT_ID (conflict_obj);
      if (conflict_check[id] != curr_conflict_check_tick)
	{
	  conflict_check[id] = curr_conflict_check_tick;
	  vec[j++] = conflict_obj;
	}
    }
  OBJECT_NUM_CONFLICTS (obj) = j;
  vec[j] = NULL;
}
833
834/* Remove duplications in conflict vectors of all allocnos. */
835static void
836compress_conflict_vecs (void)
837{
ac0ab4f7
BS
838 ira_object_t obj;
839 ira_object_iterator oi;
058e97ec 840
a49ae217
BS
841 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
842 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
843 curr_conflict_check_tick = 0;
ac0ab4f7 844 FOR_EACH_OBJECT (obj, oi)
a49ae217 845 {
a49ae217
BS
846 if (OBJECT_CONFLICT_VEC_P (obj))
847 compress_conflict_vec (obj);
848 }
849 ira_free (conflict_check);
058e97ec
VM
850}
851
/* This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  Output goes to ira_dump_file.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  /* Print either the basic block or the loop the allocno belongs
     to.  */
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
871
/* Create and return the cap representing allocno A in the
   parent loop.  The cap copies A's class, mode, costs and conflict
   information; caps themselves are not entered in the regno maps
   (cap_p is true).  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  ALLOCNO_WMODE (cap) = ALLOCNO_WMODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  ALLOCNO_CROSSED_CALLS_ABIS (cap) = ALLOCNO_CROSSED_CALLS_ABIS (a);
  ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (cap)
    = ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
917
ac0ab4f7 918/* Create and return a live range for OBJECT with given attributes. */
b14151b5 919live_range_t
9140d27b
BS
920ira_create_live_range (ira_object_t obj, int start, int finish,
921 live_range_t next)
058e97ec 922{
b14151b5 923 live_range_t p;
058e97ec 924
0b470bae 925 p = live_range_pool.allocate ();
9140d27b 926 p->object = obj;
058e97ec
VM
927 p->start = start;
928 p->finish = finish;
929 p->next = next;
930 return p;
931}
932
ac0ab4f7
BS
933/* Create a new live range for OBJECT and queue it at the head of its
934 live range list. */
935void
936ira_add_live_range_to_object (ira_object_t object, int start, int finish)
937{
938 live_range_t p;
939 p = ira_create_live_range (object, start, finish,
940 OBJECT_LIVE_RANGES (object));
941 OBJECT_LIVE_RANGES (object) = p;
942}
943
058e97ec 944/* Copy allocno live range R and return the result. */
b14151b5 945static live_range_t
9140d27b 946copy_live_range (live_range_t r)
058e97ec 947{
b14151b5 948 live_range_t p;
058e97ec 949
0b470bae 950 p = live_range_pool.allocate ();
058e97ec
VM
951 *p = *r;
952 return p;
953}
954
955/* Copy allocno live range list given by its head R and return the
956 result. */
b14151b5 957live_range_t
9140d27b 958ira_copy_live_range_list (live_range_t r)
058e97ec 959{
b14151b5 960 live_range_t p, first, last;
058e97ec
VM
961
962 if (r == NULL)
963 return NULL;
964 for (first = last = NULL; r != NULL; r = r->next)
965 {
9140d27b 966 p = copy_live_range (r);
058e97ec
VM
967 if (first == NULL)
968 first = p;
969 else
970 last->next = p;
971 last = p;
972 }
973 return first;
974}
975
3553f0bb
VM
976/* Merge ranges R1 and R2 and returns the result. The function
977 maintains the order of ranges and tries to minimize number of the
978 result ranges. */
b14151b5 979live_range_t
9140d27b 980ira_merge_live_ranges (live_range_t r1, live_range_t r2)
3553f0bb 981{
fab27f52 982 live_range_t first, last;
3553f0bb
VM
983
984 if (r1 == NULL)
985 return r2;
986 if (r2 == NULL)
987 return r1;
988 for (first = last = NULL; r1 != NULL && r2 != NULL;)
989 {
990 if (r1->start < r2->start)
fab27f52 991 std::swap (r1, r2);
3553f0bb
VM
992 if (r1->start <= r2->finish + 1)
993 {
994 /* Intersected ranges: merge r1 and r2 into r1. */
995 r1->start = r2->start;
996 if (r1->finish < r2->finish)
997 r1->finish = r2->finish;
fab27f52 998 live_range_t temp = r2;
3553f0bb 999 r2 = r2->next;
9140d27b 1000 ira_finish_live_range (temp);
3553f0bb
VM
1001 if (r2 == NULL)
1002 {
1003 /* To try to merge with subsequent ranges in r1. */
1004 r2 = r1->next;
1005 r1->next = NULL;
1006 }
1007 }
1008 else
1009 {
1010 /* Add r1 to the result. */
1011 if (first == NULL)
1012 first = last = r1;
1013 else
1014 {
1015 last->next = r1;
1016 last = r1;
1017 }
1018 r1 = r1->next;
1019 if (r1 == NULL)
1020 {
1021 /* To try to merge with subsequent ranges in r2. */
1022 r1 = r2->next;
1023 r2->next = NULL;
1024 }
1025 }
1026 }
1027 if (r1 != NULL)
1028 {
1029 if (first == NULL)
1030 first = r1;
1031 else
1032 last->next = r1;
1033 ira_assert (r1->next == NULL);
1034 }
1035 else if (r2 != NULL)
1036 {
1037 if (first == NULL)
1038 first = r2;
1039 else
1040 last->next = r2;
1041 ira_assert (r2->next == NULL);
1042 }
1043 else
1044 {
1045 ira_assert (last->next == NULL);
1046 }
1047 return first;
1048}
1049
1050/* Return TRUE if live ranges R1 and R2 intersect. */
1051bool
9140d27b 1052ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
3553f0bb
VM
1053{
1054 /* Remember the live ranges are always kept ordered. */
1055 while (r1 != NULL && r2 != NULL)
1056 {
1057 if (r1->start > r2->finish)
1058 r1 = r1->next;
1059 else if (r2->start > r1->finish)
1060 r2 = r2->next;
1061 else
1062 return true;
1063 }
1064 return false;
1065}
1066
058e97ec
VM
1067/* Free allocno live range R. */
1068void
9140d27b 1069ira_finish_live_range (live_range_t r)
058e97ec 1070{
0b470bae 1071 live_range_pool.remove (r);
058e97ec
VM
1072}
1073
3553f0bb
VM
1074/* Free list of allocno live ranges starting with R. */
1075void
9140d27b 1076ira_finish_live_range_list (live_range_t r)
3553f0bb 1077{
b14151b5 1078 live_range_t next_r;
3553f0bb
VM
1079
1080 for (; r != NULL; r = next_r)
1081 {
1082 next_r = r->next;
9140d27b 1083 ira_finish_live_range (r);
3553f0bb
VM
1084 }
1085}
1086
058e97ec
VM
1087/* Free updated register costs of allocno A. */
1088void
1089ira_free_allocno_updated_costs (ira_allocno_t a)
1090{
1756cb66 1091 enum reg_class aclass;
058e97ec 1092
1756cb66 1093 aclass = ALLOCNO_CLASS (a);
058e97ec 1094 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1095 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1096 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1097 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1098 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66 1099 aclass);
058e97ec
VM
1100 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1101}
1102
1756cb66
VM
1103/* Free and nullify all cost vectors allocated earlier for allocno
1104 A. */
058e97ec 1105static void
1756cb66 1106ira_free_allocno_costs (ira_allocno_t a)
058e97ec 1107{
1756cb66 1108 enum reg_class aclass = ALLOCNO_CLASS (a);
ac0ab4f7
BS
1109 ira_object_t obj;
1110 ira_allocno_object_iterator oi;
058e97ec 1111
ac0ab4f7
BS
1112 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
1113 {
1114 ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
1115 ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
1116 if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
1117 ira_free (OBJECT_CONFLICT_ARRAY (obj));
0b470bae 1118 object_pool.remove (obj);
ac0ab4f7 1119 }
9140d27b 1120
058e97ec 1121 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
058e97ec 1122 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1756cb66 1123 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
058e97ec 1124 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1756cb66 1125 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
058e97ec 1126 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1127 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1128 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1129 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66
VM
1130 aclass);
1131 ALLOCNO_HARD_REG_COSTS (a) = NULL;
1132 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
1133 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1134 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1135}
1136
1137/* Free the memory allocated for allocno A. */
1138static void
1139finish_allocno (ira_allocno_t a)
1140{
1141 ira_free_allocno_costs (a);
0b470bae 1142 allocno_pool.remove (a);
058e97ec
VM
1143}
1144
1145/* Free the memory allocated for all allocnos. */
1146static void
1147finish_allocnos (void)
1148{
1149 ira_allocno_t a;
1150 ira_allocno_iterator ai;
1151
1152 FOR_EACH_ALLOCNO (a, ai)
1153 finish_allocno (a);
1154 ira_free (ira_regno_allocno_map);
9771b263
DN
1155 ira_object_id_map_vec.release ();
1156 allocno_vec.release ();
0b470bae
ML
1157 allocno_pool.release ();
1158 object_pool.release ();
1159 live_range_pool.release ();
058e97ec
VM
1160}
1161
1162\f
1163
3b6d1699 1164/* Pools for allocno preferences. */
fcb87c50 1165static object_allocator <ira_allocno_pref> pref_pool ("prefs");
3b6d1699
VM
1166
1167/* Vec containing references to all created preferences. It is a
1168 container of array ira_prefs. */
1169static vec<ira_pref_t> pref_vec;
1170
1171/* The function initializes data concerning allocno prefs. */
1172static void
1173initiate_prefs (void)
1174{
3b6d1699
VM
1175 pref_vec.create (get_max_uid ());
1176 ira_prefs = NULL;
1177 ira_prefs_num = 0;
1178}
1179
1180/* Return pref for A and HARD_REGNO if any. */
1181static ira_pref_t
1182find_allocno_pref (ira_allocno_t a, int hard_regno)
1183{
1184 ira_pref_t pref;
1185
1186 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1187 if (pref->allocno == a && pref->hard_regno == hard_regno)
1188 return pref;
1189 return NULL;
1190}
1191
1192/* Create and return pref with given attributes A, HARD_REGNO, and FREQ. */
1193ira_pref_t
1194ira_create_pref (ira_allocno_t a, int hard_regno, int freq)
1195{
1196 ira_pref_t pref;
1197
0b470bae 1198 pref = pref_pool.allocate ();
3b6d1699
VM
1199 pref->num = ira_prefs_num;
1200 pref->allocno = a;
1201 pref->hard_regno = hard_regno;
1202 pref->freq = freq;
1203 pref_vec.safe_push (pref);
1204 ira_prefs = pref_vec.address ();
1205 ira_prefs_num = pref_vec.length ();
1206 return pref;
1207}
1208
df3e3493 1209/* Attach a pref PREF to the corresponding allocno. */
3b6d1699
VM
1210static void
1211add_allocno_pref_to_list (ira_pref_t pref)
1212{
1213 ira_allocno_t a = pref->allocno;
1214
1215 pref->next_pref = ALLOCNO_PREFS (a);
1216 ALLOCNO_PREFS (a) = pref;
1217}
1218
1219/* Create (or update frequency if the pref already exists) the pref of
1220 allocnos A preferring HARD_REGNO with frequency FREQ. */
1221void
1222ira_add_allocno_pref (ira_allocno_t a, int hard_regno, int freq)
1223{
1224 ira_pref_t pref;
1225
1226 if (freq <= 0)
1227 return;
1228 if ((pref = find_allocno_pref (a, hard_regno)) != NULL)
1229 {
1230 pref->freq += freq;
1231 return;
1232 }
1233 pref = ira_create_pref (a, hard_regno, freq);
1234 ira_assert (a != NULL);
1235 add_allocno_pref_to_list (pref);
1236}
1237
1238/* Print info about PREF into file F. */
1239static void
1240print_pref (FILE *f, ira_pref_t pref)
1241{
1242 fprintf (f, " pref%d:a%d(r%d)<-hr%d@%d\n", pref->num,
1243 ALLOCNO_NUM (pref->allocno), ALLOCNO_REGNO (pref->allocno),
1244 pref->hard_regno, pref->freq);
1245}
1246
1247/* Print info about PREF into stderr. */
1248void
1249ira_debug_pref (ira_pref_t pref)
1250{
1251 print_pref (stderr, pref);
1252}
1253
1254/* Print info about all prefs into file F. */
1255static void
1256print_prefs (FILE *f)
1257{
1258 ira_pref_t pref;
1259 ira_pref_iterator pi;
1260
1261 FOR_EACH_PREF (pref, pi)
1262 print_pref (f, pref);
1263}
1264
1265/* Print info about all prefs into stderr. */
1266void
1267ira_debug_prefs (void)
1268{
1269 print_prefs (stderr);
1270}
1271
1272/* Print info about prefs involving allocno A into file F. */
1273static void
1274print_allocno_prefs (FILE *f, ira_allocno_t a)
1275{
1276 ira_pref_t pref;
1277
1278 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1279 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1280 fprintf (f, " pref%d:hr%d@%d", pref->num, pref->hard_regno, pref->freq);
1281 fprintf (f, "\n");
1282}
1283
1284/* Print info about prefs involving allocno A into stderr. */
1285void
1286ira_debug_allocno_prefs (ira_allocno_t a)
1287{
1288 print_allocno_prefs (stderr, a);
1289}
1290
1291/* The function frees memory allocated for PREF. */
1292static void
1293finish_pref (ira_pref_t pref)
1294{
1295 ira_prefs[pref->num] = NULL;
0b470bae 1296 pref_pool.remove (pref);
3b6d1699
VM
1297}
1298
1299/* Remove PREF from the list of allocno prefs and free memory for
1300 it. */
1301void
1302ira_remove_pref (ira_pref_t pref)
1303{
1304 ira_pref_t cpref, prev;
1305
1306 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1307 fprintf (ira_dump_file, " Removing pref%d:hr%d@%d\n",
1308 pref->num, pref->hard_regno, pref->freq);
1309 for (prev = NULL, cpref = ALLOCNO_PREFS (pref->allocno);
1310 cpref != NULL;
1311 prev = cpref, cpref = cpref->next_pref)
1312 if (cpref == pref)
1313 break;
1314 ira_assert (cpref != NULL);
1315 if (prev == NULL)
1316 ALLOCNO_PREFS (pref->allocno) = pref->next_pref;
1317 else
1318 prev->next_pref = pref->next_pref;
1319 finish_pref (pref);
1320}
1321
1322/* Remove all prefs of allocno A. */
1323void
1324ira_remove_allocno_prefs (ira_allocno_t a)
1325{
1326 ira_pref_t pref, next_pref;
1327
1328 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = next_pref)
1329 {
1330 next_pref = pref->next_pref;
1331 finish_pref (pref);
1332 }
1333 ALLOCNO_PREFS (a) = NULL;
1334}
1335
1336/* Free memory allocated for all prefs. */
1337static void
1338finish_prefs (void)
1339{
1340 ira_pref_t pref;
1341 ira_pref_iterator pi;
1342
1343 FOR_EACH_PREF (pref, pi)
1344 finish_pref (pref);
1345 pref_vec.release ();
0b470bae 1346 pref_pool.release ();
3b6d1699
VM
1347}
1348
1349\f
1350
058e97ec 1351/* Pools for copies. */
fcb87c50 1352static object_allocator<ira_allocno_copy> copy_pool ("copies");
058e97ec
VM
1353
1354/* Vec containing references to all created copies. It is a
1355 container of array ira_copies. */
9771b263 1356static vec<ira_copy_t> copy_vec;
058e97ec
VM
1357
1358/* The function initializes data concerning allocno copies. */
1359static void
1360initiate_copies (void)
1361{
9771b263 1362 copy_vec.create (get_max_uid ());
058e97ec
VM
1363 ira_copies = NULL;
1364 ira_copies_num = 0;
1365}
1366
1367/* Return copy connecting A1 and A2 and originated from INSN of
1368 LOOP_TREE_NODE if any. */
1369static ira_copy_t
070a1983 1370find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx_insn *insn,
058e97ec
VM
1371 ira_loop_tree_node_t loop_tree_node)
1372{
1373 ira_copy_t cp, next_cp;
1374 ira_allocno_t another_a;
1375
1376 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1377 {
1378 if (cp->first == a1)
1379 {
1380 next_cp = cp->next_first_allocno_copy;
1381 another_a = cp->second;
1382 }
1383 else if (cp->second == a1)
1384 {
1385 next_cp = cp->next_second_allocno_copy;
1386 another_a = cp->first;
1387 }
1388 else
1389 gcc_unreachable ();
1390 if (another_a == a2 && cp->insn == insn
1391 && cp->loop_tree_node == loop_tree_node)
1392 return cp;
1393 }
1394 return NULL;
1395}
1396
1397/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
548a6322 1398 SECOND, FREQ, CONSTRAINT_P, and INSN. */
058e97ec 1399ira_copy_t
548a6322 1400ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
070a1983 1401 bool constraint_p, rtx_insn *insn,
058e97ec
VM
1402 ira_loop_tree_node_t loop_tree_node)
1403{
1404 ira_copy_t cp;
1405
0b470bae 1406 cp = copy_pool.allocate ();
058e97ec
VM
1407 cp->num = ira_copies_num;
1408 cp->first = first;
1409 cp->second = second;
1410 cp->freq = freq;
548a6322 1411 cp->constraint_p = constraint_p;
058e97ec
VM
1412 cp->insn = insn;
1413 cp->loop_tree_node = loop_tree_node;
9771b263
DN
1414 copy_vec.safe_push (cp);
1415 ira_copies = copy_vec.address ();
1416 ira_copies_num = copy_vec.length ();
058e97ec
VM
1417 return cp;
1418}
1419
1420/* Attach a copy CP to allocnos involved into the copy. */
3b6d1699
VM
1421static void
1422add_allocno_copy_to_list (ira_copy_t cp)
058e97ec
VM
1423{
1424 ira_allocno_t first = cp->first, second = cp->second;
1425
1426 cp->prev_first_allocno_copy = NULL;
1427 cp->prev_second_allocno_copy = NULL;
1428 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1429 if (cp->next_first_allocno_copy != NULL)
1430 {
1431 if (cp->next_first_allocno_copy->first == first)
1432 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1433 else
1434 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
1435 }
1436 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1437 if (cp->next_second_allocno_copy != NULL)
1438 {
1439 if (cp->next_second_allocno_copy->second == second)
1440 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1441 else
1442 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1443 }
1444 ALLOCNO_COPIES (first) = cp;
1445 ALLOCNO_COPIES (second) = cp;
1446}
1447
058e97ec
VM
1448/* Make a copy CP a canonical copy where number of the
1449 first allocno is less than the second one. */
3b6d1699
VM
1450static void
1451swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
058e97ec 1452{
058e97ec
VM
1453 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1454 return;
1455
fab27f52
MM
1456 std::swap (cp->first, cp->second);
1457 std::swap (cp->prev_first_allocno_copy, cp->prev_second_allocno_copy);
1458 std::swap (cp->next_first_allocno_copy, cp->next_second_allocno_copy);
058e97ec
VM
1459}
1460
1461/* Create (or update frequency if the copy already exists) and return
1462 the copy of allocnos FIRST and SECOND with frequency FREQ
1463 corresponding to move insn INSN (if any) and originated from
1464 LOOP_TREE_NODE. */
1465ira_copy_t
1466ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
070a1983 1467 bool constraint_p, rtx_insn *insn,
548a6322 1468 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
1469{
1470 ira_copy_t cp;
1471
1472 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1473 {
1474 cp->freq += freq;
1475 return cp;
1476 }
548a6322
VM
1477 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1478 loop_tree_node);
058e97ec 1479 ira_assert (first != NULL && second != NULL);
3b6d1699
VM
1480 add_allocno_copy_to_list (cp);
1481 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
1482 return cp;
1483}
1484
4cda38d5
VM
1485/* Print info about copy CP into file F. */
1486static void
1487print_copy (FILE *f, ira_copy_t cp)
1488{
548a6322 1489 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
4cda38d5 1490 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
548a6322
VM
1491 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1492 cp->insn != NULL
1493 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
4cda38d5
VM
1494}
1495
7b3b6ae4
LC
1496DEBUG_FUNCTION void
1497debug (ira_allocno_copy &ref)
1498{
1499 print_copy (stderr, &ref);
1500}
1501
1502DEBUG_FUNCTION void
1503debug (ira_allocno_copy *ptr)
1504{
1505 if (ptr)
1506 debug (*ptr);
1507 else
1508 fprintf (stderr, "<nil>\n");
1509}
1510
4cda38d5
VM
1511/* Print info about copy CP into stderr. */
1512void
1513ira_debug_copy (ira_copy_t cp)
1514{
1515 print_copy (stderr, cp);
1516}
1517
1518/* Print info about all copies into file F. */
1519static void
1520print_copies (FILE *f)
1521{
1522 ira_copy_t cp;
1523 ira_copy_iterator ci;
1524
1525 FOR_EACH_COPY (cp, ci)
1526 print_copy (f, cp);
1527}
1528
1529/* Print info about all copies into stderr. */
1530void
1531ira_debug_copies (void)
1532{
1533 print_copies (stderr);
1534}
1535
058e97ec
VM
1536/* Print info about copies involving allocno A into file F. */
1537static void
1538print_allocno_copies (FILE *f, ira_allocno_t a)
1539{
1540 ira_allocno_t another_a;
1541 ira_copy_t cp, next_cp;
1542
1543 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1544 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1545 {
1546 if (cp->first == a)
1547 {
1548 next_cp = cp->next_first_allocno_copy;
1549 another_a = cp->second;
1550 }
1551 else if (cp->second == a)
1552 {
1553 next_cp = cp->next_second_allocno_copy;
1554 another_a = cp->first;
1555 }
1556 else
1557 gcc_unreachable ();
1558 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1559 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1560 }
1561 fprintf (f, "\n");
1562}
1563
7b3b6ae4
LC
1564DEBUG_FUNCTION void
1565debug (ira_allocno &ref)
1566{
1567 print_allocno_copies (stderr, &ref);
1568}
1569
1570DEBUG_FUNCTION void
1571debug (ira_allocno *ptr)
1572{
1573 if (ptr)
1574 debug (*ptr);
1575 else
1576 fprintf (stderr, "<nil>\n");
1577}
1578
1579
058e97ec
VM
1580/* Print info about copies involving allocno A into stderr. */
1581void
1582ira_debug_allocno_copies (ira_allocno_t a)
1583{
1584 print_allocno_copies (stderr, a);
1585}
1586
1587/* The function frees memory allocated for copy CP. */
1588static void
1589finish_copy (ira_copy_t cp)
1590{
0b470bae 1591 copy_pool.remove (cp);
058e97ec
VM
1592}
1593
1594
1595/* Free memory allocated for all copies. */
1596static void
1597finish_copies (void)
1598{
1599 ira_copy_t cp;
1600 ira_copy_iterator ci;
1601
1602 FOR_EACH_COPY (cp, ci)
1603 finish_copy (cp);
9771b263 1604 copy_vec.release ();
0b470bae 1605 copy_pool.release ();
058e97ec
VM
1606}
1607
1608\f
1609
1756cb66 1610/* Pools for cost vectors. It is defined only for allocno classes. */
fb0b2914 1611static pool_allocator *cost_vector_pool[N_REG_CLASSES];
058e97ec
VM
1612
1613/* The function initiates work with hard register cost vectors. It
1756cb66 1614 creates allocation pool for each allocno class. */
058e97ec
VM
1615static void
1616initiate_cost_vectors (void)
1617{
1618 int i;
1756cb66 1619 enum reg_class aclass;
058e97ec 1620
1756cb66 1621 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1622 {
1756cb66 1623 aclass = ira_allocno_classes[i];
fb0b2914 1624 cost_vector_pool[aclass] = new pool_allocator
fcb87c50 1625 ("cost vectors", sizeof (int) * (ira_class_hard_regs_num[aclass]));
058e97ec
VM
1626 }
1627}
1628
1756cb66 1629/* Allocate and return a cost vector VEC for ACLASS. */
058e97ec 1630int *
6f76a878 1631ira_allocate_cost_vector (reg_class_t aclass)
058e97ec 1632{
fb0b2914 1633 return (int*) cost_vector_pool[(int) aclass]->allocate ();
058e97ec
VM
1634}
1635
1756cb66 1636/* Free a cost vector VEC for ACLASS. */
058e97ec 1637void
6f76a878 1638ira_free_cost_vector (int *vec, reg_class_t aclass)
058e97ec
VM
1639{
1640 ira_assert (vec != NULL);
3599f64a 1641 cost_vector_pool[(int) aclass]->remove (vec);
058e97ec
VM
1642}
1643
1644/* Finish work with hard register cost vectors. Release allocation
1756cb66 1645 pool for each allocno class. */
058e97ec
VM
1646static void
1647finish_cost_vectors (void)
1648{
1649 int i;
1756cb66 1650 enum reg_class aclass;
058e97ec 1651
1756cb66 1652 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1653 {
1756cb66 1654 aclass = ira_allocno_classes[i];
3599f64a 1655 delete cost_vector_pool[aclass];
058e97ec
VM
1656 }
1657}
1658
1659\f
1660
e6a7da82
SB
1661/* Compute a post-ordering of the reverse control flow of the loop body
1662 designated by the children nodes of LOOP_NODE, whose body nodes in
1663 pre-order are input as LOOP_PREORDER. Return a VEC with a post-order
1664 of the reverse loop body.
1665
1666 For the post-order of the reverse CFG, we visit the basic blocks in
1667 LOOP_PREORDER array in the reverse order of where they appear.
1668 This is important: We do not just want to compute a post-order of
1669 the reverse CFG, we want to make a best-guess for a visiting order that
1670 minimizes the number of chain elements per allocno live range. If the
1671 blocks would be visited in a different order, we would still compute a
1672 correct post-ordering but it would be less likely that two nodes
9c582551 1673 connected by an edge in the CFG are neighbors in the topsort. */
e6a7da82 1674
9771b263 1675static vec<ira_loop_tree_node_t>
e6a7da82 1676ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
9771b263 1677 vec<ira_loop_tree_node_t> loop_preorder)
e6a7da82 1678{
6e1aa848 1679 vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
e6a7da82
SB
1680 unsigned int n_loop_preorder;
1681
9771b263 1682 n_loop_preorder = loop_preorder.length ();
e6a7da82
SB
1683 if (n_loop_preorder != 0)
1684 {
1685 ira_loop_tree_node_t subloop_node;
1686 unsigned int i;
ef062b13 1687 auto_vec<ira_loop_tree_node_t> dfs_stack;
e6a7da82
SB
1688
1689 /* This is a bit of strange abuse of the BB_VISITED flag: We use
1690 the flag to mark blocks we still have to visit to add them to
1691 our post-order. Define an alias to avoid confusion. */
1692#define BB_TO_VISIT BB_VISITED
1693
9771b263 1694 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1695 {
1696 gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
1697 subloop_node->bb->flags |= BB_TO_VISIT;
1698 }
1699
9771b263
DN
1700 topsort_nodes.create (n_loop_preorder);
1701 dfs_stack.create (n_loop_preorder);
e6a7da82 1702
9771b263 1703 FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
e6a7da82
SB
1704 {
1705 if (! (subloop_node->bb->flags & BB_TO_VISIT))
1706 continue;
1707
1708 subloop_node->bb->flags &= ~BB_TO_VISIT;
9771b263
DN
1709 dfs_stack.quick_push (subloop_node);
1710 while (! dfs_stack.is_empty ())
e6a7da82
SB
1711 {
1712 edge e;
1713 edge_iterator ei;
1714
9771b263 1715 ira_loop_tree_node_t n = dfs_stack.last ();
e6a7da82
SB
1716 FOR_EACH_EDGE (e, ei, n->bb->preds)
1717 {
1718 ira_loop_tree_node_t pred_node;
1719 basic_block pred_bb = e->src;
1720
fefa31b5 1721 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
e6a7da82
SB
1722 continue;
1723
1724 pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
1725 if (pred_node != n
1726 && (pred_node->bb->flags & BB_TO_VISIT))
1727 {
1728 pred_node->bb->flags &= ~BB_TO_VISIT;
9771b263 1729 dfs_stack.quick_push (pred_node);
e6a7da82
SB
1730 }
1731 }
9771b263 1732 if (n == dfs_stack.last ())
e6a7da82 1733 {
9771b263
DN
1734 dfs_stack.pop ();
1735 topsort_nodes.quick_push (n);
e6a7da82
SB
1736 }
1737 }
1738 }
1739
1740#undef BB_TO_VISIT
e6a7da82
SB
1741 }
1742
9771b263 1743 gcc_assert (topsort_nodes.length () == n_loop_preorder);
e6a7da82
SB
1744 return topsort_nodes;
1745}
1746
058e97ec
VM
1747/* The current loop tree node and its regno allocno map. */
1748ira_loop_tree_node_t ira_curr_loop_tree_node;
1749ira_allocno_t *ira_curr_regno_allocno_map;
1750
1751/* This recursive function traverses loop tree with root LOOP_NODE
1752 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1753 correspondingly in preorder and postorder. The function sets up
1754 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1755 basic block nodes of LOOP_NODE is also processed (before its
e6a7da82
SB
1756 subloop nodes).
1757
1758 If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
1759 the loop are passed in the *reverse* post-order of the *reverse*
1760 CFG. This is only used by ira_create_allocno_live_ranges, which
1761 wants to visit basic blocks in this order to minimize the number
1762 of elements per live range chain.
1763 Note that the loop tree nodes are still visited in the normal,
1764 forward post-order of the loop tree. */
1765
058e97ec
VM
1766void
1767ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1768 void (*preorder_func) (ira_loop_tree_node_t),
1769 void (*postorder_func) (ira_loop_tree_node_t))
1770{
1771 ira_loop_tree_node_t subloop_node;
1772
1773 ira_assert (loop_node->bb == NULL);
1774 ira_curr_loop_tree_node = loop_node;
1775 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1776
1777 if (preorder_func != NULL)
1778 (*preorder_func) (loop_node);
b8698a0f 1779
058e97ec 1780 if (bb_p)
e6a7da82 1781 {
ef062b13 1782 auto_vec<ira_loop_tree_node_t> loop_preorder;
e6a7da82
SB
1783 unsigned int i;
1784
1785 /* Add all nodes to the set of nodes to visit. The IRA loop tree
1786 is set up such that nodes in the loop body appear in a pre-order
1787 of their place in the CFG. */
1788 for (subloop_node = loop_node->children;
1789 subloop_node != NULL;
1790 subloop_node = subloop_node->next)
1791 if (subloop_node->bb != NULL)
9771b263 1792 loop_preorder.safe_push (subloop_node);
e6a7da82
SB
1793
1794 if (preorder_func != NULL)
9771b263 1795 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1796 (*preorder_func) (subloop_node);
1797
1798 if (postorder_func != NULL)
058e97ec 1799 {
9771b263 1800 vec<ira_loop_tree_node_t> loop_rev_postorder =
e6a7da82 1801 ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
9771b263 1802 FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
058e97ec 1803 (*postorder_func) (subloop_node);
9771b263 1804 loop_rev_postorder.release ();
058e97ec 1805 }
e6a7da82
SB
1806 }
1807
058e97ec
VM
1808 for (subloop_node = loop_node->subloops;
1809 subloop_node != NULL;
1810 subloop_node = subloop_node->subloop_next)
1811 {
1812 ira_assert (subloop_node->bb == NULL);
1813 ira_traverse_loop_tree (bb_p, subloop_node,
1814 preorder_func, postorder_func);
1815 }
1816
1817 ira_curr_loop_tree_node = loop_node;
1818 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1819
1820 if (postorder_func != NULL)
1821 (*postorder_func) (loop_node);
1822}
1823
1824\f
1825
1826/* The basic block currently being processed. */
1827static basic_block curr_bb;
1828
1829/* This recursive function creates allocnos corresponding to
1830 pseudo-registers containing in X. True OUTPUT_P means that X is
d1bb282e 1831 an lvalue. PARENT corresponds to the parent expression of X. */
058e97ec 1832static void
d1bb282e 1833create_insn_allocnos (rtx x, rtx outer, bool output_p)
058e97ec
VM
1834{
1835 int i, j;
1836 const char *fmt;
1837 enum rtx_code code = GET_CODE (x);
1838
1839 if (code == REG)
1840 {
1841 int regno;
1842
1843 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1844 {
1845 ira_allocno_t a;
1846
1847 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
d1bb282e
DS
1848 {
1849 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1850 if (outer != NULL && GET_CODE (outer) == SUBREG)
1851 {
ef4bddc2 1852 machine_mode wmode = GET_MODE (outer);
bd4288c0 1853 if (partial_subreg_p (ALLOCNO_WMODE (a), wmode))
d1bb282e
DS
1854 ALLOCNO_WMODE (a) = wmode;
1855 }
1856 }
b8698a0f 1857
058e97ec
VM
1858 ALLOCNO_NREFS (a)++;
1859 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
058e97ec
VM
1860 if (output_p)
1861 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1862 }
1863 return;
1864 }
1865 else if (code == SET)
1866 {
d1bb282e
DS
1867 create_insn_allocnos (SET_DEST (x), NULL, true);
1868 create_insn_allocnos (SET_SRC (x), NULL, false);
058e97ec
VM
1869 return;
1870 }
1871 else if (code == CLOBBER)
1872 {
d1bb282e 1873 create_insn_allocnos (XEXP (x, 0), NULL, true);
058e97ec
VM
1874 return;
1875 }
1876 else if (code == MEM)
1877 {
d1bb282e 1878 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1879 return;
1880 }
b8698a0f 1881 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
058e97ec
VM
1882 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1883 {
d1bb282e
DS
1884 create_insn_allocnos (XEXP (x, 0), NULL, true);
1885 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1886 return;
1887 }
1888
1889 fmt = GET_RTX_FORMAT (code);
1890 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1891 {
1892 if (fmt[i] == 'e')
d1bb282e 1893 create_insn_allocnos (XEXP (x, i), x, output_p);
058e97ec
VM
1894 else if (fmt[i] == 'E')
1895 for (j = 0; j < XVECLEN (x, i); j++)
d1bb282e 1896 create_insn_allocnos (XVECEXP (x, i, j), x, output_p);
058e97ec
VM
1897 }
1898}
1899
/* Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  */
static void
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
  basic_block bb;
  rtx_insn *insn;
  unsigned int i;
  bitmap_iterator bi;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  /* Create allocnos for the pseudos referenced by each real
     (non-debug) insn; debug insns must not affect allocation.  The
     reverse walk matters for the order of the regno allocno lists --
     see the comment in create_allocnos.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), NULL, false);
  /* It might be an allocno living through from one subloop to
     another: such a pseudo is live into the block without being
     referenced in it, so make sure it gets an allocno too.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (ira_curr_regno_allocno_map[i] == NULL)
      ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
1922
/* Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  ira_loop_tree_node_t parent;

  live_in_regs = df_get_live_in (e->dest);
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  /* A pseudo lives across the edge iff it is live out of E->src and
     also live into E->dest.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
                             FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
        if (ira_curr_regno_allocno_map[i] == NULL)
          {
            /* The order of creations is important for right
               ira_regno_allocno_map: create the parent's allocno
               first when it is also missing.  */
            if ((parent = ira_curr_loop_tree_node->parent) != NULL
                && parent->regno_allocno_map[i] == NULL)
              ira_create_allocno (i, false, parent);
            ira_create_allocno (i, false, ira_curr_loop_tree_node);
          }
        bitmap_set_bit (border_allocnos,
                        ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
      }
}
1953
/* Create allocnos corresponding to pseudo-registers living in loop
   represented by the corresponding loop tree node LOOP_NODE.  This
   function is called by ira_traverse_loop_tree.  */
static void
create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
{
  if (loop_node->bb != NULL)
    create_bb_allocnos (loop_node);
  else if (loop_node != ira_loop_tree_root)
    {
      int i;
      edge_iterator ei;
      edge e;
      vec<edge> edges;

      ira_assert (current_loops != NULL);
      /* Loop entry edges: all header predecessors except the latch
         back edge.  */
      FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
        if (e->src != loop_node->loop->latch)
          create_loop_allocnos (e);

      /* Loop exit edges.  */
      edges = get_loop_exit_edges (loop_node->loop);
      FOR_EACH_VEC_ELT (edges, i, e)
        create_loop_allocnos (e);
      edges.release ();
    }
}
1980
1981/* Propagate information about allocnos modified inside the loop given
1982 by its LOOP_TREE_NODE to its parent. */
1983static void
1984propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1985{
1986 if (loop_tree_node == ira_loop_tree_root)
1987 return;
1988 ira_assert (loop_tree_node->bb == NULL);
1989 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1990 loop_tree_node->modified_regnos);
1991}
1992
/* Propagate new info about allocno A (see comments about accumulated
   info in allocno definition) to the corresponding allocno on upper
   loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info means allocno info finally calculated in this
   file.  */
static void
propagate_allocno_info (void)
{
  int i;
  ira_allocno_t a, parent_a;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  /* Propagation is only meaningful when regional allocation is in
     use.  */
  if (flag_ira_region != IRA_REGION_ALL
      && flag_ira_region != IRA_REGION_MIXED)
    return;
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
         a != NULL;
         a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
          && (parent_a = parent->regno_allocno_map[i]) != NULL
          /* There are no caps yet at this point.  So use
             border_allocnos to find allocnos for the propagation.  */
          && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
                           ALLOCNO_NUM (a)))
        {
          /* bad_spill_p only stays true if every child keeps it
             true.  */
          if (! ALLOCNO_BAD_SPILL_P (a))
            ALLOCNO_BAD_SPILL_P (parent_a) = false;
          /* Additive accumulation of reference, frequency, call and
             pressure statistics into the parent allocno.  */
          ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
          ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
          ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
          merge_hard_reg_conflicts (a, parent_a, true);
          ALLOCNO_CALLS_CROSSED_NUM (parent_a)
            += ALLOCNO_CALLS_CROSSED_NUM (a);
          ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
            += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
          /* ABI and clobber sets are unions, not sums.  */
          ALLOCNO_CROSSED_CALLS_ABIS (parent_a)
            |= ALLOCNO_CROSSED_CALLS_ABIS (a);
          ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a)
            |= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
          ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
            += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
          aclass = ALLOCNO_CLASS (a);
          ira_assert (aclass == ALLOCNO_CLASS (parent_a));
          ira_allocate_and_accumulate_costs
            (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
             ALLOCNO_HARD_REG_COSTS (a));
          ira_allocate_and_accumulate_costs
            (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
             aclass,
             ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
          ALLOCNO_CLASS_COST (parent_a)
            += ALLOCNO_CLASS_COST (a);
          ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
        }
}
2051
/* Create allocnos corresponding to pseudo-registers in the current
   function.  Traverse the loop tree for this.  */
static void
create_allocnos (void)
{
  /* We need to process BB first to correctly link allocnos by member
     next_regno_allocno.  */
  ira_traverse_loop_tree (true, ira_loop_tree_root,
                          create_loop_tree_node_allocnos, NULL);
  /* NOTE(review): modified-regno propagation is skipped at -O0 --
     presumably because regional allocation is not done then;
     confirm against callers of this file.  */
  if (optimize)
    ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
                            propagate_modified_regnos);
}
2065
2066\f
2067
2068/* The page contains function to remove some regions from a separate
2069 register allocation. We remove regions whose separate allocation
2070 will hardly improve the result. As a result we speed up regional
2071 register allocation. */
2072
9140d27b 2073/* The function changes the object in range list given by R to OBJ. */
058e97ec 2074static void
9140d27b 2075change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
2076{
2077 for (; r != NULL; r = r->next)
9140d27b 2078 r->object = obj;
058e97ec
VM
2079}
/* Move all live ranges associated with allocno FROM to allocno TO.  */
static void
move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  /* Both allocnos must be split into the same number of objects.  */
  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
        {
          fprintf (ira_dump_file,
                   " Moving ranges of a%dr%d to a%dr%d: ",
                   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
                   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
          ira_print_live_range_list (ira_dump_file, lr);
        }
      /* Retarget the ranges at TO's object and merge them into its
         list; FROM's object keeps nothing.  */
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
        = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
      OBJECT_LIVE_RANGES (from_obj) = NULL;
    }
}
/* Copy all live ranges associated with allocno FROM to allocno TO,
   leaving FROM's own range lists intact (contrast with
   move_allocno_live_ranges).  */
static void
copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  /* Both allocnos must be split into the same number of objects.  */
  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
        {
          fprintf (ira_dump_file, " Copying ranges of a%dr%d to a%dr%d: ",
                   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
                   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
          ira_print_live_range_list (ira_dump_file, lr);
        }
      /* Duplicate the list before retargeting so FROM is not
         modified.  */
      lr = ira_copy_live_range_list (lr);
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
        = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
    }
}
2138
058e97ec
VM
2139/* Return TRUE if NODE represents a loop with low register
2140 pressure. */
2141static bool
2142low_pressure_loop_node_p (ira_loop_tree_node_t node)
2143{
2144 int i;
1756cb66 2145 enum reg_class pclass;
b8698a0f 2146
058e97ec
VM
2147 if (node->bb != NULL)
2148 return false;
b8698a0f 2149
1756cb66 2150 for (i = 0; i < ira_pressure_classes_num; i++)
058e97ec 2151 {
1756cb66 2152 pclass = ira_pressure_classes[i];
f508f827
RS
2153 if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
2154 && ira_class_hard_regs_num[pclass] > 1)
058e97ec
VM
2155 return false;
2156 }
2157 return true;
2158}
#ifdef STACK_REGS
/* Return TRUE if LOOP has a complex enter or exit edge.  We don't
   form a region from such loop if the target use stack register
   because reg-stack.c cannot deal with such edges.  */
static bool
loop_with_complex_edge_p (class loop *loop)
{
  int i;
  edge_iterator ei;
  edge e;
  vec<edge> exits;
  bool complex_p = false;

  /* An EH entry edge into the header disqualifies the loop
     outright.  */
  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->flags & EDGE_EH)
      return true;
  /* Likewise any complex exit edge.  */
  exits = get_loop_exit_edges (loop);
  FOR_EACH_VEC_ELT (exits, i, e)
    if (e->flags & EDGE_COMPLEX)
      {
        complex_p = true;
        break;
      }
  exits.release ();
  return complex_p;
}
#endif
8c5fdaae 2188
30ea859e
VM
2189/* Sort loops for marking them for removal. We put already marked
2190 loops first, then less frequent loops next, and then outer loops
2191 next. */
2192static int
2193loop_compare_func (const void *v1p, const void *v2p)
2194{
2195 int diff;
2196 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
2197 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
2198
2199 ira_assert (l1->parent != NULL && l2->parent != NULL);
2200 if (l1->to_remove_p && ! l2->to_remove_p)
2201 return -1;
2202 if (! l1->to_remove_p && l2->to_remove_p)
2203 return 1;
e7a74006
JH
2204 if ((diff = l1->loop->header->count.to_frequency (cfun)
2205 - l2->loop->header->count.to_frequency (cfun)) != 0)
30ea859e
VM
2206 return diff;
2207 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
2208 return diff;
2209 /* Make sorting stable. */
2608d841 2210 return l1->loop_num - l2->loop_num;
30ea859e
VM
2211}
/* Mark loops which should be removed from regional allocation.  We
   remove a loop with low register pressure inside another loop with
   register pressure.  In this case a separate allocation of the loop
   hardly helps (for irregular register file architecture it could
   help by choosing a better hard register in the loop but we prefer
   faster allocation even in this case).  We also remove cheap loops
   if there are more than param_ira_max_loops_num of them.  Loop with EH
   exit or enter edges are removed too because the allocation might
   require put pseudo moves on the EH edges (we could still do this
   for pseudos with caller saved hard registers in some cases but it
   is impossible to say here or during top-down allocation pass what
   hard register the pseudos get finally).  */
static void
mark_loops_for_removal (void)
{
  int i, n;
  ira_loop_tree_node_t *sorted_loops;
  loop_p loop;

  ira_assert (current_loops != NULL);
  sorted_loops
    = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
                                             * number_of_loops (cfun));
  /* First pass: decide per-loop removability and collect candidates
     (everything but the root) for sorting.  */
  for (n = i = 0; vec_safe_iterate (get_loops (cfun), i, &loop); i++)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
        if (ira_loop_nodes[i].parent == NULL)
          {
            /* Don't remove the root.  */
            ira_loop_nodes[i].to_remove_p = false;
            continue;
          }
        sorted_loops[n++] = &ira_loop_nodes[i];
        /* Removable: a low-pressure loop nested in a low-pressure
           parent, or (with stack regs) a loop with complex
           enter/exit edges.  */
        ira_loop_nodes[i].to_remove_p
          = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
              && low_pressure_loop_node_p (&ira_loop_nodes[i]))
#ifdef STACK_REGS
             || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
#endif
             );
      }
  /* Second pass: enforce the param_ira_max_loops_num budget -- the
     sort puts the cheapest loops first, so mark the excess.  */
  qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
  for (i = 0; i < n - param_ira_max_loops_num; i++)
    {
      sorted_loops[i]->to_remove_p = true;
      if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
        fprintf
          (ira_dump_file,
           " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
           sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
           sorted_loops[i]->loop->header->count.to_frequency (cfun),
           loop_depth (sorted_loops[i]->loop),
           low_pressure_loop_node_p (sorted_loops[i]->parent)
           && low_pressure_loop_node_p (sorted_loops[i])
           ? "low pressure" : "cheap loop");
    }
  ira_free (sorted_loops);
}
/* Mark all loops but root for removing.  */
static void
mark_all_loops_for_removal (void)
{
  int i;
  loop_p loop;

  ira_assert (current_loops != NULL);
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
        if (ira_loop_nodes[i].parent == NULL)
          {
            /* Don't remove the root.  */
            ira_loop_nodes[i].to_remove_p = false;
            continue;
          }
        ira_loop_nodes[i].to_remove_p = true;
        if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
          fprintf
            (ira_dump_file,
             " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
             ira_loop_nodes[i].loop_num,
             ira_loop_nodes[i].loop->header->index,
             ira_loop_nodes[i].loop->header->count.to_frequency (cfun),
             loop_depth (ira_loop_nodes[i].loop));
      }
}
/* Definition of vector of loop tree nodes.  */

/* Vec containing references to all removed loop tree nodes.  */
static vec<ira_loop_tree_node_t> removed_loop_vec;

/* Vec containing references to all children of loop tree nodes.  */
static vec<ira_loop_tree_node_t> children_vec;

/* Remove subregions of NODE if their separate allocation will not
   improve the result.  (Note: "uneccesary" is a long-standing typo
   in this function's name; renaming would touch callers.)  */
static void
remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
{
  unsigned int start;
  bool remove_p;
  ira_loop_tree_node_t subnode;

  remove_p = node->to_remove_p;
  if (! remove_p)
    children_vec.safe_push (node);
  /* Remember where NODE's surviving children start on the stack.  */
  start = children_vec.length ();
  /* Recurse into subloops; BB children always survive and are pushed
     directly.  Children of removed subloops end up on the stack as
     if they were NODE's own.  */
  for (subnode = node->children; subnode != NULL; subnode = subnode->next)
    if (subnode->bb == NULL)
      remove_uneccesary_loop_nodes_from_loop_tree (subnode);
    else
      children_vec.safe_push (subnode);
  node->children = node->subloops = NULL;
  if (remove_p)
    {
      /* A removed node is recorded for later finalization; its
         children stay on the stack for the surviving ancestor.  */
      removed_loop_vec.safe_push (node);
      return;
    }
  /* Re-link the surviving children (including promoted descendants)
     under NODE, rebuilding both the children and subloop lists.  */
  while (children_vec.length () > start)
    {
      subnode = children_vec.pop ();
      subnode->parent = node;
      subnode->next = node->children;
      node->children = subnode;
      if (subnode->bb == NULL)
        {
          subnode->subloop_next = node->subloops;
          node->subloops = subnode;
        }
    }
}
2346
c6bb4c93
VM
2347/* Return TRUE if NODE is inside PARENT. */
2348static bool
2349loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2350{
2351 for (node = node->parent; node != NULL; node = node->parent)
2352 if (node == parent)
2353 return true;
2354 return false;
2355}
2356
2357/* Sort allocnos according to their order in regno allocno list. */
2358static int
2359regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2360{
2361 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2362 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2363 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2364 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2365
2366 if (loop_is_inside_p (n1, n2))
2367 return -1;
2368 else if (loop_is_inside_p (n2, n1))
2369 return 1;
2370 /* If allocnos are equally good, sort by allocno numbers, so that
2371 the results of qsort leave nothing to chance. We put allocnos
2372 with higher number first in the list because it is the original
2373 order for allocnos from loops on the same levels. */
2374 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2375}
/* This array is used to sort allocnos to restore allocno order in
   the regno allocno list.  */
static ira_allocno_t *regno_allocnos;

/* Restore allocno order for REGNO in the regno allocno list.  */
static void
ira_rebuild_regno_allocno_list (int regno)
{
  int i, n;
  ira_allocno_t a;

  /* Collect every allocno of REGNO into the scratch array.  */
  for (n = 0, a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    regno_allocnos[n++] = a;
  ira_assert (n > 0);
  qsort (regno_allocnos, n, sizeof (ira_allocno_t),
         regno_allocno_order_compare_func);
  /* Re-thread the sorted allocnos back into a singly linked list.  */
  for (i = 1; i < n; i++)
    ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
  ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
  ira_regno_allocno_map[regno] = regno_allocnos[0];
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
}
/* Propagate info from allocno FROM_A to allocno A.  */
static void
propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
{
  enum reg_class aclass;

  merge_hard_reg_conflicts (from_a, a, false);
  /* Additive accumulation of reference/frequency/call statistics.  */
  ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
  ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
  ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
  ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
    += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
  /* ABI and clobber sets are unions.  */
  ALLOCNO_CROSSED_CALLS_ABIS (a) |= ALLOCNO_CROSSED_CALLS_ABIS (from_a);
  ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a)
    |= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (from_a);

  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
  /* bad_spill_p is sticky-false: a single spillable contributor
     clears it on the merged allocno.  */
  if (! ALLOCNO_BAD_SPILL_P (from_a))
    ALLOCNO_BAD_SPILL_P (a) = false;
  aclass = ALLOCNO_CLASS (from_a);
  ira_assert (aclass == ALLOCNO_CLASS (a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
                                     ALLOCNO_HARD_REG_COSTS (from_a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
                                     aclass,
                                     ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
  ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
  ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
}
/* Remove allocnos from loops removed from the allocation
   consideration.  */
static void
remove_unnecessary_allocnos (void)
{
  int regno;
  bool merged_p, rebuild_p;
  ira_allocno_t a, prev_a, next_a, parent_a;
  ira_loop_tree_node_t a_node, parent;

  merged_p = false;
  regno_allocnos = NULL;
  for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
    {
      rebuild_p = false;
      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
           a != NULL;
           a = next_a)
        {
          next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
          a_node = ALLOCNO_LOOP_TREE_NODE (a);
          if (! a_node->to_remove_p)
            prev_a = a;
          else
            {
              /* Walk up to the nearest ancestor that either survives
                 or already has an allocno for REGNO.  */
              for (parent = a_node->parent;
                   (parent_a = parent->regno_allocno_map[regno]) == NULL
                     && parent->to_remove_p;
                   parent = parent->parent)
                ;
              if (parent_a == NULL)
                {
                  /* There are no allocnos with the same regno in
                     upper region -- just move the allocno to the
                     upper region.  */
                  prev_a = a;
                  ALLOCNO_LOOP_TREE_NODE (a) = parent;
                  parent->regno_allocno_map[regno] = a;
                  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
                  rebuild_p = true;
                }
              else
                {
                  /* Remove the allocno and update info of allocno in
                     the upper region.  */
                  if (prev_a == NULL)
                    ira_regno_allocno_map[regno] = next_a;
                  else
                    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
                  move_allocno_live_ranges (a, parent_a);
                  merged_p = true;
                  propagate_some_info_from_allocno (parent_a, a);
                  /* Remove it from the corresponding regno allocno
                     map to avoid info propagation of subsequent
                     allocno into this already removed allocno.  */
                  a_node->regno_allocno_map[regno] = NULL;
                  ira_remove_allocno_prefs (a);
                  finish_allocno (a);
                }
            }
        }
      if (rebuild_p)
        /* We need to restore the order in regno allocno list.  */
        {
          /* Allocate the scratch array lazily; one allocation serves
             all regnos.  */
          if (regno_allocnos == NULL)
            regno_allocnos
              = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
                                                * ira_allocnos_num);
          ira_rebuild_regno_allocno_list (regno);
        }
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
  if (regno_allocnos != NULL)
    ira_free (regno_allocnos);
}
/* Remove allocnos from all loops but the root.  */
static void
remove_low_level_allocnos (void)
{
  int regno;
  bool merged_p, propagate_p;
  ira_allocno_t a, top_a;
  ira_loop_tree_node_t a_node, parent;
  ira_allocno_iterator ai;

  merged_p = false;
  /* First pass: either promote an allocno to the root (when the root
     has none for its regno yet) or merge it into the root's
     allocno.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
        continue;
      regno = ALLOCNO_REGNO (a);
      if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
        {
          ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
          ira_loop_tree_root->regno_allocno_map[regno] = a;
          continue;
        }
      /* Only propagate from the topmost allocno of each regno chain
         (the one whose parent has no allocno for the regno).  */
      propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
      /* Remove the allocno and update info of allocno in the upper
         region.  */
      move_allocno_live_ranges (a, top_a);
      merged_p = true;
      if (propagate_p)
        propagate_some_info_from_allocno (top_a, a);
    }
  /* Second pass: consistency checks only (ira_asserts).  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root)
        continue;
      parent = a_node->parent;
      regno = ALLOCNO_REGNO (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
        ira_assert (parent->regno_allocno_map[regno] != NULL);
    }
  /* Third pass: keep only the root's allocnos; delete everything
     else.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      regno = ALLOCNO_REGNO (a);
      if (ira_loop_tree_root->regno_allocno_map[regno] == a)
        {
          ira_object_t obj;
          ira_allocno_object_iterator oi;

          ira_regno_allocno_map[regno] = a;
          ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
          ALLOCNO_CAP_MEMBER (a) = NULL;
          /* The accumulated conflicts become the plain conflicts of
             the sole remaining allocno.  */
          FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
            OBJECT_CONFLICT_HARD_REGS (obj)
              = OBJECT_TOTAL_CONFLICT_HARD_REGS (obj);
#ifdef STACK_REGS
          if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
            ALLOCNO_NO_STACK_REG_P (a) = true;
#endif
        }
      else
        {
          ira_remove_allocno_prefs (a);
          finish_allocno (a);
        }
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
}
/* Remove loops from consideration.  We remove all loops except for
   root if ALL_P or loops for which a separate allocation will not
   improve the result.  We have to do this after allocno creation and
   their costs and allocno class evaluation because only after that
   the register pressure can be known and is calculated.  */
static void
remove_unnecessary_regions (bool all_p)
{
  /* Without a loop structure there are no regions to remove.  */
  if (current_loops == NULL)
    return;
  if (all_p)
    mark_all_loops_for_removal ();
  else
    mark_loops_for_removal ();
  /* Upper bound on tree nodes: one per BB plus one per loop.  */
  children_vec.create (last_basic_block_for_fn (cfun)
                       + number_of_loops (cfun));
  removed_loop_vec.create (last_basic_block_for_fn (cfun)
                           + number_of_loops (cfun));
  remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
  children_vec.release ();
  if (all_p)
    remove_low_level_allocnos ();
  else
    remove_unnecessary_allocnos ();
  /* Finalize the tree nodes collected during the tree rewrite.  */
  while (removed_loop_vec.length () > 0)
    finish_loop_tree_node (removed_loop_vec.pop ());
  removed_loop_vec.release ();
}
/* At this point true value of allocno attribute bad_spill_p means
   that there is an insn where allocno occurs and where the allocno
   cannot be used as memory.  The function updates the attribute, now
   it can be true only for allocnos which cannot be used as memory in
   an insn and in whose live ranges there is other allocno deaths.
   Spilling allocnos with true value will not improve the code because
   it will not make other allocnos colorable and additional reloads
   for the corresponding pseudo will be generated in reload pass for
   each insn it occurs.

   This is a trick mentioned in one classic article of Chaitin etc
   which is frequently omitted in other implementations of RA based on
   graph coloring.  */
static void
update_bad_spill_attribute (void)
{
  int i;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_allocno_object_iterator aoi;
  ira_object_t obj;
  live_range_t r;
  enum reg_class aclass;
  bitmap_head dead_points[N_REG_CLASSES];

  /* Per allocno class, collect the program points at which some
     object of that class dies (its live range finishes).  */
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_initialize (&dead_points[aclass], &reg_obstack);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
        continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
        for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
          bitmap_set_bit (&dead_points[aclass], r->finish);
    }
  /* Clear bad_spill_p for any allocno one of whose ranges contains a
     strictly-interior death point of its class: spilling such an
     allocno can free a register for the dying allocno, so spilling
     it may actually help.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
        continue;
      if (! ALLOCNO_BAD_SPILL_P (a))
        continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
        {
          for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
            {
              /* Look for a death point strictly inside [start,
                 finish).  */
              for (i = r->start + 1; i < r->finish; i++)
                if (bitmap_bit_p (&dead_points[aclass], i))
                  break;
              if (i < r->finish)
                break;
            }
          if (r != NULL)
            {
              ALLOCNO_BAD_SPILL_P (a) = false;
              break;
            }
        }
    }
  /* Release the per-class bitmaps.  */
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_clear (&dead_points[aclass]);
    }
}
/* Set up minimal and maximal live range points for allocnos.  */
static void
setup_min_max_allocno_live_range_point (void)
{
  int i;
  ira_allocno_t a, parent_a, cap;
  ira_allocno_iterator ai;
#ifdef ENABLE_IRA_CHECKING
  ira_object_iterator oi;
  ira_object_t obj;
#endif
  live_range_t r;
  ira_loop_tree_node_t parent;

  /* Initialize MIN/MAX of each object from its own range list.  The
     head's finish gives MAX and the tail's start gives MIN
     (NOTE(review): this assumes range lists are ordered with the
     latest range first -- confirm against the range constructor).  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);

      for (i = 0; i < n; i++)
        {
          ira_object_t obj = ALLOCNO_OBJECT (a, i);
          r = OBJECT_LIVE_RANGES (obj);
          if (r == NULL)
            continue;
          OBJECT_MAX (obj) = r->finish;
          for (; r->next != NULL; r = r->next)
            ;
          OBJECT_MIN (obj) = r->start;
        }
    }
  /* Accumulate the spans upward: into caps and into parent-region
     allocnos of the same regno.  */
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
         a != NULL;
         a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      {
        int j;
        int n = ALLOCNO_NUM_OBJECTS (a);

        for (j = 0; j < n; j++)
          {
            ira_object_t obj = ALLOCNO_OBJECT (a, j);
            ira_object_t parent_obj;

            if (OBJECT_MAX (obj) < 0)
              {
                /* The object is not used and hence does not live.  */
                ira_assert (OBJECT_LIVE_RANGES (obj) == NULL);
                OBJECT_MAX (obj) = 0;
                OBJECT_MIN (obj) = 1;
                continue;
              }
            ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
            /* Accumulation of range info.  */
            if (ALLOCNO_CAP (a) != NULL)
              {
                /* Widen every cap in the cap chain to cover this
                   object's span.  */
                for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
                  {
                    ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
                    if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
                      OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
                    if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
                      OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
                  }
                continue;
              }
            if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
              continue;
            /* Widen the parent-region allocno's span likewise.  */
            parent_a = parent->regno_allocno_map[i];
            parent_obj = ALLOCNO_OBJECT (parent_a, j);
            if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
              OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
            if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
              OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
          }
      }
#ifdef ENABLE_IRA_CHECKING
  /* Verify every object's span lies within [0, ira_max_point].  */
  FOR_EACH_OBJECT (obj, oi)
    {
      if ((OBJECT_MIN (obj) >= 0 && OBJECT_MIN (obj) <= ira_max_point)
          && (OBJECT_MAX (obj) >= 0 && OBJECT_MAX (obj) <= ira_max_point))
        continue;
      gcc_unreachable ();
    }
#endif
}
2772
2773/* Sort allocnos according to their live ranges. Allocnos with
1756cb66
VM
2774 smaller allocno class are put first unless we use priority
2775 coloring. Allocnos with the same class are ordered according
2776 their start (min). Allocnos with the same start are ordered
2777 according their finish (max). */
058e97ec 2778static int
ac0ab4f7 2779object_range_compare_func (const void *v1p, const void *v2p)
058e97ec
VM
2780{
2781 int diff;
a49ae217
BS
2782 ira_object_t obj1 = *(const ira_object_t *) v1p;
2783 ira_object_t obj2 = *(const ira_object_t *) v2p;
2784 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2785 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
058e97ec 2786
a49ae217 2787 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
058e97ec 2788 return diff;
a49ae217 2789 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
058e97ec
VM
2790 return diff;
2791 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2792}
/* Sort ira_object_id_map and set up conflict id of allocnos.  */
static void
sort_conflict_id_map (void)
{
  int i, num;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  /* Gather every object of every allocno into the map.  */
  num = 0;
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
        ira_object_id_map[num++] = obj;
    }
  /* Order the objects by live range (start, finish, allocno num).  */
  if (num > 1)
    qsort (ira_object_id_map, num, sizeof (ira_object_t),
           object_range_compare_func);
  /* The conflict id of an object is its position in the sorted
     map.  */
  for (i = 0; i < num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      gcc_assert (obj != NULL);
      OBJECT_CONFLICT_ID (obj) = i;
    }
  /* Clear unused trailing slots of the map.  */
  for (i = num; i < ira_objects_num; i++)
    ira_object_id_map[i] = NULL;
}
2824
/* Set up minimal and maximal conflict ids of allocnos with which
   given allocno can conflict.  After this pass OBJECT_MIN/OBJECT_MAX
   no longer hold live-range points but conflict-id bounds: a pair of
   objects can conflict only if each one's conflict id lies within the
   other's [OBJECT_MIN, OBJECT_MAX] interval.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  int aclass;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  int word0_min, word0_max;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  /* Forward pass over the id-sorted map: compute the minimal conflict
     id for each object.  live_range_min[] saves the original range
     starts for the backward pass below.  */
  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  aclass = -1;
  first_not_finished = -1;
  for (i = 0; i < ira_objects_num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);

      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  min = i;
	  first_not_finished = i;
	}
      else
	{
	  start = OBJECT_MIN (obj);
	  /* If we skip an allocno, the allocno with smaller ids will
	     be also skipped because of the secondary sorting the
	     range finishes (see function
	     object_range_compare_func).  */
	  while (first_not_finished < i
		 && start > OBJECT_MAX (ira_object_id_map
					[first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	}
      if (min == i)
	/* We could increase min further in this case but it is good
	   enough.  */
	min++;
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
    }
  /* Backward pass: compute the maximal conflict id for each object.
     last_lived[p] records the smallest-id object (seen so far) whose
     range covers program point P.  */
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  aclass = -1;
  filled_area_start = -1;
  for (i = ira_objects_num - 1; i >= 0; i--)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);
      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	    last_lived[j] = -1;
	  filled_area_start = ira_max_point;
	}
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      if (max < 0)
	/* We could decrease max further in this case but it is good
	   enough.  */
	max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go further A range finish to recognize
	 intersection quickly because if the finish of subsequently
	 processed allocno (it has smaller conflict id) range is
	 further A range finish than they are definitely intersected
	 (the reason for this is the allocnos with bigger conflict id
	 have their range starts not smaller than allocnos with
	 smaller ids.  */
      for (j = min; j < filled_area_start; j++)
	last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);

  /* For allocnos with more than one object, we may later record extra conflicts in
     subobject 0 that we cannot really know about here.
     For now, simply widen the min/max range of these subobjects.  */

  word0_min = INT_MAX;
  word0_max = INT_MIN;

  /* First collect the extreme conflict ids of all word-0 subobjects...  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_CONFLICT_ID (obj0) < word0_min)
	word0_min = OBJECT_CONFLICT_ID (obj0);
      if (OBJECT_CONFLICT_ID (obj0) > word0_max)
	word0_max = OBJECT_CONFLICT_ID (obj0);
    }
  /* ... then widen each word-0 subobject's bounds to cover them.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_MIN (obj0) > word0_min)
	OBJECT_MIN (obj0) = word0_min;
      if (OBJECT_MAX (obj0) < word0_max)
	OBJECT_MAX (obj0) = word0_max;
    }
}
2949
2950\f
2951
2952static void
2953create_caps (void)
2954{
2955 ira_allocno_t a;
2956 ira_allocno_iterator ai;
2957 ira_loop_tree_node_t loop_tree_node;
2958
2959 FOR_EACH_ALLOCNO (a, ai)
2960 {
2961 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2962 continue;
2963 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2964 create_cap_allocno (a);
2965 else if (ALLOCNO_CAP (a) == NULL)
2966 {
2967 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2968 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2969 create_cap_allocno (a);
2970 }
2971 }
2972}
2973
2974\f
2975
/* The page contains code transforming the internal representation
   (IR) with more than one region to the one-region IR which is
   necessary for reload.  This transformation is called IR flattening.
   We might just rebuild the IR for one region but we don't do it
   because it takes a lot of time.  */
2981
/* Map: regno -> the allocno which will finally represent the regno in
   the flattened, one-region IR.  Allocated at the start of
   ira_flattening and freed at its end.  */
static ira_allocno_t *regno_top_level_allocno_map;
2985
029da7d4
BS
2986/* Find the allocno that corresponds to A at a level one higher up in the
2987 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2988ira_allocno_t
2989ira_parent_allocno (ira_allocno_t a)
2990{
2991 ira_loop_tree_node_t parent;
2992
2993 if (ALLOCNO_CAP (a) != NULL)
2994 return NULL;
2995
2996 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2997 if (parent == NULL)
2998 return NULL;
2999
3000 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
3001}
3002
3003/* Find the allocno that corresponds to A at a level one higher up in the
3004 loop tree. If ALLOCNO_CAP is set for A, return that. */
3005ira_allocno_t
3006ira_parent_or_cap_allocno (ira_allocno_t a)
3007{
3008 if (ALLOCNO_CAP (a) != NULL)
3009 return ALLOCNO_CAP (a);
3010
3011 return ira_parent_allocno (a);
3012}
3013
/* Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  */
static bool
copy_info_to_removed_store_destinations (int regno)
{
  ira_allocno_t a;
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  bool merged_p;

  merged_p = false;
  for (a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
	/* This allocno will be removed.  */
	continue;

      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      /* Walk up the loop tree to the nearest ancestor allocno that
	 survives flattening and is a memory-optimized store
	 destination (or until the map runs out).  */
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
	   parent != NULL;
	   parent = parent->parent)
	if ((parent_a = parent->regno_allocno_map[regno]) == NULL
	    || (parent_a
		== regno_top_level_allocno_map[REGNO
					       (allocno_emit_reg (parent_a))]
		&& ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
	  break;
      if (parent == NULL || parent_a == NULL)
	continue;

      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);

      /* Accumulate A's call-related and pressure attributes into the
	 surviving ancestor.  */
      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
      ALLOCNO_CROSSED_CALLS_ABIS (parent_a)
	|= ALLOCNO_CROSSED_CALLS_ABIS (a);
      ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a)
	|= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	+= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
      merged_p = true;
    }
  return merged_p;
}
3068
/* Flatten the IR.  In other words, this function transforms IR as if
   it were built with one region (without loops).  We could make it
   much simpler by rebuilding IR with one region, but unfortunately it
   takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.  */
void
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
{
  int i, j;
  bool keep_p;
  int hard_regs_num;
  bool new_pseudos_p, merged_p, mem_dest_p;
  unsigned int n;
  enum reg_class aclass;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_copy_t cp;
  ira_loop_tree_node_t node;
  live_range_t r;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;

  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
				      * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
	  max_reg_num () * sizeof (ira_allocno_t));
  new_pseudos_p = merged_p = false;
  /* Seed the "total" conflict sets from the build-time conflicts.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps and they are never
	   will be transformed into allocnos existing after IR
	   flattening.  */
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	OBJECT_TOTAL_CONFLICT_HARD_REGS (obj)
	  = OBJECT_CONFLICT_HARD_REGS (obj);
#ifdef STACK_REGS
      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
#endif
    }
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    {
      mem_dest_p = false;
      for (a = ira_regno_allocno_map[i];
	   a != NULL;
	   a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
	{
	  ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);

	  ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	  if (data->somewhere_renamed_p)
	    new_pseudos_p = true;
	  parent_a = ira_parent_allocno (a);
	  if (parent_a == NULL)
	    {
	      /* Top-level allocno: it represents its regno in the
		 flattened IR.  */
	      ALLOCNO_COPIES (a) = NULL;
	      regno_top_level_allocno_map[REGNO (data->reg)] = a;
	      continue;
	    }
	  ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);

	  if (data->mem_optimized_dest != NULL)
	    mem_dest_p = true;
	  parent_data = ALLOCNO_EMIT_DATA (parent_a);
	  if (REGNO (data->reg) == REGNO (parent_data->reg))
	    {
	      /* Same pseudo on both levels: merge A into its parent.  */
	      merge_hard_reg_conflicts (a, parent_a, true);
	      move_allocno_live_ranges (a, parent_a);
	      merged_p = true;
	      parent_data->mem_optimized_dest_p
		= (parent_data->mem_optimized_dest_p
		   || data->mem_optimized_dest_p);
	      continue;
	    }
	  /* A got a new pseudo: subtract its statistics/costs from all
	     ancestors, since they were accumulated there during
	     propagation.  */
	  new_pseudos_p = true;
	  for (;;)
	    {
	      ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
	      ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
	      ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
	      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CALLS_CROSSED_NUM (a);
	      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
	      /* Assume that ALLOCNO_CROSSED_CALLS_ABIS and
		 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS stay the same.
		 We'd need to rebuild the IR to do better.  */
	      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
		-= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	      ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
			  && ALLOCNO_NREFS (parent_a) >= 0
			  && ALLOCNO_FREQ (parent_a) >= 0);
	      aclass = ALLOCNO_CLASS (parent_a);
	      hard_regs_num = ira_class_hard_regs_num[aclass];
	      if (ALLOCNO_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_HARD_REG_COSTS (a)[j];
	      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
	      ALLOCNO_CLASS_COST (parent_a)
		-= ALLOCNO_CLASS_COST (a);
	      ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
	      parent_a = ira_parent_allocno (parent_a);
	      if (parent_a == NULL)
		break;
	    }
	  ALLOCNO_COPIES (a) = NULL;
	  regno_top_level_allocno_map[REGNO (data->reg)] = a;
	}
      if (mem_dest_p && copy_info_to_removed_store_destinations (i))
	merged_p = true;
    }
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  if (new_pseudos_p)
    {
      sparseset objects_live;

      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
	{
	  ira_allocno_object_iterator oi;
	  ira_object_t obj;

	  if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	      || ALLOCNO_CAP_MEMBER (a) != NULL)
	    continue;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    {
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		ira_assert (r->object == obj);
	      clear_conflicts (obj);
	    }
	}
      /* Sweep all program points, keeping the set of currently live
	 objects, and record a conflict for each overlapping pair of
	 surviving objects with intersecting classes.  */
      objects_live = sparseset_alloc (ira_objects_num);
      for (i = 0; i < ira_max_point; i++)
	{
	  for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
	    {
	      ira_object_t obj = r->object;

	      a = OBJECT_ALLOCNO (obj);
	      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
		  || ALLOCNO_CAP_MEMBER (a) != NULL)
		continue;

	      aclass = ALLOCNO_CLASS (a);
	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
		{
		  ira_object_t live_obj = ira_object_id_map[n];
		  ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
		  enum reg_class live_aclass = ALLOCNO_CLASS (live_a);

		  if (ira_reg_classes_intersect_p[aclass][live_aclass]
		      /* Don't set up conflict for the allocno with itself.  */
		      && live_a != a)
		    ira_add_conflict (obj, live_obj);
		}
	      sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
	    }

	  for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
	    sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
	}
      sparseset_free (objects_live);
      compress_conflict_vecs ();
    }
  /* Mark some copies for removing and change allocnos in the rest
     copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
	  || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf
	      (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
	       cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->first),
	       REGNO (allocno_emit_reg (cp->first)),
	       ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->second),
	       REGNO (allocno_emit_reg (cp->second)));
	  cp->loop_tree_node = NULL;
	  continue;
	}
      first
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
      second
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
      node = cp->loop_tree_node;
      if (node == NULL)
	keep_p = true; /* The copy was generated in ira-emit.c.  */
      else
	{
	  /* Check that the copy was not propagated from level on
	     which we will have different pseudos.  */
	  node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
	  node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
	  keep_p = ((REGNO (allocno_emit_reg (first))
		     == REGNO (allocno_emit_reg (node_first)))
		    && (REGNO (allocno_emit_reg (second))
			== REGNO (allocno_emit_reg (node_second))));
	}
      if (keep_p)
	{
	  cp->loop_tree_node = ira_loop_tree_root;
	  cp->first = first;
	  cp->second = second;
	}
      else
	{
	  cp->loop_tree_node = NULL;
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
		     cp->num, ALLOCNO_NUM (cp->first),
		     REGNO (allocno_emit_reg (cp->first)),
		     ALLOCNO_NUM (cp->second),
		     REGNO (allocno_emit_reg (cp->second)));
	}
    }
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	  || ALLOCNO_CAP_MEMBER (a) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove a%dr%d\n",
		     ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
	  ira_remove_allocno_prefs (a);
	  finish_allocno (a);
	  continue;
	}
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
      ALLOCNO_CAP (a) = NULL;
      /* Restore updated costs for assignments from reload.  */
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
	ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
    }
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (cp->loop_tree_node == NULL)
	{
	  ira_copies[cp->num] = NULL;
	  finish_copy (cp);
	  continue;
	}
      ira_assert
	(ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
	 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      add_allocno_copy_to_list (cp);
      swap_allocno_copy_ends_if_necessary (cp);
    }
  rebuild_regno_allocno_maps ();
  if (ira_max_point != ira_max_point_before_emit)
    ira_compress_allocno_live_ranges ();
  ira_free (regno_top_level_allocno_map);
}
3346
3347\f
3348
#ifdef ENABLE_IRA_CHECKING
/* Sanity-check allocno creation: every allocno must be recorded in
   its loop tree node, and every allocno below the root must be
   represented on the next level up either through a cap or through a
   parent allocno (in which case it must be a border allocno of its
   node).  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_loop_tree_node_t node = ALLOCNO_LOOP_TREE_NODE (a);

      ira_assert (bitmap_bit_p (node->all_allocnos, ALLOCNO_NUM (a)));
      if (node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (node->parent->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
3376
4ac293e2
VM
3377/* Identify allocnos which prefer a register class with a single hard register.
3378 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
3379 less likely to use the preferred singleton register. */
3380static void
3381update_conflict_hard_reg_costs (void)
3382{
3383 ira_allocno_t a;
3384 ira_allocno_iterator ai;
3385 int i, index, min;
3386
3387 FOR_EACH_ALLOCNO (a, ai)
3388 {
6f76a878
AS
3389 reg_class_t aclass = ALLOCNO_CLASS (a);
3390 reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
c9d74da6
RS
3391 int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
3392 if (singleton < 0)
4ac293e2 3393 continue;
c9d74da6 3394 index = ira_class_hard_reg_index[(int) aclass][singleton];
4ac293e2
VM
3395 if (index < 0)
3396 continue;
3397 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
3398 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
3399 continue;
3400 min = INT_MAX;
6f76a878 3401 for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
1756cb66 3402 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
4ac293e2
VM
3403 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
3404 min = ALLOCNO_HARD_REG_COSTS (a)[i];
3405 if (min == INT_MAX)
3406 continue;
3407 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 3408 aclass, 0);
4ac293e2 3409 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
1756cb66 3410 -= min - ALLOCNO_CLASS_COST (a);
4ac293e2
VM
3411 }
3412}
3413
/* Create a internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  The function returns TRUE if we generate loop
   structure (besides nodes representing all function and the basic
   blocks) for regional allocation.  A true return means that we
   really need to flatten IR before the reload.  */
bool
ira_build (void)
{
  bool loops_p;

  df_analyze ();
  /* Set up the basic data structures...  */
  initiate_cost_vectors ();
  initiate_allocnos ();
  initiate_prefs ();
  initiate_copies ();
  create_loop_tree_nodes ();
  form_loop_tree ();
  /* ... then build allocnos, costs, objects and live ranges.  */
  create_allocnos ();
  ira_costs ();
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  loops_p = more_one_region_p ();
  if (loops_p)
    {
      /* Regional allocation: push allocno info up the loop tree and
	 build caps.  */
      propagate_allocno_info ();
      create_caps ();
    }
  ira_tune_allocno_costs ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
#endif
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
    {
      ira_allocno_t a;
      ira_allocno_iterator ai;

      /* Remove all regions but root one.  */
      if (loops_p)
	{
	  remove_unnecessary_regions (true);
	  loops_p = false;
	}
      /* We don't save hard registers around calls for fast allocation
	 -- add caller clobbered registers as conflicting ones to
	 allocno crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
	if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
	  ior_hard_reg_conflicts (a, ira_need_caller_save_regs (a));
    }
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_prefs (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    {
      int n, nr, nr_big;
      ira_allocno_t a;
      live_range_t r;
      ira_allocno_iterator ai;

      /* Dump summary statistics: conflicts (n), live ranges (nr) and
	 multi-object allocnos (nr_big).  */
      n = 0;
      nr = 0;
      nr_big = 0;
      FOR_EACH_ALLOCNO (a, ai)
	{
	  int j, nobj = ALLOCNO_NUM_OBJECTS (a);

	  if (nobj > 1)
	    nr_big++;
	  for (j = 0; j < nobj; j++)
	    {
	      ira_object_t obj = ALLOCNO_OBJECT (a, j);
	      n += OBJECT_NUM_CONFLICTS (obj);
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		nr++;
	    }
	}
      fprintf (ira_dump_file, "  regions=%d, blocks=%d, points=%d\n",
	       current_loops == NULL ? 1 : number_of_loops (cfun),
	       n_basic_blocks_for_fn (cfun), ira_max_point);
      fprintf (ira_dump_file,
	       "    allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
	       ira_allocnos_num, nr_big, ira_copies_num, n, nr);
    }
  return loops_p;
}
3508
/* Release the data created by function ira_build.  Each finish_*
   call frees the structures set up by the corresponding initiate_*/
   /* create_* call in ira_build.  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_prefs ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}