/* Building internal representation for IRA.
   Copyright (C) 2006-2021 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "predict.h"
#include "df.h"
#include "insn-config.h"
#include "regs.h"
#include "memmodel.h"
#include "ira.h"
#include "ira-int.h"
#include "sparseset.h"
#include "cfgloop.h"

static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx_insn *,
                                     ira_loop_tree_node_t);

/* The root of the loop tree corresponding to the whole function.  */
ira_loop_tree_node_t ira_loop_tree_root;

/* Height of the loop tree.  */
int ira_loop_tree_height;

/* All nodes representing basic blocks are referred through the
   following array.  We cannot use basic block member `aux' for this
   because it is used for insertion of insns on edges.  */
ira_loop_tree_node_t ira_bb_nodes;

/* All nodes representing loops are referred through the following
   array.  */
ira_loop_tree_node_t ira_loop_nodes;

/* And size of the ira_loop_nodes array.  */
unsigned int ira_loop_nodes_count;

/* Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  */
ira_allocno_t *ira_regno_allocno_map;

/* Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  */
ira_allocno_t *ira_allocnos;

/* Size of the previous array.  */
int ira_allocnos_num;

/* Count of conflict record structures we've created, used when creating
   a new conflict id.  */
int ira_objects_num;

/* Map a conflict id to its conflict record.  */
ira_object_t *ira_object_id_map;

/* Array of references to all allocno preferences.  The order number
   of the preference corresponds to the index in the array.  */
ira_pref_t *ira_prefs;

/* Size of the previous array.  */
int ira_prefs_num;

/* Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  */
ira_copy_t *ira_copies;

/* Size of the previous array.  */
int ira_copies_num;


/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;

/* Initialize some members in loop tree node NODE.  Use LOOP_NUM for
   the member loop_num.  */
static void
init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
{
  int max_regno = max_reg_num ();

  node->regno_allocno_map
    = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
  memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
  memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
  node->all_allocnos = ira_allocate_bitmap ();
  node->modified_regnos = ira_allocate_bitmap ();
  node->border_allocnos = ira_allocate_bitmap ();
  node->local_copies = ira_allocate_bitmap ();
  node->loop_num = loop_num;
  node->children = NULL;
  node->subloops = NULL;
}

/* The following function allocates the loop tree nodes.  If
   CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
   the root, which corresponds to the whole function) will not be
   allocated, but nodes will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (void)
{
  unsigned int i, j;
  bool skip_p;
  edge_iterator ei;
  edge e;
  loop_p loop;

  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node)
                     * last_basic_block_for_fn (cfun)));
  last_basic_block_before_change = last_basic_block_for_fn (cfun);
  for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
              sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  if (current_loops == NULL)
    {
      ira_loop_nodes_count = 1;
      ira_loop_nodes = ((struct ira_loop_tree_node *)
                        ira_allocate (sizeof (struct ira_loop_tree_node)));
      init_loop_tree_node (ira_loop_nodes, 0);
      return;
    }
  ira_loop_nodes_count = number_of_loops (cfun);
  ira_loop_nodes = ((struct ira_loop_tree_node *)
                    ira_allocate (sizeof (struct ira_loop_tree_node)
                                  * ira_loop_nodes_count));
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
    {
      if (loop_outer (loop) != NULL)
        {
          ira_loop_nodes[i].regno_allocno_map = NULL;
          skip_p = false;
          FOR_EACH_EDGE (e, ei, loop->header->preds)
            if (e->src != loop->latch
                && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
              {
                skip_p = true;
                break;
              }
          if (skip_p)
            continue;
          auto_vec<edge> edges = get_loop_exit_edges (loop);
          FOR_EACH_VEC_ELT (edges, j, e)
            if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
              {
                skip_p = true;
                break;
              }
          if (skip_p)
            continue;
        }
      init_loop_tree_node (&ira_loop_nodes[i], loop->num);
    }
}

/* The function returns TRUE if there is more than one allocation
   region.  */
static bool
more_one_region_p (void)
{
  unsigned int i;
  loop_p loop;

  if (current_loops != NULL)
    FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
      if (ira_loop_nodes[i].regno_allocno_map != NULL
          && ira_loop_tree_root != &ira_loop_nodes[i])
        return true;
  return false;
}

/* Free the loop tree node of a loop.  */
static void
finish_loop_tree_node (ira_loop_tree_node_t loop)
{
  if (loop->regno_allocno_map != NULL)
    {
      ira_assert (loop->bb == NULL);
      ira_free_bitmap (loop->local_copies);
      ira_free_bitmap (loop->border_allocnos);
      ira_free_bitmap (loop->modified_regnos);
      ira_free_bitmap (loop->all_allocnos);
      ira_free (loop->regno_allocno_map);
      loop->regno_allocno_map = NULL;
    }
}

/* Free the loop tree nodes.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;

  for (i = 0; i < ira_loop_nodes_count; i++)
    finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
        ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
        ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}


/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  If LOOP is NULL, we add the
   loop designating the whole function, which is used when CFG loops
   have not been built.  */
static void
add_loop_to_tree (class loop *loop)
{
  int loop_num;
  class loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We cannot use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  if (loop != NULL && loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  loop_num = loop != NULL ? loop->num : 0;
  if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
      && ira_loop_nodes[loop_num].children == NULL)
    {
      /* We have not added the loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop_num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      if (loop == NULL)
        parent = NULL;
      else
        {
          for (parent = loop_outer (loop);
               parent != NULL;
               parent = loop_outer (parent))
            if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
              break;
        }
      if (parent == NULL)
        {
          loop_node->next = NULL;
          loop_node->subloop_next = NULL;
          loop_node->parent = NULL;
        }
      else
        {
          parent_node = &ira_loop_nodes[parent->num];
          loop_node->next = parent_node->children;
          parent_node->children = loop_node;
          loop_node->subloop_next = parent_node->subloops;
          parent_node->subloops = loop_node;
          loop_node->parent = parent_node;
        }
    }
}

/* The following recursive function sets up levels of nodes of the
   tree given its root LOOP_NODE.  The enumeration starts with LEVEL.
   The function returns maximal value of level in the tree + 1.  */
static int
setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
{
  int height, max_height;
  ira_loop_tree_node_t subloop_node;

  ira_assert (loop_node->bb == NULL);
  loop_node->level = level;
  max_height = level + 1;
  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      height = setup_loop_tree_level (subloop_node, level + 1);
      if (height > max_height)
        max_height = height;
    }
  return max_height;
}

/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  basic_block bb;
  class loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;

  /* We cannot use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      if (current_loops == NULL)
        parent = NULL;
      else
        {
          for (parent = bb->loop_father;
               parent != NULL;
               parent = loop_outer (parent))
            if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
              break;
        }
      add_loop_to_tree (parent);
      loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
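
/* As the code above shows, ira_loop_nodes[0] (loop number 0) always serves
   as the loop tree root and represents the whole function; each basic block
   node is chained into the `children' list of the innermost region that
   received a loop tree node.  */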


/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  ira_assert (current_loops != NULL);
  max_regno = max_reg_num ();
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), l, loop)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
        ira_free (ira_loop_nodes[l].regno_allocno_map);
        ira_loop_nodes[l].regno_allocno_map
          = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
                                            * max_regno);
        memset (ira_loop_nodes[l].regno_allocno_map, 0,
                sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        /* Caps are not in the regno allocno maps.  */
        continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
        /* Remember that we can create temporary allocnos to break
           cycles in register shuffle.  */
        loop_tree_node->regno_allocno_map[regno] = a;
    }
}


/* Pools for allocnos, allocno live ranges and objects.  */
static object_allocator<live_range> live_range_pool ("live ranges");
static object_allocator<ira_allocno> allocno_pool ("allocnos");
static object_allocator<ira_object> object_pool ("objects");

/* Vec containing references to all created allocnos.  It is a
   container of array allocnos.  */
static vec<ira_allocno_t> allocno_vec;

/* Vec containing references to all created ira_objects.  It is a
   container of ira_object_id_map.  */
static vec<ira_object_t> ira_object_id_map_vec;

/* Initialize data concerning allocnos.  */
static void
initiate_allocnos (void)
{
  allocno_vec.create (max_reg_num () * 2);
  ira_allocnos = NULL;
  ira_allocnos_num = 0;
  ira_objects_num = 0;
  ira_object_id_map_vec.create (max_reg_num () * 2);
  ira_object_id_map = NULL;
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
                                      * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
}

/* Create and return an object corresponding to a new allocno A.  */
static ira_object_t
ira_create_object (ira_allocno_t a, int subword)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj = object_pool.allocate ();

  OBJECT_ALLOCNO (obj) = a;
  OBJECT_SUBWORD (obj) = subword;
  OBJECT_CONFLICT_ID (obj) = ira_objects_num;
  OBJECT_CONFLICT_VEC_P (obj) = false;
  OBJECT_CONFLICT_ARRAY (obj) = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  OBJECT_CONFLICT_HARD_REGS (obj) = ira_no_alloc_regs;
  OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) = ira_no_alloc_regs;
  OBJECT_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
  OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
  OBJECT_MIN (obj) = INT_MAX;
  OBJECT_MAX (obj) = -1;
  OBJECT_LIVE_RANGES (obj) = NULL;

  ira_object_id_map_vec.safe_push (obj);
  ira_object_id_map
    = ira_object_id_map_vec.address ();
  ira_objects_num = ira_object_id_map_vec.length ();

  return obj;
}

/* Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  */
ira_allocno_t
ira_create_allocno (int regno, bool cap_p,
                    ira_loop_tree_node_t loop_tree_node)
{
  ira_allocno_t a;

  a = allocno_pool.allocate ();
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
        /* Remember that we can create temporary allocnos to break
           cycles in register shuffle on region borders (see
           ira-emit.c).  */
        loop_tree_node->regno_allocno_map[regno] = a;
    }
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = ira_allocnos_num;
  bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
  ALLOCNO_NREFS (a) = 0;
  ALLOCNO_FREQ (a) = 0;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CROSSED_CALLS_ABIS (a) = 0;
  CLEAR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = false;
  ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
#endif
  ALLOCNO_DONT_REASSIGN_P (a) = false;
  ALLOCNO_BAD_SPILL_P (a) = false;
  ALLOCNO_ASSIGNED_P (a) = false;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_WMODE (a) = ALLOCNO_MODE (a);
  ALLOCNO_PREFS (a) = NULL;
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CLASS (a) = NO_REGS;
  ALLOCNO_UPDATED_CLASS_COST (a) = 0;
  ALLOCNO_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
  ALLOCNO_NUM_OBJECTS (a) = 0;

  ALLOCNO_ADD_DATA (a) = NULL;
  allocno_vec.safe_push (a);
  ira_allocnos = allocno_vec.address ();
  ira_allocnos_num = allocno_vec.length ();

  return a;
}
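
/* Note that for a non-cap allocno the code above pushes it onto the head of
   the per-regno chain: ira_regno_allocno_map[regno] points to the most
   recently created allocno for REGNO, and the older ones remain reachable
   through ALLOCNO_NEXT_REGNO_ALLOCNO.  */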

/* Set up register class for A and update its conflict hard
   registers.  */
void
ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
{
  ira_allocno_object_iterator oi;
  ira_object_t obj;

  ALLOCNO_CLASS (a) = aclass;
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      OBJECT_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
      OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= ~reg_class_contents[aclass];
    }
}

/* Determine the number of objects we should associate with allocno A
   and allocate them.  */
void
ira_create_allocno_objects (ira_allocno_t a)
{
  machine_mode mode = ALLOCNO_MODE (a);
  enum reg_class aclass = ALLOCNO_CLASS (a);
  int n = ira_reg_class_max_nregs[aclass][mode];
  int i;

  if (n != 2 || maybe_ne (GET_MODE_SIZE (mode), n * UNITS_PER_WORD))
    n = 1;

  ALLOCNO_NUM_OBJECTS (a) = n;
  for (i = 0; i < n; i++)
    ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
}

/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
   ALLOCNO_OBJECT structures.  This must be called after the allocno
   classes are known.  */
static void
create_allocno_objects (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    ira_create_allocno_objects (a);
}

/* Merge hard register conflict information for all objects associated
   with allocno FROM into the corresponding objects associated with TO.
   If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS.  */
static void
merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
                          bool total_only)
{
  int i;
  gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
  for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);

      if (!total_only)
        OBJECT_CONFLICT_HARD_REGS (to_obj)
          |= OBJECT_CONFLICT_HARD_REGS (from_obj);
      OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj)
        |= OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj);
    }
#ifdef STACK_REGS
  if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
    ALLOCNO_NO_STACK_REG_P (to) = true;
  if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
    ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
#endif
}

/* Update hard register conflict information for all objects associated
   with A to include the regs in SET.  */
void
ior_hard_reg_conflicts (ira_allocno_t a, const_hard_reg_set set)
{
  ira_allocno_object_iterator i;
  ira_object_t obj;

  FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
    {
      OBJECT_CONFLICT_HARD_REGS (obj) |= set;
      OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= set;
    }
}

/* Return TRUE if a conflict vector with NUM elements is more
   profitable than a conflict bit vector for OBJ.  */
bool
ira_conflict_vector_profitable_p (ira_object_t obj, int num)
{
  int nw;
  int max = OBJECT_MAX (obj);
  int min = OBJECT_MIN (obj);

  if (max < min)
    /* We prefer a bit vector in such a case because it does not result
       in allocation.  */
    return false;

  nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
  return (2 * sizeof (ira_object_t) * (num + 1)
          < 3 * nw * sizeof (IRA_INT_TYPE));
}
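
/* For illustration (assuming a 64-bit host pointer and a 32-bit
   IRA_INT_TYPE): with conflict ids spanning a MIN..MAX window of about
   1024 values, nw is 32, so the bit vector needs 128 bytes and the
   vector form is chosen only while 16 * (NUM + 1) stays below
   3 * 128 bytes, i.e. for roughly two dozen conflicts or fewer.  */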

/* Allocate and initialize the conflict vector of OBJ for NUM
   conflicting objects.  */
void
ira_allocate_conflict_vec (ira_object_t obj, int num)
{
  int size;
  ira_object_t *vec;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  num++; /* for NULL end marker  */
  size = sizeof (ira_object_t) * num;
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
  vec[0] = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = true;
}

/* Allocate and initialize the conflict bit vector of OBJ.  */
static void
allocate_conflict_bit_vec (ira_object_t obj)
{
  unsigned int size;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
          / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = false;
}

/* Allocate and initialize the conflict vector or conflict bit vector
   of OBJ for NUM conflicting allocnos, whichever is more profitable.  */
void
ira_allocate_object_conflicts (ira_object_t obj, int num)
{
  if (ira_conflict_vector_profitable_p (obj, num))
    ira_allocate_conflict_vec (obj, num);
  else
    allocate_conflict_bit_vec (obj);
}


/* Add OBJ2 to the conflicts of OBJ1.  */
static void
add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
{
  int num;
  unsigned int size;

  if (OBJECT_CONFLICT_VEC_P (obj1))
    {
      ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
      int curr_num = OBJECT_NUM_CONFLICTS (obj1);
      num = curr_num + 2;
      if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
        {
          ira_object_t *newvec;
          size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
          newvec = (ira_object_t *) ira_allocate (size);
          memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
          ira_free (vec);
          vec = newvec;
          OBJECT_CONFLICT_ARRAY (obj1) = vec;
          OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
        }
      vec[num - 2] = obj2;
      vec[num - 1] = NULL;
      OBJECT_NUM_CONFLICTS (obj1)++;
    }
  else
    {
      int nw, added_head_nw, id;
      IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);

      id = OBJECT_CONFLICT_ID (obj2);
      if (OBJECT_MIN (obj1) > id)
        {
          /* Expand head of the bit vector.  */
          added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
          nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
          size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
          if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
            {
              memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
                       vec, nw * sizeof (IRA_INT_TYPE));
              memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
            }
          else
            {
              size
                = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
              vec = (IRA_INT_TYPE *) ira_allocate (size);
              memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
                      OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
              memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
              memset ((char *) vec
                      + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
                      0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
              ira_free (OBJECT_CONFLICT_ARRAY (obj1));
              OBJECT_CONFLICT_ARRAY (obj1) = vec;
              OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
            }
          OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
        }
      else if (OBJECT_MAX (obj1) < id)
        {
          nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
          size = nw * sizeof (IRA_INT_TYPE);
          if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
            {
              /* Expand tail of the bit vector.  */
              size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
              vec = (IRA_INT_TYPE *) ira_allocate (size);
              memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1),
                      OBJECT_CONFLICT_ARRAY_SIZE (obj1));
              memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
                      0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
              ira_free (OBJECT_CONFLICT_ARRAY (obj1));
              OBJECT_CONFLICT_ARRAY (obj1) = vec;
              OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
            }
          OBJECT_MAX (obj1) = id;
        }
      SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
    }
}
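
/* The conflict bit vector handled above only covers the window of conflict
   ids between OBJECT_MIN and OBJECT_MAX of OBJ1.  Adding an id outside that
   window grows the vector at the head or the tail, over-allocating by
   roughly half so that repeated insertions are amortized, and then widens
   OBJECT_MIN or OBJECT_MAX accordingly.  */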

/* Add OBJ1 to the conflicts of OBJ2 and vice versa.  */
static void
ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
{
  add_to_conflicts (obj1, obj2);
  add_to_conflicts (obj2, obj1);
}

/* Clear all conflicts of OBJ.  */
static void
clear_conflicts (ira_object_t obj)
{
  if (OBJECT_CONFLICT_VEC_P (obj))
    {
      OBJECT_NUM_CONFLICTS (obj) = 0;
      OBJECT_CONFLICT_VEC (obj)[0] = NULL;
    }
  else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
    {
      int nw;

      nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
      memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
    }
}

/* The array used to find duplications in conflict vectors of
   allocnos.  */
static int *conflict_check;

/* The value used to mark presence in the conflict vector of
   the current allocno.  */
static int curr_conflict_check_tick;

/* Remove duplications in conflict vector of OBJ.  */
static void
compress_conflict_vec (ira_object_t obj)
{
  ira_object_t *vec, conflict_obj;
  int i, j;

  ira_assert (OBJECT_CONFLICT_VEC_P (obj));
  vec = OBJECT_CONFLICT_VEC (obj);
  curr_conflict_check_tick++;
  for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
    {
      int id = OBJECT_CONFLICT_ID (conflict_obj);
      if (conflict_check[id] != curr_conflict_check_tick)
        {
          conflict_check[id] = curr_conflict_check_tick;
          vec[j++] = conflict_obj;
        }
    }
  OBJECT_NUM_CONFLICTS (obj) = j;
  vec[j] = NULL;
}

/* Remove duplications in conflict vectors of all allocnos.  */
static void
compress_conflict_vecs (void)
{
  ira_object_t obj;
  ira_object_iterator oi;

  conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  memset (conflict_check, 0, sizeof (int) * ira_objects_num);
  curr_conflict_check_tick = 0;
  FOR_EACH_OBJECT (obj, oi)
    {
      if (OBJECT_CONFLICT_VEC_P (obj))
        compress_conflict_vec (obj);
    }
  ira_free (conflict_check);
}
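
/* The tick scheme above avoids re-clearing conflict_check between objects:
   bumping curr_conflict_check_tick once per vector invalidates all stale
   marks, so a conflict id counts as a duplicate only if its slot already
   holds the current tick.  */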

/* This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}

/* Create and return the cap representing allocno A in the
   parent loop.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  ALLOCNO_WMODE (cap) = ALLOCNO_WMODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  ALLOCNO_CROSSED_CALLS_ABIS (cap) = ALLOCNO_CROSSED_CALLS_ABIS (a);
  ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (cap)
    = ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}

/* Create and return a live range for OBJECT with given attributes.  */
live_range_t
ira_create_live_range (ira_object_t obj, int start, int finish,
                       live_range_t next)
{
  live_range_t p;

  p = live_range_pool.allocate ();
  p->object = obj;
  p->start = start;
  p->finish = finish;
  p->next = next;
  return p;
}

/* Create a new live range for OBJECT and queue it at the head of its
   live range list.  */
void
ira_add_live_range_to_object (ira_object_t object, int start, int finish)
{
  live_range_t p;
  p = ira_create_live_range (object, start, finish,
                             OBJECT_LIVE_RANGES (object));
  OBJECT_LIVE_RANGES (object) = p;
}

/* Copy allocno live range R and return the result.  */
static live_range_t
copy_live_range (live_range_t r)
{
  live_range_t p;

  p = live_range_pool.allocate ();
  *p = *r;
  return p;
}

/* Copy allocno live range list given by its head R and return the
   result.  */
live_range_t
ira_copy_live_range_list (live_range_t r)
{
  live_range_t p, first, last;

  if (r == NULL)
    return NULL;
  for (first = last = NULL; r != NULL; r = r->next)
    {
      p = copy_live_range (r);
      if (first == NULL)
        first = p;
      else
        last->next = p;
      last = p;
    }
  return first;
}

/* Merge ranges R1 and R2 and return the result.  The function
   maintains the order of ranges and tries to minimize the number of
   result ranges.  */
live_range_t
ira_merge_live_ranges (live_range_t r1, live_range_t r2)
{
  live_range_t first, last;

  if (r1 == NULL)
    return r2;
  if (r2 == NULL)
    return r1;
  for (first = last = NULL; r1 != NULL && r2 != NULL;)
    {
      if (r1->start < r2->start)
        std::swap (r1, r2);
      if (r1->start <= r2->finish + 1)
        {
          /* Intersected ranges: merge r1 and r2 into r1.  */
          r1->start = r2->start;
          if (r1->finish < r2->finish)
            r1->finish = r2->finish;
          live_range_t temp = r2;
          r2 = r2->next;
          ira_finish_live_range (temp);
          if (r2 == NULL)
            {
              /* To try to merge with subsequent ranges in r1.  */
              r2 = r1->next;
              r1->next = NULL;
            }
        }
      else
        {
          /* Add r1 to the result.  */
          if (first == NULL)
            first = last = r1;
          else
            {
              last->next = r1;
              last = r1;
            }
          r1 = r1->next;
          if (r1 == NULL)
            {
              /* To try to merge with subsequent ranges in r2.  */
              r1 = r2->next;
              r2->next = NULL;
            }
        }
    }
  if (r1 != NULL)
    {
      if (first == NULL)
        first = r1;
      else
        last->next = r1;
      ira_assert (r1->next == NULL);
    }
  else if (r2 != NULL)
    {
      if (first == NULL)
        first = r2;
      else
        last->next = r2;
      ira_assert (r2->next == NULL);
    }
  else
    {
      ira_assert (last->next == NULL);
    }
  return first;
}
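
/* For example, merging the list {[10,12], [1,4]} (live ranges are kept in
   decreasing order of their start point) with the list {[3,6]} produces
   {[10,12], [1,6]}: [3,6] overlaps [1,4], so the two fuse into [1,6].
   Because of the "r2->finish + 1" test above, merely adjacent ranges such
   as [5,6] and [7,9] also fuse, into [5,9].  */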

/* Return TRUE if live ranges R1 and R2 intersect.  */
bool
ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
{
  /* Remember the live ranges are always kept ordered.  */
  while (r1 != NULL && r2 != NULL)
    {
      if (r1->start > r2->finish)
        r1 = r1->next;
      else if (r2->start > r1->finish)
        r2 = r2->next;
      else
        return true;
    }
  return false;
}

/* Free allocno live range R.  */
void
ira_finish_live_range (live_range_t r)
{
  live_range_pool.remove (r);
}

/* Free list of allocno live ranges starting with R.  */
void
ira_finish_live_range_list (live_range_t r)
{
  live_range_t next_r;

  for (; r != NULL; r = next_r)
    {
      next_r = r->next;
      ira_finish_live_range (r);
    }
}

/* Free updated register costs of allocno A.  */
void
ira_free_allocno_updated_costs (ira_allocno_t a)
{
  enum reg_class aclass;

  aclass = ALLOCNO_CLASS (a);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
                          aclass);
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}

/* Free and nullify all cost vectors allocated earlier for allocno
   A.  */
static void
ira_free_allocno_costs (ira_allocno_t a)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj;
  ira_allocno_object_iterator oi;

  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
      ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
      if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
        ira_free (OBJECT_CONFLICT_ARRAY (obj));
      object_pool.remove (obj);
    }

  ira_allocnos[ALLOCNO_NUM (a)] = NULL;
  if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
                          aclass);
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}

/* Free the memory allocated for allocno A.  */
static void
finish_allocno (ira_allocno_t a)
{
  ira_free_allocno_costs (a);
  allocno_pool.remove (a);
}

/* Free the memory allocated for all allocnos.  */
static void
finish_allocnos (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    finish_allocno (a);
  ira_free (ira_regno_allocno_map);
  ira_object_id_map_vec.release ();
  allocno_vec.release ();
  allocno_pool.release ();
  object_pool.release ();
  live_range_pool.release ();
}


/* Pools for allocno preferences.  */
static object_allocator <ira_allocno_pref> pref_pool ("prefs");

/* Vec containing references to all created preferences.  It is a
   container of array ira_prefs.  */
static vec<ira_pref_t> pref_vec;

/* The function initializes data concerning allocno prefs.  */
static void
initiate_prefs (void)
{
  pref_vec.create (get_max_uid ());
  ira_prefs = NULL;
  ira_prefs_num = 0;
}

/* Return pref for A and HARD_REGNO if any.  */
static ira_pref_t
find_allocno_pref (ira_allocno_t a, int hard_regno)
{
  ira_pref_t pref;

  for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
    if (pref->allocno == a && pref->hard_regno == hard_regno)
      return pref;
  return NULL;
}

/* Create and return pref with given attributes A, HARD_REGNO, and FREQ.  */
ira_pref_t
ira_create_pref (ira_allocno_t a, int hard_regno, int freq)
{
  ira_pref_t pref;

  pref = pref_pool.allocate ();
  pref->num = ira_prefs_num;
  pref->allocno = a;
  pref->hard_regno = hard_regno;
  pref->freq = freq;
  pref_vec.safe_push (pref);
  ira_prefs = pref_vec.address ();
  ira_prefs_num = pref_vec.length ();
  return pref;
}

/* Attach a pref PREF to the corresponding allocno.  */
static void
add_allocno_pref_to_list (ira_pref_t pref)
{
  ira_allocno_t a = pref->allocno;

  pref->next_pref = ALLOCNO_PREFS (a);
  ALLOCNO_PREFS (a) = pref;
}

/* Create (or update frequency if the pref already exists) the pref of
   allocno A preferring HARD_REGNO with frequency FREQ.  */
void
ira_add_allocno_pref (ira_allocno_t a, int hard_regno, int freq)
{
  ira_pref_t pref;

  if (freq <= 0)
    return;
  if ((pref = find_allocno_pref (a, hard_regno)) != NULL)
    {
      pref->freq += freq;
      return;
    }
  pref = ira_create_pref (a, hard_regno, freq);
  ira_assert (a != NULL);
  add_allocno_pref_to_list (pref);
}

/* Print info about PREF into file F.  */
static void
print_pref (FILE *f, ira_pref_t pref)
{
  fprintf (f, " pref%d:a%d(r%d)<-hr%d@%d\n", pref->num,
           ALLOCNO_NUM (pref->allocno), ALLOCNO_REGNO (pref->allocno),
           pref->hard_regno, pref->freq);
}

/* Print info about PREF into stderr.  */
void
ira_debug_pref (ira_pref_t pref)
{
  print_pref (stderr, pref);
}

/* Print info about all prefs into file F.  */
static void
print_prefs (FILE *f)
{
  ira_pref_t pref;
  ira_pref_iterator pi;

  FOR_EACH_PREF (pref, pi)
    print_pref (f, pref);
}

/* Print info about all prefs into stderr.  */
void
ira_debug_prefs (void)
{
  print_prefs (stderr);
}

/* Print info about prefs involving allocno A into file F.  */
static void
print_allocno_prefs (FILE *f, ira_allocno_t a)
{
  ira_pref_t pref;

  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
    fprintf (f, " pref%d:hr%d@%d", pref->num, pref->hard_regno, pref->freq);
  fprintf (f, "\n");
}

/* Print info about prefs involving allocno A into stderr.  */
void
ira_debug_allocno_prefs (ira_allocno_t a)
{
  print_allocno_prefs (stderr, a);
}

/* The function frees memory allocated for PREF.  */
static void
finish_pref (ira_pref_t pref)
{
  ira_prefs[pref->num] = NULL;
  pref_pool.remove (pref);
}

/* Remove PREF from the list of allocno prefs and free memory for
   it.  */
void
ira_remove_pref (ira_pref_t pref)
{
  ira_pref_t cpref, prev;

  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, " Removing pref%d:hr%d@%d\n",
             pref->num, pref->hard_regno, pref->freq);
  for (prev = NULL, cpref = ALLOCNO_PREFS (pref->allocno);
       cpref != NULL;
       prev = cpref, cpref = cpref->next_pref)
    if (cpref == pref)
      break;
  ira_assert (cpref != NULL);
  if (prev == NULL)
    ALLOCNO_PREFS (pref->allocno) = pref->next_pref;
  else
    prev->next_pref = pref->next_pref;
  finish_pref (pref);
}

/* Remove all prefs of allocno A.  */
void
ira_remove_allocno_prefs (ira_allocno_t a)
{
  ira_pref_t pref, next_pref;

  for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = next_pref)
    {
      next_pref = pref->next_pref;
      finish_pref (pref);
    }
  ALLOCNO_PREFS (a) = NULL;
}

/* Free memory allocated for all prefs.  */
static void
finish_prefs (void)
{
  ira_pref_t pref;
  ira_pref_iterator pi;

  FOR_EACH_PREF (pref, pi)
    finish_pref (pref);
  pref_vec.release ();
  pref_pool.release ();
}


/* Pools for copies.  */
static object_allocator<ira_allocno_copy> copy_pool ("copies");

/* Vec containing references to all created copies.  It is a
   container of array ira_copies.  */
static vec<ira_copy_t> copy_vec;

/* The function initializes data concerning allocno copies.  */
static void
initiate_copies (void)
{
  copy_vec.create (get_max_uid ());
  ira_copies = NULL;
  ira_copies_num = 0;
}

/* Return copy connecting A1 and A2 and originated from INSN of
   LOOP_TREE_NODE if any.  */
static ira_copy_t
find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx_insn *insn,
                   ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp, next_cp;
  ira_allocno_t another_a;

  for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
    {
      if (cp->first == a1)
        {
          next_cp = cp->next_first_allocno_copy;
          another_a = cp->second;
        }
      else if (cp->second == a1)
        {
          next_cp = cp->next_second_allocno_copy;
          another_a = cp->first;
        }
      else
        gcc_unreachable ();
      if (another_a == a2 && cp->insn == insn
          && cp->loop_tree_node == loop_tree_node)
        return cp;
    }
  return NULL;
}

/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
   SECOND, FREQ, CONSTRAINT_P, and INSN.  */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
                 bool constraint_p, rtx_insn *insn,
                 ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  cp = copy_pool.allocate ();
  cp->num = ira_copies_num;
  cp->first = first;
  cp->second = second;
  cp->freq = freq;
  cp->constraint_p = constraint_p;
  cp->insn = insn;
  cp->loop_tree_node = loop_tree_node;
  copy_vec.safe_push (cp);
  ira_copies = copy_vec.address ();
  ira_copies_num = copy_vec.length ();
  return cp;
}

/* Attach a copy CP to the allocnos involved in the copy.  */
static void
add_allocno_copy_to_list (ira_copy_t cp)
{
  ira_allocno_t first = cp->first, second = cp->second;

  cp->prev_first_allocno_copy = NULL;
  cp->prev_second_allocno_copy = NULL;
  cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
  if (cp->next_first_allocno_copy != NULL)
    {
      if (cp->next_first_allocno_copy->first == first)
        cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
      else
        cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
    }
  cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
  if (cp->next_second_allocno_copy != NULL)
    {
      if (cp->next_second_allocno_copy->second == second)
        cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
      else
        cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
    }
  ALLOCNO_COPIES (first) = cp;
  ALLOCNO_COPIES (second) = cp;
}

/* Make a copy CP a canonical copy where number of the
   first allocno is less than the second one.  */
static void
swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
{
  if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
    return;

  std::swap (cp->first, cp->second);
  std::swap (cp->prev_first_allocno_copy, cp->prev_second_allocno_copy);
  std::swap (cp->next_first_allocno_copy, cp->next_second_allocno_copy);
}

/* Create (or update frequency if the copy already exists) and return
   the copy of allocnos FIRST and SECOND with frequency FREQ
   corresponding to move insn INSN (if any) and originated from
   LOOP_TREE_NODE.  */
ira_copy_t
ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
                      bool constraint_p, rtx_insn *insn,
                      ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
    {
      cp->freq += freq;
      return cp;
    }
  cp = ira_create_copy (first, second, freq, constraint_p, insn,
                        loop_tree_node);
  ira_assert (first != NULL && second != NULL);
  add_allocno_copy_to_list (cp);
  swap_allocno_copy_ends_if_necessary (cp);
  return cp;
}

/* Print info about copy CP into file F.  */
static void
print_copy (FILE *f, ira_copy_t cp)
{
  fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
           ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
           ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
           cp->insn != NULL
           ? "move" : cp->constraint_p ? "constraint" : "shuffle");
}

DEBUG_FUNCTION void
debug (ira_allocno_copy &ref)
{
  print_copy (stderr, &ref);
}

DEBUG_FUNCTION void
debug (ira_allocno_copy *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Print info about copy CP into stderr.  */
void
ira_debug_copy (ira_copy_t cp)
{
  print_copy (stderr, cp);
}

/* Print info about all copies into file F.  */
static void
print_copies (FILE *f)
{
  ira_copy_t cp;
  ira_copy_iterator ci;

  FOR_EACH_COPY (cp, ci)
    print_copy (f, cp);
}

/* Print info about all copies into stderr.  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}

/* Print info about copies involving allocno A into file F.  */
static void
print_allocno_copies (FILE *f, ira_allocno_t a)
{
  ira_allocno_t another_a;
  ira_copy_t cp, next_cp;

  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
    {
      if (cp->first == a)
        {
          next_cp = cp->next_first_allocno_copy;
          another_a = cp->second;
        }
      else if (cp->second == a)
        {
          next_cp = cp->next_second_allocno_copy;
          another_a = cp->first;
        }
      else
        gcc_unreachable ();
      fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
               ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
    }
  fprintf (f, "\n");
}

DEBUG_FUNCTION void
debug (ira_allocno &ref)
{
  print_allocno_copies (stderr, &ref);
}

DEBUG_FUNCTION void
debug (ira_allocno *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Print info about copies involving allocno A into stderr.  */
void
ira_debug_allocno_copies (ira_allocno_t a)
{
  print_allocno_copies (stderr, a);
}

/* The function frees memory allocated for copy CP.  */
static void
finish_copy (ira_copy_t cp)
{
  copy_pool.remove (cp);
}


/* Free memory allocated for all copies.  */
static void
finish_copies (void)
{
  ira_copy_t cp;
  ira_copy_iterator ci;

  FOR_EACH_COPY (cp, ci)
    finish_copy (cp);
  copy_vec.release ();
  copy_pool.release ();
}


/* Pools for cost vectors.  They are defined only for allocno
   classes.  */
static pool_allocator *cost_vector_pool[N_REG_CLASSES];

/* The function initiates work with hard register cost vectors.  It
   creates an allocation pool for each allocno class.  */
static void
initiate_cost_vectors (void)
{
  int i;
  enum reg_class aclass;

  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      cost_vector_pool[aclass] = new pool_allocator
        ("cost vectors", sizeof (int) * (ira_class_hard_regs_num[aclass]));
    }
}

/* Allocate and return a cost vector for ACLASS.  */
int *
ira_allocate_cost_vector (reg_class_t aclass)
{
  return (int *) cost_vector_pool[(int) aclass]->allocate ();
}

/* Free a cost vector VEC for ACLASS.  */
void
ira_free_cost_vector (int *vec, reg_class_t aclass)
{
  ira_assert (vec != NULL);
  cost_vector_pool[(int) aclass]->remove (vec);
}

/* Finish work with hard register cost vectors.  Release the allocation
   pool for each allocno class.  */
static void
finish_cost_vectors (void)
{
  int i;
  enum reg_class aclass;

  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      delete cost_vector_pool[aclass];
    }
}


/* Compute a post-ordering of the reverse control flow of the loop body
   designated by the children nodes of LOOP_NODE, whose body nodes in
   pre-order are input as LOOP_PREORDER.  Return a VEC with a post-order
   of the reverse loop body.

   For the post-order of the reverse CFG, we visit the basic blocks in
   LOOP_PREORDER array in the reverse order of where they appear.
   This is important: We do not just want to compute a post-order of
   the reverse CFG, we want to make a best-guess for a visiting order that
   minimizes the number of chain elements per allocno live range.  If the
   blocks would be visited in a different order, we would still compute a
   correct post-ordering but it would be less likely that two nodes
   connected by an edge in the CFG are neighbors in the topsort.  */

static vec<ira_loop_tree_node_t>
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
                                  vec<ira_loop_tree_node_t> loop_preorder)
{
  vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
  unsigned int n_loop_preorder;

  n_loop_preorder = loop_preorder.length ();
  if (n_loop_preorder != 0)
    {
      ira_loop_tree_node_t subloop_node;
      unsigned int i;
      auto_vec<ira_loop_tree_node_t> dfs_stack;

      /* This is a bit of strange abuse of the BB_VISITED flag:  We use
         the flag to mark blocks we still have to visit to add them to
         our post-order.  Define an alias to avoid confusion.  */
#define BB_TO_VISIT BB_VISITED

      FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
        {
          gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
          subloop_node->bb->flags |= BB_TO_VISIT;
        }

      topsort_nodes.create (n_loop_preorder);
      dfs_stack.create (n_loop_preorder);

      FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
        {
          if (! (subloop_node->bb->flags & BB_TO_VISIT))
            continue;

          subloop_node->bb->flags &= ~BB_TO_VISIT;
          dfs_stack.quick_push (subloop_node);
          while (! dfs_stack.is_empty ())
            {
              edge e;
              edge_iterator ei;

              ira_loop_tree_node_t n = dfs_stack.last ();
              FOR_EACH_EDGE (e, ei, n->bb->preds)
                {
                  ira_loop_tree_node_t pred_node;
                  basic_block pred_bb = e->src;

                  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
                    continue;

                  pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
                  if (pred_node != n
                      && (pred_node->bb->flags & BB_TO_VISIT))
                    {
                      pred_node->bb->flags &= ~BB_TO_VISIT;
                      dfs_stack.quick_push (pred_node);
                    }
                }
              if (n == dfs_stack.last ())
                {
                  dfs_stack.pop ();
                  topsort_nodes.quick_push (n);
                }
            }
        }

#undef BB_TO_VISIT
    }

  gcc_assert (topsort_nodes.length () == n_loop_preorder);
  return topsort_nodes;
}
1744
058e97ec
VM
1745/* The current loop tree node and its regno allocno map. */
1746ira_loop_tree_node_t ira_curr_loop_tree_node;
1747ira_allocno_t *ira_curr_regno_allocno_map;
1748
1749/* This recursive function traverses the loop tree with root LOOP_NODE,
 1750 calling the non-null functions PREORDER_FUNC and POSTORDER_FUNC
 1751 in preorder and postorder respectively. The function sets up
 1752 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
 1753 the basic block nodes of LOOP_NODE are also processed (before its
e6a7da82
SB
1754 subloop nodes).
1755
1756 If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
1757 the loop are passed in the *reverse* post-order of the *reverse*
1758 CFG. This is only used by ira_create_allocno_live_ranges, which
1759 wants to visit basic blocks in this order to minimize the number
1760 of elements per live range chain.
1761 Note that the loop tree nodes are still visited in the normal,
1762 forward post-order of the loop tree. */
1763
058e97ec
VM
1764void
1765ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1766 void (*preorder_func) (ira_loop_tree_node_t),
1767 void (*postorder_func) (ira_loop_tree_node_t))
1768{
1769 ira_loop_tree_node_t subloop_node;
1770
1771 ira_assert (loop_node->bb == NULL);
1772 ira_curr_loop_tree_node = loop_node;
1773 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1774
1775 if (preorder_func != NULL)
1776 (*preorder_func) (loop_node);
b8698a0f 1777
058e97ec 1778 if (bb_p)
e6a7da82 1779 {
ef062b13 1780 auto_vec<ira_loop_tree_node_t> loop_preorder;
e6a7da82
SB
1781 unsigned int i;
1782
1783 /* Add all nodes to the set of nodes to visit. The IRA loop tree
1784 is set up such that nodes in the loop body appear in a pre-order
1785 of their place in the CFG. */
1786 for (subloop_node = loop_node->children;
1787 subloop_node != NULL;
1788 subloop_node = subloop_node->next)
1789 if (subloop_node->bb != NULL)
9771b263 1790 loop_preorder.safe_push (subloop_node);
e6a7da82
SB
1791
1792 if (preorder_func != NULL)
9771b263 1793 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1794 (*preorder_func) (subloop_node);
1795
1796 if (postorder_func != NULL)
058e97ec 1797 {
9771b263 1798 vec<ira_loop_tree_node_t> loop_rev_postorder =
e6a7da82 1799 ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
9771b263 1800 FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
058e97ec 1801 (*postorder_func) (subloop_node);
9771b263 1802 loop_rev_postorder.release ();
058e97ec 1803 }
e6a7da82
SB
1804 }
1805
058e97ec
VM
1806 for (subloop_node = loop_node->subloops;
1807 subloop_node != NULL;
1808 subloop_node = subloop_node->subloop_next)
1809 {
1810 ira_assert (subloop_node->bb == NULL);
1811 ira_traverse_loop_tree (bb_p, subloop_node,
1812 preorder_func, postorder_func);
1813 }
1814
1815 ira_curr_loop_tree_node = loop_node;
1816 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1817
1818 if (postorder_func != NULL)
1819 (*postorder_func) (loop_node);
1820}
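/* A minimal sketch of how the traversal above is typically driven
   (illustrative only and kept out of the build; the callback and counter
   below are hypothetical): count the basic block nodes visited below the
   root of the loop tree.  */
#if 0
static int example_bb_node_count;

static void
example_count_bb_nodes (ira_loop_tree_node_t node)
{
  /* Loop nodes have a NULL bb; count only basic block nodes.  */
  if (node->bb != NULL)
    example_bb_node_count++;
}

/* A caller would do:

     example_bb_node_count = 0;
     ira_traverse_loop_tree (true, ira_loop_tree_root,
                             example_count_bb_nodes, NULL);  */
#endif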
1821
1822\f
1823
1824/* The basic block currently being processed. */
1825static basic_block curr_bb;
1826
1827/* This recursive function creates allocnos corresponding to
 1828 pseudo-registers contained in X. True OUTPUT_P means that X is
d1bb282e 1829 an lvalue. OUTER corresponds to the parent expression of X. */
058e97ec 1830static void
d1bb282e 1831create_insn_allocnos (rtx x, rtx outer, bool output_p)
058e97ec
VM
1832{
1833 int i, j;
1834 const char *fmt;
1835 enum rtx_code code = GET_CODE (x);
1836
1837 if (code == REG)
1838 {
1839 int regno;
1840
1841 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1842 {
1843 ira_allocno_t a;
1844
1845 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
d1bb282e
DS
1846 {
1847 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1848 if (outer != NULL && GET_CODE (outer) == SUBREG)
1849 {
ef4bddc2 1850 machine_mode wmode = GET_MODE (outer);
bd4288c0 1851 if (partial_subreg_p (ALLOCNO_WMODE (a), wmode))
d1bb282e
DS
1852 ALLOCNO_WMODE (a) = wmode;
1853 }
1854 }
b8698a0f 1855
058e97ec
VM
1856 ALLOCNO_NREFS (a)++;
1857 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
058e97ec
VM
1858 if (output_p)
1859 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1860 }
1861 return;
1862 }
1863 else if (code == SET)
1864 {
d1bb282e
DS
1865 create_insn_allocnos (SET_DEST (x), NULL, true);
1866 create_insn_allocnos (SET_SRC (x), NULL, false);
058e97ec
VM
1867 return;
1868 }
1869 else if (code == CLOBBER)
1870 {
d1bb282e 1871 create_insn_allocnos (XEXP (x, 0), NULL, true);
058e97ec
VM
1872 return;
1873 }
1874 else if (code == MEM)
1875 {
d1bb282e 1876 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1877 return;
1878 }
b8698a0f 1879 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
058e97ec
VM
1880 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1881 {
d1bb282e
DS
1882 create_insn_allocnos (XEXP (x, 0), NULL, true);
1883 create_insn_allocnos (XEXP (x, 0), NULL, false);
058e97ec
VM
1884 return;
1885 }
1886
1887 fmt = GET_RTX_FORMAT (code);
1888 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1889 {
1890 if (fmt[i] == 'e')
d1bb282e 1891 create_insn_allocnos (XEXP (x, i), x, output_p);
058e97ec
VM
1892 else if (fmt[i] == 'E')
1893 for (j = 0; j < XVECLEN (x, i); j++)
d1bb282e 1894 create_insn_allocnos (XVECEXP (x, i, j), x, output_p);
058e97ec
VM
1895 }
1896}
1897
1898/* Create allocnos corresponding to pseudo-registers living in the
1899 basic block represented by the corresponding loop tree node
1900 BB_NODE. */
1901static void
1902create_bb_allocnos (ira_loop_tree_node_t bb_node)
1903{
1904 basic_block bb;
070a1983 1905 rtx_insn *insn;
058e97ec
VM
1906 unsigned int i;
1907 bitmap_iterator bi;
1908
1909 curr_bb = bb = bb_node->bb;
1910 ira_assert (bb != NULL);
acb37d29 1911 FOR_BB_INSNS_REVERSE (bb, insn)
b5b8b0ac 1912 if (NONDEBUG_INSN_P (insn))
d1bb282e 1913 create_insn_allocnos (PATTERN (insn), NULL, false);
058e97ec
VM
 1914 /* It might be an allocno living through from one subloop to
1915 another. */
bf744527 1916 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
058e97ec
VM
1917 if (ira_curr_regno_allocno_map[i] == NULL)
1918 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1919}
1920
1921/* Create allocnos corresponding to pseudo-registers living on edge E
1922 (a loop entry or exit). Also mark the allocnos as living on the
1923 loop border. */
1924static void
1925create_loop_allocnos (edge e)
1926{
1927 unsigned int i;
1928 bitmap live_in_regs, border_allocnos;
1929 bitmap_iterator bi;
1930 ira_loop_tree_node_t parent;
1931
bf744527 1932 live_in_regs = df_get_live_in (e->dest);
058e97ec 1933 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
bf744527 1934 EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
058e97ec
VM
1935 FIRST_PSEUDO_REGISTER, i, bi)
1936 if (bitmap_bit_p (live_in_regs, i))
1937 {
1938 if (ira_curr_regno_allocno_map[i] == NULL)
1939 {
 1940 /* The order of creation is important for building a correct
 1941 ira_regno_allocno_map. */
1942 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1943 && parent->regno_allocno_map[i] == NULL)
1944 ira_create_allocno (i, false, parent);
1945 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1946 }
1947 bitmap_set_bit (border_allocnos,
1948 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1949 }
1950}
1951
1952/* Create allocnos corresponding to pseudo-registers living in the loop
1953 represented by the corresponding loop tree node LOOP_NODE. This
1954 function is called by ira_traverse_loop_tree. */
1955static void
1956create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1957{
1958 if (loop_node->bb != NULL)
1959 create_bb_allocnos (loop_node);
1960 else if (loop_node != ira_loop_tree_root)
1961 {
1962 int i;
1963 edge_iterator ei;
1964 edge e;
058e97ec 1965
2608d841 1966 ira_assert (current_loops != NULL);
058e97ec
VM
1967 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1968 if (e->src != loop_node->loop->latch)
1969 create_loop_allocnos (e);
b8698a0f 1970
4b9d61f7 1971 auto_vec<edge> edges = get_loop_exit_edges (loop_node->loop);
9771b263 1972 FOR_EACH_VEC_ELT (edges, i, e)
058e97ec 1973 create_loop_allocnos (e);
058e97ec
VM
1974 }
1975}
1976
1977/* Propagate information about allocnos modified inside the loop given
1978 by its LOOP_TREE_NODE to its parent. */
1979static void
1980propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1981{
1982 if (loop_tree_node == ira_loop_tree_root)
1983 return;
1984 ira_assert (loop_tree_node->bb == NULL);
1985 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1986 loop_tree_node->modified_regnos);
1987}
1988
1989/* Propagate new info about allocno A (see the comments about accumulated
 1990 info in the allocno definition) to the corresponding allocno on the upper
 1991 loop tree level. Thus allocnos on upper levels accumulate
 1992 information about the corresponding allocnos in nested regions.
 1993 Here the new info means the allocno info finally calculated in this
 1994 file. */
1995static void
1996propagate_allocno_info (void)
1997{
1998 int i;
1999 ira_allocno_t a, parent_a;
2000 ira_loop_tree_node_t parent;
1756cb66 2001 enum reg_class aclass;
058e97ec 2002
7db7ed3c
VM
2003 if (flag_ira_region != IRA_REGION_ALL
2004 && flag_ira_region != IRA_REGION_MIXED)
058e97ec
VM
2005 return;
2006 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2007 for (a = ira_regno_allocno_map[i];
2008 a != NULL;
2009 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2010 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
2011 && (parent_a = parent->regno_allocno_map[i]) != NULL
2012 /* There are no caps yet at this point. So use
2013 border_allocnos to find allocnos for the propagation. */
2014 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
2015 ALLOCNO_NUM (a)))
2016 {
927425df
VM
2017 if (! ALLOCNO_BAD_SPILL_P (a))
2018 ALLOCNO_BAD_SPILL_P (parent_a) = false;
058e97ec
VM
2019 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
2020 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
2021 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3c55880a 2022 merge_hard_reg_conflicts (a, parent_a, true);
058e97ec
VM
2023 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2024 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
2025 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
2026 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
6c476222
RS
2027 ALLOCNO_CROSSED_CALLS_ABIS (parent_a)
2028 |= ALLOCNO_CROSSED_CALLS_ABIS (a);
44942965
RS
2029 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a)
2030 |= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
058e97ec
VM
2031 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2032 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1756cb66
VM
2033 aclass = ALLOCNO_CLASS (a);
2034 ira_assert (aclass == ALLOCNO_CLASS (parent_a));
058e97ec 2035 ira_allocate_and_accumulate_costs
1756cb66 2036 (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
058e97ec
VM
2037 ALLOCNO_HARD_REG_COSTS (a));
2038 ira_allocate_and_accumulate_costs
2039 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1756cb66 2040 aclass,
058e97ec 2041 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1756cb66
VM
2042 ALLOCNO_CLASS_COST (parent_a)
2043 += ALLOCNO_CLASS_COST (a);
058e97ec 2044 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
058e97ec
VM
2045 }
2046}
2047
2048/* Create allocnos corresponding to pseudo-registers in the current
2049 function. Traverse the loop tree for this. */
2050static void
2051create_allocnos (void)
2052{
2053 /* We need to process BB first to correctly link allocnos by member
2054 next_regno_allocno. */
2055 ira_traverse_loop_tree (true, ira_loop_tree_root,
2056 create_loop_tree_node_allocnos, NULL);
2057 if (optimize)
2058 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
2059 propagate_modified_regnos);
2060}
2061
2062\f
2063
2064/* This page contains functions to remove some regions from consideration
 2065 for separate register allocation. We remove regions whose separate
 2066 allocation will hardly improve the result. As a result we speed up
 2067 regional register allocation. */
2068
9140d27b 2069/* The function changes the object in the range list given by R to OBJ. */
058e97ec 2070static void
9140d27b 2071change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
2072{
2073 for (; r != NULL; r = r->next)
9140d27b 2074 r->object = obj;
058e97ec
VM
2075}
2076
3c55880a
BS
2077/* Move all live ranges associated with allocno FROM to allocno TO. */
2078static void
2079move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2080{
ac0ab4f7
BS
2081 int i;
2082 int n = ALLOCNO_NUM_OBJECTS (from);
2083
2084 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
3c55880a 2085
ac0ab4f7 2086 for (i = 0; i < n; i++)
3c55880a 2087 {
ac0ab4f7
BS
2088 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2089 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2090 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2091
2092 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2093 {
2094 fprintf (ira_dump_file,
2095 " Moving ranges of a%dr%d to a%dr%d: ",
2096 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2097 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2098 ira_print_live_range_list (ira_dump_file, lr);
2099 }
2100 change_object_in_range_list (lr, to_obj);
2101 OBJECT_LIVE_RANGES (to_obj)
2102 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
2103 OBJECT_LIVE_RANGES (from_obj) = NULL;
3c55880a 2104 }
3c55880a
BS
2105}
2106
3c55880a
BS
2107static void
2108copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2109{
ac0ab4f7
BS
2110 int i;
2111 int n = ALLOCNO_NUM_OBJECTS (from);
3c55880a 2112
ac0ab4f7
BS
2113 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
2114
2115 for (i = 0; i < n; i++)
3c55880a 2116 {
ac0ab4f7
BS
2117 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2118 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2119 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2120
2121 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2122 {
2123 fprintf (ira_dump_file, " Copying ranges of a%dr%d to a%dr%d: ",
2124 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2125 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2126 ira_print_live_range_list (ira_dump_file, lr);
2127 }
2128 lr = ira_copy_live_range_list (lr);
2129 change_object_in_range_list (lr, to_obj);
2130 OBJECT_LIVE_RANGES (to_obj)
2131 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
3c55880a 2132 }
3c55880a
BS
2133}
2134
058e97ec
VM
2135/* Return TRUE if NODE represents a loop with low register
2136 pressure. */
2137static bool
2138low_pressure_loop_node_p (ira_loop_tree_node_t node)
2139{
2140 int i;
1756cb66 2141 enum reg_class pclass;
b8698a0f 2142
058e97ec
VM
2143 if (node->bb != NULL)
2144 return false;
b8698a0f 2145
1756cb66 2146 for (i = 0; i < ira_pressure_classes_num; i++)
058e97ec 2147 {
1756cb66 2148 pclass = ira_pressure_classes[i];
f508f827
RS
2149 if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
2150 && ira_class_hard_regs_num[pclass] > 1)
058e97ec
VM
2151 return false;
2152 }
2153 return true;
2154}
2155
30a435d8
VM
2156#ifdef STACK_REGS
2157/* Return TRUE if LOOP has a complex enter or exit edge. We don't
2158 form a region from such loop if the target use stack register
67914693 2159 because reg-stack.c cannot deal with such edges. */
8c5fdaae 2160static bool
99b1c316 2161loop_with_complex_edge_p (class loop *loop)
8c5fdaae
VM
2162{
2163 int i;
2164 edge_iterator ei;
2165 edge e;
f5843d08 2166 bool res;
8c5fdaae
VM
2167
2168 FOR_EACH_EDGE (e, ei, loop->header->preds)
2169 if (e->flags & EDGE_EH)
2170 return true;
4b9d61f7 2171 auto_vec<edge> edges = get_loop_exit_edges (loop);
f5843d08 2172 res = false;
9771b263 2173 FOR_EACH_VEC_ELT (edges, i, e)
30a435d8 2174 if (e->flags & EDGE_COMPLEX)
f5843d08
RG
2175 {
2176 res = true;
2177 break;
2178 }
f5843d08 2179 return res;
8c5fdaae 2180}
30a435d8 2181#endif
8c5fdaae 2182
30ea859e
VM
2183/* Sort loops for marking them for removal. We put already marked
 2184 loops first, then less frequent loops next, and finally outer
 2185 loops. */
2186static int
2187loop_compare_func (const void *v1p, const void *v2p)
2188{
2189 int diff;
2190 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
2191 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
2192
2193 ira_assert (l1->parent != NULL && l2->parent != NULL);
2194 if (l1->to_remove_p && ! l2->to_remove_p)
2195 return -1;
2196 if (! l1->to_remove_p && l2->to_remove_p)
2197 return 1;
e7a74006
JH
2198 if ((diff = l1->loop->header->count.to_frequency (cfun)
2199 - l2->loop->header->count.to_frequency (cfun)) != 0)
30ea859e
VM
2200 return diff;
2201 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
2202 return diff;
2203 /* Make sorting stable. */
2608d841 2204 return l1->loop_num - l2->loop_num;
30ea859e
VM
2205}
2206
30ea859e
VM
2207/* Mark loops which should be removed from regional allocation. We
 2208 remove a loop with low register pressure inside another loop with
 2209 low register pressure. In this case a separate allocation of the
 2210 loop hardly helps (for an irregular register file architecture it
 2211 could help by choosing a better hard register in the loop, but we
 2212 prefer faster allocation even in this case). We also remove cheap
028d4092 2213 loops if there are more than param_ira_max_loops_num of them. Loops
8c5fdaae
VM
 2214 with EH entry or exit edges are removed too because the allocation
 2215 might require putting pseudo moves on the EH edges (we could still
 2216 do this for pseudos with caller-saved hard registers in some cases,
 2217 but it is impossible to say here or during the top-down allocation
 2218 pass which hard registers the pseudos finally get). */
30ea859e
VM
2219static void
2220mark_loops_for_removal (void)
058e97ec 2221{
30ea859e
VM
2222 int i, n;
2223 ira_loop_tree_node_t *sorted_loops;
2224 loop_p loop;
2225
2608d841 2226 ira_assert (current_loops != NULL);
30ea859e
VM
2227 sorted_loops
2228 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
0fc822d0
RB
2229 * number_of_loops (cfun));
2230 for (n = i = 0; vec_safe_iterate (get_loops (cfun), i, &loop); i++)
30ea859e
VM
2231 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2232 {
2233 if (ira_loop_nodes[i].parent == NULL)
2234 {
2235 /* Don't remove the root. */
2236 ira_loop_nodes[i].to_remove_p = false;
2237 continue;
2238 }
2239 sorted_loops[n++] = &ira_loop_nodes[i];
2240 ira_loop_nodes[i].to_remove_p
8c5fdaae
VM
2241 = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
2242 && low_pressure_loop_node_p (&ira_loop_nodes[i]))
30a435d8
VM
2243#ifdef STACK_REGS
2244 || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
2245#endif
2246 );
30ea859e
VM
2247 }
2248 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
028d4092 2249 for (i = 0; i < n - param_ira_max_loops_num; i++)
30ea859e
VM
2250 {
2251 sorted_loops[i]->to_remove_p = true;
2252 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2253 fprintf
2254 (ira_dump_file,
2255 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
2608d841 2256 sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
e7a74006 2257 sorted_loops[i]->loop->header->count.to_frequency (cfun),
30ea859e
VM
2258 loop_depth (sorted_loops[i]->loop),
2259 low_pressure_loop_node_p (sorted_loops[i]->parent)
2260 && low_pressure_loop_node_p (sorted_loops[i])
2261 ? "low pressure" : "cheap loop");
2262 }
2263 ira_free (sorted_loops);
058e97ec
VM
2264}
2265
311aab06
VM
2266/* Mark all loops but the root for removal. */
2267static void
2268mark_all_loops_for_removal (void)
2269{
2270 int i;
2271 loop_p loop;
2272
2608d841 2273 ira_assert (current_loops != NULL);
0fc822d0 2274 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
311aab06
VM
2275 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2276 {
2277 if (ira_loop_nodes[i].parent == NULL)
2278 {
2279 /* Don't remove the root. */
2280 ira_loop_nodes[i].to_remove_p = false;
2281 continue;
2282 }
2283 ira_loop_nodes[i].to_remove_p = true;
2284 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2285 fprintf
2286 (ira_dump_file,
2287 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
2608d841 2288 ira_loop_nodes[i].loop_num,
311aab06 2289 ira_loop_nodes[i].loop->header->index,
e7a74006 2290 ira_loop_nodes[i].loop->header->count.to_frequency (cfun),
311aab06
VM
2291 loop_depth (ira_loop_nodes[i].loop));
2292 }
2293}
30ea859e 2294
058e97ec 2295/* Definition of vector of loop tree nodes. */
058e97ec
VM
2296
2297/* Vec containing references to all removed loop tree nodes. */
9771b263 2298static vec<ira_loop_tree_node_t> removed_loop_vec;
058e97ec
VM
2299
2300/* Vec containing references to all children of loop tree nodes. */
9771b263 2301static vec<ira_loop_tree_node_t> children_vec;
058e97ec
VM
2302
2303/* Remove subregions of NODE if their separate allocation will not
2304 improve the result. */
2305static void
2306remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
2307{
2308 unsigned int start;
2309 bool remove_p;
2310 ira_loop_tree_node_t subnode;
2311
30ea859e 2312 remove_p = node->to_remove_p;
058e97ec 2313 if (! remove_p)
9771b263
DN
2314 children_vec.safe_push (node);
2315 start = children_vec.length ();
058e97ec
VM
2316 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
2317 if (subnode->bb == NULL)
2318 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
2319 else
9771b263 2320 children_vec.safe_push (subnode);
058e97ec
VM
2321 node->children = node->subloops = NULL;
2322 if (remove_p)
2323 {
9771b263 2324 removed_loop_vec.safe_push (node);
058e97ec
VM
2325 return;
2326 }
9771b263 2327 while (children_vec.length () > start)
058e97ec 2328 {
9771b263 2329 subnode = children_vec.pop ();
058e97ec
VM
2330 subnode->parent = node;
2331 subnode->next = node->children;
2332 node->children = subnode;
2333 if (subnode->bb == NULL)
2334 {
2335 subnode->subloop_next = node->subloops;
2336 node->subloops = subnode;
2337 }
2338 }
2339}
2340
c6bb4c93
VM
2341/* Return TRUE if NODE is inside PARENT. */
2342static bool
2343loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2344{
2345 for (node = node->parent; node != NULL; node = node->parent)
2346 if (node == parent)
2347 return true;
2348 return false;
2349}
2350
2351/* Sort allocnos according to their order in regno allocno list. */
2352static int
2353regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2354{
2355 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2356 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2357 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2358 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2359
2360 if (loop_is_inside_p (n1, n2))
2361 return -1;
2362 else if (loop_is_inside_p (n2, n1))
2363 return 1;
2364 /* If allocnos are equally good, sort by allocno numbers, so that
2365 the results of qsort leave nothing to chance. We put allocnos
 2366 with higher numbers first in the list because that is the original
 2367 order for allocnos from loops at the same level. */
2368 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2369}
2370
2371/* This array is used to sort allocnos to restore allocno order in
2372 the regno allocno list. */
2373static ira_allocno_t *regno_allocnos;
2374
2375/* Restore allocno order for REGNO in the regno allocno list. */
2376static void
2377ira_rebuild_regno_allocno_list (int regno)
2378{
2379 int i, n;
2380 ira_allocno_t a;
2381
2382 for (n = 0, a = ira_regno_allocno_map[regno];
2383 a != NULL;
2384 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2385 regno_allocnos[n++] = a;
2386 ira_assert (n > 0);
b8698a0f 2387 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
c6bb4c93
VM
2388 regno_allocno_order_compare_func);
2389 for (i = 1; i < n; i++)
2390 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
2391 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
2392 ira_regno_allocno_map[regno] = regno_allocnos[0];
2393 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2394 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
2395}
2396
311aab06
VM
2397/* Propagate info from allocno FROM_A to allocno A. */
2398static void
2399propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
2400{
1756cb66 2401 enum reg_class aclass;
311aab06 2402
3c55880a 2403 merge_hard_reg_conflicts (from_a, a, false);
311aab06
VM
2404 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
2405 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
2406 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
311aab06 2407 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
e384e6b5
BS
2408 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
2409 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
6c476222 2410 ALLOCNO_CROSSED_CALLS_ABIS (a) |= ALLOCNO_CROSSED_CALLS_ABIS (from_a);
44942965
RS
2411 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a)
2412 |= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (from_a);
c2ba7e7a 2413
311aab06
VM
2414 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
2415 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
2416 if (! ALLOCNO_BAD_SPILL_P (from_a))
2417 ALLOCNO_BAD_SPILL_P (a) = false;
1756cb66
VM
2418 aclass = ALLOCNO_CLASS (from_a);
2419 ira_assert (aclass == ALLOCNO_CLASS (a));
2420 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
311aab06
VM
2421 ALLOCNO_HARD_REG_COSTS (from_a));
2422 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 2423 aclass,
311aab06 2424 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
1756cb66 2425 ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
311aab06
VM
2426 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
2427}
2428
058e97ec
VM
2429/* Remove allocnos from loops removed from the allocation
2430 consideration. */
2431static void
2432remove_unnecessary_allocnos (void)
2433{
2434 int regno;
c6bb4c93 2435 bool merged_p, rebuild_p;
058e97ec
VM
2436 ira_allocno_t a, prev_a, next_a, parent_a;
2437 ira_loop_tree_node_t a_node, parent;
058e97ec
VM
2438
2439 merged_p = false;
c6bb4c93 2440 regno_allocnos = NULL;
058e97ec 2441 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
c6bb4c93
VM
2442 {
2443 rebuild_p = false;
2444 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
2445 a != NULL;
2446 a = next_a)
2447 {
2448 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
2449 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2450 if (! a_node->to_remove_p)
2451 prev_a = a;
2452 else
2453 {
2454 for (parent = a_node->parent;
2455 (parent_a = parent->regno_allocno_map[regno]) == NULL
2456 && parent->to_remove_p;
2457 parent = parent->parent)
2458 ;
2459 if (parent_a == NULL)
2460 {
311aab06
VM
 2461 /* There are no allocnos with the same regno in an
 2462 upper region -- just move the allocno to the
2463 upper region. */
c6bb4c93
VM
2464 prev_a = a;
2465 ALLOCNO_LOOP_TREE_NODE (a) = parent;
2466 parent->regno_allocno_map[regno] = a;
2467 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2468 rebuild_p = true;
2469 }
2470 else
2471 {
2472 /* Remove the allocno and update info of allocno in
2473 the upper region. */
2474 if (prev_a == NULL)
2475 ira_regno_allocno_map[regno] = next_a;
2476 else
2477 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
3c55880a 2478 move_allocno_live_ranges (a, parent_a);
c6bb4c93 2479 merged_p = true;
311aab06 2480 propagate_some_info_from_allocno (parent_a, a);
46044dd9
L
 2481 /* Remove it from the corresponding regno allocno
 2482 map to avoid propagating info from a subsequent
 2483 allocno into this already removed allocno. */
2484 a_node->regno_allocno_map[regno] = NULL;
3b6d1699 2485 ira_remove_allocno_prefs (a);
c6bb4c93
VM
2486 finish_allocno (a);
2487 }
2488 }
2489 }
2490 if (rebuild_p)
2491 /* We need to restore the order in regno allocno list. */
2492 {
2493 if (regno_allocnos == NULL)
2494 regno_allocnos
2495 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2496 * ira_allocnos_num);
2497 ira_rebuild_regno_allocno_list (regno);
2498 }
2499 }
058e97ec
VM
2500 if (merged_p)
2501 ira_rebuild_start_finish_chains ();
c6bb4c93
VM
2502 if (regno_allocnos != NULL)
2503 ira_free (regno_allocnos);
058e97ec
VM
2504}
2505
311aab06 2506/* Remove allocnos from all loops but the root. */
058e97ec 2507static void
311aab06 2508remove_low_level_allocnos (void)
058e97ec 2509{
311aab06
VM
2510 int regno;
2511 bool merged_p, propagate_p;
2512 ira_allocno_t a, top_a;
2513 ira_loop_tree_node_t a_node, parent;
311aab06
VM
2514 ira_allocno_iterator ai;
2515
2516 merged_p = false;
2517 FOR_EACH_ALLOCNO (a, ai)
2518 {
2519 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2520 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2521 continue;
2522 regno = ALLOCNO_REGNO (a);
2523 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
2524 {
2525 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2526 ira_loop_tree_root->regno_allocno_map[regno] = a;
2527 continue;
2528 }
2529 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2530 /* Remove the allocno and update info of allocno in the upper
2531 region. */
3c55880a 2532 move_allocno_live_ranges (a, top_a);
311aab06 2533 merged_p = true;
311aab06
VM
2534 if (propagate_p)
2535 propagate_some_info_from_allocno (top_a, a);
2536 }
2537 FOR_EACH_ALLOCNO (a, ai)
2538 {
2539 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2540 if (a_node == ira_loop_tree_root)
2541 continue;
2542 parent = a_node->parent;
2543 regno = ALLOCNO_REGNO (a);
2544 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2545 ira_assert (ALLOCNO_CAP (a) != NULL);
2546 else if (ALLOCNO_CAP (a) == NULL)
2547 ira_assert (parent->regno_allocno_map[regno] != NULL);
2548 }
2549 FOR_EACH_ALLOCNO (a, ai)
2550 {
2551 regno = ALLOCNO_REGNO (a);
2552 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2553 {
ac0ab4f7
BS
2554 ira_object_t obj;
2555 ira_allocno_object_iterator oi;
a49ae217 2556
311aab06
VM
2557 ira_regno_allocno_map[regno] = a;
2558 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2559 ALLOCNO_CAP_MEMBER (a) = NULL;
ac0ab4f7 2560 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
6576d245
RS
2561 OBJECT_CONFLICT_HARD_REGS (obj)
2562 = OBJECT_TOTAL_CONFLICT_HARD_REGS (obj);
311aab06
VM
2563#ifdef STACK_REGS
2564 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2565 ALLOCNO_NO_STACK_REG_P (a) = true;
2566#endif
2567 }
2568 else
3b6d1699
VM
2569 {
2570 ira_remove_allocno_prefs (a);
2571 finish_allocno (a);
2572 }
311aab06
VM
2573 }
2574 if (merged_p)
2575 ira_rebuild_start_finish_chains ();
2576}
2577
2578/* Remove loops from consideration. If ALL_P, we remove all loops except
 2579 the root; otherwise we remove loops for which a separate allocation will
 2580 not improve the result. We have to do this after allocno creation and
1756cb66
VM
 2581 after evaluation of allocno costs and classes, because only then is the
 2582 register pressure known and calculated. */
311aab06
VM
2583static void
2584remove_unnecessary_regions (bool all_p)
2585{
2608d841
VM
2586 if (current_loops == NULL)
2587 return;
311aab06
VM
2588 if (all_p)
2589 mark_all_loops_for_removal ();
2590 else
2591 mark_loops_for_removal ();
8b1c6fd7
DM
2592 children_vec.create (last_basic_block_for_fn (cfun)
2593 + number_of_loops (cfun));
2594 removed_loop_vec.create (last_basic_block_for_fn (cfun)
2595 + number_of_loops (cfun));
9771b263
DN
2596 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
2597 children_vec.release ();
311aab06
VM
2598 if (all_p)
2599 remove_low_level_allocnos ();
2600 else
2601 remove_unnecessary_allocnos ();
9771b263
DN
2602 while (removed_loop_vec.length () > 0)
2603 finish_loop_tree_node (removed_loop_vec.pop ());
2604 removed_loop_vec.release ();
058e97ec
VM
2605}
2606
2607\f
2608
927425df
VM
2609/* At this point a true value of the allocno attribute bad_spill_p means
 2610 that there is an insn where the allocno occurs and where the allocno
67914693
SL
 2611 cannot be used as memory. The function updates the attribute: afterwards
 2612 it can be true only for allocnos which cannot be used as memory in
927425df
VM
 2613 an insn and in whose live ranges there are deaths of other allocnos.
 2614 Spilling allocnos with a true value will not improve the code because
 2615 it will not make other allocnos colorable, and additional reloads
 2616 for the corresponding pseudo will be generated in the reload pass for
 2617 each insn in which it occurs.
 2618
 2619 This is a trick mentioned in a classic article by Chaitin et al.
 2620 which is frequently omitted in other implementations of RA based on
 2621 graph coloring. */
2622static void
2623update_bad_spill_attribute (void)
2624{
2625 int i;
2626 ira_allocno_t a;
2627 ira_allocno_iterator ai;
ac0ab4f7
BS
2628 ira_allocno_object_iterator aoi;
2629 ira_object_t obj;
b14151b5 2630 live_range_t r;
1756cb66 2631 enum reg_class aclass;
927425df
VM
2632 bitmap_head dead_points[N_REG_CLASSES];
2633
1756cb66 2634 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2635 {
1756cb66
VM
2636 aclass = ira_allocno_classes[i];
2637 bitmap_initialize (&dead_points[aclass], &reg_obstack);
927425df
VM
2638 }
2639 FOR_EACH_ALLOCNO (a, ai)
2640 {
1756cb66
VM
2641 aclass = ALLOCNO_CLASS (a);
2642 if (aclass == NO_REGS)
927425df 2643 continue;
ac0ab4f7
BS
2644 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
2645 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1756cb66 2646 bitmap_set_bit (&dead_points[aclass], r->finish);
927425df
VM
2647 }
2648 FOR_EACH_ALLOCNO (a, ai)
2649 {
1756cb66
VM
2650 aclass = ALLOCNO_CLASS (a);
2651 if (aclass == NO_REGS)
927425df
VM
2652 continue;
2653 if (! ALLOCNO_BAD_SPILL_P (a))
2654 continue;
ac0ab4f7 2655 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
927425df 2656 {
ac0ab4f7
BS
2657 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2658 {
2659 for (i = r->start + 1; i < r->finish; i++)
1756cb66 2660 if (bitmap_bit_p (&dead_points[aclass], i))
ac0ab4f7
BS
2661 break;
2662 if (i < r->finish)
2663 break;
2664 }
2665 if (r != NULL)
2666 {
2667 ALLOCNO_BAD_SPILL_P (a) = false;
927425df 2668 break;
ac0ab4f7 2669 }
927425df 2670 }
927425df 2671 }
1756cb66 2672 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2673 {
1756cb66
VM
2674 aclass = ira_allocno_classes[i];
2675 bitmap_clear (&dead_points[aclass]);
927425df
VM
2676 }
2677}
2678
2679\f
2680
058e97ec
VM
2681/* Set up minimal and maximal live range points for allocnos. */
2682static void
2683setup_min_max_allocno_live_range_point (void)
2684{
2685 int i;
2686 ira_allocno_t a, parent_a, cap;
2687 ira_allocno_iterator ai;
ac0ab4f7
BS
2688#ifdef ENABLE_IRA_CHECKING
2689 ira_object_iterator oi;
2690 ira_object_t obj;
2691#endif
b14151b5 2692 live_range_t r;
058e97ec
VM
2693 ira_loop_tree_node_t parent;
2694
2695 FOR_EACH_ALLOCNO (a, ai)
2696 {
ac0ab4f7 2697 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2698
ac0ab4f7
BS
2699 for (i = 0; i < n; i++)
2700 {
2701 ira_object_t obj = ALLOCNO_OBJECT (a, i);
2702 r = OBJECT_LIVE_RANGES (obj);
2703 if (r == NULL)
2704 continue;
2705 OBJECT_MAX (obj) = r->finish;
2706 for (; r->next != NULL; r = r->next)
2707 ;
2708 OBJECT_MIN (obj) = r->start;
2709 }
058e97ec
VM
2710 }
2711 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2712 for (a = ira_regno_allocno_map[i];
2713 a != NULL;
2714 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2715 {
ac0ab4f7
BS
2716 int j;
2717 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2718
ac0ab4f7 2719 for (j = 0; j < n; j++)
058e97ec 2720 {
ac0ab4f7
BS
2721 ira_object_t obj = ALLOCNO_OBJECT (a, j);
2722 ira_object_t parent_obj;
2723
2724 if (OBJECT_MAX (obj) < 0)
ffd464df
VM
2725 {
2726 /* The object is not used and hence does not live. */
2727 ira_assert (OBJECT_LIVE_RANGES (obj) == NULL);
2728 OBJECT_MAX (obj) = 0;
2729 OBJECT_MIN (obj) = 1;
2730 continue;
2731 }
ac0ab4f7
BS
2732 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2733 /* Accumulation of range info. */
2734 if (ALLOCNO_CAP (a) != NULL)
058e97ec 2735 {
ac0ab4f7
BS
2736 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2737 {
2738 ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
2739 if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
2740 OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
2741 if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
2742 OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
2743 }
2744 continue;
058e97ec 2745 }
ac0ab4f7
BS
2746 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
2747 continue;
2748 parent_a = parent->regno_allocno_map[i];
2749 parent_obj = ALLOCNO_OBJECT (parent_a, j);
2750 if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
2751 OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
2752 if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
2753 OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
058e97ec 2754 }
058e97ec
VM
2755 }
2756#ifdef ENABLE_IRA_CHECKING
ac0ab4f7 2757 FOR_EACH_OBJECT (obj, oi)
058e97ec 2758 {
01512446
JJ
2759 if ((OBJECT_MIN (obj) >= 0 && OBJECT_MIN (obj) <= ira_max_point)
2760 && (OBJECT_MAX (obj) >= 0 && OBJECT_MAX (obj) <= ira_max_point))
058e97ec
VM
2761 continue;
2762 gcc_unreachable ();
2763 }
2764#endif
2765}
2766
2767/* Sort objects according to their live ranges. Objects are
1756cb66
VM
 2768 ordered according to their start point (min). Objects with the
 2769 same start are ordered according to their finish point (max).
 2770 Remaining ties are broken by allocno number to keep the order
 2771 stable. */
058e97ec 2772static int
ac0ab4f7 2773object_range_compare_func (const void *v1p, const void *v2p)
058e97ec
VM
2774{
2775 int diff;
a49ae217
BS
2776 ira_object_t obj1 = *(const ira_object_t *) v1p;
2777 ira_object_t obj2 = *(const ira_object_t *) v2p;
2778 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2779 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
058e97ec 2780
a49ae217 2781 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
058e97ec 2782 return diff;
a49ae217 2783 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
058e97ec
VM
2784 return diff;
2785 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2786}
2787
a49ae217 2788/* Sort ira_object_id_map and set up conflict id of allocnos. */
058e97ec 2789static void
a49ae217 2790sort_conflict_id_map (void)
058e97ec
VM
2791{
2792 int i, num;
2793 ira_allocno_t a;
2794 ira_allocno_iterator ai;
2795
2796 num = 0;
2797 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7
BS
2798 {
2799 ira_allocno_object_iterator oi;
2800 ira_object_t obj;
2801
2802 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2803 ira_object_id_map[num++] = obj;
2804 }
85c00e0b
JJ
2805 if (num > 1)
2806 qsort (ira_object_id_map, num, sizeof (ira_object_t),
2807 object_range_compare_func);
058e97ec 2808 for (i = 0; i < num; i++)
a49ae217
BS
2809 {
2810 ira_object_t obj = ira_object_id_map[i];
1756cb66 2811
a49ae217
BS
2812 gcc_assert (obj != NULL);
2813 OBJECT_CONFLICT_ID (obj) = i;
2814 }
2815 for (i = num; i < ira_objects_num; i++)
2816 ira_object_id_map[i] = NULL;
058e97ec
VM
2817}
2818
2819/* Set up minimal and maximal conflict ids of allocnos with which
 2820 a given allocno can conflict. */
2821static void
2822setup_min_max_conflict_allocno_ids (void)
2823{
1756cb66 2824 int aclass;
058e97ec
VM
2825 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
2826 int *live_range_min, *last_lived;
ac0ab4f7 2827 int word0_min, word0_max;
058e97ec 2828 ira_allocno_t a;
ac0ab4f7 2829 ira_allocno_iterator ai;
058e97ec 2830
a49ae217 2831 live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
1756cb66 2832 aclass = -1;
058e97ec 2833 first_not_finished = -1;
a49ae217 2834 for (i = 0; i < ira_objects_num; i++)
058e97ec 2835 {
a49ae217 2836 ira_object_t obj = ira_object_id_map[i];
1756cb66 2837
a49ae217 2838 if (obj == NULL)
058e97ec 2839 continue;
a49ae217
BS
2840
2841 a = OBJECT_ALLOCNO (obj);
2842
1756cb66 2843 if (aclass < 0)
058e97ec 2844 {
1756cb66 2845 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2846 min = i;
2847 first_not_finished = i;
2848 }
2849 else
2850 {
a49ae217 2851 start = OBJECT_MIN (obj);
058e97ec
VM
 2852 /* If we skip an allocno, allocnos with smaller ids will
 2853 also be skipped because of the secondary sorting by
 2854 range finish (see function
ac0ab4f7 2855 object_range_compare_func). */
058e97ec 2856 while (first_not_finished < i
a49ae217 2857 && start > OBJECT_MAX (ira_object_id_map
ac0ab4f7 2858 [first_not_finished]))
058e97ec
VM
2859 first_not_finished++;
2860 min = first_not_finished;
b8698a0f 2861 }
058e97ec
VM
2862 if (min == i)
2863 /* We could increase min further in this case but it is good
2864 enough. */
2865 min++;
a49ae217
BS
2866 live_range_min[i] = OBJECT_MIN (obj);
2867 OBJECT_MIN (obj) = min;
058e97ec
VM
2868 }
2869 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
1756cb66 2870 aclass = -1;
058e97ec 2871 filled_area_start = -1;
a49ae217 2872 for (i = ira_objects_num - 1; i >= 0; i--)
058e97ec 2873 {
a49ae217 2874 ira_object_t obj = ira_object_id_map[i];
1756cb66 2875
a49ae217 2876 if (obj == NULL)
058e97ec 2877 continue;
a49ae217
BS
2878
2879 a = OBJECT_ALLOCNO (obj);
1756cb66 2880 if (aclass < 0)
058e97ec 2881 {
1756cb66 2882 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2883 for (j = 0; j < ira_max_point; j++)
2884 last_lived[j] = -1;
2885 filled_area_start = ira_max_point;
2886 }
2887 min = live_range_min[i];
a49ae217 2888 finish = OBJECT_MAX (obj);
058e97ec
VM
2889 max = last_lived[finish];
2890 if (max < 0)
2891 /* We could decrease max further in this case but it is good
2892 enough. */
a49ae217
BS
2893 max = OBJECT_CONFLICT_ID (obj) - 1;
2894 OBJECT_MAX (obj) = max;
058e97ec
VM
2895 /* In filling, we can go further A range finish to recognize
2896 intersection quickly because if the finish of subsequently
2897 processed allocno (it has smaller conflict id) range is
2898 further A range finish than they are definitely intersected
2899 (the reason for this is the allocnos with bigger conflict id
2900 have their range starts not smaller than allocnos with
2901 smaller ids. */
2902 for (j = min; j < filled_area_start; j++)
2903 last_lived[j] = i;
2904 filled_area_start = min;
2905 }
2906 ira_free (last_lived);
2907 ira_free (live_range_min);
ac0ab4f7
BS
2908
2909 /* For allocnos with more than one object, we may later record extra conflicts in
2910 subobject 0 that we cannot really know about here.
2911 For now, simply widen the min/max range of these subobjects. */
2912
2913 word0_min = INT_MAX;
2914 word0_max = INT_MIN;
2915
2916 FOR_EACH_ALLOCNO (a, ai)
2917 {
2918 int n = ALLOCNO_NUM_OBJECTS (a);
2919 ira_object_t obj0;
1756cb66 2920
ac0ab4f7
BS
2921 if (n < 2)
2922 continue;
2923 obj0 = ALLOCNO_OBJECT (a, 0);
2924 if (OBJECT_CONFLICT_ID (obj0) < word0_min)
2925 word0_min = OBJECT_CONFLICT_ID (obj0);
2926 if (OBJECT_CONFLICT_ID (obj0) > word0_max)
2927 word0_max = OBJECT_CONFLICT_ID (obj0);
2928 }
2929 FOR_EACH_ALLOCNO (a, ai)
2930 {
2931 int n = ALLOCNO_NUM_OBJECTS (a);
2932 ira_object_t obj0;
1756cb66 2933
ac0ab4f7
BS
2934 if (n < 2)
2935 continue;
2936 obj0 = ALLOCNO_OBJECT (a, 0);
2937 if (OBJECT_MIN (obj0) > word0_min)
2938 OBJECT_MIN (obj0) = word0_min;
2939 if (OBJECT_MAX (obj0) < word0_max)
2940 OBJECT_MAX (obj0) = word0_max;
2941 }
058e97ec
VM
2942}
2943
2944\f
2945
2946static void
2947create_caps (void)
2948{
2949 ira_allocno_t a;
2950 ira_allocno_iterator ai;
2951 ira_loop_tree_node_t loop_tree_node;
2952
2953 FOR_EACH_ALLOCNO (a, ai)
2954 {
2955 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2956 continue;
2957 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2958 create_cap_allocno (a);
2959 else if (ALLOCNO_CAP (a) == NULL)
2960 {
2961 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2962 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2963 create_cap_allocno (a);
2964 }
2965 }
2966}
2967
2968\f
2969
2970/* This page contains code transforming a multi-region internal
 2971 representation (IR) into the one-region IR necessary for reload.
 2972 This transformation is called IR flattening. We could simply rebuild
 2973 the IR for one region but we don't do it because it takes a lot of
 2974 time. */
2975
82b33628
VM
2976/* Map: regno -> allocnos which will finally represent the regno for
2977 IR with one region. */
2978static ira_allocno_t *regno_top_level_allocno_map;
2979
029da7d4
BS
2980/* Find the allocno that corresponds to A at a level one higher up in the
2981 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2982ira_allocno_t
2983ira_parent_allocno (ira_allocno_t a)
2984{
2985 ira_loop_tree_node_t parent;
2986
2987 if (ALLOCNO_CAP (a) != NULL)
2988 return NULL;
2989
2990 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2991 if (parent == NULL)
2992 return NULL;
2993
2994 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2995}
2996
2997/* Find the allocno that corresponds to A at a level one higher up in the
2998 loop tree. If ALLOCNO_CAP is set for A, return that. */
2999ira_allocno_t
3000ira_parent_or_cap_allocno (ira_allocno_t a)
3001{
3002 if (ALLOCNO_CAP (a) != NULL)
3003 return ALLOCNO_CAP (a);
3004
3005 return ira_parent_allocno (a);
3006}
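/* A minimal sketch using the two lookup functions above (illustrative only
   and kept out of the build; the helper is hypothetical): climb from an
   allocno towards the root region one loop tree level at a time and return
   the number of levels climbed.  */
#if 0
static int
example_allocno_region_depth (ira_allocno_t a)
{
  int depth = 0;

  for (a = ira_parent_or_cap_allocno (a);
       a != NULL;
       a = ira_parent_or_cap_allocno (a))
    depth++;
  return depth;
}
#endif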
3007
82b33628 3008/* Process all allocnos originating from pseudo REGNO and copy live
801f03e3
VM
3009 ranges, hard reg conflicts, and allocno stack reg attributes from
3010 low level allocnos to final allocnos which are destinations of
3011 removed stores at a loop exit. Return true if we copied live
3012 ranges. */
82b33628 3013static bool
801f03e3 3014copy_info_to_removed_store_destinations (int regno)
82b33628 3015{
504b33d8
ILT
3016 ira_allocno_t a;
3017 ira_allocno_t parent_a = NULL;
82b33628 3018 ira_loop_tree_node_t parent;
82b33628
VM
3019 bool merged_p;
3020
3021 merged_p = false;
3022 for (a = ira_regno_allocno_map[regno];
3023 a != NULL;
3024 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3025 {
1756cb66 3026 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
82b33628
VM
3027 /* This allocno will be removed. */
3028 continue;
ac0ab4f7 3029
82b33628
VM
3030 /* Caps will be removed. */
3031 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
3032 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
3033 parent != NULL;
3034 parent = parent->parent)
3035 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
1756cb66
VM
3036 || (parent_a
3037 == regno_top_level_allocno_map[REGNO
3038 (allocno_emit_reg (parent_a))]
3039 && ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
82b33628
VM
3040 break;
3041 if (parent == NULL || parent_a == NULL)
3042 continue;
ac0ab4f7 3043
3c55880a
BS
3044 copy_allocno_live_ranges (a, parent_a);
3045 merge_hard_reg_conflicts (a, parent_a, true);
ac0ab4f7 3046
ea1c67e6
VM
3047 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3048 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3049 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3050 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3051 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
6c476222
RS
3052 ALLOCNO_CROSSED_CALLS_ABIS (parent_a)
3053 |= ALLOCNO_CROSSED_CALLS_ABIS (a);
44942965
RS
3054 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a)
3055 |= ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a);
ea1c67e6
VM
3056 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3057 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
82b33628
VM
3058 merged_p = true;
3059 }
3060 return merged_p;
058e97ec
VM
3061}
3062
3063/* Flatten the IR. In other words, this function transforms the IR as if
 3064 it were built with one region (without loops). We could make it
 3065 much simpler by rebuilding the IR with one region, but unfortunately that
 3066 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
 3067 IRA_MAX_POINT_BEFORE_EMIT are respectively MAX_REG_NUM () and
 3068 IRA_MAX_POINT before emitting insns on the loop borders. */
3069void
3070ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
3071{
9140d27b 3072 int i, j;
ea1c67e6 3073 bool keep_p;
058e97ec 3074 int hard_regs_num;
82b33628 3075 bool new_pseudos_p, merged_p, mem_dest_p;
058e97ec 3076 unsigned int n;
1756cb66 3077 enum reg_class aclass;
058e97ec 3078 ira_allocno_t a, parent_a, first, second, node_first, node_second;
058e97ec 3079 ira_copy_t cp;
029da7d4 3080 ira_loop_tree_node_t node;
b14151b5 3081 live_range_t r;
058e97ec
VM
3082 ira_allocno_iterator ai;
3083 ira_copy_iterator ci;
058e97ec
VM
3084
3085 regno_top_level_allocno_map
1756cb66
VM
3086 = (ira_allocno_t *) ira_allocate (max_reg_num ()
3087 * sizeof (ira_allocno_t));
058e97ec
VM
3088 memset (regno_top_level_allocno_map, 0,
3089 max_reg_num () * sizeof (ira_allocno_t));
058e97ec 3090 new_pseudos_p = merged_p = false;
0ca9fa56
VM
3091 FOR_EACH_ALLOCNO (a, ai)
3092 {
ac0ab4f7
BS
3093 ira_allocno_object_iterator oi;
3094 ira_object_t obj;
1756cb66 3095
0ca9fa56
VM
3096 if (ALLOCNO_CAP_MEMBER (a) != NULL)
3097 /* Caps are not in the regno allocno maps and they are never
3098 will be transformed into allocnos existing after IR
3099 flattening. */
3100 continue;
ac0ab4f7 3101 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
6576d245
RS
3102 OBJECT_TOTAL_CONFLICT_HARD_REGS (obj)
3103 = OBJECT_CONFLICT_HARD_REGS (obj);
0ca9fa56
VM
3104#ifdef STACK_REGS
3105 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
3106#endif
3107 }
058e97ec
VM
3108 /* Fix final allocno attributes. */
3109 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
3110 {
0ca9fa56 3111 mem_dest_p = false;
058e97ec
VM
3112 for (a = ira_regno_allocno_map[i];
3113 a != NULL;
3114 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3115 {
1756cb66
VM
3116 ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);
3117
058e97ec 3118 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
1756cb66 3119 if (data->somewhere_renamed_p)
058e97ec 3120 new_pseudos_p = true;
029da7d4
BS
3121 parent_a = ira_parent_allocno (a);
3122 if (parent_a == NULL)
058e97ec
VM
3123 {
3124 ALLOCNO_COPIES (a) = NULL;
1756cb66 3125 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec
VM
3126 continue;
3127 }
3128 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
b8698a0f 3129
1756cb66 3130 if (data->mem_optimized_dest != NULL)
82b33628 3131 mem_dest_p = true;
1756cb66
VM
3132 parent_data = ALLOCNO_EMIT_DATA (parent_a);
3133 if (REGNO (data->reg) == REGNO (parent_data->reg))
058e97ec 3134 {
3c55880a
BS
3135 merge_hard_reg_conflicts (a, parent_a, true);
3136 move_allocno_live_ranges (a, parent_a);
058e97ec 3137 merged_p = true;
1756cb66
VM
3138 parent_data->mem_optimized_dest_p
3139 = (parent_data->mem_optimized_dest_p
3140 || data->mem_optimized_dest_p);
058e97ec
VM
3141 continue;
3142 }
3143 new_pseudos_p = true;
058e97ec
VM
3144 for (;;)
3145 {
058e97ec
VM
3146 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
3147 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
ea1c67e6
VM
3148 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
3149 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3150 -= ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3151 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3152 -= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
6c476222
RS
3153 /* Assume that ALLOCNO_CROSSED_CALLS_ABIS and
3154 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS stay the same.
3155 We'd need to rebuild the IR to do better. */
ea1c67e6
VM
3156 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3157 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
058e97ec
VM
3158 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
3159 && ALLOCNO_NREFS (parent_a) >= 0
3160 && ALLOCNO_FREQ (parent_a) >= 0);
1756cb66
VM
3161 aclass = ALLOCNO_CLASS (parent_a);
3162 hard_regs_num = ira_class_hard_regs_num[aclass];
058e97ec
VM
3163 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
3164 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
3165 for (j = 0; j < hard_regs_num; j++)
3166 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
3167 -= ALLOCNO_HARD_REG_COSTS (a)[j];
3168 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
3169 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
3170 for (j = 0; j < hard_regs_num; j++)
3171 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
3172 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
1756cb66
VM
3173 ALLOCNO_CLASS_COST (parent_a)
3174 -= ALLOCNO_CLASS_COST (a);
058e97ec 3175 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
029da7d4
BS
3176 parent_a = ira_parent_allocno (parent_a);
3177 if (parent_a == NULL)
058e97ec
VM
3178 break;
3179 }
058e97ec 3180 ALLOCNO_COPIES (a) = NULL;
1756cb66 3181 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec 3182 }
801f03e3 3183 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
82b33628 3184 merged_p = true;
058e97ec 3185 }
058e97ec
VM
3186 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
3187 if (merged_p || ira_max_point_before_emit != ira_max_point)
3188 ira_rebuild_start_finish_chains ();
3189 if (new_pseudos_p)
3190 {
9140d27b
BS
3191 sparseset objects_live;
3192
058e97ec
VM
3193 /* Rebuild conflicts. */
3194 FOR_EACH_ALLOCNO (a, ai)
3195 {
ac0ab4f7
BS
3196 ira_allocno_object_iterator oi;
3197 ira_object_t obj;
1756cb66
VM
3198
3199 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3200 || ALLOCNO_CAP_MEMBER (a) != NULL)
3201 continue;
ac0ab4f7
BS
3202 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
3203 {
3204 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3205 ira_assert (r->object == obj);
3206 clear_conflicts (obj);
3207 }
058e97ec 3208 }
9140d27b 3209 objects_live = sparseset_alloc (ira_objects_num);
058e97ec
VM
3210 for (i = 0; i < ira_max_point; i++)
3211 {
3212 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
3213 {
9140d27b 3214 ira_object_t obj = r->object;
1756cb66 3215
9140d27b 3216 a = OBJECT_ALLOCNO (obj);
1756cb66 3217 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3218 || ALLOCNO_CAP_MEMBER (a) != NULL)
3219 continue;
ac0ab4f7 3220
1756cb66 3221 aclass = ALLOCNO_CLASS (a);
9140d27b 3222 EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
058e97ec 3223 {
9140d27b
BS
3224 ira_object_t live_obj = ira_object_id_map[n];
3225 ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
1756cb66
VM
3226 enum reg_class live_aclass = ALLOCNO_CLASS (live_a);
3227
3228 if (ira_reg_classes_intersect_p[aclass][live_aclass]
058e97ec 3229 /* Don't set up conflict for the allocno with itself. */
9140d27b
BS
3230 && live_a != a)
3231 ira_add_conflict (obj, live_obj);
058e97ec 3232 }
3feb0298 3233 sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
058e97ec 3234 }
b8698a0f 3235
058e97ec 3236 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
9140d27b 3237 sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
058e97ec 3238 }
9140d27b 3239 sparseset_free (objects_live);
058e97ec
VM
3240 compress_conflict_vecs ();
3241 }
 3242 /* Mark some copies for removal and change allocnos in the
 3243 remaining copies. */
3244 FOR_EACH_COPY (cp, ci)
3245 {
3246 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
3247 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
3248 {
3249 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3250 fprintf
3251 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
3252 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
1756cb66
VM
3253 ALLOCNO_NUM (cp->first),
3254 REGNO (allocno_emit_reg (cp->first)),
058e97ec 3255 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
1756cb66
VM
3256 ALLOCNO_NUM (cp->second),
3257 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3258 cp->loop_tree_node = NULL;
3259 continue;
3260 }
1756cb66
VM
3261 first
3262 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
3263 second
3264 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
058e97ec
VM
3265 node = cp->loop_tree_node;
3266 if (node == NULL)
3267 keep_p = true; /* It copy generated in ira-emit.c. */
3268 else
3269 {
 3270 /* Check that the copy was not propagated from a level on
 3271 which we will have different pseudos. */
3272 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
3273 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
1756cb66
VM
3274 keep_p = ((REGNO (allocno_emit_reg (first))
3275 == REGNO (allocno_emit_reg (node_first)))
3276 && (REGNO (allocno_emit_reg (second))
3277 == REGNO (allocno_emit_reg (node_second))));
058e97ec
VM
3278 }
3279 if (keep_p)
3280 {
3281 cp->loop_tree_node = ira_loop_tree_root;
3282 cp->first = first;
3283 cp->second = second;
3284 }
3285 else
3286 {
3287 cp->loop_tree_node = NULL;
3288 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3289 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
3290 cp->num, ALLOCNO_NUM (cp->first),
1756cb66
VM
3291 REGNO (allocno_emit_reg (cp->first)),
3292 ALLOCNO_NUM (cp->second),
3293 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3294 }
3295 }
3296 /* Remove unnecessary allocnos on lower levels of the loop tree. */
3297 FOR_EACH_ALLOCNO (a, ai)
3298 {
1756cb66 3299 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3300 || ALLOCNO_CAP_MEMBER (a) != NULL)
3301 {
3302 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3303 fprintf (ira_dump_file, " Remove a%dr%d\n",
1756cb66 3304 ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
3b6d1699 3305 ira_remove_allocno_prefs (a);
058e97ec
VM
3306 finish_allocno (a);
3307 continue;
3308 }
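	  /* The allocno survives flattening: attach it to the root node,
	     switch its regno to that of the register created by ira-emit.c
	     and drop its cap before resetting the cost data below.  */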
3309 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
1756cb66 3310 ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
058e97ec 3311 ALLOCNO_CAP (a) = NULL;
cb1ca6ac 3312 /* Restore updated costs for assignments from reload. */
058e97ec 3313 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
1756cb66 3314 ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
058e97ec
VM
3315 if (! ALLOCNO_ASSIGNED_P (a))
3316 ira_free_allocno_updated_costs (a);
3317 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
3318 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
3319 }
3320 /* Remove unnecessary copies. */
3321 FOR_EACH_COPY (cp, ci)
3322 {
3323 if (cp->loop_tree_node == NULL)
3324 {
3325 ira_copies[cp->num] = NULL;
3326 finish_copy (cp);
3327 continue;
3328 }
3329 ira_assert
3330 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
3331 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
3b6d1699
VM
3332 add_allocno_copy_to_list (cp);
3333 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
3334 }
3335 rebuild_regno_allocno_maps ();
b15a7ae6
VM
3336 if (ira_max_point != ira_max_point_before_emit)
3337 ira_compress_allocno_live_ranges ();
058e97ec
VM
3338 ira_free (regno_top_level_allocno_map);
3339}
3340
3341\f
3342
3343#ifdef ENABLE_IRA_CHECKING
 3344/* Check creation of all allocnos. Each allocno on a lower level should
 3345 have a corresponding allocno or cap on all upper levels. */
3346static void
3347check_allocno_creation (void)
3348{
3349 ira_allocno_t a;
3350 ira_allocno_iterator ai;
3351 ira_loop_tree_node_t loop_tree_node;
3352
3353 FOR_EACH_ALLOCNO (a, ai)
3354 {
49d988e7
VM
3355 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
3356 ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
3357 ALLOCNO_NUM (a)));
3358 if (loop_tree_node == ira_loop_tree_root)
058e97ec
VM
3359 continue;
3360 if (ALLOCNO_CAP_MEMBER (a) != NULL)
49d988e7 3361 ira_assert (ALLOCNO_CAP (a) != NULL);
058e97ec 3362 else if (ALLOCNO_CAP (a) == NULL)
49d988e7
VM
3363 ira_assert (loop_tree_node->parent
3364 ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
3365 && bitmap_bit_p (loop_tree_node->border_allocnos,
3366 ALLOCNO_NUM (a)));
058e97ec
VM
3367 }
3368}
3369#endif
3370
4ac293e2
VM
3371/* Identify allocnos which prefer a register class with a single hard register.
3372 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
3373 less likely to use the preferred singleton register. */
3374static void
3375update_conflict_hard_reg_costs (void)
3376{
3377 ira_allocno_t a;
3378 ira_allocno_iterator ai;
3379 int i, index, min;
3380
3381 FOR_EACH_ALLOCNO (a, ai)
3382 {
6f76a878
AS
3383 reg_class_t aclass = ALLOCNO_CLASS (a);
3384 reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
c9d74da6
RS
3385 int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
3386 if (singleton < 0)
4ac293e2 3387 continue;
c9d74da6 3388 index = ira_class_hard_reg_index[(int) aclass][singleton];
4ac293e2
VM
3389 if (index < 0)
3390 continue;
3391 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
3392 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
3393 continue;
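      /* Find MIN, the smallest hard register cost that exceeds the class
	 cost, i.e. the cost of the cheapest alternative to the preferred
	 singleton register.  */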
3394 min = INT_MAX;
6f76a878 3395 for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
1756cb66 3396 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
4ac293e2
VM
3397 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
3398 min = ALLOCNO_HARD_REG_COSTS (a)[i];
3399 if (min == INT_MAX)
3400 continue;
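      /* Lower the singleton's entry in the conflict cost vector by the
	 advantage MIN - ALLOCNO_CLASS_COST; as the function comment above
	 explains, this makes the singleton register less attractive to
	 conflicting allocnos during coloring.  */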
3401 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 3402 aclass, 0);
4ac293e2 3403 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
1756cb66 3404 -= min - ALLOCNO_CLASS_COST (a);
4ac293e2
VM
3405 }
3406}
3407
058e97ec 3408/* Create an internal representation (IR) for IRA (allocnos, copies,
2608d841
VM
 3409 loop tree nodes). The function returns TRUE if we generate a loop
 3410 structure (besides the nodes representing the whole function and the
 3411 basic blocks) for regional allocation. A true return means that we
 3412 really need to flatten the IR before reload. */
058e97ec 3413bool
2608d841 3414ira_build (void)
058e97ec 3415{
2608d841 3416 bool loops_p;
058e97ec 3417
2608d841 3418 df_analyze ();
058e97ec
VM
3419 initiate_cost_vectors ();
3420 initiate_allocnos ();
3b6d1699 3421 initiate_prefs ();
058e97ec 3422 initiate_copies ();
2608d841 3423 create_loop_tree_nodes ();
058e97ec
VM
3424 form_loop_tree ();
3425 create_allocnos ();
3426 ira_costs ();
a49ae217 3427 create_allocno_objects ();
058e97ec 3428 ira_create_allocno_live_ranges ();
311aab06 3429 remove_unnecessary_regions (false);
b15a7ae6 3430 ira_compress_allocno_live_ranges ();
927425df 3431 update_bad_spill_attribute ();
058e97ec
VM
3432 loops_p = more_one_region_p ();
3433 if (loops_p)
3434 {
3435 propagate_allocno_info ();
3436 create_caps ();
3437 }
1756cb66 3438 ira_tune_allocno_costs ();
058e97ec
VM
3439#ifdef ENABLE_IRA_CHECKING
3440 check_allocno_creation ();
3441#endif
3442 setup_min_max_allocno_live_range_point ();
a49ae217 3443 sort_conflict_id_map ();
058e97ec
VM
3444 setup_min_max_conflict_allocno_ids ();
3445 ira_build_conflicts ();
4ac293e2 3446 update_conflict_hard_reg_costs ();
311aab06
VM
3447 if (! ira_conflicts_p)
3448 {
3449 ira_allocno_t a;
3450 ira_allocno_iterator ai;
3451
 3452 /* Remove all regions but the root one. */
3453 if (loops_p)
3454 {
3455 remove_unnecessary_regions (true);
3456 loops_p = false;
3457 }
 3458 /* We don't save hard registers around calls for fast allocation
 3459 -- add caller-clobbered registers as conflicting ones to
 3460 allocnos crossing calls. */
3461 FOR_EACH_ALLOCNO (a, ai)
3462 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
6c476222 3463 ior_hard_reg_conflicts (a, ira_need_caller_save_regs (a));
311aab06 3464 }
4cda38d5
VM
3465 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3466 print_copies (ira_dump_file);
3b6d1699
VM
3467 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3468 print_prefs (ira_dump_file);
058e97ec
VM
3469 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
3470 {
ac0ab4f7 3471 int n, nr, nr_big;
058e97ec 3472 ira_allocno_t a;
b14151b5 3473 live_range_t r;
058e97ec
VM
3474 ira_allocno_iterator ai;
3475
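      /* Gather statistics for the dump: N counts conflicts, NR counts
	 live ranges and NR_BIG counts allocnos with more than one
	 object.  */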
3476 n = 0;
ac0ab4f7
BS
3477 nr = 0;
3478 nr_big = 0;
058e97ec 3479 FOR_EACH_ALLOCNO (a, ai)
a49ae217 3480 {
ac0ab4f7 3481 int j, nobj = ALLOCNO_NUM_OBJECTS (a);
1756cb66 3482
ac0ab4f7
BS
3483 if (nobj > 1)
3484 nr_big++;
3485 for (j = 0; j < nobj; j++)
3486 {
3487 ira_object_t obj = ALLOCNO_OBJECT (a, j);
3488 n += OBJECT_NUM_CONFLICTS (obj);
3489 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3490 nr++;
3491 }
a49ae217 3492 }
058e97ec 3493 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
0fc822d0 3494 current_loops == NULL ? 1 : number_of_loops (cfun),
0cae8d31 3495 n_basic_blocks_for_fn (cfun), ira_max_point);
058e97ec 3496 fprintf (ira_dump_file,
ac0ab4f7
BS
3497 " allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
3498 ira_allocnos_num, nr_big, ira_copies_num, n, nr);
058e97ec
VM
3499 }
3500 return loops_p;
3501}
3502
 3503/* Release the data created by the function ira_build. */
3504void
3505ira_destroy (void)
3506{
3507 finish_loop_tree_nodes ();
3b6d1699 3508 finish_prefs ();
058e97ec
VM
3509 finish_copies ();
3510 finish_allocnos ();
3511 finish_cost_vectors ();
3512 ira_finish_allocno_live_ranges ();
3513}