]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ira-build.c
tree-core.h: Include symtab.h.
[thirdparty/gcc.git] / gcc / ira-build.c
CommitLineData
058e97ec 1/* Building internal representation for IRA.
5624e564 2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
058e97ec
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
058e97ec 25#include "rtl.h"
c7131fb2 26#include "df.h"
058e97ec
VM
27#include "tm_p.h"
28#include "target.h"
29#include "regs.h"
30#include "flags.h"
058e97ec
VM
31#include "insn-config.h"
32#include "recog.h"
718f9c0f 33#include "diagnostic-core.h"
058e97ec 34#include "params.h"
058e97ec
VM
35#include "reload.h"
36#include "sparseset.h"
c7131fb2
AM
37#include "cfgloop.h"
38#include "ira.h"
39#include "alloc-pool.h"
058e97ec 40#include "ira-int.h"
5936d944 41#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
058e97ec 42
070a1983 43static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx_insn *,
058e97ec
VM
44 ira_loop_tree_node_t);
45
46/* The root of the loop tree corresponding to the all function. */
47ira_loop_tree_node_t ira_loop_tree_root;
48
49/* Height of the loop tree. */
50int ira_loop_tree_height;
51
52/* All nodes representing basic blocks are referred through the
53 following array. We can not use basic block member `aux' for this
54 because it is used for insertion of insns on edges. */
55ira_loop_tree_node_t ira_bb_nodes;
56
57/* All nodes representing loops are referred through the following
58 array. */
59ira_loop_tree_node_t ira_loop_nodes;
60
caff7edf
JJ
61/* And size of the ira_loop_nodes array. */
62unsigned int ira_loop_nodes_count;
63
b8698a0f 64/* Map regno -> allocnos with given regno (see comments for
058e97ec
VM
65 allocno member `next_regno_allocno'). */
66ira_allocno_t *ira_regno_allocno_map;
67
68/* Array of references to all allocnos. The order number of the
69 allocno corresponds to the index in the array. Removed allocnos
70 have NULL element value. */
71ira_allocno_t *ira_allocnos;
72
73/* Sizes of the previous array. */
74int ira_allocnos_num;
75
a49ae217
BS
76/* Count of conflict record structures we've created, used when creating
77 a new conflict id. */
78int ira_objects_num;
79
80/* Map a conflict id to its conflict record. */
81ira_object_t *ira_object_id_map;
058e97ec 82
3b6d1699
VM
83/* Array of references to all allocno preferences. The order number
84 of the preference corresponds to the index in the array. */
85ira_pref_t *ira_prefs;
86
87/* Size of the previous array. */
88int ira_prefs_num;
89
058e97ec
VM
90/* Array of references to all copies. The order number of the copy
91 corresponds to the index in the array. Removed copies have NULL
92 element value. */
93ira_copy_t *ira_copies;
94
95/* Size of the previous array. */
96int ira_copies_num;
97
98\f
99
100/* LAST_BASIC_BLOCK before generating additional insns because of live
101 range splitting. Emitting insns on a critical edge creates a new
102 basic block. */
103static int last_basic_block_before_change;
104
2608d841
VM
105/* Initialize some members in loop tree node NODE. Use LOOP_NUM for
106 the member loop_num. */
058e97ec 107static void
2608d841
VM
108init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
109{
110 int max_regno = max_reg_num ();
111
112 node->regno_allocno_map
113 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
114 memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
115 memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
116 node->all_allocnos = ira_allocate_bitmap ();
117 node->modified_regnos = ira_allocate_bitmap ();
118 node->border_allocnos = ira_allocate_bitmap ();
119 node->local_copies = ira_allocate_bitmap ();
120 node->loop_num = loop_num;
121 node->children = NULL;
122 node->subloops = NULL;
123}
124
125
126/* The following function allocates the loop tree nodes. If
127 CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
128 the root which corresponds the all function) will be not allocated
129 but nodes will still be allocated for basic blocks. */
130static void
131create_loop_tree_nodes (void)
058e97ec
VM
132{
133 unsigned int i, j;
058e97ec
VM
134 bool skip_p;
135 edge_iterator ei;
136 edge e;
9771b263 137 vec<edge> edges;
058e97ec
VM
138 loop_p loop;
139
140 ira_bb_nodes
141 = ((struct ira_loop_tree_node *)
8b1c6fd7
DM
142 ira_allocate (sizeof (struct ira_loop_tree_node)
143 * last_basic_block_for_fn (cfun)));
144 last_basic_block_before_change = last_basic_block_for_fn (cfun);
145 for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
058e97ec
VM
146 {
147 ira_bb_nodes[i].regno_allocno_map = NULL;
148 memset (ira_bb_nodes[i].reg_pressure, 0,
149 sizeof (ira_bb_nodes[i].reg_pressure));
49d988e7 150 ira_bb_nodes[i].all_allocnos = NULL;
058e97ec
VM
151 ira_bb_nodes[i].modified_regnos = NULL;
152 ira_bb_nodes[i].border_allocnos = NULL;
153 ira_bb_nodes[i].local_copies = NULL;
154 }
2608d841
VM
155 if (current_loops == NULL)
156 {
caff7edf 157 ira_loop_nodes_count = 1;
2608d841
VM
158 ira_loop_nodes = ((struct ira_loop_tree_node *)
159 ira_allocate (sizeof (struct ira_loop_tree_node)));
160 init_loop_tree_node (ira_loop_nodes, 0);
161 return;
162 }
0fc822d0 163 ira_loop_nodes_count = number_of_loops (cfun);
058e97ec
VM
164 ira_loop_nodes = ((struct ira_loop_tree_node *)
165 ira_allocate (sizeof (struct ira_loop_tree_node)
caff7edf 166 * ira_loop_nodes_count));
0fc822d0 167 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
058e97ec 168 {
661bc682 169 if (loop_outer (loop) != NULL)
058e97ec
VM
170 {
171 ira_loop_nodes[i].regno_allocno_map = NULL;
058e97ec
VM
172 skip_p = false;
173 FOR_EACH_EDGE (e, ei, loop->header->preds)
174 if (e->src != loop->latch
175 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
176 {
177 skip_p = true;
178 break;
179 }
180 if (skip_p)
181 continue;
182 edges = get_loop_exit_edges (loop);
9771b263 183 FOR_EACH_VEC_ELT (edges, j, e)
058e97ec
VM
184 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
185 {
186 skip_p = true;
187 break;
188 }
9771b263 189 edges.release ();
058e97ec
VM
190 if (skip_p)
191 continue;
192 }
2608d841 193 init_loop_tree_node (&ira_loop_nodes[i], loop->num);
058e97ec
VM
194 }
195}
196
197/* The function returns TRUE if there are more one allocation
198 region. */
199static bool
200more_one_region_p (void)
201{
202 unsigned int i;
203 loop_p loop;
204
2608d841 205 if (current_loops != NULL)
0fc822d0 206 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
2608d841
VM
207 if (ira_loop_nodes[i].regno_allocno_map != NULL
208 && ira_loop_tree_root != &ira_loop_nodes[i])
209 return true;
058e97ec
VM
210 return false;
211}
212
213/* Free the loop tree node of a loop. */
214static void
215finish_loop_tree_node (ira_loop_tree_node_t loop)
216{
217 if (loop->regno_allocno_map != NULL)
218 {
219 ira_assert (loop->bb == NULL);
220 ira_free_bitmap (loop->local_copies);
221 ira_free_bitmap (loop->border_allocnos);
222 ira_free_bitmap (loop->modified_regnos);
49d988e7 223 ira_free_bitmap (loop->all_allocnos);
058e97ec
VM
224 ira_free (loop->regno_allocno_map);
225 loop->regno_allocno_map = NULL;
226 }
227}
228
229/* Free the loop tree nodes. */
230static void
231finish_loop_tree_nodes (void)
232{
233 unsigned int i;
058e97ec 234
caff7edf
JJ
235 for (i = 0; i < ira_loop_nodes_count; i++)
236 finish_loop_tree_node (&ira_loop_nodes[i]);
058e97ec
VM
237 ira_free (ira_loop_nodes);
238 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
239 {
240 if (ira_bb_nodes[i].local_copies != NULL)
241 ira_free_bitmap (ira_bb_nodes[i].local_copies);
242 if (ira_bb_nodes[i].border_allocnos != NULL)
243 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
244 if (ira_bb_nodes[i].modified_regnos != NULL)
245 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
49d988e7
VM
246 if (ira_bb_nodes[i].all_allocnos != NULL)
247 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
058e97ec
VM
248 if (ira_bb_nodes[i].regno_allocno_map != NULL)
249 ira_free (ira_bb_nodes[i].regno_allocno_map);
250 }
251 ira_free (ira_bb_nodes);
252}
253
254\f
255
256/* The following recursive function adds LOOP to the loop tree
2608d841
VM
257 hierarchy. LOOP is added only once. If LOOP is NULL we adding
258 loop designating the whole function when CFG loops are not
259 built. */
058e97ec
VM
260static void
261add_loop_to_tree (struct loop *loop)
262{
2608d841 263 int loop_num;
058e97ec
VM
264 struct loop *parent;
265 ira_loop_tree_node_t loop_node, parent_node;
266
267 /* We can not use loop node access macros here because of potential
268 checking and because the nodes are not initialized enough
269 yet. */
2608d841 270 if (loop != NULL && loop_outer (loop) != NULL)
058e97ec 271 add_loop_to_tree (loop_outer (loop));
2608d841
VM
272 loop_num = loop != NULL ? loop->num : 0;
273 if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
274 && ira_loop_nodes[loop_num].children == NULL)
058e97ec
VM
275 {
276 /* We have not added loop node to the tree yet. */
2608d841 277 loop_node = &ira_loop_nodes[loop_num];
058e97ec
VM
278 loop_node->loop = loop;
279 loop_node->bb = NULL;
2608d841
VM
280 if (loop == NULL)
281 parent = NULL;
282 else
283 {
284 for (parent = loop_outer (loop);
285 parent != NULL;
286 parent = loop_outer (parent))
287 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
288 break;
289 }
058e97ec
VM
290 if (parent == NULL)
291 {
292 loop_node->next = NULL;
293 loop_node->subloop_next = NULL;
294 loop_node->parent = NULL;
295 }
296 else
297 {
298 parent_node = &ira_loop_nodes[parent->num];
299 loop_node->next = parent_node->children;
300 parent_node->children = loop_node;
301 loop_node->subloop_next = parent_node->subloops;
302 parent_node->subloops = loop_node;
303 loop_node->parent = parent_node;
304 }
305 }
306}
307
308/* The following recursive function sets up levels of nodes of the
309 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
310 The function returns maximal value of level in the tree + 1. */
311static int
312setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
313{
314 int height, max_height;
315 ira_loop_tree_node_t subloop_node;
316
317 ira_assert (loop_node->bb == NULL);
318 loop_node->level = level;
319 max_height = level + 1;
320 for (subloop_node = loop_node->subloops;
321 subloop_node != NULL;
322 subloop_node = subloop_node->subloop_next)
323 {
324 ira_assert (subloop_node->bb == NULL);
325 height = setup_loop_tree_level (subloop_node, level + 1);
326 if (height > max_height)
327 max_height = height;
328 }
329 return max_height;
330}
331
332/* Create the loop tree. The algorithm is designed to provide correct
333 order of loops (they are ordered by their last loop BB) and basic
334 blocks in the chain formed by member next. */
335static void
336form_loop_tree (void)
337{
058e97ec
VM
338 basic_block bb;
339 struct loop *parent;
340 ira_loop_tree_node_t bb_node, loop_node;
058e97ec
VM
341
342 /* We can not use loop/bb node access macros because of potential
343 checking and because the nodes are not initialized enough
344 yet. */
11cd3bed 345 FOR_EACH_BB_FN (bb, cfun)
058e97ec
VM
346 {
347 bb_node = &ira_bb_nodes[bb->index];
348 bb_node->bb = bb;
349 bb_node->loop = NULL;
350 bb_node->subloops = NULL;
351 bb_node->children = NULL;
352 bb_node->subloop_next = NULL;
353 bb_node->next = NULL;
2608d841
VM
354 if (current_loops == NULL)
355 parent = NULL;
356 else
357 {
358 for (parent = bb->loop_father;
359 parent != NULL;
360 parent = loop_outer (parent))
361 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
362 break;
363 }
058e97ec 364 add_loop_to_tree (parent);
2608d841 365 loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
058e97ec
VM
366 bb_node->next = loop_node->children;
367 bb_node->parent = loop_node;
368 loop_node->children = bb_node;
369 }
2608d841 370 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
058e97ec
VM
371 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
372 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
373}
374
375\f
376
377/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
378 tree nodes. */
379static void
380rebuild_regno_allocno_maps (void)
381{
382 unsigned int l;
383 int max_regno, regno;
384 ira_allocno_t a;
385 ira_loop_tree_node_t loop_tree_node;
386 loop_p loop;
387 ira_allocno_iterator ai;
388
2608d841 389 ira_assert (current_loops != NULL);
058e97ec 390 max_regno = max_reg_num ();
0fc822d0 391 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), l, loop)
058e97ec
VM
392 if (ira_loop_nodes[l].regno_allocno_map != NULL)
393 {
394 ira_free (ira_loop_nodes[l].regno_allocno_map);
395 ira_loop_nodes[l].regno_allocno_map
396 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
397 * max_regno);
398 memset (ira_loop_nodes[l].regno_allocno_map, 0,
399 sizeof (ira_allocno_t) * max_regno);
400 }
401 ira_free (ira_regno_allocno_map);
402 ira_regno_allocno_map
403 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
404 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
405 FOR_EACH_ALLOCNO (a, ai)
406 {
407 if (ALLOCNO_CAP_MEMBER (a) != NULL)
408 /* Caps are not in the regno allocno maps. */
409 continue;
410 regno = ALLOCNO_REGNO (a);
411 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
412 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
413 ira_regno_allocno_map[regno] = a;
414 if (loop_tree_node->regno_allocno_map[regno] == NULL)
415 /* Remember that we can create temporary allocnos to break
416 cycles in register shuffle. */
417 loop_tree_node->regno_allocno_map[regno] = a;
418 }
419}
058e97ec
VM
420\f
421
a49ae217 422/* Pools for allocnos, allocno live ranges and objects. */
0b470bae
ML
423static pool_allocator<live_range> live_range_pool ("live ranges", 100);
424static pool_allocator<ira_allocno> allocno_pool ("allocnos", 100);
425static pool_allocator<ira_object> object_pool ("objects", 100);
058e97ec
VM
426
427/* Vec containing references to all created allocnos. It is a
428 container of array allocnos. */
9771b263 429static vec<ira_allocno_t> allocno_vec;
058e97ec 430
a49ae217
BS
431/* Vec containing references to all created ira_objects. It is a
432 container of ira_object_id_map. */
9771b263 433static vec<ira_object_t> ira_object_id_map_vec;
058e97ec
VM
434
435/* Initialize data concerning allocnos. */
436static void
437initiate_allocnos (void)
438{
9771b263 439 allocno_vec.create (max_reg_num () * 2);
058e97ec
VM
440 ira_allocnos = NULL;
441 ira_allocnos_num = 0;
a49ae217 442 ira_objects_num = 0;
9771b263 443 ira_object_id_map_vec.create (max_reg_num () * 2);
a49ae217 444 ira_object_id_map = NULL;
058e97ec 445 ira_regno_allocno_map
1756cb66
VM
446 = (ira_allocno_t *) ira_allocate (max_reg_num ()
447 * sizeof (ira_allocno_t));
058e97ec
VM
448 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
449}
450
a49ae217
BS
451/* Create and return an object corresponding to a new allocno A. */
452static ira_object_t
ac0ab4f7 453ira_create_object (ira_allocno_t a, int subword)
a49ae217 454{
1756cb66 455 enum reg_class aclass = ALLOCNO_CLASS (a);
0b470bae 456 ira_object_t obj = object_pool.allocate ();
a49ae217
BS
457
458 OBJECT_ALLOCNO (obj) = a;
ac0ab4f7 459 OBJECT_SUBWORD (obj) = subword;
a49ae217
BS
460 OBJECT_CONFLICT_ID (obj) = ira_objects_num;
461 OBJECT_CONFLICT_VEC_P (obj) = false;
462 OBJECT_CONFLICT_ARRAY (obj) = NULL;
463 OBJECT_NUM_CONFLICTS (obj) = 0;
464 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
465 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
466 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1756cb66 467 reg_class_contents[aclass]);
a49ae217 468 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1756cb66 469 reg_class_contents[aclass]);
a49ae217
BS
470 OBJECT_MIN (obj) = INT_MAX;
471 OBJECT_MAX (obj) = -1;
9140d27b 472 OBJECT_LIVE_RANGES (obj) = NULL;
a49ae217 473
9771b263 474 ira_object_id_map_vec.safe_push (obj);
a49ae217 475 ira_object_id_map
9771b263
DN
476 = ira_object_id_map_vec.address ();
477 ira_objects_num = ira_object_id_map_vec.length ();
ac0ab4f7 478
a49ae217
BS
479 return obj;
480}
481
058e97ec
VM
482/* Create and return the allocno corresponding to REGNO in
483 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
484 same regno if CAP_P is FALSE. */
485ira_allocno_t
1756cb66
VM
486ira_create_allocno (int regno, bool cap_p,
487 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
488{
489 ira_allocno_t a;
490
0b470bae 491 a = allocno_pool.allocate ();
058e97ec
VM
492 ALLOCNO_REGNO (a) = regno;
493 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
494 if (! cap_p)
495 {
496 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
497 ira_regno_allocno_map[regno] = a;
498 if (loop_tree_node->regno_allocno_map[regno] == NULL)
499 /* Remember that we can create temporary allocnos to break
500 cycles in register shuffle on region borders (see
501 ira-emit.c). */
502 loop_tree_node->regno_allocno_map[regno] = a;
503 }
504 ALLOCNO_CAP (a) = NULL;
505 ALLOCNO_CAP_MEMBER (a) = NULL;
506 ALLOCNO_NUM (a) = ira_allocnos_num;
49d988e7 507 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
058e97ec 508 ALLOCNO_NREFS (a) = 0;
854bd721 509 ALLOCNO_FREQ (a) = 0;
058e97ec
VM
510 ALLOCNO_HARD_REGNO (a) = -1;
511 ALLOCNO_CALL_FREQ (a) = 0;
512 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
e384e6b5 513 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
c2ba7e7a 514 CLEAR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
058e97ec
VM
515#ifdef STACK_REGS
516 ALLOCNO_NO_STACK_REG_P (a) = false;
517 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
518#endif
058e97ec 519 ALLOCNO_DONT_REASSIGN_P (a) = false;
927425df 520 ALLOCNO_BAD_SPILL_P (a) = false;
058e97ec 521 ALLOCNO_ASSIGNED_P (a) = false;
058e97ec 522 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
d1bb282e 523 ALLOCNO_WMODE (a) = ALLOCNO_MODE (a);
3b6d1699 524 ALLOCNO_PREFS (a) = NULL;
058e97ec
VM
525 ALLOCNO_COPIES (a) = NULL;
526 ALLOCNO_HARD_REG_COSTS (a) = NULL;
527 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
528 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
529 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1756cb66
VM
530 ALLOCNO_CLASS (a) = NO_REGS;
531 ALLOCNO_UPDATED_CLASS_COST (a) = 0;
532 ALLOCNO_CLASS_COST (a) = 0;
058e97ec
VM
533 ALLOCNO_MEMORY_COST (a) = 0;
534 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
535 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
ac0ab4f7 536 ALLOCNO_NUM_OBJECTS (a) = 0;
a49ae217 537
1756cb66 538 ALLOCNO_ADD_DATA (a) = NULL;
9771b263
DN
539 allocno_vec.safe_push (a);
540 ira_allocnos = allocno_vec.address ();
541 ira_allocnos_num = allocno_vec.length ();
ac0ab4f7 542
058e97ec
VM
543 return a;
544}
545
1756cb66
VM
546/* Set up register class for A and update its conflict hard
547 registers. */
058e97ec 548void
1756cb66 549ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
058e97ec 550{
1756cb66
VM
551 ira_allocno_object_iterator oi;
552 ira_object_t obj;
553
554 ALLOCNO_CLASS (a) = aclass;
555 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
556 {
557 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
558 reg_class_contents[aclass]);
559 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
560 reg_class_contents[aclass]);
561 }
a49ae217
BS
562}
563
ac0ab4f7
BS
564/* Determine the number of objects we should associate with allocno A
565 and allocate them. */
a49ae217 566void
ac0ab4f7 567ira_create_allocno_objects (ira_allocno_t a)
a49ae217 568{
ef4bddc2 569 machine_mode mode = ALLOCNO_MODE (a);
1756cb66
VM
570 enum reg_class aclass = ALLOCNO_CLASS (a);
571 int n = ira_reg_class_max_nregs[aclass][mode];
ac0ab4f7
BS
572 int i;
573
574 if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
575 n = 1;
576
577 ALLOCNO_NUM_OBJECTS (a) = n;
578 for (i = 0; i < n; i++)
579 ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
a49ae217
BS
580}
581
ac0ab4f7 582/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
1756cb66 583 ALLOCNO_OBJECT structures. This must be called after the allocno
ac0ab4f7 584 classes are known. */
a49ae217
BS
585static void
586create_allocno_objects (void)
587{
588 ira_allocno_t a;
589 ira_allocno_iterator ai;
590
591 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7 592 ira_create_allocno_objects (a);
058e97ec
VM
593}
594
ac0ab4f7
BS
595/* Merge hard register conflict information for all objects associated with
596 allocno TO into the corresponding objects associated with FROM.
597 If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS. */
3c55880a
BS
598static void
599merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
600 bool total_only)
601{
ac0ab4f7
BS
602 int i;
603 gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
604 for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
605 {
606 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
607 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
1756cb66 608
ac0ab4f7
BS
609 if (!total_only)
610 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
611 OBJECT_CONFLICT_HARD_REGS (from_obj));
612 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
613 OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
614 }
3c55880a
BS
615#ifdef STACK_REGS
616 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
617 ALLOCNO_NO_STACK_REG_P (to) = true;
618 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
619 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
620#endif
621}
622
ac0ab4f7
BS
623/* Update hard register conflict information for all objects associated with
624 A to include the regs in SET. */
625void
626ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
627{
628 ira_allocno_object_iterator i;
629 ira_object_t obj;
1756cb66 630
ac0ab4f7
BS
631 FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
632 {
633 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
634 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
635 }
636}
637
a49ae217
BS
638/* Return TRUE if a conflict vector with NUM elements is more
639 profitable than a conflict bit vector for OBJ. */
058e97ec 640bool
a49ae217 641ira_conflict_vector_profitable_p (ira_object_t obj, int num)
058e97ec
VM
642{
643 int nw;
a49ae217
BS
644 int max = OBJECT_MAX (obj);
645 int min = OBJECT_MIN (obj);
058e97ec 646
a49ae217
BS
647 if (max < min)
648 /* We prefer a bit vector in such case because it does not result
649 in allocation. */
058e97ec
VM
650 return false;
651
a49ae217
BS
652 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
653 return (2 * sizeof (ira_object_t) * (num + 1)
058e97ec
VM
654 < 3 * nw * sizeof (IRA_INT_TYPE));
655}
656
a49ae217
BS
657/* Allocates and initialize the conflict vector of OBJ for NUM
658 conflicting objects. */
058e97ec 659void
a49ae217 660ira_allocate_conflict_vec (ira_object_t obj, int num)
058e97ec
VM
661{
662 int size;
a49ae217 663 ira_object_t *vec;
058e97ec 664
a49ae217 665 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
058e97ec 666 num++; /* for NULL end marker */
a49ae217
BS
667 size = sizeof (ira_object_t) * num;
668 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
669 vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
058e97ec 670 vec[0] = NULL;
a49ae217
BS
671 OBJECT_NUM_CONFLICTS (obj) = 0;
672 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
673 OBJECT_CONFLICT_VEC_P (obj) = true;
058e97ec
VM
674}
675
a49ae217 676/* Allocate and initialize the conflict bit vector of OBJ. */
058e97ec 677static void
a49ae217 678allocate_conflict_bit_vec (ira_object_t obj)
058e97ec
VM
679{
680 unsigned int size;
681
a49ae217
BS
682 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
683 size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
058e97ec 684 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
a49ae217
BS
685 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
686 memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
687 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
688 OBJECT_CONFLICT_VEC_P (obj) = false;
058e97ec
VM
689}
690
691/* Allocate and initialize the conflict vector or conflict bit vector
ac0ab4f7 692 of OBJ for NUM conflicting allocnos whatever is more profitable. */
058e97ec 693void
ac0ab4f7 694ira_allocate_object_conflicts (ira_object_t obj, int num)
058e97ec 695{
ac0ab4f7
BS
696 if (ira_conflict_vector_profitable_p (obj, num))
697 ira_allocate_conflict_vec (obj, num);
058e97ec 698 else
ac0ab4f7 699 allocate_conflict_bit_vec (obj);
058e97ec
VM
700}
701
a49ae217 702/* Add OBJ2 to the conflicts of OBJ1. */
058e97ec 703static void
a49ae217 704add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
058e97ec
VM
705{
706 int num;
707 unsigned int size;
708
a49ae217 709 if (OBJECT_CONFLICT_VEC_P (obj1))
058e97ec 710 {
a49ae217
BS
711 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
712 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
713 num = curr_num + 2;
714 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
058e97ec 715 {
a49ae217 716 ira_object_t *newvec;
058e97ec 717 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
a49ae217
BS
718 newvec = (ira_object_t *) ira_allocate (size);
719 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
720 ira_free (vec);
721 vec = newvec;
722 OBJECT_CONFLICT_ARRAY (obj1) = vec;
723 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 724 }
a49ae217 725 vec[num - 2] = obj2;
058e97ec 726 vec[num - 1] = NULL;
a49ae217 727 OBJECT_NUM_CONFLICTS (obj1)++;
058e97ec
VM
728 }
729 else
730 {
731 int nw, added_head_nw, id;
a49ae217 732 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
058e97ec 733
a49ae217
BS
734 id = OBJECT_CONFLICT_ID (obj2);
735 if (OBJECT_MIN (obj1) > id)
058e97ec
VM
736 {
737 /* Expand head of the bit vector. */
a49ae217
BS
738 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
739 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 740 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
a49ae217 741 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
058e97ec
VM
742 {
743 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
744 vec, nw * sizeof (IRA_INT_TYPE));
745 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
746 }
747 else
748 {
749 size
750 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
751 vec = (IRA_INT_TYPE *) ira_allocate (size);
752 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
a49ae217 753 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
754 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
755 memset ((char *) vec
756 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
757 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
a49ae217
BS
758 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
759 OBJECT_CONFLICT_ARRAY (obj1) = vec;
760 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 761 }
a49ae217 762 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
058e97ec 763 }
a49ae217 764 else if (OBJECT_MAX (obj1) < id)
058e97ec 765 {
a49ae217 766 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 767 size = nw * sizeof (IRA_INT_TYPE);
a49ae217 768 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
058e97ec
VM
769 {
770 /* Expand tail of the bit vector. */
771 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
772 vec = (IRA_INT_TYPE *) ira_allocate (size);
a49ae217
BS
773 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
774 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
775 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
776 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
777 OBJECT_CONFLICT_ARRAY (obj1) = vec;
778 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 779 }
a49ae217 780 OBJECT_MAX (obj1) = id;
058e97ec 781 }
a49ae217 782 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
058e97ec
VM
783 }
784}
785
a49ae217
BS
786/* Add OBJ1 to the conflicts of OBJ2 and vice versa. */
787static void
788ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
058e97ec 789{
a49ae217
BS
790 add_to_conflicts (obj1, obj2);
791 add_to_conflicts (obj2, obj1);
058e97ec
VM
792}
793
a49ae217 794/* Clear all conflicts of OBJ. */
058e97ec 795static void
a49ae217 796clear_conflicts (ira_object_t obj)
058e97ec 797{
a49ae217 798 if (OBJECT_CONFLICT_VEC_P (obj))
058e97ec 799 {
a49ae217
BS
800 OBJECT_NUM_CONFLICTS (obj) = 0;
801 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
058e97ec 802 }
a49ae217 803 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
058e97ec
VM
804 {
805 int nw;
806
a49ae217
BS
807 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
808 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
809 }
810}
811
812/* The array used to find duplications in conflict vectors of
813 allocnos. */
a49ae217 814static int *conflict_check;
058e97ec
VM
815
816/* The value used to mark allocation presence in conflict vector of
817 the current allocno. */
a49ae217 818static int curr_conflict_check_tick;
058e97ec 819
a49ae217 820/* Remove duplications in conflict vector of OBJ. */
058e97ec 821static void
a49ae217 822compress_conflict_vec (ira_object_t obj)
058e97ec 823{
a49ae217 824 ira_object_t *vec, conflict_obj;
058e97ec
VM
825 int i, j;
826
a49ae217
BS
827 ira_assert (OBJECT_CONFLICT_VEC_P (obj));
828 vec = OBJECT_CONFLICT_VEC (obj);
829 curr_conflict_check_tick++;
830 for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
058e97ec 831 {
a49ae217
BS
832 int id = OBJECT_CONFLICT_ID (conflict_obj);
833 if (conflict_check[id] != curr_conflict_check_tick)
058e97ec 834 {
a49ae217
BS
835 conflict_check[id] = curr_conflict_check_tick;
836 vec[j++] = conflict_obj;
058e97ec
VM
837 }
838 }
a49ae217 839 OBJECT_NUM_CONFLICTS (obj) = j;
058e97ec
VM
840 vec[j] = NULL;
841}
842
843/* Remove duplications in conflict vectors of all allocnos. */
844static void
845compress_conflict_vecs (void)
846{
ac0ab4f7
BS
847 ira_object_t obj;
848 ira_object_iterator oi;
058e97ec 849
a49ae217
BS
850 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
851 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
852 curr_conflict_check_tick = 0;
ac0ab4f7 853 FOR_EACH_OBJECT (obj, oi)
a49ae217 854 {
a49ae217
BS
855 if (OBJECT_CONFLICT_VEC_P (obj))
856 compress_conflict_vec (obj);
857 }
858 ira_free (conflict_check);
058e97ec
VM
859}
860
861/* This recursive function outputs allocno A and if it is a cap the
862 function outputs its members. */
863void
864ira_print_expanded_allocno (ira_allocno_t a)
865{
866 basic_block bb;
867
868 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
869 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
870 fprintf (ira_dump_file, ",b%d", bb->index);
871 else
2608d841 872 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
058e97ec
VM
873 if (ALLOCNO_CAP_MEMBER (a) != NULL)
874 {
875 fprintf (ira_dump_file, ":");
876 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
877 }
878 fprintf (ira_dump_file, ")");
879}
880
/* Create and return the cap representing allocno A in the
   parent loop.  A cap summarizes A so the parent loop can allocate a
   register for the whole subtree at once.  Most allocno attributes
   (mode, class, costs, frequencies, conflicts) are copied or merged
   from A into the new cap.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  /* The cap lives in the parent loop tree node of A's node.  */
  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  ALLOCNO_WMODE (cap) = ALLOCNO_WMODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  /* Link cap and member in both directions.  */
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  /* The cap conflicts with every hard register its member conflicts
     with.  */
  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (cap),
		    ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
925
ac0ab4f7 926/* Create and return a live range for OBJECT with given attributes. */
b14151b5 927live_range_t
9140d27b
BS
928ira_create_live_range (ira_object_t obj, int start, int finish,
929 live_range_t next)
058e97ec 930{
b14151b5 931 live_range_t p;
058e97ec 932
0b470bae 933 p = live_range_pool.allocate ();
9140d27b 934 p->object = obj;
058e97ec
VM
935 p->start = start;
936 p->finish = finish;
937 p->next = next;
938 return p;
939}
940
ac0ab4f7
BS
941/* Create a new live range for OBJECT and queue it at the head of its
942 live range list. */
943void
944ira_add_live_range_to_object (ira_object_t object, int start, int finish)
945{
946 live_range_t p;
947 p = ira_create_live_range (object, start, finish,
948 OBJECT_LIVE_RANGES (object));
949 OBJECT_LIVE_RANGES (object) = p;
950}
951
058e97ec 952/* Copy allocno live range R and return the result. */
b14151b5 953static live_range_t
9140d27b 954copy_live_range (live_range_t r)
058e97ec 955{
b14151b5 956 live_range_t p;
058e97ec 957
0b470bae 958 p = live_range_pool.allocate ();
058e97ec
VM
959 *p = *r;
960 return p;
961}
962
963/* Copy allocno live range list given by its head R and return the
964 result. */
b14151b5 965live_range_t
9140d27b 966ira_copy_live_range_list (live_range_t r)
058e97ec 967{
b14151b5 968 live_range_t p, first, last;
058e97ec
VM
969
970 if (r == NULL)
971 return NULL;
972 for (first = last = NULL; r != NULL; r = r->next)
973 {
9140d27b 974 p = copy_live_range (r);
058e97ec
VM
975 if (first == NULL)
976 first = p;
977 else
978 last->next = p;
979 last = p;
980 }
981 return first;
982}
983
3553f0bb
VM
984/* Merge ranges R1 and R2 and returns the result. The function
985 maintains the order of ranges and tries to minimize number of the
986 result ranges. */
b14151b5 987live_range_t
9140d27b 988ira_merge_live_ranges (live_range_t r1, live_range_t r2)
3553f0bb 989{
fab27f52 990 live_range_t first, last;
3553f0bb
VM
991
992 if (r1 == NULL)
993 return r2;
994 if (r2 == NULL)
995 return r1;
996 for (first = last = NULL; r1 != NULL && r2 != NULL;)
997 {
998 if (r1->start < r2->start)
fab27f52 999 std::swap (r1, r2);
3553f0bb
VM
1000 if (r1->start <= r2->finish + 1)
1001 {
1002 /* Intersected ranges: merge r1 and r2 into r1. */
1003 r1->start = r2->start;
1004 if (r1->finish < r2->finish)
1005 r1->finish = r2->finish;
fab27f52 1006 live_range_t temp = r2;
3553f0bb 1007 r2 = r2->next;
9140d27b 1008 ira_finish_live_range (temp);
3553f0bb
VM
1009 if (r2 == NULL)
1010 {
1011 /* To try to merge with subsequent ranges in r1. */
1012 r2 = r1->next;
1013 r1->next = NULL;
1014 }
1015 }
1016 else
1017 {
1018 /* Add r1 to the result. */
1019 if (first == NULL)
1020 first = last = r1;
1021 else
1022 {
1023 last->next = r1;
1024 last = r1;
1025 }
1026 r1 = r1->next;
1027 if (r1 == NULL)
1028 {
1029 /* To try to merge with subsequent ranges in r2. */
1030 r1 = r2->next;
1031 r2->next = NULL;
1032 }
1033 }
1034 }
1035 if (r1 != NULL)
1036 {
1037 if (first == NULL)
1038 first = r1;
1039 else
1040 last->next = r1;
1041 ira_assert (r1->next == NULL);
1042 }
1043 else if (r2 != NULL)
1044 {
1045 if (first == NULL)
1046 first = r2;
1047 else
1048 last->next = r2;
1049 ira_assert (r2->next == NULL);
1050 }
1051 else
1052 {
1053 ira_assert (last->next == NULL);
1054 }
1055 return first;
1056}
1057
1058/* Return TRUE if live ranges R1 and R2 intersect. */
1059bool
9140d27b 1060ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
3553f0bb
VM
1061{
1062 /* Remember the live ranges are always kept ordered. */
1063 while (r1 != NULL && r2 != NULL)
1064 {
1065 if (r1->start > r2->finish)
1066 r1 = r1->next;
1067 else if (r2->start > r1->finish)
1068 r2 = r2->next;
1069 else
1070 return true;
1071 }
1072 return false;
1073}
1074
058e97ec
VM
1075/* Free allocno live range R. */
1076void
9140d27b 1077ira_finish_live_range (live_range_t r)
058e97ec 1078{
0b470bae 1079 live_range_pool.remove (r);
058e97ec
VM
1080}
1081
3553f0bb
VM
1082/* Free list of allocno live ranges starting with R. */
1083void
9140d27b 1084ira_finish_live_range_list (live_range_t r)
3553f0bb 1085{
b14151b5 1086 live_range_t next_r;
3553f0bb
VM
1087
1088 for (; r != NULL; r = next_r)
1089 {
1090 next_r = r->next;
9140d27b 1091 ira_finish_live_range (r);
3553f0bb
VM
1092 }
1093}
1094
058e97ec
VM
1095/* Free updated register costs of allocno A. */
1096void
1097ira_free_allocno_updated_costs (ira_allocno_t a)
1098{
1756cb66 1099 enum reg_class aclass;
058e97ec 1100
1756cb66 1101 aclass = ALLOCNO_CLASS (a);
058e97ec 1102 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1103 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1104 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1105 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1106 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66 1107 aclass);
058e97ec
VM
1108 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1109}
1110
1756cb66
VM
1111/* Free and nullify all cost vectors allocated earlier for allocno
1112 A. */
058e97ec 1113static void
1756cb66 1114ira_free_allocno_costs (ira_allocno_t a)
058e97ec 1115{
1756cb66 1116 enum reg_class aclass = ALLOCNO_CLASS (a);
ac0ab4f7
BS
1117 ira_object_t obj;
1118 ira_allocno_object_iterator oi;
058e97ec 1119
ac0ab4f7
BS
1120 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
1121 {
1122 ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
1123 ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
1124 if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
1125 ira_free (OBJECT_CONFLICT_ARRAY (obj));
0b470bae 1126 object_pool.remove (obj);
ac0ab4f7 1127 }
9140d27b 1128
058e97ec 1129 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
058e97ec 1130 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1756cb66 1131 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
058e97ec 1132 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1756cb66 1133 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
058e97ec 1134 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1135 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1136 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1137 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66
VM
1138 aclass);
1139 ALLOCNO_HARD_REG_COSTS (a) = NULL;
1140 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
1141 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1142 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1143}
1144
/* Free the memory allocated for allocno A: first its cost vectors
   and objects, then the allocno structure itself.  */
static void
finish_allocno (ira_allocno_t a)
{
  ira_free_allocno_costs (a);
  allocno_pool.remove (a);
}
1152
/* Free the memory allocated for all allocnos, then release the
   supporting maps, vectors, and allocation pools.  Pools are released
   last since the allocnos/objects live in them.  */
static void
finish_allocnos (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    finish_allocno (a);
  ira_free (ira_regno_allocno_map);
  ira_object_id_map_vec.release ();
  allocno_vec.release ();
  allocno_pool.release ();
  object_pool.release ();
  live_range_pool.release ();
}
1169
1170\f
1171
3b6d1699 1172/* Pools for allocno preferences. */
0b470bae 1173static pool_allocator <ira_allocno_pref> pref_pool ("prefs", 100);
3b6d1699
VM
1174
1175/* Vec containing references to all created preferences. It is a
1176 container of array ira_prefs. */
1177static vec<ira_pref_t> pref_vec;
1178
/* The function initializes data concerning allocno prefs.  The vec
   is sized by insn count as a capacity heuristic.  */
static void
initiate_prefs (void)
{
  pref_vec.create (get_max_uid ());
  ira_prefs = NULL;
  ira_prefs_num = 0;
}
1187
1188/* Return pref for A and HARD_REGNO if any. */
1189static ira_pref_t
1190find_allocno_pref (ira_allocno_t a, int hard_regno)
1191{
1192 ira_pref_t pref;
1193
1194 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1195 if (pref->allocno == a && pref->hard_regno == hard_regno)
1196 return pref;
1197 return NULL;
1198}
1199
/* Create and return pref with given attributes A, HARD_REGNO, and FREQ.
   The new pref is registered in the global ira_prefs array but is NOT
   linked into A's pref list; callers use add_allocno_pref_to_list for
   that (see ira_add_allocno_pref).  */
ira_pref_t
ira_create_pref (ira_allocno_t a, int hard_regno, int freq)
{
  ira_pref_t pref;

  pref = pref_pool.allocate ();
  pref->num = ira_prefs_num;
  pref->allocno = a;
  pref->hard_regno = hard_regno;
  pref->freq = freq;
  pref_vec.safe_push (pref);
  /* safe_push may reallocate, so refresh the exported array view.  */
  ira_prefs = pref_vec.address ();
  ira_prefs_num = pref_vec.length ();
  return pref;
}
1216
df3e3493 1217/* Attach a pref PREF to the corresponding allocno. */
3b6d1699
VM
1218static void
1219add_allocno_pref_to_list (ira_pref_t pref)
1220{
1221 ira_allocno_t a = pref->allocno;
1222
1223 pref->next_pref = ALLOCNO_PREFS (a);
1224 ALLOCNO_PREFS (a) = pref;
1225}
1226
1227/* Create (or update frequency if the pref already exists) the pref of
1228 allocnos A preferring HARD_REGNO with frequency FREQ. */
1229void
1230ira_add_allocno_pref (ira_allocno_t a, int hard_regno, int freq)
1231{
1232 ira_pref_t pref;
1233
1234 if (freq <= 0)
1235 return;
1236 if ((pref = find_allocno_pref (a, hard_regno)) != NULL)
1237 {
1238 pref->freq += freq;
1239 return;
1240 }
1241 pref = ira_create_pref (a, hard_regno, freq);
1242 ira_assert (a != NULL);
1243 add_allocno_pref_to_list (pref);
1244}
1245
/* Print info about PREF into file F in the form
   "pref<num>:a<allocno>(r<regno>)<-hr<hard-reg>@<freq>".  */
static void
print_pref (FILE *f, ira_pref_t pref)
{
  fprintf (f, " pref%d:a%d(r%d)<-hr%d@%d\n", pref->num,
	   ALLOCNO_NUM (pref->allocno), ALLOCNO_REGNO (pref->allocno),
	   pref->hard_regno, pref->freq);
}
1254
/* Print info about PREF into stderr.  Debugger convenience wrapper
   around print_pref.  */
void
ira_debug_pref (ira_pref_t pref)
{
  print_pref (stderr, pref);
}
1261
1262/* Print info about all prefs into file F. */
1263static void
1264print_prefs (FILE *f)
1265{
1266 ira_pref_t pref;
1267 ira_pref_iterator pi;
1268
1269 FOR_EACH_PREF (pref, pi)
1270 print_pref (f, pref);
1271}
1272
/* Print info about all prefs into stderr.  Debugger convenience
   wrapper around print_prefs.  */
void
ira_debug_prefs (void)
{
  print_prefs (stderr);
}
1279
1280/* Print info about prefs involving allocno A into file F. */
1281static void
1282print_allocno_prefs (FILE *f, ira_allocno_t a)
1283{
1284 ira_pref_t pref;
1285
1286 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1287 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1288 fprintf (f, " pref%d:hr%d@%d", pref->num, pref->hard_regno, pref->freq);
1289 fprintf (f, "\n");
1290}
1291
/* Print info about prefs involving allocno A into stderr.  Debugger
   convenience wrapper around print_allocno_prefs.  */
void
ira_debug_allocno_prefs (ira_allocno_t a)
{
  print_allocno_prefs (stderr, a);
}
1298
/* The function frees memory allocated for PREF.  The global array slot
   is nullified but the pref's list links are NOT unhooked; callers
   must detach PREF from its allocno list first (see ira_remove_pref).  */
static void
finish_pref (ira_pref_t pref)
{
  ira_prefs[pref->num] = NULL;
  pref_pool.remove (pref);
}
1306
1307/* Remove PREF from the list of allocno prefs and free memory for
1308 it. */
1309void
1310ira_remove_pref (ira_pref_t pref)
1311{
1312 ira_pref_t cpref, prev;
1313
1314 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1315 fprintf (ira_dump_file, " Removing pref%d:hr%d@%d\n",
1316 pref->num, pref->hard_regno, pref->freq);
1317 for (prev = NULL, cpref = ALLOCNO_PREFS (pref->allocno);
1318 cpref != NULL;
1319 prev = cpref, cpref = cpref->next_pref)
1320 if (cpref == pref)
1321 break;
1322 ira_assert (cpref != NULL);
1323 if (prev == NULL)
1324 ALLOCNO_PREFS (pref->allocno) = pref->next_pref;
1325 else
1326 prev->next_pref = pref->next_pref;
1327 finish_pref (pref);
1328}
1329
1330/* Remove all prefs of allocno A. */
1331void
1332ira_remove_allocno_prefs (ira_allocno_t a)
1333{
1334 ira_pref_t pref, next_pref;
1335
1336 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = next_pref)
1337 {
1338 next_pref = pref->next_pref;
1339 finish_pref (pref);
1340 }
1341 ALLOCNO_PREFS (a) = NULL;
1342}
1343
/* Free memory allocated for all prefs, then release the containing
   vec and pool.  */
static void
finish_prefs (void)
{
  ira_pref_t pref;
  ira_pref_iterator pi;

  FOR_EACH_PREF (pref, pi)
    finish_pref (pref);
  pref_vec.release ();
  pref_pool.release ();
}
1356
1357\f
1358
058e97ec 1359/* Pools for copies. */
0b470bae 1360static pool_allocator<ira_allocno_copy> copy_pool ("copies", 100);
058e97ec
VM
1361
1362/* Vec containing references to all created copies. It is a
1363 container of array ira_copies. */
9771b263 1364static vec<ira_copy_t> copy_vec;
058e97ec
VM
1365
/* The function initializes data concerning allocno copies.  The vec
   is sized by insn count as a capacity heuristic.  */
static void
initiate_copies (void)
{
  copy_vec.create (get_max_uid ());
  ira_copies = NULL;
  ira_copies_num = 0;
}
1374
1375/* Return copy connecting A1 and A2 and originated from INSN of
1376 LOOP_TREE_NODE if any. */
1377static ira_copy_t
070a1983 1378find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx_insn *insn,
058e97ec
VM
1379 ira_loop_tree_node_t loop_tree_node)
1380{
1381 ira_copy_t cp, next_cp;
1382 ira_allocno_t another_a;
1383
1384 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1385 {
1386 if (cp->first == a1)
1387 {
1388 next_cp = cp->next_first_allocno_copy;
1389 another_a = cp->second;
1390 }
1391 else if (cp->second == a1)
1392 {
1393 next_cp = cp->next_second_allocno_copy;
1394 another_a = cp->first;
1395 }
1396 else
1397 gcc_unreachable ();
1398 if (another_a == a2 && cp->insn == insn
1399 && cp->loop_tree_node == loop_tree_node)
1400 return cp;
1401 }
1402 return NULL;
1403}
1404
/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
   SECOND, FREQ, CONSTRAINT_P, and INSN.  The copy is registered in
   the global ira_copies array but is NOT linked into the allocnos'
   copy lists; see ira_add_allocno_copy for that.  */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
		 bool constraint_p, rtx_insn *insn,
		 ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  cp = copy_pool.allocate ();
  cp->num = ira_copies_num;
  cp->first = first;
  cp->second = second;
  cp->freq = freq;
  cp->constraint_p = constraint_p;
  cp->insn = insn;
  cp->loop_tree_node = loop_tree_node;
  copy_vec.safe_push (cp);
  /* safe_push may reallocate, so refresh the exported array view.  */
  ira_copies = copy_vec.address ();
  ira_copies_num = copy_vec.length ();
  return cp;
}
1427
/* Attach a copy CP to allocnos involved into the copy.  Each allocno
   keeps a doubly-linked list threaded through the copies it appears
   in; a copy carries two sets of links (first/second) and which set an
   allocno uses depends on which end of the copy it occupies.  */
static void
add_allocno_copy_to_list (ira_copy_t cp)
{
  ira_allocno_t first = cp->first, second = cp->second;

  cp->prev_first_allocno_copy = NULL;
  cp->prev_second_allocno_copy = NULL;
  /* Push CP at the head of FIRST's list and fix the old head's back
     link, choosing the link set matching FIRST's position there.  */
  cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
  if (cp->next_first_allocno_copy != NULL)
    {
      if (cp->next_first_allocno_copy->first == first)
	cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
      else
	cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
    }
  /* Likewise for SECOND's list.  */
  cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
  if (cp->next_second_allocno_copy != NULL)
    {
      if (cp->next_second_allocno_copy->second == second)
	cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
      else
	cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
    }
  ALLOCNO_COPIES (first) = cp;
  ALLOCNO_COPIES (second) = cp;
}
1455
058e97ec
VM
1456/* Make a copy CP a canonical copy where number of the
1457 first allocno is less than the second one. */
3b6d1699
VM
1458static void
1459swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
058e97ec 1460{
058e97ec
VM
1461 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1462 return;
1463
fab27f52
MM
1464 std::swap (cp->first, cp->second);
1465 std::swap (cp->prev_first_allocno_copy, cp->prev_second_allocno_copy);
1466 std::swap (cp->next_first_allocno_copy, cp->next_second_allocno_copy);
058e97ec
VM
1467}
1468
1469/* Create (or update frequency if the copy already exists) and return
1470 the copy of allocnos FIRST and SECOND with frequency FREQ
1471 corresponding to move insn INSN (if any) and originated from
1472 LOOP_TREE_NODE. */
1473ira_copy_t
1474ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
070a1983 1475 bool constraint_p, rtx_insn *insn,
548a6322 1476 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
1477{
1478 ira_copy_t cp;
1479
1480 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1481 {
1482 cp->freq += freq;
1483 return cp;
1484 }
548a6322
VM
1485 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1486 loop_tree_node);
058e97ec 1487 ira_assert (first != NULL && second != NULL);
3b6d1699
VM
1488 add_allocno_copy_to_list (cp);
1489 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
1490 return cp;
1491}
1492
4cda38d5
VM
1493/* Print info about copy CP into file F. */
1494static void
1495print_copy (FILE *f, ira_copy_t cp)
1496{
548a6322 1497 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
4cda38d5 1498 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
548a6322
VM
1499 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1500 cp->insn != NULL
1501 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
4cda38d5
VM
1502}
1503
7b3b6ae4
LC
1504DEBUG_FUNCTION void
1505debug (ira_allocno_copy &ref)
1506{
1507 print_copy (stderr, &ref);
1508}
1509
/* Debugger entry point: print the copy PTR points to, or "<nil>" for
   a null pointer.  */
DEBUG_FUNCTION void
debug (ira_allocno_copy *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
1518
4cda38d5
VM
1519/* Print info about copy CP into stderr. */
1520void
1521ira_debug_copy (ira_copy_t cp)
1522{
1523 print_copy (stderr, cp);
1524}
1525
1526/* Print info about all copies into file F. */
1527static void
1528print_copies (FILE *f)
1529{
1530 ira_copy_t cp;
1531 ira_copy_iterator ci;
1532
1533 FOR_EACH_COPY (cp, ci)
1534 print_copy (f, cp);
1535}
1536
/* Print info about all copies into stderr.  Debugger convenience
   wrapper around print_copies.  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}
1543
058e97ec
VM
1544/* Print info about copies involving allocno A into file F. */
1545static void
1546print_allocno_copies (FILE *f, ira_allocno_t a)
1547{
1548 ira_allocno_t another_a;
1549 ira_copy_t cp, next_cp;
1550
1551 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1552 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1553 {
1554 if (cp->first == a)
1555 {
1556 next_cp = cp->next_first_allocno_copy;
1557 another_a = cp->second;
1558 }
1559 else if (cp->second == a)
1560 {
1561 next_cp = cp->next_second_allocno_copy;
1562 another_a = cp->first;
1563 }
1564 else
1565 gcc_unreachable ();
1566 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1567 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1568 }
1569 fprintf (f, "\n");
1570}
1571
7b3b6ae4
LC
1572DEBUG_FUNCTION void
1573debug (ira_allocno &ref)
1574{
1575 print_allocno_copies (stderr, &ref);
1576}
1577
/* Debugger entry point: print the copies of the allocno PTR points
   to, or "<nil>" for a null pointer.  */
DEBUG_FUNCTION void
debug (ira_allocno *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
1586
1587
/* Print info about copies involving allocno A into stderr.  Debugger
   convenience wrapper around print_allocno_copies.  */
void
ira_debug_allocno_copies (ira_allocno_t a)
{
  print_allocno_copies (stderr, a);
}
1594
/* The function frees memory allocated for copy CP by returning it to
   the copy pool.  The allocnos' copy-list links are not unhooked.  */
static void
finish_copy (ira_copy_t cp)
{
  copy_pool.remove (cp);
}
1601
1602
1603/* Free memory allocated for all copies. */
1604static void
1605finish_copies (void)
1606{
1607 ira_copy_t cp;
1608 ira_copy_iterator ci;
1609
1610 FOR_EACH_COPY (cp, ci)
1611 finish_copy (cp);
9771b263 1612 copy_vec.release ();
0b470bae 1613 copy_pool.release ();
058e97ec
VM
1614}
1615
1616\f
1617
1756cb66 1618/* Pools for cost vectors. It is defined only for allocno classes. */
3599f64a 1619static pool_allocator<int> * cost_vector_pool[N_REG_CLASSES];
058e97ec
VM
1620
1621/* The function initiates work with hard register cost vectors. It
1756cb66 1622 creates allocation pool for each allocno class. */
058e97ec
VM
1623static void
1624initiate_cost_vectors (void)
1625{
1626 int i;
1756cb66 1627 enum reg_class aclass;
058e97ec 1628
1756cb66 1629 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1630 {
1756cb66 1631 aclass = ira_allocno_classes[i];
3599f64a
ML
1632 cost_vector_pool[aclass] = new pool_allocator<int>
1633 ("cost vectors", 100,
1634 sizeof (int) * (ira_class_hard_regs_num[aclass] - 1));
058e97ec
VM
1635 }
1636}
1637
1756cb66 1638/* Allocate and return a cost vector VEC for ACLASS. */
058e97ec 1639int *
6f76a878 1640ira_allocate_cost_vector (reg_class_t aclass)
058e97ec 1641{
3599f64a 1642 return cost_vector_pool[(int) aclass]->allocate ();
058e97ec
VM
1643}
1644
1756cb66 1645/* Free a cost vector VEC for ACLASS. */
058e97ec 1646void
6f76a878 1647ira_free_cost_vector (int *vec, reg_class_t aclass)
058e97ec
VM
1648{
1649 ira_assert (vec != NULL);
3599f64a 1650 cost_vector_pool[(int) aclass]->remove (vec);
058e97ec
VM
1651}
1652
1653/* Finish work with hard register cost vectors. Release allocation
1756cb66 1654 pool for each allocno class. */
058e97ec
VM
1655static void
1656finish_cost_vectors (void)
1657{
1658 int i;
1756cb66 1659 enum reg_class aclass;
058e97ec 1660
1756cb66 1661 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1662 {
1756cb66 1663 aclass = ira_allocno_classes[i];
3599f64a 1664 delete cost_vector_pool[aclass];
058e97ec
VM
1665 }
1666}
1667
1668\f
1669
e6a7da82
SB
1670/* Compute a post-ordering of the reverse control flow of the loop body
1671 designated by the children nodes of LOOP_NODE, whose body nodes in
1672 pre-order are input as LOOP_PREORDER. Return a VEC with a post-order
1673 of the reverse loop body.
1674
1675 For the post-order of the reverse CFG, we visit the basic blocks in
1676 LOOP_PREORDER array in the reverse order of where they appear.
1677 This is important: We do not just want to compute a post-order of
1678 the reverse CFG, we want to make a best-guess for a visiting order that
1679 minimizes the number of chain elements per allocno live range. If the
1680 blocks would be visited in a different order, we would still compute a
1681 correct post-ordering but it would be less likely that two nodes
1682 connected by an edge in the CFG are neighbours in the topsort. */
1683
9771b263 1684static vec<ira_loop_tree_node_t>
e6a7da82 1685ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
9771b263 1686 vec<ira_loop_tree_node_t> loop_preorder)
e6a7da82 1687{
6e1aa848 1688 vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
e6a7da82
SB
1689 unsigned int n_loop_preorder;
1690
9771b263 1691 n_loop_preorder = loop_preorder.length ();
e6a7da82
SB
1692 if (n_loop_preorder != 0)
1693 {
1694 ira_loop_tree_node_t subloop_node;
1695 unsigned int i;
ef062b13 1696 auto_vec<ira_loop_tree_node_t> dfs_stack;
e6a7da82
SB
1697
1698 /* This is a bit of strange abuse of the BB_VISITED flag: We use
1699 the flag to mark blocks we still have to visit to add them to
1700 our post-order. Define an alias to avoid confusion. */
1701#define BB_TO_VISIT BB_VISITED
1702
9771b263 1703 FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
e6a7da82
SB
1704 {
1705 gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
1706 subloop_node->bb->flags |= BB_TO_VISIT;
1707 }
1708
9771b263
DN
1709 topsort_nodes.create (n_loop_preorder);
1710 dfs_stack.create (n_loop_preorder);
e6a7da82 1711
9771b263 1712 FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
e6a7da82
SB
1713 {
1714 if (! (subloop_node->bb->flags & BB_TO_VISIT))
1715 continue;
1716
1717 subloop_node->bb->flags &= ~BB_TO_VISIT;
9771b263
DN
1718 dfs_stack.quick_push (subloop_node);
1719 while (! dfs_stack.is_empty ())
e6a7da82
SB
1720 {
1721 edge e;
1722 edge_iterator ei;
1723
9771b263 1724 ira_loop_tree_node_t n = dfs_stack.last ();
e6a7da82
SB
1725 FOR_EACH_EDGE (e, ei, n->bb->preds)
1726 {
1727 ira_loop_tree_node_t pred_node;
1728 basic_block pred_bb = e->src;
1729
fefa31b5 1730 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
e6a7da82
SB
1731 continue;
1732
1733 pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
1734 if (pred_node != n
1735 && (pred_node->bb->flags & BB_TO_VISIT))
1736 {
1737 pred_node->bb->flags &= ~BB_TO_VISIT;
9771b263 1738 dfs_stack.quick_push (pred_node);
e6a7da82
SB
1739 }
1740 }
9771b263 1741 if (n == dfs_stack.last ())
e6a7da82 1742 {
9771b263
DN
1743 dfs_stack.pop ();
1744 topsort_nodes.quick_push (n);
e6a7da82
SB
1745 }
1746 }
1747 }
1748
1749#undef BB_TO_VISIT
e6a7da82
SB
1750 }
1751
9771b263 1752 gcc_assert (topsort_nodes.length () == n_loop_preorder);
e6a7da82
SB
1753 return topsort_nodes;
1754}
1755
058e97ec
VM
1756/* The current loop tree node and its regno allocno map. */
1757ira_loop_tree_node_t ira_curr_loop_tree_node;
1758ira_allocno_t *ira_curr_regno_allocno_map;
1759
/* This recursive function traverses loop tree with root LOOP_NODE
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP.  If BB_P,
   basic block nodes of LOOP_NODE is also processed (before its
   subloop nodes).

   If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
   the loop are passed in the *reverse* post-order of the *reverse*
   CFG.  This is only used by ira_create_allocno_live_ranges, which
   wants to visit basic blocks in this order to minimize the number
   of elements per live range chain.
   Note that the loop tree nodes are still visited in the normal,
   forward post-order of the loop tree.  */

void
ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
			void (*preorder_func) (ira_loop_tree_node_t),
			void (*postorder_func) (ira_loop_tree_node_t))
{
  ira_loop_tree_node_t subloop_node;

  /* LOOP_NODE must be a loop node, not a basic-block node.  */
  ira_assert (loop_node->bb == NULL);
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (preorder_func != NULL)
    (*preorder_func) (loop_node);

  if (bb_p)
    {
      auto_vec<ira_loop_tree_node_t> loop_preorder;
      unsigned int i;

      /* Add all nodes to the set of nodes to visit.  The IRA loop tree
	 is set up such that nodes in the loop body appear in a pre-order
	 of their place in the CFG.  */
      for (subloop_node = loop_node->children;
	   subloop_node != NULL;
	   subloop_node = subloop_node->next)
	if (subloop_node->bb != NULL)
	  loop_preorder.safe_push (subloop_node);

      if (preorder_func != NULL)
	FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
	  (*preorder_func) (subloop_node);

      if (postorder_func != NULL)
	{
	  /* See the function comment for why basic blocks use the
	     reverse post-order of the reverse CFG here.  */
	  vec<ira_loop_tree_node_t> loop_rev_postorder =
	    ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
	  FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
	    (*postorder_func) (subloop_node);
	  loop_rev_postorder.release ();
	}
    }

  /* Recurse into subloops after this loop's own basic blocks.  */
  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      ira_traverse_loop_tree (bb_p, subloop_node,
			      preorder_func, postorder_func);
    }

  /* Restore the current-node globals clobbered by the recursion
     before invoking the postorder callback for this node.  */
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (postorder_func != NULL)
    (*postorder_func) (loop_node);
}
1832
1833\f
1834
/* The basic block currently being processed; read by
   create_insn_allocnos for frequency accounting.  */
static basic_block curr_bb;
1837
/* This recursive function creates allocnos corresponding to
   pseudo-registers contained in X.  True OUTPUT_P means that X is
   an lvalue.  OUTER is the parent expression of X (NULL at the top
   level); it is used to widen an allocno's mode when the reference
   appears inside a wider SUBREG.  */
static void
create_insn_allocnos (rtx x, rtx outer, bool output_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      int regno;

      /* Only pseudo registers get allocnos.  */
      if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
	{
	  ira_allocno_t a;

	  if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
	    {
	      a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
	      /* A reference inside a wider SUBREG widens the allocno's
		 wide mode.  */
	      if (outer != NULL && GET_CODE (outer) == SUBREG)
		{
		  machine_mode wmode = GET_MODE (outer);
		  if (GET_MODE_SIZE (wmode) > GET_MODE_SIZE (ALLOCNO_WMODE (a)))
		    ALLOCNO_WMODE (a) = wmode;
		}
	    }

	  ALLOCNO_NREFS (a)++;
	  ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
	  if (output_p)
	    bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
	}
      return;
    }
  else if (code == SET)
    {
      create_insn_allocnos (SET_DEST (x), NULL, true);
      create_insn_allocnos (SET_SRC (x), NULL, false);
      return;
    }
  else if (code == CLOBBER)
    {
      create_insn_allocnos (XEXP (x, 0), NULL, true);
      return;
    }
  else if (code == MEM)
    {
      /* An address is always a use, even when the MEM is written.  */
      create_insn_allocnos (XEXP (x, 0), NULL, false);
      return;
    }
  else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
	   code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
    {
      /* Auto-modify both reads and writes its operand.  */
      create_insn_allocnos (XEXP (x, 0), NULL, true);
      create_insn_allocnos (XEXP (x, 0), NULL, false);
      return;
    }

  /* Generic walk over the remaining rtx codes.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	create_insn_allocnos (XEXP (x, i), x, output_p);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  create_insn_allocnos (XVECEXP (x, i, j), x, output_p);
    }
}
1908
/* Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  */
static void
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
  basic_block bb;
  rtx_insn *insn;
  unsigned int i;
  bitmap_iterator bi;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), NULL, false);
  /* It might be an allocno living through from one subloop to
     another; create allocnos for live-in pseudos not referenced by
     any insn above.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (ira_curr_regno_allocno_map[i] == NULL)
      ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
1931
/* Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  ira_loop_tree_node_t parent;

  live_in_regs = df_get_live_in (e->dest);
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  /* A pseudo lives on the edge iff it is live out of E->src and also
     live into E->dest.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
			     FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
	if (ira_curr_regno_allocno_map[i] == NULL)
	  {
	    /* The order of creations is important for right
	       ira_regno_allocno_map: the parent's allocno must be
	       created before the current node's.  */
	    if ((parent = ira_curr_loop_tree_node->parent) != NULL
		&& parent->regno_allocno_map[i] == NULL)
	      ira_create_allocno (i, false, parent);
	    ira_create_allocno (i, false, ira_curr_loop_tree_node);
	  }
	bitmap_set_bit (border_allocnos,
			ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
      }
}
1962
/* Create allocnos corresponding to pseudo-registers living in loop
   represented by the corresponding loop tree node LOOP_NODE.  This
   function is called by ira_traverse_loop_tree.  */
static void
create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
{
  if (loop_node->bb != NULL)
    create_bb_allocnos (loop_node);
  else if (loop_node != ira_loop_tree_root)
    {
      int i;
      edge_iterator ei;
      edge e;
      vec<edge> edges;

      ira_assert (current_loops != NULL);
      /* Loop entry edges: every predecessor of the header except the
	 back edge from the latch.  */
      FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
	if (e->src != loop_node->loop->latch)
	  create_loop_allocnos (e);

      /* Loop exit edges.  */
      edges = get_loop_exit_edges (loop_node->loop);
      FOR_EACH_VEC_ELT (edges, i, e)
	create_loop_allocnos (e);
      edges.release ();
    }
}
1989
1990/* Propagate information about allocnos modified inside the loop given
1991 by its LOOP_TREE_NODE to its parent. */
1992static void
1993propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1994{
1995 if (loop_tree_node == ira_loop_tree_root)
1996 return;
1997 ira_assert (loop_tree_node->bb == NULL);
1998 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1999 loop_tree_node->modified_regnos);
2000}
2001
/* Propagate new info about allocno A (see comments about accumulated
   info in allocno definition) to the corresponding allocno on upper
   loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info means allocno info finally calculated in this
   file.  */
static void
propagate_allocno_info (void)
{
  int i;
  ira_allocno_t a, parent_a;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  /* Nothing to propagate unless regional allocation is in use.  */
  if (flag_ira_region != IRA_REGION_ALL
      && flag_ira_region != IRA_REGION_MIXED)
    return;
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
	  && (parent_a = parent->regno_allocno_map[i]) != NULL
	  /* There are no caps yet at this point.  So use
	     border_allocnos to find allocnos for the propagation.  */
	  && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
			   ALLOCNO_NUM (a)))
	{
	  /* bad_spill_p may only be weakened (true -> false) when
	     accumulating into the parent.  */
	  if (! ALLOCNO_BAD_SPILL_P (a))
	    ALLOCNO_BAD_SPILL_P (parent_a) = false;
	  ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
	  ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
	  ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
	  merge_hard_reg_conflicts (a, parent_a, true);
	  ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	    += ALLOCNO_CALLS_CROSSED_NUM (a);
	  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
	    += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
	  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a),
			    ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
	  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	  /* Cost vectors are per-class; parent and child must agree
	     on the allocno class.  */
	  aclass = ALLOCNO_CLASS (a);
	  ira_assert (aclass == ALLOCNO_CLASS (parent_a));
	  ira_allocate_and_accumulate_costs
	    (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
	     ALLOCNO_HARD_REG_COSTS (a));
	  ira_allocate_and_accumulate_costs
	    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
	     aclass,
	     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
	  ALLOCNO_CLASS_COST (parent_a)
	    += ALLOCNO_CLASS_COST (a);
	  ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
	}
}
2058
/* Create allocnos corresponding to pseudo-registers in the current
   function.  Traverse the loop tree for this.  */
static void
create_allocnos (void)
{
  /* We need to process BB first to correctly link allocnos by member
     next_regno_allocno.  */
  ira_traverse_loop_tree (true, ira_loop_tree_root,
			  create_loop_tree_node_allocnos, NULL);
  /* Propagate the modified-regno sets bottom-up, but only when
     optimizing.  */
  if (optimize)
    ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
			    propagate_modified_regnos);
}
2072
2073\f
2074
/* This page contains functions that remove some regions from
   separate register allocation.  We remove regions whose separate
   allocation will hardly improve the result.  As a result we speed
   up regional register allocation.  */
2079
9140d27b 2080/* The function changes the object in range list given by R to OBJ. */
058e97ec 2081static void
9140d27b 2082change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
2083{
2084 for (; r != NULL; r = r->next)
9140d27b 2085 r->object = obj;
058e97ec
VM
2086}
2087
/* Move all live ranges associated with allocno FROM to allocno TO.
   FROM's objects are left with empty range lists.  */
static void
move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  /* Ranges are moved object-by-object, so both allocnos must have
     the same number of objects (words).  */
  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	{
	  fprintf (ira_dump_file,
		   "      Moving ranges of a%dr%d to a%dr%d: ",
		   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
		   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
	  ira_print_live_range_list (ira_dump_file, lr);
	}
      /* Re-point the moved ranges at TO's object, then merge them
	 into TO's existing range list.  */
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
	= ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
      OBJECT_LIVE_RANGES (from_obj) = NULL;
    }
}
2117
/* Copy all live ranges associated with allocno FROM to allocno TO;
   unlike move_allocno_live_ranges, FROM keeps its own ranges.  */
static void
copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	{
	  fprintf (ira_dump_file, "      Copying ranges of a%dr%d to a%dr%d: ",
		   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
		   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
	  ira_print_live_range_list (ira_dump_file, lr);
	}
      /* Duplicate the list first so FROM's ranges stay intact.  */
      lr = ira_copy_live_range_list (lr);
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
	= ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
    }
}
2145
058e97ec
VM
2146/* Return TRUE if NODE represents a loop with low register
2147 pressure. */
2148static bool
2149low_pressure_loop_node_p (ira_loop_tree_node_t node)
2150{
2151 int i;
1756cb66 2152 enum reg_class pclass;
b8698a0f 2153
058e97ec
VM
2154 if (node->bb != NULL)
2155 return false;
b8698a0f 2156
1756cb66 2157 for (i = 0; i < ira_pressure_classes_num; i++)
058e97ec 2158 {
1756cb66 2159 pclass = ira_pressure_classes[i];
f508f827
RS
2160 if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
2161 && ira_class_hard_regs_num[pclass] > 1)
058e97ec
VM
2162 return false;
2163 }
2164 return true;
2165}
2166
#ifdef STACK_REGS
/* Return TRUE if LOOP has a complex enter or exit edge.  We don't
   form a region from such loop if the target use stack register
   because reg-stack.c can not deal with such edges.  */
static bool
loop_with_complex_edge_p (struct loop *loop)
{
  int i;
  edge_iterator ei;
  edge e;
  vec<edge> edges;
  bool res;

  /* An EH edge into the loop header rejects the loop outright.  */
  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->flags & EDGE_EH)
      return true;
  /* Likewise any complex exit edge.  The vec must be released, so
     record the result and fall through instead of returning early.  */
  edges = get_loop_exit_edges (loop);
  res = false;
  FOR_EACH_VEC_ELT (edges, i, e)
    if (e->flags & EDGE_COMPLEX)
      {
	res = true;
	break;
      }
  edges.release ();
  return res;
}
#endif
8c5fdaae 2195
30ea859e
VM
2196/* Sort loops for marking them for removal. We put already marked
2197 loops first, then less frequent loops next, and then outer loops
2198 next. */
2199static int
2200loop_compare_func (const void *v1p, const void *v2p)
2201{
2202 int diff;
2203 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
2204 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
2205
2206 ira_assert (l1->parent != NULL && l2->parent != NULL);
2207 if (l1->to_remove_p && ! l2->to_remove_p)
2208 return -1;
2209 if (! l1->to_remove_p && l2->to_remove_p)
2210 return 1;
2211 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
2212 return diff;
2213 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
2214 return diff;
2215 /* Make sorting stable. */
2608d841 2216 return l1->loop_num - l2->loop_num;
30ea859e
VM
2217}
2218
/* Mark loops which should be removed from regional allocation.  We
   remove a loop with low register pressure inside another loop with
   register pressure.  In this case a separate allocation of the loop
   hardly helps (for irregular register file architecture it could
   help by choosing a better hard register in the loop but we prefer
   faster allocation even in this case).  We also remove cheap loops
   if there are more than IRA_MAX_LOOPS_NUM of them.  Loop with EH
   exit or enter edges are removed too because the allocation might
   require put pseudo moves on the EH edges (we could still do this
   for pseudos with caller saved hard registers in some cases but it
   is impossible to say here or during top-down allocation pass what
   hard register the pseudos get finally).  */
static void
mark_loops_for_removal (void)
{
  int i, n;
  ira_loop_tree_node_t *sorted_loops;
  loop_p loop;

  ira_assert (current_loops != NULL);
  sorted_loops
    = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
					     * number_of_loops (cfun));
  /* First pass: pre-mark loops that are removable on their own
     merits (low pressure inside low pressure, or complex edges).  */
  for (n = i = 0; vec_safe_iterate (get_loops (cfun), i, &loop); i++)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
	if (ira_loop_nodes[i].parent == NULL)
	  {
	    /* Don't remove the root.  */
	    ira_loop_nodes[i].to_remove_p = false;
	    continue;
	  }
	sorted_loops[n++] = &ira_loop_nodes[i];
	ira_loop_nodes[i].to_remove_p
	  = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
	      && low_pressure_loop_node_p (&ira_loop_nodes[i]))
#ifdef STACK_REGS
	     || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
#endif
	     );
      }
  /* Second pass: if still over the IRA_MAX_LOOPS_NUM budget, mark
     the cheapest remaining loops (sorted marked-first, then by
     frequency and depth) until the budget is met.  */
  qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
  for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
    {
      sorted_loops[i]->to_remove_p = true;
      if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
	fprintf
	  (ira_dump_file,
	   "  Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
	   sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
	   sorted_loops[i]->loop->header->frequency,
	   loop_depth (sorted_loops[i]->loop),
	   low_pressure_loop_node_p (sorted_loops[i]->parent)
	   && low_pressure_loop_node_p (sorted_loops[i])
	   ? "low pressure" : "cheap loop");
    }
  ira_free (sorted_loops);
}
2277
/* Mark all loops but root for removing.  */
static void
mark_all_loops_for_removal (void)
{
  int i;
  loop_p loop;

  ira_assert (current_loops != NULL);
  FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
	if (ira_loop_nodes[i].parent == NULL)
	  {
	    /* Don't remove the root.  */
	    ira_loop_nodes[i].to_remove_p = false;
	    continue;
	  }
	ira_loop_nodes[i].to_remove_p = true;
	if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
	  fprintf
	    (ira_dump_file,
	     "  Mark loop %d (header %d, freq %d, depth %d) for removal\n",
	     ira_loop_nodes[i].loop_num,
	     ira_loop_nodes[i].loop->header->index,
	     ira_loop_nodes[i].loop->header->frequency,
	     loop_depth (ira_loop_nodes[i].loop));
      }
}
30ea859e 2306
/* Definition of vector of loop tree nodes.  */

/* Vec containing references to all removed loop tree nodes; the
   nodes are finished and the vec is drained by
   remove_unnecessary_regions.  */
static vec<ira_loop_tree_node_t> removed_loop_vec;

/* Vec containing references to all children of loop tree nodes.
   Used as a temporary work stack by
   remove_uneccesary_loop_nodes_from_loop_tree.  */
static vec<ira_loop_tree_node_t> children_vec;
058e97ec
VM
2314
/* Remove subregions of NODE if their separate allocation will not
   improve the result.  */
static void
remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
{
  unsigned int start;
  bool remove_p;
  ira_loop_tree_node_t subnode;

  remove_p = node->to_remove_p;
  if (! remove_p)
    children_vec.safe_push (node);
  /* Everything pushed past START belongs to NODE's (surviving)
     subtree; entries below START belong to ancestors.  */
  start = children_vec.length ();
  for (subnode = node->children; subnode != NULL; subnode = subnode->next)
    if (subnode->bb == NULL)
      remove_uneccesary_loop_nodes_from_loop_tree (subnode);
    else
      children_vec.safe_push (subnode);
  node->children = node->subloops = NULL;
  if (remove_p)
    {
      /* NODE itself is removed: its survivors stay on the stack and
	 will be re-linked to the nearest surviving ancestor.  */
      removed_loop_vec.safe_push (node);
      return;
    }
  /* Re-link the surviving children collected above to NODE,
     rebuilding both the children and subloops lists.  */
  while (children_vec.length () > start)
    {
      subnode = children_vec.pop ();
      subnode->parent = node;
      subnode->next = node->children;
      node->children = subnode;
      if (subnode->bb == NULL)
	{
	  subnode->subloop_next = node->subloops;
	  node->subloops = subnode;
	}
    }
}
2352
c6bb4c93
VM
2353/* Return TRUE if NODE is inside PARENT. */
2354static bool
2355loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2356{
2357 for (node = node->parent; node != NULL; node = node->parent)
2358 if (node == parent)
2359 return true;
2360 return false;
2361}
2362
2363/* Sort allocnos according to their order in regno allocno list. */
2364static int
2365regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2366{
2367 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2368 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2369 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2370 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2371
2372 if (loop_is_inside_p (n1, n2))
2373 return -1;
2374 else if (loop_is_inside_p (n2, n1))
2375 return 1;
2376 /* If allocnos are equally good, sort by allocno numbers, so that
2377 the results of qsort leave nothing to chance. We put allocnos
2378 with higher number first in the list because it is the original
2379 order for allocnos from loops on the same levels. */
2380 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2381}
2382
/* This array is used to sort allocnos to restore allocno order in
   the regno allocno list.  */
static ira_allocno_t *regno_allocnos;

/* Restore allocno order for REGNO in the regno allocno list.  The
   caller must have allocated REGNO_ALLOCNOS with room for every
   allocno of the regno.  */
static void
ira_rebuild_regno_allocno_list (int regno)
{
  int i, n;
  ira_allocno_t a;

  /* Gather the chain into the scratch array.  */
  for (n = 0, a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    regno_allocnos[n++] = a;
  ira_assert (n > 0);
  qsort (regno_allocnos, n, sizeof (ira_allocno_t),
	 regno_allocno_order_compare_func);
  /* Re-thread the sorted allocnos into a singly-linked list.  */
  for (i = 1; i < n; i++)
    ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
  ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
  ira_regno_allocno_map[regno] = regno_allocnos[0];
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, "  Rebuilding regno allocno list for %d\n", regno);
}
2408
/* Propagate info from allocno FROM_A to allocno A: conflicts,
   frequencies, call-crossing data and costs are accumulated into A.
   FROM_A itself is not modified.  */
static void
propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
{
  enum reg_class aclass;

  merge_hard_reg_conflicts (from_a, a, false);
  ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
  ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
  ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
  ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
    += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a),
		    ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (from_a));

  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
  /* bad_spill_p may only be weakened (true -> false).  */
  if (! ALLOCNO_BAD_SPILL_P (from_a))
    ALLOCNO_BAD_SPILL_P (a) = false;
  /* Cost vectors are per-class; both allocnos must agree.  */
  aclass = ALLOCNO_CLASS (from_a);
  ira_assert (aclass == ALLOCNO_CLASS (a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
				     ALLOCNO_HARD_REG_COSTS (from_a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
				     aclass,
				     ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
  ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
  ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
}
2439
/* Remove allocnos from loops removed from the allocation
   consideration.  */
static void
remove_unnecessary_allocnos (void)
{
  int regno;
  bool merged_p, rebuild_p;
  ira_allocno_t a, prev_a, next_a, parent_a;
  ira_loop_tree_node_t a_node, parent;

  merged_p = false;
  regno_allocnos = NULL;
  for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
    {
      rebuild_p = false;
      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
	   a != NULL;
	   a = next_a)
	{
	  next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
	  a_node = ALLOCNO_LOOP_TREE_NODE (a);
	  if (! a_node->to_remove_p)
	    prev_a = a;
	  else
	    {
	      /* Find the nearest surviving ancestor, or a surviving
		 allocno of the same regno on the way up.  */
	      for (parent = a_node->parent;
		   (parent_a = parent->regno_allocno_map[regno]) == NULL
		     && parent->to_remove_p;
		   parent = parent->parent)
		;
	      if (parent_a == NULL)
		{
		  /* There are no allocnos with the same regno in
		     upper region -- just move the allocno to the
		     upper region.  */
		  prev_a = a;
		  ALLOCNO_LOOP_TREE_NODE (a) = parent;
		  parent->regno_allocno_map[regno] = a;
		  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
		  rebuild_p = true;
		}
	      else
		{
		  /* Remove the allocno and update info of allocno in
		     the upper region.  */
		  if (prev_a == NULL)
		    ira_regno_allocno_map[regno] = next_a;
		  else
		    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
		  move_allocno_live_ranges (a, parent_a);
		  merged_p = true;
		  propagate_some_info_from_allocno (parent_a, a);
		  /* Remove it from the corresponding regno allocno
		     map to avoid info propagation of subsequent
		     allocno into this already removed allocno.  */
		  a_node->regno_allocno_map[regno] = NULL;
		  ira_remove_allocno_prefs (a);
		  finish_allocno (a);
		}
	    }
	}
      if (rebuild_p)
	/* We need to restore the order in regno allocno list.  */
	{
	  if (regno_allocnos == NULL)
	    regno_allocnos
	      = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
						* ira_allocnos_num);
	  ira_rebuild_regno_allocno_list (regno);
	}
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
  if (regno_allocnos != NULL)
    ira_free (regno_allocnos);
}
2516
/* Remove allocnos from all loops but the root.  */
static void
remove_low_level_allocnos (void)
{
  int regno;
  bool merged_p, propagate_p;
  ira_allocno_t a, top_a;
  ira_loop_tree_node_t a_node, parent;
  ira_allocno_iterator ai;

  merged_p = false;
  /* Pass 1: for each non-root, non-cap allocno either promote it to
     the root (if the root has no allocno for the regno yet) or merge
     its ranges/info into the root's allocno.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
	continue;
      regno = ALLOCNO_REGNO (a);
      if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
	{
	  ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
	  ira_loop_tree_root->regno_allocno_map[regno] = a;
	  continue;
	}
      /* Propagate info only once per regno: when the immediate
	 parent has no allocno for it.  */
      propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
      /* Remove the allocno and update info of allocno in the upper
	 region.  */
      move_allocno_live_ranges (a, top_a);
      merged_p = true;
      if (propagate_p)
	propagate_some_info_from_allocno (top_a, a);
    }
  /* Pass 2: consistency checks on the remaining non-root allocnos.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root)
	continue;
      parent = a_node->parent;
      regno = ALLOCNO_REGNO (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (parent->regno_allocno_map[regno] != NULL);
    }
  /* Pass 3: keep only the root's allocnos, fix up their fields, and
     free everything else.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      regno = ALLOCNO_REGNO (a);
      if (ira_loop_tree_root->regno_allocno_map[regno] == a)
	{
	  ira_object_t obj;
	  ira_allocno_object_iterator oi;

	  ira_regno_allocno_map[regno] = a;
	  ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
	  ALLOCNO_CAP_MEMBER (a) = NULL;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
			       OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
	  if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
	    ALLOCNO_NO_STACK_REG_P (a) = true;
#endif
	}
      else
	{
	  ira_remove_allocno_prefs (a);
	  finish_allocno (a);
	}
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
}
2588
/* Remove loops from consideration.  We remove all loops except for
   root if ALL_P or loops for which a separate allocation will not
   improve the result.  We have to do this after allocno creation and
   their costs and allocno class evaluation because only after that
   the register pressure can be known and is calculated.  */
static void
remove_unnecessary_regions (bool all_p)
{
  if (current_loops == NULL)
    return;
  if (all_p)
    mark_all_loops_for_removal ();
  else
    mark_loops_for_removal ();
  /* Worst case: one entry per basic block plus one per loop.  */
  children_vec.create (last_basic_block_for_fn (cfun)
		       + number_of_loops (cfun));
  removed_loop_vec.create (last_basic_block_for_fn (cfun)
			   + number_of_loops (cfun));
  remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
  children_vec.release ();
  if (all_p)
    remove_low_level_allocnos ();
  else
    remove_unnecessary_allocnos ();
  /* Free the tree nodes collected during the tree walk.  */
  while (removed_loop_vec.length () > 0)
    finish_loop_tree_node (removed_loop_vec.pop ());
  removed_loop_vec.release ();
}
2617
2618\f
2619
/* At this point true value of allocno attribute bad_spill_p means
   that there is an insn where allocno occurs and where the allocno
   can not be used as memory.  The function updates the attribute, now
   it can be true only for allocnos which can not be used as memory in
   an insn and in whose live ranges there is other allocno deaths.
   Spilling allocnos with true value will not improve the code because
   it will not make other allocnos colorable and additional reloads
   for the corresponding pseudo will be generated in reload pass for
   each insn it occurs.

   This is a trick mentioned in one classic article of Chaitin etc
   which is frequently omitted in other implementations of RA based on
   graph coloring.  */
static void
update_bad_spill_attribute (void)
{
  int i;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_allocno_object_iterator aoi;
  ira_object_t obj;
  live_range_t r;
  enum reg_class aclass;
  bitmap_head dead_points[N_REG_CLASSES];

  /* For every allocno class, collect the program points at which
     some object of that class dies (its range finish).  */
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_initialize (&dead_points[aclass], &reg_obstack);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	  bitmap_set_bit (&dead_points[aclass], r->finish);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
	continue;
      if (! ALLOCNO_BAD_SPILL_P (a))
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	{
	  /* Scan for a death of another same-class object strictly
	     inside one of A's live ranges.  If one exists, spilling A
	     could free a register where it is needed, so A is not a
	     hopeless spill -- clear the flag.  */
	  for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	    {
	      for (i = r->start + 1; i < r->finish; i++)
		if (bitmap_bit_p (&dead_points[aclass], i))
		  break;
	      if (i < r->finish)
		break;
	    }
	  if (r != NULL)
	    {
	      ALLOCNO_BAD_SPILL_P (a) = false;
	      break;
	    }
	}
    }
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_clear (&dead_points[aclass]);
    }
}
2689
2690\f
2691
/* Set up minimal and maximal live range points for allocnos.  */
static void
setup_min_max_allocno_live_range_point (void)
{
  int i;
  ira_allocno_t a, parent_a, cap;
  ira_allocno_iterator ai;
#ifdef ENABLE_IRA_CHECKING
  ira_object_iterator oi;
  ira_object_t obj;
#endif
  live_range_t r;
  ira_loop_tree_node_t parent;

  /* The range list is ordered with the latest range first, so the
     head gives MAX and the tail gives MIN.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);

      for (i = 0; i < n; i++)
	{
	  ira_object_t obj = ALLOCNO_OBJECT (a, i);
	  r = OBJECT_LIVE_RANGES (obj);
	  if (r == NULL)
	    continue;
	  OBJECT_MAX (obj) = r->finish;
	  for (; r->next != NULL; r = r->next)
	    ;
	  OBJECT_MIN (obj) = r->start;
	}
    }
  /* Accumulate min/max bottom-up: into cap chains, or into the
     parent region's allocno for the same regno.  */
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      {
	int j;
	int n = ALLOCNO_NUM_OBJECTS (a);

	for (j = 0; j < n; j++)
	  {
	    ira_object_t obj = ALLOCNO_OBJECT (a, j);
	    ira_object_t parent_obj;

	    if (OBJECT_MAX (obj) < 0)
	      continue;
	    ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	    /* Accumulation of range info.  */
	    if (ALLOCNO_CAP (a) != NULL)
	      {
		for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
		  {
		    ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
		    if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
		      OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
		    if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
		      OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
		  }
		continue;
	      }
	    if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
	      continue;
	    parent_a = parent->regno_allocno_map[i];
	    parent_obj = ALLOCNO_OBJECT (parent_a, j);
	    if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
	      OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
	    if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
	      OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
	  }
      }
#ifdef ENABLE_IRA_CHECKING
  /* Verify every object's min/max landed inside [0, ira_max_point].  */
  FOR_EACH_OBJECT (obj, oi)
    {
      if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
	  && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
	continue;
      gcc_unreachable ();
    }
#endif
}
2771
/* Sort objects according to their live ranges.  Objects are ordered
   by their start point (min); objects with the same start are
   ordered by their finish point (max), with remaining ties broken by
   allocno number so the qsort result is deterministic.  (No
   allocno-class comparison is performed here.)  */
static int
object_range_compare_func (const void *v1p, const void *v2p)
{
  int diff;
  ira_object_t obj1 = *(const ira_object_t *) v1p;
  ira_object_t obj2 = *(const ira_object_t *) v2p;
  ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
  ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);

  if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
    return diff;
  if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
    return diff;
  return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
}
2792
/* Sort ira_object_id_map and set up conflict id of allocnos.  */
static void
sort_conflict_id_map (void)
{
  int i, num;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  /* Repack the map with every live object, then sort by range.  */
  num = 0;
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	ira_object_id_map[num++] = obj;
    }
  if (num > 1)
    qsort (ira_object_id_map, num, sizeof (ira_object_t),
	   object_range_compare_func);
  /* Conflict id is the object's position in the sorted map.  */
  for (i = 0; i < num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      gcc_assert (obj != NULL);
      OBJECT_CONFLICT_ID (obj) = i;
    }
  /* Clear any stale tail entries.  */
  for (i = num; i < ira_objects_num; i++)
    ira_object_id_map[i] = NULL;
}
2823
2824/* Set up minimal and maximal conflict ids of allocnos with which
2825 given allocno can conflict. */
2826static void
2827setup_min_max_conflict_allocno_ids (void)
2828{
1756cb66 2829 int aclass;
058e97ec
VM
2830 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
2831 int *live_range_min, *last_lived;
ac0ab4f7 2832 int word0_min, word0_max;
058e97ec 2833 ira_allocno_t a;
ac0ab4f7 2834 ira_allocno_iterator ai;
058e97ec 2835
a49ae217 2836 live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
1756cb66 2837 aclass = -1;
058e97ec 2838 first_not_finished = -1;
a49ae217 2839 for (i = 0; i < ira_objects_num; i++)
058e97ec 2840 {
a49ae217 2841 ira_object_t obj = ira_object_id_map[i];
1756cb66 2842
a49ae217 2843 if (obj == NULL)
058e97ec 2844 continue;
a49ae217
BS
2845
2846 a = OBJECT_ALLOCNO (obj);
2847
1756cb66 2848 if (aclass < 0)
058e97ec 2849 {
1756cb66 2850 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2851 min = i;
2852 first_not_finished = i;
2853 }
2854 else
2855 {
a49ae217 2856 start = OBJECT_MIN (obj);
058e97ec
VM
2857 /* If we skip an allocno, the allocno with smaller ids will
2858 be also skipped because of the secondary sorting the
2859 range finishes (see function
ac0ab4f7 2860 object_range_compare_func). */
058e97ec 2861 while (first_not_finished < i
a49ae217 2862 && start > OBJECT_MAX (ira_object_id_map
ac0ab4f7 2863 [first_not_finished]))
058e97ec
VM
2864 first_not_finished++;
2865 min = first_not_finished;
b8698a0f 2866 }
058e97ec
VM
2867 if (min == i)
2868 /* We could increase min further in this case but it is good
2869 enough. */
2870 min++;
a49ae217
BS
2871 live_range_min[i] = OBJECT_MIN (obj);
2872 OBJECT_MIN (obj) = min;
058e97ec
VM
2873 }
2874 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
1756cb66 2875 aclass = -1;
058e97ec 2876 filled_area_start = -1;
a49ae217 2877 for (i = ira_objects_num - 1; i >= 0; i--)
058e97ec 2878 {
a49ae217 2879 ira_object_t obj = ira_object_id_map[i];
1756cb66 2880
a49ae217 2881 if (obj == NULL)
058e97ec 2882 continue;
a49ae217
BS
2883
2884 a = OBJECT_ALLOCNO (obj);
1756cb66 2885 if (aclass < 0)
058e97ec 2886 {
1756cb66 2887 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2888 for (j = 0; j < ira_max_point; j++)
2889 last_lived[j] = -1;
2890 filled_area_start = ira_max_point;
2891 }
2892 min = live_range_min[i];
a49ae217 2893 finish = OBJECT_MAX (obj);
058e97ec
VM
2894 max = last_lived[finish];
2895 if (max < 0)
2896 /* We could decrease max further in this case but it is good
2897 enough. */
a49ae217
BS
2898 max = OBJECT_CONFLICT_ID (obj) - 1;
2899 OBJECT_MAX (obj) = max;
058e97ec
VM
2900 /* In filling, we can go further A range finish to recognize
2901 intersection quickly because if the finish of subsequently
2902 processed allocno (it has smaller conflict id) range is
2903 further A range finish than they are definitely intersected
2904 (the reason for this is the allocnos with bigger conflict id
2905 have their range starts not smaller than allocnos with
2906 smaller ids. */
2907 for (j = min; j < filled_area_start; j++)
2908 last_lived[j] = i;
2909 filled_area_start = min;
2910 }
2911 ira_free (last_lived);
2912 ira_free (live_range_min);
ac0ab4f7
BS
2913
2914 /* For allocnos with more than one object, we may later record extra conflicts in
2915 subobject 0 that we cannot really know about here.
2916 For now, simply widen the min/max range of these subobjects. */
2917
2918 word0_min = INT_MAX;
2919 word0_max = INT_MIN;
2920
2921 FOR_EACH_ALLOCNO (a, ai)
2922 {
2923 int n = ALLOCNO_NUM_OBJECTS (a);
2924 ira_object_t obj0;
1756cb66 2925
ac0ab4f7
BS
2926 if (n < 2)
2927 continue;
2928 obj0 = ALLOCNO_OBJECT (a, 0);
2929 if (OBJECT_CONFLICT_ID (obj0) < word0_min)
2930 word0_min = OBJECT_CONFLICT_ID (obj0);
2931 if (OBJECT_CONFLICT_ID (obj0) > word0_max)
2932 word0_max = OBJECT_CONFLICT_ID (obj0);
2933 }
2934 FOR_EACH_ALLOCNO (a, ai)
2935 {
2936 int n = ALLOCNO_NUM_OBJECTS (a);
2937 ira_object_t obj0;
1756cb66 2938
ac0ab4f7
BS
2939 if (n < 2)
2940 continue;
2941 obj0 = ALLOCNO_OBJECT (a, 0);
2942 if (OBJECT_MIN (obj0) > word0_min)
2943 OBJECT_MIN (obj0) = word0_min;
2944 if (OBJECT_MAX (obj0) < word0_max)
2945 OBJECT_MAX (obj0) = word0_max;
2946 }
058e97ec
VM
2947}
2948
2949\f
2950
2951static void
2952create_caps (void)
2953{
2954 ira_allocno_t a;
2955 ira_allocno_iterator ai;
2956 ira_loop_tree_node_t loop_tree_node;
2957
2958 FOR_EACH_ALLOCNO (a, ai)
2959 {
2960 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2961 continue;
2962 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2963 create_cap_allocno (a);
2964 else if (ALLOCNO_CAP (a) == NULL)
2965 {
2966 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2967 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2968 create_cap_allocno (a);
2969 }
2970 }
2971}
2972
2973\f
2974
/* This page contains code that transforms the internal representation
   (IR) built for multiple regions into a one-region IR, which is what
   reload requires.  The transformation is called IR flattening.  We
   could simply rebuild the IR for a single region, but we do not do
   that because it takes a lot of time.  */
2980
82b33628
VM
2981/* Map: regno -> allocnos which will finally represent the regno for
2982 IR with one region. */
2983static ira_allocno_t *regno_top_level_allocno_map;
2984
029da7d4
BS
2985/* Find the allocno that corresponds to A at a level one higher up in the
2986 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2987ira_allocno_t
2988ira_parent_allocno (ira_allocno_t a)
2989{
2990 ira_loop_tree_node_t parent;
2991
2992 if (ALLOCNO_CAP (a) != NULL)
2993 return NULL;
2994
2995 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2996 if (parent == NULL)
2997 return NULL;
2998
2999 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
3000}
3001
3002/* Find the allocno that corresponds to A at a level one higher up in the
3003 loop tree. If ALLOCNO_CAP is set for A, return that. */
3004ira_allocno_t
3005ira_parent_or_cap_allocno (ira_allocno_t a)
3006{
3007 if (ALLOCNO_CAP (a) != NULL)
3008 return ALLOCNO_CAP (a);
3009
3010 return ira_parent_allocno (a);
3011}
3012
82b33628 3013/* Process all allocnos originated from pseudo REGNO and copy live
801f03e3
VM
3014 ranges, hard reg conflicts, and allocno stack reg attributes from
3015 low level allocnos to final allocnos which are destinations of
3016 removed stores at a loop exit. Return true if we copied live
3017 ranges. */
82b33628 3018static bool
801f03e3 3019copy_info_to_removed_store_destinations (int regno)
82b33628 3020{
504b33d8
ILT
3021 ira_allocno_t a;
3022 ira_allocno_t parent_a = NULL;
82b33628 3023 ira_loop_tree_node_t parent;
82b33628
VM
3024 bool merged_p;
3025
3026 merged_p = false;
3027 for (a = ira_regno_allocno_map[regno];
3028 a != NULL;
3029 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3030 {
1756cb66 3031 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
82b33628
VM
3032 /* This allocno will be removed. */
3033 continue;
ac0ab4f7 3034
82b33628
VM
3035 /* Caps will be removed. */
3036 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
3037 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
3038 parent != NULL;
3039 parent = parent->parent)
3040 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
1756cb66
VM
3041 || (parent_a
3042 == regno_top_level_allocno_map[REGNO
3043 (allocno_emit_reg (parent_a))]
3044 && ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
82b33628
VM
3045 break;
3046 if (parent == NULL || parent_a == NULL)
3047 continue;
ac0ab4f7 3048
3c55880a
BS
3049 copy_allocno_live_ranges (a, parent_a);
3050 merge_hard_reg_conflicts (a, parent_a, true);
ac0ab4f7 3051
ea1c67e6
VM
3052 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3053 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3054 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3055 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3056 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
c2ba7e7a
RO
3057 IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (parent_a),
3058 ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a));
ea1c67e6
VM
3059 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3060 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
82b33628
VM
3061 merged_p = true;
3062 }
3063 return merged_p;
058e97ec
VM
3064}
3065
3066/* Flatten the IR. In other words, this function transforms IR as if
3067 it were built with one region (without loops). We could make it
3068 much simpler by rebuilding IR with one region, but unfortunately it
3069 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
3070 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
3071 IRA_MAX_POINT before emitting insns on the loop borders. */
3072void
3073ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
3074{
9140d27b 3075 int i, j;
ea1c67e6 3076 bool keep_p;
058e97ec 3077 int hard_regs_num;
82b33628 3078 bool new_pseudos_p, merged_p, mem_dest_p;
058e97ec 3079 unsigned int n;
1756cb66 3080 enum reg_class aclass;
058e97ec 3081 ira_allocno_t a, parent_a, first, second, node_first, node_second;
058e97ec 3082 ira_copy_t cp;
029da7d4 3083 ira_loop_tree_node_t node;
b14151b5 3084 live_range_t r;
058e97ec
VM
3085 ira_allocno_iterator ai;
3086 ira_copy_iterator ci;
058e97ec
VM
3087
3088 regno_top_level_allocno_map
1756cb66
VM
3089 = (ira_allocno_t *) ira_allocate (max_reg_num ()
3090 * sizeof (ira_allocno_t));
058e97ec
VM
3091 memset (regno_top_level_allocno_map, 0,
3092 max_reg_num () * sizeof (ira_allocno_t));
058e97ec 3093 new_pseudos_p = merged_p = false;
0ca9fa56
VM
3094 FOR_EACH_ALLOCNO (a, ai)
3095 {
ac0ab4f7
BS
3096 ira_allocno_object_iterator oi;
3097 ira_object_t obj;
1756cb66 3098
0ca9fa56
VM
3099 if (ALLOCNO_CAP_MEMBER (a) != NULL)
3100 /* Caps are not in the regno allocno maps and they are never
3101 will be transformed into allocnos existing after IR
3102 flattening. */
3103 continue;
ac0ab4f7
BS
3104 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
3105 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
3106 OBJECT_CONFLICT_HARD_REGS (obj));
0ca9fa56
VM
3107#ifdef STACK_REGS
3108 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
3109#endif
3110 }
058e97ec
VM
3111 /* Fix final allocno attributes. */
3112 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
3113 {
0ca9fa56 3114 mem_dest_p = false;
058e97ec
VM
3115 for (a = ira_regno_allocno_map[i];
3116 a != NULL;
3117 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3118 {
1756cb66
VM
3119 ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);
3120
058e97ec 3121 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
1756cb66 3122 if (data->somewhere_renamed_p)
058e97ec 3123 new_pseudos_p = true;
029da7d4
BS
3124 parent_a = ira_parent_allocno (a);
3125 if (parent_a == NULL)
058e97ec
VM
3126 {
3127 ALLOCNO_COPIES (a) = NULL;
1756cb66 3128 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec
VM
3129 continue;
3130 }
3131 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
b8698a0f 3132
1756cb66 3133 if (data->mem_optimized_dest != NULL)
82b33628 3134 mem_dest_p = true;
1756cb66
VM
3135 parent_data = ALLOCNO_EMIT_DATA (parent_a);
3136 if (REGNO (data->reg) == REGNO (parent_data->reg))
058e97ec 3137 {
3c55880a
BS
3138 merge_hard_reg_conflicts (a, parent_a, true);
3139 move_allocno_live_ranges (a, parent_a);
058e97ec 3140 merged_p = true;
1756cb66
VM
3141 parent_data->mem_optimized_dest_p
3142 = (parent_data->mem_optimized_dest_p
3143 || data->mem_optimized_dest_p);
058e97ec
VM
3144 continue;
3145 }
3146 new_pseudos_p = true;
058e97ec
VM
3147 for (;;)
3148 {
058e97ec
VM
3149 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
3150 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
ea1c67e6
VM
3151 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
3152 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3153 -= ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3154 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3155 -= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
ea1c67e6
VM
3156 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3157 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
058e97ec
VM
3158 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
3159 && ALLOCNO_NREFS (parent_a) >= 0
3160 && ALLOCNO_FREQ (parent_a) >= 0);
1756cb66
VM
3161 aclass = ALLOCNO_CLASS (parent_a);
3162 hard_regs_num = ira_class_hard_regs_num[aclass];
058e97ec
VM
3163 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
3164 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
3165 for (j = 0; j < hard_regs_num; j++)
3166 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
3167 -= ALLOCNO_HARD_REG_COSTS (a)[j];
3168 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
3169 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
3170 for (j = 0; j < hard_regs_num; j++)
3171 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
3172 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
1756cb66
VM
3173 ALLOCNO_CLASS_COST (parent_a)
3174 -= ALLOCNO_CLASS_COST (a);
058e97ec 3175 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
029da7d4
BS
3176 parent_a = ira_parent_allocno (parent_a);
3177 if (parent_a == NULL)
058e97ec
VM
3178 break;
3179 }
058e97ec 3180 ALLOCNO_COPIES (a) = NULL;
1756cb66 3181 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec 3182 }
801f03e3 3183 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
82b33628 3184 merged_p = true;
058e97ec 3185 }
058e97ec
VM
3186 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
3187 if (merged_p || ira_max_point_before_emit != ira_max_point)
3188 ira_rebuild_start_finish_chains ();
3189 if (new_pseudos_p)
3190 {
9140d27b
BS
3191 sparseset objects_live;
3192
058e97ec
VM
3193 /* Rebuild conflicts. */
3194 FOR_EACH_ALLOCNO (a, ai)
3195 {
ac0ab4f7
BS
3196 ira_allocno_object_iterator oi;
3197 ira_object_t obj;
1756cb66
VM
3198
3199 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3200 || ALLOCNO_CAP_MEMBER (a) != NULL)
3201 continue;
ac0ab4f7
BS
3202 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
3203 {
3204 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3205 ira_assert (r->object == obj);
3206 clear_conflicts (obj);
3207 }
058e97ec 3208 }
9140d27b 3209 objects_live = sparseset_alloc (ira_objects_num);
058e97ec
VM
3210 for (i = 0; i < ira_max_point; i++)
3211 {
3212 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
3213 {
9140d27b 3214 ira_object_t obj = r->object;
1756cb66 3215
9140d27b 3216 a = OBJECT_ALLOCNO (obj);
1756cb66 3217 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3218 || ALLOCNO_CAP_MEMBER (a) != NULL)
3219 continue;
ac0ab4f7 3220
1756cb66 3221 aclass = ALLOCNO_CLASS (a);
9140d27b 3222 EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
058e97ec 3223 {
9140d27b
BS
3224 ira_object_t live_obj = ira_object_id_map[n];
3225 ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
1756cb66
VM
3226 enum reg_class live_aclass = ALLOCNO_CLASS (live_a);
3227
3228 if (ira_reg_classes_intersect_p[aclass][live_aclass]
058e97ec 3229 /* Don't set up conflict for the allocno with itself. */
9140d27b
BS
3230 && live_a != a)
3231 ira_add_conflict (obj, live_obj);
058e97ec 3232 }
3feb0298 3233 sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
058e97ec 3234 }
b8698a0f 3235
058e97ec 3236 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
9140d27b 3237 sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
058e97ec 3238 }
9140d27b 3239 sparseset_free (objects_live);
058e97ec
VM
3240 compress_conflict_vecs ();
3241 }
3242 /* Mark some copies for removing and change allocnos in the rest
3243 copies. */
3244 FOR_EACH_COPY (cp, ci)
3245 {
3246 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
3247 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
3248 {
3249 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3250 fprintf
3251 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
3252 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
1756cb66
VM
3253 ALLOCNO_NUM (cp->first),
3254 REGNO (allocno_emit_reg (cp->first)),
058e97ec 3255 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
1756cb66
VM
3256 ALLOCNO_NUM (cp->second),
3257 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3258 cp->loop_tree_node = NULL;
3259 continue;
3260 }
1756cb66
VM
3261 first
3262 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
3263 second
3264 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
058e97ec
VM
3265 node = cp->loop_tree_node;
3266 if (node == NULL)
3267 keep_p = true; /* It copy generated in ira-emit.c. */
3268 else
3269 {
3270 /* Check that the copy was not propagated from level on
3271 which we will have different pseudos. */
3272 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
3273 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
1756cb66
VM
3274 keep_p = ((REGNO (allocno_emit_reg (first))
3275 == REGNO (allocno_emit_reg (node_first)))
3276 && (REGNO (allocno_emit_reg (second))
3277 == REGNO (allocno_emit_reg (node_second))));
058e97ec
VM
3278 }
3279 if (keep_p)
3280 {
3281 cp->loop_tree_node = ira_loop_tree_root;
3282 cp->first = first;
3283 cp->second = second;
3284 }
3285 else
3286 {
3287 cp->loop_tree_node = NULL;
3288 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3289 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
3290 cp->num, ALLOCNO_NUM (cp->first),
1756cb66
VM
3291 REGNO (allocno_emit_reg (cp->first)),
3292 ALLOCNO_NUM (cp->second),
3293 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3294 }
3295 }
3296 /* Remove unnecessary allocnos on lower levels of the loop tree. */
3297 FOR_EACH_ALLOCNO (a, ai)
3298 {
1756cb66 3299 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3300 || ALLOCNO_CAP_MEMBER (a) != NULL)
3301 {
3302 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3303 fprintf (ira_dump_file, " Remove a%dr%d\n",
1756cb66 3304 ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
3b6d1699 3305 ira_remove_allocno_prefs (a);
058e97ec
VM
3306 finish_allocno (a);
3307 continue;
3308 }
3309 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
1756cb66 3310 ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
058e97ec 3311 ALLOCNO_CAP (a) = NULL;
cb1ca6ac 3312 /* Restore updated costs for assignments from reload. */
058e97ec 3313 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
1756cb66 3314 ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
058e97ec
VM
3315 if (! ALLOCNO_ASSIGNED_P (a))
3316 ira_free_allocno_updated_costs (a);
3317 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
3318 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
3319 }
3320 /* Remove unnecessary copies. */
3321 FOR_EACH_COPY (cp, ci)
3322 {
3323 if (cp->loop_tree_node == NULL)
3324 {
3325 ira_copies[cp->num] = NULL;
3326 finish_copy (cp);
3327 continue;
3328 }
3329 ira_assert
3330 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
3331 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
3b6d1699
VM
3332 add_allocno_copy_to_list (cp);
3333 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
3334 }
3335 rebuild_regno_allocno_maps ();
b15a7ae6
VM
3336 if (ira_max_point != ira_max_point_before_emit)
3337 ira_compress_allocno_live_ranges ();
058e97ec
VM
3338 ira_free (regno_top_level_allocno_map);
3339}
3340
3341\f
3342
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      node = ALLOCNO_LOOP_TREE_NODE (a);
      /* Every allocno must be registered in its own node.  */
      ira_assert (bitmap_bit_p (node->all_allocnos, ALLOCNO_NUM (a)));
      if (node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps themselves must be capped all the way up.  */
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	/* An uncapped allocno must cross the loop border and have a
	   real allocno for the same regno in the parent node.  */
	ira_assert (node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
3370
4ac293e2
VM
3371/* Identify allocnos which prefer a register class with a single hard register.
3372 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
3373 less likely to use the preferred singleton register. */
3374static void
3375update_conflict_hard_reg_costs (void)
3376{
3377 ira_allocno_t a;
3378 ira_allocno_iterator ai;
3379 int i, index, min;
3380
3381 FOR_EACH_ALLOCNO (a, ai)
3382 {
6f76a878
AS
3383 reg_class_t aclass = ALLOCNO_CLASS (a);
3384 reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
c9d74da6
RS
3385 int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
3386 if (singleton < 0)
4ac293e2 3387 continue;
c9d74da6 3388 index = ira_class_hard_reg_index[(int) aclass][singleton];
4ac293e2
VM
3389 if (index < 0)
3390 continue;
3391 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
3392 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
3393 continue;
3394 min = INT_MAX;
6f76a878 3395 for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
1756cb66 3396 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
4ac293e2
VM
3397 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
3398 min = ALLOCNO_HARD_REG_COSTS (a)[i];
3399 if (min == INT_MAX)
3400 continue;
3401 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 3402 aclass, 0);
4ac293e2 3403 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
1756cb66 3404 -= min - ALLOCNO_CLASS_COST (a);
4ac293e2
VM
3405 }
3406}
3407
058e97ec 3408/* Create a internal representation (IR) for IRA (allocnos, copies,
2608d841
VM
3409 loop tree nodes). The function returns TRUE if we generate loop
3410 structure (besides nodes representing all function and the basic
3411 blocks) for regional allocation. A true return means that we
3412 really need to flatten IR before the reload. */
058e97ec 3413bool
2608d841 3414ira_build (void)
058e97ec 3415{
2608d841 3416 bool loops_p;
058e97ec 3417
2608d841 3418 df_analyze ();
058e97ec
VM
3419 initiate_cost_vectors ();
3420 initiate_allocnos ();
3b6d1699 3421 initiate_prefs ();
058e97ec 3422 initiate_copies ();
2608d841 3423 create_loop_tree_nodes ();
058e97ec
VM
3424 form_loop_tree ();
3425 create_allocnos ();
3426 ira_costs ();
a49ae217 3427 create_allocno_objects ();
058e97ec 3428 ira_create_allocno_live_ranges ();
311aab06 3429 remove_unnecessary_regions (false);
b15a7ae6 3430 ira_compress_allocno_live_ranges ();
927425df 3431 update_bad_spill_attribute ();
058e97ec
VM
3432 loops_p = more_one_region_p ();
3433 if (loops_p)
3434 {
3435 propagate_allocno_info ();
3436 create_caps ();
3437 }
1756cb66 3438 ira_tune_allocno_costs ();
058e97ec
VM
3439#ifdef ENABLE_IRA_CHECKING
3440 check_allocno_creation ();
3441#endif
3442 setup_min_max_allocno_live_range_point ();
a49ae217 3443 sort_conflict_id_map ();
058e97ec
VM
3444 setup_min_max_conflict_allocno_ids ();
3445 ira_build_conflicts ();
4ac293e2 3446 update_conflict_hard_reg_costs ();
311aab06
VM
3447 if (! ira_conflicts_p)
3448 {
3449 ira_allocno_t a;
3450 ira_allocno_iterator ai;
3451
3452 /* Remove all regions but root one. */
3453 if (loops_p)
3454 {
3455 remove_unnecessary_regions (true);
3456 loops_p = false;
3457 }
3458 /* We don't save hard registers around calls for fast allocation
3459 -- add caller clobbered registers as conflicting ones to
3460 allocno crossing calls. */
3461 FOR_EACH_ALLOCNO (a, ai)
3462 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
ac0ab4f7 3463 ior_hard_reg_conflicts (a, &call_used_reg_set);
311aab06 3464 }
4cda38d5
VM
3465 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3466 print_copies (ira_dump_file);
3b6d1699
VM
3467 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3468 print_prefs (ira_dump_file);
058e97ec
VM
3469 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
3470 {
ac0ab4f7 3471 int n, nr, nr_big;
058e97ec 3472 ira_allocno_t a;
b14151b5 3473 live_range_t r;
058e97ec
VM
3474 ira_allocno_iterator ai;
3475
3476 n = 0;
ac0ab4f7
BS
3477 nr = 0;
3478 nr_big = 0;
058e97ec 3479 FOR_EACH_ALLOCNO (a, ai)
a49ae217 3480 {
ac0ab4f7 3481 int j, nobj = ALLOCNO_NUM_OBJECTS (a);
1756cb66 3482
ac0ab4f7
BS
3483 if (nobj > 1)
3484 nr_big++;
3485 for (j = 0; j < nobj; j++)
3486 {
3487 ira_object_t obj = ALLOCNO_OBJECT (a, j);
3488 n += OBJECT_NUM_CONFLICTS (obj);
3489 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3490 nr++;
3491 }
a49ae217 3492 }
058e97ec 3493 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
0fc822d0 3494 current_loops == NULL ? 1 : number_of_loops (cfun),
0cae8d31 3495 n_basic_blocks_for_fn (cfun), ira_max_point);
058e97ec 3496 fprintf (ira_dump_file,
ac0ab4f7
BS
3497 " allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
3498 ira_allocnos_num, nr_big, ira_copies_num, n, nr);
058e97ec
VM
3499 }
3500 return loops_p;
3501}
3502
/* Release the data created by function ira_build.  */
void
ira_destroy (void)
{
  /* Tear down in roughly the reverse order of construction.  */
  finish_loop_tree_nodes ();
  finish_prefs ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}