1 /* Compute different info about registers.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file contains the regscan pass of the compiler and passes for
22 dealing with info about the modes of pseudo-registers inside
23 subregisters.  It also defines some tables of information about the
24 hardware registers, the function init_reg_sets to initialize the
25 tables, and other auxiliary functions to deal with info about
26 registers and their classes. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "df.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "insn-config.h"
39 #include "regs.h"
40 #include "ira.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "tree-pass.h"
46
47 /* Maximum register number used in this function, plus one. */
48
49 int max_regno;
50
51 /* Used to cache the results of simplifiable_subregs. SHAPE is the input
52 parameter and SIMPLIFIABLE_REGS is the result. */
53 class simplifiable_subreg
54 {
55 public:
56 simplifiable_subreg (const subreg_shape &);
57
58 subreg_shape shape;
59 HARD_REG_SET simplifiable_regs;
60 };
61 \f
62 struct target_hard_regs default_target_hard_regs;
63 struct target_regs default_target_regs;
64 #if SWITCHABLE_TARGET
65 struct target_hard_regs *this_target_hard_regs = &default_target_hard_regs;
66 struct target_regs *this_target_regs = &default_target_regs;
67 #endif
68
69 /* Data for initializing fixed_regs. */
70 static const char initial_fixed_regs[] = FIXED_REGISTERS;
71
72 /* Data for initializing call_used_regs. */
73 static const char initial_call_used_regs[] = CALL_USED_REGISTERS;
74
75 #ifdef CALL_REALLY_USED_REGISTERS
76 /* Data for initializing call_really_used_regs. */
77 static const char initial_call_really_used_regs[] = CALL_REALLY_USED_REGISTERS;
78 #endif
79
80 #ifdef CALL_REALLY_USED_REGISTERS
81 #define CALL_REALLY_USED_REGNO_P(X) call_really_used_regs[X]
82 #else
83 #define CALL_REALLY_USED_REGNO_P(X) call_used_regs[X]
84 #endif
85
86 /* Indexed by hard register number, contains 1 for registers
87 that are being used for global register decls.
88 These must be exempt from ordinary flow analysis
89 and are also considered fixed. */
90 char global_regs[FIRST_PSEUDO_REGISTER];
91
92 /* Declaration for each global register variable, indexed by hard register number.  */
93 tree global_regs_decl[FIRST_PSEUDO_REGISTER];
94
95 /* Same information as REGS_INVALIDATED_BY_CALL but in regset form to be used
96 in dataflow more conveniently. */
97 regset regs_invalidated_by_call_regset;
98
99 /* Same information as FIXED_REG_SET but in regset form. */
100 regset fixed_reg_set_regset;
101
102 /* The bitmap_obstack is used to hold some static variables that
103 should not be reset after each function is compiled. */
104 static bitmap_obstack persistent_obstack;
105
106 /* Used to initialize reg_alloc_order. */
107 #ifdef REG_ALLOC_ORDER
108 static int initial_reg_alloc_order[FIRST_PSEUDO_REGISTER] = REG_ALLOC_ORDER;
109 #endif
110
111 /* The same information, but as an array of unsigned ints. We copy from
112 these unsigned ints to the table above. We do this so the tm.h files
113 do not have to be aware of the wordsize for machines with <= 64 regs.
114 Note that we hard-code 32 here, not HOST_BITS_PER_INT. */
115 #define N_REG_INTS \
116 ((FIRST_PSEUDO_REGISTER + (32 - 1)) / 32)
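
/* For illustration (hypothetical values): on a target with
   FIRST_PSEUDO_REGISTER == 70, N_REG_INTS is (70 + 31) / 32 == 3, and hard
   register 37 is recorded as bit 37 % 32 == 5 of word 37 / 32 == 1, i.e. it
   is tested with int_reg_class_contents[CLASS][1] & (1U << 5).  */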
117
118 static const unsigned int_reg_class_contents[N_REG_CLASSES][N_REG_INTS]
119 = REG_CLASS_CONTENTS;
120
121 /* Array containing all of the register names. */
122 static const char *const initial_reg_names[] = REGISTER_NAMES;
123
124 /* Array containing all of the register class names. */
125 const char * reg_class_names[] = REG_CLASS_NAMES;
126
127 /* No more global register variables may be declared; true once
128 reginfo has been initialized. */
129 static int no_global_reg_vars = 0;
130
131 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
132 correspond to the hard registers, if any, set in that map. This
133 could be done far more efficiently by having all sorts of special-cases
134 with moving single words, but probably isn't worth the trouble. */
135 void
136 reg_set_to_hard_reg_set (HARD_REG_SET *to, const_bitmap from)
137 {
138 unsigned i;
139 bitmap_iterator bi;
140
141 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
142 {
143 if (i >= FIRST_PSEUDO_REGISTER)
144 return;
145 SET_HARD_REG_BIT (*to, i);
146 }
147 }
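
/* A minimal usage sketch (hypothetical caller BB; requires up-to-date DF
   liveness information):

     HARD_REG_SET live_hard_regs;
     CLEAR_HARD_REG_SET (live_hard_regs);
     reg_set_to_hard_reg_set (&live_hard_regs, df_get_live_in (bb));

   collects the hard registers live on entry to basic block BB; pseudo bits
   in the bitmap are ignored because they sort after the hard registers.  */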
148
149 /* Function called only once per target_globals to initialize the
150 target_hard_regs structure. Once this is done, various switches
151 may override. */
152 void
153 init_reg_sets (void)
154 {
155 int i, j;
156
157 /* First copy the register information from the initial int form into
158 the regsets. */
159
160 for (i = 0; i < N_REG_CLASSES; i++)
161 {
162 CLEAR_HARD_REG_SET (reg_class_contents[i]);
163
164 /* Note that we hard-code 32 here, not HOST_BITS_PER_INT. */
165 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
166 if (int_reg_class_contents[i][j / 32]
167 & ((unsigned) 1 << (j % 32)))
168 SET_HARD_REG_BIT (reg_class_contents[i], j);
169 }
170
171 /* Sanity check: make sure the target macros FIXED_REGISTERS and
172 CALL_USED_REGISTERS had the right number of initializers. */
173 gcc_assert (sizeof fixed_regs == sizeof initial_fixed_regs);
174 gcc_assert (sizeof call_used_regs == sizeof initial_call_used_regs);
175 #ifdef CALL_REALLY_USED_REGISTERS
176 gcc_assert (sizeof call_really_used_regs
177 == sizeof initial_call_really_used_regs);
178 #endif
179 #ifdef REG_ALLOC_ORDER
180 gcc_assert (sizeof reg_alloc_order == sizeof initial_reg_alloc_order);
181 #endif
182 gcc_assert (sizeof reg_names == sizeof initial_reg_names);
183
184 memcpy (fixed_regs, initial_fixed_regs, sizeof fixed_regs);
185 memcpy (call_used_regs, initial_call_used_regs, sizeof call_used_regs);
186 #ifdef CALL_REALLY_USED_REGISTERS
187 memcpy (call_really_used_regs, initial_call_really_used_regs,
188 sizeof call_really_used_regs);
189 #endif
190 #ifdef REG_ALLOC_ORDER
191 memcpy (reg_alloc_order, initial_reg_alloc_order, sizeof reg_alloc_order);
192 #endif
193 memcpy (reg_names, initial_reg_names, sizeof reg_names);
194
195 SET_HARD_REG_SET (accessible_reg_set);
196 SET_HARD_REG_SET (operand_reg_set);
197 }
198
199 /* We need to save copies of some of the register information which
200 can be munged by command-line switches so we can restore it during
201 subsequent back-end reinitialization. */
202 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
203 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
204 #ifdef CALL_REALLY_USED_REGISTERS
205 static char saved_call_really_used_regs[FIRST_PSEUDO_REGISTER];
206 #endif
207 static const char *saved_reg_names[FIRST_PSEUDO_REGISTER];
208 static HARD_REG_SET saved_accessible_reg_set;
209 static HARD_REG_SET saved_operand_reg_set;
210
211 /* Save the register information. */
212 void
213 save_register_info (void)
214 {
215 /* Sanity check: make sure the target macros FIXED_REGISTERS and
216 CALL_USED_REGISTERS had the right number of initializers. */
217 gcc_assert (sizeof fixed_regs == sizeof saved_fixed_regs);
218 gcc_assert (sizeof call_used_regs == sizeof saved_call_used_regs);
219 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
220 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
221
222 /* Likewise for call_really_used_regs. */
223 #ifdef CALL_REALLY_USED_REGISTERS
224 gcc_assert (sizeof call_really_used_regs
225 == sizeof saved_call_really_used_regs);
226 memcpy (saved_call_really_used_regs, call_really_used_regs,
227 sizeof call_really_used_regs);
228 #endif
229
230 /* And similarly for reg_names. */
231 gcc_assert (sizeof reg_names == sizeof saved_reg_names);
232 memcpy (saved_reg_names, reg_names, sizeof reg_names);
233 COPY_HARD_REG_SET (saved_accessible_reg_set, accessible_reg_set);
234 COPY_HARD_REG_SET (saved_operand_reg_set, operand_reg_set);
235 }
236
237 /* Restore the register information. */
238 static void
239 restore_register_info (void)
240 {
241 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
242 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
243
244 #ifdef CALL_REALLY_USED_REGISTERS
245 memcpy (call_really_used_regs, saved_call_really_used_regs,
246 sizeof call_really_used_regs);
247 #endif
248
249 memcpy (reg_names, saved_reg_names, sizeof reg_names);
250 COPY_HARD_REG_SET (accessible_reg_set, saved_accessible_reg_set);
251 COPY_HARD_REG_SET (operand_reg_set, saved_operand_reg_set);
252 }
253
254 /* After switches have been processed, which perhaps alter
255 `fixed_regs' and `call_used_regs', convert them to HARD_REG_SETs. */
256 static void
257 init_reg_sets_1 (void)
258 {
259 unsigned int i, j;
260 unsigned int /* machine_mode */ m;
261
262 restore_register_info ();
263
264 #ifdef REG_ALLOC_ORDER
265 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
266 inv_reg_alloc_order[reg_alloc_order[i]] = i;
267 #endif
268
269 /* Let the target tweak things if necessary. */
270
271 targetm.conditional_register_usage ();
272
273 /* Compute number of hard regs in each class. */
274
275 memset (reg_class_size, 0, sizeof reg_class_size);
276 for (i = 0; i < N_REG_CLASSES; i++)
277 {
278 bool any_nonfixed = false;
279 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
280 if (TEST_HARD_REG_BIT (reg_class_contents[i], j))
281 {
282 reg_class_size[i]++;
283 if (!fixed_regs[j])
284 any_nonfixed = true;
285 }
286 class_only_fixed_regs[i] = !any_nonfixed;
287 }
288
289 /* Initialize the table of subunions.
290 reg_class_subunion[I][J] gets the largest-numbered reg-class
291 that is contained in the union of classes I and J. */
292
293 memset (reg_class_subunion, 0, sizeof reg_class_subunion);
294 for (i = 0; i < N_REG_CLASSES; i++)
295 {
296 for (j = 0; j < N_REG_CLASSES; j++)
297 {
298 HARD_REG_SET c;
299 int k;
300
301 COPY_HARD_REG_SET (c, reg_class_contents[i]);
302 IOR_HARD_REG_SET (c, reg_class_contents[j]);
303 for (k = 0; k < N_REG_CLASSES; k++)
304 if (hard_reg_set_subset_p (reg_class_contents[k], c)
305 && !hard_reg_set_subset_p (reg_class_contents[k],
306 reg_class_contents
307 [(int) reg_class_subunion[i][j]]))
308 reg_class_subunion[i][j] = (enum reg_class) k;
309 }
310 }
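
/* For illustration with hypothetical classes: if A_REGS is {r0, r1},
   B_REGS is {r2} and AB_REGS is {r0, r1, r2}, then AB_REGS is contained
   in the union of A_REGS and B_REGS, so reg_class_subunion[A_REGS][B_REGS]
   ends up as AB_REGS (unless an even larger class also fits in the union).  */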
311
312 /* Initialize the table of superunions.
313 reg_class_superunion[I][J] gets the smallest-numbered reg-class
314 containing the union of classes I and J. */
315
316 memset (reg_class_superunion, 0, sizeof reg_class_superunion);
317 for (i = 0; i < N_REG_CLASSES; i++)
318 {
319 for (j = 0; j < N_REG_CLASSES; j++)
320 {
321 HARD_REG_SET c;
322 int k;
323
324 COPY_HARD_REG_SET (c, reg_class_contents[i]);
325 IOR_HARD_REG_SET (c, reg_class_contents[j]);
326 for (k = 0; k < N_REG_CLASSES; k++)
327 if (hard_reg_set_subset_p (c, reg_class_contents[k]))
328 break;
329
330 reg_class_superunion[i][j] = (enum reg_class) k;
331 }
332 }
333
334 /* Initialize the tables of subclasses and superclasses of each reg class.
335 First clear the whole table, then add the elements as they are found. */
336
337 for (i = 0; i < N_REG_CLASSES; i++)
338 {
339 for (j = 0; j < N_REG_CLASSES; j++)
340 reg_class_subclasses[i][j] = LIM_REG_CLASSES;
341 }
342
343 for (i = 0; i < N_REG_CLASSES; i++)
344 {
345 if (i == (int) NO_REGS)
346 continue;
347
348 for (j = i + 1; j < N_REG_CLASSES; j++)
349 if (hard_reg_set_subset_p (reg_class_contents[i],
350 reg_class_contents[j]))
351 {
352 /* Reg class I is a subclass of J.
353 Record I in the table of subclasses of J.  */
354 enum reg_class *p;
355 
356 /* Find the end of J's list of subclasses and append I.  */
357 p = &reg_class_subclasses[j][0];
358 while (*p != LIM_REG_CLASSES) p++;
359 *p = (enum reg_class) i;
360 }
361 }
362
363 /* Initialize "constant" tables. */
364
365 CLEAR_HARD_REG_SET (fixed_reg_set);
366 CLEAR_HARD_REG_SET (call_used_reg_set);
367 CLEAR_HARD_REG_SET (call_fixed_reg_set);
368 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
369 if (!regs_invalidated_by_call_regset)
370 {
371 bitmap_obstack_initialize (&persistent_obstack);
372 regs_invalidated_by_call_regset = ALLOC_REG_SET (&persistent_obstack);
373 }
374 else
375 CLEAR_REG_SET (regs_invalidated_by_call_regset);
376 if (!fixed_reg_set_regset)
377 fixed_reg_set_regset = ALLOC_REG_SET (&persistent_obstack);
378 else
379 CLEAR_REG_SET (fixed_reg_set_regset);
380
381 AND_HARD_REG_SET (operand_reg_set, accessible_reg_set);
382 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
383 {
384 /* As a special exception, registers whose class is NO_REGS are
385 not accepted by `register_operand'. The reason for this change
386 is to allow the representation of special architecture artifacts
387 (such as a condition code register) without extending the rtl
388 definitions. Since registers of class NO_REGS cannot be used
389 as registers in any case where register classes are examined,
390 it is better to apply this exception in a target-independent way. */
391 if (REGNO_REG_CLASS (i) == NO_REGS)
392 CLEAR_HARD_REG_BIT (operand_reg_set, i);
393
394 /* If a register is too limited to be treated as a register operand,
395 then it should never be allocated to a pseudo. */
396 if (!TEST_HARD_REG_BIT (operand_reg_set, i))
397 {
398 fixed_regs[i] = 1;
399 call_used_regs[i] = 1;
400 }
401
402 /* call_used_regs must include fixed_regs. */
403 gcc_assert (!fixed_regs[i] || call_used_regs[i]);
404 #ifdef CALL_REALLY_USED_REGISTERS
405 /* call_used_regs must include call_really_used_regs. */
406 gcc_assert (!call_really_used_regs[i] || call_used_regs[i]);
407 #endif
408
409 if (fixed_regs[i])
410 {
411 SET_HARD_REG_BIT (fixed_reg_set, i);
412 SET_REGNO_REG_SET (fixed_reg_set_regset, i);
413 }
414
415 if (call_used_regs[i])
416 SET_HARD_REG_BIT (call_used_reg_set, i);
417
418 /* There are a couple of fixed registers that we know are safe to
419 exclude from being clobbered by calls:
420
421 The frame pointer is always preserved across calls.  The arg
422 pointer is preserved if it is fixed.  The stack pointer usually is,
423 unless TARGET_RETURN_POPS_ARGS, in which case an explicit
424 CLOBBER will be present. If we are generating PIC code, the
425 PIC offset table register is preserved across calls, though the
426 target can override that. */
427
428 if (i == STACK_POINTER_REGNUM)
429 ;
430 else if (global_regs[i])
431 {
432 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
433 SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
434 }
435 else if (i == FRAME_POINTER_REGNUM)
436 ;
437 else if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
438 && i == HARD_FRAME_POINTER_REGNUM)
439 ;
440 else if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
441 && i == ARG_POINTER_REGNUM && fixed_regs[i])
442 ;
443 else if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
444 && i == (unsigned) PIC_OFFSET_TABLE_REGNUM && fixed_regs[i])
445 ;
446 else if (CALL_REALLY_USED_REGNO_P (i))
447 {
448 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
449 SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
450 }
451 }
452
453 COPY_HARD_REG_SET (call_fixed_reg_set, fixed_reg_set);
454 COPY_HARD_REG_SET (fixed_nonglobal_reg_set, fixed_reg_set);
455
456 /* Preserve global registers if called more than once. */
457 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
458 {
459 if (global_regs[i])
460 {
461 fixed_regs[i] = call_used_regs[i] = 1;
462 SET_HARD_REG_BIT (fixed_reg_set, i);
463 SET_HARD_REG_BIT (call_used_reg_set, i);
464 SET_HARD_REG_BIT (call_fixed_reg_set, i);
465 }
466 }
467
468 memset (have_regs_of_mode, 0, sizeof (have_regs_of_mode));
469 memset (contains_reg_of_mode, 0, sizeof (contains_reg_of_mode));
470 for (m = 0; m < (unsigned int) MAX_MACHINE_MODE; m++)
471 {
472 HARD_REG_SET ok_regs, ok_regs2;
473 CLEAR_HARD_REG_SET (ok_regs);
474 CLEAR_HARD_REG_SET (ok_regs2);
475 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
476 if (!TEST_HARD_REG_BIT (fixed_nonglobal_reg_set, j)
477 && targetm.hard_regno_mode_ok (j, (machine_mode) m))
478 {
479 SET_HARD_REG_BIT (ok_regs, j);
480 if (!fixed_regs[j])
481 SET_HARD_REG_BIT (ok_regs2, j);
482 }
483
484 for (i = 0; i < N_REG_CLASSES; i++)
485 if ((targetm.class_max_nregs ((reg_class_t) i, (machine_mode) m)
486 <= reg_class_size[i])
487 && hard_reg_set_intersect_p (ok_regs, reg_class_contents[i]))
488 {
489 contains_reg_of_mode[i][m] = 1;
490 if (hard_reg_set_intersect_p (ok_regs2, reg_class_contents[i]))
491 {
492 have_regs_of_mode[m] = 1;
493 contains_allocatable_reg_of_mode[i][m] = 1;
494 }
495 }
496 }
497 }
498
499 /* Compute the table of register modes.
500 These values are used to record death information for individual registers
501 (as opposed to a multi-register mode).
502 This function might be invoked more than once, if the target has support
503 for changing register usage conventions on a per-function basis.
504 */
505 void
506 init_reg_modes_target (void)
507 {
508 int i, j;
509
510 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
511 for (j = 0; j < MAX_MACHINE_MODE; j++)
512 this_target_regs->x_hard_regno_nregs[i][j]
513 = targetm.hard_regno_nregs (i, (machine_mode) j);
514
515 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
516 {
517 reg_raw_mode[i] = choose_hard_reg_mode (i, 1, false);
518
519 /* If we couldn't find a valid mode, just use the previous mode
520 if it is suitable, otherwise fall back on word_mode. */
521 if (reg_raw_mode[i] == VOIDmode)
522 {
523 if (i > 0 && hard_regno_nregs (i, reg_raw_mode[i - 1]) == 1)
524 reg_raw_mode[i] = reg_raw_mode[i - 1];
525 else
526 reg_raw_mode[i] = word_mode;
527 }
528 }
529 }
530
531 /* Finish initializing the register sets and initialize the register modes.
532 This function might be invoked more than once, if the target has support
533 for changing register usage conventions on a per-function basis.
534 */
535 void
536 init_regs (void)
537 {
538 /* This finishes what was started by init_reg_sets, but couldn't be done
539 until after register usage was specified. */
540 init_reg_sets_1 ();
541 }
542
543 /* The same as the previous function, plus initializing IRA.  */
544 void
545 reinit_regs (void)
546 {
547 init_regs ();
548 /* caller_save needs to be re-initialized. */
549 caller_save_initialized_p = false;
550 if (this_target_rtl->target_specific_initialized)
551 {
552 ira_init ();
553 recog_init ();
554 }
555 }
556
557 /* Initialize some fake stack-frame MEM references for use in
558 memory_move_secondary_cost. */
559 void
560 init_fake_stack_mems (void)
561 {
562 int i;
563
564 for (i = 0; i < MAX_MACHINE_MODE; i++)
565 top_of_stack[i] = gen_rtx_MEM ((machine_mode) i, stack_pointer_rtx);
566 }
567
568
569 /* Compute cost of moving data from a register of class FROM to one of
570 TO, using MODE. */
571
572 int
573 register_move_cost (machine_mode mode, reg_class_t from, reg_class_t to)
574 {
575 return targetm.register_move_cost (mode, from, to);
576 }
577
578 /* Compute cost of moving registers to/from memory. */
579
580 int
581 memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
582 {
583 return targetm.memory_move_cost (mode, rclass, in);
584 }
585
586 /* Compute extra cost of moving registers to/from memory due to reloads.
587 Only needed if secondary reloads are required for memory moves. */
588 int
589 memory_move_secondary_cost (machine_mode mode, reg_class_t rclass,
590 bool in)
591 {
592 reg_class_t altclass;
593 int partial_cost = 0;
594 /* We need a memory reference to feed to SECONDARY... macros. */
595 /* mem may be unused even if the SECONDARY_ macros are defined. */
596 rtx mem ATTRIBUTE_UNUSED = top_of_stack[(int) mode];
597
598 altclass = secondary_reload_class (in ? 1 : 0, rclass, mode, mem);
599
600 if (altclass == NO_REGS)
601 return 0;
602
603 if (in)
604 partial_cost = register_move_cost (mode, altclass, rclass);
605 else
606 partial_cost = register_move_cost (mode, rclass, altclass);
607
608 if (rclass == altclass)
609 /* This isn't simply a copy-to-temporary situation. Can't guess
610 what it is, so TARGET_MEMORY_MOVE_COST really ought not to be
611 calling here in that case.
612
613 I'm tempted to put in an assert here, but returning this will
614 probably only give poor estimates, which is what we would've
615 had before this code anyway.  */
616 return partial_cost;
617
618 /* Check if the secondary reload register will also need a
619 secondary reload. */
620 return memory_move_secondary_cost (mode, altclass, in) + partial_cost;
621 }
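
/* For illustration (hypothetical target and costs): if RCLASS is FP_REGS,
   its only path to memory is through GENERAL_REGS, and
   register_move_cost (SImode, FP_REGS, GENERAL_REGS) is 4, then storing an
   FP_REGS value costs an extra 4, plus whatever the recursive call reports
   for GENERAL_REGS (zero when GENERAL_REGS needs no secondary reload).  */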
622
623 /* Return a machine mode that is legitimate for hard reg REGNO and large
624 enough to save NREGS registers.  If we can't find one, return VOIDmode.
625 If CALL_SAVED is true, only consider modes that are call saved. */
626 machine_mode
627 choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
628 unsigned int nregs, bool call_saved)
629 {
630 unsigned int /* machine_mode */ m;
631 machine_mode found_mode = VOIDmode, mode;
632
633 /* We first look for the largest integer mode that can be validly
634 held in REGNO. If none, we look for the largest floating-point mode.
635 If we still didn't find a valid mode, try CCmode.
636
637 The tests use maybe_gt rather than known_gt because we want (for example)
638 N V4SFs to win over plain V4SF even though N might be 1. */
639 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
640 if (hard_regno_nregs (regno, mode) == nregs
641 && targetm.hard_regno_mode_ok (regno, mode)
642 && (!call_saved
643 || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
644 && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
645 found_mode = mode;
646
647 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
648 if (hard_regno_nregs (regno, mode) == nregs
649 && targetm.hard_regno_mode_ok (regno, mode)
650 && (!call_saved
651 || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
652 && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
653 found_mode = mode;
654
655 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
656 if (hard_regno_nregs (regno, mode) == nregs
657 && targetm.hard_regno_mode_ok (regno, mode)
658 && (!call_saved
659 || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
660 && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
661 found_mode = mode;
662
663 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
664 if (hard_regno_nregs (regno, mode) == nregs
665 && targetm.hard_regno_mode_ok (regno, mode)
666 && (!call_saved
667 || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
668 && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
669 found_mode = mode;
670
671 if (found_mode != VOIDmode)
672 return found_mode;
673
674 /* Iterate over all of the CCmodes. */
675 for (m = (unsigned int) CCmode; m < (unsigned int) NUM_MACHINE_MODES; ++m)
676 {
677 mode = (machine_mode) m;
678 if (hard_regno_nregs (regno, mode) == nregs
679 && targetm.hard_regno_mode_ok (regno, mode)
680 && (!call_saved
681 || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode)))
682 return mode;
683 }
684
685 /* We can't find a mode valid for this register. */
686 return VOIDmode;
687 }
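
/* For illustration (hypothetical target): if hard register 0 accepts both
   SImode and DImode, and DImode occupies two registers, then
   choose_hard_reg_mode (0, 2, false) returns DImode while
   choose_hard_reg_mode (0, 1, false) returns SImode; an equally-sized
   float or vector mode does not replace the integer mode because the
   size comparison uses the strict maybe_gt.  */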
688
689 /* Specify the usage characteristics of the register named NAME.
690 It should be a fixed register if FIXED and a
691 call-used register if CALL_USED. */
692 void
693 fix_register (const char *name, int fixed, int call_used)
694 {
695 int i;
696 int reg, nregs;
697
698 /* Decode the name and update the primary form of
699 the register info. */
700
701 if ((reg = decode_reg_name_and_count (name, &nregs)) >= 0)
702 {
703 gcc_assert (nregs >= 1);
704 for (i = reg; i < reg + nregs; i++)
705 {
706 if ((i == STACK_POINTER_REGNUM
707 #ifdef HARD_FRAME_POINTER_REGNUM
708 || i == HARD_FRAME_POINTER_REGNUM
709 #else
710 || i == FRAME_POINTER_REGNUM
711 #endif
712 )
713 && (fixed == 0 || call_used == 0))
714 {
715 switch (fixed)
716 {
717 case 0:
718 switch (call_used)
719 {
720 case 0:
721 error ("cannot use %qs as a call-saved register", name);
722 break;
723
724 case 1:
725 error ("cannot use %qs as a call-used register", name);
726 break;
727
728 default:
729 gcc_unreachable ();
730 }
731 break;
732
733 case 1:
734 switch (call_used)
735 {
736 case 1:
737 error ("cannot use %qs as a fixed register", name);
738 break;
739
740 case 0:
741 default:
742 gcc_unreachable ();
743 }
744 break;
745
746 default:
747 gcc_unreachable ();
748 }
749 }
750 else
751 {
752 fixed_regs[i] = fixed;
753 call_used_regs[i] = call_used;
754 #ifdef CALL_REALLY_USED_REGISTERS
755 if (fixed == 0)
756 call_really_used_regs[i] = call_used;
757 #endif
758 }
759 }
760 }
761 else
762 {
763 warning (0, "unknown register name: %s", name);
764 }
765 }
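
/* A usage sketch (illustrative; the actual option handling lives elsewhere):
   -ffixed-REG corresponds to fix_register (reg_name, 1, 1),
   -fcall-used-REG to fix_register (reg_name, 0, 1), and
   -fcall-saved-REG to fix_register (reg_name, 0, 0).  */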
766
767 /* Mark register number I as global. */
768 void
769 globalize_reg (tree decl, int i)
770 {
771 location_t loc = DECL_SOURCE_LOCATION (decl);
772
773 #ifdef STACK_REGS
774 if (IN_RANGE (i, FIRST_STACK_REG, LAST_STACK_REG))
775 {
776 error ("stack register used for global register variable");
777 return;
778 }
779 #endif
780
781 if (fixed_regs[i] == 0 && no_global_reg_vars)
782 error_at (loc, "global register variable follows a function definition");
783
784 if (global_regs[i])
785 {
786 auto_diagnostic_group d;
787 warning_at (loc, 0,
788 "register of %qD used for multiple global register variables",
789 decl);
790 inform (DECL_SOURCE_LOCATION (global_regs_decl[i]),
791 "conflicts with %qD", global_regs_decl[i]);
792 return;
793 }
794
795 if (call_used_regs[i] && ! fixed_regs[i])
796 warning_at (loc, 0, "call-clobbered register used for global register variable");
797
798 global_regs[i] = 1;
799 global_regs_decl[i] = decl;
800
801 /* If we're globalizing the frame pointer, we need to set the
802 appropriate regs_invalidated_by_call bit, even if it's already
803 set in fixed_regs. */
804 if (i != STACK_POINTER_REGNUM)
805 {
806 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
807 SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
808 }
809
810 /* If already fixed, nothing else to do. */
811 if (fixed_regs[i])
812 return;
813
814 fixed_regs[i] = call_used_regs[i] = 1;
815 #ifdef CALL_REALLY_USED_REGISTERS
816 call_really_used_regs[i] = 1;
817 #endif
818
819 SET_HARD_REG_BIT (fixed_reg_set, i);
820 SET_HARD_REG_BIT (call_used_reg_set, i);
821 SET_HARD_REG_BIT (call_fixed_reg_set, i);
822
823 reinit_regs ();
824 }
825 \f
826
827 /* Structure used to record preferences of given pseudo. */
828 struct reg_pref
829 {
830 /* (enum reg_class) prefclass is the preferred class. May be
831 NO_REGS if no class is better than memory. */
832 char prefclass;
833
834 /* altclass is a register class that we should use for allocating
835 the pseudo if no register in the preferred class is available.
836 If no register in this class is available either, memory is preferred.
837 
838 It might appear more general to have a bitmask of classes here,
839 but since it is recommended that there be a class corresponding to the
840 union of each major pair of classes, that generality is not required. */
841 char altclass;
842
843 /* allocnoclass is a register class that IRA uses for allocating
844 the pseudo. */
845 char allocnoclass;
846 };
847
848 /* Record preferences of each pseudo. This is available after RA is
849 run. */
850 static struct reg_pref *reg_pref;
851
852 /* Current allocated size of reg_pref and reg_renumber.  */
853 static int reg_info_size;
854 /* Value of max_reg_num () at the last resize_reg_info call.  */
855 static int max_regno_since_last_resize;
856
857 /* Return the reg_class in which pseudo reg number REGNO is best allocated.
858 This function is sometimes called before the info has been computed.
859 When that happens, just return GENERAL_REGS, which is innocuous. */
860 enum reg_class
861 reg_preferred_class (int regno)
862 {
863 if (reg_pref == 0)
864 return GENERAL_REGS;
865
866 gcc_assert (regno < reg_info_size);
867 return (enum reg_class) reg_pref[regno].prefclass;
868 }
869
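/* Return the reg_class to try for allocating pseudo REGNO when no register
   in its preferred class is available.  Before the info has been computed,
   just return ALL_REGS, which is innocuous.  */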
870 enum reg_class
871 reg_alternate_class (int regno)
872 {
873 if (reg_pref == 0)
874 return ALL_REGS;
875
876 gcc_assert (regno < reg_info_size);
877 return (enum reg_class) reg_pref[regno].altclass;
878 }
879
880 /* Return the reg_class which is used by IRA for its allocation. */
881 enum reg_class
882 reg_allocno_class (int regno)
883 {
884 if (reg_pref == 0)
885 return NO_REGS;
886
887 gcc_assert (regno < reg_info_size);
888 return (enum reg_class) reg_pref[regno].allocnoclass;
889 }
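
/* A minimal usage sketch (hypothetical caller, after the info has been
   computed):

     enum reg_class pref = reg_preferred_class (regno);
     enum reg_class alt = reg_alternate_class (regno);

   try PREF first; if no register in PREF can be found, fall back to ALT,
   and if that also fails, keep the pseudo in memory.  */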
890
891 \f
892
893 /* Allocate space for reg info and initialize it.  */
894 static void
895 allocate_reg_info (void)
896 {
897 int i;
898
899 max_regno_since_last_resize = max_reg_num ();
900 reg_info_size = max_regno_since_last_resize * 3 / 2 + 1;
901 gcc_assert (! reg_pref && ! reg_renumber);
902 reg_renumber = XNEWVEC (short, reg_info_size);
903 reg_pref = XCNEWVEC (struct reg_pref, reg_info_size);
904 memset (reg_renumber, -1, reg_info_size * sizeof (short));
905 for (i = 0; i < reg_info_size; i++)
906 {
907 reg_pref[i].prefclass = GENERAL_REGS;
908 reg_pref[i].altclass = ALL_REGS;
909 reg_pref[i].allocnoclass = GENERAL_REGS;
910 }
911 }
912
913
914 /* Resize reg info. The new elements will be initialized. Return TRUE
915 if new pseudos were added since the last call. */
916 bool
917 resize_reg_info (void)
918 {
919 int old, i;
920 bool change_p;
921
922 if (reg_pref == NULL)
923 {
924 allocate_reg_info ();
925 return true;
926 }
927 change_p = max_regno_since_last_resize != max_reg_num ();
928 max_regno_since_last_resize = max_reg_num ();
929 if (reg_info_size >= max_reg_num ())
930 return change_p;
931 old = reg_info_size;
932 reg_info_size = max_reg_num () * 3 / 2 + 1;
933 gcc_assert (reg_pref && reg_renumber);
934 reg_renumber = XRESIZEVEC (short, reg_renumber, reg_info_size);
935 reg_pref = XRESIZEVEC (struct reg_pref, reg_pref, reg_info_size);
936 memset (reg_pref + old, -1,
937 (reg_info_size - old) * sizeof (struct reg_pref));
938 memset (reg_renumber + old, -1, (reg_info_size - old) * sizeof (short));
939 for (i = old; i < reg_info_size; i++)
940 {
941 reg_pref[i].prefclass = GENERAL_REGS;
942 reg_pref[i].altclass = ALL_REGS;
943 reg_pref[i].allocnoclass = GENERAL_REGS;
944 }
945 return true;
946 }
947
948
949 /* Free up the space allocated by allocate_reg_info. */
950 void
951 free_reg_info (void)
952 {
953 if (reg_pref)
954 {
955 free (reg_pref);
956 reg_pref = NULL;
957 }
958
959 if (reg_renumber)
960 {
961 free (reg_renumber);
962 reg_renumber = NULL;
963 }
964 }
965
966 /* Initialize some global data for this pass. */
967 static unsigned int
968 reginfo_init (void)
969 {
970 if (df)
971 df_compute_regs_ever_live (true);
972
973 /* This prevents dump_reg_info from misbehaving if called
974 before reginfo is run. */
975 reg_pref = NULL;
976 reg_info_size = max_regno_since_last_resize = 0;
977 /* No more global register variables may be declared. */
978 no_global_reg_vars = 1;
979 return 1;
980 }
981
982 namespace {
983
984 const pass_data pass_data_reginfo_init =
985 {
986 RTL_PASS, /* type */
987 "reginfo", /* name */
988 OPTGROUP_NONE, /* optinfo_flags */
989 TV_NONE, /* tv_id */
990 0, /* properties_required */
991 0, /* properties_provided */
992 0, /* properties_destroyed */
993 0, /* todo_flags_start */
994 0, /* todo_flags_finish */
995 };
996
997 class pass_reginfo_init : public rtl_opt_pass
998 {
999 public:
1000 pass_reginfo_init (gcc::context *ctxt)
1001 : rtl_opt_pass (pass_data_reginfo_init, ctxt)
1002 {}
1003
1004 /* opt_pass methods: */
1005 virtual unsigned int execute (function *) { return reginfo_init (); }
1006
1007 }; // class pass_reginfo_init
1008
1009 } // anon namespace
1010
1011 rtl_opt_pass *
1012 make_pass_reginfo_init (gcc::context *ctxt)
1013 {
1014 return new pass_reginfo_init (ctxt);
1015 }
1016
1017 \f
1018
1019 /* Set up preferred, alternate, and allocno classes for REGNO as
1020 PREFCLASS, ALTCLASS, and ALLOCNOCLASS. */
1021 void
1022 setup_reg_classes (int regno,
1023 enum reg_class prefclass, enum reg_class altclass,
1024 enum reg_class allocnoclass)
1025 {
1026 if (reg_pref == NULL)
1027 return;
1028 gcc_assert (reg_info_size >= max_reg_num ());
1029 reg_pref[regno].prefclass = prefclass;
1030 reg_pref[regno].altclass = altclass;
1031 reg_pref[regno].allocnoclass = allocnoclass;
1032 }
1033
1034 \f
1035 /* This is the `regscan' pass of the compiler.  It scans all insns and
1036 records information implied by each pseudo-register reference, such
1037 as REG_POINTER and REG_ATTRS (see reg_scan_mark_refs below). */
1038
1039 static void reg_scan_mark_refs (rtx, rtx_insn *);
1040
1041 void
1042 reg_scan (rtx_insn *f, unsigned int nregs ATTRIBUTE_UNUSED)
1043 {
1044 rtx_insn *insn;
1045
1046 timevar_push (TV_REG_SCAN);
1047
1048 for (insn = f; insn; insn = NEXT_INSN (insn))
1049 if (INSN_P (insn))
1050 {
1051 reg_scan_mark_refs (PATTERN (insn), insn);
1052 if (REG_NOTES (insn))
1053 reg_scan_mark_refs (REG_NOTES (insn), insn);
1054 }
1055
1056 timevar_pop (TV_REG_SCAN);
1057 }
1058
1059
1060 /* X is the expression to scan.  INSN is the insn it appears in.
1061 Record the REG_POINTER and REG_EXPR/REG_ATTRS information implied
1062 by the SETs found in X. */
1064 static void
1065 reg_scan_mark_refs (rtx x, rtx_insn *insn)
1066 {
1067 enum rtx_code code;
1068 rtx dest;
1069 rtx note;
1070
1071 if (!x)
1072 return;
1073 code = GET_CODE (x);
1074 switch (code)
1075 {
1076 case CONST:
1077 CASE_CONST_ANY:
1078 case CC0:
1079 case PC:
1080 case SYMBOL_REF:
1081 case LABEL_REF:
1082 case ADDR_VEC:
1083 case ADDR_DIFF_VEC:
1084 case REG:
1085 return;
1086
1087 case EXPR_LIST:
1088 if (XEXP (x, 0))
1089 reg_scan_mark_refs (XEXP (x, 0), insn);
1090 if (XEXP (x, 1))
1091 reg_scan_mark_refs (XEXP (x, 1), insn);
1092 break;
1093
1094 case INSN_LIST:
1095 case INT_LIST:
1096 if (XEXP (x, 1))
1097 reg_scan_mark_refs (XEXP (x, 1), insn);
1098 break;
1099
1100 case CLOBBER:
1101 if (MEM_P (XEXP (x, 0)))
1102 reg_scan_mark_refs (XEXP (XEXP (x, 0), 0), insn);
1103 break;
1104
1105 case CLOBBER_HIGH:
1106 gcc_assert (!(MEM_P (XEXP (x, 0))));
1107 break;
1108
1109 case SET:
1110 /* Count a set of the destination if it is a register. */
1111 for (dest = SET_DEST (x);
1112 GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1113 || GET_CODE (dest) == ZERO_EXTRACT;
1114 dest = XEXP (dest, 0))
1115 ;
1116
1117 /* If this is setting a pseudo from another pseudo or the sum of a
1118 pseudo and a constant integer and the other pseudo is known to be
1119 a pointer, set the destination to be a pointer as well.
1120
1121 Likewise if it is setting the destination from an address or from a
1122 value equivalent to an address or to the sum of an address and
1123 something else.
1124
1125 But don't do any of this if the pseudo corresponds to a user
1126 variable since it should have already been set as a pointer based
1127 on the type. */
1128
1129 if (REG_P (SET_DEST (x))
1130 && REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER
1131 /* If the destination pseudo is set more than once, then other
1132 sets might not be to a pointer value (consider access to a
1133 union in two threads of control in the presence of global
1134 optimizations). So only set REG_POINTER on the destination
1135 pseudo if this is the only set of that pseudo. */
1136 && DF_REG_DEF_COUNT (REGNO (SET_DEST (x))) == 1
1137 && ! REG_USERVAR_P (SET_DEST (x))
1138 && ! REG_POINTER (SET_DEST (x))
1139 && ((REG_P (SET_SRC (x))
1140 && REG_POINTER (SET_SRC (x)))
1141 || ((GET_CODE (SET_SRC (x)) == PLUS
1142 || GET_CODE (SET_SRC (x)) == LO_SUM)
1143 && CONST_INT_P (XEXP (SET_SRC (x), 1))
1144 && REG_P (XEXP (SET_SRC (x), 0))
1145 && REG_POINTER (XEXP (SET_SRC (x), 0)))
1146 || GET_CODE (SET_SRC (x)) == CONST
1147 || GET_CODE (SET_SRC (x)) == SYMBOL_REF
1148 || GET_CODE (SET_SRC (x)) == LABEL_REF
1149 || (GET_CODE (SET_SRC (x)) == HIGH
1150 && (GET_CODE (XEXP (SET_SRC (x), 0)) == CONST
1151 || GET_CODE (XEXP (SET_SRC (x), 0)) == SYMBOL_REF
1152 || GET_CODE (XEXP (SET_SRC (x), 0)) == LABEL_REF))
1153 || ((GET_CODE (SET_SRC (x)) == PLUS
1154 || GET_CODE (SET_SRC (x)) == LO_SUM)
1155 && (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST
1156 || GET_CODE (XEXP (SET_SRC (x), 1)) == SYMBOL_REF
1157 || GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF))
1158 || ((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1159 && (GET_CODE (XEXP (note, 0)) == CONST
1160 || GET_CODE (XEXP (note, 0)) == SYMBOL_REF
1161 || GET_CODE (XEXP (note, 0)) == LABEL_REF))))
1162 REG_POINTER (SET_DEST (x)) = 1;
1163
1164 /* If this is setting a register from a register or from a simple
1165 conversion of a register, propagate REG_EXPR. */
1166 if (REG_P (dest) && !REG_ATTRS (dest))
1167 set_reg_attrs_from_value (dest, SET_SRC (x));
1168
1169 /* fall through */
1170
1171 default:
1172 {
1173 const char *fmt = GET_RTX_FORMAT (code);
1174 int i;
1175 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1176 {
1177 if (fmt[i] == 'e')
1178 reg_scan_mark_refs (XEXP (x, i), insn);
1179 else if (fmt[i] == 'E' && XVEC (x, i) != 0)
1180 {
1181 int j;
1182 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1183 reg_scan_mark_refs (XVECEXP (x, i, j), insn);
1184 }
1185 }
1186 }
1187 }
1188 }
1189 \f
1190
1191 /* Return nonzero if C1 is a subset of C2, i.e., if every register in C1
1192 is also in C2. */
1193 int
1194 reg_class_subset_p (reg_class_t c1, reg_class_t c2)
1195 {
1196 return (c1 == c2
1197 || c2 == ALL_REGS
1198 || hard_reg_set_subset_p (reg_class_contents[(int) c1],
1199 reg_class_contents[(int) c2]));
1200 }
1201
1202 /* Return nonzero if there is a register that is in both C1 and C2. */
1203 int
1204 reg_classes_intersect_p (reg_class_t c1, reg_class_t c2)
1205 {
1206 return (c1 == c2
1207 || c1 == ALL_REGS
1208 || c2 == ALL_REGS
1209 || hard_reg_set_intersect_p (reg_class_contents[(int) c1],
1210 reg_class_contents[(int) c2]));
1211 }
1212
1213 \f
1214 inline hashval_t
1215 simplifiable_subregs_hasher::hash (const simplifiable_subreg *value)
1216 {
1217 inchash::hash h;
1218 h.add_hwi (value->shape.unique_id ());
1219 return h.end ();
1220 }
1221
1222 inline bool
1223 simplifiable_subregs_hasher::equal (const simplifiable_subreg *value,
1224 const subreg_shape *compare)
1225 {
1226 return value->shape == *compare;
1227 }
1228
1229 inline simplifiable_subreg::simplifiable_subreg (const subreg_shape &shape_in)
1230 : shape (shape_in)
1231 {
1232 CLEAR_HARD_REG_SET (simplifiable_regs);
1233 }
1234
1235 /* Return the set of hard registers that are able to form the subreg
1236 described by SHAPE. */
1237
1238 const HARD_REG_SET &
1239 simplifiable_subregs (const subreg_shape &shape)
1240 {
1241 if (!this_target_hard_regs->x_simplifiable_subregs)
1242 this_target_hard_regs->x_simplifiable_subregs
1243 = new hash_table <simplifiable_subregs_hasher> (30);
1244 inchash::hash h;
1245 h.add_hwi (shape.unique_id ());
1246 simplifiable_subreg **slot
1247 = (this_target_hard_regs->x_simplifiable_subregs
1248 ->find_slot_with_hash (&shape, h.end (), INSERT));
1249
1250 if (!*slot)
1251 {
1252 simplifiable_subreg *info = new simplifiable_subreg (shape);
1253 for (unsigned int i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1254 if (targetm.hard_regno_mode_ok (i, shape.inner_mode)
1255 && simplify_subreg_regno (i, shape.inner_mode, shape.offset,
1256 shape.outer_mode) >= 0)
1257 SET_HARD_REG_BIT (info->simplifiable_regs, i);
1258 *slot = info;
1259 }
1260 return (*slot)->simplifiable_regs;
1261 }
1262
1263 /* Passes for keeping and updating info about modes of registers
1264 inside subregisters. */
1265
1266 static HARD_REG_SET **valid_mode_changes;
1267 static obstack valid_mode_changes_obstack;
1268
1269 /* Restrict the choice of register for SUBREG_REG (SUBREG) based
1270 on information about SUBREG.
1271
1272 If PARTIAL_DEF, SUBREG is a partial definition of a multipart inner
1273 register and we want to ensure that the other parts of the inner
1274 register are correctly preserved. If !PARTIAL_DEF we need to
1275 ensure that SUBREG itself can be formed. */
1276
1277 static void
1278 record_subregs_of_mode (rtx subreg, bool partial_def)
1279 {
1280 unsigned int regno;
1281
1282 if (!REG_P (SUBREG_REG (subreg)))
1283 return;
1284
1285 regno = REGNO (SUBREG_REG (subreg));
1286 if (regno < FIRST_PSEUDO_REGISTER)
1287 return;
1288
1289 subreg_shape shape (shape_of_subreg (subreg));
1290 if (partial_def)
1291 {
1292 /* The number of independently-accessible SHAPE.outer_mode values
1293 in SHAPE.inner_mode is GET_MODE_SIZE (SHAPE.inner_mode) / SIZE.
1294 We need to check that the assignment will preserve all the other
1295 SIZE-byte chunks in the inner register besides the one that
1296 includes SUBREG.
1297
1298 In practice it is enough to check whether an equivalent
1299 SHAPE.outer_mode value in an adjacent SIZE-byte chunk can be formed.
1300 If the underlying registers are small enough, both subregs will
1301 be valid. If the underlying registers are too large, one of the
1302 subregs will be invalid.
1303
1304 This relies on the fact that we've already been passed
1305 SUBREG with PARTIAL_DEF set to false.
1306
1307 The size of the outer mode must be ordered wrt the size of the
1308 inner mode's registers, since otherwise we wouldn't know at
1309 compile time how many registers the outer mode occupies. */
1310 poly_uint64 size = ordered_max (REGMODE_NATURAL_SIZE (shape.inner_mode),
1311 GET_MODE_SIZE (shape.outer_mode));
1312 gcc_checking_assert (known_lt (size, GET_MODE_SIZE (shape.inner_mode)));
1313 if (known_ge (shape.offset, size))
1314 shape.offset -= size;
1315 else
1316 shape.offset += size;
1317 }
1318
1319 if (valid_mode_changes[regno])
1320 AND_HARD_REG_SET (*valid_mode_changes[regno],
1321 simplifiable_subregs (shape));
1322 else
1323 {
1324 valid_mode_changes[regno]
1325 = XOBNEW (&valid_mode_changes_obstack, HARD_REG_SET);
1326 COPY_HARD_REG_SET (*valid_mode_changes[regno],
1327 simplifiable_subregs (shape));
1328 }
1329 }
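
/* Worked example (hypothetical modes): for a partial definition through
   (subreg:SI (reg:DI P) 0) on a target whose natural register size is
   4 bytes, SIZE is 4, so the offset is adjusted to 4 and we also require
   that (subreg:SI (reg:DI P) 4) can be formed; that is what guarantees the
   untouched half of P survives the assignment.  */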
1330
1331 /* Call record_subregs_of_mode for all the subregs in X. */
1332 static void
1333 find_subregs_of_mode (rtx x)
1334 {
1335 enum rtx_code code = GET_CODE (x);
1336 const char * const fmt = GET_RTX_FORMAT (code);
1337 int i;
1338
1339 if (code == SUBREG)
1340 record_subregs_of_mode (x, false);
1341
1342 /* Time for some deep diving. */
1343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1344 {
1345 if (fmt[i] == 'e')
1346 find_subregs_of_mode (XEXP (x, i));
1347 else if (fmt[i] == 'E')
1348 {
1349 int j;
1350 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1351 find_subregs_of_mode (XVECEXP (x, i, j));
1352 }
1353 }
1354 }
1355
1356 void
1357 init_subregs_of_mode (void)
1358 {
1359 basic_block bb;
1360 rtx_insn *insn;
1361
1362 gcc_obstack_init (&valid_mode_changes_obstack);
1363 valid_mode_changes = XCNEWVEC (HARD_REG_SET *, max_reg_num ());
1364
1365 FOR_EACH_BB_FN (bb, cfun)
1366 FOR_BB_INSNS (bb, insn)
1367 if (NONDEBUG_INSN_P (insn))
1368 {
1369 find_subregs_of_mode (PATTERN (insn));
1370 df_ref def;
1371 FOR_EACH_INSN_DEF (def, insn)
1372 if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
1373 && read_modify_subreg_p (DF_REF_REG (def)))
1374 record_subregs_of_mode (DF_REF_REG (def), true);
1375 }
1376 }
1377
1378 const HARD_REG_SET *
1379 valid_mode_changes_for_regno (unsigned int regno)
1380 {
1381 return valid_mode_changes[regno];
1382 }
1383
1384 void
1385 finish_subregs_of_mode (void)
1386 {
1387 XDELETEVEC (valid_mode_changes);
1388 obstack_free (&valid_mode_changes_obstack, NULL);
1389 }
1390
1391 /* Free all data attached to the structure. This isn't a destructor because
1392 we don't want it to run on exit. */
1393
1394 void
1395 target_hard_regs::finalize ()
1396 {
1397 delete x_simplifiable_subregs;
1398 }