/* Decompose multiword subregs.
   Copyright (C) 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Richard Henderson <rth@redhat.com>
                  Ian Lance Taylor <iant@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "machmode.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "timevar.h"
#include "flags.h"
#include "insn-config.h"
#include "obstack.h"
#include "basic-block.h"
#include "recog.h"
#include "bitmap.h"
#include "dce.h"
#include "expr.h"
#include "except.h"
#include "regs.h"
#include "tree-pass.h"
#include "df.h"

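/* Normalize STACK_GROWS_DOWNWARD to 0 or 1 so that it can be used as
   an ordinary boolean value (it is compared against WORDS_BIG_ENDIAN
   in resolve_simple_move below).  */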
#ifdef STACK_GROWS_DOWNWARD
# undef STACK_GROWS_DOWNWARD
# define STACK_GROWS_DOWNWARD 1
#else
# define STACK_GROWS_DOWNWARD 0
#endif

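/* Declare a heap-allocated vector of bitmaps; this is the type used
   for REG_COPY_GRAPH below.  */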
DEF_VEC_P (bitmap);
DEF_VEC_ALLOC_P (bitmap,heap);

/* Decompose multi-word pseudo-registers into individual
   pseudo-registers when possible.  This is possible when all the uses
   of a multi-word register are via SUBREG, or are copies of the
   register to another location.  Breaking apart the register permits
   more CSE and permits better register allocation.  */

/* Bit N in this bitmap is set if regno N is used in a context in
   which we can decompose it.  */
static bitmap decomposable_context;

/* Bit N in this bitmap is set if regno N is used in a context in
   which it cannot be decomposed.  */
static bitmap non_decomposable_context;

/* Bit N in this bitmap is set if regno N is used in a subreg
   which changes the mode but not the size.  This typically happens
   when the register is accessed as a floating-point value; we want to
   avoid generating accesses to its subwords in integer modes.  */
static bitmap subreg_context;

/* Bit N in the bitmap in element M of this array is set if there is a
   copy from reg M to reg N.  */
static VEC(bitmap,heap) *reg_copy_graph;

/* Return whether X is a simple object which we can take a word_mode
   subreg of.  */

static bool
simple_move_operand (rtx x)
{
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  if (!OBJECT_P (x))
    return false;

  if (GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == SYMBOL_REF
      || GET_CODE (x) == HIGH
      || GET_CODE (x) == CONST)
    return false;

  if (MEM_P (x)
      && (MEM_VOLATILE_P (x)
          || mode_dependent_address_p (XEXP (x, 0))))
    return false;

  return true;
}

/* If INSN is a single set between two objects, return the single set.
   Such an insn can always be decomposed.  INSN should have been
   passed to recog and extract_insn before this is called.  */

static rtx
simple_move (rtx insn)
{
  rtx x;
  rtx set;
  enum machine_mode mode;

  if (recog_data.n_operands != 2)
    return NULL_RTX;

  set = single_set (insn);
  if (!set)
    return NULL_RTX;

  x = SET_DEST (set);
  if (x != recog_data.operand[0] && x != recog_data.operand[1])
    return NULL_RTX;
  if (!simple_move_operand (x))
    return NULL_RTX;

  x = SET_SRC (set);
  if (x != recog_data.operand[0] && x != recog_data.operand[1])
    return NULL_RTX;
  /* For the src we can handle ASM_OPERANDS, and it is beneficial for
     things like x86 rdtsc which returns a DImode value.  */
  if (GET_CODE (x) != ASM_OPERANDS
      && !simple_move_operand (x))
    return NULL_RTX;

  /* We try to decompose in integer modes, to avoid generating
     inefficient code copying between integer and floating point
     registers.  That means that we can't decompose if this is a
     non-integer mode for which there is no integer mode of the same
     size.  */
  mode = GET_MODE (SET_SRC (set));
  if (!SCALAR_INT_MODE_P (mode)
      && (mode_for_size (GET_MODE_SIZE (mode) * BITS_PER_UNIT, MODE_INT, 0)
          == BLKmode))
    return NULL_RTX;

  /* Reject PARTIAL_INT modes.  They are used for processor specific
     purposes and it's probably best not to tamper with them.  */
  if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
    return NULL_RTX;

  return set;
}

/* If SET is a copy from one multi-word pseudo-register to another,
   record that in reg_copy_graph.  Return whether it is such a
   copy.  */

static bool
find_pseudo_copy (rtx set)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);
  unsigned int rd, rs;
  bitmap b;

  if (!REG_P (dest) || !REG_P (src))
    return false;

  rd = REGNO (dest);
  rs = REGNO (src);
  if (HARD_REGISTER_NUM_P (rd) || HARD_REGISTER_NUM_P (rs))
    return false;

  if (GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD)
    return false;

  b = VEC_index (bitmap, reg_copy_graph, rs);
  if (b == NULL)
    {
      b = BITMAP_ALLOC (NULL);
      VEC_replace (bitmap, reg_copy_graph, rs, b);
    }

  bitmap_set_bit (b, rd);

  return true;
}

/* Look through the registers in DECOMPOSABLE_CONTEXT.  For each case
   where they are copied to another register, add the register to
   which they are copied to DECOMPOSABLE_CONTEXT.  Use
   NON_DECOMPOSABLE_CONTEXT to limit this--we don't bother to track
   copies of registers which are in NON_DECOMPOSABLE_CONTEXT.  */

static void
propagate_pseudo_copies (void)
{
  bitmap queue, propagate;

  queue = BITMAP_ALLOC (NULL);
  propagate = BITMAP_ALLOC (NULL);

  bitmap_copy (queue, decomposable_context);
  do
    {
      bitmap_iterator iter;
      unsigned int i;

      bitmap_clear (propagate);

      EXECUTE_IF_SET_IN_BITMAP (queue, 0, i, iter)
        {
          bitmap b = VEC_index (bitmap, reg_copy_graph, i);
          if (b)
            bitmap_ior_and_compl_into (propagate, b, non_decomposable_context);
        }

      bitmap_and_compl (queue, propagate, decomposable_context);
      bitmap_ior_into (decomposable_context, propagate);
    }
  while (!bitmap_empty_p (queue));

  BITMAP_FREE (queue);
  BITMAP_FREE (propagate);
}

/* A pointer to one of these values is passed to
   find_decomposable_subregs via for_each_rtx.  */

enum classify_move_insn
{
  /* Not a simple move from one location to another.  */
  NOT_SIMPLE_MOVE,
  /* A simple move from one pseudo-register to another.  */
  SIMPLE_PSEUDO_REG_MOVE,
  /* A simple move involving a non-pseudo-register.  */
  SIMPLE_MOVE
};

/* This is called via for_each_rtx.  If we find a SUBREG which we
   could use to decompose a pseudo-register, set a bit in
   DECOMPOSABLE_CONTEXT.  If we find an unadorned register which is
   not a simple pseudo-register copy, DATA will point at the type of
   move, and we set a bit in DECOMPOSABLE_CONTEXT or
   NON_DECOMPOSABLE_CONTEXT as appropriate.  */

static int
find_decomposable_subregs (rtx *px, void *data)
{
  enum classify_move_insn *pcmi = (enum classify_move_insn *) data;
  rtx x = *px;

  if (x == NULL_RTX)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx inner = SUBREG_REG (x);
      unsigned int regno, outer_size, inner_size, outer_words, inner_words;

      if (!REG_P (inner))
        return 0;

      regno = REGNO (inner);
      if (HARD_REGISTER_NUM_P (regno))
        return -1;

      outer_size = GET_MODE_SIZE (GET_MODE (x));
      inner_size = GET_MODE_SIZE (GET_MODE (inner));
      outer_words = (outer_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      inner_words = (inner_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

      /* We only try to decompose single word subregs of multi-word
         registers.  When we find one, we return -1 to avoid iterating
         over the inner register.

         ??? This doesn't allow, e.g., DImode subregs of TImode values
         on 32-bit targets.  We would need to record the way the
         pseudo-register was used, and only decompose if all the uses
         were the same number and size of pieces.  Hopefully this
         doesn't happen much.  */

      if (outer_words == 1 && inner_words > 1)
        {
          bitmap_set_bit (decomposable_context, regno);
          return -1;
        }

      /* If this is a cast from one mode to another, where the modes
         have the same size, and they are not tieable, then mark this
         register as non-decomposable.  If we decompose it we are
         likely to mess up whatever the backend is trying to do.  */
      if (outer_words > 1
          && outer_size == inner_size
          && !MODES_TIEABLE_P (GET_MODE (x), GET_MODE (inner)))
        {
          bitmap_set_bit (non_decomposable_context, regno);
          bitmap_set_bit (subreg_context, regno);
          return -1;
        }
    }
  else if (REG_P (x))
    {
      unsigned int regno;

      /* We will see an outer SUBREG before we see the inner REG, so
         when we see a plain REG here it means a direct reference to
         the register.

         If this is not a simple copy from one location to another,
         then we cannot decompose this register.  If this is a simple
         copy from one pseudo-register to another, and the mode is
         right, then we mark the register as decomposable.
         Otherwise we don't say anything about this register --
         it could be decomposed, but whether that would be
         profitable depends upon how it is used elsewhere.

         We only set bits in the bitmap for multi-word
         pseudo-registers, since those are the only ones we care about
         and it keeps the size of the bitmaps down.  */

      regno = REGNO (x);
      if (!HARD_REGISTER_NUM_P (regno)
          && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        {
          switch (*pcmi)
            {
            case NOT_SIMPLE_MOVE:
              bitmap_set_bit (non_decomposable_context, regno);
              break;
            case SIMPLE_PSEUDO_REG_MOVE:
              if (MODES_TIEABLE_P (GET_MODE (x), word_mode))
                bitmap_set_bit (decomposable_context, regno);
              break;
            case SIMPLE_MOVE:
              break;
            default:
              gcc_unreachable ();
            }
        }
    }
  else if (MEM_P (x))
    {
      enum classify_move_insn cmi_mem = NOT_SIMPLE_MOVE;

      /* Any registers used in a MEM do not participate in a
         SIMPLE_MOVE or SIMPLE_PSEUDO_REG_MOVE.  Do our own recursion
         here, and return -1 to block the parent's recursion.  */
      for_each_rtx (&XEXP (x, 0), find_decomposable_subregs, &cmi_mem);
      return -1;
    }

  return 0;
}

/* Decompose REGNO into word-sized components.  We smash the REG node
   in place.  This ensures that (1) something goes wrong quickly if we
   fail to make some replacement, and (2) the debug information inside
   the symbol table is automatically kept up to date.  */

static void
decompose_register (unsigned int regno)
{
  rtx reg;
  unsigned int words, i;
  rtvec v;

  reg = regno_reg_rtx[regno];

  regno_reg_rtx[regno] = NULL_RTX;

  words = GET_MODE_SIZE (GET_MODE (reg));
  words = (words + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  v = rtvec_alloc (words);
  for (i = 0; i < words; ++i)
    RTVEC_ELT (v, i) = gen_reg_rtx_offset (reg, word_mode, i * UNITS_PER_WORD);

  PUT_CODE (reg, CONCATN);
  XVEC (reg, 0) = v;

  if (dump_file)
    {
      fprintf (dump_file, "; Splitting reg %u ->", regno);
      for (i = 0; i < words; ++i)
        fprintf (dump_file, " %u", REGNO (XVECEXP (reg, 0, i)));
      fputc ('\n', dump_file);
    }
}

/* Get a SUBREG of a CONCATN.  */

static rtx
simplify_subreg_concatn (enum machine_mode outermode, rtx op,
                         unsigned int byte)
{
  unsigned int inner_size;
  enum machine_mode innermode;
  rtx part;
  unsigned int final_offset;

  gcc_assert (GET_CODE (op) == CONCATN);
  gcc_assert (byte % GET_MODE_SIZE (outermode) == 0);

  innermode = GET_MODE (op);
  gcc_assert (byte < GET_MODE_SIZE (innermode));
  gcc_assert (GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (innermode));

  inner_size = GET_MODE_SIZE (innermode) / XVECLEN (op, 0);
  part = XVECEXP (op, 0, byte / inner_size);
  final_offset = byte % inner_size;
  if (final_offset + GET_MODE_SIZE (outermode) > inner_size)
    return NULL_RTX;

  return simplify_gen_subreg (outermode, part, GET_MODE (part), final_offset);
}

/* Wrapper around simplify_gen_subreg which handles CONCATN.  */

static rtx
simplify_gen_subreg_concatn (enum machine_mode outermode, rtx op,
                             enum machine_mode innermode, unsigned int byte)
{
  rtx ret;

  /* We have to handle generating a SUBREG of a SUBREG of a CONCATN.
     If OP is a SUBREG of a CONCATN, then it must be a simple mode
     change with the same size and offset 0, or it must extract a
     part.  We shouldn't see anything else here.  */
  if (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == CONCATN)
    {
      rtx op2;

      if ((GET_MODE_SIZE (GET_MODE (op))
           == GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
          && SUBREG_BYTE (op) == 0)
        return simplify_gen_subreg_concatn (outermode, SUBREG_REG (op),
                                            GET_MODE (SUBREG_REG (op)), byte);

      op2 = simplify_subreg_concatn (GET_MODE (op), SUBREG_REG (op),
                                     SUBREG_BYTE (op));
      if (op2 == NULL_RTX)
        {
          /* We don't handle paradoxical subregs here.  */
          gcc_assert (GET_MODE_SIZE (outermode)
                      <= GET_MODE_SIZE (GET_MODE (op)));
          gcc_assert (GET_MODE_SIZE (GET_MODE (op))
                      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))));
          op2 = simplify_subreg_concatn (outermode, SUBREG_REG (op),
                                         byte + SUBREG_BYTE (op));
          gcc_assert (op2 != NULL_RTX);
          return op2;
        }

      op = op2;
      gcc_assert (op != NULL_RTX);
      gcc_assert (innermode == GET_MODE (op));
    }

  if (GET_CODE (op) == CONCATN)
    return simplify_subreg_concatn (outermode, op, byte);

  ret = simplify_gen_subreg (outermode, op, innermode, byte);

  /* If we see an insn like (set (reg:DI) (subreg:DI (reg:SI) 0)) then
     resolve_simple_move will ask for the high part of the paradoxical
     subreg, which does not have a value.  Just return a zero.  */
  if (ret == NULL_RTX
      && GET_CODE (op) == SUBREG
      && SUBREG_BYTE (op) == 0
      && (GET_MODE_SIZE (innermode)
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
    return CONST0_RTX (outermode);

  gcc_assert (ret != NULL_RTX);
  return ret;
}

/* Return whether we should resolve X into the registers into which it
   was decomposed.  */

static bool
resolve_reg_p (rtx x)
{
  return GET_CODE (x) == CONCATN;
}

/* Return whether X is a SUBREG of a register which we need to
   resolve.  */

static bool
resolve_subreg_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return resolve_reg_p (SUBREG_REG (x));
}

/* This is called via for_each_rtx.  Look for SUBREGs which need to be
   decomposed.  */

static int
resolve_subreg_use (rtx *px, void *data)
{
  rtx insn = (rtx) data;
  rtx x = *px;

  if (x == NULL_RTX)
    return 0;

  if (resolve_subreg_p (x))
    {
      x = simplify_subreg_concatn (GET_MODE (x), SUBREG_REG (x),
                                   SUBREG_BYTE (x));

      /* It is possible for a note to contain a reference which we can
         decompose.  In this case, return 1 to the caller to indicate
         that the note must be removed.  */
      if (!x)
        {
          gcc_assert (!insn);
          return 1;
        }

      validate_change (insn, px, x, 1);
      return -1;
    }

  if (resolve_reg_p (x))
    {
      /* Return 1 to the caller to indicate that we found a direct
         reference to a register which is being decomposed.  This can
         happen inside notes, multiword shift or zero-extend
         instructions.  */
      return 1;
    }

  return 0;
}

/* This is called via for_each_rtx.  Look for SUBREGs which can be
   decomposed and decomposed REGs that need copying.  */

static int
adjust_decomposed_uses (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (x == NULL_RTX)
    return 0;

  if (resolve_subreg_p (x))
    {
      x = simplify_subreg_concatn (GET_MODE (x), SUBREG_REG (x),
                                   SUBREG_BYTE (x));

      if (x)
        *px = x;
      else
        x = copy_rtx (*px);
    }

  if (resolve_reg_p (x))
    *px = copy_rtx (x);

  return 0;
}

/* Resolve any decomposed registers which appear in register notes on
   INSN.  */

static void
resolve_reg_notes (rtx insn)
{
  rtx *pnote, note;

  note = find_reg_equal_equiv_note (insn);
  if (note)
    {
      int old_count = num_validated_changes ();
      if (for_each_rtx (&XEXP (note, 0), resolve_subreg_use, NULL))
        remove_note (insn, note);
      else
        if (old_count != num_validated_changes ())
          df_notes_rescan (insn);
    }

  pnote = &REG_NOTES (insn);
  while (*pnote != NULL_RTX)
    {
      bool del = false;

      note = *pnote;
      switch (REG_NOTE_KIND (note))
        {
        case REG_DEAD:
        case REG_UNUSED:
          if (resolve_reg_p (XEXP (note, 0)))
            del = true;
          break;

        default:
          break;
        }

      if (del)
        *pnote = XEXP (note, 1);
      else
        pnote = &XEXP (note, 1);
    }
}

/* Return whether X can be decomposed into subwords.  */

static bool
can_decompose_p (rtx x)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (HARD_REGISTER_NUM_P (regno))
        return (validate_subreg (word_mode, GET_MODE (x), x, UNITS_PER_WORD)
                && HARD_REGNO_MODE_OK (regno, word_mode));
      else
        return !bitmap_bit_p (subreg_context, regno);
    }

  return true;
}

/* Decompose the registers used in a simple move SET within INSN.  If
   we don't change anything, return INSN, otherwise return the start
   of the sequence of moves.  */

static rtx
resolve_simple_move (rtx set, rtx insn)
{
  rtx src, dest, real_dest, insns;
  enum machine_mode orig_mode, dest_mode;
  unsigned int words;
  bool pushing;

  src = SET_SRC (set);
  dest = SET_DEST (set);
  orig_mode = GET_MODE (dest);

  words = (GET_MODE_SIZE (orig_mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  if (words <= 1)
    return insn;

  start_sequence ();

  /* We have to handle copying from a SUBREG of a decomposed reg where
     the SUBREG is larger than word size.  Rather than assume that we
     can take a word_mode SUBREG of the destination, we copy to a new
     register and then copy that to the destination.  */

  real_dest = NULL_RTX;

  if (GET_CODE (src) == SUBREG
      && resolve_reg_p (SUBREG_REG (src))
      && (SUBREG_BYTE (src) != 0
          || (GET_MODE_SIZE (orig_mode)
              != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))))
    {
      real_dest = dest;
      dest = gen_reg_rtx (orig_mode);
      if (REG_P (real_dest))
        REG_ATTRS (dest) = REG_ATTRS (real_dest);
    }

  /* Similarly if we are copying to a SUBREG of a decomposed reg where
     the SUBREG is larger than word size.  */

  if (GET_CODE (dest) == SUBREG
      && resolve_reg_p (SUBREG_REG (dest))
      && (SUBREG_BYTE (dest) != 0
          || (GET_MODE_SIZE (orig_mode)
              != GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))))
    {
      rtx reg, minsn, smove;

      reg = gen_reg_rtx (orig_mode);
      minsn = emit_move_insn (reg, src);
      smove = single_set (minsn);
      gcc_assert (smove != NULL_RTX);
      resolve_simple_move (smove, minsn);
      src = reg;
    }

  /* If we didn't have any big SUBREGS of decomposed registers, and
     neither side of the move is a register we are decomposing, then
     we don't have to do anything here.  */

  if (src == SET_SRC (set)
      && dest == SET_DEST (set)
      && !resolve_reg_p (src)
      && !resolve_subreg_p (src)
      && !resolve_reg_p (dest)
      && !resolve_subreg_p (dest))
    {
      end_sequence ();
      return insn;
    }

  /* It's possible for the code to use a subreg of a decomposed
     register while forming an address.  We need to handle that before
     passing the address to emit_move_insn.  We pass NULL_RTX as the
     insn parameter to resolve_subreg_use because we cannot validate
     the insn yet.  */
  if (MEM_P (src) || MEM_P (dest))
    {
      int acg;

      if (MEM_P (src))
        for_each_rtx (&XEXP (src, 0), resolve_subreg_use, NULL_RTX);
      if (MEM_P (dest))
        for_each_rtx (&XEXP (dest, 0), resolve_subreg_use, NULL_RTX);
      acg = apply_change_group ();
      gcc_assert (acg);
    }

  /* If SRC is a register which we can't decompose, or has side
     effects, we need to move via a temporary register.  */

  if (!can_decompose_p (src)
      || side_effects_p (src)
      || GET_CODE (src) == ASM_OPERANDS)
    {
      rtx reg;

      reg = gen_reg_rtx (orig_mode);
      emit_move_insn (reg, src);
      src = reg;
    }

  /* If DEST is a register which we can't decompose, or has side
     effects, we need to first move to a temporary register.  We
     handle the common case of pushing an operand directly.  We also
     go through a temporary register if it holds a floating point
     value.  This gives us better code on systems which can't move
     data easily between integer and floating point registers.  */

  dest_mode = orig_mode;
  pushing = push_operand (dest, dest_mode);
  if (!can_decompose_p (dest)
      || (side_effects_p (dest) && !pushing)
      || (!SCALAR_INT_MODE_P (dest_mode)
          && !resolve_reg_p (dest)
          && !resolve_subreg_p (dest)))
    {
      if (real_dest == NULL_RTX)
        real_dest = dest;
      if (!SCALAR_INT_MODE_P (dest_mode))
        {
          dest_mode = mode_for_size (GET_MODE_SIZE (dest_mode) * BITS_PER_UNIT,
                                     MODE_INT, 0);
          gcc_assert (dest_mode != BLKmode);
        }
      dest = gen_reg_rtx (dest_mode);
      if (REG_P (real_dest))
        REG_ATTRS (dest) = REG_ATTRS (real_dest);
    }

  if (pushing)
    {
      unsigned int i, j, jinc;

      gcc_assert (GET_MODE_SIZE (orig_mode) % UNITS_PER_WORD == 0);
      gcc_assert (GET_CODE (XEXP (dest, 0)) != PRE_MODIFY);
      gcc_assert (GET_CODE (XEXP (dest, 0)) != POST_MODIFY);

      if (WORDS_BIG_ENDIAN == STACK_GROWS_DOWNWARD)
        {
          j = 0;
          jinc = 1;
        }
      else
        {
          j = words - 1;
          jinc = -1;
        }

      for (i = 0; i < words; ++i, j += jinc)
        {
          rtx temp;

          temp = copy_rtx (XEXP (dest, 0));
          temp = adjust_automodify_address_nv (dest, word_mode, temp,
                                               j * UNITS_PER_WORD);
          emit_move_insn (temp,
                          simplify_gen_subreg_concatn (word_mode, src,
                                                       orig_mode,
                                                       j * UNITS_PER_WORD));
        }
    }
  else
    {
      unsigned int i;

      if (REG_P (dest) && !HARD_REGISTER_NUM_P (REGNO (dest)))
        emit_clobber (dest);

      for (i = 0; i < words; ++i)
        emit_move_insn (simplify_gen_subreg_concatn (word_mode, dest,
                                                     dest_mode,
                                                     i * UNITS_PER_WORD),
                        simplify_gen_subreg_concatn (word_mode, src,
                                                     orig_mode,
                                                     i * UNITS_PER_WORD));
    }

  if (real_dest != NULL_RTX)
    {
      rtx mdest, minsn, smove;

      if (dest_mode == orig_mode)
        mdest = dest;
      else
        mdest = simplify_gen_subreg (orig_mode, dest, GET_MODE (dest), 0);
      minsn = emit_move_insn (real_dest, mdest);

      smove = single_set (minsn);
      gcc_assert (smove != NULL_RTX);

      resolve_simple_move (smove, minsn);
    }

  insns = get_insns ();
  end_sequence ();

  copy_reg_eh_region_note_forward (insn, insns, NULL_RTX);

  emit_insn_before (insns, insn);

  delete_insn (insn);

  return insns;
}

/* Change a CLOBBER of a decomposed register into a CLOBBER of the
   component registers.  Return whether we changed something.  */

static bool
resolve_clobber (rtx pat, rtx insn)
{
  rtx reg;
  enum machine_mode orig_mode;
  unsigned int words, i;
  int ret;

  reg = XEXP (pat, 0);
  if (!resolve_reg_p (reg) && !resolve_subreg_p (reg))
    return false;

  orig_mode = GET_MODE (reg);
  words = GET_MODE_SIZE (orig_mode);
  words = (words + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  ret = validate_change (NULL_RTX, &XEXP (pat, 0),
                         simplify_gen_subreg_concatn (word_mode, reg,
                                                      orig_mode, 0),
                         0);
  df_insn_rescan (insn);
  gcc_assert (ret != 0);

  for (i = words - 1; i > 0; --i)
    {
      rtx x;

      x = simplify_gen_subreg_concatn (word_mode, reg, orig_mode,
                                       i * UNITS_PER_WORD);
      x = gen_rtx_CLOBBER (VOIDmode, x);
      emit_insn_after (x, insn);
    }

  resolve_reg_notes (insn);

  return true;
}

/* A USE of a decomposed register is no longer meaningful.  Return
   whether we changed something.  */

static bool
resolve_use (rtx pat, rtx insn)
{
  if (resolve_reg_p (XEXP (pat, 0)) || resolve_subreg_p (XEXP (pat, 0)))
    {
      delete_insn (insn);
      return true;
    }

  resolve_reg_notes (insn);

  return false;
}

/* Simplify a VAR_LOCATION in a DEBUG_INSN, resolving any decomposed
   registers that appear in it.  */

static void
resolve_debug (rtx insn)
{
  for_each_rtx (&PATTERN (insn), adjust_decomposed_uses, NULL_RTX);

  df_insn_rescan (insn);

  resolve_reg_notes (insn);
}

/* Check whether INSN is a decomposable multiword-shift or zero-extend
   and set the DECOMPOSABLE_CONTEXT bitmap accordingly.  Return a
   nonzero value if a decomposable insn has been found.  */

static int
find_decomposable_shift_zext (rtx insn)
{
  rtx set;
  rtx op;
  rtx op_operand;

  set = single_set (insn);
  if (!set)
    return 0;

  op = SET_SRC (set);
  if (GET_CODE (op) != ASHIFT
      && GET_CODE (op) != LSHIFTRT
      && GET_CODE (op) != ZERO_EXTEND)
    return 0;

  op_operand = XEXP (op, 0);
  if (!REG_P (SET_DEST (set)) || !REG_P (op_operand)
      || HARD_REGISTER_NUM_P (REGNO (SET_DEST (set)))
      || HARD_REGISTER_NUM_P (REGNO (op_operand))
      || !SCALAR_INT_MODE_P (GET_MODE (op)))
    return 0;

  if (GET_CODE (op) == ZERO_EXTEND)
    {
      if (GET_MODE (op_operand) != word_mode
          || GET_MODE_BITSIZE (GET_MODE (op)) != 2 * BITS_PER_WORD)
        return 0;
    }
  else /* left or right shift */
    {
      if (!CONST_INT_P (XEXP (op, 1))
          || INTVAL (XEXP (op, 1)) < BITS_PER_WORD
          || GET_MODE_BITSIZE (GET_MODE (op_operand)) != 2 * BITS_PER_WORD)
        return 0;
    }

  bitmap_set_bit (decomposable_context, REGNO (SET_DEST (set)));

  if (GET_CODE (op) != ZERO_EXTEND)
    bitmap_set_bit (decomposable_context, REGNO (op_operand));

  return 1;
}

/* Decompose a more-than-word-wide shift (in INSN) of a multiword
   pseudo, or a multiword zero-extend of a word-mode pseudo, into a
   move and a 'set to zero' insn.  Return a pointer to the new insn
   when a replacement was done.  */

static rtx
resolve_shift_zext (rtx insn)
{
  rtx set;
  rtx op;
  rtx op_operand;
  rtx insns;
  rtx src_reg, dest_reg, dest_zero;
  int src_reg_num, dest_reg_num, offset1, offset2, src_offset;

  set = single_set (insn);
  if (!set)
    return NULL_RTX;

  op = SET_SRC (set);
  if (GET_CODE (op) != ASHIFT
      && GET_CODE (op) != LSHIFTRT
      && GET_CODE (op) != ZERO_EXTEND)
    return NULL_RTX;

  op_operand = XEXP (op, 0);

  if (!resolve_reg_p (SET_DEST (set)) && !resolve_reg_p (op_operand))
    return NULL_RTX;

  /* src_reg_num is the number of the word mode register which we
     are operating on.  For a left shift and a zero_extend on little
     endian machines this is register 0.  */
  src_reg_num = GET_CODE (op) == LSHIFTRT ? 1 : 0;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_SIZE (GET_MODE (op_operand)) > UNITS_PER_WORD)
    src_reg_num = 1 - src_reg_num;

  if (GET_CODE (op) == ZERO_EXTEND)
    dest_reg_num = WORDS_BIG_ENDIAN ? 1 : 0;
  else
    dest_reg_num = 1 - src_reg_num;

  offset1 = UNITS_PER_WORD * dest_reg_num;
  offset2 = UNITS_PER_WORD * (1 - dest_reg_num);
  src_offset = UNITS_PER_WORD * src_reg_num;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    {
      offset1 += UNITS_PER_WORD - 1;
      offset2 += UNITS_PER_WORD - 1;
      src_offset += UNITS_PER_WORD - 1;
    }

  start_sequence ();

  dest_reg = simplify_gen_subreg_concatn (word_mode, SET_DEST (set),
                                          GET_MODE (SET_DEST (set)),
                                          offset1);
  dest_zero = simplify_gen_subreg_concatn (word_mode, SET_DEST (set),
                                           GET_MODE (SET_DEST (set)),
                                           offset2);
  src_reg = simplify_gen_subreg_concatn (word_mode, op_operand,
                                         GET_MODE (op_operand),
                                         src_offset);
  if (GET_CODE (op) != ZERO_EXTEND)
    {
      int shift_count = INTVAL (XEXP (op, 1));
      if (shift_count > BITS_PER_WORD)
        src_reg = expand_shift (GET_CODE (op) == ASHIFT ?
                                LSHIFT_EXPR : RSHIFT_EXPR,
                                word_mode, src_reg,
                                build_int_cst (NULL_TREE,
                                               shift_count - BITS_PER_WORD),
                                dest_reg, 1);
    }

  if (dest_reg != src_reg)
    emit_move_insn (dest_reg, src_reg);
  emit_move_insn (dest_zero, CONST0_RTX (word_mode));
  insns = get_insns ();

  end_sequence ();

  emit_insn_before (insns, insn);

  if (dump_file)
    {
      rtx in;
      fprintf (dump_file, "; Replacing insn: %d with insns: ", INSN_UID (insn));
      for (in = insns; in != insn; in = NEXT_INSN (in))
        fprintf (dump_file, "%d ", INSN_UID (in));
      fprintf (dump_file, "\n");
    }

  delete_insn (insn);
  return insns;
}

/* Look for registers which are always accessed via word-sized SUBREGs
   or via copies.  Decompose these registers into several word-sized
   pseudo-registers.  */

static void
decompose_multiword_subregs (void)
{
  unsigned int max;
  basic_block bb;

  if (df)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  max = max_reg_num ();

  /* First see if there are any multi-word pseudo-registers.  If there
     aren't, there is nothing we can do.  This should speed up this
     pass in the normal case, since it should be faster than scanning
     all the insns.  */
  {
    unsigned int i;

    for (i = FIRST_PSEUDO_REGISTER; i < max; ++i)
      {
        if (regno_reg_rtx[i] != NULL
            && GET_MODE_SIZE (GET_MODE (regno_reg_rtx[i])) > UNITS_PER_WORD)
          break;
      }
    if (i == max)
      return;
  }

  if (df)
    run_word_dce ();

  /* FIXME: When the dataflow branch is merged, we can change this
     code to look for each multi-word pseudo-register and to find each
     insn which sets or uses that register.  That should be faster
     than scanning all the insns.  */

  decomposable_context = BITMAP_ALLOC (NULL);
  non_decomposable_context = BITMAP_ALLOC (NULL);
  subreg_context = BITMAP_ALLOC (NULL);

  reg_copy_graph = VEC_alloc (bitmap, heap, max);
  VEC_safe_grow (bitmap, heap, reg_copy_graph, max);
  memset (VEC_address (bitmap, reg_copy_graph), 0, sizeof (bitmap) * max);

  FOR_EACH_BB (bb)
    {
      rtx insn;

      FOR_BB_INSNS (bb, insn)
        {
          rtx set;
          enum classify_move_insn cmi;
          int i, n;

          if (!INSN_P (insn)
              || GET_CODE (PATTERN (insn)) == CLOBBER
              || GET_CODE (PATTERN (insn)) == USE)
            continue;

          if (find_decomposable_shift_zext (insn))
            continue;

          recog_memoized (insn);
          extract_insn (insn);

          set = simple_move (insn);

          if (!set)
            cmi = NOT_SIMPLE_MOVE;
          else
            {
              if (find_pseudo_copy (set))
                cmi = SIMPLE_PSEUDO_REG_MOVE;
              else
                cmi = SIMPLE_MOVE;
            }

          n = recog_data.n_operands;
          for (i = 0; i < n; ++i)
            {
              for_each_rtx (&recog_data.operand[i],
                            find_decomposable_subregs,
                            &cmi);

              /* We handle ASM_OPERANDS as a special case to support
                 things like x86 rdtsc which returns a DImode value.
                 We can decompose the output, which will certainly be
                 operand 0, but not the inputs.  */

              if (cmi == SIMPLE_MOVE
                  && GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
                {
                  gcc_assert (i == 0);
                  cmi = NOT_SIMPLE_MOVE;
                }
            }
        }
    }

  bitmap_and_compl_into (decomposable_context, non_decomposable_context);
  if (!bitmap_empty_p (decomposable_context))
    {
      sbitmap sub_blocks;
      unsigned int i;
      sbitmap_iterator sbi;
      bitmap_iterator iter;
      unsigned int regno;

      propagate_pseudo_copies ();

      sub_blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (sub_blocks);

      EXECUTE_IF_SET_IN_BITMAP (decomposable_context, 0, regno, iter)
        decompose_register (regno);

      FOR_EACH_BB (bb)
        {
          rtx insn;

          FOR_BB_INSNS (bb, insn)
            {
              rtx pat;

              if (!INSN_P (insn))
                continue;

              pat = PATTERN (insn);
              if (GET_CODE (pat) == CLOBBER)
                resolve_clobber (pat, insn);
              else if (GET_CODE (pat) == USE)
                resolve_use (pat, insn);
              else if (DEBUG_INSN_P (insn))
                resolve_debug (insn);
              else
                {
                  rtx set;
                  int i;

                  recog_memoized (insn);
                  extract_insn (insn);

                  set = simple_move (insn);
                  if (set)
                    {
                      rtx orig_insn = insn;
                      bool cfi = control_flow_insn_p (insn);

                      /* We can end up splitting loads to multi-word pseudos
                         into separate loads to machine word size pseudos.
                         When this happens, we first had one load that can
                         throw, and after resolve_simple_move we'll have a
                         bunch of loads (at least two).  All those loads may
                         trap if we can have non-call exceptions, so they
                         all will end the current basic block.  We split the
                         block after the outer loop over all insns, but we
                         make sure here that we will be able to split the
                         basic block and still produce the correct control
                         flow graph for it.  */
                      gcc_assert (!cfi
                                  || (cfun->can_throw_non_call_exceptions
                                      && can_throw_internal (insn)));

                      insn = resolve_simple_move (set, insn);
                      if (insn != orig_insn)
                        {
                          recog_memoized (insn);
                          extract_insn (insn);

                          if (cfi)
                            SET_BIT (sub_blocks, bb->index);
                        }
                    }
                  else
                    {
                      rtx decomposed_shift;

                      decomposed_shift = resolve_shift_zext (insn);
                      if (decomposed_shift != NULL_RTX)
                        {
                          insn = decomposed_shift;
                          recog_memoized (insn);
                          extract_insn (insn);
                        }
                    }

                  for (i = recog_data.n_operands - 1; i >= 0; --i)
                    for_each_rtx (recog_data.operand_loc[i],
                                  resolve_subreg_use,
                                  insn);

                  resolve_reg_notes (insn);

                  if (num_validated_changes () > 0)
                    {
                      for (i = recog_data.n_dups - 1; i >= 0; --i)
                        {
                          rtx *pl = recog_data.dup_loc[i];
                          int dup_num = recog_data.dup_num[i];
                          rtx *px = recog_data.operand_loc[dup_num];

                          validate_unshare_change (insn, pl, *px, 1);
                        }

                      i = apply_change_group ();
                      gcc_assert (i);
                    }
                }
            }
        }

      /* If we had insns to split that caused control flow insns in the middle
         of a basic block, split those blocks now.  Note that we only handle
         the case where splitting a load has caused multiple possibly trapping
         loads to appear.  */
      EXECUTE_IF_SET_IN_SBITMAP (sub_blocks, 0, i, sbi)
        {
          rtx insn, end;
          edge fallthru;

          bb = BASIC_BLOCK (i);
          insn = BB_HEAD (bb);
          end = BB_END (bb);

          while (insn != end)
            {
              if (control_flow_insn_p (insn))
                {
                  /* Split the block after insn.  There will be a fallthru
                     edge, which is OK so we keep it.  We have to create the
                     exception edges ourselves.  */
                  fallthru = split_block (bb, insn);
                  rtl_make_eh_edge (NULL, bb, BB_END (bb));
                  bb = fallthru->dest;
                  insn = BB_HEAD (bb);
                }
              else
                insn = NEXT_INSN (insn);
            }
        }

      sbitmap_free (sub_blocks);
    }

  {
    unsigned int i;
    bitmap b;

    FOR_EACH_VEC_ELT (bitmap, reg_copy_graph, i, b)
      if (b)
        BITMAP_FREE (b);
  }

  VEC_free (bitmap, heap, reg_copy_graph);

  BITMAP_FREE (decomposable_context);
  BITMAP_FREE (non_decomposable_context);
  BITMAP_FREE (subreg_context);
}

/* Gate function for lower subreg pass.  */

static bool
gate_handle_lower_subreg (void)
{
  return flag_split_wide_types != 0;
}

/* Implement first lower subreg pass.  */

static unsigned int
rest_of_handle_lower_subreg (void)
{
  decompose_multiword_subregs ();
  return 0;
}

/* Implement second lower subreg pass.  */

static unsigned int
rest_of_handle_lower_subreg2 (void)
{
  decompose_multiword_subregs ();
  return 0;
}

struct rtl_opt_pass pass_lower_subreg =
{
 {
  RTL_PASS,
  "subreg1",                            /* name */
  gate_handle_lower_subreg,             /* gate */
  rest_of_handle_lower_subreg,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LOWER_SUBREG,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_lower_subreg2 =
{
 {
  RTL_PASS,
  "subreg2",                            /* name */
  gate_handle_lower_subreg,             /* gate */
  rest_of_handle_lower_subreg2,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LOWER_SUBREG,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};