/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c, final.c, and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
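
/* Usage sketch (illustrative, not part of the GCC sources): a pass that
   generates fresh rtl runs with volatile_ok clear, while a pass that must
   recognize every insn already in the stream enables it first:

	init_recog_no_volatile ();	-- while expanding or generating rtl
	...
	init_recog ();			-- before recognizing existing insns

   Both calls simply toggle the volatile_ok flag defined above.  */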

&#12;
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
&#12;
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
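
/* Usage sketch (illustrative): a single change is validated on the spot,
   so the caller only needs to test the return value:

	if (validate_change (insn, &SET_SRC (pat), new_src, 0))
	  ...			-- INSN was re-recognized with the change
	else
	  ...			-- *loc was restored, nothing to undo

   PAT and NEW_SRC here are hypothetical caller variables, not fixed
   interfaces.  Group mode (IN_GROUP nonzero) is shown after
   cancel_changes, below.  */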

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.
     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;
      if (object && INSN_P (object))
	df_insn_rescan (object);
    }

  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
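
/* Usage sketch (illustrative): several dependent changes made as one
   group, committed or rolled back atomically:

	validate_change (insn, &XEXP (x, 0), op1, 1);
	validate_change (insn, &XEXP (x, 1), op0, 1);
	if (! apply_change_group ())
	  ...			-- both edits were backed out

   Callers that need finer control can checkpoint with
   num_validated_changes, later call verify_changes on that number, and
   then either confirm_change_group or cancel_changes; that is exactly
   what apply_change_group does for the whole group.  OP0 and OP1 are
   hypothetical replacement rtxes.  */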

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }
  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X) -> (subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;
  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
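
/* Usage sketch (illustrative): a copy-propagation-style transformation
   that rewrites INSN in place and keeps it only if it still matches:

	if (validate_replace_rtx (old_reg, new_reg, insn))
	  ...			-- every OLD_REG in INSN is now NEW_REG

   OLD_REG and NEW_REG are hypothetical; on failure the insn is left
   exactly as it was, since apply_change_group cancels the group.  */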

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
&#12;
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
&#12;
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in an incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}
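
/* For example, a machine description typically uses this predicate in a
   match_operand expression such as

	(match_operand:SI 1 "general_operand" "g")

   and C code can call it directly, e.g.
   general_operand (operands[1], SImode).  */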
&#12;
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
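
/* For example, on a target where the stack grows downward and
   PUSH_ROUNDING does not pad a word, a word-sized push is

	(mem:SI (pre_dec:SI (reg:SI sp)))

   which this predicate accepts; if PUSH_ROUNDING pads the size, the
   address must instead be the PRE_MODIFY form checked above.  (The
   address mode shown is illustrative; on a real target it is Pmode.)  */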

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before the end of the reload
       pass, because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
&#12;
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
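
/* For example, the body generated for
	asm ("..." : "=r" (x), "=r" (y) : "r" (z));
   is a PARALLEL of two SETs whose sources share one ASM_OPERANDS with a
   single input, so asm_noperands returns 3 (two outputs plus one input);
   for a plain asm with no outputs it returns the number of inputs.  */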

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

    }

  if (loc)
    {
#ifdef USE_MAPPED_LOCATION
      *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
#else
      loc->file = ASM_OPERANDS_SOURCE_FILE (asmop);
      loc->line = ASM_OPERANDS_SOURCE_LINE (asmop);
#endif
    }

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  do
	    constraint++;
	  while (ISDIGIT (*constraint));
	  if (! result)
	    result = -1;
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
		   /* Every memory operand can be reloaded to fit.  */
		   && memory_operand (op, VOIDmode))
	    result = 1;
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
		   /* Every address operand can be reloaded to fit.  */
		   && address_operand (op, VOIDmode))
	    result = 1;
#endif
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}
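
/* Usage sketch (illustrative): before reload, a caller can test an
   operand against a multi-alternative constraint string:

	if (asm_operand_ok (op, "rm") > 0)
	  ...			-- OP fits a register or memory alternative

   A negative result means only a matching-digit constraint was seen,
   so the answer is inconclusive rather than a definite no.  */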
&#12;
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
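
/* For example, if *P is (plus:SI (reg:SI 100) (const_int 4)), the
   recursion bottoms out at the second summand and &XEXP (*p, 1) is
   returned; for a lone (reg:SI 100) the result is a null pointer.  */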
&#12;
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
&#12;
/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
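
/* Usage sketch (illustrative): generated attribute code typically does

	extract_constrain_insn_cached (insn);
	switch (which_alternative) { ... }

   while a pass that only needs the operands calls extract_insn_cached
   and, if constraints matter, constrain_operands_cached
   (reload_completed).  */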
&#12;
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
1969 /* A VOIDmode match_operand gets its mode from its real operand. */
1970 if (recog_data.operand_mode[i] == VOIDmode)
1971 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
1972 }
1973 }
1974 for (i = 0; i < noperands; i++)
1975 recog_data.operand_type[i]
1976 = (recog_data.constraints[i][0] == '=' ? OP_OUT
1977 : recog_data.constraints[i][0] == '+' ? OP_INOUT
1978 : OP_IN);
1979
1980 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
1981 }
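
/* A minimal sketch of a consumer (INSN and mark_reg_written are
   hypothetical):

     int i;
     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] != OP_IN
           && REG_P (recog_data.operand[i]))
         mark_reg_written (recog_data.operand[i]);

   After extract_insn, the operands, their locations, modes, types and
   constraint strings are all available through recog_data. */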
1982
1983 /* After calling extract_insn, you can use this function to extract some
1984 information from the constraint strings into a more usable form.
1985 The collected data is stored in recog_op_alt. */
1986 void
1987 preprocess_constraints (void)
1988 {
1989 int i;
1990
1991 for (i = 0; i < recog_data.n_operands; i++)
1992 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
1993 * sizeof (struct operand_alternative)));
1994
1995 for (i = 0; i < recog_data.n_operands; i++)
1996 {
1997 int j;
1998 struct operand_alternative *op_alt;
1999 const char *p = recog_data.constraints[i];
2000
2001 op_alt = recog_op_alt[i];
2002
2003 for (j = 0; j < recog_data.n_alternatives; j++)
2004 {
2005 op_alt[j].cl = NO_REGS;
2006 op_alt[j].constraint = p;
2007 op_alt[j].matches = -1;
2008 op_alt[j].matched = -1;
2009
2010 if (*p == '\0' || *p == ',')
2011 {
2012 op_alt[j].anything_ok = 1;
2013 continue;
2014 }
2015
2016 for (;;)
2017 {
2018 char c = *p;
2019 if (c == '#')
2020 do
2021 c = *++p;
2022 while (c != ',' && c != '\0');
2023 if (c == ',' || c == '\0')
2024 {
2025 p++;
2026 break;
2027 }
2028
2029 switch (c)
2030 {
2031 case '=': case '+': case '*': case '%':
2032 case 'E': case 'F': case 'G': case 'H':
2033 case 's': case 'i': case 'n':
2034 case 'I': case 'J': case 'K': case 'L':
2035 case 'M': case 'N': case 'O': case 'P':
2036 /* These don't say anything we care about. */
2037 break;
2038
2039 case '?':
2040 op_alt[j].reject += 6;
2041 break;
2042 case '!':
2043 op_alt[j].reject += 600;
2044 break;
2045 case '&':
2046 op_alt[j].earlyclobber = 1;
2047 break;
2048
2049 case '0': case '1': case '2': case '3': case '4':
2050 case '5': case '6': case '7': case '8': case '9':
2051 {
2052 char *end;
2053 op_alt[j].matches = strtoul (p, &end, 10);
2054 recog_op_alt[op_alt[j].matches][j].matched = i;
2055 p = end;
2056 }
2057 continue;
2058
2059 case 'm':
2060 op_alt[j].memory_ok = 1;
2061 break;
2062 case '<':
2063 op_alt[j].decmem_ok = 1;
2064 break;
2065 case '>':
2066 op_alt[j].incmem_ok = 1;
2067 break;
2068 case 'V':
2069 op_alt[j].nonoffmem_ok = 1;
2070 break;
2071 case 'o':
2072 op_alt[j].offmem_ok = 1;
2073 break;
2074 case 'X':
2075 op_alt[j].anything_ok = 1;
2076 break;
2077
2078 case 'p':
2079 op_alt[j].is_address = 1;
2080 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2081 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2082 break;
2083
2084 case 'g':
2085 case 'r':
2086 op_alt[j].cl =
2087 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2088 break;
2089
2090 default:
2091 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2092 {
2093 op_alt[j].memory_ok = 1;
2094 break;
2095 }
2096 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2097 {
2098 op_alt[j].is_address = 1;
2099 op_alt[j].cl
2100 = (reg_class_subunion
2101 [(int) op_alt[j].cl]
2102 [(int) base_reg_class (VOIDmode, ADDRESS,
2103 SCRATCH)]);
2104 break;
2105 }
2106
2107 op_alt[j].cl
2108 = (reg_class_subunion
2109 [(int) op_alt[j].cl]
2110 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2111 break;
2112 }
2113 p += CONSTRAINT_LEN (c, p);
2114 }
2115 }
2116 }
2117 }
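
/* Illustrative use (a sketch; I and the flag are hypothetical locals):
   once extract_insn and preprocess_constraints have run, questions about
   an operand/alternative pair need no constraint-string parsing:

     int j, op_i_allows_mem = 0;
     for (j = 0; j < recog_data.n_alternatives; j++)
       if (recog_op_alt[i][j].memory_ok || recog_op_alt[i][j].offmem_ok)
         op_i_allows_mem = 1;
*/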
2118
2119 /* Check the operands of an insn against the insn's operand constraints
2120 and return 1 if they are valid.
2121 The information about the insn's operands, constraints, operand modes
2122 etc. is obtained from the global variables set up by extract_insn.
2123
2124 WHICH_ALTERNATIVE is set to a number which indicates which
2125 alternative of constraints was matched: 0 for the first alternative,
2126 1 for the next, etc.
2127
2128 In addition, when two operands are required to match
2129 and it happens that the output operand is (reg) while the
2130 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2131 make the output operand look like the input.
2132 This is because the output operand is the one the template will print.
2133
2134 This is used in final, just before printing the assembler code, and by
2135 the routines that determine an insn's attributes.
2136
2137 If STRICT is positive, it means that we have been called
2138 after reload has been completed. In that case, we must
2139 do all checks strictly. If it is zero, it means that we have been called
2140 before reload has completed. In that case, we first try to see if we can
2141 find an alternative that matches strictly. If not, we try again, this
2142 time assuming that reload will fix up the insn. This provides a "best
2143 guess" for the alternative and is used to compute attributes of insns prior
2144 to reload. A negative value of STRICT is used for this internal call. */
2145
2146 struct funny_match
2147 {
2148 int this, other;
2149 };
2150
2151 int
2152 constrain_operands (int strict)
2153 {
2154 const char *constraints[MAX_RECOG_OPERANDS];
2155 int matching_operands[MAX_RECOG_OPERANDS];
2156 int earlyclobber[MAX_RECOG_OPERANDS];
2157 int c;
2158
2159 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2160 int funny_match_index;
2161
2162 which_alternative = 0;
2163 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2164 return 1;
2165
2166 for (c = 0; c < recog_data.n_operands; c++)
2167 {
2168 constraints[c] = recog_data.constraints[c];
2169 matching_operands[c] = -1;
2170 }
2171
2172 do
2173 {
2174 int seen_earlyclobber_at = -1;
2175 int opno;
2176 int lose = 0;
2177 funny_match_index = 0;
2178
2179 for (opno = 0; opno < recog_data.n_operands; opno++)
2180 {
2181 rtx op = recog_data.operand[opno];
2182 enum machine_mode mode = GET_MODE (op);
2183 const char *p = constraints[opno];
2184 int offset = 0;
2185 int win = 0;
2186 int val;
2187 int len;
2188
2189 earlyclobber[opno] = 0;
2190
2191 /* A unary operator may be accepted by the predicate, but it
2192 is irrelevant for matching constraints. */
2193 if (UNARY_P (op))
2194 op = XEXP (op, 0);
2195
2196 if (GET_CODE (op) == SUBREG)
2197 {
2198 if (REG_P (SUBREG_REG (op))
2199 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2200 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2201 GET_MODE (SUBREG_REG (op)),
2202 SUBREG_BYTE (op),
2203 GET_MODE (op));
2204 op = SUBREG_REG (op);
2205 }
2206
2207 /* An empty constraint or empty alternative
2208 allows anything which matched the pattern. */
2209 if (*p == 0 || *p == ',')
2210 win = 1;
2211
2212 do
2213 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2214 {
2215 case '\0':
2216 len = 0;
2217 break;
2218 case ',':
2219 c = '\0';
2220 break;
2221
2222 case '?': case '!': case '*': case '%':
2223 case '=': case '+':
2224 break;
2225
2226 case '#':
2227 /* Ignore rest of this alternative as far as
2228 constraint checking is concerned. */
2229 do
2230 p++;
2231 while (*p && *p != ',');
2232 len = 0;
2233 break;
2234
2235 case '&':
2236 earlyclobber[opno] = 1;
2237 if (seen_earlyclobber_at < 0)
2238 seen_earlyclobber_at = opno;
2239 break;
2240
2241 case '0': case '1': case '2': case '3': case '4':
2242 case '5': case '6': case '7': case '8': case '9':
2243 {
2244 /* This operand must be the same as a previous one.
2245 This kind of constraint is used for instructions such
2246 as add when they take only two operands.
2247
2248 Note that the lower-numbered operand is passed first.
2249
2250 If we are not testing strictly, assume that this
2251 constraint will be satisfied. */
2252
2253 char *end;
2254 int match;
2255
2256 match = strtoul (p, &end, 10);
2257 p = end;
2258
2259 if (strict < 0)
2260 val = 1;
2261 else
2262 {
2263 rtx op1 = recog_data.operand[match];
2264 rtx op2 = recog_data.operand[opno];
2265
2266 /* A unary operator may be accepted by the predicate,
2267 but it is irrelevant for matching constraints. */
2268 if (UNARY_P (op1))
2269 op1 = XEXP (op1, 0);
2270 if (UNARY_P (op2))
2271 op2 = XEXP (op2, 0);
2272
2273 val = operands_match_p (op1, op2);
2274 }
2275
2276 matching_operands[opno] = match;
2277 matching_operands[match] = opno;
2278
2279 if (val != 0)
2280 win = 1;
2281
2282 /* If output is *x and input is *--x, arrange later
2283 to change the output to *--x as well, since the
2284 output op is the one that will be printed. */
2285 if (val == 2 && strict > 0)
2286 {
2287 funny_match[funny_match_index].this = opno;
2288 funny_match[funny_match_index++].other = match;
2289 }
2290 }
2291 len = 0;
2292 break;
2293
2294 case 'p':
2295 /* p is used for address_operands. When we are called by
2296 gen_reload, no one will have checked that the address is
2297 strictly valid, i.e., that all pseudos requiring hard regs
2298 have gotten them. */
2299 if (strict <= 0
2300 || (strict_memory_address_p (recog_data.operand_mode[opno],
2301 op)))
2302 win = 1;
2303 break;
2304
2305 /* No need to check general_operand again;
2306 it was done in insn-recog.c. Well, except that reload
2307 doesn't check the validity of its replacements, but
2308 that should only matter when there's a bug. */
2309 case 'g':
2310 /* Anything goes unless it is a REG and really has a hard reg
2311 but the hard reg is not in the class GENERAL_REGS. */
2312 if (REG_P (op))
2313 {
2314 if (strict < 0
2315 || GENERAL_REGS == ALL_REGS
2316 || (reload_in_progress
2317 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2318 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2319 win = 1;
2320 }
2321 else if (strict < 0 || general_operand (op, mode))
2322 win = 1;
2323 break;
2324
2325 case 'X':
2326 /* This is used for a MATCH_SCRATCH in the cases when
2327 we don't actually need anything. So anything goes
2328 any time. */
2329 win = 1;
2330 break;
2331
2332 case 'm':
2333 /* Memory operands must be valid, to the extent
2334 required by STRICT. */
2335 if (MEM_P (op))
2336 {
2337 if (strict > 0
2338 && !strict_memory_address_p (GET_MODE (op),
2339 XEXP (op, 0)))
2340 break;
2341 if (strict == 0
2342 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2343 break;
2344 win = 1;
2345 }
2346 /* Before reload, accept what reload can turn into mem. */
2347 else if (strict < 0 && CONSTANT_P (op))
2348 win = 1;
2349 /* During reload, accept a pseudo. */
2350 else if (reload_in_progress && REG_P (op)
2351 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2352 win = 1;
2353 break;
2354
2355 case '<':
2356 if (MEM_P (op)
2357 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2358 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2359 win = 1;
2360 break;
2361
2362 case '>':
2363 if (MEM_P (op)
2364 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2365 || GET_CODE (XEXP (op, 0)) == POST_INC))
2366 win = 1;
2367 break;
2368
2369 case 'E':
2370 case 'F':
2371 if (GET_CODE (op) == CONST_DOUBLE
2372 || (GET_CODE (op) == CONST_VECTOR
2373 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2374 win = 1;
2375 break;
2376
2377 case 'G':
2378 case 'H':
2379 if (GET_CODE (op) == CONST_DOUBLE
2380 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2381 win = 1;
2382 break;
2383
2384 case 's':
2385 if (GET_CODE (op) == CONST_INT
2386 || (GET_CODE (op) == CONST_DOUBLE
2387 && GET_MODE (op) == VOIDmode))
2388 break;
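/* Fall through: 's' rejects explicit integer constants above and
   otherwise shares the CONSTANT_P test with 'i'. */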
2389 case 'i':
2390 if (CONSTANT_P (op))
2391 win = 1;
2392 break;
2393
2394 case 'n':
2395 if (GET_CODE (op) == CONST_INT
2396 || (GET_CODE (op) == CONST_DOUBLE
2397 && GET_MODE (op) == VOIDmode))
2398 win = 1;
2399 break;
2400
2401 case 'I':
2402 case 'J':
2403 case 'K':
2404 case 'L':
2405 case 'M':
2406 case 'N':
2407 case 'O':
2408 case 'P':
2409 if (GET_CODE (op) == CONST_INT
2410 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2411 win = 1;
2412 break;
2413
2414 case 'V':
2415 if (MEM_P (op)
2416 && ((strict > 0 && ! offsettable_memref_p (op))
2417 || (strict < 0
2418 && !(CONSTANT_P (op) || MEM_P (op)))
2419 || (reload_in_progress
2420 && !(REG_P (op)
2421 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2422 win = 1;
2423 break;
2424
2425 case 'o':
2426 if ((strict > 0 && offsettable_memref_p (op))
2427 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2428 /* Before reload, accept what reload can handle. */
2429 || (strict < 0
2430 && (CONSTANT_P (op) || MEM_P (op)))
2431 /* During reload, accept a pseudo. */
2432 || (reload_in_progress && REG_P (op)
2433 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2434 win = 1;
2435 break;
2436
2437 default:
2438 {
2439 enum reg_class cl;
2440
2441 cl = (c == 'r'
2442 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2443 if (cl != NO_REGS)
2444 {
2445 if (strict < 0
2446 || (strict == 0
2447 && REG_P (op)
2448 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2449 || (strict == 0 && GET_CODE (op) == SCRATCH)
2450 || (REG_P (op)
2451 && reg_fits_class_p (op, cl, offset, mode)))
2452 win = 1;
2453 }
2454 #ifdef EXTRA_CONSTRAINT_STR
2455 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2456 win = 1;
2457
2458 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2459 /* Every memory operand can be reloaded to fit. */
2460 && ((strict < 0 && MEM_P (op))
2461 /* Before reload, accept what reload can turn
2462 into mem. */
2463 || (strict < 0 && CONSTANT_P (op))
2464 /* During reload, accept a pseudo. */
2465 || (reload_in_progress && REG_P (op)
2466 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2467 win = 1;
2468 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2469 /* Every address operand can be reloaded to fit. */
2470 && strict < 0)
2471 win = 1;
2472 #endif
2473 break;
2474 }
2475 }
2476 while (p += len, c);
2477
2478 constraints[opno] = p;
2479 /* If this operand did not win somehow,
2480 this alternative loses. */
2481 if (! win)
2482 lose = 1;
2483 }
2484 /* This alternative won; the operands are ok.
2485 Change whichever operands this alternative says to change. */
2486 if (! lose)
2487 {
2488 int opno, eopno;
2489
2490 /* See if any earlyclobber operand conflicts with some other
2491 operand. */
2492
2493 if (strict > 0 && seen_earlyclobber_at >= 0)
2494 for (eopno = seen_earlyclobber_at;
2495 eopno < recog_data.n_operands;
2496 eopno++)
2497 /* Ignore earlyclobber operands now in memory,
2498 because we would often report failure when we have
2499 two memory operands, one of which was formerly a REG. */
2500 if (earlyclobber[eopno]
2501 && REG_P (recog_data.operand[eopno]))
2502 for (opno = 0; opno < recog_data.n_operands; opno++)
2503 if ((MEM_P (recog_data.operand[opno])
2504 || recog_data.operand_type[opno] != OP_OUT)
2505 && opno != eopno
2506 /* Ignore things like match_operator operands. */
2507 && *recog_data.constraints[opno] != 0
2508 && ! (matching_operands[opno] == eopno
2509 && operands_match_p (recog_data.operand[opno],
2510 recog_data.operand[eopno]))
2511 && ! safe_from_earlyclobber (recog_data.operand[opno],
2512 recog_data.operand[eopno]))
2513 lose = 1;
2514
2515 if (! lose)
2516 {
2517 while (--funny_match_index >= 0)
2518 {
2519 recog_data.operand[funny_match[funny_match_index].other]
2520 = recog_data.operand[funny_match[funny_match_index].this];
2521 }
2522
2523 return 1;
2524 }
2525 }
2526
2527 which_alternative++;
2528 }
2529 while (which_alternative < recog_data.n_alternatives);
2530
2531 which_alternative = -1;
2532 /* If we are about to reject this, but we are not to test strictly,
2533 try a very loose test. Only return failure if it fails also. */
2534 if (strict == 0)
2535 return constrain_operands (-1);
2536 else
2537 return 0;
2538 }
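
/* Usage sketch (INSN hypothetical): the strict, post-reload form of the
   check, as done just before output:

     extract_insn (insn);
     if (! constrain_operands (1))
       fatal_insn_not_found (insn);

   which_alternative then identifies the matched alternative. */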
2539
2540 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2541 is a hard reg in class CLASS when its regno is offset by OFFSET
2542 and changed to mode MODE.
2543 If REG occupies multiple hard regs, all of them must be in CLASS. */
2544
2545 int
2546 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2547 enum machine_mode mode)
2548 {
2549 int regno = REGNO (operand);
2550
2551 if (cl == NO_REGS)
2552 return 0;
2553
2554 return (regno < FIRST_PSEUDO_REGISTER
2555 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2556 mode, regno + offset));
2557 }
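
/* Example (hypothetical operand): on a 32-bit target where DImode occupies
   two hard registers, this returns nonzero only if both halves fall in CL:

     ok = reg_fits_class_p (operands[0], GENERAL_REGS, 0, DImode);
*/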
2558 \f
2559 /* Split a single instruction. Helper function for split_all_insns and
2560 split_all_insns_noflow. Return last insn in the sequence if successful,
2561 or NULL if unsuccessful. */
2562
2563 static rtx
2564 split_insn (rtx insn)
2565 {
2566 /* Split insns here to get max fine-grain parallelism. */
2567 rtx first = PREV_INSN (insn);
2568 rtx last = try_split (PATTERN (insn), insn, 1);
2569
2570 if (last == insn)
2571 return NULL_RTX;
2572
2573 /* try_split returns the NOTE that INSN became. */
2574 SET_INSN_DELETED (insn);
2575
2576 /* ??? Coddle to md files that generate subregs in post-reload
2577 splitters instead of computing the proper hard register. */
2578 if (reload_completed && first != last)
2579 {
2580 first = NEXT_INSN (first);
2581 for (;;)
2582 {
2583 if (INSN_P (first))
2584 cleanup_subreg_operands (first);
2585 if (first == last)
2586 break;
2587 first = NEXT_INSN (first);
2588 }
2589 }
2590 return last;
2591 }
2592
2593 /* Split all insns in the function. */
2594
2595 void
2596 split_all_insns (void)
2597 {
2598 sbitmap blocks;
2599 bool changed;
2600 basic_block bb;
2601
2602 blocks = sbitmap_alloc (last_basic_block);
2603 sbitmap_zero (blocks);
2604 changed = false;
2605
2606 FOR_EACH_BB_REVERSE (bb)
2607 {
2608 rtx insn, next;
2609 bool finish = false;
2610
2611 for (insn = BB_HEAD (bb); !finish ; insn = next)
2612 {
2613 /* Can't use `next_real_insn' because that might step across
2614 CODE_LABELs and out of the current basic block. */
2615 next = NEXT_INSN (insn);
2616 finish = (insn == BB_END (bb));
2617 if (INSN_P (insn))
2618 {
2619 rtx set = single_set (insn);
2620
2621 /* Don't split no-op move insns. These should silently
2622 disappear later in final. Splitting such insns would
2623 break the code that handles REG_NO_CONFLICT blocks. */
2624 if (set && set_noop_p (set))
2625 {
2626 /* Nops get in the way while scheduling, so delete them
2627 now if register allocation has already been done. It
2628 is too risky to try to do this before register
2629 allocation, and there are unlikely to be very many
2630 nops then anyway. */
2631 if (reload_completed)
2632 delete_insn_and_edges (insn);
2633 }
2634 else
2635 {
2636 rtx last = split_insn (insn);
2637 if (last)
2638 {
2639 /* The split sequence may include a barrier, but the
2640 BB boundary we are interested in will be set to
2641 the insn before it. */
2642
2643 while (BARRIER_P (last))
2644 last = PREV_INSN (last);
2645 SET_BIT (blocks, bb->index);
2646 changed = true;
2647 }
2648 }
2649 }
2650 }
2651 }
2652
2653 if (changed)
2654 find_many_sub_basic_blocks (blocks);
2655
2656 #ifdef ENABLE_CHECKING
2657 verify_flow_info ();
2658 #endif
2659
2660 sbitmap_free (blocks);
2661 }
2662
2663 /* Same as split_all_insns, but do not expect CFG to be available.
2664 Used by machine dependent reorg passes. */
2665
2666 unsigned int
2667 split_all_insns_noflow (void)
2668 {
2669 rtx next, insn;
2670
2671 for (insn = get_insns (); insn; insn = next)
2672 {
2673 next = NEXT_INSN (insn);
2674 if (INSN_P (insn))
2675 {
2676 /* Don't split no-op move insns. These should silently
2677 disappear later in final. Splitting such insns would
2678 break the code that handles REG_NO_CONFLICT blocks. */
2679 rtx set = single_set (insn);
2680 if (set && set_noop_p (set))
2681 {
2682 /* Nops get in the way while scheduling, so delete them
2683 now if register allocation has already been done. It
2684 is too risky to try to do this before register
2685 allocation, and there are unlikely to be very many
2686 nops then anyway.
2687
2688 ??? Should we use delete_insn when the CFG isn't valid? */
2689 if (reload_completed)
2690 delete_insn_and_edges (insn);
2691 }
2692 else
2693 split_insn (insn);
2694 }
2695 }
2696 return 0;
2697 }
2698 \f
2699 #ifdef HAVE_peephole2
2700 struct peep2_insn_data
2701 {
2702 rtx insn;
2703 regset live_before;
2704 };
2705
2706 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2707 static int peep2_current;
2708 /* The number of instructions available to match a peep2. */
2709 int peep2_current_count;
2710
2711 /* A non-insn marker indicating the last insn of the block.
2712 The live_before regset for this element is correct, indicating
2713 DF_LIVE_OUT for the block. */
2714 #define PEEP2_EOB pc_rtx
2715
2716 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2717 does not exist. Used by the recognizer to find the next insn to match
2718 in a multi-insn pattern. */
2719
2720 rtx
2721 peep2_next_insn (int n)
2722 {
2723 gcc_assert (n <= peep2_current_count);
2724
2725 n += peep2_current;
2726 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2727 n -= MAX_INSNS_PER_PEEP2 + 1;
2728
2729 return peep2_insn_data[n].insn;
2730 }
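
/* In a define_peephole2 condition, matched insns can be inspected through
   this window; a sketch (offsets are relative to the first insn of the
   match):

     rtx second = peep2_next_insn (1);
     int second_is_call = second != NULL_RTX && CALL_P (second);
*/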
2731
2732 /* Return true if REGNO is dead before the Nth non-note insn
2733 after `current'. */
2734
2735 int
2736 peep2_regno_dead_p (int ofs, int regno)
2737 {
2738 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2739
2740 ofs += peep2_current;
2741 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2742 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2743
2744 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2745
2746 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2747 }
2748
2749 /* Similarly for a REG. */
2750
2751 int
2752 peep2_reg_dead_p (int ofs, rtx reg)
2753 {
2754 int regno, n;
2755
2756 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2757
2758 ofs += peep2_current;
2759 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2760 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2761
2762 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2763
2764 regno = REGNO (reg);
2765 n = hard_regno_nregs[regno][GET_MODE (reg)];
2766 while (--n >= 0)
2767 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2768 return 0;
2769 return 1;
2770 }
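
/* Target .md files typically use these in define_peephole2 conditions,
   e.g. (an illustrative fragment; the pattern around it is hypothetical):

     "peep2_reg_dead_p (2, operands[0])"

   i.e. operand 0's register is dead before the third insn of the window,
   so the replacement sequence is free to clobber it. */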
2771
2772 /* Try to find a hard register of mode MODE, matching the register class in
2773 CLASS_STR, which is available at the beginning of each insn in the
2774 peephole window from offset FROM through offset TO; offsets are
2775 relative to `current', as for peep2_next_insn. The register must be
2776 free throughout that range.
2777 Registers that already have bits set in REG_SET will not be considered.
2778
2779 If an appropriate register is available, it will be returned and the
2780 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2781 returned. */
2782
2783 rtx
2784 peep2_find_free_register (int from, int to, const char *class_str,
2785 enum machine_mode mode, HARD_REG_SET *reg_set)
2786 {
2787 static int search_ofs;
2788 enum reg_class cl;
2789 HARD_REG_SET live;
2790 int i;
2791
2792 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2793 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2794
2795 from += peep2_current;
2796 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2797 from -= MAX_INSNS_PER_PEEP2 + 1;
2798 to += peep2_current;
2799 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2800 to -= MAX_INSNS_PER_PEEP2 + 1;
2801
2802 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2803 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2804
2805 while (from != to)
2806 {
2807 HARD_REG_SET this_live;
2808
2809 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2810 from = 0;
2811 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2812 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2813 IOR_HARD_REG_SET (live, this_live);
2814 }
2815
2816 cl = (class_str[0] == 'r' ? GENERAL_REGS
2817 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2818
2819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2820 {
2821 int raw_regno, regno, success, j;
2822
2823 /* Distribute the free registers as much as possible. */
2824 raw_regno = search_ofs + i;
2825 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2826 raw_regno -= FIRST_PSEUDO_REGISTER;
2827 #ifdef REG_ALLOC_ORDER
2828 regno = reg_alloc_order[raw_regno];
2829 #else
2830 regno = raw_regno;
2831 #endif
2832
2833 /* Don't allocate fixed registers. */
2834 if (fixed_regs[regno])
2835 continue;
2836 /* Make sure the register is of the right class. */
2837 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2838 continue;
2839 /* And can support the mode we need. */
2840 if (! HARD_REGNO_MODE_OK (regno, mode))
2841 continue;
2842 /* And that we don't create an extra save/restore. */
2843 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
2844 continue;
2845 /* And we don't clobber traceback for noreturn functions. */
2846 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2847 && (! reload_completed || frame_pointer_needed))
2848 continue;
2849
2850 success = 1;
2851 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2852 {
2853 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2854 || TEST_HARD_REG_BIT (live, regno + j))
2855 {
2856 success = 0;
2857 break;
2858 }
2859 }
2860 if (success)
2861 {
2862 add_to_hard_reg_set (reg_set, mode, regno);
2863
2864 /* Start the next search with the next register. */
2865 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2866 raw_regno = 0;
2867 search_ofs = raw_regno;
2868
2869 return gen_rtx_REG (mode, regno);
2870 }
2871 }
2872
2873 search_ofs = 0;
2874 return NULL_RTX;
2875 }
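
/* A typical call from a define_peephole2 preparation statement (a sketch;
   the operand numbering and mode are hypothetical):

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     operands[2] = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (operands[2] == NULL_RTX)
       FAIL;

   i.e. find a general register free across the first two insns of the
   window, or abandon the replacement. */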
2876
2877 /* Perform the peephole2 optimization pass. */
2878
2879 static void
2880 peephole2_optimize (void)
2881 {
2882 rtx insn, prev;
2883 bitmap live;
2884 int i;
2885 basic_block bb;
2886 bool do_cleanup_cfg = false;
2887 bool do_rebuild_jump_labels = false;
2888
2889 df_set_flags (DF_LR_RUN_DCE);
2890 df_analyze ();
2891
2892 /* Initialize the regsets we're going to use. */
2893 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2894 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
2895 live = BITMAP_ALLOC (&reg_obstack);
2896
2897 FOR_EACH_BB_REVERSE (bb)
2898 {
2899 /* Indicate that all slots except the last hold invalid data. */
2900 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
2901 peep2_insn_data[i].insn = NULL_RTX;
2902 peep2_current_count = 0;
2903
2904 /* Indicate that the last slot contains live_after data. */
2905 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
2906 peep2_current = MAX_INSNS_PER_PEEP2;
2907
2908 /* Start up propagation. */
2909 bitmap_copy (live, DF_LR_OUT (bb));
2910 df_simulate_artificial_refs_at_end (bb, live);
2911 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
2912
2913 for (insn = BB_END (bb); ; insn = prev)
2914 {
2915 prev = PREV_INSN (insn);
2916 if (INSN_P (insn))
2917 {
2918 rtx try, before_try, x;
2919 int match_len;
2920 rtx note;
2921 bool was_call = false;
2922
2923 /* Record this insn. */
2924 if (--peep2_current < 0)
2925 peep2_current = MAX_INSNS_PER_PEEP2;
2926 if (peep2_current_count < MAX_INSNS_PER_PEEP2
2927 && peep2_insn_data[peep2_current].insn == NULL_RTX)
2928 peep2_current_count++;
2929 peep2_insn_data[peep2_current].insn = insn;
2930 df_simulate_one_insn_backwards (bb, insn, live);
2931 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
2932
2933 if (RTX_FRAME_RELATED_P (insn))
2934 {
2935 /* If an insn has RTX_FRAME_RELATED_P set, peephole
2936 substitution would lose the
2937 REG_FRAME_RELATED_EXPR that is attached. */
2938 peep2_current_count = 0;
2939 try = NULL;
2940 }
2941 else
2942 /* Match the peephole. */
2943 try = peephole2_insns (PATTERN (insn), insn, &match_len);
2944
2945 if (try != NULL)
2946 {
2947 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2948 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
2949 cfg-related call notes. */
2950 for (i = 0; i <= match_len; ++i)
2951 {
2952 int j;
2953 rtx old_insn, new_insn, note;
2954
2955 j = i + peep2_current;
2956 if (j >= MAX_INSNS_PER_PEEP2 + 1)
2957 j -= MAX_INSNS_PER_PEEP2 + 1;
2958 old_insn = peep2_insn_data[j].insn;
2959 if (!CALL_P (old_insn))
2960 continue;
2961 was_call = true;
2962
2963 new_insn = try;
2964 while (new_insn != NULL_RTX)
2965 {
2966 if (CALL_P (new_insn))
2967 break;
2968 new_insn = NEXT_INSN (new_insn);
2969 }
2970
2971 gcc_assert (new_insn != NULL_RTX);
2972
2973 CALL_INSN_FUNCTION_USAGE (new_insn)
2974 = CALL_INSN_FUNCTION_USAGE (old_insn);
2975
2976 for (note = REG_NOTES (old_insn);
2977 note;
2978 note = XEXP (note, 1))
2979 switch (REG_NOTE_KIND (note))
2980 {
2981 case REG_NORETURN:
2982 case REG_SETJMP:
2983 REG_NOTES (new_insn)
2984 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2985 XEXP (note, 0),
2986 REG_NOTES (new_insn));
2987 default:
2988 /* Discard all other reg notes. */
2989 break;
2990 }
2991
2992 /* Croak if there is another call in the sequence. */
2993 while (++i <= match_len)
2994 {
2995 j = i + peep2_current;
2996 if (j >= MAX_INSNS_PER_PEEP2 + 1)
2997 j -= MAX_INSNS_PER_PEEP2 + 1;
2998 old_insn = peep2_insn_data[j].insn;
2999 gcc_assert (!CALL_P (old_insn));
3000 }
3001 break;
3002 }
3003
3004 i = match_len + peep2_current;
3005 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3006 i -= MAX_INSNS_PER_PEEP2 + 1;
3007
3008 note = find_reg_note (peep2_insn_data[i].insn,
3009 REG_EH_REGION, NULL_RTX);
3010
3011 /* Replace the old sequence with the new. */
3012 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3013 INSN_LOCATOR (peep2_insn_data[i].insn));
3014 before_try = PREV_INSN (insn);
3015 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3016
3017 /* Re-insert the EH_REGION notes. */
3018 if (note || (was_call && nonlocal_goto_handler_labels))
3019 {
3020 edge eh_edge;
3021 edge_iterator ei;
3022
3023 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3024 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3025 break;
3026
3027 for (x = try ; x != before_try ; x = PREV_INSN (x))
3028 if (CALL_P (x)
3029 || (flag_non_call_exceptions
3030 && may_trap_p (PATTERN (x))
3031 && !find_reg_note (x, REG_EH_REGION, NULL)))
3032 {
3033 if (note)
3034 REG_NOTES (x)
3035 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3036 XEXP (note, 0),
3037 REG_NOTES (x));
3038
3039 if (x != BB_END (bb) && eh_edge)
3040 {
3041 edge nfte, nehe;
3042 int flags;
3043
3044 nfte = split_block (bb, x);
3045 flags = (eh_edge->flags
3046 & (EDGE_EH | EDGE_ABNORMAL));
3047 if (CALL_P (x))
3048 flags |= EDGE_ABNORMAL_CALL;
3049 nehe = make_edge (nfte->src, eh_edge->dest,
3050 flags);
3051
3052 nehe->probability = eh_edge->probability;
3053 nfte->probability
3054 = REG_BR_PROB_BASE - nehe->probability;
3055
3056 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3057 bb = nfte->src;
3058 eh_edge = nehe;
3059 }
3060 }
3061
3062 /* A possibly trapping insn may have been converted into a
3063 non-trapping one. Zap any dummy outgoing edges. */
3064 do_cleanup_cfg |= purge_dead_edges (bb);
3065 }
3066
3067 #ifdef HAVE_conditional_execution
3068 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3069 peep2_insn_data[i].insn = NULL_RTX;
3070 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3071 peep2_current_count = 0;
3072 #else
3073 /* Back up lifetime information past the end of the
3074 newly created sequence. */
3075 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3076 i = 0;
3077 bitmap_copy (live, peep2_insn_data[i].live_before);
3078
3079 /* Update life information for the new sequence. */
3080 x = try;
3081 do
3082 {
3083 if (INSN_P (x))
3084 {
3085 if (--i < 0)
3086 i = MAX_INSNS_PER_PEEP2;
3087 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3088 && peep2_insn_data[i].insn == NULL_RTX)
3089 peep2_current_count++;
3090 peep2_insn_data[i].insn = x;
3091 df_insn_rescan (x);
3092 df_simulate_one_insn_backwards (bb, x, live);
3093 bitmap_copy (peep2_insn_data[i].live_before, live);
3094 }
3095 x = PREV_INSN (x);
3096 }
3097 while (x != prev);
3098
3099 peep2_current = i;
3100 #endif
3101
3102 /* If we generated a jump instruction, it won't have
3103 JUMP_LABEL set. Recompute after we're done. */
3104 for (x = try; x != before_try; x = PREV_INSN (x))
3105 if (JUMP_P (x))
3106 {
3107 do_rebuild_jump_labels = true;
3108 break;
3109 }
3110 }
3111 }
3112
3113 if (insn == BB_HEAD (bb))
3114 break;
3115 }
3116 }
3117
3118 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3119 BITMAP_FREE (peep2_insn_data[i].live_before);
3120 BITMAP_FREE (live);
3121 if (do_rebuild_jump_labels)
3122 rebuild_jump_labels (get_insns ());
3123 }
3124 #endif /* HAVE_peephole2 */
3125
3126 /* Common predicates for use with define_bypass. */
3127
3128 /* True if the dependency between OUT_INSN and IN_INSN is on the data
3129 being stored, not on the address operand(s) of the store. IN_INSN and
3130 OUT_INSN must each be either a single_set or a PARALLEL with SETs inside. */
3131
3132 int
3133 store_data_bypass_p (rtx out_insn, rtx in_insn)
3134 {
3135 rtx out_set, in_set;
3136 rtx out_pat, in_pat;
3137 rtx out_exp, in_exp;
3138 int i, j;
3139
3140 in_set = single_set (in_insn);
3141 if (in_set)
3142 {
3143 if (!MEM_P (SET_DEST (in_set)))
3144 return false;
3145
3146 out_set = single_set (out_insn);
3147 if (out_set)
3148 {
3149 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3150 return false;
3151 }
3152 else
3153 {
3154 out_pat = PATTERN (out_insn);
3155
3156 if (GET_CODE (out_pat) != PARALLEL)
3157 return false;
3158
3159 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3160 {
3161 out_exp = XVECEXP (out_pat, 0, i);
3162
3163 if (GET_CODE (out_exp) == CLOBBER)
3164 continue;
3165
3166 gcc_assert (GET_CODE (out_exp) == SET);
3167
3168 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3169 return false;
3170 }
3171 }
3172 }
3173 else
3174 {
3175 in_pat = PATTERN (in_insn);
3176 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3177
3178 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3179 {
3180 in_exp = XVECEXP (in_pat, 0, i);
3181
3182 if (GET_CODE (in_exp) == CLOBBER)
3183 continue;
3184
3185 gcc_assert (GET_CODE (in_exp) == SET);
3186
3187 if (!MEM_P (SET_DEST (in_exp)))
3188 return false;
3189
3190 out_set = single_set (out_insn);
3191 if (out_set)
3192 {
3193 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3194 return false;
3195 }
3196 else
3197 {
3198 out_pat = PATTERN (out_insn);
3199 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3200
3201 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3202 {
3203 out_exp = XVECEXP (out_pat, 0, j);
3204
3205 if (GET_CODE (out_exp) == CLOBBER)
3206 continue;
3207
3208 gcc_assert (GET_CODE (out_exp) == SET);
3209
3210 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3211 return false;
3212 }
3213 }
3214 }
3215 }
3216
3217 return true;
3218 }
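
/* In a machine description this predicate is used as a define_bypass
   guard, e.g. (illustrative fragment; the insn reservation names are
   hypothetical):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   granting the shorter latency only when the ALU result feeds the stored
   data rather than the store's address. */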
3219
3220 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3221 condition, and not in the THEN or ELSE branch. OUT_INSN may be a single
3222 set or multiple sets; IN_INSN must be a single_set for a true result, but
3223 for convenience of insn categorization it may be any JUMP or CALL insn. */
3224
3225 int
3226 if_test_bypass_p (rtx out_insn, rtx in_insn)
3227 {
3228 rtx out_set, in_set;
3229
3230 in_set = single_set (in_insn);
3231 if (! in_set)
3232 {
3233 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3234 return false;
3235 }
3236
3237 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3238 return false;
3239 in_set = SET_SRC (in_set);
3240
3241 out_set = single_set (out_insn);
3242 if (out_set)
3243 {
3244 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3245 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3246 return false;
3247 }
3248 else
3249 {
3250 rtx out_pat;
3251 int i;
3252
3253 out_pat = PATTERN (out_insn);
3254 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3255
3256 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3257 {
3258 rtx exp = XVECEXP (out_pat, 0, i);
3259
3260 if (GET_CODE (exp) == CLOBBER)
3261 continue;
3262
3263 gcc_assert (GET_CODE (exp) == SET);
3264
3265 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3266 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3267 return false;
3268 }
3269 }
3270
3271 return true;
3272 }
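
/* Likewise usable as a define_bypass guard (illustrative names):

     (define_bypass 1 "my_cmp" "my_branch" "if_test_bypass_p")

   for targets where a value feeding only the branch condition can be
   forwarded earlier than one used in the arms. */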
3273 \f
3274 static bool
3275 gate_handle_peephole2 (void)
3276 {
3277 return (optimize > 0 && flag_peephole2);
3278 }
3279
3280 static unsigned int
3281 rest_of_handle_peephole2 (void)
3282 {
3283 #ifdef HAVE_peephole2
3284 peephole2_optimize ();
3285 #endif
3286 return 0;
3287 }
3288
3289 struct tree_opt_pass pass_peephole2 =
3290 {
3291 "peephole2", /* name */
3292 gate_handle_peephole2, /* gate */
3293 rest_of_handle_peephole2, /* execute */
3294 NULL, /* sub */
3295 NULL, /* next */
3296 0, /* static_pass_number */
3297 TV_PEEPHOLE2, /* tv_id */
3298 0, /* properties_required */
3299 0, /* properties_provided */
3300 0, /* properties_destroyed */
3301 0, /* todo_flags_start */
3302 TODO_df_finish |
3303 TODO_dump_func, /* todo_flags_finish */
3304 'z' /* letter */
3305 };
3306
3307 static unsigned int
3308 rest_of_handle_split_all_insns (void)
3309 {
3310 split_all_insns ();
3311 return 0;
3312 }
3313
3314 struct tree_opt_pass pass_split_all_insns =
3315 {
3316 "split1", /* name */
3317 NULL, /* gate */
3318 rest_of_handle_split_all_insns, /* execute */
3319 NULL, /* sub */
3320 NULL, /* next */
3321 0, /* static_pass_number */
3322 0, /* tv_id */
3323 0, /* properties_required */
3324 0, /* properties_provided */
3325 0, /* properties_destroyed */
3326 0, /* todo_flags_start */
3327 TODO_dump_func, /* todo_flags_finish */
3328 0 /* letter */
3329 };
3330
3331 static unsigned int
3332 rest_of_handle_split_after_reload (void)
3333 {
3334 /* If optimizing, then go ahead and split insns now. */
3335 #ifndef STACK_REGS
3336 if (optimize > 0)
3337 #endif
3338 split_all_insns ();
3339 return 0;
3340 }
3341
3342 struct tree_opt_pass pass_split_after_reload =
3343 {
3344 "split2", /* name */
3345 NULL, /* gate */
3346 rest_of_handle_split_after_reload, /* execute */
3347 NULL, /* sub */
3348 NULL, /* next */
3349 0, /* static_pass_number */
3350 0, /* tv_id */
3351 0, /* properties_required */
3352 0, /* properties_provided */
3353 0, /* properties_destroyed */
3354 0, /* todo_flags_start */
3355 TODO_dump_func, /* todo_flags_finish */
3356 0 /* letter */
3357 };
3358
3359 static bool
3360 gate_handle_split_before_regstack (void)
3361 {
3362 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3363 /* If flow2 creates new instructions which need splitting,
3364 and scheduling after reload is not done, they might not be
3365 split until final, which does not allow splitting
3366 when HAVE_ATTR_length is defined. */
3367 # ifdef INSN_SCHEDULING
3368 return (optimize && !flag_schedule_insns_after_reload);
3369 # else
3370 return (optimize);
3371 # endif
3372 #else
3373 return 0;
3374 #endif
3375 }
3376
3377 static unsigned int
3378 rest_of_handle_split_before_regstack (void)
3379 {
3380 split_all_insns ();
3381 return 0;
3382 }
3383
3384 struct tree_opt_pass pass_split_before_regstack =
3385 {
3386 "split3", /* name */
3387 gate_handle_split_before_regstack, /* gate */
3388 rest_of_handle_split_before_regstack, /* execute */
3389 NULL, /* sub */
3390 NULL, /* next */
3391 0, /* static_pass_number */
3392 0, /* tv_id */
3393 0, /* properties_required */
3394 0, /* properties_provided */
3395 0, /* properties_destroyed */
3396 0, /* todo_flags_start */
3397 TODO_dump_func, /* todo_flags_finish */
3398 0 /* letter */
3399 };
3400
3401 static bool
3402 gate_handle_split_before_sched2 (void)
3403 {
3404 #ifdef INSN_SCHEDULING
3405 return optimize > 0 && flag_schedule_insns_after_reload;
3406 #else
3407 return 0;
3408 #endif
3409 }
3410
3411 static unsigned int
3412 rest_of_handle_split_before_sched2 (void)
3413 {
3414 #ifdef INSN_SCHEDULING
3415 split_all_insns ();
3416 #endif
3417 return 0;
3418 }
3419
3420 struct tree_opt_pass pass_split_before_sched2 =
3421 {
3422 "split4", /* name */
3423 gate_handle_split_before_sched2, /* gate */
3424 rest_of_handle_split_before_sched2, /* execute */
3425 NULL, /* sub */
3426 NULL, /* next */
3427 0, /* static_pass_number */
3428 0, /* tv_id */
3429 0, /* properties_required */
3430 0, /* properties_provided */
3431 0, /* properties_destroyed */
3432 0, /* todo_flags_start */
3433 TODO_verify_flow |
3434 TODO_dump_func, /* todo_flags_finish */
3435 0 /* letter */
3436 };
3437
3438 /* The placement of the splitting that we do for shorten_branches
3439 depends on whether regstack is used by the target or not. */
3440 static bool
3441 gate_do_final_split (void)
3442 {
3443 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3444 return 1;
3445 #else
3446 return 0;
3447 #endif
3448 }
3449
3450 struct tree_opt_pass pass_split_for_shorten_branches =
3451 {
3452 "split5", /* name */
3453 gate_do_final_split, /* gate */
3454 split_all_insns_noflow, /* execute */
3455 NULL, /* sub */
3456 NULL, /* next */
3457 0, /* static_pass_number */
3458 0, /* tv_id */
3459 0, /* properties_required */
3460 0, /* properties_provided */
3461 0, /* properties_destroyed */
3462 0, /* todo_flags_start */
3463 TODO_dump_func, /* todo_flags_finish */
3464 0 /* letter */
3465 };
3466
3467