]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/recog.c
Makefile.in (recog.o): Don't depend on resource.h.
[thirdparty/gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "recog.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "toplev.h"
38 #include "basic-block.h"
39 #include "output.h"
40
41 #ifndef STACK_PUSH_CODE
42 #ifdef STACK_GROWS_DOWNWARD
43 #define STACK_PUSH_CODE PRE_DEC
44 #else
45 #define STACK_PUSH_CODE PRE_INC
46 #endif
47 #endif
48
49 #ifndef STACK_POP_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_POP_CODE POST_INC
52 #else
53 #define STACK_POP_CODE POST_DEC
54 #endif
55 #endif
56
57 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
58 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
59 static rtx *find_constant_term_loc PARAMS ((rtx *));
60 static int insn_invalid_p PARAMS ((rtx));
61
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
67
68 init_recog and init_recog_no_volatile are responsible for setting this. */
69
70 int volatile_ok;
71
72 struct recog_data recog_data;
73
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77
78 /* On return from `constrain_operands', indicate which alternative
79 was satisfied. */
80
81 int which_alternative;
82
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
86
87 int reload_completed;
88
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
92
/* Disallow volatile memory references as operands; used while generating
   new rtl, e.g. from optabs.c and expmed.c.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}
98
/* Allow volatile memory references as operands; used when every valid
   insn must be recognized, e.g. in regclass.c, final.c and reload.c.  */

void
init_recog ()
{
  volatile_ok = 1;
}
104
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
109
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
113
114 int
115 recog_memoized (insn)
116 rtx insn;
117 {
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
120 return INSN_CODE (insn);
121 }
122 \f
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
125
126 int
127 check_asm_operands (x)
128 rtx x;
129 {
130 int noperands;
131 rtx *operands;
132 const char **constraints;
133 int i;
134
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
137 {
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
142 }
143
144 noperands = asm_noperands (x);
145 if (noperands < 0)
146 return 0;
147 if (noperands == 0)
148 return 1;
149
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
152
153 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
154
155 for (i = 0; i < noperands; i++)
156 {
157 const char *c = constraints[i];
158 if (c[0] == '%')
159 c++;
160 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
162
163 if (! asm_operand_ok (operands[i], c))
164 return 0;
165 }
166
167 return 1;
168 }
169 \f
170 /* Static data for the next two routines. */
171
/* One pending rtl change, recorded by validate_change.  */
typedef struct change_t
{
  rtx object;      /* Insn or MEM being changed, or 0 for an unchecked change.  */
  int old_code;    /* Saved INSN_CODE of OBJECT, restored on cancellation.  */
  rtx *loc;        /* Location within OBJECT where the replacement was made.  */
  rtx old;         /* The rtx previously stored at *LOC.  */
} change_t;

/* Dynamically grown array recording all pending changes.  */
static change_t *changes;
/* Number of elements currently allocated in CHANGES.  */
static int changes_allocated;

/* Number of changes currently recorded in CHANGES.  */
static int num_changes = 0;
184
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
188
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
192 the change in place.
193
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
197
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
202
203 int
204 validate_change (object, loc, new, in_group)
205 rtx object;
206 rtx *loc;
207 rtx new;
208 int in_group;
209 {
210 rtx old = *loc;
211
212 if (old == new || rtx_equal_p (old, new))
213 return 1;
214
215 if (in_group == 0 && num_changes != 0)
216 abort ();
217
218 *loc = new;
219
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
222 {
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 else
228 changes_allocated *= 2;
229
230 changes =
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
233 }
234
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
238
239 if (object && GET_CODE (object) != MEM)
240 {
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 case invalid. */
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
245 }
246
247 num_changes++;
248
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
251
252 if (in_group)
253 return 1;
254 else
255 return apply_change_group ();
256 }
257
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
260
261 static int
262 insn_invalid_p (insn)
263 rtx insn;
264 {
265 int icode = recog_memoized (insn);
266 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
267
268 if (is_asm && ! check_asm_operands (PATTERN (insn)))
269 return 1;
270 if (! is_asm && icode < 0)
271 return 1;
272
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed)
275 {
276 extract_insn (insn);
277
278 if (! constrain_operands (1))
279 return 1;
280 }
281
282 return 0;
283 }
284
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
287
int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* A null OBJECT means this change needs no validation.  */
      if (object == 0)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  /* A `break' here (and below) marks the group as invalid.  */
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  /* Copy all but the final CLOBBER into a new PARALLEL.  */
		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					gen_rtvec (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
    }

  /* An early exit above means some change was invalid; back everything
     out.  Otherwise commit by clearing the change record.  */
  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
373
374 /* Return the number of changes so far in the current group. */
375
int
num_validated_changes ()
{
  /* The running count doubles as the index of the next change slot.  */
  return num_changes;
}
381
382 /* Retract the changes numbered NUM and up. */
383
384 void
385 cancel_changes (num)
386 int num;
387 {
388 int i;
389
390 /* Back out all the changes. Do this in the opposite order in which
391 they were made. */
392 for (i = num_changes - 1; i >= num; i--)
393 {
394 *changes[i].loc = changes[i].old;
395 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
396 INSN_CODE (changes[i].object) = changes[i].old_code;
397 }
398 num_changes = num;
399 }
400
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
403
static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register const char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.*/
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
	{
	  /* Swap the operands; a comparison code must be reversed to
	     preserve the meaning.  */
	  validate_change (object, loc,
			   gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
					   : swap_condition (code),
					   GET_MODE (x), XEXP (x, 1),
					   XEXP (x, 0)),
			   1);
	  x = *loc;
	  code = GET_CODE (x);
	}
    }

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
			 1);
      return;

    case MINUS:
      /* MINUS is neither commutative nor a comparison, so substitution
	 has not happened yet; test against FROM, then fold using TO.  */
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
	{
	  validate_change (object, loc,
			   plus_constant (XEXP (x, 0), - INTVAL (to)),
			   1);
	  return;
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
	 of the operand.  If we are replacing the operand with a VOIDmode
	 constant, we lose the information.  So try to simplify the operation
	 in that case.  If it fails, substitute in something that we know
	 won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
	  && (XEXP (x, 0) == from
	      || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
		  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
		  && REGNO (XEXP (x, 0)) == REGNO (from))))
	{
	  rtx new = simplify_unary_operation (code, GET_MODE (x), to,
					      GET_MODE (from));
	  if (new == 0)
	    /* A CLOBBER is deliberately unrecognizable, so the whole
	       change group will be rejected.  */
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case SUBREG:
      /* If we have a SUBREG of a register that we are replacing and we are
	 replacing it with a MEM, make a new MEM and try replacing the
	 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
	 or if we would be widening it.  */

      if (SUBREG_REG (x) == from
	  && GET_CODE (from) == REG
	  && GET_CODE (to) == MEM
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to)
	  && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
	{
	  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	  enum machine_mode mode = GET_MODE (x);
	  rtx new;

	  /* On big-endian targets the subword lives at the high end of
	     the register, so adjust the byte offset accordingly.  */
	  if (BYTES_BIG_ENDIAN)
	    offset += (MIN (UNITS_PER_WORD,
			    GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	  new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
	  MEM_COPY_ATTRIBUTES (new, to);
	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (to);
	  int pos = INTVAL (XEXP (x, 2));

	  /* The mode the extract insn wants comes from the target's
	     extzv/extv patterns, when they exist.  */
#ifdef HAVE_extzv
	  if (code == ZERO_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
#endif
#ifdef HAVE_extv
	  if (code == SIGN_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
#endif

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
			  - offset);

	      /* Re-express the bit position relative to the new,
		 narrower memory reference.  */
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = gen_rtx_MEM (wanted_mode,
				    plus_constant (XEXP (to, 0), offset));
	      MEM_COPY_ATTRIBUTES (newmem, to);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }

  /* For commutative or comparison operations we've already performed
     replacements.  Don't try to perform them again.  */
  if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
    {
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
	}
    }
}
605
606 /* Try replacing every occurrence of FROM in INSN with TO. After all
607 changes have been made, validate by seeing if INSN is still valid. */
608
int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  /* Record all replacements as one group, then validate the group.  */
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
616
617 /* Try replacing every occurrence of FROM in INSN with TO. After all
618 changes have been made, validate by seeing if INSN is still valid. */
619
void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  /* Unlike validate_replace_rtx, leave the change group open; the
     caller is responsible for calling apply_change_group.  */
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
626
627 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
628 SET_DESTs. After all changes have been made, validate by seeing if
629 INSN is still valid. */
630
631 int
632 validate_replace_src (from, to, insn)
633 rtx from, to, insn;
634 {
635 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
636 || GET_CODE (PATTERN (insn)) != SET)
637 abort ();
638
639 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
640 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
641 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
642 from, to, insn);
643 return apply_change_group ();
644 }
645 \f
646 #ifdef HAVE_cc0
647 /* Return 1 if the insn using CC0 set by INSN does not contain
648 any ordered tests applied to the condition codes.
649 EQ and NE tests do not count. */
650
651 int
652 next_insn_tests_no_inequality (insn)
653 rtx insn;
654 {
655 register rtx next = next_cc0_user (insn);
656
657 /* If there is no next insn, we have to take the conservative choice. */
658 if (next == 0)
659 return 0;
660
661 return ((GET_CODE (next) == JUMP_INSN
662 || GET_CODE (next) == INSN
663 || GET_CODE (next) == CALL_INSN)
664 && ! inequality_comparisons_p (PATTERN (next)));
665 }
666
667 #if 0 /* This is useless since the insn that sets the cc's
668 must be followed immediately by the use of them. */
669 /* Return 1 if the CC value set up by INSN is not used. */
670
/* NOTE: this function is inside `#if 0' and is never compiled; kept
   only for reference.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  /* Scan forward until the cc0 value is provably dead or reset.  */
  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
693 #endif
694 #endif
695 \f
696 /* This is used by find_single_use to locate an rtx that contains exactly one
697 use of DEST, which is typically either a REG or CC0. It returns a
698 pointer to the innermost rtx expression containing DEST. Appearances of
699 DEST that are being used to totally replace it are not counted. */
700
static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    /* Constants and CLOBBERs cannot contain a use of DEST.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      /* Only the address / inner expression can contain a use.  */
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* A direct appearance of DEST counts as the use here;
	     otherwise search inside the subexpression.  */
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return 0;
	    }
	}
    }

  return result;
}
794 \f
795 /* See if DEST, produced in INSN, is used only a single time in the
796 sequel. If so, return a pointer to the innermost rtx expression in which
797 it is used.
798
799 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
800
801 This routine will return usually zero either before flow is called (because
802 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
803 note can't be trusted).
804
805 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
806 care about REG_DEAD notes or LOG_LINKS.
807
808 Otherwise, we find the single use by finding an insn that has a
809 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
810 only referenced once in that insn, we know that it must be the first
811 and last insn referencing DEST. */
812
rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  /* For cc0, only the immediately following insn can use it; look
     there and nowhere else.  */
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  /* Outside the flow/combine window the dataflow notes we rely on
     (LOG_LINKS, REG_DEAD) are absent or untrustworthy.  */
  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  /* Scan forward in this basic block for an insn where DEST dies or is
     set, then confirm via LOG_LINKS that INSN is its source.  */
  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
861 \f
862 /* Return 1 if OP is a valid general operand for machine mode MODE.
863 This is either a register reference, a memory reference,
864 or a constant. In the case of a memory reference, the address
865 is checked for general validity for the target machine.
866
867 Register and memory references must have mode MODE in order to be valid,
868 but some constants have no machine mode and are valid for any mode.
869
870 If MODE is VOIDmode, OP is checked for validity for whatever mode
871 it has.
872
873 The main use of this function is as a predicate in match_operand
874 expressions in the machine description.
875
876 For an explanation of this function's behavior for registers of
877 class NO_REGS, see the comment for `register_operand'. */
878
int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif

      /* Look through the SUBREG and judge the inner expression.  */
      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;
      if (GET_CODE (y) == ADDRESSOF)
	return 1;
      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      /* Jumps to `win' below when Y is a valid address; otherwise
	 falls through to the failure return.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
958 \f
959 /* Return 1 if OP is a valid memory address for a memory reference
960 of mode MODE.
961
962 The main use of this function is as a predicate in match_operand
963 expressions in the machine description. */
964
int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Simply delegate to the target's address validation.  */
  return memory_address_p (mode, op);
}
972
973 /* Return 1 if OP is a register reference of mode MODE.
974 If MODE is VOIDmode, accept a register in any mode.
975
976 The main use of this function is as a predicate in match_operand
977 expressions in the machine description.
978
979 As a special exception, registers whose class is NO_REGS are
980 not accepted by `register_operand'. The reason for this change
981 is to allow the representation of special architecture artifacts
982 (such as a condition code register) without extending the rtl
983 definitions. Since registers of class NO_REGS cannot be used
984 as registers in any case where register classes are examined,
985 it is most consistent to keep this function from accepting them. */
986
int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      /* Reject a SUBREG that changes the size of a hard register whose
	 class cannot tolerate size changes (target-specific).  */
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
				REGNO (SUBREG_REG (op)))
	  && (GET_MODE_SIZE (mode)
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* Judge the register underneath the SUBREG.  */
      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1032
1033 /* Return 1 for a register in Pmode; ignore the tested mode. */
1034
int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* MODE is deliberately ignored; only Pmode registers qualify.  */
  return register_operand (op, Pmode);
}
1042
1043 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1044 or a hard register. */
1045
1046 int
1047 scratch_operand (op, mode)
1048 register rtx op;
1049 enum machine_mode mode;
1050 {
1051 if (GET_MODE (op) != mode && mode != VOIDmode)
1052 return 0;
1053
1054 return (GET_CODE (op) == SCRATCH
1055 || (GET_CODE (op) == REG
1056 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1057 }
1058
1059 /* Return 1 if OP is a valid immediate operand for mode MODE.
1060
1061 The main use of this function is as a predicate in match_operand
1062 expressions in the machine description. */
1063
int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
1090
1091 /* Returns 1 if OP is an operand that is a CONST_INT. */
1092
1093 int
1094 const_int_operand (op, mode)
1095 register rtx op;
1096 enum machine_mode mode ATTRIBUTE_UNUSED;
1097 {
1098 return GET_CODE (op) == CONST_INT;
1099 }
1100
1101 /* Returns 1 if OP is an operand that is a constant integer or constant
1102 floating-point number. */
1103
1104 int
1105 const_double_operand (op, mode)
1106 register rtx op;
1107 enum machine_mode mode;
1108 {
1109 /* Don't accept CONST_INT or anything similar
1110 if the caller wants something floating. */
1111 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1112 && GET_MODE_CLASS (mode) != MODE_INT
1113 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1114 return 0;
1115
1116 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1117 && (mode == VOIDmode || GET_MODE (op) == mode
1118 || GET_MODE (op) == VOIDmode));
1119 }
1120
1121 /* Return 1 if OP is a general operand that is not an immediate operand. */
1122
1123 int
1124 nonimmediate_operand (op, mode)
1125 register rtx op;
1126 enum machine_mode mode;
1127 {
1128 return (general_operand (op, mode) && ! CONSTANT_P (op));
1129 }
1130
/* Return 1 if OP is a register reference or immediate value of mode MODE.
   That is, a general operand that is not a memory reference.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* The constant's mode must agree with MODE (either may be
	 VOIDmode), it must be usable under PIC, and it must satisfy
	 the target's notion of a legitimate constant.  */
      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1176
1177 /* Return 1 if OP is a valid operand that stands for pushing a
1178 value of mode MODE onto the stack.
1179
1180 The main use of this function is as a predicate in match_operand
1181 expressions in the machine description. */
1182
1183 int
1184 push_operand (op, mode)
1185 rtx op;
1186 enum machine_mode mode;
1187 {
1188 if (GET_CODE (op) != MEM)
1189 return 0;
1190
1191 if (mode != VOIDmode && GET_MODE (op) != mode)
1192 return 0;
1193
1194 op = XEXP (op, 0);
1195
1196 if (GET_CODE (op) != STACK_PUSH_CODE)
1197 return 0;
1198
1199 return XEXP (op, 0) == stack_pointer_rtx;
1200 }
1201
1202 /* Return 1 if OP is a valid operand that stands for popping a
1203 value of mode MODE off the stack.
1204
1205 The main use of this function is as a predicate in match_operand
1206 expressions in the machine description. */
1207
1208 int
1209 pop_operand (op, mode)
1210 rtx op;
1211 enum machine_mode mode;
1212 {
1213 if (GET_CODE (op) != MEM)
1214 return 0;
1215
1216 if (mode != VOIDmode && GET_MODE (op) != mode)
1217 return 0;
1218
1219 op = XEXP (op, 0);
1220
1221 if (GET_CODE (op) != STACK_POP_CODE)
1222 return 0;
1223
1224 return XEXP (op, 0) == stack_pointer_rtx;
1225 }
1226
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  /* An ADDRESSOF is considered valid since it will be converted into
     something that is not a MEM (see the similar comment in
     register_operand above).  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* The target macro jumps to `win' when the address is legitimate;
     falling through means it is not.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
1243
1244 /* Return 1 if OP is a valid memory reference with mode MODE,
1245 including a valid address.
1246
1247 The main use of this function is as a predicate in match_operand
1248 expressions in the machine description. */
1249
1250 int
1251 memory_operand (op, mode)
1252 register rtx op;
1253 enum machine_mode mode;
1254 {
1255 rtx inner;
1256
1257 if (! reload_completed)
1258 /* Note that no SUBREG is a memory operand before end of reload pass,
1259 because (SUBREG (MEM...)) forces reloading into a register. */
1260 return GET_CODE (op) == MEM && general_operand (op, mode);
1261
1262 if (mode != VOIDmode && GET_MODE (op) != mode)
1263 return 0;
1264
1265 inner = op;
1266 if (GET_CODE (inner) == SUBREG)
1267 inner = SUBREG_REG (inner);
1268
1269 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1270 }
1271
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      /* Byte offset of the subword within the underlying MEM.  */
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      /* On a big-endian machine the low-order part lives at the high
	 end of the word, so correct the offset accordingly.  */
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a MEM whose address rtx is itself a general operand.  */
  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1310
1311 /* Return 1 if this is a comparison operator. This allows the use of
1312 MATCH_OPERATOR to recognize all the branch insns. */
1313
1314 int
1315 comparison_operator (op, mode)
1316 register rtx op;
1317 enum machine_mode mode;
1318 {
1319 return ((mode == VOIDmode || GET_MODE (op) == mode)
1320 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1321 }
1322 \f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.

   The four shapes recognized are:
     (asm_operands ...)                                  -- no outputs
     (set OUTPUT (asm_operands ...))                     -- one output
     [(set OUTPUT (asm_operands ...)) ... (clobber ...)] -- several outputs
     [(asm_operands ...) (clobber ...) ...]              -- outputs = 0  */

int
asm_noperands (body)
     rtx body;
{
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      /* Multiple output operands, or 1 output plus some clobbers:
	 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
      int i;
      int n_sets;

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
	{
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
	    break;
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
	    return -1;
	}

      /* N_SETS is now number of output operands.  */
      n_sets = i;

      /* Verify that all the SETs we have
	 came from a single original asm_operands insn
	 (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
	{
	  rtx elt = XVECEXP (body, 0, i);
	  if (GET_CODE (elt) != SET)
	    return -1;
	  if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
	    return -1;
	  /* If these ASM_OPERANDS rtx's came from different original insns
	     then they aren't allowed together.  */
	  if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
	      != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
	    return -1;
	}
      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
	      + n_sets);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* 0 outputs, but some clobbers:
	 body is [(asm_operands ...) (clobber (reg ...))...].  */
      int i;

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	  return -1;

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
    }
  else
    return -1;
}
1394
1395 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1396 copy its operands (both input and output) into the vector OPERANDS,
1397 the locations of the operands within the insn into the vector OPERAND_LOCS,
1398 and the constraints for the operands into CONSTRAINTS.
1399 Write the modes of the operands into MODES.
1400 Return the assembler-template.
1401
1402 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1403 we don't store that info. */
1404
1405 const char *
1406 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1407 rtx body;
1408 rtx *operands;
1409 rtx **operand_locs;
1410 const char **constraints;
1411 enum machine_mode *modes;
1412 {
1413 register int i;
1414 int noperands;
1415 const char *template = 0;
1416
1417 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1418 {
1419 rtx asmop = SET_SRC (body);
1420 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1421
1422 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1423
1424 for (i = 1; i < noperands; i++)
1425 {
1426 if (operand_locs)
1427 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1428 if (operands)
1429 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1430 if (constraints)
1431 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1432 if (modes)
1433 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1434 }
1435
1436 /* The output is in the SET.
1437 Its constraint is in the ASM_OPERANDS itself. */
1438 if (operands)
1439 operands[0] = SET_DEST (body);
1440 if (operand_locs)
1441 operand_locs[0] = &SET_DEST (body);
1442 if (constraints)
1443 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1444 if (modes)
1445 modes[0] = GET_MODE (SET_DEST (body));
1446 template = ASM_OPERANDS_TEMPLATE (asmop);
1447 }
1448 else if (GET_CODE (body) == ASM_OPERANDS)
1449 {
1450 rtx asmop = body;
1451 /* No output operands: BODY is (asm_operands ....). */
1452
1453 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1454
1455 /* The input operands are found in the 1st element vector. */
1456 /* Constraints for inputs are in the 2nd element vector. */
1457 for (i = 0; i < noperands; i++)
1458 {
1459 if (operand_locs)
1460 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1461 if (operands)
1462 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1463 if (constraints)
1464 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1465 if (modes)
1466 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1467 }
1468 template = ASM_OPERANDS_TEMPLATE (asmop);
1469 }
1470 else if (GET_CODE (body) == PARALLEL
1471 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1472 {
1473 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1474 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1475 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1476 int nout = 0; /* Does not include CLOBBERs. */
1477
1478 /* At least one output, plus some CLOBBERs. */
1479
1480 /* The outputs are in the SETs.
1481 Their constraints are in the ASM_OPERANDS itself. */
1482 for (i = 0; i < nparallel; i++)
1483 {
1484 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1485 break; /* Past last SET */
1486
1487 if (operands)
1488 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1489 if (operand_locs)
1490 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1491 if (constraints)
1492 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1493 if (modes)
1494 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1495 nout++;
1496 }
1497
1498 for (i = 0; i < nin; i++)
1499 {
1500 if (operand_locs)
1501 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1502 if (operands)
1503 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1504 if (constraints)
1505 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1506 if (modes)
1507 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1508 }
1509
1510 template = ASM_OPERANDS_TEMPLATE (asmop);
1511 }
1512 else if (GET_CODE (body) == PARALLEL
1513 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1514 {
1515 /* No outputs, but some CLOBBERs. */
1516
1517 rtx asmop = XVECEXP (body, 0, 0);
1518 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1519
1520 for (i = 0; i < nin; i++)
1521 {
1522 if (operand_locs)
1523 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1524 if (operands)
1525 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1526 if (constraints)
1527 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1528 if (modes)
1529 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1530 }
1531
1532 template = ASM_OPERANDS_TEMPLATE (asmop);
1533 }
1534
1535 return template;
1536 }
1537
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.

   Each letter of CONSTRAINT is tested in turn; the first one that OP
   satisfies makes the whole operand acceptable.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      switch (*constraint++)
	{
	/* Modifiers and alternative separators carry no requirement
	   by themselves.  */
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  /* Note the deliberate (1 || ...): any MEM is accepted, per the
	     comment above.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  /* Likewise deliberately matches any MEM; see the '<' case.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	/* 'G'/'H': target-defined CONST_DOUBLE classes.  */
	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  /* 's' is 'i' minus known-value integers, so skip those here
	     and fall through to the general immediate test.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  /* Integer constants, including mode-less CONST_DOUBLEs
	     (wide integers).  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	/* 'I'..'P': target-defined CONST_INT ranges.  */
	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  /* 'X' accepts anything.  */
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

#ifdef EXTRA_CONSTRAINT
	/* 'Q'..'U': additional target-defined constraints.  */
	case 'Q':
	  if (EXTRA_CONSTRAINT (op, 'Q'))
	    return 1;
	  break;
	case 'R':
	  if (EXTRA_CONSTRAINT (op, 'R'))
	    return 1;
	  break;
	case 'S':
	  if (EXTRA_CONSTRAINT (op, 'S'))
	    return 1;
	  break;
	case 'T':
	  if (EXTRA_CONSTRAINT (op, 'T'))
	    return 1;
	  break;
	case 'U':
	  if (EXTRA_CONSTRAINT (op, 'U'))
	    return 1;
	  break;
#endif

	case 'r':
	default:
	  /* Any other letter is assumed to be a register-class
	     constraint; require a register operand.  */
	  if (GET_MODE (op) == BLKmode)
	    break;
	  if (register_operand (op, VOIDmode))
	    return 1;
	  break;
	}
    }

  return result;
}
1749 \f
1750 /* Given an rtx *P, if it is a sum containing an integer constant term,
1751 return the location (type rtx *) of the pointer to that constant term.
1752 Otherwise, return a null pointer. */
1753
1754 static rtx *
1755 find_constant_term_loc (p)
1756 rtx *p;
1757 {
1758 register rtx *tem;
1759 register enum rtx_code code = GET_CODE (*p);
1760
1761 /* If *P IS such a constant term, P is its location. */
1762
1763 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1764 || code == CONST)
1765 return p;
1766
1767 /* Otherwise, if not a sum, it has no constant term. */
1768
1769 if (GET_CODE (*p) != PLUS)
1770 return 0;
1771
1772 /* If one of the summands is constant, return its location. */
1773
1774 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1775 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1776 return p;
1777
1778 /* Otherwise, check each summand for containing a constant term. */
1779
1780 if (XEXP (*p, 0) != 0)
1781 {
1782 tem = find_constant_term_loc (&XEXP (*p, 0));
1783 if (tem != 0)
1784 return tem;
1785 }
1786
1787 if (XEXP (*p, 1) != 0)
1788 {
1789 tem = find_constant_term_loc (&XEXP (*p, 1));
1790 if (tem != 0)
1791 return tem;
1792 }
1793
1794 return 0;
1795 }
1796 \f
1797 /* Return 1 if OP is a memory reference
1798 whose address contains no side effects
1799 and remains valid after the addition
1800 of a positive integer less than the
1801 size of the object being referenced.
1802
1803 We assume that the original address is valid and do not check it.
1804
1805 This uses strict_memory_address_p as a subroutine, so
1806 don't use it before reload. */
1807
1808 int
1809 offsettable_memref_p (op)
1810 rtx op;
1811 {
1812 return ((GET_CODE (op) == MEM)
1813 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1814 }
1815
1816 /* Similar, but don't require a strictly valid mem ref:
1817 consider pseudo-regs valid as index or base regs. */
1818
1819 int
1820 offsettable_nonstrict_memref_p (op)
1821 rtx op;
1822 {
1823 return ((GET_CODE (op) == MEM)
1824 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1825 }
1826
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Which address validator to use, chosen by STRICTP.  */
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily replace the constant term inside Y with the
	 offsetted value, test validity, then restore Y in place.
	 Y2 points into Y's own structure, so this mutates the
	 caller's rtx for the duration of the test.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Auto-modification addresses have side effects, so they are never
     offsettable.  */
  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
1900
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  /* The target macro jumps to `win' for a mode-dependent address;
     falling through means the address is mode-independent.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
1917
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     /* Note: with K&R definitions the parameter list (op, mode) fixes
	the order; these declarations only supply the types, so listing
	MODE first is harmless.  */
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  /* Anything that isn't a MEM has no address to worry about.  */
  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
1941
/* Given an operand OP that is a valid memory reference
   which satisfies offsettable_memref_p,
   return a new memory reference whose address has been adjusted by OFFSET.
   OFFSET should be positive and less than the size of the object referenced.

   Aborts if OP is not a MEM.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      /* Constant address: build a fresh MEM with the offset folded in.  */
      if (CONSTANT_ADDRESS_P (y))
	{
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
	  return new;
	}

      /* A sum: copy OP (so the original is not modified) and add OFFSET
	 into the constant term of the copy, if there is one.  */
      if (GET_CODE (y) == PLUS)
	{
	  rtx z = y;		/* (initializer overwritten below)  */
	  register rtx *const_loc;

	  op = copy_rtx (op);
	  z = XEXP (op, 0);
	  const_loc = find_constant_term_loc (&z);
	  if (const_loc)
	    {
	      *const_loc = plus_constant_for_output (*const_loc, offset);
	      return op;
	    }
	}

      /* No constant term: wrap the address in an explicit addition.  */
      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
      return new;
    }
  abort ();
}
1989 \f
/* Analyze INSN and fill in recog_data: operand values, locations,
   constraints, modes, operand types (in/out/inout), and the numbers
   of operands, alternatives and dups.  Aborts if the insn cannot be
   recognized or has too many operands/alternatives.  */

void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    /* These patterns have no operands of interest; leave the counts 0.  */
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
    case PARALLEL:
    case ASM_OPERANDS:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      /* Alternatives are comma-separated within each constraint;
		 count them from the first operand's constraint string.  */
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}

      /* FALLTHROUGH */

    default:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	}
    }
  /* Classify each operand by the first character of its constraint:
     '=' means pure output, '+' means read-write, anything else input.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
2072
2073 /* After calling extract_insn, you can use this function to extract some
2074 information from the constraint strings into a more usable form.
2075 The collected data is stored in recog_op_alt. */
2076 void
2077 preprocess_constraints ()
2078 {
2079 int i;
2080
2081 memset (recog_op_alt, 0, sizeof recog_op_alt);
2082 for (i = 0; i < recog_data.n_operands; i++)
2083 {
2084 int j;
2085 struct operand_alternative *op_alt;
2086 const char *p = recog_data.constraints[i];
2087
2088 op_alt = recog_op_alt[i];
2089
2090 for (j = 0; j < recog_data.n_alternatives; j++)
2091 {
2092 op_alt[j].class = NO_REGS;
2093 op_alt[j].constraint = p;
2094 op_alt[j].matches = -1;
2095 op_alt[j].matched = -1;
2096
2097 if (*p == '\0' || *p == ',')
2098 {
2099 op_alt[j].anything_ok = 1;
2100 continue;
2101 }
2102
2103 for (;;)
2104 {
2105 char c = *p++;
2106 if (c == '#')
2107 do
2108 c = *p++;
2109 while (c != ',' && c != '\0');
2110 if (c == ',' || c == '\0')
2111 break;
2112
2113 switch (c)
2114 {
2115 case '=': case '+': case '*': case '%':
2116 case 'E': case 'F': case 'G': case 'H':
2117 case 's': case 'i': case 'n':
2118 case 'I': case 'J': case 'K': case 'L':
2119 case 'M': case 'N': case 'O': case 'P':
2120 #ifdef EXTRA_CONSTRAINT
2121 case 'Q': case 'R': case 'S': case 'T': case 'U':
2122 #endif
2123 /* These don't say anything we care about. */
2124 break;
2125
2126 case '?':
2127 op_alt[j].reject += 6;
2128 break;
2129 case '!':
2130 op_alt[j].reject += 600;
2131 break;
2132 case '&':
2133 op_alt[j].earlyclobber = 1;
2134 break;
2135
2136 case '0': case '1': case '2': case '3': case '4':
2137 case '5': case '6': case '7': case '8': case '9':
2138 op_alt[j].matches = c - '0';
2139 recog_op_alt[op_alt[j].matches][j].matched = i;
2140 break;
2141
2142 case 'm':
2143 op_alt[j].memory_ok = 1;
2144 break;
2145 case '<':
2146 op_alt[j].decmem_ok = 1;
2147 break;
2148 case '>':
2149 op_alt[j].incmem_ok = 1;
2150 break;
2151 case 'V':
2152 op_alt[j].nonoffmem_ok = 1;
2153 break;
2154 case 'o':
2155 op_alt[j].offmem_ok = 1;
2156 break;
2157 case 'X':
2158 op_alt[j].anything_ok = 1;
2159 break;
2160
2161 case 'p':
2162 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2163 break;
2164
2165 case 'g': case 'r':
2166 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2167 break;
2168
2169 default:
2170 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2171 break;
2172 }
2173 }
2174 }
2175 }
2176 }
2177
2178 /* Check the operands of an insn against the insn's operand constraints
2179 and return 1 if they are valid.
2180 The information about the insn's operands, constraints, operand modes
2181 etc. is obtained from the global variables set up by extract_insn.
2182
2183 WHICH_ALTERNATIVE is set to a number which indicates which
2184 alternative of constraints was matched: 0 for the first alternative,
2185 1 for the next, etc.
2186
2187 In addition, when two operands are match
2188 and it happens that the output operand is (reg) while the
2189 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2190 make the output operand look like the input.
2191 This is because the output operand is the one the template will print.
2192
2193 This is used in final, just before printing the assembler code and by
2194 the routines that determine an insn's attribute.
2195
2196 If STRICT is a positive non-zero value, it means that we have been
2197 called after reload has been completed. In that case, we must
2198 do all checks strictly. If it is zero, it means that we have been called
2199 before reload has completed. In that case, we first try to see if we can
2200 find an alternative that matches strictly. If not, we try again, this
2201 time assuming that reload will fix up the insn. This provides a "best
2202 guess" for the alternative and is used to compute attributes of insns prior
2203 to reload. A negative value of STRICT is used for this internal call. */
2204
/* A pair of operand numbers tied by a digit (matching) constraint.
   constrain_operands records one of these when the pair matched only
   loosely (operands_match_p returned 2) under a strict check, so that
   the winning alternative can copy THIS's rtl over OTHER's.  */
struct funny_match
{
  int this, other;	/* operand numbers of the matched pair */
};
2209
int
constrain_operands (strict)
     int strict;
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  register int c;

  /* Pairs of operands matched by digit constraints whose rtl must be
     unified if this alternative wins; see struct funny_match.  */
  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  /* With no operands or no alternatives there is nothing to check.  */
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  which_alternative = 0;

  /* Try each alternative in turn; the first one for which every
     operand wins is accepted.  */
  while (which_alternative < recog_data.n_alternatives)
    {
      register int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  register rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  register const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = SUBREG_WORD (op);
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  while (*p && (c = *p++) != ',')
	    switch (c)
	      {
	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		while (*p && *p != ',')
		  p++;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':

		/* This operand must be the same as a previous one.
		   This kind of constraint is used for instructions such
		   as add when they take only two operands.

		   Note that the lower-numbered operand is passed first.

		   If we are not testing strictly, assume that this constraint
		   will be satisfied.  */
		if (strict < 0)
		  val = 1;
		else
		  {
		    rtx op1 = recog_data.operand[c - '0'];
		    rtx op2 = recog_data.operand[opno];

		    /* A unary operator may be accepted by the predicate,
		       but it is irrelevant for matching constraints.  */
		    if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
		      op1 = XEXP (op1, 0);
		    if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
		      op2 = XEXP (op2, 0);

		    val = operands_match_p (op1, op2);
		  }

		matching_operands[opno] = c - '0';
		matching_operands[c - '0'] = opno;

		if (val != 0)
		  win = 1;
		/* If output is *x and input is *--x,
		   arrange later to change the output to *--x as well,
		   since the output op is the one that will be printed.  */
		if (val == 2 && strict > 0)
		  {
		    funny_match[funny_match_index].this = opno;
		    funny_match[funny_match_index++].other = c - '0';
		  }
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict < 0
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		  win = 1;
		break;

	      case 'r':
		if (strict < 0
		    || (strict == 0
			&& GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || (strict == 0 && GET_CODE (op) == SCRATCH)
		    || (GET_CODE (op) == REG
			&& ((GENERAL_REGS == ALL_REGS
			     && REGNO (op) < FIRST_PSEUDO_REGISTER)
			    || reg_fits_class_p (op, GENERAL_REGS,
						 offset, mode))))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		if (GET_CODE (op) == MEM
		    /* Before reload, accept what reload can turn into mem.  */
		    || (strict < 0 && CONSTANT_P (op))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      case '<':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
#ifndef REAL_ARITHMETIC
		/* Match any CONST_DOUBLE, but only if
		   we can examine the bits of it reliably.  */
		if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
		     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
		    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
		  break;
#endif
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
		  win = 1;
		break;

	      case 's':
		/* An explicit integer rejects 's'; anything else falls
		   through and is tested as for 'i'.  */
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
		/* FALLTHRU */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
		  win = 1;
		break;

#ifdef EXTRA_CONSTRAINT
	      case 'Q':
	      case 'R':
	      case 'S':
	      case 'T':
	      case 'U':
		if (EXTRA_CONSTRAINT (op, c))
		  win = 1;
		break;
#endif

	      case 'V':
		if (GET_CODE (op) == MEM
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
			|| (reload_in_progress
			    && !(GET_CODE (op) == REG
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		/* Any other letter is a machine-dependent register class.  */
		if (strict < 0
		    || (strict == 0
			&& GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || (strict == 0 && GET_CODE (op) == SCRATCH)
		    || (GET_CODE (op) == REG
			&& reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
					     offset, mode)))
		  win = 1;
	      }

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0)
	    for (eopno = 0; eopno < recog_data.n_operands; eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && GET_CODE (recog_data.operand[eopno]) == REG)
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((GET_CODE (recog_data.operand[opno]) == MEM
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      /* Unify the rtl of loosely-matched operand pairs so the
		 output operand prints like the input (see the comment
		 before this function).  */
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
2557
2558 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2559 is a hard reg in class CLASS when its regno is offset by OFFSET
2560 and changed to mode MODE.
2561 If REG occupies multiple hard regs, all of them must be in CLASS. */
2562
2563 int
2564 reg_fits_class_p (operand, class, offset, mode)
2565 rtx operand;
2566 register enum reg_class class;
2567 int offset;
2568 enum machine_mode mode;
2569 {
2570 register int regno = REGNO (operand);
2571 if (regno < FIRST_PSEUDO_REGISTER
2572 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2573 regno + offset))
2574 {
2575 register int sr;
2576 regno += offset;
2577 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2578 sr > 0; sr--)
2579 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2580 regno + sr))
2581 break;
2582 return sr == 0;
2583 }
2584
2585 return 0;
2586 }
2587 \f
2588 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2589
2590 void
2591 split_all_insns (upd_life)
2592 int upd_life;
2593 {
2594 sbitmap blocks;
2595 int changed;
2596 int i;
2597
2598 blocks = sbitmap_alloc (n_basic_blocks);
2599 sbitmap_zero (blocks);
2600 changed = 0;
2601
2602 for (i = n_basic_blocks - 1; i >= 0; --i)
2603 {
2604 basic_block bb = BASIC_BLOCK (i);
2605 rtx insn, next;
2606
2607 for (insn = bb->head; insn ; insn = next)
2608 {
2609 rtx set;
2610
2611 /* Can't use `next_real_insn' because that might go across
2612 CODE_LABELS and short-out basic blocks. */
2613 next = NEXT_INSN (insn);
2614 if (GET_CODE (insn) != INSN)
2615 ;
2616
2617 /* Don't split no-op move insns. These should silently
2618 disappear later in final. Splitting such insns would
2619 break the code that handles REG_NO_CONFLICT blocks. */
2620
2621 else if ((set = single_set (insn)) != NULL
2622 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2623 {
2624 /* Nops get in the way while scheduling, so delete them
2625 now if register allocation has already been done. It
2626 is too risky to try to do this before register
2627 allocation, and there are unlikely to be very many
2628 nops then anyways. */
2629 if (reload_completed)
2630 {
2631 PUT_CODE (insn, NOTE);
2632 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2633 NOTE_SOURCE_FILE (insn) = 0;
2634 }
2635 }
2636 else
2637 {
2638 /* Split insns here to get max fine-grain parallelism. */
2639 rtx first = PREV_INSN (insn);
2640 rtx last = try_split (PATTERN (insn), insn, 1);
2641
2642 if (last != insn)
2643 {
2644 SET_BIT (blocks, i);
2645 changed = 1;
2646
2647 /* try_split returns the NOTE that INSN became. */
2648 first = NEXT_INSN (first);
2649 PUT_CODE (insn, NOTE);
2650 NOTE_SOURCE_FILE (insn) = 0;
2651 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2652
2653 if (insn == bb->end)
2654 {
2655 bb->end = last;
2656 break;
2657 }
2658 }
2659 }
2660
2661 if (insn == bb->end)
2662 break;
2663 }
2664
2665 /* ??? When we're called from just after reload, the CFG is in bad
2666 shape, and we may have fallen off the end. This could be fixed
2667 by having reload not try to delete unreachable code. Otherwise
2668 assert we found the end insn. */
2669 if (insn == NULL && upd_life)
2670 abort ();
2671 }
2672
2673 if (changed && upd_life)
2674 {
2675 compute_bb_for_insn (get_max_uid ());
2676 count_or_remove_death_notes (blocks, 1);
2677 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2678 }
2679
2680 sbitmap_free (blocks);
2681 }
2682 \f
2683 #ifdef HAVE_peephole2
/* One slot in the circular buffer of recently seen insns.  INSN is the
   insn itself (or the PEEP2_EOB marker, or NULL_RTX for an invalid
   slot); LIVE_BEFORE is the set of registers live just before INSN.  */
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

/* Circular buffer holding the last MAX_INSNS_PER_PEEP2 insns plus one
   extra slot for the end-of-block marker.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index into peep2_insn_data of the most recently recorded insn.  */
static int peep2_current;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB	pc_rtx
2697
2698 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2699 does not exist. Used by the recognizer to find the next insn to match
2700 in a multi-insn pattern. */
2701
2702 rtx
2703 peep2_next_insn (n)
2704 int n;
2705 {
2706 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2707 abort ();
2708
2709 n += peep2_current;
2710 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2711 n -= MAX_INSNS_PER_PEEP2 + 1;
2712
2713 if (peep2_insn_data[n].insn == PEEP2_EOB)
2714 return NULL_RTX;
2715 return peep2_insn_data[n].insn;
2716 }
2717
2718 /* Return true if REGNO is dead before the Nth non-note insn
2719 after `current'. */
2720
2721 int
2722 peep2_regno_dead_p (ofs, regno)
2723 int ofs;
2724 int regno;
2725 {
2726 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2727 abort ();
2728
2729 ofs += peep2_current;
2730 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2731 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2732
2733 if (peep2_insn_data[ofs].insn == NULL_RTX)
2734 abort ();
2735
2736 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2737 }
2738
2739 /* Similarly for a REG. */
2740
2741 int
2742 peep2_reg_dead_p (ofs, reg)
2743 int ofs;
2744 rtx reg;
2745 {
2746 int regno, n;
2747
2748 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2749 abort ();
2750
2751 ofs += peep2_current;
2752 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2753 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2754
2755 if (peep2_insn_data[ofs].insn == NULL_RTX)
2756 abort ();
2757
2758 regno = REGNO (reg);
2759 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2760 while (--n >= 0)
2761 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2762 return 0;
2763 return 1;
2764 }
2765
2766 /* Try to find a hard register of mode MODE, matching the register class in
2767 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2768 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2769 in which case the only condition is that the register must be available
2770 before CURRENT_INSN.
2771 Registers that already have bits set in REG_SET will not be considered.
2772
2773 If an appropriate register is available, it will be returned and the
2774 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2775 returned. */
2776
rtx
peep2_find_free_register (from, to, class_str, mode, reg_set)
     int from, to;
     const char *class_str;
     enum machine_mode mode;
     HARD_REG_SET *reg_set;
{
  /* Remembered across calls so that successive requests are spread
     over different registers.  */
  static int search_ofs;
  enum reg_class class;
  HARD_REG_SET live;
  int i;

  if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  /* Convert the offsets to indices into the circular buffer.  */
  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[from].insn == NULL_RTX)
    abort ();
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  /* Accumulate the registers live anywhere in the FROM..TO range; the
     chosen register must be dead throughout.  */
  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      if (peep2_insn_data[from].insn == NULL_RTX)
	abort ();
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  /* Only the first letter of CLASS_STR is examined.  */
  class = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_LETTER (class_str[0]));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      /* Every hard register the value would occupy must be free, both
	 in LIVE and in the caller's REG_SET.  */
      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  /* Mark the register as taken in the caller's set.  */
	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
2876
2877 /* Perform the peephole2 optimization pass. */
2878
void
peephole2_optimize (dump_file)
     FILE *dump_file ATTRIBUTE_UNUSED;
{
  regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
  rtx insn, prev;
  regset live;
  int i, b;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  int changed;
#endif

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
  live = INITIALIZE_REG_SET (rs_heads[i]);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  for (b = n_basic_blocks - 1; b >= 0; --b)
    {
      basic_block bb = BASIC_BLOCK (b);
      struct propagate_block_info *pbi;

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
#endif

      /* Walk the block's insns from last to first, maintaining LIVE as
	 the set of registers live before the current insn and filling
	 the circular buffer as we go.  */
      for (insn = bb->end; ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try;
	      int match_len;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      /* Match the peephole.  */
	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
	      if (try != NULL)
		{
		  /* Index of the last insn of the matched sequence.  */
		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  /* Replace the old sequence with the new.  */
		  flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
		  try = emit_insn_after (try, prev);

		  /* Adjust the basic block boundaries.  */
		  if (peep2_insn_data[i].insn == bb->end)
		    bb->end = try;
		  if (insn == bb->head)
		    bb->head = NEXT_INSN (prev);

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, b);
		  changed = 1;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  do
		    {
		      if (INSN_P (try))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  peep2_insn_data[i].insn = try;
			  propagate_one_insn (pbi, try);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      try = PREV_INSN (try);
		    }
		  while (try != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif
		}
	    }

	  if (insn == bb->head)
	    break;
	}

      free_propagate_block_info (pbi);
    }

  /* Release the regsets allocated at function entry.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

#ifdef HAVE_conditional_execution
  count_or_remove_death_notes (blocks, 1);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
#endif
}
3020 #endif /* HAVE_peephole2 */