1/* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "insn-config.h"
26#include "insn-attr.h"
27#include "insn-flags.h"
28#include "insn-codes.h"
29#include "recog.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "flags.h"
33#include "real.h"
34
35#ifndef STACK_PUSH_CODE
36#ifdef STACK_GROWS_DOWNWARD
37#define STACK_PUSH_CODE PRE_DEC
38#else
39#define STACK_PUSH_CODE PRE_INC
40#endif
41#endif
42
43/* Import from final.c: */
44extern rtx alter_subreg ();
45
46static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
47static rtx *find_single_use_1 PROTO((rtx, rtx *));
48static rtx *find_constant_term_loc PROTO((rtx *));
49
50/* Nonzero means allow operands to be volatile.
51 This should be 0 if you are generating rtl, such as if you are calling
52 the functions in optabs.c and expmed.c (most of the time).
53 This should be 1 if all valid insns need to be recognized,
54 such as in regclass.c and final.c and reload.c.
55
56 init_recog and init_recog_no_volatile are responsible for setting this. */
57
58int volatile_ok;
59
60/* On return from `constrain_operands', indicate which alternative
61 was satisfied. */
62
63int which_alternative;
64
65/* Nonzero after end of reload pass.
66 Set to 1 or 0 by toplev.c.
67 Controls the significance of (SUBREG (MEM)). */
68
69int reload_completed;
70
71/* Initialize data used by the function `recog'.
72 This must be called once in the compilation of a function
73 before any insn recognition may be done in the function. */
74
75void
76init_recog_no_volatile ()
77{
78 volatile_ok = 0;
79}
80
81void
82init_recog ()
83{
84 volatile_ok = 1;
85}
86
87/* Try recognizing the instruction INSN,
88 and return the code number that results.
89 Remember the code so that repeated calls do not
90 need to spend the time for actual rerecognition.
91
92 This function is the normal interface to instruction recognition.
93 The automatically-generated function `recog' is normally called
94 through this one. (The only exception is in combine.c.) */
95
96int
97recog_memoized (insn)
98 rtx insn;
99{
100 if (INSN_CODE (insn) < 0)
101 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
102 return INSN_CODE (insn);
103}
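/* Illustrative usage sketch (hypothetical helper, not used elsewhere):
   most callers only need to know whether an insn matched some pattern,
   which is exactly the sign of the memoized code.  */
#if 0
static int
example_insn_is_recognizable (insn)
     rtx insn;
{
  /* The result is cached in INSN_CODE (insn), so asking repeatedly
     costs only a field access.  */
  return recog_memoized (insn) >= 0;
}
#endif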
104\f
105/* Check that X is an insn-body for an `asm' with operands
106 and that the operands mentioned in it are legitimate. */
107
108int
109check_asm_operands (x)
110 rtx x;
111{
112 int noperands = asm_noperands (x);
113 rtx *operands;
114 int i;
115
116 if (noperands < 0)
117 return 0;
118 if (noperands == 0)
119 return 1;
120
121 operands = (rtx *) alloca (noperands * sizeof (rtx));
122 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
123
124 for (i = 0; i < noperands; i++)
125 if (!general_operand (operands[i], VOIDmode))
126 return 0;
127
128 return 1;
129}
130\f
131/* Static data for the next two routines.
132
133 The maximum number of changes supported is defined as the maximum
134 number of operands times 5. This allows for repeated substitutions
135 inside a complex indexed address, or, alternatively, changes in up
136 to 5 insns. */
137
138#define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
139
140static rtx change_objects[MAX_CHANGE_LOCS];
141static int change_old_codes[MAX_CHANGE_LOCS];
142static rtx *change_locs[MAX_CHANGE_LOCS];
143static rtx change_olds[MAX_CHANGE_LOCS];
144
145static int num_changes = 0;
146
147/* Validate a proposed change to OBJECT. LOC is the location in the rtl
148 at which NEW will be placed. If OBJECT is zero, no validation is done,
149 the change is simply made.
150
151 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
152 will be called with the address and mode as parameters. If OBJECT is
153 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
154 the change in place.
155
156 IN_GROUP is non-zero if this is part of a group of changes that must be
157 performed as a group. In that case, the changes will be stored. The
158 function `apply_change_group' will validate and apply the changes.
159
160 If IN_GROUP is zero, this is a single change. Try to recognize the insn
161 or validate the memory reference with the change applied. If the result
162 is not valid for the machine, suppress the change and return zero.
163 Otherwise, perform the change and return 1. */
164
165int
166validate_change (object, loc, new, in_group)
167 rtx object;
168 rtx *loc;
169 rtx new;
170 int in_group;
171{
172 rtx old = *loc;
173
174 if (old == new || rtx_equal_p (old, new))
175 return 1;
176
177 if (num_changes >= MAX_CHANGE_LOCS
178 || (in_group == 0 && num_changes != 0))
179 abort ();
180
181 *loc = new;
182
183 /* Save the information describing this change. */
184 change_objects[num_changes] = object;
185 change_locs[num_changes] = loc;
186 change_olds[num_changes] = old;
187
188 if (object && GET_CODE (object) != MEM)
189 {
190 /* Set INSN_CODE to force rerecognition of insn. Save old code in
191 case invalid. */
192 change_old_codes[num_changes] = INSN_CODE (object);
193 INSN_CODE (object) = -1;
194 }
195
196 num_changes++;
197
198 /* If we are making a group of changes, return 1. Otherwise, validate the
199 change group we made. */
200
201 if (in_group)
202 return 1;
203 else
204 return apply_change_group ();
205}
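/* Illustrative usage sketch (hypothetical helper; assumes PATTERN (INSN)
   is a simple SET whose source is a CONST_INT): a single change with
   IN_GROUP of zero is validated immediately and undone if INSN no longer
   matches its pattern.  */
#if 0
static int
example_negate_src_constant (insn)
     rtx insn;
{
  rtx set = PATTERN (insn);
  HOST_WIDE_INT val = INTVAL (SET_SRC (set));

  /* Returns 1 and keeps the change only if the negated constant is
     still valid for this insn.  */
  return validate_change (insn, &SET_SRC (set), GEN_INT (- val), 0);
}
#endif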
206
207/* Apply a group of changes previously issued with `validate_change'.
208 Return 1 if all changes are valid, zero otherwise. */
209
210int
211apply_change_group ()
212{
213 int i;
214
215 /* The changes have been applied and all INSN_CODEs have been reset to force
216 rerecognition.
217
218 The changes are valid if we aren't given an object, or if we are
219 given a MEM and it still is a valid address, or if this is an insn
220 and it is recognized. In the latter case, if reload has completed,
221 we also require that the operands meet the constraints for
222 the insn. We do not allow modifying an ASM_OPERANDS after reload
223 has completed because verifying the constraints is too difficult. */
224
225 for (i = 0; i < num_changes; i++)
226 {
227 rtx object = change_objects[i];
228
229 if (object == 0)
230 continue;
231
232 if (GET_CODE (object) == MEM)
233 {
234 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
235 break;
236 }
237 else if ((recog_memoized (object) < 0
238 && (asm_noperands (PATTERN (object)) < 0
239 || ! check_asm_operands (PATTERN (object))
240 || reload_completed))
241 || (reload_completed
242 && (insn_extract (object),
243 ! constrain_operands (INSN_CODE (object), 1))))
244 {
245 rtx pat = PATTERN (object);
246
247 /* Perhaps we couldn't recognize the insn because there were
248 extra CLOBBERs at the end. If so, try to re-recognize
249 without the last CLOBBER (later iterations will cause each of
250 them to be eliminated, in turn). But don't do this if we
251 have an ASM_OPERANDS. */
252 if (GET_CODE (pat) == PARALLEL
253 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
254 && asm_noperands (PATTERN (object)) < 0)
255 {
256 rtx newpat;
257
258 if (XVECLEN (pat, 0) == 2)
259 newpat = XVECEXP (pat, 0, 0);
260 else
261 {
262 int j;
263
264 newpat = gen_rtx_PARALLEL (VOIDmode,
265 gen_rtvec (XVECLEN (pat, 0) - 1));
266 for (j = 0; j < XVECLEN (newpat, 0); j++)
267 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
268 }
269
270 /* Add a new change to this group to replace the pattern
271 with this new pattern. Then consider this change
272 as having succeeded. The change we added will
273 cause the entire call to fail if things remain invalid.
274
275 Note that this can lose if a later change than the one
276 we are processing specified &XVECEXP (PATTERN (object), 0, X)
277 but this shouldn't occur. */
278
279 validate_change (object, &PATTERN (object), newpat, 1);
280 }
281 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
282 /* If this insn is a CLOBBER or USE, it is always valid, but is
283 never recognized. */
284 continue;
285 else
286 break;
287 }
288 }
289
290 if (i == num_changes)
291 {
292 num_changes = 0;
293 return 1;
294 }
295 else
296 {
297 cancel_changes (0);
298 return 0;
299 }
300}
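/* Illustrative usage sketch (hypothetical helper; assumes PATTERN (INSN)
   is a register-to-register SET): two related changes queued with
   IN_GROUP of 1 are kept or retracted together by apply_change_group.  */
#if 0
static int
example_swap_set_operands (insn)
     rtx insn;
{
  rtx set = PATTERN (insn);
  rtx src = SET_SRC (set);
  rtx dest = SET_DEST (set);

  validate_change (insn, &SET_SRC (set), dest, 1);
  validate_change (insn, &SET_DEST (set), src, 1);

  /* Either both replacements survive or both are backed out.  */
  return apply_change_group ();
}
#endif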
301
302/* Return the number of changes so far in the current group. */
303
304int
305num_validated_changes ()
306{
307 return num_changes;
308}
309
310/* Retract the changes numbered NUM and up. */
311
312void
313cancel_changes (num)
314 int num;
315{
316 int i;
317
318 /* Back out all the changes. Do this in the opposite order in which
319 they were made. */
320 for (i = num_changes - 1; i >= num; i--)
321 {
322 *change_locs[i] = change_olds[i];
323 if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
324 INSN_CODE (change_objects[i]) = change_old_codes[i];
325 }
326 num_changes = num;
327}
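/* Illustrative usage sketch (hypothetical helper): num_validated_changes
   records a checkpoint so that a caller can retract only the changes it
   queued itself, leaving any earlier pending changes alone.  */
#if 0
static void
example_retract_to_checkpoint (insn, loc, new)
     rtx insn;
     rtx *loc;
     rtx new;
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc, new, 1);
  /* ... further tentative changes could be queued here ... */

  /* Undo everything queued since the checkpoint.  */
  cancel_changes (checkpoint);
}
#endif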
328
329/* Replace every occurrence of FROM in X with TO. Mark each change with
330 validate_change passing OBJECT. */
331
332static void
333validate_replace_rtx_1 (loc, from, to, object)
334 rtx *loc;
335 rtx from, to, object;
336{
337 register int i, j;
338 register char *fmt;
339 register rtx x = *loc;
340 enum rtx_code code = GET_CODE (x);
341
342 /* X matches FROM if it is the same rtx or they are both referring to the
343 same register in the same mode. Avoid calling rtx_equal_p unless the
344 operands look similar. */
345
346 if (x == from
347 || (GET_CODE (x) == REG && GET_CODE (from) == REG
348 && GET_MODE (x) == GET_MODE (from)
349 && REGNO (x) == REGNO (from))
350 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
351 && rtx_equal_p (x, from)))
352 {
353 validate_change (object, loc, to, 1);
354 return;
355 }
356
357 /* For commutative or comparison operations, try replacing each argument
358 separately and see if we made any changes. If so, put a constant
359 argument last. */
360 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
361 {
362 int prev_changes = num_changes;
363
364 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
365 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
366 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
367 {
368 validate_change (object, loc,
369 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
370 : swap_condition (code),
371 GET_MODE (x), XEXP (x, 1),
372 XEXP (x, 0)),
373 1);
374 x = *loc;
375 code = GET_CODE (x);
376 }
377 }
378
379 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
380 done the substitution, otherwise we won't. */
381
382 switch (code)
383 {
384 case PLUS:
385 /* If we have a PLUS whose second operand is now a CONST_INT, use
386 plus_constant to try to simplify it. */
387 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
388 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
389 1);
390 return;
391
392 case MINUS:
393 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
394 {
395 validate_change (object, loc,
396 plus_constant (XEXP (x, 0), - INTVAL (to)),
397 1);
398 return;
399 }
400 break;
401
402 case ZERO_EXTEND:
403 case SIGN_EXTEND:
404 /* In these cases, the operation to be performed depends on the mode
405 of the operand. If we are replacing the operand with a VOIDmode
406 constant, we lose the information. So try to simplify the operation
407 in that case. If it fails, substitute in something that we know
408 won't be recognized. */
409 if (GET_MODE (to) == VOIDmode
410 && (XEXP (x, 0) == from
411 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
412 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
413 && REGNO (XEXP (x, 0)) == REGNO (from))))
414 {
415 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
416 GET_MODE (from));
417 if (new == 0)
418 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
419
420 validate_change (object, loc, new, 1);
421 return;
422 }
423 break;
424
425 case SUBREG:
426 /* If we have a SUBREG of a register that we are replacing and we are
427 replacing it with a MEM, make a new MEM and try replacing the
428 SUBREG with it. Don't do this if the MEM has a mode-dependent address
429 or if we would be widening it. */
430
431 if (SUBREG_REG (x) == from
432 && GET_CODE (from) == REG
433 && GET_CODE (to) == MEM
434 && ! mode_dependent_address_p (XEXP (to, 0))
435 && ! MEM_VOLATILE_P (to)
436 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
437 {
438 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
439 enum machine_mode mode = GET_MODE (x);
440 rtx new;
441
442 if (BYTES_BIG_ENDIAN)
443 offset += (MIN (UNITS_PER_WORD,
444 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
445 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
446
447 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
448 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
449 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
450 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
451 validate_change (object, loc, new, 1);
452 return;
453 }
454 break;
455
456 case ZERO_EXTRACT:
457 case SIGN_EXTRACT:
458 /* If we are replacing a register with memory, try to change the memory
459 to be the mode required for memory in extract operations (this isn't
460 likely to be an insertion operation; if it was, nothing bad will
461 happen, we might just fail in some cases). */
462
463 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
464 && GET_CODE (XEXP (x, 1)) == CONST_INT
465 && GET_CODE (XEXP (x, 2)) == CONST_INT
466 && ! mode_dependent_address_p (XEXP (to, 0))
467 && ! MEM_VOLATILE_P (to))
468 {
469 enum machine_mode wanted_mode = VOIDmode;
470 enum machine_mode is_mode = GET_MODE (to);
471 int pos = INTVAL (XEXP (x, 2));
472
473#ifdef HAVE_extzv
474 if (code == ZERO_EXTRACT)
475 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
476#endif
477#ifdef HAVE_extv
478 if (code == SIGN_EXTRACT)
479 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
480#endif
481
482 /* If we have a narrower mode, we can do something. */
483 if (wanted_mode != VOIDmode
484 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
485 {
486 int offset = pos / BITS_PER_UNIT;
487 rtx newmem;
488
489 /* If the bytes and bits are counted differently, we
490 must adjust the offset. */
491 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
492 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
493 - offset);
494
495 pos %= GET_MODE_BITSIZE (wanted_mode);
496
497 newmem = gen_rtx_MEM (wanted_mode,
498 plus_constant (XEXP (to, 0), offset));
499 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
500 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
501 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
502
503 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
504 validate_change (object, &XEXP (x, 0), newmem, 1);
505 }
506 }
507
508 break;
509
510 default:
511 break;
512 }
513
514 /* For commutative or comparison operations we've already performed
515 replacements. Don't try to perform them again. */
516 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
517 {
518 fmt = GET_RTX_FORMAT (code);
519 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
520 {
521 if (fmt[i] == 'e')
522 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
523 else if (fmt[i] == 'E')
524 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
525 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
526 }
527 }
528}
529
530/* Try replacing every occurrence of FROM in INSN with TO. After all
531 changes have been made, validate by seeing if INSN is still valid. */
532
533int
534validate_replace_rtx (from, to, insn)
535 rtx from, to, insn;
536{
537 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
538 return apply_change_group ();
539}
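/* Illustrative usage sketch (hypothetical helper): propagate a known
   constant value of REG into INSN, keeping the result only if the
   modified insn is still recognized.  */
#if 0
static int
example_propagate_constant (reg, val, insn)
     rtx reg;
     HOST_WIDE_INT val;
     rtx insn;
{
  return validate_replace_rtx (reg, GEN_INT (val), insn);
}
#endif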
540\f
541#ifdef HAVE_cc0
542/* Return 1 if the insn using CC0 set by INSN does not contain
543 any ordered tests applied to the condition codes.
544 EQ and NE tests do not count. */
545
546int
547next_insn_tests_no_inequality (insn)
548 rtx insn;
549{
550 register rtx next = next_cc0_user (insn);
551
552 /* If there is no next insn, we have to take the conservative choice. */
553 if (next == 0)
554 return 0;
555
556 return ((GET_CODE (next) == JUMP_INSN
557 || GET_CODE (next) == INSN
558 || GET_CODE (next) == CALL_INSN)
559 && ! inequality_comparisons_p (PATTERN (next)));
560}
561
562#if 0 /* This is useless since the insn that sets the cc's
563 must be followed immediately by the use of them. */
564/* Return 1 if the CC value set up by INSN is not used. */
565
566int
567next_insns_test_no_inequality (insn)
568 rtx insn;
569{
570 register rtx next = NEXT_INSN (insn);
571
572 for (; next != 0; next = NEXT_INSN (next))
573 {
574 if (GET_CODE (next) == CODE_LABEL
575 || GET_CODE (next) == BARRIER)
576 return 1;
577 if (GET_CODE (next) == NOTE)
578 continue;
579 if (inequality_comparisons_p (PATTERN (next)))
580 return 0;
581 if (sets_cc0_p (PATTERN (next)) == 1)
582 return 1;
583 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
584 return 1;
585 }
586 return 1;
587}
588#endif
589#endif
590\f
591/* This is used by find_single_use to locate an rtx that contains exactly one
592 use of DEST, which is typically either a REG or CC0. It returns a
593 pointer to the innermost rtx expression containing DEST. Appearances of
594 DEST that are being used to totally replace it are not counted. */
595
596static rtx *
597find_single_use_1 (dest, loc)
598 rtx dest;
599 rtx *loc;
600{
601 rtx x = *loc;
602 enum rtx_code code = GET_CODE (x);
603 rtx *result = 0;
604 rtx *this_result;
605 int i;
606 char *fmt;
607
608 switch (code)
609 {
610 case CONST_INT:
611 case CONST:
612 case LABEL_REF:
613 case SYMBOL_REF:
614 case CONST_DOUBLE:
615 case CLOBBER:
616 return 0;
617
618 case SET:
619 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
620 of a REG that occupies all of the REG, the insn uses DEST if
621 it is mentioned in the destination or the source. Otherwise, we
622 need just check the source. */
623 if (GET_CODE (SET_DEST (x)) != CC0
624 && GET_CODE (SET_DEST (x)) != PC
625 && GET_CODE (SET_DEST (x)) != REG
626 && ! (GET_CODE (SET_DEST (x)) == SUBREG
627 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
628 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
629 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
630 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
631 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
632 break;
633
634 return find_single_use_1 (dest, &SET_SRC (x));
635
636 case MEM:
637 case SUBREG:
638 return find_single_use_1 (dest, &XEXP (x, 0));
639
640 default:
641 break;
642 }
643
644 /* If it wasn't one of the common cases above, check each expression and
645 vector of this code. Look for a unique usage of DEST. */
646
647 fmt = GET_RTX_FORMAT (code);
648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
649 {
650 if (fmt[i] == 'e')
651 {
652 if (dest == XEXP (x, i)
653 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
654 && REGNO (dest) == REGNO (XEXP (x, i))))
655 this_result = loc;
656 else
657 this_result = find_single_use_1 (dest, &XEXP (x, i));
658
659 if (result == 0)
660 result = this_result;
661 else if (this_result)
662 /* Duplicate usage. */
663 return 0;
664 }
665 else if (fmt[i] == 'E')
666 {
667 int j;
668
669 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
670 {
671 if (XVECEXP (x, i, j) == dest
672 || (GET_CODE (dest) == REG
673 && GET_CODE (XVECEXP (x, i, j)) == REG
674 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
675 this_result = loc;
676 else
677 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
678
679 if (result == 0)
680 result = this_result;
681 else if (this_result)
682 return 0;
683 }
684 }
685 }
686
687 return result;
688}
689\f
690/* See if DEST, produced in INSN, is used only a single time in the
691 sequel. If so, return a pointer to the innermost rtx expression in which
692 it is used.
693
694 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
695
696 This routine will usually return zero either before flow is called (because
697 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
698 note can't be trusted).
699
700 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
701 care about REG_DEAD notes or LOG_LINKS.
702
703 Otherwise, we find the single use by finding an insn that has a
704 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
705 only referenced once in that insn, we know that it must be the first
706 and last insn referencing DEST. */
707
708rtx *
709find_single_use (dest, insn, ploc)
710 rtx dest;
711 rtx insn;
712 rtx *ploc;
713{
714 rtx next;
715 rtx *result;
716 rtx link;
717
718#ifdef HAVE_cc0
719 if (dest == cc0_rtx)
720 {
721 next = NEXT_INSN (insn);
722 if (next == 0
723 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
724 return 0;
725
726 result = find_single_use_1 (dest, &PATTERN (next));
727 if (result && ploc)
728 *ploc = next;
729 return result;
730 }
731#endif
732
733 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
734 return 0;
735
736 for (next = next_nonnote_insn (insn);
737 next != 0 && GET_CODE (next) != CODE_LABEL;
738 next = next_nonnote_insn (next))
739 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
740 {
741 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
742 if (XEXP (link, 0) == insn)
743 break;
744
745 if (link)
746 {
747 result = find_single_use_1 (dest, &PATTERN (next));
748 if (ploc)
749 *ploc = next;
750 return result;
751 }
752 }
753
754 return 0;
755}
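/* Illustrative usage sketch (hypothetical helper): ask whether the value
   computed into REG by INSN is consumed exactly once, as a combine-like
   pass would before trying to substitute into the user.  */
#if 0
static int
example_reg_used_once_p (reg, insn)
     rtx reg, insn;
{
  rtx use_insn;
  rtx *usep = find_single_use (reg, insn, &use_insn);

  /* When non-zero, *USEP is the innermost expression in USE_INSN that
     mentions REG, a natural location for validate_change.  */
  return usep != 0;
}
#endif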
756\f
757/* Return 1 if OP is a valid general operand for machine mode MODE.
758 This is either a register reference, a memory reference,
759 or a constant. In the case of a memory reference, the address
760 is checked for general validity for the target machine.
761
762 Register and memory references must have mode MODE in order to be valid,
763 but some constants have no machine mode and are valid for any mode.
764
765 If MODE is VOIDmode, OP is checked for validity for whatever mode
766 it has.
767
768 The main use of this function is as a predicate in match_operand
769 expressions in the machine description.
770
771 For an explanation of this function's behavior for registers of
772 class NO_REGS, see the comment for `register_operand'. */
773
774int
775general_operand (op, mode)
776 register rtx op;
777 enum machine_mode mode;
778{
779 register enum rtx_code code = GET_CODE (op);
780 int mode_altering_drug = 0;
781
782 if (mode == VOIDmode)
783 mode = GET_MODE (op);
784
785 /* Don't accept CONST_INT or anything similar
786 if the caller wants something floating. */
787 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
788 && GET_MODE_CLASS (mode) != MODE_INT
789 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
790 return 0;
791
792 if (CONSTANT_P (op))
793 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
794#ifdef LEGITIMATE_PIC_OPERAND_P
795 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
796#endif
797 && LEGITIMATE_CONSTANT_P (op));
798
799 /* Except for certain constants with VOIDmode, already checked for,
800 OP's mode must match MODE if MODE specifies a mode. */
801
802 if (GET_MODE (op) != mode)
803 return 0;
804
805 if (code == SUBREG)
806 {
807#ifdef INSN_SCHEDULING
808 /* On machines that have insn scheduling, we want all memory
809 references to be explicit, so outlaw paradoxical SUBREGs. */
810 if (GET_CODE (SUBREG_REG (op)) == MEM
811 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
812 return 0;
813#endif
814
815 op = SUBREG_REG (op);
816 code = GET_CODE (op);
817#if 0
818 /* No longer needed, since (SUBREG (MEM...))
819 will load the MEM into a reload reg in the MEM's own mode. */
820 mode_altering_drug = 1;
821#endif
822 }
823
824 if (code == REG)
825 /* A register whose class is NO_REGS is not a general operand. */
826 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
828
829 if (code == MEM)
830 {
831 register rtx y = XEXP (op, 0);
832 if (! volatile_ok && MEM_VOLATILE_P (op))
833 return 0;
834 if (GET_CODE (y) == ADDRESSOF)
835 return 1;
836 /* Use the mem's mode, since it will be reloaded thus. */
837 mode = GET_MODE (op);
838 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
839 }
840
841 /* Pretend this is an operand for now; we'll run force_operand
842 on its replacement in fixup_var_refs_1. */
843 if (code == ADDRESSOF)
844 return 1;
845
846 return 0;
847
848 win:
849 if (mode_altering_drug)
850 return ! mode_dependent_address_p (XEXP (op, 0));
851 return 1;
852}
853\f
854/* Return 1 if OP is a valid memory address for a memory reference
855 of mode MODE.
856
857 The main use of this function is as a predicate in match_operand
858 expressions in the machine description. */
859
860int
861address_operand (op, mode)
862 register rtx op;
863 enum machine_mode mode;
864{
865 return memory_address_p (mode, op);
866}
867
868/* Return 1 if OP is a register reference of mode MODE.
869 If MODE is VOIDmode, accept a register in any mode.
870
871 The main use of this function is as a predicate in match_operand
872 expressions in the machine description.
873
874 As a special exception, registers whose class is NO_REGS are
875 not accepted by `register_operand'. The reason for this change
876 is to allow the representation of special architecture artifacts
877 (such as a condition code register) without extending the rtl
878 definitions. Since registers of class NO_REGS cannot be used
879 as registers in any case where register classes are examined,
880 it is most consistent to keep this function from accepting them. */
881
882int
883register_operand (op, mode)
884 register rtx op;
885 enum machine_mode mode;
886{
887 if (GET_MODE (op) != mode && mode != VOIDmode)
888 return 0;
889
890 if (GET_CODE (op) == SUBREG)
891 {
892 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
893 because it is guaranteed to be reloaded into one.
894 Just make sure the MEM is valid in itself.
895 (Ideally, (SUBREG (MEM)...) should not exist after reload,
896 but currently it does result from (SUBREG (REG)...) where the
897 reg went on the stack.) */
898 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
899 return general_operand (op, mode);
900
901#ifdef CLASS_CANNOT_CHANGE_SIZE
902 if (GET_CODE (SUBREG_REG (op)) == REG
903 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
904 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
905 REGNO (SUBREG_REG (op)))
906 && (GET_MODE_SIZE (mode)
907 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
908 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
909 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
910 return 0;
911#endif
912
913 op = SUBREG_REG (op);
914 }
915
916 /* We don't consider registers whose class is NO_REGS
917 to be a register operand. */
918 return (GET_CODE (op) == REG
919 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
920 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
921}
922
923/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
924 or a hard register. */
925
926int
927scratch_operand (op, mode)
928 register rtx op;
929 enum machine_mode mode;
930{
931 return (GET_MODE (op) == mode
932 && (GET_CODE (op) == SCRATCH
933 || (GET_CODE (op) == REG
934 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
935}
936
937/* Return 1 if OP is a valid immediate operand for mode MODE.
938
939 The main use of this function is as a predicate in match_operand
940 expressions in the machine description. */
941
942int
943immediate_operand (op, mode)
944 register rtx op;
945 enum machine_mode mode;
946{
947 /* Don't accept CONST_INT or anything similar
948 if the caller wants something floating. */
949 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
950 && GET_MODE_CLASS (mode) != MODE_INT
951 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
952 return 0;
953
954 return (CONSTANT_P (op)
955 && (GET_MODE (op) == mode || mode == VOIDmode
956 || GET_MODE (op) == VOIDmode)
957#ifdef LEGITIMATE_PIC_OPERAND_P
958 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
959#endif
960 && LEGITIMATE_CONSTANT_P (op));
961}
962
963/* Returns 1 if OP is an operand that is a CONST_INT. */
964
965int
966const_int_operand (op, mode)
967 register rtx op;
968 enum machine_mode mode;
969{
970 return GET_CODE (op) == CONST_INT;
971}
972
973/* Returns 1 if OP is an operand that is a constant integer or constant
974 floating-point number. */
975
976int
977const_double_operand (op, mode)
978 register rtx op;
979 enum machine_mode mode;
980{
981 /* Don't accept CONST_INT or anything similar
982 if the caller wants something floating. */
983 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
984 && GET_MODE_CLASS (mode) != MODE_INT
985 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
986 return 0;
987
988 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
989 && (mode == VOIDmode || GET_MODE (op) == mode
990 || GET_MODE (op) == VOIDmode));
991}
992
993/* Return 1 if OP is a general operand that is not an immediate operand. */
994
995int
996nonimmediate_operand (op, mode)
997 register rtx op;
998 enum machine_mode mode;
999{
1000 return (general_operand (op, mode) && ! CONSTANT_P (op));
1001}
1002
1003/* Return 1 if OP is a register reference or immediate value of mode MODE. */
1004
1005int
1006nonmemory_operand (op, mode)
1007 register rtx op;
1008 enum machine_mode mode;
1009{
1010 if (CONSTANT_P (op))
1011 {
1012 /* Don't accept CONST_INT or anything similar
1013 if the caller wants something floating. */
1014 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1015 && GET_MODE_CLASS (mode) != MODE_INT
1016 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1017 return 0;
1018
1019 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1020#ifdef LEGITIMATE_PIC_OPERAND_P
1021 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1022#endif
1023 && LEGITIMATE_CONSTANT_P (op));
1024 }
1025
1026 if (GET_MODE (op) != mode && mode != VOIDmode)
1027 return 0;
1028
1029 if (GET_CODE (op) == SUBREG)
1030 {
1031 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1032 because it is guaranteed to be reloaded into one.
1033 Just make sure the MEM is valid in itself.
1034 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1035 but currently it does result from (SUBREG (REG)...) where the
1036 reg went on the stack.) */
1037 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1038 return general_operand (op, mode);
1039 op = SUBREG_REG (op);
1040 }
1041
1042 /* We don't consider registers whose class is NO_REGS
1043 to be a register operand. */
1044 return (GET_CODE (op) == REG
1045 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1046 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1047}
1048
1049/* Return 1 if OP is a valid operand that stands for pushing a
1050 value of mode MODE onto the stack.
1051
1052 The main use of this function is as a predicate in match_operand
1053 expressions in the machine description. */
1054
1055int
1056push_operand (op, mode)
1057 rtx op;
1058 enum machine_mode mode;
1059{
1060 if (GET_CODE (op) != MEM)
1061 return 0;
1062
1063 if (GET_MODE (op) != mode)
1064 return 0;
1065
1066 op = XEXP (op, 0);
1067
1068 if (GET_CODE (op) != STACK_PUSH_CODE)
1069 return 0;
1070
1071 return XEXP (op, 0) == stack_pointer_rtx;
1072}
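/* Illustrative sketch (hypothetical helper; assumes a target where the
   stack grows downward, so STACK_PUSH_CODE is PRE_DEC): the kind of
   operand push_operand accepts is a MEM whose address pre-modifies the
   stack pointer.  */
#if 0
static rtx
example_build_si_push_operand ()
{
  rtx addr = gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);

  /* push_operand (mem, SImode) is 1 for the MEM built here.  */
  return gen_rtx_MEM (SImode, addr);
}
#endif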
1073
1074/* Return 1 if ADDR is a valid memory address for mode MODE. */
1075
1076int
1077memory_address_p (mode, addr)
1078 enum machine_mode mode;
1079 register rtx addr;
1080{
1081 if (GET_CODE (addr) == ADDRESSOF)
1082 return 1;
1083
1084 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1085 return 0;
1086
1087 win:
1088 return 1;
1089}
1090
1091/* Return 1 if OP is a valid memory reference with mode MODE,
1092 including a valid address.
1093
1094 The main use of this function is as a predicate in match_operand
1095 expressions in the machine description. */
1096
1097int
1098memory_operand (op, mode)
1099 register rtx op;
1100 enum machine_mode mode;
1101{
1102 rtx inner;
1103
1104 if (! reload_completed)
1105 /* Note that no SUBREG is a memory operand before end of reload pass,
1106 because (SUBREG (MEM...)) forces reloading into a register. */
1107 return GET_CODE (op) == MEM && general_operand (op, mode);
1108
1109 if (mode != VOIDmode && GET_MODE (op) != mode)
1110 return 0;
1111
1112 inner = op;
1113 if (GET_CODE (inner) == SUBREG)
1114 inner = SUBREG_REG (inner);
1115
1116 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1117}
1118
1119/* Return 1 if OP is a valid indirect memory reference with mode MODE;
1120 that is, a memory reference whose address is a general_operand. */
1121
1122int
1123indirect_operand (op, mode)
1124 register rtx op;
1125 enum machine_mode mode;
1126{
1127 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1128 if (! reload_completed
1129 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1130 {
1131 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1132 rtx inner = SUBREG_REG (op);
1133
1134 if (BYTES_BIG_ENDIAN)
1135 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1136 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1137
1138 if (mode != VOIDmode && GET_MODE (op) != mode)
1139 return 0;
1140
1141 /* The only way that we can have a general_operand as the resulting
1142 address is if OFFSET is zero and the address already is an operand
1143 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1144 operand. */
1145
1146 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1147 || (GET_CODE (XEXP (inner, 0)) == PLUS
1148 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1149 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1150 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1151 }
1152
1153 return (GET_CODE (op) == MEM
1154 && memory_operand (op, mode)
1155 && general_operand (XEXP (op, 0), Pmode));
1156}
1157
1158/* Return 1 if this is a comparison operator. This allows the use of
1159 MATCH_OPERATOR to recognize all the branch insns. */
1160
1161int
1162comparison_operator (op, mode)
1163 register rtx op;
1164 enum machine_mode mode;
1165{
1166 return ((mode == VOIDmode || GET_MODE (op) == mode)
1167 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1168}
1169\f
1170/* If BODY is an insn body that uses ASM_OPERANDS,
1171 return the number of operands (both input and output) in the insn.
1172 Otherwise return -1. */
1173
1174int
1175asm_noperands (body)
1176 rtx body;
1177{
1178 if (GET_CODE (body) == ASM_OPERANDS)
1179 /* No output operands: return number of input operands. */
1180 return ASM_OPERANDS_INPUT_LENGTH (body);
1181 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1182 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1183 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1184 else if (GET_CODE (body) == PARALLEL
1185 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1186 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1187 {
1188 /* Multiple output operands, or 1 output plus some clobbers:
1189 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1190 int i;
1191 int n_sets;
1192
1193 /* Count backwards through CLOBBERs to determine number of SETs. */
1194 for (i = XVECLEN (body, 0); i > 0; i--)
1195 {
1196 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1197 break;
1198 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1199 return -1;
1200 }
1201
1202 /* N_SETS is now number of output operands. */
1203 n_sets = i;
1204
1205 /* Verify that all the SETs we have
1206 came from a single original asm_operands insn
1207 (so that invalid combinations are blocked). */
1208 for (i = 0; i < n_sets; i++)
1209 {
1210 rtx elt = XVECEXP (body, 0, i);
1211 if (GET_CODE (elt) != SET)
1212 return -1;
1213 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1214 return -1;
1215 /* If these ASM_OPERANDS rtx's came from different original insns
1216 then they aren't allowed together. */
1217 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1218 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1219 return -1;
1220 }
1221 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1222 + n_sets);
1223 }
1224 else if (GET_CODE (body) == PARALLEL
1225 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1226 {
1227 /* 0 outputs, but some clobbers:
1228 body is [(asm_operands ...) (clobber (reg ...))...]. */
1229 int i;
1230
1231 /* Make sure all the other parallel things really are clobbers. */
1232 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1233 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1234 return -1;
1235
1236 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1237 }
1238 else
1239 return -1;
1240}
1241
1242/* Assuming BODY is an insn body that uses ASM_OPERANDS,
1243 copy its operands (both input and output) into the vector OPERANDS,
1244 the locations of the operands within the insn into the vector OPERAND_LOCS,
1245 and the constraints for the operands into CONSTRAINTS.
1246 Write the modes of the operands into MODES.
1247 Return the assembler-template.
1248
1249 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1250 we don't store that info. */
1251
1252char *
1253decode_asm_operands (body, operands, operand_locs, constraints, modes)
1254 rtx body;
1255 rtx *operands;
1256 rtx **operand_locs;
1257 char **constraints;
1258 enum machine_mode *modes;
1259{
1260 register int i;
1261 int noperands;
1262 char *template = 0;
1263
1264 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1265 {
1266 rtx asmop = SET_SRC (body);
1267 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1268
1269 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1270
1271 for (i = 1; i < noperands; i++)
1272 {
1273 if (operand_locs)
1274 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1275 if (operands)
1276 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1277 if (constraints)
1278 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1279 if (modes)
1280 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1281 }
1282
1283 /* The output is in the SET.
1284 Its constraint is in the ASM_OPERANDS itself. */
1285 if (operands)
1286 operands[0] = SET_DEST (body);
1287 if (operand_locs)
1288 operand_locs[0] = &SET_DEST (body);
1289 if (constraints)
1290 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1291 if (modes)
1292 modes[0] = GET_MODE (SET_DEST (body));
1293 template = ASM_OPERANDS_TEMPLATE (asmop);
1294 }
1295 else if (GET_CODE (body) == ASM_OPERANDS)
1296 {
1297 rtx asmop = body;
1298 /* No output operands: BODY is (asm_operands ....). */
1299
1300 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1301
1302 /* The input operands are found in the 1st element vector. */
1303 /* Constraints for inputs are in the 2nd element vector. */
1304 for (i = 0; i < noperands; i++)
1305 {
1306 if (operand_locs)
1307 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1308 if (operands)
1309 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1310 if (constraints)
1311 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1312 if (modes)
1313 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1314 }
1315 template = ASM_OPERANDS_TEMPLATE (asmop);
1316 }
1317 else if (GET_CODE (body) == PARALLEL
1318 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1319 {
1320 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1321 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1322 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1323 int nout = 0; /* Does not include CLOBBERs. */
1324
1325 /* At least one output, plus some CLOBBERs. */
1326
1327 /* The outputs are in the SETs.
1328 Their constraints are in the ASM_OPERANDS itself. */
1329 for (i = 0; i < nparallel; i++)
1330 {
1331 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1332 break; /* Past last SET */
1333
1334 if (operands)
1335 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1336 if (operand_locs)
1337 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1338 if (constraints)
1339 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1340 if (modes)
1341 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1342 nout++;
1343 }
1344
1345 for (i = 0; i < nin; i++)
1346 {
1347 if (operand_locs)
1348 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1349 if (operands)
1350 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1351 if (constraints)
1352 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1353 if (modes)
1354 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1355 }
1356
1357 template = ASM_OPERANDS_TEMPLATE (asmop);
1358 }
1359 else if (GET_CODE (body) == PARALLEL
1360 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1361 {
1362 /* No outputs, but some CLOBBERs. */
1363
1364 rtx asmop = XVECEXP (body, 0, 0);
1365 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1366
1367 for (i = 0; i < nin; i++)
1368 {
1369 if (operand_locs)
1370 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1371 if (operands)
1372 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1373 if (constraints)
1374 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1375 if (modes)
1376 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1377 }
1378
1379 template = ASM_OPERANDS_TEMPLATE (asmop);
1380 }
1381
1382 return template;
1383}
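/* Illustrative usage sketch (hypothetical helper): pull an asm's operands,
   constraints and modes into caller-provided arrays, much as
   check_asm_operands above does.  Unwanted result vectors may be passed
   as NULL_PTR.  */
#if 0
static char *
example_decode_asm (insn, operands, constraints, modes)
     rtx insn;
     rtx *operands;
     char **constraints;
     enum machine_mode *modes;
{
  int noperands = asm_noperands (PATTERN (insn));

  if (noperands <= 0)
    return 0;

  /* The arrays must have room for at least NOPERANDS entries.  */
  return decode_asm_operands (PATTERN (insn), operands, NULL_PTR,
			      constraints, modes);
}
#endif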
1384\f
1385/* Given an rtx *P, if it is a sum containing an integer constant term,
1386 return the location (type rtx *) of the pointer to that constant term.
1387 Otherwise, return a null pointer. */
1388
1389static rtx *
1390find_constant_term_loc (p)
1391 rtx *p;
1392{
1393 register rtx *tem;
1394 register enum rtx_code code = GET_CODE (*p);
1395
1396 /* If *P IS such a constant term, P is its location. */
1397
1398 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1399 || code == CONST)
1400 return p;
1401
1402 /* Otherwise, if not a sum, it has no constant term. */
1403
1404 if (GET_CODE (*p) != PLUS)
1405 return 0;
1406
1407 /* If one of the summands is constant, return its location. */
1408
1409 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1410 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1411 return p;
1412
1413 /* Otherwise, check each summand for containing a constant term. */
1414
1415 if (XEXP (*p, 0) != 0)
1416 {
1417 tem = find_constant_term_loc (&XEXP (*p, 0));
1418 if (tem != 0)
1419 return tem;
1420 }
1421
1422 if (XEXP (*p, 1) != 0)
1423 {
1424 tem = find_constant_term_loc (&XEXP (*p, 1));
1425 if (tem != 0)
1426 return tem;
1427 }
1428
1429 return 0;
1430}
1431\f
1432/* Return 1 if OP is a memory reference
1433 whose address contains no side effects
1434 and remains valid after the addition
1435 of a positive integer less than the
1436 size of the object being referenced.
1437
1438 We assume that the original address is valid and do not check it.
1439
1440 This uses strict_memory_address_p as a subroutine, so
1441 don't use it before reload. */
1442
1443int
1444offsettable_memref_p (op)
1445 rtx op;
1446{
1447 return ((GET_CODE (op) == MEM)
1448 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1449}
1450
1451/* Similar, but don't require a strictly valid mem ref:
1452 consider pseudo-regs valid as index or base regs. */
1453
1454int
1455offsettable_nonstrict_memref_p (op)
1456 rtx op;
1457{
1458 return ((GET_CODE (op) == MEM)
1459 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1460}
1461
1462/* Return 1 if Y is a memory address which contains no side effects
1463 and would remain valid after the addition of a positive integer
1464 less than the size of that mode.
1465
1466 We assume that the original address is valid and do not check it.
1467 We do check that it is valid for narrower modes.
1468
1469 If STRICTP is nonzero, we require a strictly valid address,
1470 for the sake of use in reload.c. */
1471
1472int
1473offsettable_address_p (strictp, mode, y)
1474 int strictp;
1475 enum machine_mode mode;
1476 register rtx y;
1477{
1478 register enum rtx_code ycode = GET_CODE (y);
1479 register rtx z;
1480 rtx y1 = y;
1481 rtx *y2;
1482 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1483
1484 if (CONSTANT_ADDRESS_P (y))
1485 return 1;
1486
1487 /* Adjusting an offsettable address involves changing to a narrower mode.
1488 Make sure that's OK. */
1489
1490 if (mode_dependent_address_p (y))
1491 return 0;
1492
1493 /* If the expression contains a constant term,
1494 see if it remains valid when max possible offset is added. */
1495
1496 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1497 {
1498 int good;
1499
1500 y1 = *y2;
1501 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1502 /* Use QImode because an odd displacement may be automatically invalid
1503 for any wider mode. But it should be valid for a single byte. */
1504 good = (*addressp) (QImode, y);
1505
1506 /* In any case, restore old contents of memory. */
1507 *y2 = y1;
1508 return good;
1509 }
1510
1511 if (ycode == PRE_DEC || ycode == PRE_INC
1512 || ycode == POST_DEC || ycode == POST_INC)
1513 return 0;
1514
1515 /* The offset added here is chosen as the maximum offset that
1516 any instruction could need to add when operating on something
1517 of the specified mode. We assume that if Y and Y+c are
1518 valid addresses then so is Y+d for all 0<d<c. */
1519
1520 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1521
1522 /* Use QImode because an odd displacement may be automatically invalid
1523 for any wider mode. But it should be valid for a single byte. */
1524 return (*addressp) (QImode, z);
1525}
1526
1527/* Return 1 if ADDR is an address-expression whose effect depends
1528 on the mode of the memory reference it is used in.
1529
1530 Autoincrement addressing is a typical example of mode-dependence
1531 because the amount of the increment depends on the mode. */
1532
1533int
1534mode_dependent_address_p (addr)
1535 rtx addr;
1536{
1537 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1538 return 0;
1539 win:
1540 return 1;
1541}
1542
1543/* Return 1 if OP is a general operand
1544 other than a memory ref with a mode dependent address. */
1545
1546int
1547mode_independent_operand (op, mode)
1548 enum machine_mode mode;
1549 rtx op;
1550{
1551 rtx addr;
1552
1553 if (! general_operand (op, mode))
1554 return 0;
1555
1556 if (GET_CODE (op) != MEM)
1557 return 1;
1558
1559 addr = XEXP (op, 0);
1560 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1561 return 1;
1562 lose:
1563 return 0;
1564}
1565
1566/* Given an operand OP that is a valid memory reference
1567 which satisfies offsettable_memref_p,
1568 return a new memory reference whose address has been adjusted by OFFSET.
1569 OFFSET should be positive and less than the size of the object referenced.
1570*/
1571
1572rtx
1573adj_offsettable_operand (op, offset)
1574 rtx op;
1575 int offset;
1576{
1577 register enum rtx_code code = GET_CODE (op);
1578
1579 if (code == MEM)
1580 {
1581 register rtx y = XEXP (op, 0);
1582 register rtx new;
1583
1584 if (CONSTANT_ADDRESS_P (y))
1585 {
1586 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1587 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1588 return new;
1589 }
1590
1591 if (GET_CODE (y) == PLUS)
1592 {
1593 rtx z = y;
1594 register rtx *const_loc;
1595
1596 op = copy_rtx (op);
1597 z = XEXP (op, 0);
1598 const_loc = find_constant_term_loc (&z);
1599 if (const_loc)
1600 {
1601 *const_loc = plus_constant_for_output (*const_loc, offset);
1602 return op;
1603 }
1604 }
1605
1606 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1607 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1608 return new;
1609 }
1610 abort ();
1611}
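/* Illustrative usage sketch (hypothetical helper): after checking
   offsettable_memref_p, a caller can refer to memory a few bytes past
   the start of OP, e.g. the second word of a multi-word MEM.  */
#if 0
static rtx
example_second_word_address (op)
     rtx op;
{
  if (! offsettable_memref_p (op))
    abort ();

  /* Same mode as OP, address displaced by one word.  */
  return adj_offsettable_operand (op, UNITS_PER_WORD);
}
#endif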
1612\f
1613#ifdef REGISTER_CONSTRAINTS
1614
1615/* Check the operands of an insn (found in recog_operands)
1616 against the insn's operand constraints (found via INSN_CODE_NUM)
1617 and return 1 if they are valid.
1618
1619 WHICH_ALTERNATIVE is set to a number which indicates which
1620 alternative of constraints was matched: 0 for the first alternative,
1621 1 for the next, etc.
1622
1623 In addition, when two operands are matched
1624 and it happens that the output operand is (reg) while the
1625 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1626 make the output operand look like the input.
1627 This is because the output operand is the one the template will print.
1628
1629 This is used in final, just before printing the assembler code and by
1630 the routines that determine an insn's attribute.
1631
1632 If STRICT is a positive non-zero value, it means that we have been
1633 called after reload has been completed. In that case, we must
1634 do all checks strictly. If it is zero, it means that we have been called
1635 before reload has completed. In that case, we first try to see if we can
1636 find an alternative that matches strictly. If not, we try again, this
1637 time assuming that reload will fix up the insn. This provides a "best
1638 guess" for the alternative and is used to compute attributes of insns prior
1639 to reload. A negative value of STRICT is used for this internal call. */
1640
1641struct funny_match
1642{
1643 int this, other;
1644};
1645
1646int
1647constrain_operands (insn_code_num, strict)
1648 int insn_code_num;
1649 int strict;
1650{
1651 char *constraints[MAX_RECOG_OPERANDS];
1652 int matching_operands[MAX_RECOG_OPERANDS];
1653 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1654 int earlyclobber[MAX_RECOG_OPERANDS];
1655 register int c;
1656 int noperands = insn_n_operands[insn_code_num];
1657
1658 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1659 int funny_match_index;
1660 int nalternatives = insn_n_alternatives[insn_code_num];
1661
1662 if (noperands == 0 || nalternatives == 0)
1663 return 1;
1664
1665 for (c = 0; c < noperands; c++)
1666 {
1667 constraints[c] = insn_operand_constraint[insn_code_num][c];
1668 matching_operands[c] = -1;
1669 op_types[c] = OP_IN;
1670 }
1671
1672 which_alternative = 0;
1673
1674 while (which_alternative < nalternatives)
1675 {
1676 register int opno;
1677 int lose = 0;
1678 funny_match_index = 0;
1679
1680 for (opno = 0; opno < noperands; opno++)
1681 {
1682 register rtx op = recog_operand[opno];
1683 enum machine_mode mode = GET_MODE (op);
1684 register char *p = constraints[opno];
1685 int offset = 0;
1686 int win = 0;
1687 int val;
1688
1689 earlyclobber[opno] = 0;
1690
1691 /* A unary operator may be accepted by the predicate, but it
1692 is irrelevant for matching constraints. */
1693 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
1694 op = XEXP (op, 0);
1695
1696 if (GET_CODE (op) == SUBREG)
1697 {
1698 if (GET_CODE (SUBREG_REG (op)) == REG
1699 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1700 offset = SUBREG_WORD (op);
1701 op = SUBREG_REG (op);
1702 }
1703
1704 /* An empty constraint or empty alternative
1705 allows anything which matched the pattern. */
1706 if (*p == 0 || *p == ',')
1707 win = 1;
1708
1709 while (*p && (c = *p++) != ',')
1710 switch (c)
1711 {
1712 case '?':
1713 case '!':
1714 case '*':
1715 case '%':
1716 break;
1717
1718 case '#':
1719 /* Ignore rest of this alternative as far as
1720 constraint checking is concerned. */
1721 while (*p && *p != ',')
1722 p++;
1723 break;
1724
1725 case '=':
1726 op_types[opno] = OP_OUT;
1727 break;
1728
1729 case '+':
1730 op_types[opno] = OP_INOUT;
1731 break;
1732
1733 case '&':
1734 earlyclobber[opno] = 1;
1735 break;
1736
1737 case '0':
1738 case '1':
1739 case '2':
1740 case '3':
1741 case '4':
1742 /* This operand must be the same as a previous one.
1743 This kind of constraint is used for instructions such
1744 as add when they take only two operands.
1745
1746 Note that the lower-numbered operand is passed first.
1747
1748 If we are not testing strictly, assume that this constraint
1749 will be satisfied. */
1750 if (strict < 0)
1751 val = 1;
1752 else
1753 val = operands_match_p (recog_operand[c - '0'],
1754 recog_operand[opno]);
1755
1756 matching_operands[opno] = c - '0';
1757 matching_operands[c - '0'] = opno;
1758
1759 if (val != 0)
1760 win = 1;
1761 /* If output is *x and input is *--x,
1762 arrange later to change the output to *--x as well,
1763 since the output op is the one that will be printed. */
1764 if (val == 2 && strict > 0)
1765 {
1766 funny_match[funny_match_index].this = opno;
1767 funny_match[funny_match_index++].other = c - '0';
1768 }
1769 break;
1770
1771 case 'p':
1772 /* p is used for address_operands. When we are called by
1773 gen_reload, no one will have checked that the address is
1774 strictly valid, i.e., that all pseudos requiring hard regs
1775 have gotten them. */
1776 if (strict <= 0
1777 || (strict_memory_address_p
1778 (insn_operand_mode[insn_code_num][opno], op)))
1779 win = 1;
1780 break;
1781
1782 /* No need to check general_operand again;
1783 it was done in insn-recog.c. */
1784 case 'g':
1785 /* Anything goes unless it is a REG and really has a hard reg
1786 but the hard reg is not in the class GENERAL_REGS. */
1787 if (strict < 0
1788 || GENERAL_REGS == ALL_REGS
1789 || GET_CODE (op) != REG
1790 || (reload_in_progress
1791 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1792 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
1793 win = 1;
1794 break;
1795
1796 case 'r':
1797 if (strict < 0
1798 || (strict == 0
1799 && GET_CODE (op) == REG
1800 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1801 || (strict == 0 && GET_CODE (op) == SCRATCH)
1802 || (GET_CODE (op) == REG
1803 && ((GENERAL_REGS == ALL_REGS
1804 && REGNO (op) < FIRST_PSEUDO_REGISTER)
1805 || reg_fits_class_p (op, GENERAL_REGS,
1806 offset, mode))))
1807 win = 1;
1808 break;
1809
1810 case 'X':
1811 /* This is used for a MATCH_SCRATCH in the cases when
1812 we don't actually need anything. So anything goes
1813 any time. */
1814 win = 1;
1815 break;
1816
1817 case 'm':
1818 if (GET_CODE (op) == MEM
1819 /* Before reload, accept what reload can turn into mem. */
1820 || (strict < 0 && CONSTANT_P (op))
1821 /* During reload, accept a pseudo */
1822 || (reload_in_progress && GET_CODE (op) == REG
1823 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1824 win = 1;
1825 break;
1826
1827 case '<':
1828 if (GET_CODE (op) == MEM
1829 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1830 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1831 win = 1;
1832 break;
1833
1834 case '>':
1835 if (GET_CODE (op) == MEM
1836 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1837 || GET_CODE (XEXP (op, 0)) == POST_INC))
1838 win = 1;
1839 break;
1840
1841 case 'E':
1842#ifndef REAL_ARITHMETIC
1843 /* Match any CONST_DOUBLE, but only if
1844 we can examine the bits of it reliably. */
1845 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1846 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1847 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1848 break;
1849#endif
1850 if (GET_CODE (op) == CONST_DOUBLE)
1851 win = 1;
1852 break;
1853
1854 case 'F':
1855 if (GET_CODE (op) == CONST_DOUBLE)
1856 win = 1;
1857 break;
1858
1859 case 'G':
1860 case 'H':
1861 if (GET_CODE (op) == CONST_DOUBLE
1862 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
1863 win = 1;
1864 break;
1865
1866 case 's':
1867 if (GET_CODE (op) == CONST_INT
1868 || (GET_CODE (op) == CONST_DOUBLE
1869 && GET_MODE (op) == VOIDmode))
1870 break;
1871 case 'i':
1872 if (CONSTANT_P (op))
1873 win = 1;
1874 break;
1875
1876 case 'n':
1877 if (GET_CODE (op) == CONST_INT
1878 || (GET_CODE (op) == CONST_DOUBLE
1879 && GET_MODE (op) == VOIDmode))
1880 win = 1;
1881 break;
1882
1883 case 'I':
1884 case 'J':
1885 case 'K':
1886 case 'L':
1887 case 'M':
1888 case 'N':
1889 case 'O':
1890 case 'P':
1891 if (GET_CODE (op) == CONST_INT
1892 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
1893 win = 1;
1894 break;
1895
1896#ifdef EXTRA_CONSTRAINT
1897 case 'Q':
1898 case 'R':
1899 case 'S':
1900 case 'T':
1901 case 'U':
1902 if (EXTRA_CONSTRAINT (op, c))
1903 win = 1;
1904 break;
1905#endif
1906
1907 case 'V':
1908 if (GET_CODE (op) == MEM
1909 && ((strict > 0 && ! offsettable_memref_p (op))
1910 || (strict < 0
1911 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
1912 || (reload_in_progress
1913 && !(GET_CODE (op) == REG
1914 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
1915 win = 1;
1916 break;
1917
1918 case 'o':
1919 if ((strict > 0 && offsettable_memref_p (op))
1920 || (strict == 0 && offsettable_nonstrict_memref_p (op))
1921 /* Before reload, accept what reload can handle. */
1922 || (strict < 0
1923 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
1924 /* During reload, accept a pseudo */
1925 || (reload_in_progress && GET_CODE (op) == REG
1926 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1927 win = 1;
1928 break;
1929
1930 default:
1931 if (strict < 0
1932 || (strict == 0
1933 && GET_CODE (op) == REG
1934 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1935 || (strict == 0 && GET_CODE (op) == SCRATCH)
1936 || (GET_CODE (op) == REG
1937 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
1938 offset, mode)))
1939 win = 1;
1940 }
1941
1942 constraints[opno] = p;
1943 /* If this operand did not win somehow,
1944 this alternative loses. */
1945 if (! win)
1946 lose = 1;
1947 }
1948 /* This alternative won; the operands are ok.
1949 Change whichever operands this alternative says to change. */
1950 if (! lose)
1951 {
1952 int opno, eopno;
1953
1954 /* See if any earlyclobber operand conflicts with some other
1955 operand. */
1956
1957 if (strict > 0)
1958 for (eopno = 0; eopno < noperands; eopno++)
1959 /* Ignore earlyclobber operands now in memory,
1960 because we would often report failure when we have
1961 two memory operands, one of which was formerly a REG. */
1962 if (earlyclobber[eopno]
1963 && GET_CODE (recog_operand[eopno]) == REG)
9e21be9d
RK
1964 for (opno = 0; opno < noperands; opno++)
1965 if ((GET_CODE (recog_operand[opno]) == MEM
1966 || op_types[opno] != OP_OUT)
1967 && opno != eopno
0f41302f 1968 /* Ignore things like match_operator operands. */
0b97ea96 1969 && *insn_operand_constraint[insn_code_num][opno] != 0
9e21be9d 1970 && ! (matching_operands[opno] == eopno
011063bd
JW
1971 && operands_match_p (recog_operand[opno],
1972 recog_operand[eopno]))
9e21be9d
RK
1973 && ! safe_from_earlyclobber (recog_operand[opno],
1974 recog_operand[eopno]))
1975 lose = 1;
1976
1977 if (! lose)
1978 {
1979 while (--funny_match_index >= 0)
1980 {
1981 recog_operand[funny_match[funny_match_index].other]
1982 = recog_operand[funny_match[funny_match_index].this];
1983 }
1984
1985 return 1;
1986 }
1987 }
1988
1989 which_alternative++;
1990 }
1991
1992 /* If we are about to reject this, but we are not to test strictly,
1993 try a very loose test. Only return failure if it fails also. */
1994 if (strict == 0)
1995 return constrain_operands (insn_code_num, -1);
1996 else
1997 return 0;
1998}
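/* Illustrative usage sketch (hypothetical helper): roughly how final and
   the attribute routines use constrain_operands after reload to find out
   which constraint alternative an insn satisfies.  */
#if 0
static int
example_insn_alternative (insn)
     rtx insn;
{
  int icode = recog_memoized (insn);

  if (icode < 0)
    return -1;

  insn_extract (insn);		/* Fill in recog_operand[].  */
  if (! constrain_operands (icode, 1))
    return -1;

  return which_alternative;
}
#endif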
1999
2000/* Return 1 iff OPERAND (assumed to be a REG rtx)
2001 is a hard reg in class CLASS when its regno is offset by OFFSET
2002 and changed to mode MODE.
2003 If REG occupies multiple hard regs, all of them must be in CLASS. */
2004
2005int
2006reg_fits_class_p (operand, class, offset, mode)
2007 rtx operand;
2008 register enum reg_class class;
2009 int offset;
2010 enum machine_mode mode;
2011{
2012 register int regno = REGNO (operand);
2013 if (regno < FIRST_PSEUDO_REGISTER
2014 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2015 regno + offset))
2016 {
2017 register int sr;
2018 regno += offset;
2019 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2020 sr > 0; sr--)
2021 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2022 regno + sr))
2023 break;
2024 return sr == 0;
2025 }
2026
2027 return 0;
2028}
2029
2030#endif /* REGISTER_CONSTRAINTS */