/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that defaults UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that defaults UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
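
/* An illustrative sketch of the change-group protocol above (INSN, X, A
   and B are hypothetical placeholders, not names from this file):

     validate_change (insn, &XEXP (x, 0), a, 1);   ... queue change 1 ...
     validate_change (insn, &XEXP (x, 1), b, 1);   ... queue change 2 ...
     if (! apply_change_group ())
       ... INSN no longer matched; both changes have been undone ...

   With IN_GROUP == 0, the single change is validated, and kept or undone,
   before validate_change returns.  */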


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
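
/* Illustrative sketch: a caller can tentatively extend an existing group
   and retract only its own additions (INSN, LOC and NEW_RTX are
   hypothetical placeholders):

     int base = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (! verify_changes (base))
       cancel_changes (base);   ... retracts only the changes added here ...  */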

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv 0
#define CODE_FOR_extv CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv 0
#define CODE_FOR_extzv CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X) -> (subreg (reg X)),
     so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   replaces occurrences in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;   /* Old RTX */
  rtx to;     /* New RTX */
  rtx insn;   /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
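
/* For example (FROM_REG and TO_REG being hypothetical REG rtxes), a
   caller that wants the replacement validated would follow up with
   apply_change_group, since this routine only queues the changes:

     validate_replace_src_group (from_reg, to_reg, insn);
     if (! apply_change_group ())
       ... INSN does not accept TO_REG in its sources ...  */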

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
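
/* For instance, when STACK_GROWS_DOWNWARD a simple word push is
   typically (mem:SI (pre_dec:P (reg sp))), while a push whose size
   PUSH_ROUNDING rounds up must take the PRE_MODIFY form checked above,
   e.g. (mem:M (pre_modify:P (reg sp) (plus:P (reg sp) (const_int -N))))
   (illustrative modes; P stands for Pmode).  */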

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
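
/* For example, for the body of the illustrative statement
     asm ("..." : "=r" (x), "=r" (y) : "r" (z))
   which is a PARALLEL of two SETs, asm_noperands returns 3: two outputs
   plus one input.  */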

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;        /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
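
/* The resulting operand order is outputs first (one per SET), then the
   ASM_OPERANDS inputs, then the label operands of an asm goto.  So for
   the illustrative asm ("..." : "=r" (x) : "r" (y)), operands[0] is the
   output X and operands[1] is the input Y.  */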

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
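
/* Illustrative use, as done operand by operand in check_asm_operands
   above: asm_operand_ok (op, "r", NULL) is > 0 when OP is a valid
   register operand, while a matching constraint such as "0" yields a
   negative (inconclusive) result when no CONSTRAINTS array is given.  */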
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
\f
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
2034
2035 /* Return 1 if ADDR is an address-expression whose effect depends
2036 on the mode of the memory reference it is used in.
2037
2038 ADDRSPACE is the address space associated with the address.
2039
2040 Autoincrement addressing is a typical example of mode-dependence
2041 because the amount of the increment depends on the mode. */
2042
2043 bool
2044 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2045 {
2046 /* Auto-increment addressing with anything other than post_modify
2047 or pre_modify always introduces a mode dependency. Catch such
2048 cases now instead of deferring to the target. */
2049 if (GET_CODE (addr) == PRE_INC
2050 || GET_CODE (addr) == POST_INC
2051 || GET_CODE (addr) == PRE_DEC
2052 || GET_CODE (addr) == POST_DEC)
2053 return true;
2054
2055 return targetm.mode_dependent_address_p (addr, addrspace);
2056 }
2057 \f
2058 /* Like extract_insn, but save the insn extracted and don't extract it
2059 again when called again for the same insn, expecting that recog_data
2060 still contains valid information.  This is used primarily by the
2061 gen_attr infrastructure, which extracts the same insn over and over. */
2062 void
2063 extract_insn_cached (rtx insn)
2064 {
2065 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2066 return;
2067 extract_insn (insn);
2068 recog_data.insn = insn;
2069 }
2070
2071 /* Do cached extract_insn, constrain_operands and complain about failures.
2072 Used by insn_attrtab. */
2073 void
2074 extract_constrain_insn_cached (rtx insn)
2075 {
2076 extract_insn_cached (insn);
2077 if (which_alternative == -1
2078 && !constrain_operands (reload_completed))
2079 fatal_insn_not_found (insn);
2080 }
2081
2082 /* Do cached constrain_operands and complain about failures. */
2083 int
2084 constrain_operands_cached (int strict)
2085 {
2086 if (which_alternative == -1)
2087 return constrain_operands (strict);
2088 else
2089 return 1;
2090 }
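/* Illustrative sketch, not compiled; the function name is hypothetical.
   This is the access pattern the cached entry points above exist to serve:
   generated attribute code may query the same insn many times.  */
#if 0
static bool
example_operand_is_mem (rtx insn, int opno)
{
  extract_constrain_insn_cached (insn);	/* Cheap if already extracted.  */
  return MEM_P (recog_data.operand[opno]);
}
#endif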
2091 \f
2092 /* Analyze INSN and fill in recog_data. */
2093
2094 void
2095 extract_insn (rtx insn)
2096 {
2097 int i;
2098 int icode;
2099 int noperands;
2100 rtx body = PATTERN (insn);
2101
2102 recog_data.n_operands = 0;
2103 recog_data.n_alternatives = 0;
2104 recog_data.n_dups = 0;
2105 recog_data.is_asm = false;
2106
2107 switch (GET_CODE (body))
2108 {
2109 case USE:
2110 case CLOBBER:
2111 case ASM_INPUT:
2112 case ADDR_VEC:
2113 case ADDR_DIFF_VEC:
2114 case VAR_LOCATION:
2115 return;
2116
2117 case SET:
2118 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2119 goto asm_insn;
2120 else
2121 goto normal_insn;
2122 case PARALLEL:
2123 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2124 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2125 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2126 goto asm_insn;
2127 else
2128 goto normal_insn;
2129 case ASM_OPERANDS:
2130 asm_insn:
2131 recog_data.n_operands = noperands = asm_noperands (body);
2132 if (noperands >= 0)
2133 {
2134 /* This insn is an `asm' with operands. */
2135
2136 /* expand_asm_operands makes sure there aren't too many operands. */
2137 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2138
2139 /* Now get the operand values and constraints out of the insn. */
2140 decode_asm_operands (body, recog_data.operand,
2141 recog_data.operand_loc,
2142 recog_data.constraints,
2143 recog_data.operand_mode, NULL);
2144 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2145 if (noperands > 0)
2146 {
2147 const char *p = recog_data.constraints[0];
2148 recog_data.n_alternatives = 1;
2149 while (*p)
2150 recog_data.n_alternatives += (*p++ == ',');
2151 }
2152 recog_data.is_asm = true;
2153 break;
2154 }
2155 fatal_insn_not_found (insn);
2156
2157 default:
2158 normal_insn:
2159 /* Ordinary insn: recognize it, get the operands via insn_extract
2160 and get the constraints. */
2161
2162 icode = recog_memoized (insn);
2163 if (icode < 0)
2164 fatal_insn_not_found (insn);
2165
2166 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2167 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2168 recog_data.n_dups = insn_data[icode].n_dups;
2169
2170 insn_extract (insn);
2171
2172 for (i = 0; i < noperands; i++)
2173 {
2174 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2175 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2176 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2177 /* VOIDmode match_operands get their mode from the real operand. */
2178 if (recog_data.operand_mode[i] == VOIDmode)
2179 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2180 }
2181 }
2182 for (i = 0; i < noperands; i++)
2183 recog_data.operand_type[i]
2184 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2185 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2186 : OP_IN);
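	/* For instance (illustrative): constraint strings "=r", "+r" and "rm"
	   classify their operands as OP_OUT, OP_INOUT and OP_IN respectively. */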
2187
2188 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2189
2190 if (INSN_CODE (insn) < 0)
2191 for (i = 0; i < recog_data.n_alternatives; i++)
2192 recog_data.alternative_enabled_p[i] = true;
2193 else
2194 {
2195 recog_data.insn = insn;
2196 for (i = 0; i < recog_data.n_alternatives; i++)
2197 {
2198 which_alternative = i;
2199 recog_data.alternative_enabled_p[i]
2200 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2201 }
2202 }
2203
2204 recog_data.insn = NULL;
2205 which_alternative = -1;
2206 }
2207
2208 /* After calling extract_insn, you can use this function to extract some
2209 information from the constraint strings into a more usable form.
2210 The collected data is stored in recog_op_alt. */
2211 void
2212 preprocess_constraints (void)
2213 {
2214 int i;
2215
2216 for (i = 0; i < recog_data.n_operands; i++)
2217 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2218 * sizeof (struct operand_alternative)));
2219
2220 for (i = 0; i < recog_data.n_operands; i++)
2221 {
2222 int j;
2223 struct operand_alternative *op_alt;
2224 const char *p = recog_data.constraints[i];
2225
2226 op_alt = recog_op_alt[i];
2227
2228 for (j = 0; j < recog_data.n_alternatives; j++)
2229 {
2230 op_alt[j].cl = NO_REGS;
2231 op_alt[j].constraint = p;
2232 op_alt[j].matches = -1;
2233 op_alt[j].matched = -1;
2234
2235 if (!recog_data.alternative_enabled_p[j])
2236 {
2237 p = skip_alternative (p);
2238 continue;
2239 }
2240
2241 if (*p == '\0' || *p == ',')
2242 {
2243 op_alt[j].anything_ok = 1;
2244 continue;
2245 }
2246
2247 for (;;)
2248 {
2249 char c = *p;
2250 if (c == '#')
2251 do
2252 c = *++p;
2253 while (c != ',' && c != '\0');
2254 if (c == ',' || c == '\0')
2255 {
2256 p++;
2257 break;
2258 }
2259
2260 switch (c)
2261 {
2262 case '=': case '+': case '*': case '%':
2263 case 'E': case 'F': case 'G': case 'H':
2264 case 's': case 'i': case 'n':
2265 case 'I': case 'J': case 'K': case 'L':
2266 case 'M': case 'N': case 'O': case 'P':
2267 /* These don't say anything we care about. */
2268 break;
2269
2270 case '?':
2271 op_alt[j].reject += 6;
2272 break;
2273 case '!':
2274 op_alt[j].reject += 600;
2275 break;
2276 case '&':
2277 op_alt[j].earlyclobber = 1;
2278 break;
2279
2280 case '0': case '1': case '2': case '3': case '4':
2281 case '5': case '6': case '7': case '8': case '9':
2282 {
2283 char *end;
2284 op_alt[j].matches = strtoul (p, &end, 10);
2285 recog_op_alt[op_alt[j].matches][j].matched = i;
2286 p = end;
2287 }
2288 continue;
2289
2290 case TARGET_MEM_CONSTRAINT:
2291 op_alt[j].memory_ok = 1;
2292 break;
2293 case '<':
2294 op_alt[j].decmem_ok = 1;
2295 break;
2296 case '>':
2297 op_alt[j].incmem_ok = 1;
2298 break;
2299 case 'V':
2300 op_alt[j].nonoffmem_ok = 1;
2301 break;
2302 case 'o':
2303 op_alt[j].offmem_ok = 1;
2304 break;
2305 case 'X':
2306 op_alt[j].anything_ok = 1;
2307 break;
2308
2309 case 'p':
2310 op_alt[j].is_address = 1;
2311 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2312 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2313 ADDRESS, SCRATCH)];
2314 break;
2315
2316 case 'g':
2317 case 'r':
2318 op_alt[j].cl =
2319 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2320 break;
2321
2322 default:
2323 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2324 {
2325 op_alt[j].memory_ok = 1;
2326 break;
2327 }
2328 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2329 {
2330 op_alt[j].is_address = 1;
2331 op_alt[j].cl
2332 = (reg_class_subunion
2333 [(int) op_alt[j].cl]
2334 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2335 ADDRESS, SCRATCH)]);
2336 break;
2337 }
2338
2339 op_alt[j].cl
2340 = (reg_class_subunion
2341 [(int) op_alt[j].cl]
2342 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2343 break;
2344 }
2345 p += CONSTRAINT_LEN (c, p);
2346 }
2347 }
2348 }
2349 }
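/* Illustrative sketch, not compiled; the function name is hypothetical.
   After extract_insn and preprocess_constraints, a pass can consult the
   digested recog_op_alt data instead of reparsing constraint strings.  */
#if 0
static bool
example_operand_allows_memory (int opno)
{
  int j;

  for (j = 0; j < recog_data.n_alternatives; j++)
    if (recog_data.alternative_enabled_p[j]
	&& recog_op_alt[opno][j].memory_ok)
      return true;
  return false;
}
#endif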
2350
2351 /* Check the operands of an insn against the insn's operand constraints
2352 and return 1 if they are valid.
2353 The information about the insn's operands, constraints, operand modes
2354 etc. is obtained from the global variables set up by extract_insn.
2355
2356 WHICH_ALTERNATIVE is set to a number which indicates which
2357 alternative of constraints was matched: 0 for the first alternative,
2358 1 for the next, etc.
2359
2360 In addition, when two operands are required to match
2361 and it happens that the output operand is (reg) while the
2362 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2363 make the output operand look like the input.
2364 This is because the output operand is the one the template will print.
2365
2366 This is used in final, just before printing the assembler code and by
2367 the routines that determine an insn's attribute.
2368
2369 If STRICT is positive, it means that we have been
2370 called after reload has been completed. In that case, we must
2371 do all checks strictly. If it is zero, it means that we have been called
2372 before reload has completed. In that case, we first try to see if we can
2373 find an alternative that matches strictly. If not, we try again, this
2374 time assuming that reload will fix up the insn. This provides a "best
2375 guess" for the alternative and is used to compute attributes of insns prior
2376 to reload. A negative value of STRICT is used for this internal call. */
2377
2378 struct funny_match
2379 {
2380 int this_op, other;
2381 };
2382
2383 int
2384 constrain_operands (int strict)
2385 {
2386 const char *constraints[MAX_RECOG_OPERANDS];
2387 int matching_operands[MAX_RECOG_OPERANDS];
2388 int earlyclobber[MAX_RECOG_OPERANDS];
2389 int c;
2390
2391 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2392 int funny_match_index;
2393
2394 which_alternative = 0;
2395 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2396 return 1;
2397
2398 for (c = 0; c < recog_data.n_operands; c++)
2399 {
2400 constraints[c] = recog_data.constraints[c];
2401 matching_operands[c] = -1;
2402 }
2403
2404 do
2405 {
2406 int seen_earlyclobber_at = -1;
2407 int opno;
2408 int lose = 0;
2409 funny_match_index = 0;
2410
2411 if (!recog_data.alternative_enabled_p[which_alternative])
2412 {
2413 int i;
2414
2415 for (i = 0; i < recog_data.n_operands; i++)
2416 constraints[i] = skip_alternative (constraints[i]);
2417
2418 which_alternative++;
2419 continue;
2420 }
2421
2422 for (opno = 0; opno < recog_data.n_operands; opno++)
2423 {
2424 rtx op = recog_data.operand[opno];
2425 enum machine_mode mode = GET_MODE (op);
2426 const char *p = constraints[opno];
2427 int offset = 0;
2428 int win = 0;
2429 int val;
2430 int len;
2431
2432 earlyclobber[opno] = 0;
2433
2434 /* A unary operator may be accepted by the predicate, but it
2435 is irrelevant for matching constraints. */
2436 if (UNARY_P (op))
2437 op = XEXP (op, 0);
2438
2439 if (GET_CODE (op) == SUBREG)
2440 {
2441 if (REG_P (SUBREG_REG (op))
2442 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2443 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2444 GET_MODE (SUBREG_REG (op)),
2445 SUBREG_BYTE (op),
2446 GET_MODE (op));
2447 op = SUBREG_REG (op);
2448 }
2449
2450 /* An empty constraint or empty alternative
2451 allows anything which matched the pattern. */
2452 if (*p == 0 || *p == ',')
2453 win = 1;
2454
2455 do
2456 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2457 {
2458 case '\0':
2459 len = 0;
2460 break;
2461 case ',':
2462 c = '\0';
2463 break;
2464
2465 case '?': case '!': case '*': case '%':
2466 case '=': case '+':
2467 break;
2468
2469 case '#':
2470 /* Ignore rest of this alternative as far as
2471 constraint checking is concerned. */
2472 do
2473 p++;
2474 while (*p && *p != ',');
2475 len = 0;
2476 break;
2477
2478 case '&':
2479 earlyclobber[opno] = 1;
2480 if (seen_earlyclobber_at < 0)
2481 seen_earlyclobber_at = opno;
2482 break;
2483
2484 case '0': case '1': case '2': case '3': case '4':
2485 case '5': case '6': case '7': case '8': case '9':
2486 {
2487 /* This operand must be the same as a previous one.
2488 This kind of constraint is used for instructions such
2489 as add when they take only two operands.
2490
2491 Note that the lower-numbered operand is passed first.
2492
2493 If we are not testing strictly, assume that this
2494 constraint will be satisfied. */
2495
2496 char *end;
2497 int match;
2498
2499 match = strtoul (p, &end, 10);
2500 p = end;
2501
2502 if (strict < 0)
2503 val = 1;
2504 else
2505 {
2506 rtx op1 = recog_data.operand[match];
2507 rtx op2 = recog_data.operand[opno];
2508
2509 /* A unary operator may be accepted by the predicate,
2510 but it is irrelevant for matching constraints. */
2511 if (UNARY_P (op1))
2512 op1 = XEXP (op1, 0);
2513 if (UNARY_P (op2))
2514 op2 = XEXP (op2, 0);
2515
2516 val = operands_match_p (op1, op2);
2517 }
2518
2519 matching_operands[opno] = match;
2520 matching_operands[match] = opno;
2521
2522 if (val != 0)
2523 win = 1;
2524
2525 /* If output is *x and input is *--x, arrange later
2526 to change the output to *--x as well, since the
2527 output op is the one that will be printed. */
2528 if (val == 2 && strict > 0)
2529 {
2530 funny_match[funny_match_index].this_op = opno;
2531 funny_match[funny_match_index++].other = match;
2532 }
2533 }
2534 len = 0;
2535 break;
2536
2537 case 'p':
2538 /* p is used for address_operands. When we are called by
2539 gen_reload, no one will have checked that the address is
2540 strictly valid, i.e., that all pseudos requiring hard regs
2541 have gotten them. */
2542 if (strict <= 0
2543 || (strict_memory_address_p (recog_data.operand_mode[opno],
2544 op)))
2545 win = 1;
2546 break;
2547
2548 /* No need to check general_operand again;
2549 it was done in insn-recog.c. Well, except that reload
2550 doesn't check the validity of its replacements, but
2551 that should only matter when there's a bug. */
2552 case 'g':
2553 /* Anything goes unless it is a REG and really has a hard reg
2554 but the hard reg is not in the class GENERAL_REGS. */
2555 if (REG_P (op))
2556 {
2557 if (strict < 0
2558 || GENERAL_REGS == ALL_REGS
2559 || (reload_in_progress
2560 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2561 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2562 win = 1;
2563 }
2564 else if (strict < 0 || general_operand (op, mode))
2565 win = 1;
2566 break;
2567
2568 case 'X':
2569 /* This is used for a MATCH_SCRATCH in the cases when
2570 we don't actually need anything. So anything goes
2571 any time. */
2572 win = 1;
2573 break;
2574
2575 case TARGET_MEM_CONSTRAINT:
2576 /* Memory operands must be valid, to the extent
2577 required by STRICT. */
2578 if (MEM_P (op))
2579 {
2580 if (strict > 0
2581 && !strict_memory_address_addr_space_p
2582 (GET_MODE (op), XEXP (op, 0),
2583 MEM_ADDR_SPACE (op)))
2584 break;
2585 if (strict == 0
2586 && !memory_address_addr_space_p
2587 (GET_MODE (op), XEXP (op, 0),
2588 MEM_ADDR_SPACE (op)))
2589 break;
2590 win = 1;
2591 }
2592 /* Before reload, accept what reload can turn into mem. */
2593 else if (strict < 0 && CONSTANT_P (op))
2594 win = 1;
2595 /* During reload, accept a pseudo.  */
2596 else if (reload_in_progress && REG_P (op)
2597 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2598 win = 1;
2599 break;
2600
2601 case '<':
2602 if (MEM_P (op)
2603 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2604 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2605 win = 1;
2606 break;
2607
2608 case '>':
2609 if (MEM_P (op)
2610 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2611 || GET_CODE (XEXP (op, 0)) == POST_INC))
2612 win = 1;
2613 break;
2614
2615 case 'E':
2616 case 'F':
2617 if (CONST_DOUBLE_AS_FLOAT_P (op)
2618 || (GET_CODE (op) == CONST_VECTOR
2619 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2620 win = 1;
2621 break;
2622
2623 case 'G':
2624 case 'H':
2625 if (CONST_DOUBLE_AS_FLOAT_P (op)
2626 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2627 win = 1;
2628 break;
2629
2630 case 's':
2631 if (CONST_SCALAR_INT_P (op))
2632 break;
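	      /* Fall through: 's' accepts any constant other than a scalar
		 integer, which the break above has already rejected.  */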
2633 case 'i':
2634 if (CONSTANT_P (op))
2635 win = 1;
2636 break;
2637
2638 case 'n':
2639 if (CONST_SCALAR_INT_P (op))
2640 win = 1;
2641 break;
2642
2643 case 'I':
2644 case 'J':
2645 case 'K':
2646 case 'L':
2647 case 'M':
2648 case 'N':
2649 case 'O':
2650 case 'P':
2651 if (CONST_INT_P (op)
2652 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2653 win = 1;
2654 break;
2655
2656 case 'V':
2657 if (MEM_P (op)
2658 && ((strict > 0 && ! offsettable_memref_p (op))
2659 || (strict < 0
2660 && !(CONSTANT_P (op) || MEM_P (op)))
2661 || (reload_in_progress
2662 && !(REG_P (op)
2663 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2664 win = 1;
2665 break;
2666
2667 case 'o':
2668 if ((strict > 0 && offsettable_memref_p (op))
2669 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2670 /* Before reload, accept what reload can handle. */
2671 || (strict < 0
2672 && (CONSTANT_P (op) || MEM_P (op)))
2673 /* During reload, accept a pseudo.  */
2674 || (reload_in_progress && REG_P (op)
2675 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2676 win = 1;
2677 break;
2678
2679 default:
2680 {
2681 enum reg_class cl;
2682
2683 cl = (c == 'r'
2684 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2685 if (cl != NO_REGS)
2686 {
2687 if (strict < 0
2688 || (strict == 0
2689 && REG_P (op)
2690 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2691 || (strict == 0 && GET_CODE (op) == SCRATCH)
2692 || (REG_P (op)
2693 && reg_fits_class_p (op, cl, offset, mode)))
2694 win = 1;
2695 }
2696 #ifdef EXTRA_CONSTRAINT_STR
2697 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2698 win = 1;
2699
2700 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2701 /* Every memory operand can be reloaded to fit. */
2702 && ((strict < 0 && MEM_P (op))
2703 /* Before reload, accept what reload can turn
2704 into mem. */
2705 || (strict < 0 && CONSTANT_P (op))
2706 /* During reload, accept a pseudo.  */
2707 || (reload_in_progress && REG_P (op)
2708 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2709 win = 1;
2710 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2711 /* Every address operand can be reloaded to fit. */
2712 && strict < 0)
2713 win = 1;
2714 /* Cater to architectures like IA-64 that define extra memory
2715 constraints without using define_memory_constraint. */
2716 else if (reload_in_progress
2717 && REG_P (op)
2718 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2719 && reg_renumber[REGNO (op)] < 0
2720 && reg_equiv_mem (REGNO (op)) != 0
2721 && EXTRA_CONSTRAINT_STR
2722 (reg_equiv_mem (REGNO (op)), c, p))
2723 win = 1;
2724 #endif
2725 break;
2726 }
2727 }
2728 while (p += len, c);
2729
2730 constraints[opno] = p;
2731 /* If this operand did not win somehow,
2732 this alternative loses. */
2733 if (! win)
2734 lose = 1;
2735 }
2736 /* This alternative won; the operands are ok.
2737 Change whichever operands this alternative says to change. */
2738 if (! lose)
2739 {
2740 int opno, eopno;
2741
2742 /* See if any earlyclobber operand conflicts with some other
2743 operand. */
2744
2745 if (strict > 0 && seen_earlyclobber_at >= 0)
2746 for (eopno = seen_earlyclobber_at;
2747 eopno < recog_data.n_operands;
2748 eopno++)
2749 /* Ignore earlyclobber operands now in memory,
2750 because we would often report failure when we have
2751 two memory operands, one of which was formerly a REG. */
2752 if (earlyclobber[eopno]
2753 && REG_P (recog_data.operand[eopno]))
2754 for (opno = 0; opno < recog_data.n_operands; opno++)
2755 if ((MEM_P (recog_data.operand[opno])
2756 || recog_data.operand_type[opno] != OP_OUT)
2757 && opno != eopno
2758 /* Ignore things like match_operator operands. */
2759 && *recog_data.constraints[opno] != 0
2760 && ! (matching_operands[opno] == eopno
2761 && operands_match_p (recog_data.operand[opno],
2762 recog_data.operand[eopno]))
2763 && ! safe_from_earlyclobber (recog_data.operand[opno],
2764 recog_data.operand[eopno]))
2765 lose = 1;
2766
2767 if (! lose)
2768 {
2769 while (--funny_match_index >= 0)
2770 {
2771 recog_data.operand[funny_match[funny_match_index].other]
2772 = recog_data.operand[funny_match[funny_match_index].this_op];
2773 }
2774
2775 #ifdef AUTO_INC_DEC
2776 /* For operands without < or > constraints, reject side-effects.  */
2777 if (recog_data.is_asm)
2778 {
2779 for (opno = 0; opno < recog_data.n_operands; opno++)
2780 if (MEM_P (recog_data.operand[opno]))
2781 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2782 {
2783 case PRE_INC:
2784 case POST_INC:
2785 case PRE_DEC:
2786 case POST_DEC:
2787 case PRE_MODIFY:
2788 case POST_MODIFY:
2789 if (strchr (recog_data.constraints[opno], '<') == NULL
2790 && strchr (recog_data.constraints[opno], '>')
2791 == NULL)
2792 return 0;
2793 break;
2794 default:
2795 break;
2796 }
2797 }
2798 #endif
2799 return 1;
2800 }
2801 }
2802
2803 which_alternative++;
2804 }
2805 while (which_alternative < recog_data.n_alternatives);
2806
2807 which_alternative = -1;
2808 /* If we are about to reject this, but we are not to test strictly,
2809 try a very loose test. Only return failure if it fails also. */
2810 if (strict == 0)
2811 return constrain_operands (-1);
2812 else
2813 return 0;
2814 }
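/* Illustrative sketch, not compiled: the usual calling convention for the
   STRICT protocol documented above.  Callers pass 1 after reload and 0
   before it; the -1 "assume reload will fix it" mode is the internal
   retry at the tail of constrain_operands itself.  */
#if 0
  extract_insn (insn);
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
#endif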
2815
2816 /* Return true iff OPERAND (assumed to be a REG rtx)
2817 is a hard reg in class CL when its regno is offset by OFFSET
2818 and changed to mode MODE.
2819 If REG occupies multiple hard regs, all of them must be in CLASS. */
2820
2821 bool
2822 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2823 enum machine_mode mode)
2824 {
2825 unsigned int regno = REGNO (operand);
2826
2827 if (cl == NO_REGS)
2828 return false;
2829
2830 /* Regno must not be a pseudo register. Offset may be negative. */
2831 return (HARD_REGISTER_NUM_P (regno)
2832 && HARD_REGISTER_NUM_P (regno + offset)
2833 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2834 regno + offset));
2835 }
2836 \f
2837 /* Split a single instruction.  Helper function for split_all_insns and
2838 split_all_insns_noflow.  Return the last insn in the sequence if
2839 successful, or NULL if unsuccessful. */
2840
2841 static rtx
2842 split_insn (rtx insn)
2843 {
2844 /* Split insns here to get max fine-grain parallelism. */
2845 rtx first = PREV_INSN (insn);
2846 rtx last = try_split (PATTERN (insn), insn, 1);
2847 rtx insn_set, last_set, note;
2848
2849 if (last == insn)
2850 return NULL_RTX;
2851
2852 /* If the original instruction was a single set that was known to be
2853 equivalent to a constant, see if we can say the same about the last
2854 instruction in the split sequence. The two instructions must set
2855 the same destination. */
2856 insn_set = single_set (insn);
2857 if (insn_set)
2858 {
2859 last_set = single_set (last);
2860 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2861 {
2862 note = find_reg_equal_equiv_note (insn);
2863 if (note && CONSTANT_P (XEXP (note, 0)))
2864 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2865 else if (CONSTANT_P (SET_SRC (insn_set)))
2866 set_unique_reg_note (last, REG_EQUAL,
2867 copy_rtx (SET_SRC (insn_set)));
2868 }
2869 }
2870
2871 /* try_split returns the NOTE that INSN became. */
2872 SET_INSN_DELETED (insn);
2873
2874 /* ??? Coddle to md files that generate subregs in post-reload
2875 splitters instead of computing the proper hard register. */
2876 if (reload_completed && first != last)
2877 {
2878 first = NEXT_INSN (first);
2879 for (;;)
2880 {
2881 if (INSN_P (first))
2882 cleanup_subreg_operands (first);
2883 if (first == last)
2884 break;
2885 first = NEXT_INSN (first);
2886 }
2887 }
2888
2889 return last;
2890 }
2891
2892 /* Split all insns in the function.  */
2893
2894 void
2895 split_all_insns (void)
2896 {
2897 sbitmap blocks;
2898 bool changed;
2899 basic_block bb;
2900
2901 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2902 bitmap_clear (blocks);
2903 changed = false;
2904
2905 FOR_EACH_BB_REVERSE (bb)
2906 {
2907 rtx insn, next;
2908 bool finish = false;
2909
2910 rtl_profile_for_bb (bb);
2911 for (insn = BB_HEAD (bb); !finish ; insn = next)
2912 {
2913 /* Can't use `next_real_insn' because that might go across
2914 CODE_LABELs and short-circuit basic blocks. */
2915 next = NEXT_INSN (insn);
2916 finish = (insn == BB_END (bb));
2917 if (INSN_P (insn))
2918 {
2919 rtx set = single_set (insn);
2920
2921 /* Don't split no-op move insns. These should silently
2922 disappear later in final. Splitting such insns would
2923 break the code that handles LIBCALL blocks. */
2924 if (set && set_noop_p (set))
2925 {
2926 /* Nops get in the way while scheduling, so delete them
2927 now if register allocation has already been done. It
2928 is too risky to try to do this before register
2929 allocation, and there are unlikely to be very many
2930 nops then anyway. */
2931 if (reload_completed)
2932 delete_insn_and_edges (insn);
2933 }
2934 else
2935 {
2936 if (split_insn (insn))
2937 {
2938 bitmap_set_bit (blocks, bb->index);
2939 changed = true;
2940 }
2941 }
2942 }
2943 }
2944 }
2945
2946 default_rtl_profile ();
2947 if (changed)
2948 find_many_sub_basic_blocks (blocks);
2949
2950 #ifdef ENABLE_CHECKING
2951 verify_flow_info ();
2952 #endif
2953
2954 sbitmap_free (blocks);
2955 }
2956
2957 /* Same as split_all_insns, but do not expect CFG to be available.
2958 Used by machine dependent reorg passes. */
2959
2960 unsigned int
2961 split_all_insns_noflow (void)
2962 {
2963 rtx next, insn;
2964
2965 for (insn = get_insns (); insn; insn = next)
2966 {
2967 next = NEXT_INSN (insn);
2968 if (INSN_P (insn))
2969 {
2970 /* Don't split no-op move insns. These should silently
2971 disappear later in final. Splitting such insns would
2972 break the code that handles LIBCALL blocks. */
2973 rtx set = single_set (insn);
2974 if (set && set_noop_p (set))
2975 {
2976 /* Nops get in the way while scheduling, so delete them
2977 now if register allocation has already been done. It
2978 is too risky to try to do this before register
2979 allocation, and there are unlikely to be very many
2980 nops then anyway.
2981
2982 ??? Should we use delete_insn when the CFG isn't valid? */
2983 if (reload_completed)
2984 delete_insn_and_edges (insn);
2985 }
2986 else
2987 split_insn (insn);
2988 }
2989 }
2990 return 0;
2991 }
2992 \f
2993 #ifdef HAVE_peephole2
2994 struct peep2_insn_data
2995 {
2996 rtx insn;
2997 regset live_before;
2998 };
2999
3000 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3001 static int peep2_current;
3002
3003 static bool peep2_do_rebuild_jump_labels;
3004 static bool peep2_do_cleanup_cfg;
3005
3006 /* The number of instructions available to match a peep2. */
3007 int peep2_current_count;
3008
3009 /* A non-insn marker indicating the last insn of the block.
3010 The live_before regset for this element is correct, indicating
3011 DF_LIVE_OUT for the block. */
3012 #define PEEP2_EOB pc_rtx
3013
3014 /* Wrap N to fit into the peep2_insn_data buffer. */
3015
3016 static int
3017 peep2_buf_position (int n)
3018 {
3019 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3020 n -= MAX_INSNS_PER_PEEP2 + 1;
3021 return n;
3022 }
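/* For instance (illustrative): the buffer is a ring of
   MAX_INSNS_PER_PEEP2 + 1 slots, so if MAX_INSNS_PER_PEEP2 were 5,
   peep2_buf_position (4 + 3) would wrap 7 around to slot 1.  */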
3023
3024 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3025 does not exist. Used by the recognizer to find the next insn to match
3026 in a multi-insn pattern. */
3027
3028 rtx
3029 peep2_next_insn (int n)
3030 {
3031 gcc_assert (n <= peep2_current_count);
3032
3033 n = peep2_buf_position (peep2_current + n);
3034
3035 return peep2_insn_data[n].insn;
3036 }
3037
3038 /* Return true if REGNO is dead before the Nth non-note insn
3039 after `current'. */
3040
3041 int
3042 peep2_regno_dead_p (int ofs, int regno)
3043 {
3044 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3045
3046 ofs = peep2_buf_position (peep2_current + ofs);
3047
3048 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3049
3050 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3051 }
3052
3053 /* Similarly for a REG. */
3054
3055 int
3056 peep2_reg_dead_p (int ofs, rtx reg)
3057 {
3058 int regno, n;
3059
3060 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3061
3062 ofs = peep2_buf_position (peep2_current + ofs);
3063
3064 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3065
3066 regno = REGNO (reg);
3067 n = hard_regno_nregs[regno][GET_MODE (reg)];
3068 while (--n >= 0)
3069 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3070 return 0;
3071 return 1;
3072 }
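/* Illustrative sketch: these predicates are normally called from the
   condition of a target's define_peephole2, e.g.

	"peep2_reg_dead_p (1, operands[1])"

   which accepts a one-insn match only if operand 1 is dead after the
   matched insn.  */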
3073
3074 /* Regno offset to be used in the register search. */
3075 static int search_ofs;
3076
3077 /* Try to find a hard register of mode MODE, matching the register class in
3078 CLASS_STR, which is available from the start of the insn at buffer
3079 position FROM and remains available until the end of the insn at
3080 buffer position TO.  If TO equals FROM, the only condition is that
3081 the register be available at the start of that insn.
3082 Registers that already have bits set in REG_SET will not be considered.
3083
3084 If an appropriate register is available, it will be returned and the
3085 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3086 returned. */
3087
3088 rtx
3089 peep2_find_free_register (int from, int to, const char *class_str,
3090 enum machine_mode mode, HARD_REG_SET *reg_set)
3091 {
3092 enum reg_class cl;
3093 HARD_REG_SET live;
3094 df_ref *def_rec;
3095 int i;
3096
3097 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3098 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3099
3100 from = peep2_buf_position (peep2_current + from);
3101 to = peep2_buf_position (peep2_current + to);
3102
3103 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3104 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3105
3106 while (from != to)
3107 {
3108 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3109
3110 /* Don't use registers set or clobbered by the insn. */
3111 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3112 *def_rec; def_rec++)
3113 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3114
3115 from = peep2_buf_position (from + 1);
3116 }
3117
3118 cl = (class_str[0] == 'r' ? GENERAL_REGS
3119 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3120
3121 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3122 {
3123 int raw_regno, regno, success, j;
3124
3125 /* Distribute the free registers as much as possible. */
3126 raw_regno = search_ofs + i;
3127 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3128 raw_regno -= FIRST_PSEUDO_REGISTER;
3129 #ifdef REG_ALLOC_ORDER
3130 regno = reg_alloc_order[raw_regno];
3131 #else
3132 regno = raw_regno;
3133 #endif
3134
3135 /* Can it support the mode we need? */
3136 if (! HARD_REGNO_MODE_OK (regno, mode))
3137 continue;
3138
3139 success = 1;
3140 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3141 {
3142 /* Don't allocate fixed registers. */
3143 if (fixed_regs[regno + j])
3144 {
3145 success = 0;
3146 break;
3147 }
3148 /* Don't allocate global registers. */
3149 if (global_regs[regno + j])
3150 {
3151 success = 0;
3152 break;
3153 }
3154 /* Make sure the register is of the right class. */
3155 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3156 {
3157 success = 0;
3158 break;
3159 }
3160 /* And that we don't create an extra save/restore. */
3161 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3162 {
3163 success = 0;
3164 break;
3165 }
3166
3167 if (! targetm.hard_regno_scratch_ok (regno + j))
3168 {
3169 success = 0;
3170 break;
3171 }
3172
3173 /* And we don't clobber traceback for noreturn functions. */
3174 if ((regno + j == FRAME_POINTER_REGNUM
3175 || regno + j == HARD_FRAME_POINTER_REGNUM)
3176 && (! reload_completed || frame_pointer_needed))
3177 {
3178 success = 0;
3179 break;
3180 }
3181
3182 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3183 || TEST_HARD_REG_BIT (live, regno + j))
3184 {
3185 success = 0;
3186 break;
3187 }
3188 }
3189
3190 if (success)
3191 {
3192 add_to_hard_reg_set (reg_set, mode, regno);
3193
3194 /* Start the next search with the next register. */
3195 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3196 raw_regno = 0;
3197 search_ofs = raw_regno;
3198
3199 return gen_rtx_REG (mode, regno);
3200 }
3201 }
3202
3203 search_ofs = 0;
3204 return NULL_RTX;
3205 }
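/* Illustrative sketch, not compiled; it assumes the preparation-statement
   context of a target's define_peephole2, where FAIL is available.  */
#if 0
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  /* Ask for an SImode general register free across the first two insns
     of the match; reject the peephole candidate if there is none.  */
  scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
#endif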
3206
3207 /* Forget all currently tracked instructions, only remember current
3208 LIVE regset. */
3209
3210 static void
3211 peep2_reinit_state (regset live)
3212 {
3213 int i;
3214
3215 /* Indicate that all slots except the last hold invalid data. */
3216 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3217 peep2_insn_data[i].insn = NULL_RTX;
3218 peep2_current_count = 0;
3219
3220 /* Indicate that the last slot contains live_after data. */
3221 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3222 peep2_current = MAX_INSNS_PER_PEEP2;
3223
3224 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3225 }
3226
3227 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3228 starting at INSN. Perform the replacement, removing the old insns and
3229 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3230 if the replacement is rejected. */
3231
3232 static rtx
3233 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3234 {
3235 int i;
3236 rtx last, eh_note, as_note, before_try, x;
3237 rtx old_insn, new_insn;
3238 bool was_call = false;
3239
3240 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3241 match more than one insn, or to be split into more than one insn. */
3242 old_insn = peep2_insn_data[peep2_current].insn;
3243 if (RTX_FRAME_RELATED_P (old_insn))
3244 {
3245 bool any_note = false;
3246 rtx note;
3247
3248 if (match_len != 0)
3249 return NULL;
3250
3251 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3252 may be in the stream for the purpose of register allocation. */
3253 if (active_insn_p (attempt))
3254 new_insn = attempt;
3255 else
3256 new_insn = next_active_insn (attempt);
3257 if (next_active_insn (new_insn))
3258 return NULL;
3259
3260 /* We have a 1-1 replacement. Copy over any frame-related info. */
3261 RTX_FRAME_RELATED_P (new_insn) = 1;
3262
3263 /* Allow the backend to fill in a note during the split. */
3264 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3265 switch (REG_NOTE_KIND (note))
3266 {
3267 case REG_FRAME_RELATED_EXPR:
3268 case REG_CFA_DEF_CFA:
3269 case REG_CFA_ADJUST_CFA:
3270 case REG_CFA_OFFSET:
3271 case REG_CFA_REGISTER:
3272 case REG_CFA_EXPRESSION:
3273 case REG_CFA_RESTORE:
3274 case REG_CFA_SET_VDRAP:
3275 any_note = true;
3276 break;
3277 default:
3278 break;
3279 }
3280
3281 /* If the backend didn't supply a note, copy one over. */
3282 if (!any_note)
3283 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3284 switch (REG_NOTE_KIND (note))
3285 {
3286 case REG_FRAME_RELATED_EXPR:
3287 case REG_CFA_DEF_CFA:
3288 case REG_CFA_ADJUST_CFA:
3289 case REG_CFA_OFFSET:
3290 case REG_CFA_REGISTER:
3291 case REG_CFA_EXPRESSION:
3292 case REG_CFA_RESTORE:
3293 case REG_CFA_SET_VDRAP:
3294 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3295 any_note = true;
3296 break;
3297 default:
3298 break;
3299 }
3300
3301 /* If there still isn't a note, make sure the unwind info sees the
3302 same expression as before the split. */
3303 if (!any_note)
3304 {
3305 rtx old_set, new_set;
3306
3307 /* The old insn had better have been simple, or annotated. */
3308 old_set = single_set (old_insn);
3309 gcc_assert (old_set != NULL);
3310
3311 new_set = single_set (new_insn);
3312 if (!new_set || !rtx_equal_p (new_set, old_set))
3313 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3314 }
3315
3316 /* Copy prologue/epilogue status. This is required in order to keep
3317 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3318 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3319 }
3320
3321 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3322 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3323 cfg-related call notes. */
3324 for (i = 0; i <= match_len; ++i)
3325 {
3326 int j;
3327 rtx note;
3328
3329 j = peep2_buf_position (peep2_current + i);
3330 old_insn = peep2_insn_data[j].insn;
3331 if (!CALL_P (old_insn))
3332 continue;
3333 was_call = true;
3334
3335 new_insn = attempt;
3336 while (new_insn != NULL_RTX)
3337 {
3338 if (CALL_P (new_insn))
3339 break;
3340 new_insn = NEXT_INSN (new_insn);
3341 }
3342
3343 gcc_assert (new_insn != NULL_RTX);
3344
3345 CALL_INSN_FUNCTION_USAGE (new_insn)
3346 = CALL_INSN_FUNCTION_USAGE (old_insn);
3347
3348 for (note = REG_NOTES (old_insn);
3349 note;
3350 note = XEXP (note, 1))
3351 switch (REG_NOTE_KIND (note))
3352 {
3353 case REG_NORETURN:
3354 case REG_SETJMP:
3355 case REG_TM:
3356 add_reg_note (new_insn, REG_NOTE_KIND (note),
3357 XEXP (note, 0));
3358 break;
3359 default:
3360 /* Discard all other reg notes. */
3361 break;
3362 }
3363
3364 /* Croak if there is another call in the sequence. */
3365 while (++i <= match_len)
3366 {
3367 j = peep2_buf_position (peep2_current + i);
3368 old_insn = peep2_insn_data[j].insn;
3369 gcc_assert (!CALL_P (old_insn));
3370 }
3371 break;
3372 }
3373
3374 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3375 move those notes over to the new sequence. */
3376 as_note = NULL;
3377 for (i = match_len; i >= 0; --i)
3378 {
3379 int j = peep2_buf_position (peep2_current + i);
3380 old_insn = peep2_insn_data[j].insn;
3381
3382 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3383 if (as_note)
3384 break;
3385 }
3386
3387 i = peep2_buf_position (peep2_current + match_len);
3388 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3389
3390 /* Replace the old sequence with the new. */
3391 last = emit_insn_after_setloc (attempt,
3392 peep2_insn_data[i].insn,
3393 INSN_LOCATION (peep2_insn_data[i].insn));
3394 before_try = PREV_INSN (insn);
3395 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3396
3397 /* Re-insert the EH_REGION notes. */
3398 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3399 {
3400 edge eh_edge;
3401 edge_iterator ei;
3402
3403 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3404 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3405 break;
3406
3407 if (eh_note)
3408 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3409
3410 if (eh_edge)
3411 for (x = last; x != before_try; x = PREV_INSN (x))
3412 if (x != BB_END (bb)
3413 && (can_throw_internal (x)
3414 || can_nonlocal_goto (x)))
3415 {
3416 edge nfte, nehe;
3417 int flags;
3418
3419 nfte = split_block (bb, x);
3420 flags = (eh_edge->flags
3421 & (EDGE_EH | EDGE_ABNORMAL));
3422 if (CALL_P (x))
3423 flags |= EDGE_ABNORMAL_CALL;
3424 nehe = make_edge (nfte->src, eh_edge->dest,
3425 flags);
3426
3427 nehe->probability = eh_edge->probability;
3428 nfte->probability
3429 = REG_BR_PROB_BASE - nehe->probability;
3430
3431 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3432 bb = nfte->src;
3433 eh_edge = nehe;
3434 }
3435
3436 /* The replacement may have turned a possibly trapping insn into
3437 a non-trapping one; zap any outgoing edges that are now dead. */
3438 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3439 }
3440
3441 /* Re-insert the ARGS_SIZE notes. */
3442 if (as_note)
3443 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3444
3445 /* If we generated a jump instruction, it won't have
3446 JUMP_LABEL set. Recompute after we're done. */
3447 for (x = last; x != before_try; x = PREV_INSN (x))
3448 if (JUMP_P (x))
3449 {
3450 peep2_do_rebuild_jump_labels = true;
3451 break;
3452 }
3453
3454 return last;
3455 }
3456
3457 /* After performing a replacement in basic block BB, fix up the life
3458 information in our buffer. LAST is the last of the insns that we
3459 emitted as a replacement. PREV is the insn before the start of
3460 the replacement. MATCH_LEN is the number of instructions that were
3461 matched, and which now need to be replaced in the buffer. */
3462
3463 static void
3464 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3465 {
3466 int i = peep2_buf_position (peep2_current + match_len + 1);
3467 rtx x;
3468 regset_head live;
3469
3470 INIT_REG_SET (&live);
3471 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3472
3473 gcc_assert (peep2_current_count >= match_len + 1);
3474 peep2_current_count -= match_len + 1;
3475
3476 x = last;
3477 do
3478 {
3479 if (INSN_P (x))
3480 {
3481 df_insn_rescan (x);
3482 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3483 {
3484 peep2_current_count++;
3485 if (--i < 0)
3486 i = MAX_INSNS_PER_PEEP2;
3487 peep2_insn_data[i].insn = x;
3488 df_simulate_one_insn_backwards (bb, x, &live);
3489 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3490 }
3491 }
3492 x = PREV_INSN (x);
3493 }
3494 while (x != prev);
3495 CLEAR_REG_SET (&live);
3496
3497 peep2_current = i;
3498 }
3499
3500 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3501 Return true if we added it, false otherwise. The caller will try to match
3502 peepholes against the buffer if we return false; otherwise it will try to
3503 add more instructions to the buffer. */
3504
3505 static bool
3506 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3507 {
3508 int pos;
3509
3510 /* Once we have filled the maximum number of insns the buffer can hold,
3511 allow the caller to match the insns against peepholes. We wait until
3512 the buffer is full in case the target has similar peepholes of different
3513 length; we always want to match the longest if possible. */
3514 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3515 return false;
3516
3517 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3518 any other pattern, lest it change the semantics of the frame info. */
3519 if (RTX_FRAME_RELATED_P (insn))
3520 {
3521 /* Let the buffer drain first. */
3522 if (peep2_current_count > 0)
3523 return false;
3524 /* Now the insn will be the only thing in the buffer. */
3525 }
3526
3527 pos = peep2_buf_position (peep2_current + peep2_current_count);
3528 peep2_insn_data[pos].insn = insn;
3529 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3530 peep2_current_count++;
3531
3532 df_simulate_one_insn_forwards (bb, insn, live);
3533 return true;
3534 }
3535
3536 /* Perform the peephole2 optimization pass. */
3537
3538 static void
3539 peephole2_optimize (void)
3540 {
3541 rtx insn;
3542 bitmap live;
3543 int i;
3544 basic_block bb;
3545
3546 peep2_do_cleanup_cfg = false;
3547 peep2_do_rebuild_jump_labels = false;
3548
3549 df_set_flags (DF_LR_RUN_DCE);
3550 df_note_add_problem ();
3551 df_analyze ();
3552
3553 /* Initialize the regsets we're going to use. */
3554 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3555 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3556 search_ofs = 0;
3557 live = BITMAP_ALLOC (&reg_obstack);
3558
3559 FOR_EACH_BB_REVERSE (bb)
3560 {
3561 bool past_end = false;
3562 int pos;
3563
3564 rtl_profile_for_bb (bb);
3565
3566 /* Start up propagation. */
3567 bitmap_copy (live, DF_LR_IN (bb));
3568 df_simulate_initialize_forwards (bb, live);
3569 peep2_reinit_state (live);
3570
3571 insn = BB_HEAD (bb);
3572 for (;;)
3573 {
3574 rtx attempt, head;
3575 int match_len;
3576
3577 if (!past_end && !NONDEBUG_INSN_P (insn))
3578 {
3579 next_insn:
3580 insn = NEXT_INSN (insn);
3581 if (insn == NEXT_INSN (BB_END (bb)))
3582 past_end = true;
3583 continue;
3584 }
3585 if (!past_end && peep2_fill_buffer (bb, insn, live))
3586 goto next_insn;
3587
3588 /* If we did not fill an empty buffer, it signals the end of the
3589 block. */
3590 if (peep2_current_count == 0)
3591 break;
3592
3593 /* The buffer filled to the current maximum, so try to match. */
3594
3595 pos = peep2_buf_position (peep2_current + peep2_current_count);
3596 peep2_insn_data[pos].insn = PEEP2_EOB;
3597 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3598
3599 /* Match the peephole. */
3600 head = peep2_insn_data[peep2_current].insn;
3601 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3602 if (attempt != NULL)
3603 {
3604 rtx last = peep2_attempt (bb, head, match_len, attempt);
3605 if (last)
3606 {
3607 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3608 continue;
3609 }
3610 }
3611
3612 /* No match: advance the buffer by one insn. */
3613 peep2_current = peep2_buf_position (peep2_current + 1);
3614 peep2_current_count--;
3615 }
3616 }
3617
3618 default_rtl_profile ();
3619 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3620 BITMAP_FREE (peep2_insn_data[i].live_before);
3621 BITMAP_FREE (live);
3622 if (peep2_do_rebuild_jump_labels)
3623 rebuild_jump_labels (get_insns ());
3624 }
3625 #endif /* HAVE_peephole2 */
3626
3627 /* Common predicates for use with define_bypass. */
3628
3629 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3630 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3631 must be either a single_set or a PARALLEL with SETs inside. */
3632
3633 int
3634 store_data_bypass_p (rtx out_insn, rtx in_insn)
3635 {
3636 rtx out_set, in_set;
3637 rtx out_pat, in_pat;
3638 rtx out_exp, in_exp;
3639 int i, j;
3640
3641 in_set = single_set (in_insn);
3642 if (in_set)
3643 {
3644 if (!MEM_P (SET_DEST (in_set)))
3645 return false;
3646
3647 out_set = single_set (out_insn);
3648 if (out_set)
3649 {
3650 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3651 return false;
3652 }
3653 else
3654 {
3655 out_pat = PATTERN (out_insn);
3656
3657 if (GET_CODE (out_pat) != PARALLEL)
3658 return false;
3659
3660 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3661 {
3662 out_exp = XVECEXP (out_pat, 0, i);
3663
3664 if (GET_CODE (out_exp) == CLOBBER)
3665 continue;
3666
3667 gcc_assert (GET_CODE (out_exp) == SET);
3668
3669 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3670 return false;
3671 }
3672 }
3673 }
3674 else
3675 {
3676 in_pat = PATTERN (in_insn);
3677 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3678
3679 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3680 {
3681 in_exp = XVECEXP (in_pat, 0, i);
3682
3683 if (GET_CODE (in_exp) == CLOBBER)
3684 continue;
3685
3686 gcc_assert (GET_CODE (in_exp) == SET);
3687
3688 if (!MEM_P (SET_DEST (in_exp)))
3689 return false;
3690
3691 out_set = single_set (out_insn);
3692 if (out_set)
3693 {
3694 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3695 return false;
3696 }
3697 else
3698 {
3699 out_pat = PATTERN (out_insn);
3700 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3701
3702 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3703 {
3704 out_exp = XVECEXP (out_pat, 0, j);
3705
3706 if (GET_CODE (out_exp) == CLOBBER)
3707 continue;
3708
3709 gcc_assert (GET_CODE (out_exp) == SET);
3710
3711 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3712 return false;
3713 }
3714 }
3715 }
3716 }
3717
3718 return true;
3719 }
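/* Illustrative sketch: in a target's pipeline description this predicate
   serves as a define_bypass guard, e.g. (hypothetical reservation names)

	(define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   so the shorter latency applies only when the dependency is on the data
   being stored, not on the store's address.  */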
3720
3721 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3722 condition, and not the THEN or ELSE branch. OUT_INSN may be a single or
3723 multiple set; IN_INSN should be a single_set for the result to be meaningful,
3724 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3725
3726 int
3727 if_test_bypass_p (rtx out_insn, rtx in_insn)
3728 {
3729 rtx out_set, in_set;
3730
3731 in_set = single_set (in_insn);
3732 if (! in_set)
3733 {
3734 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3735 return false;
3736 }
3737
3738 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3739 return false;
3740 in_set = SET_SRC (in_set);
3741
3742 out_set = single_set (out_insn);
3743 if (out_set)
3744 {
3745 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3746 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3747 return false;
3748 }
3749 else
3750 {
3751 rtx out_pat;
3752 int i;
3753
3754 out_pat = PATTERN (out_insn);
3755 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3756
3757 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3758 {
3759 rtx exp = XVECEXP (out_pat, 0, i);
3760
3761 if (GET_CODE (exp) == CLOBBER)
3762 continue;
3763
3764 gcc_assert (GET_CODE (exp) == SET);
3765
3766 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3767 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3768 return false;
3769 }
3770 }
3771
3772 return true;
3773 }
3774 \f
3775 static bool
3776 gate_handle_peephole2 (void)
3777 {
3778 return (optimize > 0 && flag_peephole2);
3779 }
3780
3781 static unsigned int
3782 rest_of_handle_peephole2 (void)
3783 {
3784 #ifdef HAVE_peephole2
3785 peephole2_optimize ();
3786 #endif
3787 return 0;
3788 }
3789
3790 namespace {
3791
3792 const pass_data pass_data_peephole2 =
3793 {
3794 RTL_PASS, /* type */
3795 "peephole2", /* name */
3796 OPTGROUP_NONE, /* optinfo_flags */
3797 true, /* has_gate */
3798 true, /* has_execute */
3799 TV_PEEPHOLE2, /* tv_id */
3800 0, /* properties_required */
3801 0, /* properties_provided */
3802 0, /* properties_destroyed */
3803 0, /* todo_flags_start */
3804 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3805 };
3806
3807 class pass_peephole2 : public rtl_opt_pass
3808 {
3809 public:
3810 pass_peephole2 (gcc::context *ctxt)
3811 : rtl_opt_pass (pass_data_peephole2, ctxt)
3812 {}
3813
3814 /* opt_pass methods: */
3815 /* The epiphany backend creates a second instance of this pass, so we need
3816 a clone method. */
3817 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3818 bool gate () { return gate_handle_peephole2 (); }
3819 unsigned int execute () { return rest_of_handle_peephole2 (); }
3820
3821 }; // class pass_peephole2
3822
3823 } // anon namespace
3824
3825 rtl_opt_pass *
3826 make_pass_peephole2 (gcc::context *ctxt)
3827 {
3828 return new pass_peephole2 (ctxt);
3829 }
3830
3831 static unsigned int
3832 rest_of_handle_split_all_insns (void)
3833 {
3834 split_all_insns ();
3835 return 0;
3836 }
3837
3838 namespace {
3839
3840 const pass_data pass_data_split_all_insns =
3841 {
3842 RTL_PASS, /* type */
3843 "split1", /* name */
3844 OPTGROUP_NONE, /* optinfo_flags */
3845 false, /* has_gate */
3846 true, /* has_execute */
3847 TV_NONE, /* tv_id */
3848 0, /* properties_required */
3849 0, /* properties_provided */
3850 0, /* properties_destroyed */
3851 0, /* todo_flags_start */
3852 0, /* todo_flags_finish */
3853 };
3854
3855 class pass_split_all_insns : public rtl_opt_pass
3856 {
3857 public:
3858 pass_split_all_insns (gcc::context *ctxt)
3859 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3860 {}
3861
3862 /* opt_pass methods: */
3863 /* The epiphany backend creates a second instance of this pass, so
3864 we need a clone method. */
3865 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3866 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3867
3868 }; // class pass_split_all_insns
3869
3870 } // anon namespace
3871
3872 rtl_opt_pass *
3873 make_pass_split_all_insns (gcc::context *ctxt)
3874 {
3875 return new pass_split_all_insns (ctxt);
3876 }
3877
3878 static unsigned int
3879 rest_of_handle_split_after_reload (void)
3880 {
3881 /* If optimizing, then go ahead and split insns now. */
3882 #ifndef STACK_REGS
3883 if (optimize > 0)
3884 #endif
3885 split_all_insns ();
3886 return 0;
3887 }
3888
3889 namespace {
3890
3891 const pass_data pass_data_split_after_reload =
3892 {
3893 RTL_PASS, /* type */
3894 "split2", /* name */
3895 OPTGROUP_NONE, /* optinfo_flags */
3896 false, /* has_gate */
3897 true, /* has_execute */
3898 TV_NONE, /* tv_id */
3899 0, /* properties_required */
3900 0, /* properties_provided */
3901 0, /* properties_destroyed */
3902 0, /* todo_flags_start */
3903 0, /* todo_flags_finish */
3904 };
3905
3906 class pass_split_after_reload : public rtl_opt_pass
3907 {
3908 public:
3909 pass_split_after_reload (gcc::context *ctxt)
3910 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3911 {}
3912
3913 /* opt_pass methods: */
3914 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3915
3916 }; // class pass_split_after_reload
3917
3918 } // anon namespace
3919
3920 rtl_opt_pass *
3921 make_pass_split_after_reload (gcc::context *ctxt)
3922 {
3923 return new pass_split_after_reload (ctxt);
3924 }
3925
3926 static bool
3927 gate_handle_split_before_regstack (void)
3928 {
3929 #if HAVE_ATTR_length && defined (STACK_REGS)
3930 /* If flow2 creates new instructions which need splitting, and
3931 scheduling after reload is not done, they might not be split
3932 until final, which doesn't allow splitting when HAVE_ATTR_length
3933 is defined. */
3934 # ifdef INSN_SCHEDULING
3935 return (optimize && !flag_schedule_insns_after_reload);
3936 # else
3937 return (optimize);
3938 # endif
3939 #else
3940 return 0;
3941 #endif
3942 }
3943
3944 static unsigned int
3945 rest_of_handle_split_before_regstack (void)
3946 {
3947 split_all_insns ();
3948 return 0;
3949 }
3950
3951 namespace {
3952
3953 const pass_data pass_data_split_before_regstack =
3954 {
3955 RTL_PASS, /* type */
3956 "split3", /* name */
3957 OPTGROUP_NONE, /* optinfo_flags */
3958 true, /* has_gate */
3959 true, /* has_execute */
3960 TV_NONE, /* tv_id */
3961 0, /* properties_required */
3962 0, /* properties_provided */
3963 0, /* properties_destroyed */
3964 0, /* todo_flags_start */
3965 0, /* todo_flags_finish */
3966 };
3967
3968 class pass_split_before_regstack : public rtl_opt_pass
3969 {
3970 public:
3971 pass_split_before_regstack (gcc::context *ctxt)
3972 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3973 {}
3974
3975 /* opt_pass methods: */
3976 bool gate () { return gate_handle_split_before_regstack (); }
3977 unsigned int execute () {
3978 return rest_of_handle_split_before_regstack ();
3979 }
3980
3981 }; // class pass_split_before_regstack
3982
3983 } // anon namespace
3984
3985 rtl_opt_pass *
3986 make_pass_split_before_regstack (gcc::context *ctxt)
3987 {
3988 return new pass_split_before_regstack (ctxt);
3989 }
3990
3991 static bool
3992 gate_handle_split_before_sched2 (void)
3993 {
3994 #ifdef INSN_SCHEDULING
3995 return optimize > 0 && flag_schedule_insns_after_reload;
3996 #else
3997 return 0;
3998 #endif
3999 }
4000
4001 static unsigned int
4002 rest_of_handle_split_before_sched2 (void)
4003 {
4004 #ifdef INSN_SCHEDULING
4005 split_all_insns ();
4006 #endif
4007 return 0;
4008 }
4009
4010 namespace {
4011
4012 const pass_data pass_data_split_before_sched2 =
4013 {
4014 RTL_PASS, /* type */
4015 "split4", /* name */
4016 OPTGROUP_NONE, /* optinfo_flags */
4017 true, /* has_gate */
4018 true, /* has_execute */
4019 TV_NONE, /* tv_id */
4020 0, /* properties_required */
4021 0, /* properties_provided */
4022 0, /* properties_destroyed */
4023 0, /* todo_flags_start */
4024 TODO_verify_flow, /* todo_flags_finish */
4025 };
4026
4027 class pass_split_before_sched2 : public rtl_opt_pass
4028 {
4029 public:
4030 pass_split_before_sched2 (gcc::context *ctxt)
4031 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4032 {}
4033
4034 /* opt_pass methods: */
4035 bool gate () { return gate_handle_split_before_sched2 (); }
4036 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4037
4038 }; // class pass_split_before_sched2
4039
4040 } // anon namespace
4041
4042 rtl_opt_pass *
4043 make_pass_split_before_sched2 (gcc::context *ctxt)
4044 {
4045 return new pass_split_before_sched2 (ctxt);
4046 }
4047
4048 /* The placement of the splitting that we do for shorten_branches
4049 depends on whether regstack is used by the target or not. */
4050 static bool
4051 gate_do_final_split (void)
4052 {
4053 #if HAVE_ATTR_length && !defined (STACK_REGS)
4054 return 1;
4055 #else
4056 return 0;
4057 #endif
4058 }
4059
4060 namespace {
4061
4062 const pass_data pass_data_split_for_shorten_branches =
4063 {
4064 RTL_PASS, /* type */
4065 "split5", /* name */
4066 OPTGROUP_NONE, /* optinfo_flags */
4067 true, /* has_gate */
4068 true, /* has_execute */
4069 TV_NONE, /* tv_id */
4070 0, /* properties_required */
4071 0, /* properties_provided */
4072 0, /* properties_destroyed */
4073 0, /* todo_flags_start */
4074 TODO_verify_rtl_sharing, /* todo_flags_finish */
4075 };
4076
4077 class pass_split_for_shorten_branches : public rtl_opt_pass
4078 {
4079 public:
4080 pass_split_for_shorten_branches (gcc::context *ctxt)
4081 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4082 {}
4083
4084 /* opt_pass methods: */
4085 bool gate () { return gate_do_final_split (); }
4086 unsigned int execute () { return split_all_insns_noflow (); }
4087
4088 }; // class pass_split_for_shorten_branches
4089
4090 } // anon namespace
4091
4092 rtl_opt_pass *
4093 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4094 {
4095 return new pass_split_for_shorten_branches (ctxt);
4096 }