gcc/recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl-error.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 #include "hard-reg-set.h"
31 #include "recog.h"
32 #include "regs.h"
33 #include "addresses.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "basic-block.h"
38 #include "reload.h"
39 #include "target.h"
40 #include "tree-pass.h"
41 #include "df.h"
42 #include "insn-codes.h"
43
44 #ifndef STACK_PUSH_CODE
45 #ifdef STACK_GROWS_DOWNWARD
46 #define STACK_PUSH_CODE PRE_DEC
47 #else
48 #define STACK_PUSH_CODE PRE_INC
49 #endif
50 #endif
51
52 #ifndef STACK_POP_CODE
53 #ifdef STACK_GROWS_DOWNWARD
54 #define STACK_POP_CODE POST_INC
55 #else
56 #define STACK_POP_CODE POST_DEC
57 #endif
58 #endif
59
60 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
63
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in reginfo.c and final.c and reload.c.
69
70 init_recog and init_recog_no_volatile are responsible for setting this. */
71
72 int volatile_ok;
73
74 struct recog_data_d recog_data;
75
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
82
83 int which_alternative;
84
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
88
89 int reload_completed;
90
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
93
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
97
98 void
99 init_recog_no_volatile (void)
100 {
101 volatile_ok = 0;
102 }
103
104 void
105 init_recog (void)
106 {
107 volatile_ok = 1;
108 }
109
110 \f
111 /* Return true if labels in asm operands BODY are LABEL_REFs. */
112
113 static bool
114 asm_labels_ok (rtx body)
115 {
116 rtx asmop;
117 int i;
118
119 asmop = extract_asm_operands (body);
120 if (asmop == NULL_RTX)
121 return true;
122
123 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
124 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
125 return false;
126
127 return true;
128 }
129
130 /* Check that X is an insn-body for an `asm' with operands
131 and that the operands mentioned in it are legitimate. */
132
133 int
134 check_asm_operands (rtx x)
135 {
136 int noperands;
137 rtx *operands;
138 const char **constraints;
139 int i;
140
141 if (!asm_labels_ok (x))
142 return 0;
143
144 /* Post-reload, be more strict with things. */
145 if (reload_completed)
146 {
147 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
148 extract_insn (make_insn_raw (x));
149 constrain_operands (1);
150 return which_alternative >= 0;
151 }
152
153 noperands = asm_noperands (x);
154 if (noperands < 0)
155 return 0;
156 if (noperands == 0)
157 return 1;
158
159 operands = XALLOCAVEC (rtx, noperands);
160 constraints = XALLOCAVEC (const char *, noperands);
161
162 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
163
164 for (i = 0; i < noperands; i++)
165 {
166 const char *c = constraints[i];
167 if (c[0] == '%')
168 c++;
169 if (! asm_operand_ok (operands[i], c, constraints))
170 return 0;
171 }
172
173 return 1;
174 }
175 \f
176 /* Static data for the next two routines. */
177
178 typedef struct change_t
179 {
180 rtx object;
181 int old_code;
182 rtx *loc;
183 rtx old;
184 bool unshare;
185 } change_t;
186
187 static change_t *changes;
188 static int changes_allocated;
189
190 static int num_changes = 0;
191
192 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
193 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
194 the change is simply made.
195
196 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
197 will be called with the address and mode as parameters. If OBJECT is
198 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
199 the change in place.
200
201 IN_GROUP is nonzero if this is part of a group of changes that must be
202 performed as a group. In that case, the changes will be stored. The
203 function `apply_change_group' will validate and apply the changes.
204
205 If IN_GROUP is zero, this is a single change. Try to recognize the insn
206 or validate the memory reference with the change applied. If the result
207 is not valid for the machine, suppress the change and return zero.
208 Otherwise, perform the change and return 1. */
209
210 static bool
211 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
212 {
213 rtx old = *loc;
214
215 if (old == new_rtx || rtx_equal_p (old, new_rtx))
216 return 1;
217
218 gcc_assert (in_group != 0 || num_changes == 0);
219
220 *loc = new_rtx;
221
222 /* Save the information describing this change. */
223 if (num_changes >= changes_allocated)
224 {
225 if (changes_allocated == 0)
226 /* This value allows for repeated substitutions inside complex
227 indexed addresses, or changes in up to 5 insns. */
228 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 else
230 changes_allocated *= 2;
231
232 changes = XRESIZEVEC (change_t, changes, changes_allocated);
233 }
234
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
238 changes[num_changes].unshare = unshare;
239
240 if (object && !MEM_P (object))
241 {
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
246 }
247
248 num_changes++;
249
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
252
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
257 }
258
259 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
260 UNSHARE to false. */
261
262 bool
263 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
264 {
265 return validate_change_1 (object, loc, new_rtx, in_group, false);
266 }
267
268 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
269 UNSHARE to true. */
270
271 bool
272 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
273 {
274 return validate_change_1 (object, loc, new_rtx, in_group, true);
275 }
276
277
278 /* Keep X canonicalized if some changes have made it non-canonical; only
279 modifies the operands of X, not (for example) its code. Simplifications
280 are not the job of this routine.
281
282 Return true if anything was changed. */
283 bool
284 canonicalize_change_group (rtx insn, rtx x)
285 {
286 if (COMMUTATIVE_P (x)
287 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
288 {
289 /* Oops, the caller has made X no longer canonical.
290 Let's redo the changes in the correct order. */
291 rtx tem = XEXP (x, 0);
292 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
293 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
294 return true;
295 }
296 else
297 return false;
298 }
299
300
301 /* This subroutine of apply_change_group verifies whether the changes to INSN
302 were valid; i.e. whether INSN can still be recognized.
303
304 If IN_GROUP is true clobbers which have to be added in order to
305 match the instructions will be added to the current change group.
306 Otherwise the changes will take effect immediately. */
307
308 int
309 insn_invalid_p (rtx insn, bool in_group)
310 {
311 rtx pat = PATTERN (insn);
312 int num_clobbers = 0;
313 /* If we are before reload and the pattern is a SET, see if we can add
314 clobbers. */
315 int icode = recog (pat, insn,
316 (GET_CODE (pat) == SET
317 && ! reload_completed && ! reload_in_progress)
318 ? &num_clobbers : 0);
319 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
320
321
322 /* If this is an asm and the operands aren't legal, then fail. Likewise if
323 this is not an asm and the insn wasn't recognized. */
324 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
325 || (!is_asm && icode < 0))
326 return 1;
327
328 /* If we have to add CLOBBERs, fail if we have to add ones that reference
329 hard registers since our callers can't know if they are live or not.
330 Otherwise, add them. */
331 if (num_clobbers > 0)
332 {
333 rtx newpat;
334
335 if (added_clobbers_hard_reg_p (icode))
336 return 1;
337
338 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
339 XVECEXP (newpat, 0, 0) = pat;
340 add_clobbers (newpat, icode);
341 if (in_group)
342 validate_change (insn, &PATTERN (insn), newpat, 1);
343 else
344 PATTERN (insn) = pat = newpat;
345 }
346
347 /* After reload, verify that all constraints are satisfied. */
348 if (reload_completed)
349 {
350 extract_insn (insn);
351
352 if (! constrain_operands (1))
353 return 1;
354 }
355
356 INSN_CODE (insn) = icode;
357 return 0;
358 }
359
360 /* Return number of changes made and not validated yet. */
361 int
362 num_changes_pending (void)
363 {
364 return num_changes;
365 }
366
367 /* Tentatively apply the changes numbered NUM and up.
368 Return 1 if all changes are valid, zero otherwise. */
369
370 int
371 verify_changes (int num)
372 {
373 int i;
374 rtx last_validated = NULL_RTX;
375
376 /* The changes have been applied and all INSN_CODEs have been reset to force
377 rerecognition.
378
379 The changes are valid if we aren't given an object, or if we are
380 given a MEM and it still is a valid address, or if this is an insn
381 and it is recognized. In the latter case, if reload has completed,
382 we also require that the operands meet the constraints for
383 the insn. */
384
385 for (i = num; i < num_changes; i++)
386 {
387 rtx object = changes[i].object;
388
389 /* If there is no object to test or if it is the same as the one we
390 already tested, ignore it. */
391 if (object == 0 || object == last_validated)
392 continue;
393
394 if (MEM_P (object))
395 {
396 if (! memory_address_addr_space_p (GET_MODE (object),
397 XEXP (object, 0),
398 MEM_ADDR_SPACE (object)))
399 break;
400 }
401 else if (/* changes[i].old might be zero, e.g. when putting a
402 REG_FRAME_RELATED_EXPR into a previously empty list. */
403 changes[i].old
404 && REG_P (changes[i].old)
405 && asm_noperands (PATTERN (object)) > 0
406 && REG_EXPR (changes[i].old) != NULL_TREE
407 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
408 && DECL_REGISTER (REG_EXPR (changes[i].old)))
409 {
410 /* Don't allow changes of hard register operands to inline
411 assemblies if they have been defined as register asm ("x"). */
412 break;
413 }
414 else if (DEBUG_INSN_P (object))
415 continue;
416 else if (insn_invalid_p (object, true))
417 {
418 rtx pat = PATTERN (object);
419
420 /* Perhaps we couldn't recognize the insn because there were
421 extra CLOBBERs at the end. If so, try to re-recognize
422 without the last CLOBBER (later iterations will cause each of
423 them to be eliminated, in turn). But don't do this if we
424 have an ASM_OPERAND. */
425 if (GET_CODE (pat) == PARALLEL
426 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
427 && asm_noperands (PATTERN (object)) < 0)
428 {
429 rtx newpat;
430
431 if (XVECLEN (pat, 0) == 2)
432 newpat = XVECEXP (pat, 0, 0);
433 else
434 {
435 int j;
436
437 newpat
438 = gen_rtx_PARALLEL (VOIDmode,
439 rtvec_alloc (XVECLEN (pat, 0) - 1));
440 for (j = 0; j < XVECLEN (newpat, 0); j++)
441 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
442 }
443
444 /* Add a new change to this group to replace the pattern
445 with this new pattern. Then consider this change
446 as having succeeded. The change we added will
447 cause the entire call to fail if things remain invalid.
448
449 Note that this can lose if a later change than the one
450 we are processing specified &XVECEXP (PATTERN (object), 0, X)
451 but this shouldn't occur. */
452
453 validate_change (object, &PATTERN (object), newpat, 1);
454 continue;
455 }
456 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
457 || GET_CODE (pat) == VAR_LOCATION)
458 /* If this insn is a CLOBBER or USE, it is always valid, but is
459 never recognized. */
460 continue;
461 else
462 break;
463 }
464 last_validated = object;
465 }
466
467 return (i == num_changes);
468 }
469
470 /* A group of changes has previously been issued with validate_change
471 and verified with verify_changes. Call df_insn_rescan for each of
472 the insn changed and clear num_changes. */
473
474 void
475 confirm_change_group (void)
476 {
477 int i;
478 rtx last_object = NULL;
479
480 for (i = 0; i < num_changes; i++)
481 {
482 rtx object = changes[i].object;
483
484 if (changes[i].unshare)
485 *changes[i].loc = copy_rtx (*changes[i].loc);
486
487 /* Avoid unnecessary rescanning when multiple changes to the same instruction
488 are made. */
489 if (object)
490 {
491 if (object != last_object && last_object && INSN_P (last_object))
492 df_insn_rescan (last_object);
493 last_object = object;
494 }
495 }
496
497 if (last_object && INSN_P (last_object))
498 df_insn_rescan (last_object);
499 num_changes = 0;
500 }
501
502 /* Apply a group of changes previously issued with `validate_change'.
503 If all changes are valid, call confirm_change_group and return 1,
504 otherwise, call cancel_changes and return 0. */
505
506 int
507 apply_change_group (void)
508 {
509 if (verify_changes (0))
510 {
511 confirm_change_group ();
512 return 1;
513 }
514 else
515 {
516 cancel_changes (0);
517 return 0;
518 }
519 }
520
521
522 /* Return the number of changes so far in the current group. */
523
524 int
525 num_validated_changes (void)
526 {
527 return num_changes;
528 }
529
530 /* Retract the changes numbered NUM and up. */
531
532 void
533 cancel_changes (int num)
534 {
535 int i;
536
537 /* Back out all the changes. Do this in the opposite order in which
538 they were made. */
539 for (i = num_changes - 1; i >= num; i--)
540 {
541 *changes[i].loc = changes[i].old;
542 if (changes[i].object && !MEM_P (changes[i].object))
543 INSN_CODE (changes[i].object) = changes[i].old_code;
544 }
545 num_changes = num;
546 }
547
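/* Editorial illustration (not part of recog.c): a minimal sketch of how
   a caller typically uses the change-group machinery above.  The helper
   name and the PLUS expression being edited are hypothetical; the calls
   are the functions defined in this file.  Guarded by "#if 0" so that it
   is never compiled.  */
#if 0
static bool
example_swap_plus_operands (rtx insn, rtx plus)
{
  /* Remember the first operand: validate_change updates *LOC right away
     and only records the old value so that it can be rolled back.  */
  rtx tem = XEXP (plus, 0);

  /* Queue both replacements as one group (IN_GROUP == 1).  */
  validate_change (insn, &XEXP (plus, 0), XEXP (plus, 1), 1);
  validate_change (insn, &XEXP (plus, 1), tem, 1);

  /* Re-recognize INSN.  If it no longer matches, every queued change is
     backed out and 0 is returned; otherwise the group is confirmed.  */
  return apply_change_group () != 0;
}
#endif
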
548 /* Reduce conditional compilation elsewhere. */
549 #ifndef HAVE_extv
550 #define HAVE_extv 0
551 #define CODE_FOR_extv CODE_FOR_nothing
552 #endif
553 #ifndef HAVE_extzv
554 #define HAVE_extzv 0
555 #define CODE_FOR_extzv CODE_FOR_nothing
556 #endif
557
558 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
559 rtx. */
560
561 static void
562 simplify_while_replacing (rtx *loc, rtx to, rtx object,
563 enum machine_mode op0_mode)
564 {
565 rtx x = *loc;
566 enum rtx_code code = GET_CODE (x);
567 rtx new_rtx;
568
569 if (SWAPPABLE_OPERANDS_P (x)
570 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
571 {
572 validate_unshare_change (object, loc,
573 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
574 : swap_condition (code),
575 GET_MODE (x), XEXP (x, 1),
576 XEXP (x, 0)), 1);
577 x = *loc;
578 code = GET_CODE (x);
579 }
580
581 switch (code)
582 {
583 case PLUS:
584 /* If we have a PLUS whose second operand is now a CONST_INT, use
585 simplify_gen_binary to try to simplify it.
586 ??? We may want later to remove this, once simplification is
587 separated from this function. */
588 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
589 validate_change (object, loc,
590 simplify_gen_binary
591 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
592 break;
593 case MINUS:
594 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
595 validate_change (object, loc,
596 simplify_gen_binary
597 (PLUS, GET_MODE (x), XEXP (x, 0),
598 simplify_gen_unary (NEG,
599 GET_MODE (x), XEXP (x, 1),
600 GET_MODE (x))), 1);
601 break;
602 case ZERO_EXTEND:
603 case SIGN_EXTEND:
604 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
605 {
606 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
607 op0_mode);
608 /* If any of the above failed, substitute in something that
609 we know won't be recognized. */
610 if (!new_rtx)
611 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
612 validate_change (object, loc, new_rtx, 1);
613 }
614 break;
615 case SUBREG:
616 /* All subregs that can be simplified should be simplified. */
617 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
618 SUBREG_BYTE (x));
619
620 /* Subregs of VOIDmode operands are incorrect. */
621 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
622 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
623 if (new_rtx)
624 validate_change (object, loc, new_rtx, 1);
625 break;
626 case ZERO_EXTRACT:
627 case SIGN_EXTRACT:
628 /* If we are replacing a register with memory, try to change the memory
629 to be the mode required for memory in extract operations (this isn't
630 likely to be an insertion operation; if it was, nothing bad will
631 happen, we might just fail in some cases). */
632
633 if (MEM_P (XEXP (x, 0))
634 && CONST_INT_P (XEXP (x, 1))
635 && CONST_INT_P (XEXP (x, 2))
636 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
637 MEM_ADDR_SPACE (XEXP (x, 0)))
638 && !MEM_VOLATILE_P (XEXP (x, 0)))
639 {
640 enum machine_mode wanted_mode = VOIDmode;
641 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
642 int pos = INTVAL (XEXP (x, 2));
643
644 if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
645 {
646 wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
647 if (wanted_mode == VOIDmode)
648 wanted_mode = word_mode;
649 }
650 else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
651 {
652 wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
653 if (wanted_mode == VOIDmode)
654 wanted_mode = word_mode;
655 }
656
657 /* If we have a narrower mode, we can do something. */
658 if (wanted_mode != VOIDmode
659 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
660 {
661 int offset = pos / BITS_PER_UNIT;
662 rtx newmem;
663
664 /* If the bytes and bits are counted differently, we
665 must adjust the offset. */
666 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
667 offset =
668 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
669 offset);
670
671 gcc_assert (GET_MODE_PRECISION (wanted_mode)
672 == GET_MODE_BITSIZE (wanted_mode));
673 pos %= GET_MODE_BITSIZE (wanted_mode);
674
675 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
676
677 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
678 validate_change (object, &XEXP (x, 0), newmem, 1);
679 }
680 }
681
682 break;
683
684 default:
685 break;
686 }
687 }
688
689 /* Replace every occurrence of FROM in X with TO. Mark each change with
690 validate_change passing OBJECT. */
691
692 static void
693 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
694 bool simplify)
695 {
696 int i, j;
697 const char *fmt;
698 rtx x = *loc;
699 enum rtx_code code;
700 enum machine_mode op0_mode = VOIDmode;
701 int prev_changes = num_changes;
702
703 if (!x)
704 return;
705
706 code = GET_CODE (x);
707 fmt = GET_RTX_FORMAT (code);
708 if (fmt[0] == 'e')
709 op0_mode = GET_MODE (XEXP (x, 0));
710
711 /* X matches FROM if it is the same rtx or they are both referring to the
712 same register in the same mode. Avoid calling rtx_equal_p unless the
713 operands look similar. */
714
715 if (x == from
716 || (REG_P (x) && REG_P (from)
717 && GET_MODE (x) == GET_MODE (from)
718 && REGNO (x) == REGNO (from))
719 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
720 && rtx_equal_p (x, from)))
721 {
722 validate_unshare_change (object, loc, to, 1);
723 return;
724 }
725
726 /* Call ourself recursively to perform the replacements.
727 We must not replace inside an already-replaced expression, otherwise we
728 get infinite recursion for replacements like (reg X)->(subreg (reg X)),
729 so we must special-case shared ASM_OPERANDS. */
730
731 if (GET_CODE (x) == PARALLEL)
732 {
733 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
734 {
735 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
736 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
737 {
738 /* Verify that operands are really shared. */
739 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
740 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
741 (x, 0, j))));
742 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
743 from, to, object, simplify);
744 }
745 else
746 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
747 simplify);
748 }
749 }
750 else
751 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
752 {
753 if (fmt[i] == 'e')
754 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
755 else if (fmt[i] == 'E')
756 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
757 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
758 simplify);
759 }
760
761 /* If we didn't substitute, there is nothing more to do. */
762 if (num_changes == prev_changes)
763 return;
764
765 /* ??? The regmove is no more, so is this aberration still necessary? */
766 /* Allow the substituted expression to have a different mode. This is used by
767 regmove to change the mode of a pseudo register. */
768 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
769 op0_mode = GET_MODE (XEXP (x, 0));
770
771 /* Do changes needed to keep rtx consistent. Don't do any other
772 simplifications, as it is not our job. */
773 if (simplify)
774 simplify_while_replacing (loc, to, object, op0_mode);
775 }
776
777 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
778 with TO. After all changes have been made, validate by seeing
779 if INSN is still valid. */
780
781 int
782 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
783 {
784 validate_replace_rtx_1 (loc, from, to, insn, true);
785 return apply_change_group ();
786 }
787
788 /* Try replacing every occurrence of FROM in INSN with TO. After all
789 changes have been made, validate by seeing if INSN is still valid. */
790
791 int
792 validate_replace_rtx (rtx from, rtx to, rtx insn)
793 {
794 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
795 return apply_change_group ();
796 }
797
798 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
799 is a part of INSN. After all changes have been made, validate by seeing if
800 INSN is still valid.
801 validate_replace_rtx (from, to, insn) is equivalent to
802 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
803
804 int
805 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
806 {
807 validate_replace_rtx_1 (where, from, to, insn, true);
808 return apply_change_group ();
809 }
810
811 /* Same as above, but do not simplify rtx afterwards. */
812 int
813 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
814 rtx insn)
815 {
816 validate_replace_rtx_1 (where, from, to, insn, false);
817 return apply_change_group ();
818
819 }
820
821 /* Try replacing every occurrence of FROM in INSN with TO. This also
822 will replace in REG_EQUAL and REG_EQUIV notes. */
823
824 void
825 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
826 {
827 rtx note;
828 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
829 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
830 if (REG_NOTE_KIND (note) == REG_EQUAL
831 || REG_NOTE_KIND (note) == REG_EQUIV)
832 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
833 }
834
835 /* Function called by note_uses to replace used subexpressions. */
836 struct validate_replace_src_data
837 {
838 rtx from; /* Old RTX */
839 rtx to; /* New RTX */
840 rtx insn; /* Insn in which substitution is occurring. */
841 };
842
843 static void
844 validate_replace_src_1 (rtx *x, void *data)
845 {
846 struct validate_replace_src_data *d
847 = (struct validate_replace_src_data *) data;
848
849 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
850 }
851
852 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
853 SET_DESTs. */
854
855 void
856 validate_replace_src_group (rtx from, rtx to, rtx insn)
857 {
858 struct validate_replace_src_data d;
859
860 d.from = from;
861 d.to = to;
862 d.insn = insn;
863 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
864 }
865
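/* Editorial illustration (not part of recog.c): a sketch of forward
   propagation using validate_replace_src_group.  DEF_INSN and USE_INSN
   are hypothetical arguments; single_set is the usual rtl.h helper.
   Guarded by "#if 0" so that it is never compiled.  */
#if 0
static bool
example_forward_propagate (rtx def_insn, rtx use_insn)
{
  rtx set = single_set (def_insn);

  /* Only handle a plain (set (reg) (src)) definition.  */
  if (set == NULL_RTX || !REG_P (SET_DEST (set)))
    return false;

  /* Queue replacements of the register in the used (non-SET_DEST)
     positions of USE_INSN ...  */
  validate_replace_src_group (SET_DEST (set), SET_SRC (set), use_insn);

  /* ... and keep them only if USE_INSN is still recognized.  */
  return apply_change_group () != 0;
}
#endif
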
866 /* Try to simplify INSN.
867 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
868 pattern and return true if something was simplified. */
869
870 bool
871 validate_simplify_insn (rtx insn)
872 {
873 int i;
874 rtx pat = NULL;
875 rtx newpat = NULL;
876
877 pat = PATTERN (insn);
878
879 if (GET_CODE (pat) == SET)
880 {
881 newpat = simplify_rtx (SET_SRC (pat));
882 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
883 validate_change (insn, &SET_SRC (pat), newpat, 1);
884 newpat = simplify_rtx (SET_DEST (pat));
885 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
886 validate_change (insn, &SET_DEST (pat), newpat, 1);
887 }
888 else if (GET_CODE (pat) == PARALLEL)
889 for (i = 0; i < XVECLEN (pat, 0); i++)
890 {
891 rtx s = XVECEXP (pat, 0, i);
892
893 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
894 {
895 newpat = simplify_rtx (SET_SRC (s));
896 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
897 validate_change (insn, &SET_SRC (s), newpat, 1);
898 newpat = simplify_rtx (SET_DEST (s));
899 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
900 validate_change (insn, &SET_DEST (s), newpat, 1);
901 }
902 }
903 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
904 }
905 \f
906 #ifdef HAVE_cc0
907 /* Return 1 if the insn using CC0 set by INSN does not contain
908 any ordered tests applied to the condition codes.
909 EQ and NE tests do not count. */
910
911 int
912 next_insn_tests_no_inequality (rtx insn)
913 {
914 rtx next = next_cc0_user (insn);
915
916 /* If there is no next insn, we have to take the conservative choice. */
917 if (next == 0)
918 return 0;
919
920 return (INSN_P (next)
921 && ! inequality_comparisons_p (PATTERN (next)));
922 }
923 #endif
924 \f
925 /* Return 1 if OP is a valid general operand for machine mode MODE.
926 This is either a register reference, a memory reference,
927 or a constant. In the case of a memory reference, the address
928 is checked for general validity for the target machine.
929
930 Register and memory references must have mode MODE in order to be valid,
931 but some constants have no machine mode and are valid for any mode.
932
933 If MODE is VOIDmode, OP is checked for validity for whatever mode
934 it has.
935
936 The main use of this function is as a predicate in match_operand
937 expressions in the machine description. */
938
939 int
940 general_operand (rtx op, enum machine_mode mode)
941 {
942 enum rtx_code code = GET_CODE (op);
943
944 if (mode == VOIDmode)
945 mode = GET_MODE (op);
946
947 /* Don't accept CONST_INT or anything similar
948 if the caller wants something floating. */
949 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
950 && GET_MODE_CLASS (mode) != MODE_INT
951 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
952 return 0;
953
954 if (CONST_INT_P (op)
955 && mode != VOIDmode
956 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
957 return 0;
958
959 if (CONSTANT_P (op))
960 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
961 || mode == VOIDmode)
962 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
963 && targetm.legitimate_constant_p (mode == VOIDmode
964 ? GET_MODE (op)
965 : mode, op));
966
967 /* Except for certain constants with VOIDmode, already checked for,
968 OP's mode must match MODE if MODE specifies a mode. */
969
970 if (GET_MODE (op) != mode)
971 return 0;
972
973 if (code == SUBREG)
974 {
975 rtx sub = SUBREG_REG (op);
976
977 #ifdef INSN_SCHEDULING
978 /* On machines that have insn scheduling, we want all memory
979 references to be explicit, so outlaw paradoxical SUBREGs.
980 However, we must allow them after reload so that they can
981 get cleaned up by cleanup_subreg_operands. */
982 if (!reload_completed && MEM_P (sub)
983 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
984 return 0;
985 #endif
986 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
987 may result in an incorrect reference. We should simplify all valid
988 subregs of MEM anyway. But allow this after reload because we
989 might be called from cleanup_subreg_operands.
990
991 ??? This is a kludge. */
992 if (!reload_completed && SUBREG_BYTE (op) != 0
993 && MEM_P (sub))
994 return 0;
995
996 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
997 create such rtl, and we must reject it. */
998 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
999 /* LRA can use subreg to store a floating point value in an
1000 integer mode. Although the floating point and the
1001 integer modes need the same number of hard registers, the
1002 size of floating point mode can be less than the integer
1003 mode. */
1004 && ! lra_in_progress
1005 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1006 return 0;
1007
1008 op = sub;
1009 code = GET_CODE (op);
1010 }
1011
1012 if (code == REG)
1013 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1014 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1015
1016 if (code == MEM)
1017 {
1018 rtx y = XEXP (op, 0);
1019
1020 if (! volatile_ok && MEM_VOLATILE_P (op))
1021 return 0;
1022
1023 /* Use the mem's mode, since it will be reloaded thus. */
1024 if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1025 return 1;
1026 }
1027
1028 return 0;
1029 }
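
/* Editorial illustration (not part of recog.c): predicates such as
   general_operand are normally referenced through match_operand in the
   machine description, but they can also be called directly.  A
   hypothetical sketch of checking the operands of a simple move before
   emitting it; guarded by "#if 0" so that it is never compiled.  */
#if 0
static bool
example_move_operands_ok (rtx dest, rtx src, enum machine_mode mode)
{
  /* The destination must be a register or a valid memory reference,
     never a constant.  */
  if (!nonimmediate_operand (dest, mode))
    return false;

  /* The source may in addition be any legitimate constant.  */
  return general_operand (src, mode) != 0;
}
#endif
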
1030 \f
1031 /* Return 1 if OP is a valid memory address for a memory reference
1032 of mode MODE.
1033
1034 The main use of this function is as a predicate in match_operand
1035 expressions in the machine description. */
1036
1037 int
1038 address_operand (rtx op, enum machine_mode mode)
1039 {
1040 return memory_address_p (mode, op);
1041 }
1042
1043 /* Return 1 if OP is a register reference of mode MODE.
1044 If MODE is VOIDmode, accept a register in any mode.
1045
1046 The main use of this function is as a predicate in match_operand
1047 expressions in the machine description. */
1048
1049 int
1050 register_operand (rtx op, enum machine_mode mode)
1051 {
1052 if (GET_MODE (op) != mode && mode != VOIDmode)
1053 return 0;
1054
1055 if (GET_CODE (op) == SUBREG)
1056 {
1057 rtx sub = SUBREG_REG (op);
1058
1059 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1060 because it is guaranteed to be reloaded into one.
1061 Just make sure the MEM is valid in itself.
1062 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1063 but currently it does result from (SUBREG (REG)...) where the
1064 reg went on the stack.) */
1065 if (! reload_completed && MEM_P (sub))
1066 return general_operand (op, mode);
1067
1068 #ifdef CANNOT_CHANGE_MODE_CLASS
1069 if (REG_P (sub)
1070 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1071 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1072 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1073 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1074 /* LRA can generate some invalid SUBREGS just for matched
1075 operand reload presentation. LRA needs to treat them as
1076 valid. */
1077 && ! LRA_SUBREG_P (op))
1078 return 0;
1079 #endif
1080
1081 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1082 create such rtl, and we must reject it. */
1083 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1084 /* LRA can use subreg to store a floating point value in an
1085 integer mode. Although the floating point and the
1086 integer modes need the same number of hard registers, the
1087 size of floating point mode can be less than the integer
1088 mode. */
1089 && ! lra_in_progress
1090 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1091 return 0;
1092
1093 op = sub;
1094 }
1095
1096 return (REG_P (op)
1097 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1098 || in_hard_reg_set_p (operand_reg_set,
1099 GET_MODE (op), REGNO (op))));
1100 }
1101
1102 /* Return 1 for a register in Pmode; ignore the tested mode. */
1103
1104 int
1105 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1106 {
1107 return register_operand (op, Pmode);
1108 }
1109
1110 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1111 or a hard register. */
1112
1113 int
1114 scratch_operand (rtx op, enum machine_mode mode)
1115 {
1116 if (GET_MODE (op) != mode && mode != VOIDmode)
1117 return 0;
1118
1119 return (GET_CODE (op) == SCRATCH
1120 || (REG_P (op)
1121 && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
1122 }
1123
1124 /* Return 1 if OP is a valid immediate operand for mode MODE.
1125
1126 The main use of this function is as a predicate in match_operand
1127 expressions in the machine description. */
1128
1129 int
1130 immediate_operand (rtx op, enum machine_mode mode)
1131 {
1132 /* Don't accept CONST_INT or anything similar
1133 if the caller wants something floating. */
1134 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1135 && GET_MODE_CLASS (mode) != MODE_INT
1136 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1137 return 0;
1138
1139 if (CONST_INT_P (op)
1140 && mode != VOIDmode
1141 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1142 return 0;
1143
1144 return (CONSTANT_P (op)
1145 && (GET_MODE (op) == mode || mode == VOIDmode
1146 || GET_MODE (op) == VOIDmode)
1147 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1148 && targetm.legitimate_constant_p (mode == VOIDmode
1149 ? GET_MODE (op)
1150 : mode, op));
1151 }
1152
1153 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1154
1155 int
1156 const_int_operand (rtx op, enum machine_mode mode)
1157 {
1158 if (!CONST_INT_P (op))
1159 return 0;
1160
1161 if (mode != VOIDmode
1162 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1163 return 0;
1164
1165 return 1;
1166 }
1167
1168 #if TARGET_SUPPORTS_WIDE_INT
1169 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1170 of mode MODE. */
1171 int
1172 const_scalar_int_operand (rtx op, enum machine_mode mode)
1173 {
1174 if (!CONST_SCALAR_INT_P (op))
1175 return 0;
1176
1177 if (CONST_INT_P (op))
1178 return const_int_operand (op, mode);
1179
1180 if (mode != VOIDmode)
1181 {
1182 int prec = GET_MODE_PRECISION (mode);
1183 int bitsize = GET_MODE_BITSIZE (mode);
1184
1185 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1186 return 0;
1187
1188 if (prec == bitsize)
1189 return 1;
1190 else
1191 {
1192 /* Multiword partial int. */
1193 HOST_WIDE_INT x
1194 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1195 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1196 }
1197 }
1198 return 1;
1199 }
1200
1201 /* Returns 1 if OP is an operand that is a constant integer or constant
1202 floating-point number of MODE. */
1203
1204 int
1205 const_double_operand (rtx op, enum machine_mode mode)
1206 {
1207 return (GET_CODE (op) == CONST_DOUBLE)
1208 && (GET_MODE (op) == mode || mode == VOIDmode);
1209 }
1210 #else
1211 /* Returns 1 if OP is an operand that is a constant integer or constant
1212 floating-point number of MODE. */
1213
1214 int
1215 const_double_operand (rtx op, enum machine_mode mode)
1216 {
1217 /* Don't accept CONST_INT or anything similar
1218 if the caller wants something floating. */
1219 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1220 && GET_MODE_CLASS (mode) != MODE_INT
1221 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1222 return 0;
1223
1224 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1225 && (mode == VOIDmode || GET_MODE (op) == mode
1226 || GET_MODE (op) == VOIDmode));
1227 }
1228 #endif
1229 /* Return 1 if OP is a general operand that is not an immediate
1230 operand of mode MODE. */
1231
1232 int
1233 nonimmediate_operand (rtx op, enum machine_mode mode)
1234 {
1235 return (general_operand (op, mode) && ! CONSTANT_P (op));
1236 }
1237
1238 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1239
1240 int
1241 nonmemory_operand (rtx op, enum machine_mode mode)
1242 {
1243 if (CONSTANT_P (op))
1244 return immediate_operand (op, mode);
1245
1246 if (GET_MODE (op) != mode && mode != VOIDmode)
1247 return 0;
1248
1249 if (GET_CODE (op) == SUBREG)
1250 {
1251 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1252 because it is guaranteed to be reloaded into one.
1253 Just make sure the MEM is valid in itself.
1254 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1255 but currently it does result from (SUBREG (REG)...) where the
1256 reg went on the stack.) */
1257 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1258 return general_operand (op, mode);
1259 op = SUBREG_REG (op);
1260 }
1261
1262 return (REG_P (op)
1263 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1264 || in_hard_reg_set_p (operand_reg_set,
1265 GET_MODE (op), REGNO (op))));
1266 }
1267
1268 /* Return 1 if OP is a valid operand that stands for pushing a
1269 value of mode MODE onto the stack.
1270
1271 The main use of this function is as a predicate in match_operand
1272 expressions in the machine description. */
1273
1274 int
1275 push_operand (rtx op, enum machine_mode mode)
1276 {
1277 unsigned int rounded_size = GET_MODE_SIZE (mode);
1278
1279 #ifdef PUSH_ROUNDING
1280 rounded_size = PUSH_ROUNDING (rounded_size);
1281 #endif
1282
1283 if (!MEM_P (op))
1284 return 0;
1285
1286 if (mode != VOIDmode && GET_MODE (op) != mode)
1287 return 0;
1288
1289 op = XEXP (op, 0);
1290
1291 if (rounded_size == GET_MODE_SIZE (mode))
1292 {
1293 if (GET_CODE (op) != STACK_PUSH_CODE)
1294 return 0;
1295 }
1296 else
1297 {
1298 if (GET_CODE (op) != PRE_MODIFY
1299 || GET_CODE (XEXP (op, 1)) != PLUS
1300 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1301 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1302 #ifdef STACK_GROWS_DOWNWARD
1303 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1304 #else
1305 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1306 #endif
1307 )
1308 return 0;
1309 }
1310
1311 return XEXP (op, 0) == stack_pointer_rtx;
1312 }
1313
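/* Editorial illustration (not part of recog.c): the shape of rtl that
   push_operand accepts in the common case where PUSH_ROUNDING does not
   pad MODE, i.e. a MEM whose address applies STACK_PUSH_CODE to the
   stack pointer.  The helper name is hypothetical; the construction
   mirrors what expr.c does when emitting pushes.  Guarded by "#if 0" so
   that it is never compiled.  */
#if 0
static rtx
example_gen_push_mem (enum machine_mode mode)
{
  /* (mem:MODE (pre_dec (reg sp))) when STACK_GROWS_DOWNWARD,
     (mem:MODE (pre_inc (reg sp))) otherwise.  */
  rtx addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  return gen_rtx_MEM (mode, addr);
}
#endif
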
1314 /* Return 1 if OP is a valid operand that stands for popping a
1315 value of mode MODE off the stack.
1316
1317 The main use of this function is as a predicate in match_operand
1318 expressions in the machine description. */
1319
1320 int
1321 pop_operand (rtx op, enum machine_mode mode)
1322 {
1323 if (!MEM_P (op))
1324 return 0;
1325
1326 if (mode != VOIDmode && GET_MODE (op) != mode)
1327 return 0;
1328
1329 op = XEXP (op, 0);
1330
1331 if (GET_CODE (op) != STACK_POP_CODE)
1332 return 0;
1333
1334 return XEXP (op, 0) == stack_pointer_rtx;
1335 }
1336
1337 /* Return 1 if ADDR is a valid memory address
1338 for mode MODE in address space AS. */
1339
1340 int
1341 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1342 rtx addr, addr_space_t as)
1343 {
1344 #ifdef GO_IF_LEGITIMATE_ADDRESS
1345 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1346 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1347 return 0;
1348
1349 win:
1350 return 1;
1351 #else
1352 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1353 #endif
1354 }
1355
1356 /* Return 1 if OP is a valid memory reference with mode MODE,
1357 including a valid address.
1358
1359 The main use of this function is as a predicate in match_operand
1360 expressions in the machine description. */
1361
1362 int
1363 memory_operand (rtx op, enum machine_mode mode)
1364 {
1365 rtx inner;
1366
1367 if (! reload_completed)
1368 /* Note that no SUBREG is a memory operand before end of reload pass,
1369 because (SUBREG (MEM...)) forces reloading into a register. */
1370 return MEM_P (op) && general_operand (op, mode);
1371
1372 if (mode != VOIDmode && GET_MODE (op) != mode)
1373 return 0;
1374
1375 inner = op;
1376 if (GET_CODE (inner) == SUBREG)
1377 inner = SUBREG_REG (inner);
1378
1379 return (MEM_P (inner) && general_operand (op, mode));
1380 }
1381
1382 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1383 that is, a memory reference whose address is a general_operand. */
1384
1385 int
1386 indirect_operand (rtx op, enum machine_mode mode)
1387 {
1388 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1389 if (! reload_completed
1390 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1391 {
1392 int offset = SUBREG_BYTE (op);
1393 rtx inner = SUBREG_REG (op);
1394
1395 if (mode != VOIDmode && GET_MODE (op) != mode)
1396 return 0;
1397
1398 /* The only way that we can have a general_operand as the resulting
1399 address is if OFFSET is zero and the address already is an operand
1400 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1401 operand. */
1402
1403 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1404 || (GET_CODE (XEXP (inner, 0)) == PLUS
1405 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1406 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1407 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1408 }
1409
1410 return (MEM_P (op)
1411 && memory_operand (op, mode)
1412 && general_operand (XEXP (op, 0), Pmode));
1413 }
1414
1415 /* Return 1 if this is an ordered comparison operator (not including
1416 ORDERED and UNORDERED). */
1417
1418 int
1419 ordered_comparison_operator (rtx op, enum machine_mode mode)
1420 {
1421 if (mode != VOIDmode && GET_MODE (op) != mode)
1422 return false;
1423 switch (GET_CODE (op))
1424 {
1425 case EQ:
1426 case NE:
1427 case LT:
1428 case LTU:
1429 case LE:
1430 case LEU:
1431 case GT:
1432 case GTU:
1433 case GE:
1434 case GEU:
1435 return true;
1436 default:
1437 return false;
1438 }
1439 }
1440
1441 /* Return 1 if this is a comparison operator. This allows the use of
1442 MATCH_OPERATOR to recognize all the branch insns. */
1443
1444 int
1445 comparison_operator (rtx op, enum machine_mode mode)
1446 {
1447 return ((mode == VOIDmode || GET_MODE (op) == mode)
1448 && COMPARISON_P (op));
1449 }
1450 \f
1451 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1452
1453 rtx
1454 extract_asm_operands (rtx body)
1455 {
1456 rtx tmp;
1457 switch (GET_CODE (body))
1458 {
1459 case ASM_OPERANDS:
1460 return body;
1461
1462 case SET:
1463 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1464 tmp = SET_SRC (body);
1465 if (GET_CODE (tmp) == ASM_OPERANDS)
1466 return tmp;
1467 break;
1468
1469 case PARALLEL:
1470 tmp = XVECEXP (body, 0, 0);
1471 if (GET_CODE (tmp) == ASM_OPERANDS)
1472 return tmp;
1473 if (GET_CODE (tmp) == SET)
1474 {
1475 tmp = SET_SRC (tmp);
1476 if (GET_CODE (tmp) == ASM_OPERANDS)
1477 return tmp;
1478 }
1479 break;
1480
1481 default:
1482 break;
1483 }
1484 return NULL;
1485 }
1486
1487 /* If BODY is an insn body that uses ASM_OPERANDS,
1488 return the number of operands (both input and output) in the insn.
1489 Otherwise return -1. */
1490
1491 int
1492 asm_noperands (const_rtx body)
1493 {
1494 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1495 int n_sets = 0;
1496
1497 if (asm_op == NULL)
1498 return -1;
1499
1500 if (GET_CODE (body) == SET)
1501 n_sets = 1;
1502 else if (GET_CODE (body) == PARALLEL)
1503 {
1504 int i;
1505 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1506 {
1507 /* Multiple output operands, or 1 output plus some clobbers:
1508 body is
1509 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1510 /* Count backwards through CLOBBERs to determine number of SETs. */
1511 for (i = XVECLEN (body, 0); i > 0; i--)
1512 {
1513 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1514 break;
1515 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1516 return -1;
1517 }
1518
1519 /* N_SETS is now number of output operands. */
1520 n_sets = i;
1521
1522 /* Verify that all the SETs we have
1523 came from a single original asm_operands insn
1524 (so that invalid combinations are blocked). */
1525 for (i = 0; i < n_sets; i++)
1526 {
1527 rtx elt = XVECEXP (body, 0, i);
1528 if (GET_CODE (elt) != SET)
1529 return -1;
1530 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1531 return -1;
1532 /* If these ASM_OPERANDS rtx's came from different original insns
1533 then they aren't allowed together. */
1534 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1535 != ASM_OPERANDS_INPUT_VEC (asm_op))
1536 return -1;
1537 }
1538 }
1539 else
1540 {
1541 /* 0 outputs, but some clobbers:
1542 body is [(asm_operands ...) (clobber (reg ...))...]. */
1543 /* Make sure all the other parallel things really are clobbers. */
1544 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1545 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1546 return -1;
1547 }
1548 }
1549
1550 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1551 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1552 }
1553
1554 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1555 copy its operands (both input and output) into the vector OPERANDS,
1556 the locations of the operands within the insn into the vector OPERAND_LOCS,
1557 and the constraints for the operands into CONSTRAINTS.
1558 Write the modes of the operands into MODES.
1559 Return the assembler-template.
1560
1561 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1562 we don't store that info. */
1563
1564 const char *
1565 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1566 const char **constraints, enum machine_mode *modes,
1567 location_t *loc)
1568 {
1569 int nbase = 0, n, i;
1570 rtx asmop;
1571
1572 switch (GET_CODE (body))
1573 {
1574 case ASM_OPERANDS:
1575 /* Zero output asm: BODY is (asm_operands ...). */
1576 asmop = body;
1577 break;
1578
1579 case SET:
1580 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1581 asmop = SET_SRC (body);
1582
1583 /* The output is in the SET.
1584 Its constraint is in the ASM_OPERANDS itself. */
1585 if (operands)
1586 operands[0] = SET_DEST (body);
1587 if (operand_locs)
1588 operand_locs[0] = &SET_DEST (body);
1589 if (constraints)
1590 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1591 if (modes)
1592 modes[0] = GET_MODE (SET_DEST (body));
1593 nbase = 1;
1594 break;
1595
1596 case PARALLEL:
1597 {
1598 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1599
1600 asmop = XVECEXP (body, 0, 0);
1601 if (GET_CODE (asmop) == SET)
1602 {
1603 asmop = SET_SRC (asmop);
1604
1605 /* At least one output, plus some CLOBBERs. The outputs are in
1606 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1607 for (i = 0; i < nparallel; i++)
1608 {
1609 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1610 break; /* Past last SET */
1611 if (operands)
1612 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1613 if (operand_locs)
1614 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1615 if (constraints)
1616 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1617 if (modes)
1618 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1619 }
1620 nbase = i;
1621 }
1622 break;
1623 }
1624
1625 default:
1626 gcc_unreachable ();
1627 }
1628
1629 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1630 for (i = 0; i < n; i++)
1631 {
1632 if (operand_locs)
1633 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1634 if (operands)
1635 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1636 if (constraints)
1637 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1638 if (modes)
1639 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1640 }
1641 nbase += n;
1642
1643 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1644 for (i = 0; i < n; i++)
1645 {
1646 if (operand_locs)
1647 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1648 if (operands)
1649 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1650 if (constraints)
1651 constraints[nbase + i] = "";
1652 if (modes)
1653 modes[nbase + i] = Pmode;
1654 }
1655
1656 if (loc)
1657 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1658
1659 return ASM_OPERANDS_TEMPLATE (asmop);
1660 }
1661
1662 /* Check if an asm_operand matches its constraints.
1663 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1664
1665 int
1666 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1667 {
1668 int result = 0;
1669 #ifdef AUTO_INC_DEC
1670 bool incdec_ok = false;
1671 #endif
1672
1673 /* Use constrain_operands after reload. */
1674 gcc_assert (!reload_completed);
1675
1676 /* An empty constraint string is the same as "X,...,X", i.e. X for as
1677 many alternatives as required to match the other operands. */
1678 if (*constraint == '\0')
1679 result = 1;
1680
1681 while (*constraint)
1682 {
1683 char c = *constraint;
1684 int len;
1685 switch (c)
1686 {
1687 case ',':
1688 constraint++;
1689 continue;
1690 case '=':
1691 case '+':
1692 case '*':
1693 case '%':
1694 case '!':
1695 case '#':
1696 case '&':
1697 case '?':
1698 break;
1699
1700 case '0': case '1': case '2': case '3': case '4':
1701 case '5': case '6': case '7': case '8': case '9':
1702 /* If the caller provided a constraints pointer, look up
1703 the matching constraint. Otherwise, our caller should have
1704 given us the proper matching constraint, but we can't
1705 actually fail the check if they didn't. Indicate that
1706 results are inconclusive. */
1707 if (constraints)
1708 {
1709 char *end;
1710 unsigned long match;
1711
1712 match = strtoul (constraint, &end, 10);
1713 if (!result)
1714 result = asm_operand_ok (op, constraints[match], NULL);
1715 constraint = (const char *) end;
1716 }
1717 else
1718 {
1719 do
1720 constraint++;
1721 while (ISDIGIT (*constraint));
1722 if (! result)
1723 result = -1;
1724 }
1725 continue;
1726
1727 case 'p':
1728 if (address_operand (op, VOIDmode))
1729 result = 1;
1730 break;
1731
1732 case TARGET_MEM_CONSTRAINT:
1733 case 'V': /* non-offsettable */
1734 if (memory_operand (op, VOIDmode))
1735 result = 1;
1736 break;
1737
1738 case 'o': /* offsettable */
1739 if (offsettable_nonstrict_memref_p (op))
1740 result = 1;
1741 break;
1742
1743 case '<':
1744 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1745 excepting those that expand_call created. Further, on some
1746 machines which do not have generalized auto inc/dec, an inc/dec
1747 is not a memory_operand.
1748
1749 Match any memory and hope things are resolved after reload. */
1750
1751 if (MEM_P (op)
1752 && (1
1753 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1754 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1755 result = 1;
1756 #ifdef AUTO_INC_DEC
1757 incdec_ok = true;
1758 #endif
1759 break;
1760
1761 case '>':
1762 if (MEM_P (op)
1763 && (1
1764 || GET_CODE (XEXP (op, 0)) == PRE_INC
1765 || GET_CODE (XEXP (op, 0)) == POST_INC))
1766 result = 1;
1767 #ifdef AUTO_INC_DEC
1768 incdec_ok = true;
1769 #endif
1770 break;
1771
1772 case 'E':
1773 case 'F':
1774 if (CONST_DOUBLE_AS_FLOAT_P (op)
1775 || (GET_CODE (op) == CONST_VECTOR
1776 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1777 result = 1;
1778 break;
1779
1780 case 'G':
1781 if (CONST_DOUBLE_AS_FLOAT_P (op)
1782 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1783 result = 1;
1784 break;
1785 case 'H':
1786 if (CONST_DOUBLE_AS_FLOAT_P (op)
1787 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1788 result = 1;
1789 break;
1790
1791 case 's':
1792 if (CONST_SCALAR_INT_P (op))
1793 break;
1794 /* Fall through. */
1795
1796 case 'i':
1797 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1798 result = 1;
1799 break;
1800
1801 case 'n':
1802 if (CONST_SCALAR_INT_P (op))
1803 result = 1;
1804 break;
1805
1806 case 'I':
1807 if (CONST_INT_P (op)
1808 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1809 result = 1;
1810 break;
1811 case 'J':
1812 if (CONST_INT_P (op)
1813 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1814 result = 1;
1815 break;
1816 case 'K':
1817 if (CONST_INT_P (op)
1818 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1819 result = 1;
1820 break;
1821 case 'L':
1822 if (CONST_INT_P (op)
1823 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1824 result = 1;
1825 break;
1826 case 'M':
1827 if (CONST_INT_P (op)
1828 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1829 result = 1;
1830 break;
1831 case 'N':
1832 if (CONST_INT_P (op)
1833 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1834 result = 1;
1835 break;
1836 case 'O':
1837 if (CONST_INT_P (op)
1838 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1839 result = 1;
1840 break;
1841 case 'P':
1842 if (CONST_INT_P (op)
1843 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1844 result = 1;
1845 break;
1846
1847 case 'X':
1848 result = 1;
1849 break;
1850
1851 case 'g':
1852 if (general_operand (op, VOIDmode))
1853 result = 1;
1854 break;
1855
1856 default:
1857 /* For all other letters, we first check for a register class,
1858 otherwise it is an EXTRA_CONSTRAINT. */
1859 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1860 {
1861 case 'r':
1862 if (GET_MODE (op) == BLKmode)
1863 break;
1864 if (register_operand (op, VOIDmode))
1865 result = 1;
1866 }
1867 #ifdef EXTRA_CONSTRAINT_STR
1868 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1869 /* Every memory operand can be reloaded to fit. */
1870 result = result || memory_operand (op, VOIDmode);
1871 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1872 /* Every address operand can be reloaded to fit. */
1873 result = result || address_operand (op, VOIDmode);
1874 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1875 result = 1;
1876 #endif
1877 break;
1878 }
1879 len = CONSTRAINT_LEN (c, constraint);
1880 do
1881 constraint++;
1882 while (--len && *constraint);
1883 if (len)
1884 return 0;
1885 }
1886
1887 #ifdef AUTO_INC_DEC
1888 /* For operands without < or > constraints, reject side-effects. */
1889 if (!incdec_ok && result && MEM_P (op))
1890 switch (GET_CODE (XEXP (op, 0)))
1891 {
1892 case PRE_INC:
1893 case POST_INC:
1894 case PRE_DEC:
1895 case POST_DEC:
1896 case PRE_MODIFY:
1897 case POST_MODIFY:
1898 return 0;
1899 default:
1900 break;
1901 }
1902 #endif
1903
1904 return result;
1905 }
1906 \f
1907 /* Given an rtx *P, if it is a sum containing an integer constant term,
1908 return the location (type rtx *) of the pointer to that constant term.
1909 Otherwise, return a null pointer. */
1910
1911 rtx *
1912 find_constant_term_loc (rtx *p)
1913 {
1914 rtx *tem;
1915 enum rtx_code code = GET_CODE (*p);
1916
1917 /* If *P IS such a constant term, P is its location. */
1918
1919 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1920 || code == CONST)
1921 return p;
1922
1923 /* Otherwise, if not a sum, it has no constant term. */
1924
1925 if (GET_CODE (*p) != PLUS)
1926 return 0;
1927
1928 /* If one of the summands is constant, return its location. */
1929
1930 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1931 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1932 return p;
1933
1934 /* Otherwise, check each summand for containing a constant term. */
1935
1936 if (XEXP (*p, 0) != 0)
1937 {
1938 tem = find_constant_term_loc (&XEXP (*p, 0));
1939 if (tem != 0)
1940 return tem;
1941 }
1942
1943 if (XEXP (*p, 1) != 0)
1944 {
1945 tem = find_constant_term_loc (&XEXP (*p, 1));
1946 if (tem != 0)
1947 return tem;
1948 }
1949
1950 return 0;
1951 }
1952 \f
1953 /* Return 1 if OP is a memory reference
1954 whose address contains no side effects
1955 and remains valid after the addition
1956 of a positive integer less than the
1957 size of the object being referenced.
1958
1959 We assume that the original address is valid and do not check it.
1960
1961 This uses strict_memory_address_p as a subroutine, so
1962 don't use it before reload. */
1963
1964 int
1965 offsettable_memref_p (rtx op)
1966 {
1967 return ((MEM_P (op))
1968 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1969 MEM_ADDR_SPACE (op)));
1970 }
1971
1972 /* Similar, but don't require a strictly valid mem ref:
1973 consider pseudo-regs valid as index or base regs. */
1974
1975 int
1976 offsettable_nonstrict_memref_p (rtx op)
1977 {
1978 return ((MEM_P (op))
1979 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1980 MEM_ADDR_SPACE (op)));
1981 }
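
/* A minimal usage sketch (illustrative only; the helper name and the
   word_mode/UNITS_PER_WORD choice are assumptions): a caller that wants the
   second word of a multi-word MEM typically verifies that the reference is
   offsettable and then forms the displaced reference with adjust_address.
   offsettable_memref_p is the strict, post-reload check.  */

static rtx ATTRIBUTE_UNUSED
example_second_word_of_mem (rtx mem)
{
  gcc_assert (MEM_P (mem) && offsettable_memref_p (mem));
  return adjust_address (mem, word_mode, UNITS_PER_WORD);
}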
1982
1983 /* Return 1 if Y is a memory address which contains no side effects
1984 and would remain valid for address space AS after the addition of
1985 a positive integer less than the size of MODE.
1986
1987 We assume that the original address is valid and do not check it.
1988 We do check that it is valid for narrower modes.
1989
1990 If STRICTP is nonzero, we require a strictly valid address,
1991 for the sake of use in reload.c. */
1992
1993 int
1994 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1995 addr_space_t as)
1996 {
1997 enum rtx_code ycode = GET_CODE (y);
1998 rtx z;
1999 rtx y1 = y;
2000 rtx *y2;
2001 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
2002 (strictp ? strict_memory_address_addr_space_p
2003 : memory_address_addr_space_p);
2004 unsigned int mode_sz = GET_MODE_SIZE (mode);
2005
2006 if (CONSTANT_ADDRESS_P (y))
2007 return 1;
2008
2009 /* Adjusting an offsettable address involves changing to a narrower mode.
2010 Make sure that's OK. */
2011
2012 if (mode_dependent_address_p (y, as))
2013 return 0;
2014
2015 enum machine_mode address_mode = GET_MODE (y);
2016 if (address_mode == VOIDmode)
2017 address_mode = targetm.addr_space.address_mode (as);
2018 #ifdef POINTERS_EXTEND_UNSIGNED
2019 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2020 #endif
2021
2022 /* ??? How much offset does an offsettable BLKmode reference need?
2023 Clearly that depends on the situation in which it's being used.
2024 However, the current situation in which we test 0xffffffff is
2025 less than ideal. Caveat user. */
2026 if (mode_sz == 0)
2027 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2028
2029 /* If the expression contains a constant term,
2030 see if it remains valid when max possible offset is added. */
2031
2032 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2033 {
2034 int good;
2035
2036 y1 = *y2;
2037 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2038 /* Use QImode because an odd displacement may be automatically invalid
2039 for any wider mode. But it should be valid for a single byte. */
2040 good = (*addressp) (QImode, y, as);
2041
2042 /* In any case, restore old contents of memory. */
2043 *y2 = y1;
2044 return good;
2045 }
2046
2047 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2048 return 0;
2049
2050 /* The offset added here is chosen as the maximum offset that
2051 any instruction could need to add when operating on something
2052 of the specified mode. We assume that if Y and Y+c are
2053 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2054 go inside a LO_SUM here, so we do so as well. */
2055 if (GET_CODE (y) == LO_SUM
2056 && mode != BLKmode
2057 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2058 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2059 plus_constant (address_mode, XEXP (y, 1),
2060 mode_sz - 1));
2061 #ifdef POINTERS_EXTEND_UNSIGNED
2062 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2063 else if (POINTERS_EXTEND_UNSIGNED > 0
2064 && GET_CODE (y) == ZERO_EXTEND
2065 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2066 z = gen_rtx_ZERO_EXTEND (address_mode,
2067 plus_constant (pointer_mode, XEXP (y, 0),
2068 mode_sz - 1));
2069 #endif
2070 else
2071 z = plus_constant (address_mode, y, mode_sz - 1);
2072
2073 /* Use QImode because an odd displacement may be automatically invalid
2074 for any wider mode. But it should be valid for a single byte. */
2075 return (*addressp) (QImode, z, as);
2076 }
2077
2078 /* Return 1 if ADDR is an address-expression whose effect depends
2079 on the mode of the memory reference it is used in.
2080
2081 ADDRSPACE is the address space associated with the address.
2082
2083 Autoincrement addressing is a typical example of mode-dependence
2084 because the amount of the increment depends on the mode. */
2085
2086 bool
2087 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2088 {
2089 /* Auto-increment addressing with anything other than post_modify
2090 or pre_modify always introduces a mode dependency. Catch such
2091 cases now instead of deferring to the target. */
2092 if (GET_CODE (addr) == PRE_INC
2093 || GET_CODE (addr) == POST_INC
2094 || GET_CODE (addr) == PRE_DEC
2095 || GET_CODE (addr) == POST_DEC)
2096 return true;
2097
2098 return targetm.mode_dependent_address_p (addr, addrspace);
2099 }
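
/* A minimal sketch of the property handled above (illustrative only; REG is
   assumed to be an address-sized register, and the helper name is an
   assumption): a plain auto-increment address is always mode-dependent,
   because the increment amount follows the size of the access.  */

static bool ATTRIBUTE_UNUSED
example_autoinc_address_is_mode_dependent (rtx reg)
{
  rtx addr = gen_rtx_PRE_INC (GET_MODE (reg), reg);
  return mode_dependent_address_p (addr, ADDR_SPACE_GENERIC);
}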
2100 \f
2101 /* Like extract_insn, but save the extracted insn and don't extract it again
2102 when called again for the same insn, expecting that recog_data still
2103 contains valid information.  This is used primarily by the gen_attr
2104 infrastructure, which often extracts the same insn again and again.  */
2105 void
2106 extract_insn_cached (rtx insn)
2107 {
2108 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2109 return;
2110 extract_insn (insn);
2111 recog_data.insn = insn;
2112 }
2113
2114 /* Do cached extract_insn, constrain_operands and complain about failures.
2115 Used by insn_attrtab. */
2116 void
2117 extract_constrain_insn_cached (rtx insn)
2118 {
2119 extract_insn_cached (insn);
2120 if (which_alternative == -1
2121 && !constrain_operands (reload_completed))
2122 fatal_insn_not_found (insn);
2123 }
2124
2125 /* Do cached constrain_operands and complain about failures. */
2126 int
2127 constrain_operands_cached (int strict)
2128 {
2129 if (which_alternative == -1)
2130 return constrain_operands (strict);
2131 else
2132 return 1;
2133 }
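
/* A minimal sketch of how the cached entry points above are normally used by
   generated attribute code (illustrative only; the helper name is an
   assumption): extract and constrain once, then read which_alternative and
   the recog_data arrays as often as needed for the same insn.  */

static int ATTRIBUTE_UNUSED
example_constrained_alternative (rtx insn)
{
  extract_constrain_insn_cached (insn);
  return which_alternative;
}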
2134 \f
2135 /* Analyze INSN and fill in recog_data. */
2136
2137 void
2138 extract_insn (rtx insn)
2139 {
2140 int i;
2141 int icode;
2142 int noperands;
2143 rtx body = PATTERN (insn);
2144
2145 recog_data.n_operands = 0;
2146 recog_data.n_alternatives = 0;
2147 recog_data.n_dups = 0;
2148 recog_data.is_asm = false;
2149
2150 switch (GET_CODE (body))
2151 {
2152 case USE:
2153 case CLOBBER:
2154 case ASM_INPUT:
2155 case ADDR_VEC:
2156 case ADDR_DIFF_VEC:
2157 case VAR_LOCATION:
2158 return;
2159
2160 case SET:
2161 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2162 goto asm_insn;
2163 else
2164 goto normal_insn;
2165 case PARALLEL:
2166 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2167 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2168 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2169 goto asm_insn;
2170 else
2171 goto normal_insn;
2172 case ASM_OPERANDS:
2173 asm_insn:
2174 recog_data.n_operands = noperands = asm_noperands (body);
2175 if (noperands >= 0)
2176 {
2177 /* This insn is an `asm' with operands. */
2178
2179 /* expand_asm_operands makes sure there aren't too many operands. */
2180 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2181
2182 /* Now get the operand values and constraints out of the insn. */
2183 decode_asm_operands (body, recog_data.operand,
2184 recog_data.operand_loc,
2185 recog_data.constraints,
2186 recog_data.operand_mode, NULL);
2187 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2188 if (noperands > 0)
2189 {
2190 const char *p = recog_data.constraints[0];
2191 recog_data.n_alternatives = 1;
2192 while (*p)
2193 recog_data.n_alternatives += (*p++ == ',');
2194 }
2195 recog_data.is_asm = true;
2196 break;
2197 }
2198 fatal_insn_not_found (insn);
2199
2200 default:
2201 normal_insn:
2202 /* Ordinary insn: recognize it, get the operands via insn_extract
2203 and get the constraints. */
2204
2205 icode = recog_memoized (insn);
2206 if (icode < 0)
2207 fatal_insn_not_found (insn);
2208
2209 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2210 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2211 recog_data.n_dups = insn_data[icode].n_dups;
2212
2213 insn_extract (insn);
2214
2215 for (i = 0; i < noperands; i++)
2216 {
2217 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2218 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2219 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2220 /* A VOIDmode match_operand gets its mode from the real operand.  */
2221 if (recog_data.operand_mode[i] == VOIDmode)
2222 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2223 }
2224 }
2225 for (i = 0; i < noperands; i++)
2226 recog_data.operand_type[i]
2227 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2228 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2229 : OP_IN);
2230
2231 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2232
2233 if (INSN_CODE (insn) < 0)
2234 for (i = 0; i < recog_data.n_alternatives; i++)
2235 recog_data.alternative_enabled_p[i] = true;
2236 else
2237 {
2238 recog_data.insn = insn;
2239 for (i = 0; i < recog_data.n_alternatives; i++)
2240 {
2241 which_alternative = i;
2242 recog_data.alternative_enabled_p[i]
2243 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2244 }
2245 }
2246
2247 recog_data.insn = NULL;
2248 which_alternative = -1;
2249 }
2250
2251 /* After calling extract_insn, you can use this function to extract some
2252 information from the constraint strings into a more usable form.
2253 The collected data is stored in recog_op_alt. */
2254 void
2255 preprocess_constraints (void)
2256 {
2257 int i;
2258
2259 for (i = 0; i < recog_data.n_operands; i++)
2260 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2261 * sizeof (struct operand_alternative)));
2262
2263 for (i = 0; i < recog_data.n_operands; i++)
2264 {
2265 int j;
2266 struct operand_alternative *op_alt;
2267 const char *p = recog_data.constraints[i];
2268
2269 op_alt = recog_op_alt[i];
2270
2271 for (j = 0; j < recog_data.n_alternatives; j++)
2272 {
2273 op_alt[j].cl = NO_REGS;
2274 op_alt[j].constraint = p;
2275 op_alt[j].matches = -1;
2276 op_alt[j].matched = -1;
2277
2278 if (!recog_data.alternative_enabled_p[j])
2279 {
2280 p = skip_alternative (p);
2281 continue;
2282 }
2283
2284 if (*p == '\0' || *p == ',')
2285 {
2286 op_alt[j].anything_ok = 1;
2287 continue;
2288 }
2289
2290 for (;;)
2291 {
2292 char c = *p;
2293 if (c == '#')
2294 do
2295 c = *++p;
2296 while (c != ',' && c != '\0');
2297 if (c == ',' || c == '\0')
2298 {
2299 p++;
2300 break;
2301 }
2302
2303 switch (c)
2304 {
2305 case '=': case '+': case '*': case '%':
2306 case 'E': case 'F': case 'G': case 'H':
2307 case 's': case 'i': case 'n':
2308 case 'I': case 'J': case 'K': case 'L':
2309 case 'M': case 'N': case 'O': case 'P':
2310 /* These don't say anything we care about. */
2311 break;
2312
2313 case '?':
2314 op_alt[j].reject += 6;
2315 break;
2316 case '!':
2317 op_alt[j].reject += 600;
2318 break;
2319 case '&':
2320 op_alt[j].earlyclobber = 1;
2321 break;
2322
2323 case '0': case '1': case '2': case '3': case '4':
2324 case '5': case '6': case '7': case '8': case '9':
2325 {
2326 char *end;
2327 op_alt[j].matches = strtoul (p, &end, 10);
2328 recog_op_alt[op_alt[j].matches][j].matched = i;
2329 p = end;
2330 }
2331 continue;
2332
2333 case TARGET_MEM_CONSTRAINT:
2334 op_alt[j].memory_ok = 1;
2335 break;
2336 case '<':
2337 op_alt[j].decmem_ok = 1;
2338 break;
2339 case '>':
2340 op_alt[j].incmem_ok = 1;
2341 break;
2342 case 'V':
2343 op_alt[j].nonoffmem_ok = 1;
2344 break;
2345 case 'o':
2346 op_alt[j].offmem_ok = 1;
2347 break;
2348 case 'X':
2349 op_alt[j].anything_ok = 1;
2350 break;
2351
2352 case 'p':
2353 op_alt[j].is_address = 1;
2354 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2355 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2356 ADDRESS, SCRATCH)];
2357 break;
2358
2359 case 'g':
2360 case 'r':
2361 op_alt[j].cl =
2362 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2363 break;
2364
2365 default:
2366 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2367 {
2368 op_alt[j].memory_ok = 1;
2369 break;
2370 }
2371 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2372 {
2373 op_alt[j].is_address = 1;
2374 op_alt[j].cl
2375 = (reg_class_subunion
2376 [(int) op_alt[j].cl]
2377 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2378 ADDRESS, SCRATCH)]);
2379 break;
2380 }
2381
2382 op_alt[j].cl
2383 = (reg_class_subunion
2384 [(int) op_alt[j].cl]
2385 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2386 break;
2387 }
2388 p += CONSTRAINT_LEN (c, p);
2389 }
2390 }
2391 }
2392 }
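
/* A minimal sketch of a consumer of recog_op_alt (illustrative only; the
   helper name is an assumption): after extract_insn and
   preprocess_constraints, each operand/alternative pair can be queried
   without re-parsing the constraint strings.  OPNO and ALT are assumed to
   be in range.  */

static bool ATTRIBUTE_UNUSED
example_operand_allows_memory_p (int opno, int alt)
{
  const struct operand_alternative *op_alt = &recog_op_alt[opno][alt];
  return op_alt->memory_ok || op_alt->offmem_ok || op_alt->anything_ok;
}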
2393
2394 /* Check the operands of an insn against the insn's operand constraints
2395 and return 1 if they are valid.
2396 The information about the insn's operands, constraints, operand modes
2397 etc. is obtained from the global variables set up by extract_insn.
2398
2399 WHICH_ALTERNATIVE is set to a number which indicates which
2400 alternative of constraints was matched: 0 for the first alternative,
2401 1 for the next, etc.
2402
2403 In addition, when two operands are required to match
2404 and it happens that the output operand is (reg) while the
2405 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2406 make the output operand look like the input.
2407 This is because the output operand is the one the template will print.
2408
2409 This is used in final, just before printing the assembler code and by
2410 the routines that determine an insn's attribute.
2411
2412 If STRICT is a positive nonzero value, it means that we have been
2413 called after reload has been completed. In that case, we must
2414 do all checks strictly. If it is zero, it means that we have been called
2415 before reload has completed. In that case, we first try to see if we can
2416 find an alternative that matches strictly. If not, we try again, this
2417 time assuming that reload will fix up the insn. This provides a "best
2418 guess" for the alternative and is used to compute attributes of insns prior
2419 to reload. A negative value of STRICT is used for this internal call. */
2420
2421 struct funny_match
2422 {
2423 int this_op, other;
2424 };
2425
2426 int
2427 constrain_operands (int strict)
2428 {
2429 const char *constraints[MAX_RECOG_OPERANDS];
2430 int matching_operands[MAX_RECOG_OPERANDS];
2431 int earlyclobber[MAX_RECOG_OPERANDS];
2432 int c;
2433
2434 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2435 int funny_match_index;
2436
2437 which_alternative = 0;
2438 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2439 return 1;
2440
2441 for (c = 0; c < recog_data.n_operands; c++)
2442 {
2443 constraints[c] = recog_data.constraints[c];
2444 matching_operands[c] = -1;
2445 }
2446
2447 do
2448 {
2449 int seen_earlyclobber_at = -1;
2450 int opno;
2451 int lose = 0;
2452 funny_match_index = 0;
2453
2454 if (!recog_data.alternative_enabled_p[which_alternative])
2455 {
2456 int i;
2457
2458 for (i = 0; i < recog_data.n_operands; i++)
2459 constraints[i] = skip_alternative (constraints[i]);
2460
2461 which_alternative++;
2462 continue;
2463 }
2464
2465 for (opno = 0; opno < recog_data.n_operands; opno++)
2466 {
2467 rtx op = recog_data.operand[opno];
2468 enum machine_mode mode = GET_MODE (op);
2469 const char *p = constraints[opno];
2470 int offset = 0;
2471 int win = 0;
2472 int val;
2473 int len;
2474
2475 earlyclobber[opno] = 0;
2476
2477 /* A unary operator may be accepted by the predicate, but it
2478 is irrelevant for matching constraints. */
2479 if (UNARY_P (op))
2480 op = XEXP (op, 0);
2481
2482 if (GET_CODE (op) == SUBREG)
2483 {
2484 if (REG_P (SUBREG_REG (op))
2485 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2486 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2487 GET_MODE (SUBREG_REG (op)),
2488 SUBREG_BYTE (op),
2489 GET_MODE (op));
2490 op = SUBREG_REG (op);
2491 }
2492
2493 /* An empty constraint or empty alternative
2494 allows anything which matched the pattern. */
2495 if (*p == 0 || *p == ',')
2496 win = 1;
2497
2498 do
2499 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2500 {
2501 case '\0':
2502 len = 0;
2503 break;
2504 case ',':
2505 c = '\0';
2506 break;
2507
2508 case '?': case '!': case '*': case '%':
2509 case '=': case '+':
2510 break;
2511
2512 case '#':
2513 /* Ignore rest of this alternative as far as
2514 constraint checking is concerned. */
2515 do
2516 p++;
2517 while (*p && *p != ',');
2518 len = 0;
2519 break;
2520
2521 case '&':
2522 earlyclobber[opno] = 1;
2523 if (seen_earlyclobber_at < 0)
2524 seen_earlyclobber_at = opno;
2525 break;
2526
2527 case '0': case '1': case '2': case '3': case '4':
2528 case '5': case '6': case '7': case '8': case '9':
2529 {
2530 /* This operand must be the same as a previous one.
2531 This kind of constraint is used for instructions such
2532 as add when they take only two operands.
2533
2534 Note that the lower-numbered operand is passed first.
2535
2536 If we are not testing strictly, assume that this
2537 constraint will be satisfied. */
2538
2539 char *end;
2540 int match;
2541
2542 match = strtoul (p, &end, 10);
2543 p = end;
2544
2545 if (strict < 0)
2546 val = 1;
2547 else
2548 {
2549 rtx op1 = recog_data.operand[match];
2550 rtx op2 = recog_data.operand[opno];
2551
2552 /* A unary operator may be accepted by the predicate,
2553 but it is irrelevant for matching constraints. */
2554 if (UNARY_P (op1))
2555 op1 = XEXP (op1, 0);
2556 if (UNARY_P (op2))
2557 op2 = XEXP (op2, 0);
2558
2559 val = operands_match_p (op1, op2);
2560 }
2561
2562 matching_operands[opno] = match;
2563 matching_operands[match] = opno;
2564
2565 if (val != 0)
2566 win = 1;
2567
2568 /* If output is *x and input is *--x, arrange later
2569 to change the output to *--x as well, since the
2570 output op is the one that will be printed. */
2571 if (val == 2 && strict > 0)
2572 {
2573 funny_match[funny_match_index].this_op = opno;
2574 funny_match[funny_match_index++].other = match;
2575 }
2576 }
2577 len = 0;
2578 break;
2579
2580 case 'p':
2581 /* p is used for address_operands. When we are called by
2582 gen_reload, no one will have checked that the address is
2583 strictly valid, i.e., that all pseudos requiring hard regs
2584 have gotten them. */
2585 if (strict <= 0
2586 || (strict_memory_address_p (recog_data.operand_mode[opno],
2587 op)))
2588 win = 1;
2589 break;
2590
2591 /* No need to check general_operand again;
2592 it was done in insn-recog.c. Well, except that reload
2593 doesn't check the validity of its replacements, but
2594 that should only matter when there's a bug. */
2595 case 'g':
2596 /* Anything goes unless it is a REG and really has a hard reg
2597 but the hard reg is not in the class GENERAL_REGS. */
2598 if (REG_P (op))
2599 {
2600 if (strict < 0
2601 || GENERAL_REGS == ALL_REGS
2602 || (reload_in_progress
2603 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2604 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2605 win = 1;
2606 }
2607 else if (strict < 0 || general_operand (op, mode))
2608 win = 1;
2609 break;
2610
2611 case 'X':
2612 /* This is used for a MATCH_SCRATCH in the cases when
2613 we don't actually need anything. So anything goes
2614 any time. */
2615 win = 1;
2616 break;
2617
2618 case TARGET_MEM_CONSTRAINT:
2619 /* Memory operands must be valid, to the extent
2620 required by STRICT. */
2621 if (MEM_P (op))
2622 {
2623 if (strict > 0
2624 && !strict_memory_address_addr_space_p
2625 (GET_MODE (op), XEXP (op, 0),
2626 MEM_ADDR_SPACE (op)))
2627 break;
2628 if (strict == 0
2629 && !memory_address_addr_space_p
2630 (GET_MODE (op), XEXP (op, 0),
2631 MEM_ADDR_SPACE (op)))
2632 break;
2633 win = 1;
2634 }
2635 /* Before reload, accept what reload can turn into mem. */
2636 else if (strict < 0 && CONSTANT_P (op))
2637 win = 1;
2638 /* During reload, accept a pseudo */
2639 else if (reload_in_progress && REG_P (op)
2640 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2641 win = 1;
2642 break;
2643
2644 case '<':
2645 if (MEM_P (op)
2646 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2647 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2648 win = 1;
2649 break;
2650
2651 case '>':
2652 if (MEM_P (op)
2653 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2654 || GET_CODE (XEXP (op, 0)) == POST_INC))
2655 win = 1;
2656 break;
2657
2658 case 'E':
2659 case 'F':
2660 if (CONST_DOUBLE_AS_FLOAT_P (op)
2661 || (GET_CODE (op) == CONST_VECTOR
2662 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2663 win = 1;
2664 break;
2665
2666 case 'G':
2667 case 'H':
2668 if (CONST_DOUBLE_AS_FLOAT_P (op)
2669 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2670 win = 1;
2671 break;
2672
2673 case 's':
2674 if (CONST_SCALAR_INT_P (op))
2675 break;
2676 case 'i':
2677 if (CONSTANT_P (op))
2678 win = 1;
2679 break;
2680
2681 case 'n':
2682 if (CONST_SCALAR_INT_P (op))
2683 win = 1;
2684 break;
2685
2686 case 'I':
2687 case 'J':
2688 case 'K':
2689 case 'L':
2690 case 'M':
2691 case 'N':
2692 case 'O':
2693 case 'P':
2694 if (CONST_INT_P (op)
2695 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2696 win = 1;
2697 break;
2698
2699 case 'V':
2700 if (MEM_P (op)
2701 && ((strict > 0 && ! offsettable_memref_p (op))
2702 || (strict < 0
2703 && !(CONSTANT_P (op) || MEM_P (op)))
2704 || (reload_in_progress
2705 && !(REG_P (op)
2706 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2707 win = 1;
2708 break;
2709
2710 case 'o':
2711 if ((strict > 0 && offsettable_memref_p (op))
2712 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2713 /* Before reload, accept what reload can handle. */
2714 || (strict < 0
2715 && (CONSTANT_P (op) || MEM_P (op)))
2716 /* During reload, accept a pseudo */
2717 || (reload_in_progress && REG_P (op)
2718 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2719 win = 1;
2720 break;
2721
2722 default:
2723 {
2724 enum reg_class cl;
2725
2726 cl = (c == 'r'
2727 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2728 if (cl != NO_REGS)
2729 {
2730 if (strict < 0
2731 || (strict == 0
2732 && REG_P (op)
2733 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2734 || (strict == 0 && GET_CODE (op) == SCRATCH)
2735 || (REG_P (op)
2736 && reg_fits_class_p (op, cl, offset, mode)))
2737 win = 1;
2738 }
2739 #ifdef EXTRA_CONSTRAINT_STR
2740 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2741 win = 1;
2742
2743 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2744 /* Every memory operand can be reloaded to fit. */
2745 && ((strict < 0 && MEM_P (op))
2746 /* Before reload, accept what reload can turn
2747 into mem. */
2748 || (strict < 0 && CONSTANT_P (op))
2749 /* During reload, accept a pseudo */
2750 || (reload_in_progress && REG_P (op)
2751 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2752 win = 1;
2753 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2754 /* Every address operand can be reloaded to fit. */
2755 && strict < 0)
2756 win = 1;
2757 /* Cater to architectures like IA-64 that define extra memory
2758 constraints without using define_memory_constraint. */
2759 else if (reload_in_progress
2760 && REG_P (op)
2761 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2762 && reg_renumber[REGNO (op)] < 0
2763 && reg_equiv_mem (REGNO (op)) != 0
2764 && EXTRA_CONSTRAINT_STR
2765 (reg_equiv_mem (REGNO (op)), c, p))
2766 win = 1;
2767 #endif
2768 break;
2769 }
2770 }
2771 while (p += len, c);
2772
2773 constraints[opno] = p;
2774 /* If this operand did not win somehow,
2775 this alternative loses. */
2776 if (! win)
2777 lose = 1;
2778 }
2779 /* This alternative won; the operands are ok.
2780 Change whichever operands this alternative says to change. */
2781 if (! lose)
2782 {
2783 int opno, eopno;
2784
2785 /* See if any earlyclobber operand conflicts with some other
2786 operand. */
2787
2788 if (strict > 0 && seen_earlyclobber_at >= 0)
2789 for (eopno = seen_earlyclobber_at;
2790 eopno < recog_data.n_operands;
2791 eopno++)
2792 /* Ignore earlyclobber operands now in memory,
2793 because we would often report failure when we have
2794 two memory operands, one of which was formerly a REG. */
2795 if (earlyclobber[eopno]
2796 && REG_P (recog_data.operand[eopno]))
2797 for (opno = 0; opno < recog_data.n_operands; opno++)
2798 if ((MEM_P (recog_data.operand[opno])
2799 || recog_data.operand_type[opno] != OP_OUT)
2800 && opno != eopno
2801 /* Ignore things like match_operator operands. */
2802 && *recog_data.constraints[opno] != 0
2803 && ! (matching_operands[opno] == eopno
2804 && operands_match_p (recog_data.operand[opno],
2805 recog_data.operand[eopno]))
2806 && ! safe_from_earlyclobber (recog_data.operand[opno],
2807 recog_data.operand[eopno]))
2808 lose = 1;
2809
2810 if (! lose)
2811 {
2812 while (--funny_match_index >= 0)
2813 {
2814 recog_data.operand[funny_match[funny_match_index].other]
2815 = recog_data.operand[funny_match[funny_match_index].this_op];
2816 }
2817
2818 #ifdef AUTO_INC_DEC
2819 /* For operands without < or > constraints reject side-effects. */
2820 if (recog_data.is_asm)
2821 {
2822 for (opno = 0; opno < recog_data.n_operands; opno++)
2823 if (MEM_P (recog_data.operand[opno]))
2824 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2825 {
2826 case PRE_INC:
2827 case POST_INC:
2828 case PRE_DEC:
2829 case POST_DEC:
2830 case PRE_MODIFY:
2831 case POST_MODIFY:
2832 if (strchr (recog_data.constraints[opno], '<') == NULL
2833 && strchr (recog_data.constraints[opno], '>')
2834 == NULL)
2835 return 0;
2836 break;
2837 default:
2838 break;
2839 }
2840 }
2841 #endif
2842 return 1;
2843 }
2844 }
2845
2846 which_alternative++;
2847 }
2848 while (which_alternative < recog_data.n_alternatives);
2849
2850 which_alternative = -1;
2851 /* If we are about to reject this, but we are not to test strictly,
2852 try a very loose test. Only return failure if it fails also. */
2853 if (strict == 0)
2854 return constrain_operands (-1);
2855 else
2856 return 0;
2857 }
2858
2859 /* Return true iff OPERAND (assumed to be a REG rtx)
2860 is a hard reg in class CLASS when its regno is offset by OFFSET
2861 and changed to mode MODE.
2862 If OPERAND occupies multiple hard regs, all of them must be in CL.  */
2863
2864 bool
2865 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2866 enum machine_mode mode)
2867 {
2868 unsigned int regno = REGNO (operand);
2869
2870 if (cl == NO_REGS)
2871 return false;
2872
2873 /* Regno must not be a pseudo register. Offset may be negative. */
2874 return (HARD_REGISTER_NUM_P (regno)
2875 && HARD_REGISTER_NUM_P (regno + offset)
2876 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2877 regno + offset));
2878 }
2879 \f
2880 /* Split a single instruction.  Helper function for split_all_insns and
2881 split_all_insns_noflow. Return last insn in the sequence if successful,
2882 or NULL if unsuccessful. */
2883
2884 static rtx
2885 split_insn (rtx insn)
2886 {
2887 /* Split insns here to get max fine-grain parallelism. */
2888 rtx first = PREV_INSN (insn);
2889 rtx last = try_split (PATTERN (insn), insn, 1);
2890 rtx insn_set, last_set, note;
2891
2892 if (last == insn)
2893 return NULL_RTX;
2894
2895 /* If the original instruction was a single set that was known to be
2896 equivalent to a constant, see if we can say the same about the last
2897 instruction in the split sequence. The two instructions must set
2898 the same destination. */
2899 insn_set = single_set (insn);
2900 if (insn_set)
2901 {
2902 last_set = single_set (last);
2903 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2904 {
2905 note = find_reg_equal_equiv_note (insn);
2906 if (note && CONSTANT_P (XEXP (note, 0)))
2907 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2908 else if (CONSTANT_P (SET_SRC (insn_set)))
2909 set_unique_reg_note (last, REG_EQUAL,
2910 copy_rtx (SET_SRC (insn_set)));
2911 }
2912 }
2913
2914 /* try_split returns the NOTE that INSN became. */
2915 SET_INSN_DELETED (insn);
2916
2917 /* ??? Coddle to md files that generate subregs in post-reload
2918 splitters instead of computing the proper hard register. */
2919 if (reload_completed && first != last)
2920 {
2921 first = NEXT_INSN (first);
2922 for (;;)
2923 {
2924 if (INSN_P (first))
2925 cleanup_subreg_operands (first);
2926 if (first == last)
2927 break;
2928 first = NEXT_INSN (first);
2929 }
2930 }
2931
2932 return last;
2933 }
2934
2935 /* Split all insns in the function.  */
2936
2937 void
2938 split_all_insns (void)
2939 {
2940 sbitmap blocks;
2941 bool changed;
2942 basic_block bb;
2943
2944 blocks = sbitmap_alloc (last_basic_block);
2945 bitmap_clear (blocks);
2946 changed = false;
2947
2948 FOR_EACH_BB_REVERSE (bb)
2949 {
2950 rtx insn, next;
2951 bool finish = false;
2952
2953 rtl_profile_for_bb (bb);
2954 for (insn = BB_HEAD (bb); !finish ; insn = next)
2955 {
2956 /* Can't use `next_real_insn' because that might go across
2957 CODE_LABELS and short-out basic blocks. */
2958 next = NEXT_INSN (insn);
2959 finish = (insn == BB_END (bb));
2960 if (INSN_P (insn))
2961 {
2962 rtx set = single_set (insn);
2963
2964 /* Don't split no-op move insns. These should silently
2965 disappear later in final. Splitting such insns would
2966 break the code that handles LIBCALL blocks. */
2967 if (set && set_noop_p (set))
2968 {
2969 /* Nops get in the way while scheduling, so delete them
2970 now if register allocation has already been done. It
2971 is too risky to try to do this before register
2972 allocation, and there are unlikely to be very many
2973 nops then anyway.  */
2974 if (reload_completed)
2975 delete_insn_and_edges (insn);
2976 }
2977 else
2978 {
2979 if (split_insn (insn))
2980 {
2981 bitmap_set_bit (blocks, bb->index);
2982 changed = true;
2983 }
2984 }
2985 }
2986 }
2987 }
2988
2989 default_rtl_profile ();
2990 if (changed)
2991 find_many_sub_basic_blocks (blocks);
2992
2993 #ifdef ENABLE_CHECKING
2994 verify_flow_info ();
2995 #endif
2996
2997 sbitmap_free (blocks);
2998 }
2999
3000 /* Same as split_all_insns, but do not expect CFG to be available.
3001 Used by machine dependent reorg passes. */
3002
3003 unsigned int
3004 split_all_insns_noflow (void)
3005 {
3006 rtx next, insn;
3007
3008 for (insn = get_insns (); insn; insn = next)
3009 {
3010 next = NEXT_INSN (insn);
3011 if (INSN_P (insn))
3012 {
3013 /* Don't split no-op move insns. These should silently
3014 disappear later in final. Splitting such insns would
3015 break the code that handles LIBCALL blocks. */
3016 rtx set = single_set (insn);
3017 if (set && set_noop_p (set))
3018 {
3019 /* Nops get in the way while scheduling, so delete them
3020 now if register allocation has already been done. It
3021 is too risky to try to do this before register
3022 allocation, and there are unlikely to be very many
3023 nops then anyway.
3024
3025 ??? Should we use delete_insn when the CFG isn't valid? */
3026 if (reload_completed)
3027 delete_insn_and_edges (insn);
3028 }
3029 else
3030 split_insn (insn);
3031 }
3032 }
3033 return 0;
3034 }
3035 \f
3036 #ifdef HAVE_peephole2
3037 struct peep2_insn_data
3038 {
3039 rtx insn;
3040 regset live_before;
3041 };
3042
3043 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3044 static int peep2_current;
3045
3046 static bool peep2_do_rebuild_jump_labels;
3047 static bool peep2_do_cleanup_cfg;
3048
3049 /* The number of instructions available to match a peep2. */
3050 int peep2_current_count;
3051
3052 /* A non-insn marker indicating the last insn of the block.
3053 The live_before regset for this element is correct, indicating
3054 DF_LIVE_OUT for the block. */
3055 #define PEEP2_EOB pc_rtx
3056
3057 /* Wrap N to fit into the peep2_insn_data buffer. */
3058
3059 static int
3060 peep2_buf_position (int n)
3061 {
3062 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3063 n -= MAX_INSNS_PER_PEEP2 + 1;
3064 return n;
3065 }
3066
3067 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3068 does not exist. Used by the recognizer to find the next insn to match
3069 in a multi-insn pattern. */
3070
3071 rtx
3072 peep2_next_insn (int n)
3073 {
3074 gcc_assert (n <= peep2_current_count);
3075
3076 n = peep2_buf_position (peep2_current + n);
3077
3078 return peep2_insn_data[n].insn;
3079 }
3080
3081 /* Return true if REGNO is dead before the Nth non-note insn
3082 after `current'. */
3083
3084 int
3085 peep2_regno_dead_p (int ofs, int regno)
3086 {
3087 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3088
3089 ofs = peep2_buf_position (peep2_current + ofs);
3090
3091 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3092
3093 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3094 }
3095
3096 /* Similarly for a REG. */
3097
3098 int
3099 peep2_reg_dead_p (int ofs, rtx reg)
3100 {
3101 int regno, n;
3102
3103 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3104
3105 ofs = peep2_buf_position (peep2_current + ofs);
3106
3107 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3108
3109 regno = REGNO (reg);
3110 n = hard_regno_nregs[regno][GET_MODE (reg)];
3111 while (--n >= 0)
3112 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3113 return 0;
3114 return 1;
3115 }
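
/* A minimal sketch of how the two queries above are typically used from the
   C condition of a define_peephole2 (illustrative only; the helper name and
   the offset 2 are assumptions): require that OP0 be a register that is
   dead after the second insn of the matched sequence.  */

static bool ATTRIBUTE_UNUSED
example_operand_dead_after_second_insn (rtx op0)
{
  return REG_P (op0) && peep2_reg_dead_p (2, op0);
}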
3116
3117 /* Regno offset to be used in the register search. */
3118 static int search_ofs;
3119
3120 /* Try to find a hard register of mode MODE, matching the register class in
3121 CLASS_STR, that is available from the start of the insn at peephole buffer
3122 offset FROM and remains available until the start of the insn at offset TO.
3123 FROM and TO are offsets from the current position in the peep2 buffer, in
3124 the same sense as for peep2_next_insn.
3125 Registers that already have bits set in REG_SET will not be considered.
3126
3127 If an appropriate register is available, it will be returned and the
3128 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3129 returned. */
3130
3131 rtx
3132 peep2_find_free_register (int from, int to, const char *class_str,
3133 enum machine_mode mode, HARD_REG_SET *reg_set)
3134 {
3135 enum reg_class cl;
3136 HARD_REG_SET live;
3137 df_ref *def_rec;
3138 int i;
3139
3140 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3141 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3142
3143 from = peep2_buf_position (peep2_current + from);
3144 to = peep2_buf_position (peep2_current + to);
3145
3146 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3147 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3148
3149 while (from != to)
3150 {
3151 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3152
3153 /* Don't use registers set or clobbered by the insn. */
3154 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3155 *def_rec; def_rec++)
3156 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3157
3158 from = peep2_buf_position (from + 1);
3159 }
3160
3161 cl = (class_str[0] == 'r' ? GENERAL_REGS
3162 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3163
3164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3165 {
3166 int raw_regno, regno, success, j;
3167
3168 /* Distribute the free registers as much as possible. */
3169 raw_regno = search_ofs + i;
3170 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3171 raw_regno -= FIRST_PSEUDO_REGISTER;
3172 #ifdef REG_ALLOC_ORDER
3173 regno = reg_alloc_order[raw_regno];
3174 #else
3175 regno = raw_regno;
3176 #endif
3177
3178 /* Can it support the mode we need? */
3179 if (! HARD_REGNO_MODE_OK (regno, mode))
3180 continue;
3181
3182 success = 1;
3183 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3184 {
3185 /* Don't allocate fixed registers. */
3186 if (fixed_regs[regno + j])
3187 {
3188 success = 0;
3189 break;
3190 }
3191 /* Don't allocate global registers. */
3192 if (global_regs[regno + j])
3193 {
3194 success = 0;
3195 break;
3196 }
3197 /* Make sure the register is of the right class. */
3198 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3199 {
3200 success = 0;
3201 break;
3202 }
3203 /* And that we don't create an extra save/restore. */
3204 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3205 {
3206 success = 0;
3207 break;
3208 }
3209
3210 if (! targetm.hard_regno_scratch_ok (regno + j))
3211 {
3212 success = 0;
3213 break;
3214 }
3215
3216 /* And we don't clobber traceback for noreturn functions. */
3217 if ((regno + j == FRAME_POINTER_REGNUM
3218 || regno + j == HARD_FRAME_POINTER_REGNUM)
3219 && (! reload_completed || frame_pointer_needed))
3220 {
3221 success = 0;
3222 break;
3223 }
3224
3225 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3226 || TEST_HARD_REG_BIT (live, regno + j))
3227 {
3228 success = 0;
3229 break;
3230 }
3231 }
3232
3233 if (success)
3234 {
3235 add_to_hard_reg_set (reg_set, mode, regno);
3236
3237 /* Start the next search with the next register. */
3238 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3239 raw_regno = 0;
3240 search_ofs = raw_regno;
3241
3242 return gen_rtx_REG (mode, regno);
3243 }
3244 }
3245
3246 search_ofs = 0;
3247 return NULL_RTX;
3248 }
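
/* A minimal sketch of a caller of peep2_find_free_register (illustrative
   only; the helper name, the "r" constraint and word_mode are assumptions):
   ask for a general register that is free across the first two insns of the
   current peephole window, or NULL_RTX if none is available.  */

static rtx ATTRIBUTE_UNUSED
example_find_scratch_for_peephole (void)
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", word_mode, &used);
}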
3249
3250 /* Forget all currently tracked instructions; remember only the current
3251 LIVE regset.  */
3252
3253 static void
3254 peep2_reinit_state (regset live)
3255 {
3256 int i;
3257
3258 /* Indicate that all slots except the last hold invalid data.  */
3259 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3260 peep2_insn_data[i].insn = NULL_RTX;
3261 peep2_current_count = 0;
3262
3263 /* Indicate that the last slot contains live_after data. */
3264 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3265 peep2_current = MAX_INSNS_PER_PEEP2;
3266
3267 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3268 }
3269
3270 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3271 starting at INSN. Perform the replacement, removing the old insns and
3272 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3273 if the replacement is rejected. */
3274
3275 static rtx
3276 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3277 {
3278 int i;
3279 rtx last, eh_note, as_note, before_try, x;
3280 rtx old_insn, new_insn;
3281 bool was_call = false;
3282
3283 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3284 match more than one insn, or to be split into more than one insn. */
3285 old_insn = peep2_insn_data[peep2_current].insn;
3286 if (RTX_FRAME_RELATED_P (old_insn))
3287 {
3288 bool any_note = false;
3289 rtx note;
3290
3291 if (match_len != 0)
3292 return NULL;
3293
3294 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3295 may be in the stream for the purpose of register allocation. */
3296 if (active_insn_p (attempt))
3297 new_insn = attempt;
3298 else
3299 new_insn = next_active_insn (attempt);
3300 if (next_active_insn (new_insn))
3301 return NULL;
3302
3303 /* We have a 1-1 replacement. Copy over any frame-related info. */
3304 RTX_FRAME_RELATED_P (new_insn) = 1;
3305
3306 /* Allow the backend to fill in a note during the split. */
3307 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3308 switch (REG_NOTE_KIND (note))
3309 {
3310 case REG_FRAME_RELATED_EXPR:
3311 case REG_CFA_DEF_CFA:
3312 case REG_CFA_ADJUST_CFA:
3313 case REG_CFA_OFFSET:
3314 case REG_CFA_REGISTER:
3315 case REG_CFA_EXPRESSION:
3316 case REG_CFA_RESTORE:
3317 case REG_CFA_SET_VDRAP:
3318 any_note = true;
3319 break;
3320 default:
3321 break;
3322 }
3323
3324 /* If the backend didn't supply a note, copy one over. */
3325 if (!any_note)
3326 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3327 switch (REG_NOTE_KIND (note))
3328 {
3329 case REG_FRAME_RELATED_EXPR:
3330 case REG_CFA_DEF_CFA:
3331 case REG_CFA_ADJUST_CFA:
3332 case REG_CFA_OFFSET:
3333 case REG_CFA_REGISTER:
3334 case REG_CFA_EXPRESSION:
3335 case REG_CFA_RESTORE:
3336 case REG_CFA_SET_VDRAP:
3337 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3338 any_note = true;
3339 break;
3340 default:
3341 break;
3342 }
3343
3344 /* If there still isn't a note, make sure the unwind info sees the
3345 same expression as before the split. */
3346 if (!any_note)
3347 {
3348 rtx old_set, new_set;
3349
3350 /* The old insn had better have been simple, or annotated. */
3351 old_set = single_set (old_insn);
3352 gcc_assert (old_set != NULL);
3353
3354 new_set = single_set (new_insn);
3355 if (!new_set || !rtx_equal_p (new_set, old_set))
3356 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3357 }
3358
3359 /* Copy prologue/epilogue status. This is required in order to keep
3360 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3361 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3362 }
3363
3364 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3365 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3366 cfg-related call notes. */
3367 for (i = 0; i <= match_len; ++i)
3368 {
3369 int j;
3370 rtx note;
3371
3372 j = peep2_buf_position (peep2_current + i);
3373 old_insn = peep2_insn_data[j].insn;
3374 if (!CALL_P (old_insn))
3375 continue;
3376 was_call = true;
3377
3378 new_insn = attempt;
3379 while (new_insn != NULL_RTX)
3380 {
3381 if (CALL_P (new_insn))
3382 break;
3383 new_insn = NEXT_INSN (new_insn);
3384 }
3385
3386 gcc_assert (new_insn != NULL_RTX);
3387
3388 CALL_INSN_FUNCTION_USAGE (new_insn)
3389 = CALL_INSN_FUNCTION_USAGE (old_insn);
3390
3391 for (note = REG_NOTES (old_insn);
3392 note;
3393 note = XEXP (note, 1))
3394 switch (REG_NOTE_KIND (note))
3395 {
3396 case REG_NORETURN:
3397 case REG_SETJMP:
3398 case REG_TM:
3399 add_reg_note (new_insn, REG_NOTE_KIND (note),
3400 XEXP (note, 0));
3401 break;
3402 default:
3403 /* Discard all other reg notes. */
3404 break;
3405 }
3406
3407 /* Croak if there is another call in the sequence. */
3408 while (++i <= match_len)
3409 {
3410 j = peep2_buf_position (peep2_current + i);
3411 old_insn = peep2_insn_data[j].insn;
3412 gcc_assert (!CALL_P (old_insn));
3413 }
3414 break;
3415 }
3416
3417 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3418 move those notes over to the new sequence. */
3419 as_note = NULL;
3420 for (i = match_len; i >= 0; --i)
3421 {
3422 int j = peep2_buf_position (peep2_current + i);
3423 old_insn = peep2_insn_data[j].insn;
3424
3425 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3426 if (as_note)
3427 break;
3428 }
3429
3430 i = peep2_buf_position (peep2_current + match_len);
3431 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3432
3433 /* Replace the old sequence with the new. */
3434 last = emit_insn_after_setloc (attempt,
3435 peep2_insn_data[i].insn,
3436 INSN_LOCATION (peep2_insn_data[i].insn));
3437 before_try = PREV_INSN (insn);
3438 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3439
3440 /* Re-insert the EH_REGION notes. */
3441 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3442 {
3443 edge eh_edge;
3444 edge_iterator ei;
3445
3446 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3447 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3448 break;
3449
3450 if (eh_note)
3451 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3452
3453 if (eh_edge)
3454 for (x = last; x != before_try; x = PREV_INSN (x))
3455 if (x != BB_END (bb)
3456 && (can_throw_internal (x)
3457 || can_nonlocal_goto (x)))
3458 {
3459 edge nfte, nehe;
3460 int flags;
3461
3462 nfte = split_block (bb, x);
3463 flags = (eh_edge->flags
3464 & (EDGE_EH | EDGE_ABNORMAL));
3465 if (CALL_P (x))
3466 flags |= EDGE_ABNORMAL_CALL;
3467 nehe = make_edge (nfte->src, eh_edge->dest,
3468 flags);
3469
3470 nehe->probability = eh_edge->probability;
3471 nfte->probability
3472 = REG_BR_PROB_BASE - nehe->probability;
3473
3474 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3475 bb = nfte->src;
3476 eh_edge = nehe;
3477 }
3478
3479 /* The replacement may have turned a possibly trapping insn into a
3480 non-trapping one; zap any outgoing edges that are now dead.  */
3481 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3482 }
3483
3484 /* Re-insert the ARGS_SIZE notes. */
3485 if (as_note)
3486 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3487
3488 /* If we generated a jump instruction, it won't have
3489 JUMP_LABEL set. Recompute after we're done. */
3490 for (x = last; x != before_try; x = PREV_INSN (x))
3491 if (JUMP_P (x))
3492 {
3493 peep2_do_rebuild_jump_labels = true;
3494 break;
3495 }
3496
3497 return last;
3498 }
3499
3500 /* After performing a replacement in basic block BB, fix up the life
3501 information in our buffer. LAST is the last of the insns that we
3502 emitted as a replacement. PREV is the insn before the start of
3503 the replacement. MATCH_LEN is the number of instructions that were
3504 matched, and which now need to be replaced in the buffer. */
3505
3506 static void
3507 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3508 {
3509 int i = peep2_buf_position (peep2_current + match_len + 1);
3510 rtx x;
3511 regset_head live;
3512
3513 INIT_REG_SET (&live);
3514 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3515
3516 gcc_assert (peep2_current_count >= match_len + 1);
3517 peep2_current_count -= match_len + 1;
3518
3519 x = last;
3520 do
3521 {
3522 if (INSN_P (x))
3523 {
3524 df_insn_rescan (x);
3525 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3526 {
3527 peep2_current_count++;
3528 if (--i < 0)
3529 i = MAX_INSNS_PER_PEEP2;
3530 peep2_insn_data[i].insn = x;
3531 df_simulate_one_insn_backwards (bb, x, &live);
3532 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3533 }
3534 }
3535 x = PREV_INSN (x);
3536 }
3537 while (x != prev);
3538 CLEAR_REG_SET (&live);
3539
3540 peep2_current = i;
3541 }
3542
3543 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3544 Return true if we added it, false otherwise. The caller will try to match
3545 peepholes against the buffer if we return false; otherwise it will try to
3546 add more instructions to the buffer. */
3547
3548 static bool
3549 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3550 {
3551 int pos;
3552
3553 /* Once we have filled the maximum number of insns the buffer can hold,
3554 allow the caller to match the insns against peepholes. We wait until
3555 the buffer is full in case the target has similar peepholes of different
3556 length; we always want to match the longest if possible. */
3557 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3558 return false;
3559
3560 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3561 any other pattern, lest it change the semantics of the frame info. */
3562 if (RTX_FRAME_RELATED_P (insn))
3563 {
3564 /* Let the buffer drain first. */
3565 if (peep2_current_count > 0)
3566 return false;
3567 /* Now the insn will be the only thing in the buffer. */
3568 }
3569
3570 pos = peep2_buf_position (peep2_current + peep2_current_count);
3571 peep2_insn_data[pos].insn = insn;
3572 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3573 peep2_current_count++;
3574
3575 df_simulate_one_insn_forwards (bb, insn, live);
3576 return true;
3577 }
3578
3579 /* Perform the peephole2 optimization pass. */
3580
3581 static void
3582 peephole2_optimize (void)
3583 {
3584 rtx insn;
3585 bitmap live;
3586 int i;
3587 basic_block bb;
3588
3589 peep2_do_cleanup_cfg = false;
3590 peep2_do_rebuild_jump_labels = false;
3591
3592 df_set_flags (DF_LR_RUN_DCE);
3593 df_note_add_problem ();
3594 df_analyze ();
3595
3596 /* Initialize the regsets we're going to use. */
3597 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3598 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3599 search_ofs = 0;
3600 live = BITMAP_ALLOC (&reg_obstack);
3601
3602 FOR_EACH_BB_REVERSE (bb)
3603 {
3604 bool past_end = false;
3605 int pos;
3606
3607 rtl_profile_for_bb (bb);
3608
3609 /* Start up propagation. */
3610 bitmap_copy (live, DF_LR_IN (bb));
3611 df_simulate_initialize_forwards (bb, live);
3612 peep2_reinit_state (live);
3613
3614 insn = BB_HEAD (bb);
3615 for (;;)
3616 {
3617 rtx attempt, head;
3618 int match_len;
3619
3620 if (!past_end && !NONDEBUG_INSN_P (insn))
3621 {
3622 next_insn:
3623 insn = NEXT_INSN (insn);
3624 if (insn == NEXT_INSN (BB_END (bb)))
3625 past_end = true;
3626 continue;
3627 }
3628 if (!past_end && peep2_fill_buffer (bb, insn, live))
3629 goto next_insn;
3630
3631 /* If we did not fill an empty buffer, it signals the end of the
3632 block. */
3633 if (peep2_current_count == 0)
3634 break;
3635
3636 /* The buffer filled to the current maximum, so try to match. */
3637
3638 pos = peep2_buf_position (peep2_current + peep2_current_count);
3639 peep2_insn_data[pos].insn = PEEP2_EOB;
3640 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3641
3642 /* Match the peephole. */
3643 head = peep2_insn_data[peep2_current].insn;
3644 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3645 if (attempt != NULL)
3646 {
3647 rtx last = peep2_attempt (bb, head, match_len, attempt);
3648 if (last)
3649 {
3650 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3651 continue;
3652 }
3653 }
3654
3655 /* No match: advance the buffer by one insn. */
3656 peep2_current = peep2_buf_position (peep2_current + 1);
3657 peep2_current_count--;
3658 }
3659 }
3660
3661 default_rtl_profile ();
3662 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3663 BITMAP_FREE (peep2_insn_data[i].live_before);
3664 BITMAP_FREE (live);
3665 if (peep2_do_rebuild_jump_labels)
3666 rebuild_jump_labels (get_insns ());
3667 }
3668 #endif /* HAVE_peephole2 */
3669
3670 /* Common predicates for use with define_bypass. */
3671
3672 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3673 data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3674 must be either a single_set or a PARALLEL with SETs inside. */
3675
3676 int
3677 store_data_bypass_p (rtx out_insn, rtx in_insn)
3678 {
3679 rtx out_set, in_set;
3680 rtx out_pat, in_pat;
3681 rtx out_exp, in_exp;
3682 int i, j;
3683
3684 in_set = single_set (in_insn);
3685 if (in_set)
3686 {
3687 if (!MEM_P (SET_DEST (in_set)))
3688 return false;
3689
3690 out_set = single_set (out_insn);
3691 if (out_set)
3692 {
3693 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3694 return false;
3695 }
3696 else
3697 {
3698 out_pat = PATTERN (out_insn);
3699
3700 if (GET_CODE (out_pat) != PARALLEL)
3701 return false;
3702
3703 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3704 {
3705 out_exp = XVECEXP (out_pat, 0, i);
3706
3707 if (GET_CODE (out_exp) == CLOBBER)
3708 continue;
3709
3710 gcc_assert (GET_CODE (out_exp) == SET);
3711
3712 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3713 return false;
3714 }
3715 }
3716 }
3717 else
3718 {
3719 in_pat = PATTERN (in_insn);
3720 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3721
3722 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3723 {
3724 in_exp = XVECEXP (in_pat, 0, i);
3725
3726 if (GET_CODE (in_exp) == CLOBBER)
3727 continue;
3728
3729 gcc_assert (GET_CODE (in_exp) == SET);
3730
3731 if (!MEM_P (SET_DEST (in_exp)))
3732 return false;
3733
3734 out_set = single_set (out_insn);
3735 if (out_set)
3736 {
3737 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3738 return false;
3739 }
3740 else
3741 {
3742 out_pat = PATTERN (out_insn);
3743 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3744
3745 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3746 {
3747 out_exp = XVECEXP (out_pat, 0, j);
3748
3749 if (GET_CODE (out_exp) == CLOBBER)
3750 continue;
3751
3752 gcc_assert (GET_CODE (out_exp) == SET);
3753
3754 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3755 return false;
3756 }
3757 }
3758 }
3759 }
3760
3761 return true;
3762 }
3763
3764 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3765 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3766 or multiple set; IN_INSN should be a single_set for the result to be exact,
3767 but for convenience of insn categorization it may be any JUMP or CALL insn.  */
3768
3769 int
3770 if_test_bypass_p (rtx out_insn, rtx in_insn)
3771 {
3772 rtx out_set, in_set;
3773
3774 in_set = single_set (in_insn);
3775 if (! in_set)
3776 {
3777 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3778 return false;
3779 }
3780
3781 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3782 return false;
3783 in_set = SET_SRC (in_set);
3784
3785 out_set = single_set (out_insn);
3786 if (out_set)
3787 {
3788 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3789 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3790 return false;
3791 }
3792 else
3793 {
3794 rtx out_pat;
3795 int i;
3796
3797 out_pat = PATTERN (out_insn);
3798 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3799
3800 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3801 {
3802 rtx exp = XVECEXP (out_pat, 0, i);
3803
3804 if (GET_CODE (exp) == CLOBBER)
3805 continue;
3806
3807 gcc_assert (GET_CODE (exp) == SET);
3808
3809 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3810 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3811 return false;
3812 }
3813 }
3814
3815 return true;
3816 }
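
/* Both predicates above are meant to be named as the optional GUARD of a
   define_bypass in a machine description.  A hypothetical example (the
   reservation names are assumptions, not taken from any real port):

     (define_bypass 1 "my_store" "my_load" "store_data_bypass_p")

   which applies the bypass latency of 1 only when the dependency is on the
   data being stored rather than on the address of the store.  */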
3817 \f
3818 static bool
3819 gate_handle_peephole2 (void)
3820 {
3821 return (optimize > 0 && flag_peephole2);
3822 }
3823
3824 static unsigned int
3825 rest_of_handle_peephole2 (void)
3826 {
3827 #ifdef HAVE_peephole2
3828 peephole2_optimize ();
3829 #endif
3830 return 0;
3831 }
3832
3833 namespace {
3834
3835 const pass_data pass_data_peephole2 =
3836 {
3837 RTL_PASS, /* type */
3838 "peephole2", /* name */
3839 OPTGROUP_NONE, /* optinfo_flags */
3840 true, /* has_gate */
3841 true, /* has_execute */
3842 TV_PEEPHOLE2, /* tv_id */
3843 0, /* properties_required */
3844 0, /* properties_provided */
3845 0, /* properties_destroyed */
3846 0, /* todo_flags_start */
3847 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3848 };
3849
3850 class pass_peephole2 : public rtl_opt_pass
3851 {
3852 public:
3853 pass_peephole2 (gcc::context *ctxt)
3854 : rtl_opt_pass (pass_data_peephole2, ctxt)
3855 {}
3856
3857 /* opt_pass methods: */
3858 /* The epiphany backend creates a second instance of this pass, so we need
3859 a clone method. */
3860 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3861 bool gate () { return gate_handle_peephole2 (); }
3862 unsigned int execute () { return rest_of_handle_peephole2 (); }
3863
3864 }; // class pass_peephole2
3865
3866 } // anon namespace
3867
3868 rtl_opt_pass *
3869 make_pass_peephole2 (gcc::context *ctxt)
3870 {
3871 return new pass_peephole2 (ctxt);
3872 }
3873
3874 static unsigned int
3875 rest_of_handle_split_all_insns (void)
3876 {
3877 split_all_insns ();
3878 return 0;
3879 }
3880
3881 namespace {
3882
3883 const pass_data pass_data_split_all_insns =
3884 {
3885 RTL_PASS, /* type */
3886 "split1", /* name */
3887 OPTGROUP_NONE, /* optinfo_flags */
3888 false, /* has_gate */
3889 true, /* has_execute */
3890 TV_NONE, /* tv_id */
3891 0, /* properties_required */
3892 0, /* properties_provided */
3893 0, /* properties_destroyed */
3894 0, /* todo_flags_start */
3895 0, /* todo_flags_finish */
3896 };
3897
3898 class pass_split_all_insns : public rtl_opt_pass
3899 {
3900 public:
3901 pass_split_all_insns (gcc::context *ctxt)
3902 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3903 {}
3904
3905 /* opt_pass methods: */
3906 /* The epiphany backend creates a second instance of this pass, so
3907 we need a clone method. */
3908 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3909 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3910
3911 }; // class pass_split_all_insns
3912
3913 } // anon namespace
3914
3915 rtl_opt_pass *
3916 make_pass_split_all_insns (gcc::context *ctxt)
3917 {
3918 return new pass_split_all_insns (ctxt);
3919 }
3920
3921 static unsigned int
3922 rest_of_handle_split_after_reload (void)
3923 {
3924 /* If optimizing, then go ahead and split insns now. */
3925 #ifndef STACK_REGS
3926 if (optimize > 0)
3927 #endif
3928 split_all_insns ();
3929 return 0;
3930 }
3931
3932 namespace {
3933
3934 const pass_data pass_data_split_after_reload =
3935 {
3936 RTL_PASS, /* type */
3937 "split2", /* name */
3938 OPTGROUP_NONE, /* optinfo_flags */
3939 false, /* has_gate */
3940 true, /* has_execute */
3941 TV_NONE, /* tv_id */
3942 0, /* properties_required */
3943 0, /* properties_provided */
3944 0, /* properties_destroyed */
3945 0, /* todo_flags_start */
3946 0, /* todo_flags_finish */
3947 };
3948
3949 class pass_split_after_reload : public rtl_opt_pass
3950 {
3951 public:
3952 pass_split_after_reload (gcc::context *ctxt)
3953 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3954 {}
3955
3956 /* opt_pass methods: */
3957 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3958
3959 }; // class pass_split_after_reload
3960
3961 } // anon namespace
3962
3963 rtl_opt_pass *
3964 make_pass_split_after_reload (gcc::context *ctxt)
3965 {
3966 return new pass_split_after_reload (ctxt);
3967 }
3968
3969 static bool
3970 gate_handle_split_before_regstack (void)
3971 {
3972 #if HAVE_ATTR_length && defined (STACK_REGS)
3973 /* If flow2 creates new instructions which need splitting
3974 and scheduling after reload is not done, they might not be
3975 split until final, which does not allow splitting
3976 if HAVE_ATTR_length is defined.  */
3977 # ifdef INSN_SCHEDULING
3978 return (optimize && !flag_schedule_insns_after_reload);
3979 # else
3980 return optimize > 0;
3981 # endif
3982 #else
3983 return false;
3984 #endif
3985 }
3986
3987 static unsigned int
3988 rest_of_handle_split_before_regstack (void)
3989 {
3990 split_all_insns ();
3991 return 0;
3992 }
3993
3994 namespace {
3995
3996 const pass_data pass_data_split_before_regstack =
3997 {
3998 RTL_PASS, /* type */
3999 "split3", /* name */
4000 OPTGROUP_NONE, /* optinfo_flags */
4001 true, /* has_gate */
4002 true, /* has_execute */
4003 TV_NONE, /* tv_id */
4004 0, /* properties_required */
4005 0, /* properties_provided */
4006 0, /* properties_destroyed */
4007 0, /* todo_flags_start */
4008 0, /* todo_flags_finish */
4009 };
4010
4011 class pass_split_before_regstack : public rtl_opt_pass
4012 {
4013 public:
4014 pass_split_before_regstack (gcc::context *ctxt)
4015 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4016 {}
4017
4018 /* opt_pass methods: */
4019 bool gate () { return gate_handle_split_before_regstack (); }
4020 unsigned int execute () {
4021 return rest_of_handle_split_before_regstack ();
4022 }
4023
4024 }; // class pass_split_before_regstack
4025
4026 } // anon namespace
4027
4028 rtl_opt_pass *
4029 make_pass_split_before_regstack (gcc::context *ctxt)
4030 {
4031 return new pass_split_before_regstack (ctxt);
4032 }
4033
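/* Presumably this split exists so that the second scheduling pass operates
   on the insns that will actually be emitted, rather than on unsplit
   compound patterns.  */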
4034 static bool
4035 gate_handle_split_before_sched2 (void)
4036 {
4037 #ifdef INSN_SCHEDULING
4038 return optimize > 0 && flag_schedule_insns_after_reload;
4039 #else
4040 return false;
4041 #endif
4042 }
4043
4044 static unsigned int
4045 rest_of_handle_split_before_sched2 (void)
4046 {
4047 #ifdef INSN_SCHEDULING
4048 split_all_insns ();
4049 #endif
4050 return 0;
4051 }
4052
4053 namespace {
4054
4055 const pass_data pass_data_split_before_sched2 =
4056 {
4057 RTL_PASS, /* type */
4058 "split4", /* name */
4059 OPTGROUP_NONE, /* optinfo_flags */
4060 true, /* has_gate */
4061 true, /* has_execute */
4062 TV_NONE, /* tv_id */
4063 0, /* properties_required */
4064 0, /* properties_provided */
4065 0, /* properties_destroyed */
4066 0, /* todo_flags_start */
4067 TODO_verify_flow, /* todo_flags_finish */
4068 };
4069
4070 class pass_split_before_sched2 : public rtl_opt_pass
4071 {
4072 public:
4073 pass_split_before_sched2 (gcc::context *ctxt)
4074 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4075 {}
4076
4077 /* opt_pass methods: */
4078 bool gate () { return gate_handle_split_before_sched2 (); }
4079 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4080
4081 }; // class pass_split_before_sched2
4082
4083 } // anon namespace
4084
4085 rtl_opt_pass *
4086 make_pass_split_before_sched2 (gcc::context *ctxt)
4087 {
4088 return new pass_split_before_sched2 (ctxt);
4089 }
4090
4091 /* Where the splitting needed for shorten_branches is placed depends on
4092 whether the target uses regstack (see the excerpt after this function).  */
4093 static bool
4094 gate_do_final_split (void)
4095 {
4096 #if HAVE_ATTR_length && !defined (STACK_REGS)
4097 return true;
4098 #else
4099 return false;
4100 #endif
4101 }
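/* For illustration only (not part of this file): an abridged and approximate
   excerpt of passes.def showing the two alternative placements.  On
   STACK_REGS targets "split3" runs inside the stack-regs sub-pipeline;
   otherwise "split5" runs just before shorten_branches:

	 NEXT_PASS (pass_sched2);
	 NEXT_PASS (pass_stack_regs);
	 PUSH_INSERT_PASSES_WITHIN (pass_stack_regs)
	     NEXT_PASS (pass_split_before_regstack);
	     NEXT_PASS (pass_stack_regs_run);
	 POP_INSERT_PASSES ()
	 ...
	 NEXT_PASS (pass_split_for_shorten_branches);
	 NEXT_PASS (pass_shorten_branches);  */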
4102
4103 namespace {
4104
4105 const pass_data pass_data_split_for_shorten_branches =
4106 {
4107 RTL_PASS, /* type */
4108 "split5", /* name */
4109 OPTGROUP_NONE, /* optinfo_flags */
4110 true, /* has_gate */
4111 true, /* has_execute */
4112 TV_NONE, /* tv_id */
4113 0, /* properties_required */
4114 0, /* properties_provided */
4115 0, /* properties_destroyed */
4116 0, /* todo_flags_start */
4117 TODO_verify_rtl_sharing, /* todo_flags_finish */
4118 };
4119
4120 class pass_split_for_shorten_branches : public rtl_opt_pass
4121 {
4122 public:
4123 pass_split_for_shorten_branches (gcc::context *ctxt)
4124 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4125 {}
4126
4127 /* opt_pass methods: */
4128 bool gate () { return gate_do_final_split (); }
4129 unsigned int execute () { return split_all_insns_noflow (); }
4130
4131 }; // class pass_split_for_shorten_branches
4132
4133 } // anon namespace
4134
4135 rtl_opt_pass *
4136 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4137 {
4138 return new pass_split_for_shorten_branches (ctxt);
4139 }
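/* Overview of the insn-splitting passes defined above (pipeline positions
   are approximate; the gating conditions follow from the gate and execute
   functions above):

     "split1"  pass_split_all_insns            - before register allocation;
						 no gate.
     "split2"  pass_split_after_reload         - after reload; splits only if
						 optimizing, except on
						 STACK_REGS targets, where it
						 always splits.
     "split3"  pass_split_before_regstack      - before reg-stack conversion;
						 STACK_REGS and HAVE_ATTR_length
						 targets only, when optimizing.
     "split4"  pass_split_before_sched2        - before the second scheduling
						 pass, when scheduling after
						 reload is enabled.
     "split5"  pass_split_for_shorten_branches - before shorten_branches;
						 HAVE_ATTR_length targets
						 without STACK_REGS.  */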