/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument;
   UNSHARE defaults to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument;
   UNSHARE defaults to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
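
/* Illustrative usage sketch, not part of the original sources; INSN, PAT,
   NEW_SRC and NEW_DEST are hypothetical:

     if (validate_change (insn, &SET_SRC (pat), new_src, 0))
       ... the single change was validated and applied immediately ...

     validate_change (insn, &SET_SRC (pat), new_src, 1);
     validate_change (insn, &SET_DEST (pat), new_dest, 1);
     if (! apply_change_group ())
       ... both queued changes were rolled back ...  */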

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
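
/* For example (a sketch, not from the original sources): if a caller's
   substitutions left X as (plus (const_int 4) (reg)), whose canonical form
   puts the constant last, this routine queues the two swaps that restore
   (plus (reg) (const_int 4)) in the current change group.  */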

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
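
/* Illustrative sketch of the partial-rollback idiom these routines support
   (not part of the original sources; INSN, LOC and NEW_X are hypothetical):

     int base = num_validated_changes ();
     validate_change (insn, loc, new_x, 1);
     ... queue further speculative changes ...
     if (verify_changes (base))
       confirm_change_group ();
     else
       cancel_changes (base);

   cancel_changes (base) retracts only the speculative tail, leaving any
   changes numbered below BASE still pending.  */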

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv 0
#define CODE_FOR_extv CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv 0
#define CODE_FOR_extzv CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X) -> (subreg (reg X)),
     so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
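
/* Illustrative usage sketch, not part of the original sources; INSN is a
   hypothetical insn and OLD_REG/NEW_REG hypothetical registers:

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ... every occurrence of OLD_REG in INSN's pattern now reads NEW_REG,
           and INSN was re-recognized successfully ...

   On failure the whole change group is cancelled, so INSN is left
   unchanged.  */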

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
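
/* For instance (a hypothetical case, not from the original sources), if
   INSN's pattern is (set (reg:SI 60) (plus:SI (const_int 2) (const_int 3))),
   simplify_rtx folds the source to (const_int 5), and the queued change is
   then validated by re-recognizing INSN.  */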
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insns with invalid addresses, which are made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
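
/* Illustrative machine-description use (a generic sketch, not from any
   particular target's .md file):

     (define_insn "..."
       [(set (match_operand:SI 0 "register_operand" "=r")
             (match_operand:SI 1 "general_operand" "rmi"))]
       ...)

   Here general_operand accepts a register, a valid memory reference, or a
   legitimate constant of SImode for operand 1.  */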
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
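
/* For instance, on a target where the stack grows downward and
   Pmode == SImode (a generic sketch; the exact codes depend on
   STACK_PUSH_CODE and PUSH_ROUNDING), a simple push would match as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   whose destination satisfies push_operand for SImode.  */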

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
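
/* The recognized shapes, for reference (a sketch, not from the original
   sources): a zero-output asm is a bare (asm_operands ...); one output is
   (set OUTPUT (asm_operands ...)); several outputs and/or clobbers form

     (parallel [(set OUT0 (asm_operands ...))
                (set OUT1 (asm_operands ...))
                (clobber (reg ...))])

   In each case the first ASM_OPERANDS found is the one returned.  */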

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
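
/* For example (a hypothetical call, not from the original sources), with
   STRING == "add %0, %1, %w2" and NOPERANDS == 4, the routine sets used[0],
   used[1] and used[2] to true and leaves used[3] false; "%w2" counts as a
   reference to operand 2 via the letter-plus-digit form.  */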

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
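
/* Illustrative (hypothetical) calls, not part of the original sources:

     asm_operand_ok (gen_rtx_REG (SImode, 100), "r", NULL);  -> 1
     asm_operand_ok (GEN_INT (42), "i", NULL);               -> typically 1
     asm_operand_ok (op, "0", NULL);                         -> -1

   The last call is inconclusive (-1) because a matching-digit constraint
   cannot be verified without the CONSTRAINTS vector it refers into.  */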
1955 \f
1956 /* Given an rtx *P, if it is a sum containing an integer constant term,
1957 return the location (type rtx *) of the pointer to that constant term.
1958 Otherwise, return a null pointer. */
1959
1960 rtx *
1961 find_constant_term_loc (rtx *p)
1962 {
1963 rtx *tem;
1964 enum rtx_code code = GET_CODE (*p);
1965
1966 /* If *P IS such a constant term, P is its location. */
1967
1968 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1969 || code == CONST)
1970 return p;
1971
1972 /* Otherwise, if not a sum, it has no constant term. */
1973
1974 if (GET_CODE (*p) != PLUS)
1975 return 0;
1976
1977 /* If one of the summands is constant, return its location. */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
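
/* Worked example (added for illustration): for *P ==
   (plus (reg 100) (const_int 4)) the function returns &XEXP (*p, 1);
   for a nested sum such as (plus (plus (reg 100) (const_int 4)) (reg 101))
   the recursion finds the inner CONST_INT; for (plus (reg 100) (reg 101))
   a null pointer is returned.  */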
\f
/* Return 1 if OP is a memory reference whose address contains no side
   effects and remains valid after the addition of a positive integer
   less than the size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return (MEM_P (op)
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return (MEM_P (op)
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of MODE.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
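
/* Worked example (added for illustration): to decide whether
   (plus (reg SP) (const_int 8)) is offsettable for SImode, the code
   above temporarily rewrites the constant term to (const_int 11),
   i.e. 8 + mode_sz - 1 with mode_sz == 4, and asks the strict or
   non-strict address predicate whether the result is a valid QImode
   address.  */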

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
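
/* Example (added for illustration): (post_inc (reg X)) is always
   mode-dependent, because a QImode access advances X by 1 while an
   SImode access advances it by 4, so it is caught by the checks above;
   something like (plus (reg X) (const_int 4)) is mode-dependent only
   if the target hook says so.  */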
\f
/* Return the mask of operand alternatives that are allowed for INSN.
   This mask depends only on INSN and on the current target; it does not
   depend on things like the values of operands.  */

alternative_mask
get_enabled_alternatives (rtx insn)
{
  /* Quick exit for asms and for targets that don't use the "enabled"
     attribute.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !HAVE_ATTR_enabled)
    return ALL_ALTERNATIVES;

  /* Calling get_attr_enabled can be expensive, so cache the mask
     for speed.  */
  if (this_target_recog->x_enabled_alternatives[code])
    return this_target_recog->x_enabled_alternatives[code];

  /* Temporarily install enough information for get_attr_enabled to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask enabled = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[code].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_attr_enabled (insn))
	enabled &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;

  this_target_recog->x_enabled_alternatives[code] = enabled;
  return enabled;
}
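
/* Example (added for illustration): for an insn with three alternatives
   of which get_attr_enabled rejects alternative 1, the loop above
   computes the mask ALTERNATIVE_BIT (0) | ALTERNATIVE_BIT (2)
   (binary 101) and caches it in x_enabled_alternatives[code].  */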

/* Like extract_insn, but save the extracted insn and don't extract again
   when called a second time for the same insn, expecting that recog_data
   still contains the valid information.  This is used primarily by the
   gen_attr infrastructure, which often extracts the same insn over and
   over again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
\f
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* A VOIDmode match_operand gets its mode from its real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.enabled_alternatives = get_enabled_alternatives (insn);

  recog_data.insn = NULL;
  which_alternative = -1;
}
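
/* Example (added for illustration): for a typical three-operand add
   pattern whose constraints are "=r", "r" and "rI", extract_insn
   records n_operands == 3, classifies operand 0 as OP_OUT (leading '=')
   and operands 1 and 2 as OP_IN, and leaves which_alternative == -1
   until constrain_operands picks an alternative.  */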

/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
   N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
   OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
   has N_OPERANDS entries.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case TARGET_MEM_CONSTRAINT:
		  op_alt[i].memory_ok = 1;
		  break;
		case '<':
		  op_alt[i].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[i].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[i].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[i].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[i].is_address = 1;
		  op_alt[i].cl = reg_class_subunion[(int) op_alt[i].cl]
		    [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					   ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  op_alt[i].cl =
		    reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
		    {
		      op_alt[i].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    {
		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;
		    }

		  op_alt[i].cl
		    = (reg_class_subunion
		       [(int) op_alt[i].cl]
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
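
/* Example (added for illustration): given one operand whose constraint
   string is "=r,m" and n_alternatives == 2, the loop above records
   cl == GENERAL_REGS for the operand in alternative 0 and sets
   memory_ok for it in alternative 1 (via TARGET_MEM_CONSTRAINT, which
   is 'm' unless the target overrides it).  */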

/* Return an array of operand_alternative structures for
   instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
			      recog_data.constraints, asm_op_alt);
      recog_op_alt = asm_op_alt;
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code, and
   by the routines that determine an insn's attribute.

   If STRICT is positive, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!TEST_BIT (recog_data.enabled_alternatives, which_alternative))
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?': case '!': case '*': case '%':
	      case '=': case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case TARGET_MEM_CONSTRAINT:
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_addr_space_p
			     (GET_MODE (op), XEXP (op, 0),
			      MEM_ADDR_SPACE (op)))
		      break;
		    if (strict == 0
			&& !memory_address_addr_space_p
			     (GET_MODE (op), XEXP (op, 0),
			      MEM_ADDR_SPACE (op)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo.  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (CONST_DOUBLE_AS_FLOAT_P (op)
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (CONST_DOUBLE_AS_FLOAT_P (op)
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (CONST_SCALAR_INT_P (op))
		  break;
		/* Fall through.  */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (CONST_SCALAR_INT_P (op))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (CONST_INT_P (op)
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo.  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo.  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && EXTRA_CONSTRAINT_STR
			        (reg_equiv_mem (REGNO (op)), c, p))
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

#ifdef AUTO_INC_DEC
	      /* For operands without < or > constraints reject side-effects.  */
	      if (recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno], '<') == NULL
			      && strchr (recog_data.constraints[opno], '>')
				 == NULL)
			    return 0;
			  break;
			default:
			  break;
			}
		}
#endif
	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
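
/* Worked example (added for illustration): for a two-address add whose
   constraints are "=r" and "0", the digit case above looks up operand 0
   for operand 1; with STRICT < 0 the match is simply assumed, while
   otherwise operands_match_p must report that the two operands are the
   same (after reload, the same hard register) for the alternative to
   win.  */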

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If OPERAND occupies multiple hard regs, all of them must be in CL.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
		  enum machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
	  && HARD_REGISTER_NUM_P (regno + offset)
	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
				regno + offset));
}
\f
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
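
/* Example (added for illustration): if (set (reg X) (const_int BIG))
   carries a REG_EQUAL note and is split into, say, a HIGH/LO_SUM pair,
   the code above re-attaches an equivalent REG_EQUAL note to the final
   insn of the split sequence, because that insn sets the same
   destination register.  */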

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
\f
#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB pc_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
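
/* Example (added for illustration, using a made-up buffer size): if
   MAX_INSNS_PER_PEEP2 were 5, the buffer would have six slots, so
   peep2_buf_position (6) yields 0 and peep2_buf_position (8) yields 2;
   callers rely on this to treat peep2_insn_data as a circular buffer.  */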

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
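
/* Example (added for illustration): on a target where a DImode REG
   occupies two consecutive hard registers, the loop above checks both
   register numbers against the recorded live_before set and reports
   the value dead only if neither half is live.  */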

/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref *def_rec;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
	   *def_rec; def_rec++)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));

      from = peep2_buf_position (from + 1);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
	{
	  /* Don't allocate fixed registers.  */
	  if (fixed_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Don't allocate global registers.  */
	  if (global_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Make sure the register is of the right class.  */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = 0;
	      break;
	    }
	  /* And that we don't create an extra save/restore.  */
	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions.  */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = 0;
	      break;
	    }

	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}

      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
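
/* Usage sketch (hypothetical call, added for illustration): a peephole2
   condition needing a spare SImode scratch register over insns 0..1
   might do

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);

   and fail the match when SCRATCH comes back NULL_RTX.  This is in
   effect what a (match_scratch ...) in a define_peephole2 expands to
   in the generated peephole2_insns code.  */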

/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
  int i;
  rtx last, eh_note, as_note, before_try, x;
  rtx old_insn, new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
	return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
	 may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
	new_insn = attempt;
      else
	new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
	return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_FRAME_RELATED_EXPR:
	  case REG_CFA_DEF_CFA:
	  case REG_CFA_ADJUST_CFA:
	  case REG_CFA_OFFSET:
	  case REG_CFA_REGISTER:
	  case REG_CFA_EXPRESSION:
	  case REG_CFA_RESTORE:
	  case REG_CFA_SET_VDRAP:
	    any_note = true;
	    break;
	  default:
	    break;
	  }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
	for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_FRAME_RELATED_EXPR:
	    case REG_CFA_DEF_CFA:
	    case REG_CFA_ADJUST_CFA:
	    case REG_CFA_OFFSET:
	    case REG_CFA_REGISTER:
	    case REG_CFA_EXPRESSION:
	    case REG_CFA_RESTORE:
	    case REG_CFA_SET_VDRAP:
	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	      any_note = true;
	      break;
	    default:
	      break;
	    }

      /* If there still isn't a note, make sure the unwind info sees the
	 same expression as before the split.  */
      if (!any_note)
	{
	  rtx old_set, new_set;

	  /* The old insn had better have been simple, or annotated.  */
	  old_set = single_set (old_insn);
	  gcc_assert (old_set != NULL);

	  new_set = single_set (new_insn);
	  if (!new_set || !rtx_equal_p (new_set, old_set))
	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
	}

      /* Copy prologue/epilogue status.  This is required in order to keep
	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
	continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
	{
	  if (CALL_P (new_insn))
	    break;
	  new_insn = NEXT_INSN (new_insn);
	}

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
	= CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      for (note = REG_NOTES (old_insn);
	   note;
	   note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_NORETURN:
	  case REG_SETJMP:
	  case REG_TM:
	    add_reg_note (new_insn, REG_NOTE_KIND (note),
			  XEXP (note, 0));
	    break;
	  default:
	    /* Discard all other reg notes.  */
	    break;
	  }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
	{
	  j = peep2_buf_position (peep2_current + i);
	  old_insn = peep2_insn_data[j].insn;
	  gcc_assert (!CALL_P (old_insn));
	}
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
	break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  last = emit_insn_after_setloc (attempt,
				 peep2_insn_data[i].insn,
				 INSN_LOCATION (peep2_insn_data[i].insn));
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
	  break;

      if (eh_note)
	copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
	for (x = last; x != before_try; x = PREV_INSN (x))
	  if (x != BB_END (bb)
	      && (can_throw_internal (x)
		  || can_nonlocal_goto (x)))
	    {
	      edge nfte, nehe;
	      int flags;

	      nfte = split_block (bb, x);
	      flags = (eh_edge->flags
		       & (EDGE_EH | EDGE_ABNORMAL));
	      if (CALL_P (x))
		flags |= EDGE_ABNORMAL_CALL;
	      nehe = make_edge (nfte->src, eh_edge->dest,
				flags);

	      nehe->probability = eh_edge->probability;
	      nfte->probability
		= REG_BR_PROB_BASE - nehe->probability;

	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
	      bb = nfte->src;
	      eh_edge = nehe;
	    }

      /* Converting possibly trapping insn to non-trapping is
	 possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
	peep2_do_rebuild_jump_labels = true;
	break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
	return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx attempt, head;
	  int match_len;

	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
#endif /* HAVE_peephole2 */

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
	return false;

      out_set = single_set (out_insn);
      if (out_set)
	{
	  if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	    return false;
	}
      else
	{
	  out_pat = PATTERN (out_insn);

	  if (GET_CODE (out_pat) != PARALLEL)
	    return false;

	  for (i = 0; i < XVECLEN (out_pat, 0); i++)
	    {
	      out_exp = XVECEXP (out_pat, 0, i);

	      if (GET_CODE (out_exp) == CLOBBER)
		continue;

	      gcc_assert (GET_CODE (out_exp) == SET);

	      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
		return false;
	    }
	}
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
	{
	  in_exp = XVECEXP (in_pat, 0, i);

	  if (GET_CODE (in_exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (in_exp) == SET);

	  if (!MEM_P (SET_DEST (in_exp)))
	    return false;

	  out_set = single_set (out_insn);
	  if (out_set)
	    {
	      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
		return false;
	    }
	  else
	    {
	      out_pat = PATTERN (out_insn);
	      gcc_assert (GET_CODE (out_pat) == PARALLEL);

	      for (j = 0; j < XVECLEN (out_pat, 0); j++)
		{
		  out_exp = XVECEXP (out_pat, 0, j);

		  if (GET_CODE (out_exp) == CLOBBER)
		    continue;

		  gcc_assert (GET_CODE (out_exp) == SET);

		  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
		    return false;
		}
	    }
	}
    }

  return true;
}
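
/* Example (added for illustration): if OUT_INSN is (set (reg 1) ...)
   and IN_INSN is (set (mem (reg 2)) (reg 1)), the dependency is on the
   store data and the function returns true; if IN_INSN is instead
   (set (mem (reg 1)) (reg 3)), reg 1 feeds the address and the
   function returns false.  */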

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single set or a PARALLEL with multiple sets; for correctness IN_INSN
   should be a single_set, but for convenience of insn categorization it
   may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* OUT_SET is null in this branch, so test the destination of
	     each SET inside the PARALLEL instead.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
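
/* Example (added for illustration): with OUT_INSN setting (reg CC) and
   IN_INSN being
   (set (pc) (if_then_else (eq (reg CC) (const_int 0))
			   (label_ref ...) (pc))),
   the set register appears only in the IF_THEN_ELSE condition, so the
   function returns true; it returns false as soon as that register
   appears in either arm of the conditional.  */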
3942 \f
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
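
/* The factory above is what the pass manager uses: passes.def schedules
   this pass (within the post-reload sequence) via
   NEXT_PASS (pass_peephole2); the same convention applies to the
   make_pass_* factories below.  */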

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_after_reload ();
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting when HAVE_ATTR_length
     is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
         depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}

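/* To recap the splitting passes defined above: split1 runs before
   register allocation, split2 after reload, split3 before reg-stack
   conversion, split4 before the second scheduling pass, and split5
   just before shorten_branches on targets that do not use reg-stack.
   split1 and split2 always execute when reached (split2 limits itself
   to optimizing compilations unless STACK_REGS is defined), while
   split3, split4 and split5 are controlled by their gates.  */
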
/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     the first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_enabled_alternatives, 0,
          sizeof (this_target_recog->x_enabled_alternatives));
  for (int i = 0; i < LAST_INSN_CODE; ++i)
    if (this_target_recog->x_op_alt[i])
      {
        free (this_target_recog->x_op_alt[i]);
        this_target_recog->x_op_alt[i] = 0;
      }
}
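
/* A sketch of a hypothetical caller on a SWITCHABLE_TARGET back end:
   after this_target_recog has been repointed at the data for a newly
   selected target, calling recog_init discards the cached per-insn
   information above so that it is recomputed on demand:

     this_target_recog = &other_target_recog;  (hypothetical pointer)
     recog_init ();
*/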