1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
22
23 #include <stdio.h>
24 #include "config.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "recog.h"
39
40 /* Which cpu we're compiling for (NULL(=base), ???). */
41 char *arc_cpu_string;
42 int arc_cpu_type;
43
44 /* Name of mangle string to add to symbols to separate code compiled for each
45 cpu (or NULL). */
46 char *arc_mangle_cpu;
47
48 /* Save the operands last given to a compare for use when we
49 generate a scc or bcc insn. */
50 rtx arc_compare_op0, arc_compare_op1;
51
52 /* Name of text, data, and rodata sections, as specified on command line.
53 Selected by -m{text,data,rodata} flags. */
54 char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
55 char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
56 char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;
57
58 /* Name of text, data, and rodata sections used in varasm.c. */
59 char *arc_text_section;
60 char *arc_data_section;
61 char *arc_rodata_section;
62
63 /* Array of valid operand punctuation characters. */
64 char arc_punct_chars[256];
65
66 /* Variables used by arc_final_prescan_insn to implement conditional
67 execution. */
68 static int arc_ccfsm_state;
69 static int arc_ccfsm_current_cc;
70 static rtx arc_ccfsm_target_insn;
71 static int arc_ccfsm_target_label;
72
73 /* The maximum number of insns skipped which will be conditionalised if
74 possible. */
75 #define MAX_INSNS_SKIPPED 3
76
77 /* A nop is needed between a 4 byte insn that sets the condition codes and
78 a branch that uses them (the same isn't true for an 8 byte insn that sets
79 the condition codes). Set by arc_final_prescan_insn. Used by
80 arc_print_operand. */
81 static int last_insn_set_cc_p;
82 static int current_insn_set_cc_p;
83 static void record_cc_ref ();
84
85 void arc_init_reg_tables ();
86
87 /* Called by OVERRIDE_OPTIONS to initialize various things. */
88
89 void
90 arc_init (void)
91 {
92 if (arc_cpu_string == 0
93 || !strcmp (arc_cpu_string, "base"))
94 {
95 /* Ensure we have a printable value for the .cpu pseudo-op. */
96 arc_cpu_string = "base";
97 arc_cpu_type = 0;
98 arc_mangle_cpu = NULL;
99 }
100 else if (ARC_EXTENSION_CPU (arc_cpu_string))
101 ; /* nothing to do */
102 else
103 {
104 error ("bad value (%s) for -mcpu switch", arc_cpu_string);
105 arc_cpu_string = "base";
106 arc_cpu_type = 0;
107 arc_mangle_cpu = NULL;
108 }
109
110 /* Set the pseudo-ops for the various standard sections. */
111 arc_text_section = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
112 sprintf (arc_text_section, ARC_SECTION_FORMAT, arc_text_string);
113 arc_data_section = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
114 sprintf (arc_data_section, ARC_SECTION_FORMAT, arc_data_string);
115 arc_rodata_section = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
116 sprintf (arc_rodata_section, ARC_SECTION_FORMAT, arc_rodata_string);
117
118 arc_init_reg_tables ();
119
120 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
121 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
122 arc_punct_chars['#'] = 1;
123 arc_punct_chars['*'] = 1;
124 arc_punct_chars['?'] = 1;
125 arc_punct_chars['!'] = 1;
126 arc_punct_chars['~'] = 1;
127 }
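/* Illustrative note (annotation, not part of the original source): the
   xmalloc/sprintf calls in arc_init above simply splice the user-supplied
   section name into ARC_SECTION_FORMAT.  For example, assuming the format
   macro is something like "\t.section %s" and the user passed
   -mtext=.mytext, arc_text_section would become "\t.section .mytext",
   which varasm.c later emits verbatim.  The buffer is sized as
   strlen (name) + sizeof (format) + 1, which over-allocates slightly
   (sizeof already counts the terminating NUL and the "%s" is consumed),
   so the sprintf always fits.  */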
128 \f
129 /* The condition codes of the ARC, and the inverse function. */
130 static char *arc_condition_codes[] =
131 {
132 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
133 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
134 };
135
136 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
137
138 /* Returns the index of the ARC condition code string in
139 `arc_condition_codes'. COMPARISON should be an rtx like
140 `(eq (...) (...))'. */
141
142 static int
143 get_arc_condition_code (comparison)
144 rtx comparison;
145 {
146 switch (GET_CODE (comparison))
147 {
148 case EQ : return 2;
149 case NE : return 3;
150 case GT : return 10;
151 case LE : return 11;
152 case GE : return 12;
153 case LT : return 13;
154 case GTU : return 14;
155 case LEU : return 15;
156 case LTU : return 6;
157 case GEU : return 7;
158 default : abort ();
159 }
160 /*NOTREACHED*/
161 return (42);
162 }
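/* Illustrative note (annotation, not part of the original source): the
   arc_condition_codes table is laid out so that each condition and its
   logical inverse occupy an even/odd index pair, which is why
   ARC_INVERSE_CONDITION_CODE only needs to flip the low bit.  Worked
   examples using the mapping above:

     EQ  -> 2,   2 ^ 1 == 3   -> NE   ("eq"/"ne")
     GT  -> 10, 10 ^ 1 == 11  -> LE   ("gt"/"le")
     LTU -> 6,   6 ^ 1 == 7   -> GEU  ("c"/"nc")  */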
163
164 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
165 return the mode to be used for the comparison. */
166
167 enum machine_mode
168 arc_select_cc_mode (op, x, y)
169 enum rtx_code op;
170 rtx x, y;
171 {
172 switch (op)
173 {
174 case EQ :
175 case NE :
176 return CCZNmode;
177 default :
178 switch (GET_CODE (x))
179 {
180 case AND :
181 case IOR :
182 case XOR :
183 case SIGN_EXTEND :
184 case ZERO_EXTEND :
185 return CCZNmode;
186 case ASHIFT :
187 case ASHIFTRT :
188 case LSHIFTRT :
189 return CCZNCmode;
190 }
191 }
192 return CCmode;
193 }
194 \f
195 /* Vectors to keep interesting information about registers where it can easily
196 be got. We used to use the actual mode value as the bit number, but there
197 is (or may be) more than 32 modes now. Instead we use two tables: one
198 indexed by hard register number, and one indexed by mode. */
199
200 /* The purpose of arc_mode_class is to shrink the range of modes so that
201 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
202 mapped into one arc_mode_class mode. */
203
204 enum arc_mode_class {
205 C_MODE,
206 S_MODE, D_MODE, T_MODE, O_MODE,
207 SF_MODE, DF_MODE, TF_MODE, OF_MODE
208 };
209
210 /* Modes for condition codes. */
211 #define C_MODES (1 << (int) C_MODE)
212
213 /* Modes for single-word and smaller quantities. */
214 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
215
216 /* Modes for double-word and smaller quantities. */
217 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
218
219 /* Modes for quad-word and smaller quantities. */
220 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
221
222 /* Value is 1 if register/mode pair is acceptable on arc. */
223
224 unsigned int arc_hard_regno_mode_ok[] = {
225 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
226 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
227 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
228 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
229
230 /* ??? Leave these as S_MODES for now. */
231 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
232 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
233 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
234 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
235 };
236
237 unsigned int arc_mode_class [NUM_MACHINE_MODES];
238
239 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
240
241 void
242 arc_init_reg_tables ()
243 {
244 int i;
245
246 for (i = 0; i < NUM_MACHINE_MODES; i++)
247 {
248 switch (GET_MODE_CLASS (i))
249 {
250 case MODE_INT:
251 case MODE_PARTIAL_INT:
252 case MODE_COMPLEX_INT:
253 if (GET_MODE_SIZE (i) <= 4)
254 arc_mode_class[i] = 1 << (int) S_MODE;
255 else if (GET_MODE_SIZE (i) == 8)
256 arc_mode_class[i] = 1 << (int) D_MODE;
257 else if (GET_MODE_SIZE (i) == 16)
258 arc_mode_class[i] = 1 << (int) T_MODE;
259 else if (GET_MODE_SIZE (i) == 32)
260 arc_mode_class[i] = 1 << (int) O_MODE;
261 else
262 arc_mode_class[i] = 0;
263 break;
264 case MODE_FLOAT:
265 case MODE_COMPLEX_FLOAT:
266 if (GET_MODE_SIZE (i) <= 4)
267 arc_mode_class[i] = 1 << (int) SF_MODE;
268 else if (GET_MODE_SIZE (i) == 8)
269 arc_mode_class[i] = 1 << (int) DF_MODE;
270 else if (GET_MODE_SIZE (i) == 16)
271 arc_mode_class[i] = 1 << (int) TF_MODE;
272 else if (GET_MODE_SIZE (i) == 32)
273 arc_mode_class[i] = 1 << (int) OF_MODE;
274 else
275 arc_mode_class[i] = 0;
276 break;
277 case MODE_CC:
278 default:
279 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
280 we must explicitly check for them here. */
281 if (i == (int) CCmode || i == (int) CCZNmode || i == (int) CCZNCmode)
282 arc_mode_class[i] = 1 << (int) C_MODE;
283 else
284 arc_mode_class[i] = 0;
285 break;
286 }
287 }
288
289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
290 {
291 if (i < 60)
292 arc_regno_reg_class[i] = GENERAL_REGS;
293 else if (i == 60)
294 arc_regno_reg_class[i] = LPCOUNT_REG;
295 else if (i == 61)
296 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
297 else
298 arc_regno_reg_class[i] = NO_REGS;
299 }
300 }
301 \f
302 /* ARC specific attribute support.
303
304 The ARC has these attributes:
305 interrupt - for interrupt functions
306 */
307
308 /* Return nonzero if IDENTIFIER is a valid decl attribute. */
309
310 int
311 arc_valid_machine_decl_attribute (type, attributes, identifier, args)
312 tree type;
313 tree attributes;
314 tree identifier;
315 tree args;
316 {
317 if (identifier == get_identifier ("__interrupt__")
318 && list_length (args) == 1
319 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
320 {
321 tree value = TREE_VALUE (args);
322
323 if (!strcmp (TREE_STRING_POINTER (value), "ilink1")
324 || !strcmp (TREE_STRING_POINTER (value), "ilink2"))
325 return 1;
326 }
327 return 0;
328 }
329
330 /* Return zero if TYPE1 and TYPE2 are incompatible, one if they are compatible,
331 and two if they are nearly compatible (which causes a warning to be
332 generated). */
333
334 int
335 arc_comp_type_attributes (type1, type2)
336 tree type1, type2;
337 {
338 return 1;
339 }
340
341 /* Set the default attributes for TYPE. */
342
343 void
344 arc_set_default_type_attributes (type)
345 tree type;
346 {
347 }
348 \f
349 /* Acceptable arguments to the call insn. */
350
351 int
352 call_address_operand (op, mode)
353 rtx op;
354 enum machine_mode mode;
355 {
356 return (symbolic_operand (op, mode)
357 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
358 || (GET_CODE (op) == REG));
359 }
360
361 int
362 call_operand (op, mode)
363 rtx op;
364 enum machine_mode mode;
365 {
366 if (GET_CODE (op) != MEM)
367 return 0;
368 op = XEXP (op, 0);
369 return call_address_operand (op, mode);
370 }
371
372 /* Returns 1 if OP is a symbol reference. */
373
374 int
375 symbolic_operand (op, mode)
376 rtx op;
377 enum machine_mode mode;
378 {
379 switch (GET_CODE (op))
380 {
381 case SYMBOL_REF:
382 case LABEL_REF:
383 case CONST :
384 return 1;
385 default:
386 return 0;
387 }
388 }
389
390 /* Return truth value of statement that OP is a symbolic memory
391 operand of mode MODE. */
392
393 int
394 symbolic_memory_operand (op, mode)
395 rtx op;
396 enum machine_mode mode;
397 {
398 if (GET_CODE (op) == SUBREG)
399 op = SUBREG_REG (op);
400 if (GET_CODE (op) != MEM)
401 return 0;
402 op = XEXP (op, 0);
403 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
404 || GET_CODE (op) == LABEL_REF);
405 }
406
407 /* Return true if OP is a short immediate (shimm) value. */
408
409 int
410 short_immediate_operand (op, mode)
411 rtx op;
412 enum machine_mode mode;
413 {
414 if (GET_CODE (op) != CONST_INT)
415 return 0;
416 return SMALL_INT (INTVAL (op));
417 }
418
419 /* Return true if OP will require a long immediate (limm) value.
420 This is currently only used when calculating length attributes. */
421
422 int
423 long_immediate_operand (op, mode)
424 rtx op;
425 enum machine_mode mode;
426 {
427 switch (GET_CODE (op))
428 {
429 case SYMBOL_REF :
430 case LABEL_REF :
431 case CONST :
432 return 1;
433 case CONST_INT :
434 return !SMALL_INT (INTVAL (op));
435 case CONST_DOUBLE :
436 /* These can happen because large unsigned 32 bit constants are
437 represented this way (the multiplication patterns can cause these
438 to be generated). They also occur for SFmode values. */
439 return 1;
440 }
441 return 0;
442 }
443
444 /* Return true if OP is a MEM that when used as a load or store address will
445 require an 8 byte insn.
446 Load and store instructions don't allow the same possibilities but they're
447 similar enough that this one function will do.
448 This is currently only used when calculating length attributes. */
449
450 int
451 long_immediate_loadstore_operand (op, mode)
452 rtx op;
453 enum machine_mode mode;
454 {
455 if (GET_CODE (op) != MEM)
456 return 0;
457
458 op = XEXP (op, 0);
459 switch (GET_CODE (op))
460 {
461 case SYMBOL_REF :
462 case LABEL_REF :
463 case CONST :
464 return 1;
465 case CONST_INT :
466 /* This must be handled as "st c,[limm]". Ditto for load.
467 Technically, the assembler could translate some possibilities to
468 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
469 assume that it does. */
470 return 1;
471 case CONST_DOUBLE :
472 /* These can happen because large unsigned 32 bit constants are
473 represented this way (the multiplication patterns can cause these
474 to be generated). They also occur for SFmode values. */
475 return 1;
476 case REG :
477 return 0;
478 case PLUS :
479 if (GET_CODE (XEXP (op, 1)) == CONST_INT
480 && !SMALL_INT (INTVAL (XEXP (op, 1))))
481 return 1;
482 return 0;
483 }
484 return 0;
485 }
486
487 /* Return true if OP is an acceptable argument for a single word
488 move source. */
489
490 int
491 move_src_operand (op, mode)
492 rtx op;
493 enum machine_mode mode;
494 {
495 switch (GET_CODE (op))
496 {
497 case SYMBOL_REF :
498 case LABEL_REF :
499 case CONST :
500 return 1;
501 case CONST_INT :
502 return (LARGE_INT (INTVAL (op)));
503 case CONST_DOUBLE :
504 /* We can handle DImode integer constants in SImode if the value
505 (signed or unsigned) will fit in 32 bits. This is needed because
506 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
507 if (mode == SImode)
508 return arc_double_limm_p (op);
509 /* We can handle 32 bit floating point constants. */
510 if (mode == SFmode)
511 return GET_MODE (op) == SFmode;
512 return 0;
513 case REG :
514 return register_operand (op, mode);
515 case SUBREG :
516 /* (subreg (mem ...) ...) can occur here if the inner part was once a
517 pseudo-reg and is now a stack slot. */
518 if (GET_CODE (SUBREG_REG (op)) == MEM)
519 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
520 else
521 return register_operand (op, mode);
522 case MEM :
523 return address_operand (XEXP (op, 0), mode);
524 default :
525 return 0;
526 }
527 }
528
529 /* Return true if OP is an acceptable argument for a double word
530 move source. */
531
532 int
533 move_double_src_operand (op, mode)
534 rtx op;
535 enum machine_mode mode;
536 {
537 switch (GET_CODE (op))
538 {
539 case REG :
540 return register_operand (op, mode);
541 case SUBREG :
542 /* (subreg (mem ...) ...) can occur here if the inner part was once a
543 pseudo-reg and is now a stack slot. */
544 if (GET_CODE (SUBREG_REG (op)) == MEM)
545 return move_double_src_operand (SUBREG_REG (op), mode);
546 else
547 return register_operand (op, mode);
548 case MEM :
549 /* Disallow auto inc/dec for now. */
550 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
551 || GET_CODE (XEXP (op, 0)) == PRE_INC)
552 return 0;
553 return address_operand (XEXP (op, 0), mode);
554 case CONST_INT :
555 case CONST_DOUBLE :
556 return 1;
557 default :
558 return 0;
559 }
560 }
561
562 /* Return true if OP is an acceptable argument for a move destination. */
563
564 int
565 move_dest_operand (op, mode)
566 rtx op;
567 enum machine_mode mode;
568 {
569 switch (GET_CODE (op))
570 {
571 case REG :
572 return register_operand (op, mode);
573 case SUBREG :
574 /* (subreg (mem ...) ...) can occur here if the inner part was once a
575 pseudo-reg and is now a stack slot. */
576 if (GET_CODE (SUBREG_REG (op)) == MEM)
577 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
578 else
579 return register_operand (op, mode);
580 case MEM :
581 return address_operand (XEXP (op, 0), mode);
582 default :
583 return 0;
584 }
585 }
586
587 /* Return true if OP is valid load with update operand. */
588
589 int
590 load_update_operand (op, mode)
591 rtx op;
592 enum machine_mode mode;
593 {
594 if (GET_CODE (op) != MEM
595 || GET_MODE (op) != mode)
596 return 0;
597 op = XEXP (op, 0);
598 if (GET_CODE (op) != PLUS
599 || GET_MODE (op) != Pmode
600 || !register_operand (XEXP (op, 0), Pmode)
601 || !nonmemory_operand (XEXP (op, 1), Pmode))
602 return 0;
603 return 1;
604 }
605
606 /* Return true if OP is valid store with update operand. */
607
608 int
609 store_update_operand (op, mode)
610 rtx op;
611 enum machine_mode mode;
612 {
613 if (GET_CODE (op) != MEM
614 || GET_MODE (op) != mode)
615 return 0;
616 op = XEXP (op, 0);
617 if (GET_CODE (op) != PLUS
618 || GET_MODE (op) != Pmode
619 || !register_operand (XEXP (op, 0), Pmode)
620 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
621 && SMALL_INT (INTVAL (XEXP (op, 1)))))
622 return 0;
623 return 1;
624 }
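/* Illustrative note (annotation, not part of the original source): the two
   predicates above describe the MEM operand of the "load/store with
   update" patterns, i.e. a (mem (plus (reg base) (offset))) whose pattern
   also writes base+offset back into the base register.  In the emitted
   assembly this shows up as the ".a" (address write-back) suffix printed
   by the %U operand code further down; e.g. a hypothetical
   "ld.a r0,[r1,4]" loads from r1+4 and leaves r1 updated to r1+4.  */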
625
626 /* Return true if OP is a non-volatile non-immediate operand.
627 Volatile memory refs require a special "cache-bypass" instruction
628 and only the standard movXX patterns are set up to handle them. */
629
630 int
631 nonvol_nonimm_operand (op, mode)
632 rtx op;
633 enum machine_mode mode;
634 {
635 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
636 return 0;
637 return nonimmediate_operand (op, mode);
638 }
639
640 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
641 to check the range carefully since this predicate is used in DImode
642 contexts. */
643
644 int
645 const_sint32_operand (op, mode)
646 rtx op;
647 enum machine_mode mode;
648 {
649 /* All allowed constants will fit a CONST_INT. */
650 return (GET_CODE (op) == CONST_INT
651 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
652 }
653
654 /* Accept integer operands in the range 0..0xffffffff. We have to check the
655 range carefully since this predicate is used in DImode contexts. Also, we
656 need some extra crud to make it work when hosted on 64-bit machines. */
657
658 int
659 const_uint32_operand (op, mode)
660 rtx op;
661 enum machine_mode mode;
662 {
663 #if HOST_BITS_PER_WIDE_INT > 32
664 /* All allowed constants will fit a CONST_INT. */
665 return (GET_CODE (op) == CONST_INT
666 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
667 #else
668 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
669 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
670 #endif
671 }
672
673 /* Return 1 if OP is a comparison operator valid for the mode of CC.
674 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
675
676 Some insns only set a few bits in the condition code. So only allow those
677 comparisons that use the bits that are valid. */
678
679 int
680 proper_comparison_operator (op, mode)
681 rtx op;
682 enum machine_mode mode;
683 {
684 enum rtx_code code = GET_CODE (op);
685
686 if (GET_RTX_CLASS (code) != '<')
687 return 0;
688
689 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
690 return (code == EQ || code == NE);
691 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
692 return (code == EQ || code == NE
693 || code == LTU || code == GEU || code == GTU || code == LEU);
694 return 1;
695 }
696 \f
697 /* Misc. utilities. */
698
699 /* X and Y are two things to compare using CODE. Emit the compare insn and
700 return the rtx for the cc reg in the proper mode. */
701
702 rtx
703 gen_compare_reg (code, x, y)
704 enum rtx_code code;
705 rtx x, y;
706 {
707 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
708 rtx cc_reg;
709
710 cc_reg = gen_rtx (REG, mode, 61);
711
712 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
713 gen_rtx (COMPARE, mode, x, y)));
714
715 return cc_reg;
716 }
717
718 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
719 We assume the value can be either signed or unsigned. */
720
721 int
722 arc_double_limm_p (value)
723 rtx value;
724 {
725 HOST_WIDE_INT low, high;
726
727 if (GET_CODE (value) != CONST_DOUBLE)
728 abort ();
729
730 low = CONST_DOUBLE_LOW (value);
731 high = CONST_DOUBLE_HIGH (value);
732
733 if (low & 0x80000000)
734 {
735 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
736 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
737 == - (unsigned HOST_WIDE_INT) 0x80000000)
738 && high == -1));
739 }
740 else
741 {
742 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
743 }
744 }
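/* Illustrative note (annotation, not part of the original source): the
   test above accepts exactly those CONST_DOUBLEs whose value fits in 32
   bits when read as either a signed or an unsigned number:

     0 .. 0x7fffffff            high == 0, bit 31 of low clear   -> 1
     0x80000000 .. 0xffffffff   high == 0, bit 31 of low set     -> 1
                                (usable as an unsigned limm)
     -0x80000000 .. -1          high == -1, low sign-extended    -> 1
                                (usable as a signed limm)
     anything wider             e.g. high == 1                   -> 0  */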
745 \f
746 /* Do any needed setup for a variadic function. For the ARC, we must
747 create a register parameter block, and then copy any anonymous arguments
748 in registers to memory.
749
750 CUM has not been updated for the last named argument which has type TYPE
751 and mode MODE, and we rely on this fact.
752
753 We do things a little weird here. We're supposed to only allocate space
754 for the anonymous arguments. However we need to keep the stack eight byte
755 aligned. So we round the space up if necessary, and leave it to va_start
756 to compensate. */
757
758 void
759 arc_setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
760 CUMULATIVE_ARGS *cum;
761 enum machine_mode mode;
762 tree type;
763 int *pretend_size;
764 int no_rtl;
765 {
766 int first_anon_arg;
767
768 /* All BLKmode values are passed by reference. */
769 if (mode == BLKmode)
770 abort ();
771
772 /* We must treat `__builtin_va_alist' as an anonymous arg. */
773 if (current_function_varargs)
774 first_anon_arg = *cum;
775 else
776 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
777 / UNITS_PER_WORD);
778
779 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
780 {
781 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
782 int first_reg_offset = first_anon_arg;
783 /* Size in words to "pretend" allocate. */
784 int size = MAX_ARC_PARM_REGS - first_reg_offset;
785 /* Extra slop to keep stack eight byte aligned. */
786 int align_slop = size & 1;
787 rtx regblock;
788
789 regblock = gen_rtx (MEM, BLKmode,
790 plus_constant (arg_pointer_rtx,
791 FIRST_PARM_OFFSET (0)
792 + align_slop * UNITS_PER_WORD));
793 MEM_ALIAS_SET (regblock) = get_varargs_alias_set ();
794
795 move_block_from_reg (first_reg_offset, regblock,
796 MAX_ARC_PARM_REGS - first_reg_offset,
797 ((MAX_ARC_PARM_REGS - first_reg_offset)
798 * UNITS_PER_WORD));
799
800 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
801 * UNITS_PER_WORD);
802 }
803 }
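/* Illustrative example (annotation, not part of the original source),
   assuming MAX_ARC_PARM_REGS is 8 and UNITS_PER_WORD is 4: for a varargs
   function with three named word-sized arguments, first_anon_arg is 3,
   so size = 8 - 3 = 5 words of anonymous register arguments remain.
   Since 5 is odd, align_slop = 1 and *pretend_size becomes (5 + 1) * 4 =
   24 bytes, keeping the stack eight byte aligned; the five registers are
   dumped one word above the start of that block (the plus_constant offset
   above), and va_start compensates for the extra word.  */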
804 \f
805 /* Cost functions. */
806
807 /* Provide the costs of an addressing mode that contains ADDR.
808 If ADDR is not a valid address, its cost is irrelevant. */
809
810 int
811 arc_address_cost (addr)
812 rtx addr;
813 {
814 switch (GET_CODE (addr))
815 {
816 case REG :
817 /* This is handled in the macro that calls us.
818 It's here for documentation. */
819 return 1;
820
821 case LABEL_REF :
822 case SYMBOL_REF :
823 case CONST :
824 return 2;
825
826 case PLUS :
827 {
828 register rtx plus0 = XEXP (addr, 0);
829 register rtx plus1 = XEXP (addr, 1);
830
831 if (GET_CODE (plus0) != REG)
832 break;
833
834 switch (GET_CODE (plus1))
835 {
836 case CONST_INT :
837 	      return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
838 case CONST :
839 case SYMBOL_REF :
840 case LABEL_REF :
841 return 2;
842 default:
843 break;
844 }
845 break;
846 }
847 }
848
849 return 4;
850 }
851 \f
852 /* Function prologue/epilogue handlers. */
853
854 /* ARC stack frames look like:
855
856 Before call After call
857 +-----------------------+ +-----------------------+
858 | | | |
859 high | local variables, | | local variables, |
860 mem | reg save area, etc. | | reg save area, etc. |
861 | | | |
862 +-----------------------+ +-----------------------+
863 | | | |
864 | arguments on stack. | | arguments on stack. |
865 | | | |
866 SP+16->+-----------------------+FP+48->+-----------------------+
867 | 4 word save area for | | reg parm save area, |
868 | return addr, prev %fp | | only created for |
869 SP+0->+-----------------------+ | variable argument |
870 | functions |
871 FP+16->+-----------------------+
872 | 4 word save area for |
873 | return addr, prev %fp |
874 FP+0->+-----------------------+
875 | |
876 | local variables |
877 | |
878 +-----------------------+
879 | |
880 | register save area |
881 | |
882 +-----------------------+
883 | |
884 | alloca allocations |
885 | |
886 +-----------------------+
887 | |
888 | arguments on stack |
889 | |
890 SP+16->+-----------------------+
891 low | 4 word save area for |
892 memory | return addr, prev %fp |
893 SP+0->+-----------------------+
894
895 Notes:
896 1) The "reg parm save area" does not exist for non-variable-argument fns.
897 The "reg parm save area" can be eliminated completely if we created our
898 own va-arc.h, but that has tradeoffs as well (so it's not done). */
899
900 /* Structure to be filled in by arc_compute_frame_size with register
901 save masks, and offsets for the current function. */
902 struct arc_frame_info
903 {
904 unsigned int total_size; /* # bytes that the entire frame takes up. */
905 unsigned int extra_size; /* # bytes of extra stuff. */
906 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
907 unsigned int args_size; /* # bytes that outgoing arguments take up. */
908 unsigned int reg_size; /* # bytes needed to store regs. */
909 unsigned int var_size; /* # bytes that variables take up. */
910 unsigned int reg_offset; /* Offset from new sp to store regs. */
911 unsigned int gmask; /* Mask of saved gp registers. */
912 int initialized; /* Nonzero if frame size already calculated. */
913 };
914
915 /* Current frame information calculated by arc_compute_frame_size. */
916 static struct arc_frame_info current_frame_info;
917
918 /* Zero structure to initialize current_frame_info. */
919 static struct arc_frame_info zero_frame_info;
920
921 /* Type of function DECL.
922
923 The result is cached. To reset the cache at the end of a function,
924 call with DECL = NULL_TREE. */
925
926 enum arc_function_type
927 arc_compute_function_type (decl)
928 tree decl;
929 {
930 tree a;
931 /* Cached value. */
932 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
933 /* Last function we were called for. */
934 static tree last_fn = NULL_TREE;
935
936 /* Resetting the cached value? */
937 if (decl == NULL_TREE)
938 {
939 fn_type = ARC_FUNCTION_UNKNOWN;
940 last_fn = NULL_TREE;
941 return fn_type;
942 }
943
944 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
945 return fn_type;
946
947 /* Assume we have a normal function (not an interrupt handler). */
948 fn_type = ARC_FUNCTION_NORMAL;
949
950 /* Now see if this is an interrupt handler. */
951 for (a = DECL_MACHINE_ATTRIBUTES (current_function_decl);
952 a;
953 a = TREE_CHAIN (a))
954 {
955 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
956
957 if (name == get_identifier ("__interrupt__")
958 && list_length (args) == 1
959 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
960 {
961 tree value = TREE_VALUE (args);
962
963 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
964 fn_type = ARC_FUNCTION_ILINK1;
965 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
966 fn_type = ARC_FUNCTION_ILINK2;
967 else
968 abort ();
969 break;
970 }
971 }
972
973 last_fn = decl;
974 return fn_type;
975 }
976
977 #define ILINK1_REGNUM 29
978 #define ILINK2_REGNUM 30
979 #define RETURN_ADDR_REGNUM 31
980 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
981 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
982
983 /* Tell prologue and epilogue if register REGNO should be saved / restored.
984 The return address and frame pointer are treated separately.
985 Don't consider them here. */
986 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
987 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
988 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
989
990 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
991
992 /* Return the bytes needed to compute the frame pointer from the current
993 stack pointer.
994
995 SIZE is the size needed for local variables. */
996
997 unsigned int
998 arc_compute_frame_size (size)
999 int size; /* # of var. bytes allocated. */
1000 {
1001 int regno;
1002 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1003 unsigned int reg_size, reg_offset;
1004 unsigned int gmask;
1005 enum arc_function_type fn_type;
1006 int interrupt_p;
1007
1008 var_size = size;
1009 args_size = current_function_outgoing_args_size;
1010 pretend_size = current_function_pretend_args_size;
1011 extra_size = FIRST_PARM_OFFSET (0);
1012 total_size = extra_size + pretend_size + args_size + var_size;
1013 reg_offset = FIRST_PARM_OFFSET (0) + current_function_outgoing_args_size;
1014 reg_size = 0;
1015 gmask = 0;
1016
1017 /* See if this is an interrupt handler. Call used registers must be saved
1018 for them too. */
1019 fn_type = arc_compute_function_type (current_function_decl);
1020 interrupt_p = ARC_INTERRUPT_P (fn_type);
1021
1022 /* Calculate space needed for registers.
1023 ??? We ignore the extension registers for now. */
1024
1025 for (regno = 0; regno <= 31; regno++)
1026 {
1027 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1028 {
1029 reg_size += UNITS_PER_WORD;
1030 gmask |= 1 << regno;
1031 }
1032 }
1033
1034 total_size += reg_size;
1035
1036 /* If the only space to allocate is the fp/blink save area this is an
1037 empty frame. However, if we'll be making a function call we need to
1038 allocate a stack frame for our callee's fp/blink save area. */
1039 if (total_size == extra_size
1040 && !MUST_SAVE_RETURN_ADDR)
1041 total_size = extra_size = 0;
1042
1043 total_size = ARC_STACK_ALIGN (total_size);
1044
1045 /* Save computed information. */
1046 current_frame_info.total_size = total_size;
1047 current_frame_info.extra_size = extra_size;
1048 current_frame_info.pretend_size = pretend_size;
1049 current_frame_info.var_size = var_size;
1050 current_frame_info.args_size = args_size;
1051 current_frame_info.reg_size = reg_size;
1052 current_frame_info.reg_offset = reg_offset;
1053 current_frame_info.gmask = gmask;
1054 current_frame_info.initialized = reload_completed;
1055
1056 /* Ok, we're done. */
1057 return total_size;
1058 }
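/* Illustrative example (annotation, not part of the original source),
   assuming FIRST_PARM_OFFSET (0) is the 16 byte fp/blink save area shown
   in the frame diagram above and UNITS_PER_WORD is 4: for a non-interrupt
   function with 20 bytes of locals, no outgoing arguments, no pretend
   args, and three call-saved registers live, the code above computes

     extra_size = 16
     var_size   = 20
     reg_size   = 3 * 4 = 12    (gmask has three bits set)
     total_size = 16 + 0 + 0 + 20 + 12 = 48, already 8 byte aligned
     reg_offset = 16            (regs stored just above the save area)

   A function that needs nothing beyond the save area (no locals, regs, or
   outgoing args) and never saves the return address instead gets
   total_size = 0, i.e. a completely empty frame.  */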
1059 \f
1060 /* Common code to save/restore registers. */
1061
1062 void
1063 arc_save_restore (file, base_reg, offset, gmask, op)
1064 FILE *file;
1065 char *base_reg;
1066 unsigned int offset;
1067 unsigned int gmask;
1068 char *op;
1069 {
1070 int regno;
1071
1072 if (gmask == 0)
1073 return;
1074
1075 for (regno = 0; regno <= 31; regno++)
1076 {
1077 if ((gmask & (1L << regno)) != 0)
1078 {
1079 fprintf (file, "\t%s %s,[%s,%d]\n",
1080 op, reg_names[regno], base_reg, offset);
1081 offset += UNITS_PER_WORD;
1082 }
1083 }
1084 }
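/* Illustrative example (annotation, not part of the original source):
   with op == "st", base_reg == "sp", offset == 16 and a gmask whose bits
   13..15 are set, the loop above emits

        st r13,[sp,16]
        st r14,[sp,20]
        st r15,[sp,24]

   and the matching epilogue call with op == "ld" reloads the same slots.  */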
1085 \f
1086 /* Set up the stack and frame pointer (if desired) for the function. */
1087
1088 void
1089 arc_output_function_prologue (file, size)
1090 FILE *file;
1091 int size;
1092 {
1093 char *sp_str = reg_names[STACK_POINTER_REGNUM];
1094 char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1095 unsigned int gmask = current_frame_info.gmask;
1096 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1097
1098 /* If this is an interrupt handler, set up our stack frame.
1099 ??? Optimize later. */
1100 if (ARC_INTERRUPT_P (fn_type))
1101 {
1102 fprintf (file, "\t%s interrupt handler\n",
1103 ASM_COMMENT_START);
1104 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1105 }
1106
1107 /* This is only for the human reader. */
1108 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1109 ASM_COMMENT_START, ASM_COMMENT_START,
1110 current_frame_info.var_size,
1111 current_frame_info.reg_size / 4,
1112 current_frame_info.args_size,
1113 current_frame_info.extra_size);
1114
1115 size = ARC_STACK_ALIGN (size);
1116 size = (! current_frame_info.initialized
1117 ? arc_compute_frame_size (size)
1118 : current_frame_info.total_size);
1119
1120 /* These cases shouldn't happen. Catch them now. */
1121 if (size == 0 && gmask)
1122 abort ();
1123
1124 /* Allocate space for register arguments if this is a variadic function. */
1125 if (current_frame_info.pretend_size != 0)
1126 fprintf (file, "\tsub %s,%s,%d\n",
1127 sp_str, sp_str, current_frame_info.pretend_size);
1128
1129 /* The home-grown ABI says link register is saved first. */
1130 if (MUST_SAVE_RETURN_ADDR)
1131 fprintf (file, "\tst %s,[%s,%d]\n",
1132 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1133
1134 /* Set up the previous frame pointer next (if we need to). */
1135 if (frame_pointer_needed)
1136 {
1137 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1138 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1139 }
1140
1141 /* ??? We don't handle the case where the saved regs are more than 252
1142 bytes away from sp. This can be handled by decrementing sp once, saving
1143 the regs, and then decrementing it again. The epilogue doesn't have this
1144 problem as the `ld' insn takes reg+limm values (though it would be more
1145 efficient to avoid reg+limm). */
1146
1147 /* Allocate the stack frame. */
1148 if (size - current_frame_info.pretend_size > 0)
1149 fprintf (file, "\tsub %s,%s,%d\n",
1150 sp_str, sp_str, size - current_frame_info.pretend_size);
1151
1152 /* Save any needed call-saved regs (and call-used if this is an
1153 interrupt handler). */
1154 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1155 /* The zeroing of these two bits is unnecessary,
1156 but leave this in for clarity. */
1157 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1158 "st");
1159
1160 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1161 }
1162 \f
1163 /* Do any necessary cleanup after a function to restore stack, frame,
1164 and regs. */
1165
1166 void
1167 arc_output_function_epilogue (file, size)
1168 FILE *file;
1169 int size;
1170 {
1171 rtx epilogue_delay = current_function_epilogue_delay_list;
1172 int noepilogue = FALSE;
1173 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1174
1175 /* This is only for the human reader. */
1176 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1177
1178 size = ARC_STACK_ALIGN (size);
1179 size = (!current_frame_info.initialized
1180 ? arc_compute_frame_size (size)
1181 : current_frame_info.total_size);
1182
1183 if (size == 0 && epilogue_delay == 0)
1184 {
1185 rtx insn = get_last_insn ();
1186
1187 /* If the last insn was a BARRIER, we don't have to write any code
1188 because a jump (aka return) was put there. */
1189 if (GET_CODE (insn) == NOTE)
1190 insn = prev_nonnote_insn (insn);
1191 if (insn && GET_CODE (insn) == BARRIER)
1192 noepilogue = TRUE;
1193 }
1194
1195 if (!noepilogue)
1196 {
1197 unsigned int pretend_size = current_frame_info.pretend_size;
1198 unsigned int frame_size = size - pretend_size;
1199 int restored, fp_restored_p;
1200 int can_trust_sp_p = !current_function_calls_alloca;
1201 char *sp_str = reg_names[STACK_POINTER_REGNUM];
1202 char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1203
1204 /* ??? There are lots of optimizations that can be done here.
1205 EG: Use fp to restore regs if it's closer.
1206 Maybe in time we'll do them all. For now, always restore regs from
1207 sp, but don't restore sp if we don't have to. */
1208
1209 if (!can_trust_sp_p)
1210 {
1211 if (!frame_pointer_needed)
1212 abort ();
1213 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1214 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1215 }
1216
1217 /* Restore any saved registers. */
1218 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1219 /* The zeroing of these two bits is unnecessary,
1220 but leave this in for clarity. */
1221 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1222 "ld");
1223
1224 if (MUST_SAVE_RETURN_ADDR)
1225 fprintf (file, "\tld %s,[%s,%d]\n",
1226 reg_names[RETURN_ADDR_REGNUM],
1227 frame_pointer_needed ? fp_str : sp_str,
1228 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1229
1230 /* Keep track of how much of the stack pointer we've restored.
1231 It makes the following a lot more readable. */
1232 restored = 0;
1233 fp_restored_p = 0;
1234
1235 /* We try to emit the epilogue delay slot insn right after the load
1236 of the return address register so that it can execute with the
1237 stack intact. Secondly, loads are delayed. */
1238 /* ??? If stack intactness is important, always emit now. */
1239 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1240 {
1241 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1242 epilogue_delay = NULL_RTX;
1243 }
1244
1245 if (frame_pointer_needed)
1246 {
1247 /* Try to restore the frame pointer in the delay slot. We can't,
1248 however, if any of these is true. */
1249 if (epilogue_delay != NULL_RTX
1250 || !SMALL_INT (frame_size)
1251 || pretend_size
1252 || ARC_INTERRUPT_P (fn_type))
1253 {
1254 /* Note that we restore fp and sp here! */
1255 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1256 restored += frame_size;
1257 fp_restored_p = 1;
1258 }
1259 }
1260 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1261 || ARC_INTERRUPT_P (fn_type))
1262 {
1263 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1264 restored += frame_size;
1265 }
1266
1267 /* These must be done before the return insn because the delay slot
1268 does the final stack restore. */
1269 if (ARC_INTERRUPT_P (fn_type))
1270 {
1271 if (epilogue_delay)
1272 {
1273 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1274 }
1275 }
1276
1277 /* Emit the return instruction. */
1278 {
1279 static int regs[4] = {
1280 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1281 };
1282 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1283 }
1284
1285 /* If the only register saved is the return address, we need a
1286 nop, unless we have an instruction to put into it. Otherwise
1287 we don't since reloading multiple registers doesn't reference
1288 the register being loaded. */
1289
1290 if (ARC_INTERRUPT_P (fn_type))
1291 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1292 else if (epilogue_delay != NULL_RTX)
1293 {
1294 if (frame_pointer_needed && !fp_restored_p)
1295 abort ();
1296 if (restored < size)
1297 abort ();
1298 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1299 }
1300 else if (frame_pointer_needed && !fp_restored_p)
1301 {
1302 if (!SMALL_INT (frame_size))
1303 abort ();
1304 /* Note that we restore fp and sp here! */
1305 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1306 }
1307 else if (restored < size)
1308 {
1309 if (!SMALL_INT (size - restored))
1310 abort ();
1311 fprintf (file, "\tadd %s,%s,%d\n",
1312 sp_str, sp_str, size - restored);
1313 }
1314 else
1315 fprintf (file, "\tnop\n");
1316 }
1317
1318 /* Reset state info for each function. */
1319 current_frame_info = zero_frame_info;
1320 arc_compute_function_type (NULL_TREE);
1321 }
1322 \f
1323 /* Define the number of delay slots needed for the function epilogue.
1324
1325 Interrupt handlers can't have any epilogue delay slots (it's always needed
1326 for something else, I think). For normal functions, we have to worry about
1327 using call-saved regs as they'll be restored before the delay slot insn.
1328 Functions with non-empty frames already have enough choices for the epilogue
1329 delay slot so for now we only consider functions with empty frames. */
1330
1331 int
1332 arc_delay_slots_for_epilogue ()
1333 {
1334 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1335 return 0;
1336 if (!current_frame_info.initialized)
1337 (void) arc_compute_frame_size (get_frame_size ());
1338 if (current_frame_info.total_size == 0)
1339 return 1;
1340 return 0;
1341 }
1342
1343 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1344 Any single length instruction which doesn't reference the stack or frame
1345 pointer or any call-saved register is OK. SLOT will always be 0. */
1346
1347 int
1348 arc_eligible_for_epilogue_delay (trial, slot)
1349 rtx trial;
1350 int slot;
1351 {
1352 if (slot != 0)
1353 abort ();
1354
1355 if (get_attr_length (trial) == 1
1356 /* If registers were saved, presumably there's more than enough
1357 possibilities for the delay slot. The alternative is something
1358 more complicated (of course, if we expanded the epilogue as rtl
1359 this problem would go away). */
1360 /* ??? Note that this will always be true since only functions with
1361 empty frames have epilogue delay slots. See
1362 arc_delay_slots_for_epilogue. */
1363 && current_frame_info.gmask == 0
1364 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1365 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1366 return 1;
1367 return 0;
1368 }
1369 \f
1370 /* PIC */
1371
1372 /* Emit special PIC prologues and epilogues. */
1373
1374 void
1375 arc_finalize_pic ()
1376 {
1377 /* nothing to do */
1378 }
1379 \f
1380 /* Return true if OP is a shift operator. */
1381
1382 int
1383 shift_operator (op, mode)
1384 rtx op;
1385 enum machine_mode mode;
1386 {
1387 switch (GET_CODE (op))
1388 {
1389 case ASHIFTRT:
1390 case LSHIFTRT:
1391 case ASHIFT:
1392 return 1;
1393 default:
1394 return 0;
1395 }
1396 }
1397
1398 /* Output the assembler code for doing a shift.
1399 We go to a bit of trouble to generate efficient code as the ARC only has
1400 single bit shifts. This is taken from the h8300 port. We only have one
1401 mode of shifting and can't access individual bytes like the h8300 can, so
1402 this is greatly simplified (at the expense of not generating hyper-
1403 efficient code).
1404
1405 This function is not used if the variable shift insns are present. */
1406
1407 /* ??? We assume the output operand is the same as operand 1.
1408 This can be optimized (deleted) in the case of 1 bit shifts. */
1409 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1410 using it here will give us a chance to play with it. */
1411
1412 char *
1413 output_shift (operands)
1414 rtx *operands;
1415 {
1416 static int loopend_lab;
1417 rtx shift = operands[3];
1418 enum machine_mode mode = GET_MODE (shift);
1419 enum rtx_code code = GET_CODE (shift);
1420 char *shift_one;
1421
1422 if (mode != SImode)
1423 abort ();
1424
1425 switch (code)
1426 {
1427 case ASHIFT: shift_one = "asl %0,%0"; break;
1428 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1429 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1430 default: abort ();
1431 }
1432
1433 if (GET_CODE (operands[2]) != CONST_INT)
1434 {
1435 if (optimize)
1436 output_asm_insn ("mov lp_count,%2", operands);
1437 else
1438 output_asm_insn ("mov %4,%2", operands);
1439 goto shiftloop;
1440 }
1441 else
1442 {
1443 int n = INTVAL (operands[2]);
1444
1445 /* If the count is negative, make it 0. */
1446 if (n < 0)
1447 n = 0;
1448 /* If the count is too big, truncate it.
1449 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1450 do the intuitive thing. */
1451 else if (n > GET_MODE_BITSIZE (mode))
1452 n = GET_MODE_BITSIZE (mode);
1453
1454 /* First see if we can do them inline. */
1455 if (n <= 8)
1456 {
1457 while (--n >= 0)
1458 output_asm_insn (shift_one, operands);
1459 }
1460 /* See if we can use a rotate/and. */
1461 else if (n == BITS_PER_WORD - 1)
1462 {
1463 switch (code)
1464 {
1465 case ASHIFT :
1466 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1467 break;
1468 case ASHIFTRT :
1469 /* The ARC doesn't have a rol insn. Use something else. */
1470 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1471 break;
1472 case LSHIFTRT :
1473 /* The ARC doesn't have a rol insn. Use something else. */
1474 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1475 break;
1476 }
1477 }
1478 /* Must loop. */
1479 else
1480 {
1481 char buf[100];
1482
1483 if (optimize)
1484 output_asm_insn ("mov lp_count,%c2", operands);
1485 else
1486 output_asm_insn ("mov %4,%c2", operands);
1487 shiftloop:
1488 if (optimize)
1489 {
1490 if (flag_pic)
1491 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1492 ASM_COMMENT_START);
1493 else
1494 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1495 ASM_COMMENT_START);
1496 output_asm_insn (buf, operands);
1497 output_asm_insn ("sr %4,[lp_start]", operands);
1498 output_asm_insn ("add %4,%4,1", operands);
1499 output_asm_insn ("sr %4,[lp_end]", operands);
1500 output_asm_insn ("nop\n\tnop", operands);
1501 if (flag_pic)
1502 asm_fprintf (asm_out_file, "\t%s single insn loop\n",
1503 ASM_COMMENT_START);
1504 else
1505 asm_fprintf (asm_out_file, "1:\t%s single insn loop\n",
1506 ASM_COMMENT_START);
1507 output_asm_insn (shift_one, operands);
1508 }
1509 else
1510 {
1511 asm_fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1512 ASM_COMMENT_START);
1513 output_asm_insn ("sub.f %4,%4,1", operands);
1514 output_asm_insn ("nop", operands);
1515 output_asm_insn ("bn.nd 2f", operands);
1516 output_asm_insn (shift_one, operands);
1517 output_asm_insn ("b.nd 1b", operands);
1518 asm_fprintf (asm_out_file, "2:\t%s end shift loop\n",
1519 ASM_COMMENT_START);
1520 }
1521 }
1522 }
1523
1524 return "";
1525 }
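/* Illustrative examples (annotation, not part of the original source) of
   what output_shift emits for an SImode ASHIFT, using hypothetical
   register names:

     count == 2               asl r0,r0            (unrolled, count <= 8)
                              asl r0,r0
     count == 31              and r0,r0,1          (rotate/and trick)
                              ror r0,r0
     count in a register,     mov lp_count,r1      (zero overhead loop
     compiled with -O          ... loop setup ...   around a single
                               asl r0,r0            shift insn)  */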
1526 \f
1527 /* Nested function support. */
1528
1529 /* Emit RTL insns to initialize the variable parts of a trampoline.
1530 FNADDR is an RTX for the address of the function's pure code.
1531 CXT is an RTX for the static chain value for the function. */
1532
1533 void
1534 arc_initialize_trampoline (tramp, fnaddr, cxt)
1535 rtx tramp, fnaddr, cxt;
1536 {
1537 }
1538 \f
1539 /* Set the cpu type and print out other fancy things,
1540 at the top of the file. */
1541
1542 void
1543 arc_asm_file_start (file)
1544 FILE *file;
1545 {
1546 fprintf (file, "\t.cpu %s\n", arc_cpu_string);
1547 }
1548 \f
1549 /* Print operand X (an rtx) in assembler syntax to file FILE.
1550 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1551 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1552
1553 void
1554 arc_print_operand (file, x, code)
1555 FILE *file;
1556 rtx x;
1557 int code;
1558 {
1559 switch (code)
1560 {
1561 case '#' :
1562 /* Conditional branches. For now these are equivalent. */
1563 case '*' :
1564 /* Unconditional branches. Output the appropriate delay slot suffix. */
1565 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1566 {
1567 /* There's nothing in the delay slot. */
1568 fputs (".nd", file);
1569 }
1570 else
1571 {
1572 rtx jump = XVECEXP (final_sequence, 0, 0);
1573 rtx delay = XVECEXP (final_sequence, 0, 1);
1574 if (INSN_ANNULLED_BRANCH_P (jump))
1575 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1576 else
1577 fputs (".d", file);
1578 }
1579 return;
1580 case '?' : /* with leading "." */
1581 case '!' : /* without leading "." */
1582 /* This insn can be conditionally executed. See if the ccfsm machinery
1583 says it should be conditionalized. */
1584 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1585 {
1586 /* Is this insn in a delay slot? */
1587 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1588 {
1589 rtx insn = XVECEXP (final_sequence, 0, 1);
1590
1591 /* If the insn is annulled and is from the target path, we need
1592 to invert the condition test. */
1593 if (INSN_ANNULLED_BRANCH_P (insn))
1594 {
1595 if (INSN_FROM_TARGET_P (insn))
1596 fprintf (file, "%s%s",
1597 code == '?' ? "." : "",
1598 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1599 else
1600 fprintf (file, "%s%s",
1601 code == '?' ? "." : "",
1602 arc_condition_codes[arc_ccfsm_current_cc]);
1603 }
1604 else
1605 /* This insn is executed for either path, so don't
1606 conditionalize it at all. */
1607 ; /* nothing to do */
1608 }
1609 else
1610 {
1611 /* This insn isn't in a delay slot. */
1612 fprintf (file, "%s%s",
1613 code == '?' ? "." : "",
1614 arc_condition_codes[arc_ccfsm_current_cc]);
1615 }
1616 }
1617 return;
1618 case '~' :
1619 /* Output a nop if we're between a set of the condition codes,
1620 and a conditional branch. */
1621 if (last_insn_set_cc_p)
1622 fputs ("nop\n\t", file);
1623 return;
1624 case 'd' :
1625 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1626 return;
1627 case 'D' :
1628 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1629 (get_arc_condition_code (x))],
1630 file);
1631 return;
1632 case 'R' :
1633 /* Write second word of DImode or DFmode reference,
1634 register or memory. */
1635 if (GET_CODE (x) == REG)
1636 fputs (reg_names[REGNO (x)+1], file);
1637 else if (GET_CODE (x) == MEM)
1638 {
1639 fputc ('[', file);
1640 /* Handle possible auto-increment. Since it is pre-increment and
1641 we have already done it, we can just use an offset of four. */
1642 /* ??? This is taken from rs6000.c I think. I don't think it is
1643 currently necessary, but keep it around. */
1644 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1645 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1646 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1647 else
1648 output_address (plus_constant (XEXP (x, 0), 4));
1649 fputc (']', file);
1650 }
1651 else
1652 output_operand_lossage ("invalid operand to %R code");
1653 return;
1654 case 'S' :
1655 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FLAG (x))
1656 || GET_CODE (x) == LABEL_REF)
1657 {
1658 fprintf (file, "%%st(");
1659 output_addr_const (file, x);
1660 fprintf (file, ")");
1661 return;
1662 }
1663 break;
1664 case 'H' :
1665 case 'L' :
1666 if (GET_CODE (x) == REG)
1667 {
1668 /* L = least significant word, H = most significant word */
1669 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1670 fputs (reg_names[REGNO (x)], file);
1671 else
1672 fputs (reg_names[REGNO (x)+1], file);
1673 }
1674 else if (GET_CODE (x) == CONST_INT
1675 || GET_CODE (x) == CONST_DOUBLE)
1676 {
1677 rtx first, second;
1678
1679 split_double (x, &first, &second);
1680 fprintf (file, "0x%08lx",
1681 code == 'L' ? INTVAL (first) : INTVAL (second));
1682 }
1683 else
1684 output_operand_lossage ("invalid operand to %H/%L code");
1685 return;
1686 case 'A' :
1687 {
1688 REAL_VALUE_TYPE d;
1689 char str[30];
1690
1691 if (GET_CODE (x) != CONST_DOUBLE
1692 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1693 abort ();
1694 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1695 REAL_VALUE_TO_DECIMAL (d, "%.20e", str);
1696 fprintf (file, "%s", str);
1697 return;
1698 }
1699 case 'U' :
1700 /* Output a load/store with update indicator if appropriate. */
1701 if (GET_CODE (x) == MEM)
1702 {
1703 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1704 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1705 fputs (".a", file);
1706 }
1707 else
1708 output_operand_lossage ("invalid operand to %U code");
1709 return;
1710 case 'V' :
1711 /* Output cache bypass indicator for a load/store insn. Volatile memory
1712 refs are defined to use the cache bypass mechanism. */
1713 if (GET_CODE (x) == MEM)
1714 {
1715 if (MEM_VOLATILE_P (x))
1716 fputs (".di", file);
1717 }
1718 else
1719 output_operand_lossage ("invalid operand to %V code");
1720 return;
1721 case 0 :
1722 /* Do nothing special. */
1723 break;
1724 default :
1725 /* Unknown flag. */
1726 output_operand_lossage ("invalid operand output code");
1727 }
1728
1729 switch (GET_CODE (x))
1730 {
1731 case REG :
1732 fputs (reg_names[REGNO (x)], file);
1733 break;
1734 case MEM :
1735 fputc ('[', file);
1736 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1737 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1738 GET_MODE_SIZE (GET_MODE (x))));
1739 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1740 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1741 - GET_MODE_SIZE (GET_MODE (x))));
1742 else
1743 output_address (XEXP (x, 0));
1744 fputc (']', file);
1745 break;
1746 case CONST_DOUBLE :
1747 /* We handle SFmode constants here as output_addr_const doesn't. */
1748 if (GET_MODE (x) == SFmode)
1749 {
1750 REAL_VALUE_TYPE d;
1751 long l;
1752
1753 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1754 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1755 fprintf (file, "0x%08lx", l);
1756 break;
1757 }
1758 /* Fall through. Let output_addr_const deal with it. */
1759 default :
1760 output_addr_const (file, x);
1761 break;
1762 }
1763 }
1764
1765 /* Print a memory address as an operand to reference that memory location. */
1766
1767 void
1768 arc_print_operand_address (file, addr)
1769 FILE *file;
1770 rtx addr;
1771 {
1772 register rtx base, index = 0;
1773 int offset = 0;
1774
1775 switch (GET_CODE (addr))
1776 {
1777 case REG :
1778 fputs (reg_names[REGNO (addr)], file);
1779 break;
1780 case SYMBOL_REF :
1781 if (/*???*/ 0 && SYMBOL_REF_FLAG (addr))
1782 {
1783 fprintf (file, "%%st(");
1784 output_addr_const (file, addr);
1785 fprintf (file, ")");
1786 }
1787 else
1788 output_addr_const (file, addr);
1789 break;
1790 case PLUS :
1791 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1792 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1793 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1794 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1795 else
1796 base = XEXP (addr, 0), index = XEXP (addr, 1);
1797 if (GET_CODE (base) != REG)
1798 abort ();
1799 fputs (reg_names[REGNO (base)], file);
1800 if (index == 0)
1801 {
1802 if (offset != 0)
1803 fprintf (file, ",%d", offset);
1804 }
1805 else if (GET_CODE (index) == REG)
1806 fprintf (file, ",%s", reg_names[REGNO (index)]);
1807 else if (GET_CODE (index) == SYMBOL_REF)
1808 fputc (',', file), output_addr_const (file, index);
1809 else
1810 abort ();
1811 break;
1812 case PRE_INC :
1813 case PRE_DEC :
1814 /* We shouldn't get here as we've lost the mode of the memory object
1815 (which says how much to inc/dec by). */
1816 abort ();
1817 break;
1818 default :
1819 output_addr_const (file, addr);
1820 break;
1821 }
1822 }
1823
1824 /* Update compare/branch separation marker. */
1825
1826 static void
1827 record_cc_ref (insn)
1828 rtx insn;
1829 {
1830 last_insn_set_cc_p = current_insn_set_cc_p;
1831
1832 switch (get_attr_cond (insn))
1833 {
1834 case COND_SET :
1835 case COND_SET_ZN :
1836 case COND_SET_ZNC :
1837 if (get_attr_length (insn) == 1)
1838 current_insn_set_cc_p = 1;
1839 else
1840 current_insn_set_cc_p = 0;
1841 break;
1842 default :
1843 current_insn_set_cc_p = 0;
1844 break;
1845 }
1846 }
1847 \f
1848 /* Conditional execution support.
1849
1850 This is based on the ARM port but for now is much simpler.
1851
1852 A finite state machine takes care of noticing whether or not instructions
1853 can be conditionally executed, and thus decrease execution time and code
1854 size by deleting branch instructions. The fsm is controlled by
1855 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1856 in the .md file for the branch insns also have a hand in this. */
1857
1858 /* The states of the fsm controlling condition codes are:
1859 0: normal, do nothing special
1860 1: don't output this insn
1861 2: don't output this insn
1862 3: make insns conditional
1863 4: make insns conditional
1864
1865 State transitions (state->state by whom, under what condition):
1866 0 -> 1 final_prescan_insn, if insn is conditional branch
1867 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1868 1 -> 3 branch patterns, after having not output the conditional branch
1869 2 -> 4 branch patterns, after having not output the conditional branch
1870 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL, if the `target' label is reached
1871 (the target label has CODE_LABEL_NUMBER equal to
1872 arc_ccfsm_target_label).
1873 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1874
1875 If the jump clobbers the conditions then we use states 2 and 4.
1876
1877 A similar thing can be done with conditional return insns.
1878
1879 We also handle separating branches from sets of the condition code.
1880 This is done here because knowledge of the ccfsm state is required;
1881 we may not be outputting the branch. */
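/* Illustrative example (annotation, not part of the original source;
   mnemonics are schematic): given a conditional branch that skips a
   single insn,

        b<cc>  .L1          ; final_prescan_insn: state 0 -> 1
        sub    r0,r0,1      ; the skipped insn
     .L1:                   ; state 3 -> 0 when the label is reached

   the branch is not output at all and the skipped insn is printed by the
   %? / %! operand codes with the inverse condition appended, roughly

        sub.<inverse cc> r0,r0,1

   so up to MAX_INSNS_SKIPPED insns execute conditionally instead of being
   branched around.  */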
1882
1883 void
1884 arc_final_prescan_insn (insn, opvec, noperands)
1885 rtx insn;
1886 rtx *opvec;
1887 int noperands;
1888 {
1889 /* BODY will hold the body of INSN. */
1890 register rtx body = PATTERN (insn);
1891
1892 /* This will be 1 if trying to repeat the trick (i.e. do the `else' part of
1893 an if/then/else), and things need to be reversed. */
1894 int reverse = 0;
1895
1896 /* If we start with a return insn, we only succeed if we find another one. */
1897 int seeking_return = 0;
1898
1899 /* START_INSN will hold the insn from where we start looking. This is the
1900 first insn after the following code_label if REVERSE is true. */
1901 rtx start_insn = insn;
1902
1903 /* Update compare/branch separation marker. */
1904 record_cc_ref (insn);
1905
1906 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1907 We can't do this in the macro FINAL_PRESCAN_INSN because it's called from
1908 final_scan_insn which has `optimize' as a local. */
1909 if (optimize < 2 || TARGET_NO_COND_EXEC)
1910 return;
1911
1912 /* If in state 4, check if the target branch is reached, in order to
1913 change back to state 0. */
1914 if (arc_ccfsm_state == 4)
1915 {
1916 if (insn == arc_ccfsm_target_insn)
1917 {
1918 arc_ccfsm_target_insn = NULL;
1919 arc_ccfsm_state = 0;
1920 }
1921 return;
1922 }
1923
1924 /* If in state 3, it is possible to repeat the trick, if this insn is an
1925 unconditional branch to a label, and immediately following this branch
1926 is the previous target label which is only used once, and the label this
1927 branch jumps to is not too far off. Or in other words "we've done the
1928 `then' part, see if we can do the `else' part." */
1929 if (arc_ccfsm_state == 3)
1930 {
1931 if (simplejump_p (insn))
1932 {
1933 start_insn = next_nonnote_insn (start_insn);
1934 if (GET_CODE (start_insn) == BARRIER)
1935 {
1936 /* ??? Isn't this always a barrier? */
1937 start_insn = next_nonnote_insn (start_insn);
1938 }
1939 if (GET_CODE (start_insn) == CODE_LABEL
1940 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
1941 && LABEL_NUSES (start_insn) == 1)
1942 reverse = TRUE;
1943 else
1944 return;
1945 }
1946 else if (GET_CODE (body) == RETURN)
1947 {
1948 start_insn = next_nonnote_insn (start_insn);
1949 if (GET_CODE (start_insn) == BARRIER)
1950 start_insn = next_nonnote_insn (start_insn);
1951 if (GET_CODE (start_insn) == CODE_LABEL
1952 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
1953 && LABEL_NUSES (start_insn) == 1)
1954 {
1955 reverse = TRUE;
1956 seeking_return = 1;
1957 }
1958 else
1959 return;
1960 }
1961 else
1962 return;
1963 }
1964
1965 if (GET_CODE (insn) != JUMP_INSN)
1966 return;
1967
1968 /* This jump might be paralleled with a clobber of the condition codes;
1969 the jump should always come first. */
1970 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
1971 body = XVECEXP (body, 0, 0);
1972
1973 if (reverse
1974 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
1975 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
1976 {
1977 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
1978 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
1979 int then_not_else = TRUE;
1980 /* Nonzero if next insn must be the target label. */
1981 int next_must_be_target_label_p;
1982 rtx this_insn = start_insn, label = 0;
1983
1984 /* Register the insn jumped to. */
1985 if (reverse)
1986 {
1987 if (!seeking_return)
1988 label = XEXP (SET_SRC (body), 0);
1989 }
1990 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
1991 label = XEXP (XEXP (SET_SRC (body), 1), 0);
1992 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
1993 {
1994 label = XEXP (XEXP (SET_SRC (body), 2), 0);
1995 then_not_else = FALSE;
1996 }
1997 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
1998 seeking_return = 1;
1999 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2000 {
2001 seeking_return = 1;
2002 then_not_else = FALSE;
2003 }
2004 else
2005 abort ();
2006
2007 /* See how many insns this branch skips, and what kind of insns. If all
2008 insns are okay, and the label or unconditional branch to the same
2009 label is not too far away, succeed. */
2010 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2011 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2012 insns_skipped++)
2013 {
2014 rtx scanbody;
2015
2016 this_insn = next_nonnote_insn (this_insn);
2017 if (!this_insn)
2018 break;
2019
2020 if (next_must_be_target_label_p)
2021 {
2022 if (GET_CODE (this_insn) == BARRIER)
2023 continue;
2024 if (GET_CODE (this_insn) == CODE_LABEL
2025 && this_insn == label)
2026 {
2027 arc_ccfsm_state = 1;
2028 succeed = TRUE;
2029 }
2030 else
2031 fail = TRUE;
2032 break;
2033 }
2034
2035 scanbody = PATTERN (this_insn);
2036
2037 switch (GET_CODE (this_insn))
2038 {
2039 case CODE_LABEL:
2040 /* Succeed if it is the target label, otherwise fail since
2041 control falls in from somewhere else. */
2042 if (this_insn == label)
2043 {
2044 arc_ccfsm_state = 1;
2045 succeed = TRUE;
2046 }
2047 else
2048 fail = TRUE;
2049 break;
2050
2051 case BARRIER:
2052 /* Succeed if the following insn is the target label.
2053 Otherwise fail.
2054 If return insns are used then the last insn in a function
2055 will be a barrier. */
2056 next_must_be_target_label_p = TRUE;
2057 break;
2058
2059 case CALL_INSN:
2060 /* We can handle a call insn if there are no insns after it,
2061 i.e. the next "insn" is the target label. We don't have to
2062 worry about delay slots, as such insns are SEQUENCEs inside
2063 INSNs. ??? It is possible to handle such insns though. */
2064 if (get_attr_cond (this_insn) == COND_CANUSE)
2065 next_must_be_target_label_p = TRUE;
2066 else
2067 fail = TRUE;
2068 break;
2069
2070 case JUMP_INSN:
2071 /* If this is an unconditional branch to the same label, succeed.
2072 If it is to another label, do nothing. If it is conditional,
2073 fail. */
2074 /* ??? Probably, the tests for the SET and the PC are unnecessary. */
2075
2076 if (GET_CODE (scanbody) == SET
2077 && GET_CODE (SET_DEST (scanbody)) == PC)
2078 {
2079 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2080 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2081 {
2082 arc_ccfsm_state = 2;
2083 succeed = TRUE;
2084 }
2085 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2086 fail = TRUE;
2087 }
2088 else if (GET_CODE (scanbody) == RETURN
2089 && seeking_return)
2090 {
2091 arc_ccfsm_state = 2;
2092 succeed = TRUE;
2093 }
2094 else if (GET_CODE (scanbody) == PARALLEL)
2095 {
2096 if (get_attr_cond (this_insn) != COND_CANUSE)
2097 fail = TRUE;
2098 }
2099 break;
2100
2101 case INSN:
2102 /* We can only do this with insns that can use the condition
2103 codes (and don't set them). */
2104 if (GET_CODE (scanbody) == SET
2105 || GET_CODE (scanbody) == PARALLEL)
2106 {
2107 if (get_attr_cond (this_insn) != COND_CANUSE)
2108 fail = TRUE;
2109 }
2110 /* We can't handle other insns like sequences. */
2111 else
2112 fail = TRUE;
2113 break;
2114
2115 default:
2116 break;
2117 }
2118 }
2119
2120 if (succeed)
2121 {
2122 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2123 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2124 else if (seeking_return || arc_ccfsm_state == 2)
2125 {
2126 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2127 {
2128 this_insn = next_nonnote_insn (this_insn);
2129 if (this_insn && (GET_CODE (this_insn) == BARRIER
2130 || GET_CODE (this_insn) == CODE_LABEL))
2131 abort ();
2132 }
2133 if (!this_insn)
2134 {
2135 /* Oh dear! We ran off the end; give up. */
2136 insn_extract (insn);
2137 arc_ccfsm_state = 0;
2138 arc_ccfsm_target_insn = NULL;
2139 return;
2140 }
2141 arc_ccfsm_target_insn = this_insn;
2142 }
2143 else
2144 abort ();
2145
2146 /* If REVERSE is true, arc_ccfsm_current_cc needs to be inverted from
2147 what it was. */
2148 if (!reverse)
2149 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2150 0));
2151
2152 if (reverse || then_not_else)
2153 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2154 }
2155
2156 /* Restore recog_operand. Getting the attributes of other insns can
2157 destroy this array, but final.c assumes that it remains intact
2158 across this call; since the insn has been recognized already we
2159 call insn_extract directly. */
2160 insn_extract (insn);
2161 }
2162 }
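
/* Editorial note: arc_final_prescan_insn above is expected to be reached via
   the FINAL_PRESCAN_INSN macro in arc.h, presumably along the lines of

       #define FINAL_PRESCAN_INSN(INSN, OPVEC, NOPERANDS) \
         arc_final_prescan_insn (INSN, OPVEC, NOPERANDS)

   (a sketch for orientation, not quoted from arc.h).  */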
2163
2164 /* Record that we are currently outputting label NUM with prefix PREFIX.
2165 If it's the label we're looking for, reset the ccfsm machinery.
2166
2167 Called from ASM_OUTPUT_INTERNAL_LABEL. */
2168
2169 void
2170 arc_ccfsm_at_label (prefix, num)
2171 char *prefix;
2172 int num;
2173 {
2174 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2175 && !strcmp (prefix, "L"))
2176 {
2177 arc_ccfsm_state = 0;
2178 arc_ccfsm_target_insn = NULL_RTX;
2179 }
2180 }
2181
2182 /* See if the current insn, which is a conditional branch, is to be
2183 deleted. */
2184
2185 int
2186 arc_ccfsm_branch_deleted_p ()
2187 {
2188 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2189 return 1;
2190 return 0;
2191 }
2192
2193 /* Record that a branch isn't output because subsequent insns can be
2194 conditionalized. */
2195
2196 void
2197 arc_ccfsm_record_branch_deleted ()
2198 {
2199 /* Indicate we're conditionalizing insns now. */
2200 arc_ccfsm_state += 2;
2201
2202 /* If the next insn is a subroutine call, we still need a nop between the
2203 cc setter and its user. We need to undo the effect of calling record_cc_ref
2204 for the just-deleted branch. */
2205 current_insn_set_cc_p = last_insn_set_cc_p;
2206 }
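
/* Editorial sketch (not quoted from arc.md): the branch output templates are
   presumably expected to use the two routines above roughly as

       if (arc_ccfsm_branch_deleted_p ())
         {
           arc_ccfsm_record_branch_deleted ();
           return "; branch deleted, next insns conditionalized";
         }
       ... otherwise output the branch normally ...

   so that when the fsm is in state 1 or 2 the branch is dropped and the
   state advances to 3 or 4, letting PRINT_OPERAND add the condition suffix
   to the following insns.  */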
2207 \f
2208 void
2209 arc_va_start (stdarg_p, valist, nextarg)
2210 int stdarg_p;
2211 tree valist;
2212 rtx nextarg;
2213 {
2214 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2215 if (current_function_args_info < 8
2216 && (current_function_args_info & 1))
2217 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2218
2219 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2220 }
2221
2222 rtx
2223 arc_va_arg (valist, type)
2224 tree valist, type;
2225 {
2226 rtx addr_rtx;
2227 tree addr, incr;
2228 tree type_ptr = build_pointer_type (type);
2229
2230 /* All aggregates are passed by reference. All scalar types larger
2231 than 8 bytes are passed by reference. */
2232
2233 if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
2234 {
2235 tree type_ptr_ptr = build_pointer_type (type_ptr);
2236
2237 addr = build (INDIRECT_REF, type_ptr,
2238 build (NOP_EXPR, type_ptr_ptr, valist));
2239
2240 incr = build (PLUS_EXPR, TREE_TYPE (valist),
2241 valist, build_int_2 (UNITS_PER_WORD, 0));
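/* Editorial note: for these by-reference arguments the va_list slot holds a
   pointer to the object, so we fetch that pointer through valist and advance
   valist by a single word regardless of the object's own size.  */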
2242 }
2243 else
2244 {
2245 HOST_WIDE_INT align, rounded_size;
2246
2247 /* Compute the rounded size of the type. */
2248 align = PARM_BOUNDARY / BITS_PER_UNIT;
2249 rounded_size = (((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT
2250 + align - 1) / align) * align);
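/* Worked example (editorial): assuming PARM_BOUNDARY is 32, align is 4, so a
   6 byte type rounds up to 8 (((6 + 4 - 1) / 4) * 4 == 8) while a 4 byte
   type stays at 4.  */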
2251
2252 /* Align 8 byte operands. */
2253 addr = valist;
2254 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2255 {
2256 /* AP = (TYPE *)(((int)AP + 7) & -8) */
2257
2258 addr = build (NOP_EXPR, integer_type_node, valist);
2259 addr = fold (build (PLUS_EXPR, integer_type_node, addr,
2260 build_int_2 (7, 0)));
2261 addr = fold (build (BIT_AND_EXPR, integer_type_node, addr,
2262 build_int_2 (-8, 0)));
2263 addr = fold (build (NOP_EXPR, TREE_TYPE (valist), addr));
2264 }
2265
2266 /* The increment is always rounded_size past the aligned pointer. */
2267 incr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2268 build_int_2 (rounded_size, 0)));
2269
2270 /* Adjust the pointer in big-endian mode. */
2271 if (BYTES_BIG_ENDIAN)
2272 {
2273 HOST_WIDE_INT adj;
2274 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2275 if (rounded_size > align)
2276 adj = rounded_size;
2277
2278 addr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2279 build_int_2 (rounded_size - adj, 0)));
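/* Worked example (editorial): for a 1 byte type with the 4 byte align
   assumed above, rounded_size is 4 and adj is 1, so addr is advanced by 3
   bytes to reach the value, which on a big-endian target sits in the last
   byte of its word sized slot.  */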
2280 }
2281 }
2282
2283 /* Evaluate the data address. */
2284 addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
2285 addr_rtx = copy_to_reg (addr_rtx);
2286
2287 /* Compute new value for AP. */
2288 incr = build (MODIFY_EXPR, TREE_TYPE (valist), valist, incr);
2289 TREE_SIDE_EFFECTS (incr) = 1;
2290 expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);
2291
2292 return addr_rtx;
2293 }