1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "recog.h"
41 #include "toplev.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45
46 /* Which cpu we're compiling for (NULL(=base), ???). */
47 const char *arc_cpu_string;
48 int arc_cpu_type;
49
50 /* Name of mangle string to add to symbols to separate code compiled for each
51 cpu (or NULL). */
52 const char *arc_mangle_cpu;
53
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0, arc_compare_op1;
57
58 /* Name of text, data, and rodata sections, as specified on command line.
59 Selected by -m{text,data,rodata} flags. */
60 const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
61 const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
62 const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;
63
64 /* Name of text, data, and rodata sections used in varasm.c. */
65 const char *arc_text_section;
66 const char *arc_data_section;
67 const char *arc_rodata_section;
68
69 /* Array of valid operand punctuation characters. */
70 char arc_punct_chars[256];
71
72 /* Variables used by arc_final_prescan_insn to implement conditional
73 execution. */
74 static int arc_ccfsm_state;
75 static int arc_ccfsm_current_cc;
76 static rtx arc_ccfsm_target_insn;
77 static int arc_ccfsm_target_label;
78
79 /* The maximum number of insns skipped which will be conditionalised if
80 possible. */
81 #define MAX_INSNS_SKIPPED 3
82
83 /* A nop is needed between a 4 byte insn that sets the condition codes and
84 a branch that uses them (the same isn't true for an 8 byte insn that sets
85 the condition codes). Set by arc_final_prescan_insn. Used by
86 arc_print_operand. */
87 static int last_insn_set_cc_p;
88 static int current_insn_set_cc_p;
89 static void record_cc_ref (rtx);
90 static void arc_init_reg_tables (void);
91 static int get_arc_condition_code (rtx);
92 const struct attribute_spec arc_attribute_table[];
93 static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
94 static bool arc_assemble_integer (rtx, unsigned int, int);
95 static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
96 static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
97 static void arc_file_start (void);
98 static void arc_internal_label (FILE *, const char *, unsigned long);
99 static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
100 tree, int *, int);
101 static bool arc_rtx_costs (rtx, int, int, int *);
102 static int arc_address_cost (rtx);
103 static void arc_external_libcall (rtx);
104 static bool arc_return_in_memory (tree, tree);
105 static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
106 tree, bool);
107 \f
108 /* Initialize the GCC target structure. */
109 #undef TARGET_ASM_ALIGNED_HI_OP
110 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
111 #undef TARGET_ASM_ALIGNED_SI_OP
112 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
113 #undef TARGET_ASM_INTEGER
114 #define TARGET_ASM_INTEGER arc_assemble_integer
115
116 #undef TARGET_ASM_FUNCTION_PROLOGUE
117 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
118 #undef TARGET_ASM_FUNCTION_EPILOGUE
119 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
120 #undef TARGET_ASM_FILE_START
121 #define TARGET_ASM_FILE_START arc_file_start
122 #undef TARGET_ATTRIBUTE_TABLE
123 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
124 #undef TARGET_ASM_INTERNAL_LABEL
125 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
126 #undef TARGET_ASM_EXTERNAL_LIBCALL
127 #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
128
129 #undef TARGET_RTX_COSTS
130 #define TARGET_RTX_COSTS arc_rtx_costs
131 #undef TARGET_ADDRESS_COST
132 #define TARGET_ADDRESS_COST arc_address_cost
133
134 #undef TARGET_PROMOTE_FUNCTION_ARGS
135 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
136 #undef TARGET_PROMOTE_FUNCTION_RETURN
137 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
138 #undef TARGET_PROMOTE_PROTOTYPES
139 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
140
141 #undef TARGET_RETURN_IN_MEMORY
142 #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
143 #undef TARGET_PASS_BY_REFERENCE
144 #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
145 #undef TARGET_CALLEE_COPIES
146 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
147
148 #undef TARGET_SETUP_INCOMING_VARARGS
149 #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
150
151 struct gcc_target targetm = TARGET_INITIALIZER;
152 \f
153 /* Called by OVERRIDE_OPTIONS to initialize various things. */
154
155 void
156 arc_init (void)
157 {
158 char *tmp;
159
160 if (arc_cpu_string == 0
161 || !strcmp (arc_cpu_string, "base"))
162 {
163 /* Ensure we have a printable value for the .cpu pseudo-op. */
164 arc_cpu_string = "base";
165 arc_cpu_type = 0;
166 arc_mangle_cpu = NULL;
167 }
168 else if (ARC_EXTENSION_CPU (arc_cpu_string))
169 ; /* nothing to do */
170 else
171 {
172 error ("bad value (%s) for -mcpu switch", arc_cpu_string);
173 arc_cpu_string = "base";
174 arc_cpu_type = 0;
175 arc_mangle_cpu = NULL;
176 }
177
178 /* Set the pseudo-ops for the various standard sections. */
179 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
180 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
181 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
182 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
183 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
184 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
185
186 arc_init_reg_tables ();
187
188 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
189 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
190 arc_punct_chars['#'] = 1;
191 arc_punct_chars['*'] = 1;
192 arc_punct_chars['?'] = 1;
193 arc_punct_chars['!'] = 1;
194 arc_punct_chars['~'] = 1;
195 }
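/* Illustrative result (an assumption, not a quote from arc.h: ARC_SECTION_FORMAT
   is taken to be a ".section %s" style format string): with the default
   -mtext setting the code above builds a string along the lines of
   "\t.section .text" for arc_text_section, which varasm.c then emits
   verbatim when switching sections.  */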
196 \f
197 /* The condition codes of the ARC, and the inverse function. */
198 static const char *const arc_condition_codes[] =
199 {
200 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
201 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
202 };
203
204 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
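/* The table above is laid out in complementary pairs ("eq"/"ne", "gt"/"le",
   "c"/"nc", ...), which is what makes the XOR-with-1 inversion valid.
   A minimal sketch of the idea (hypothetical helper, kept out of the build):  */
#if 0
static const char *
arc_inverse_condition_name (rtx comparison)
{
  /* For (eq ...) get_arc_condition_code returns 2, so this yields
     arc_condition_codes[3], i.e. "ne".  */
  return arc_condition_codes[ARC_INVERSE_CONDITION_CODE
                             (get_arc_condition_code (comparison))];
}
#endif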
205
206 /* Returns the index of the ARC condition code string in
207 `arc_condition_codes'. COMPARISON should be an rtx like
208 `(eq (...) (...))'. */
209
210 static int
211 get_arc_condition_code (rtx comparison)
212 {
213 switch (GET_CODE (comparison))
214 {
215 case EQ : return 2;
216 case NE : return 3;
217 case GT : return 10;
218 case LE : return 11;
219 case GE : return 12;
220 case LT : return 13;
221 case GTU : return 14;
222 case LEU : return 15;
223 case LTU : return 6;
224 case GEU : return 7;
225 default : abort ();
226 }
227 /*NOTREACHED*/
228 return (42);
229 }
230
231 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
232 return the mode to be used for the comparison. */
233
234 enum machine_mode
235 arc_select_cc_mode (enum rtx_code op,
236 rtx x ATTRIBUTE_UNUSED,
237 rtx y ATTRIBUTE_UNUSED)
238 {
239 switch (op)
240 {
241 case EQ :
242 case NE :
243 return CCZNmode;
244 default :
245 switch (GET_CODE (x))
246 {
247 case AND :
248 case IOR :
249 case XOR :
250 case SIGN_EXTEND :
251 case ZERO_EXTEND :
252 return CCZNmode;
253 case ASHIFT :
254 case ASHIFTRT :
255 case LSHIFTRT :
256 return CCZNCmode;
257 default:
258 break;
259 }
260 }
261 return CCmode;
262 }
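/* Worked examples of the mapping above (illustrative):
     (eq (reg) (const_int 0))               -> CCZNmode
     (lt (and (reg) (reg)) (const_int 0))   -> CCZNmode (logical ops set Z/N)
     any non-equality test of a shift result -> CCZNCmode (carry is meaningful)
     (gt (reg) (reg))                        -> CCmode (full signed compare)  */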
263 \f
264 /* Vectors to keep interesting information about registers where it can easily
265 be got. We used to use the actual mode value as the bit number, but there
266 is (or may be) more than 32 modes now. Instead we use two tables: one
267 indexed by hard register number, and one indexed by mode. */
268
269 /* The purpose of arc_mode_class is to shrink the range of modes so that
270 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
271 mapped into one arc_mode_class mode. */
272
273 enum arc_mode_class {
274 C_MODE,
275 S_MODE, D_MODE, T_MODE, O_MODE,
276 SF_MODE, DF_MODE, TF_MODE, OF_MODE
277 };
278
279 /* Modes for condition codes. */
280 #define C_MODES (1 << (int) C_MODE)
281
282 /* Modes for single-word and smaller quantities. */
283 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
284
285 /* Modes for double-word and smaller quantities. */
286 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
287
288 /* Modes for quad-word and smaller quantities. */
289 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
290
291 /* Value is 1 if register/mode pair is acceptable on arc. */
292
293 const unsigned int arc_hard_regno_mode_ok[] = {
294 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
295 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
296 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
297 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298
299 /* ??? Leave these as S_MODES for now. */
300 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
301 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
302 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
303 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
304 };
305
306 unsigned int arc_mode_class [NUM_MACHINE_MODES];
307
308 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
309
310 static void
311 arc_init_reg_tables (void)
312 {
313 int i;
314
315 for (i = 0; i < NUM_MACHINE_MODES; i++)
316 {
317 switch (GET_MODE_CLASS (i))
318 {
319 case MODE_INT:
320 case MODE_PARTIAL_INT:
321 case MODE_COMPLEX_INT:
322 if (GET_MODE_SIZE (i) <= 4)
323 arc_mode_class[i] = 1 << (int) S_MODE;
324 else if (GET_MODE_SIZE (i) == 8)
325 arc_mode_class[i] = 1 << (int) D_MODE;
326 else if (GET_MODE_SIZE (i) == 16)
327 arc_mode_class[i] = 1 << (int) T_MODE;
328 else if (GET_MODE_SIZE (i) == 32)
329 arc_mode_class[i] = 1 << (int) O_MODE;
330 else
331 arc_mode_class[i] = 0;
332 break;
333 case MODE_FLOAT:
334 case MODE_COMPLEX_FLOAT:
335 if (GET_MODE_SIZE (i) <= 4)
336 arc_mode_class[i] = 1 << (int) SF_MODE;
337 else if (GET_MODE_SIZE (i) == 8)
338 arc_mode_class[i] = 1 << (int) DF_MODE;
339 else if (GET_MODE_SIZE (i) == 16)
340 arc_mode_class[i] = 1 << (int) TF_MODE;
341 else if (GET_MODE_SIZE (i) == 32)
342 arc_mode_class[i] = 1 << (int) OF_MODE;
343 else
344 arc_mode_class[i] = 0;
345 break;
346 case MODE_CC:
347 arc_mode_class[i] = 1 << (int) C_MODE;
348 break;
349 default:
350 arc_mode_class[i] = 0;
351 break;
352 }
353 }
354
355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
356 {
357 if (i < 60)
358 arc_regno_reg_class[i] = GENERAL_REGS;
359 else if (i == 60)
360 arc_regno_reg_class[i] = LPCOUNT_REG;
361 else if (i == 61)
362 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
363 else
364 arc_regno_reg_class[i] = NO_REGS;
365 }
366 }
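/* A minimal sketch of how the two tables are combined; the real test is the
   HARD_REGNO_MODE_OK macro in arc.h, so the exact form below is an
   assumption, not a quote:  */
#if 0
#define ARC_HARD_REGNO_MODE_OK_SKETCH(REGNO, MODE) \
  ((arc_hard_regno_mode_ok[REGNO] & arc_mode_class[MODE]) != 0)
#endif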
367 \f
368 /* ARC specific attribute support.
369
370 The ARC has these attributes:
371 interrupt - for interrupt functions
372 */
373
374 const struct attribute_spec arc_attribute_table[] =
375 {
376 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
377 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
378 { NULL, 0, 0, false, false, false, NULL }
379 };
380
381 /* Handle an "interrupt" attribute; arguments as in
382 struct attribute_spec.handler. */
383 static tree
384 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
385 tree name,
386 tree args,
387 int flags ATTRIBUTE_UNUSED,
388 bool *no_add_attrs)
389 {
390 tree value = TREE_VALUE (args);
391
392 if (TREE_CODE (value) != STRING_CST)
393 {
394 warning ("argument of `%s' attribute is not a string constant",
395 IDENTIFIER_POINTER (name));
396 *no_add_attrs = true;
397 }
398 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
399 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
400 {
401 warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
402 IDENTIFIER_POINTER (name));
403 *no_add_attrs = true;
404 }
405
406 return NULL_TREE;
407 }
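/* Example use from C source (illustrative; the attribute takes exactly one
   string argument, either "ilink1" or "ilink2"):  */
#if 0
void my_handler (void) __attribute__ ((interrupt ("ilink1")));
#endif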
408
409 \f
410 /* Acceptable arguments to the call insn. */
411
412 int
413 call_address_operand (rtx op, enum machine_mode mode)
414 {
415 return (symbolic_operand (op, mode)
416 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
417 || (GET_CODE (op) == REG));
418 }
419
420 int
421 call_operand (rtx op, enum machine_mode mode)
422 {
423 if (GET_CODE (op) != MEM)
424 return 0;
425 op = XEXP (op, 0);
426 return call_address_operand (op, mode);
427 }
428
429 /* Returns 1 if OP is a symbol reference. */
430
431 int
432 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
433 {
434 switch (GET_CODE (op))
435 {
436 case SYMBOL_REF:
437 case LABEL_REF:
438 case CONST :
439 return 1;
440 default:
441 return 0;
442 }
443 }
444
445 /* Return truth value of statement that OP is a symbolic memory
446 operand of mode MODE. */
447
448 int
449 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
450 {
451 if (GET_CODE (op) == SUBREG)
452 op = SUBREG_REG (op);
453 if (GET_CODE (op) != MEM)
454 return 0;
455 op = XEXP (op, 0);
456 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
457 || GET_CODE (op) == LABEL_REF);
458 }
459
460 /* Return true if OP is a short immediate (shimm) value. */
461
462 int
463 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
464 {
465 if (GET_CODE (op) != CONST_INT)
466 return 0;
467 return SMALL_INT (INTVAL (op));
468 }
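/* Rough examples, assuming SMALL_INT in arc.h accepts the usual ARC 9 bit
   signed shimm range (about -256..255): constants such as 100 or -200 can be
   encoded directly in a 4 byte insn, whereas 0x12345678 fails SMALL_INT and
   must travel as a limm, making the insn 8 bytes (see
   long_immediate_operand below).  */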
469
470 /* Return true if OP will require a long immediate (limm) value.
471 This is currently only used when calculating length attributes. */
472
473 int
474 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
475 {
476 switch (GET_CODE (op))
477 {
478 case SYMBOL_REF :
479 case LABEL_REF :
480 case CONST :
481 return 1;
482 case CONST_INT :
483 return !SMALL_INT (INTVAL (op));
484 case CONST_DOUBLE :
485 /* These can happen because large unsigned 32 bit constants are
486 represented this way (the multiplication patterns can cause these
487 to be generated). They also occur for SFmode values. */
488 return 1;
489 default:
490 break;
491 }
492 return 0;
493 }
494
495 /* Return true if OP is a MEM that when used as a load or store address will
496 require an 8 byte insn.
497 Load and store instructions don't allow exactly the same addressing
498 possibilities, but they're similar enough that this one function will do.
499 This is currently only used when calculating length attributes. */
500
501 int
502 long_immediate_loadstore_operand (rtx op,
503 enum machine_mode mode ATTRIBUTE_UNUSED)
504 {
505 if (GET_CODE (op) != MEM)
506 return 0;
507
508 op = XEXP (op, 0);
509 switch (GET_CODE (op))
510 {
511 case SYMBOL_REF :
512 case LABEL_REF :
513 case CONST :
514 return 1;
515 case CONST_INT :
516 /* This must be handled as "st c,[limm]". Ditto for load.
517 Technically, the assembler could translate some possibilities to
518 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
519 assume that it does. */
520 return 1;
521 case CONST_DOUBLE :
522 /* These can happen because large unsigned 32 bit constants are
523 represented this way (the multiplication patterns can cause these
524 to be generated). They also occur for SFmode values. */
525 return 1;
526 case REG :
527 return 0;
528 case PLUS :
529 if (GET_CODE (XEXP (op, 1)) == CONST_INT
530 && !SMALL_INT (INTVAL (XEXP (op, 1))))
531 return 1;
532 return 0;
533 default:
534 break;
535 }
536 return 0;
537 }
538
539 /* Return true if OP is an acceptable argument for a single word
540 move source. */
541
542 int
543 move_src_operand (rtx op, enum machine_mode mode)
544 {
545 switch (GET_CODE (op))
546 {
547 case SYMBOL_REF :
548 case LABEL_REF :
549 case CONST :
550 return 1;
551 case CONST_INT :
552 return (LARGE_INT (INTVAL (op)));
553 case CONST_DOUBLE :
554 /* We can handle DImode integer constants in SImode if the value
555 (signed or unsigned) will fit in 32 bits. This is needed because
556 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
557 if (mode == SImode)
558 return arc_double_limm_p (op);
559 /* We can handle 32 bit floating point constants. */
560 if (mode == SFmode)
561 return GET_MODE (op) == SFmode;
562 return 0;
563 case REG :
564 return register_operand (op, mode);
565 case SUBREG :
566 /* (subreg (mem ...) ...) can occur here if the inner part was once a
567 pseudo-reg and is now a stack slot. */
568 if (GET_CODE (SUBREG_REG (op)) == MEM)
569 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
570 else
571 return register_operand (op, mode);
572 case MEM :
573 return address_operand (XEXP (op, 0), mode);
574 default :
575 return 0;
576 }
577 }
578
579 /* Return true if OP is an acceptable argument for a double word
580 move source. */
581
582 int
583 move_double_src_operand (rtx op, enum machine_mode mode)
584 {
585 switch (GET_CODE (op))
586 {
587 case REG :
588 return register_operand (op, mode);
589 case SUBREG :
590 /* (subreg (mem ...) ...) can occur here if the inner part was once a
591 pseudo-reg and is now a stack slot. */
592 if (GET_CODE (SUBREG_REG (op)) == MEM)
593 return move_double_src_operand (SUBREG_REG (op), mode);
594 else
595 return register_operand (op, mode);
596 case MEM :
597 /* Disallow auto inc/dec for now. */
598 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
599 || GET_CODE (XEXP (op, 0)) == PRE_INC)
600 return 0;
601 return address_operand (XEXP (op, 0), mode);
602 case CONST_INT :
603 case CONST_DOUBLE :
604 return 1;
605 default :
606 return 0;
607 }
608 }
609
610 /* Return true if OP is an acceptable argument for a move destination. */
611
612 int
613 move_dest_operand (rtx op, enum machine_mode mode)
614 {
615 switch (GET_CODE (op))
616 {
617 case REG :
618 return register_operand (op, mode);
619 case SUBREG :
620 /* (subreg (mem ...) ...) can occur here if the inner part was once a
621 pseudo-reg and is now a stack slot. */
622 if (GET_CODE (SUBREG_REG (op)) == MEM)
623 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
624 else
625 return register_operand (op, mode);
626 case MEM :
627 return address_operand (XEXP (op, 0), mode);
628 default :
629 return 0;
630 }
631 }
632
633 /* Return true if OP is valid load with update operand. */
634
635 int
636 load_update_operand (rtx op, enum machine_mode mode)
637 {
638 if (GET_CODE (op) != MEM
639 || GET_MODE (op) != mode)
640 return 0;
641 op = XEXP (op, 0);
642 if (GET_CODE (op) != PLUS
643 || GET_MODE (op) != Pmode
644 || !register_operand (XEXP (op, 0), Pmode)
645 || !nonmemory_operand (XEXP (op, 1), Pmode))
646 return 0;
647 return 1;
648 }
649
650 /* Return true if OP is valid store with update operand. */
651
652 int
653 store_update_operand (rtx op, enum machine_mode mode)
654 {
655 if (GET_CODE (op) != MEM
656 || GET_MODE (op) != mode)
657 return 0;
658 op = XEXP (op, 0);
659 if (GET_CODE (op) != PLUS
660 || GET_MODE (op) != Pmode
661 || !register_operand (XEXP (op, 0), Pmode)
662 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
663 && SMALL_INT (INTVAL (XEXP (op, 1)))))
664 return 0;
665 return 1;
666 }
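/* Example of the addresses these two predicates accept (illustrative): a
   load with update such as "ld.a r0,[r1,8]" uses (plus (reg r1) (const_int 8))
   and writes the incremented address back to r1; for a store with update the
   offset must additionally satisfy SMALL_INT.  */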
667
668 /* Return true if OP is a non-volatile non-immediate operand.
669 Volatile memory refs require a special "cache-bypass" instruction
670 and only the standard movXX patterns are set up to handle them. */
671
672 int
673 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
674 {
675 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
676 return 0;
677 return nonimmediate_operand (op, mode);
678 }
679
680 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
681 to check the range carefully since this predicate is used in DImode
682 contexts. */
683
684 int
685 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
686 {
687 /* All allowed constants will fit a CONST_INT. */
688 return (GET_CODE (op) == CONST_INT
689 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
690 }
691
692 /* Accept integer operands in the range 0..0xffffffff. We have to check the
693 range carefully since this predicate is used in DImode contexts. Also, we
694 need some extra crud to make it work when hosted on 64-bit machines. */
695
696 int
697 const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
698 {
699 #if HOST_BITS_PER_WIDE_INT > 32
700 /* All allowed constants will fit a CONST_INT. */
701 return (GET_CODE (op) == CONST_INT
702 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
703 #else
704 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
705 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
706 #endif
707 }
708
709 /* Return 1 if OP is a comparison operator valid for the mode of CC.
710 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
711
712 Some insns only set a few bits in the condition code. So only allow those
713 comparisons that use the bits that are valid. */
714
715 int
716 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
717 {
718 enum rtx_code code;
719 if (!COMPARISON_P (op))
720 return 0;
721
722 code = GET_CODE (op);
723 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
724 return (code == EQ || code == NE);
725 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
726 return (code == EQ || code == NE
727 || code == LTU || code == GEU || code == GTU || code == LEU);
728 return 1;
729 }
730 \f
731 /* Misc. utilities. */
732
733 /* X and Y are two things to compare using CODE. Emit the compare insn and
734 return the rtx for the cc reg in the proper mode. */
735
736 rtx
737 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
738 {
739 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
740 rtx cc_reg;
741
742 cc_reg = gen_rtx_REG (mode, 61);
743
744 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
745 gen_rtx_COMPARE (mode, x, y)));
746
747 return cc_reg;
748 }
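/* Sketch of the RTL this emits for an equality test: the compare goes into
   hard register 61 (the condition code register) in the mode picked by
   SELECT_CC_MODE, and the returned reg feeds the branch or scc pattern:

     (set (reg:CCZN 61) (compare:CCZN (reg:SI x) (reg:SI y)))  */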
749
750 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
751 We assume the value can be either signed or unsigned. */
752
753 int
754 arc_double_limm_p (rtx value)
755 {
756 HOST_WIDE_INT low, high;
757
758 if (GET_CODE (value) != CONST_DOUBLE)
759 abort ();
760
761 low = CONST_DOUBLE_LOW (value);
762 high = CONST_DOUBLE_HIGH (value);
763
764 if (low & 0x80000000)
765 {
766 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
767 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
768 == - (unsigned HOST_WIDE_INT) 0x80000000)
769 && high == -1));
770 }
771 else
772 {
773 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
774 }
775 }
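/* Worked examples (illustrative): a CONST_DOUBLE with low = 0xffffffff,
   high = 0 fits (zero extension of a 32 bit limm), as does low = 0xfffffffb,
   high = -1, i.e. -5 (sign extension); low = 0, high = 1, i.e. 0x100000000,
   does not.  */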
776 \f
777 /* Do any needed setup for a variadic function. For the ARC, we must
778 create a register parameter block, and then copy any anonymous arguments
779 in registers to memory.
780
781 CUM has not been updated for the last named argument which has type TYPE
782 and mode MODE, and we rely on this fact.
783
784 We do things a little weird here. We're supposed to only allocate space
785 for the anonymous arguments. However we need to keep the stack eight byte
786 aligned. So we round the space up if necessary, and leave it to va_start
787 to compensate. */
788
789 static void
790 arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
791 enum machine_mode mode,
792 tree type ATTRIBUTE_UNUSED,
793 int *pretend_size,
794 int no_rtl)
795 {
796 int first_anon_arg;
797
798 /* All BLKmode values are passed by reference. */
799 if (mode == BLKmode)
800 abort ();
801
802 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
803 / UNITS_PER_WORD);
804
805 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
806 {
807 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
808 int first_reg_offset = first_anon_arg;
809 /* Size in words to "pretend" allocate. */
810 int size = MAX_ARC_PARM_REGS - first_reg_offset;
811 /* Extra slop to keep stack eight byte aligned. */
812 int align_slop = size & 1;
813 rtx regblock;
814
815 regblock = gen_rtx_MEM (BLKmode,
816 plus_constant (arg_pointer_rtx,
817 FIRST_PARM_OFFSET (0)
818 + align_slop * UNITS_PER_WORD));
819 set_mem_alias_set (regblock, get_varargs_alias_set ());
820 set_mem_align (regblock, BITS_PER_WORD);
821 move_block_from_reg (first_reg_offset, regblock,
822 MAX_ARC_PARM_REGS - first_reg_offset);
823
824 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
825 * UNITS_PER_WORD);
826 }
827 }
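/* Worked example (sketch, assuming MAX_ARC_PARM_REGS is 8, i.e. r0-r7, and
   UNITS_PER_WORD is 4): for "int f (int a, int b, ...)" first_anon_arg works
   out to 2, so r2-r7 (6 words) are dumped into the register parameter block,
   align_slop is 0, and *pretend_size becomes 6 * 4 = 24 bytes.  */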
828 \f
829 /* Cost functions. */
830
831 /* Compute a (partial) cost for rtx X. Return true if the complete
832 cost has been computed, and false if subexpressions should be
833 scanned. In either case, *TOTAL contains the cost result. */
834
835 static bool
836 arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
837 {
838 switch (code)
839 {
840 /* Small integers are as cheap as registers. 4 byte values can
841 be fetched as immediate constants - let's give that the cost
842 of an extra insn. */
843 case CONST_INT:
844 if (SMALL_INT (INTVAL (x)))
845 {
846 *total = 0;
847 return true;
848 }
849 /* FALLTHRU */
850
851 case CONST:
852 case LABEL_REF:
853 case SYMBOL_REF:
854 *total = COSTS_N_INSNS (1);
855 return true;
856
857 case CONST_DOUBLE:
858 {
859 rtx high, low;
860 split_double (x, &high, &low);
861 *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
862 + !SMALL_INT (INTVAL (low)));
863 return true;
864 }
865
866 /* Encourage synth_mult to find a synthetic multiply when reasonable.
867 If we need more than 12 insns to do a multiply, then go out-of-line,
868 since the call overhead will be < 10% of the cost of the multiply. */
869 case ASHIFT:
870 case ASHIFTRT:
871 case LSHIFTRT:
872 if (TARGET_SHIFTER)
873 *total = COSTS_N_INSNS (1);
874 else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
875 *total = COSTS_N_INSNS (16);
876 else
877 *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
878 return false;
879
880 default:
881 return false;
882 }
883 }
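/* Consequence of the shift costing above (illustrative): without
   TARGET_SHIFTER a shift by constant N is costed as N insns, so synth_mult
   only builds multiplies out of cheap small-count shifts and adds, and
   otherwise the multiply is done out of line as the comment above intends.  */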
884
885
886 /* Provide the costs of an addressing mode that contains ADDR.
887 If ADDR is not a valid address, its cost is irrelevant. */
888
889 static int
890 arc_address_cost (rtx addr)
891 {
892 switch (GET_CODE (addr))
893 {
894 case REG :
895 return 1;
896
897 case LABEL_REF :
898 case SYMBOL_REF :
899 case CONST :
900 return 2;
901
902 case PLUS :
903 {
904 register rtx plus0 = XEXP (addr, 0);
905 register rtx plus1 = XEXP (addr, 1);
906
907 if (GET_CODE (plus0) != REG)
908 break;
909
910 switch (GET_CODE (plus1))
911 {
912 case CONST_INT :
913 return SMALL_INT (plus1) ? 1 : 2;
914 case CONST :
915 case SYMBOL_REF :
916 case LABEL_REF :
917 return 2;
918 default:
919 break;
920 }
921 break;
922 }
923 default:
924 break;
925 }
926
927 return 4;
928 }
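/* Illustrative values returned above: "[r0]" and "[r0,8]" cost 1,
   "[r0,0x12345]" and "[r0,symbol]" cost 2 (the offset needs a limm), a bare
   symbol or label costs 2, and anything else falls through to 4.  */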
929 \f
930 /* Function prologue/epilogue handlers. */
931
932 /* ARC stack frames look like:
933
934 Before call After call
935 +-----------------------+ +-----------------------+
936 | | | |
937 high | local variables, | | local variables, |
938 mem | reg save area, etc. | | reg save area, etc. |
939 | | | |
940 +-----------------------+ +-----------------------+
941 | | | |
942 | arguments on stack. | | arguments on stack. |
943 | | | |
944 SP+16->+-----------------------+FP+48->+-----------------------+
945 | 4 word save area for | | reg parm save area, |
946 | return addr, prev %fp | | only created for |
947 SP+0->+-----------------------+ | variable argument |
948 | functions |
949 FP+16->+-----------------------+
950 | 4 word save area for |
951 | return addr, prev %fp |
952 FP+0->+-----------------------+
953 | |
954 | local variables |
955 | |
956 +-----------------------+
957 | |
958 | register save area |
959 | |
960 +-----------------------+
961 | |
962 | alloca allocations |
963 | |
964 +-----------------------+
965 | |
966 | arguments on stack |
967 | |
968 SP+16->+-----------------------+
969 low | 4 word save area for |
970 memory | return addr, prev %fp |
971 SP+0->+-----------------------+
972
973 Notes:
974 1) The "reg parm save area" does not exist for non variable argument fns.
975 The "reg parm save area" can be eliminated completely if we created our
976 own va-arc.h, but that has tradeoffs as well (so it's not done). */
977
978 /* Structure to be filled in by arc_compute_frame_size with register
979 save masks, and offsets for the current function. */
980 struct arc_frame_info
981 {
982 unsigned int total_size; /* # bytes that the entire frame takes up. */
983 unsigned int extra_size; /* # bytes of extra stuff. */
984 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
985 unsigned int args_size; /* # bytes that outgoing arguments take up. */
986 unsigned int reg_size; /* # bytes needed to store regs. */
987 unsigned int var_size; /* # bytes that variables take up. */
988 unsigned int reg_offset; /* Offset from new sp to store regs. */
989 unsigned int gmask; /* Mask of saved gp registers. */
990 int initialized; /* Nonzero if frame size already calculated. */
991 };
992
993 /* Current frame information calculated by arc_compute_frame_size. */
994 static struct arc_frame_info current_frame_info;
995
996 /* Zero structure to initialize current_frame_info. */
997 static struct arc_frame_info zero_frame_info;
998
999 /* Type of function DECL.
1000
1001 The result is cached. To reset the cache at the end of a function,
1002 call with DECL = NULL_TREE. */
1003
1004 enum arc_function_type
1005 arc_compute_function_type (tree decl)
1006 {
1007 tree a;
1008 /* Cached value. */
1009 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1010 /* Last function we were called for. */
1011 static tree last_fn = NULL_TREE;
1012
1013 /* Resetting the cached value? */
1014 if (decl == NULL_TREE)
1015 {
1016 fn_type = ARC_FUNCTION_UNKNOWN;
1017 last_fn = NULL_TREE;
1018 return fn_type;
1019 }
1020
1021 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1022 return fn_type;
1023
1024 /* Assume we have a normal function (not an interrupt handler). */
1025 fn_type = ARC_FUNCTION_NORMAL;
1026
1027 /* Now see if this is an interrupt handler. */
1028 for (a = DECL_ATTRIBUTES (current_function_decl);
1029 a;
1030 a = TREE_CHAIN (a))
1031 {
1032 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1033
1034 if (name == get_identifier ("__interrupt__")
1035 && list_length (args) == 1
1036 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1037 {
1038 tree value = TREE_VALUE (args);
1039
1040 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1041 fn_type = ARC_FUNCTION_ILINK1;
1042 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1043 fn_type = ARC_FUNCTION_ILINK2;
1044 else
1045 abort ();
1046 break;
1047 }
1048 }
1049
1050 last_fn = decl;
1051 return fn_type;
1052 }
1053
1054 #define ILINK1_REGNUM 29
1055 #define ILINK2_REGNUM 30
1056 #define RETURN_ADDR_REGNUM 31
1057 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1058 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1059
1060 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1061 The return address and frame pointer are treated separately.
1062 Don't consider them here. */
1063 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1064 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1065 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1066
1067 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1068
1069 /* Return the bytes needed to compute the frame pointer from the current
1070 stack pointer.
1071
1072 SIZE is the size needed for local variables. */
1073
1074 unsigned int
1075 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1076 {
1077 int regno;
1078 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1079 unsigned int reg_size, reg_offset;
1080 unsigned int gmask;
1081 enum arc_function_type fn_type;
1082 int interrupt_p;
1083
1084 var_size = size;
1085 args_size = current_function_outgoing_args_size;
1086 pretend_size = current_function_pretend_args_size;
1087 extra_size = FIRST_PARM_OFFSET (0);
1088 total_size = extra_size + pretend_size + args_size + var_size;
1089 reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1090 reg_size = 0;
1091 gmask = 0;
1092
1093 /* See if this is an interrupt handler. Call used registers must be saved
1094 for them too. */
1095 fn_type = arc_compute_function_type (current_function_decl);
1096 interrupt_p = ARC_INTERRUPT_P (fn_type);
1097
1098 /* Calculate space needed for registers.
1099 ??? We ignore the extension registers for now. */
1100
1101 for (regno = 0; regno <= 31; regno++)
1102 {
1103 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1104 {
1105 reg_size += UNITS_PER_WORD;
1106 gmask |= 1 << regno;
1107 }
1108 }
1109
1110 total_size += reg_size;
1111
1112 /* If the only space to allocate is the fp/blink save area this is an
1113 empty frame. However, if we'll be making a function call we need to
1114 allocate a stack frame for our callee's fp/blink save area. */
1115 if (total_size == extra_size
1116 && !MUST_SAVE_RETURN_ADDR)
1117 total_size = extra_size = 0;
1118
1119 total_size = ARC_STACK_ALIGN (total_size);
1120
1121 /* Save computed information. */
1122 current_frame_info.total_size = total_size;
1123 current_frame_info.extra_size = extra_size;
1124 current_frame_info.pretend_size = pretend_size;
1125 current_frame_info.var_size = var_size;
1126 current_frame_info.args_size = args_size;
1127 current_frame_info.reg_size = reg_size;
1128 current_frame_info.reg_offset = reg_offset;
1129 current_frame_info.gmask = gmask;
1130 current_frame_info.initialized = reload_completed;
1131
1132 /* Ok, we're done. */
1133 return total_size;
1134 }
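/* Worked example (sketch, assuming UNITS_PER_WORD is 4, FIRST_PARM_OFFSET (0)
   is the 16 byte fp/blink area shown in the frame picture, and
   ARC_STACK_ALIGN rounds to 8 bytes): a function with 20 bytes of locals
   that must save r13 gets var_size = 20, extra_size = 16, reg_size = 4
   (blink and fp are accounted for separately), so total_size = 40, which is
   already aligned.  */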
1135 \f
1136 /* Common code to save/restore registers. */
1137
1138 void
1139 arc_save_restore (FILE *file,
1140 const char *base_reg,
1141 unsigned int offset,
1142 unsigned int gmask,
1143 const char *op)
1144 {
1145 int regno;
1146
1147 if (gmask == 0)
1148 return;
1149
1150 for (regno = 0; regno <= 31; regno++)
1151 {
1152 if ((gmask & (1L << regno)) != 0)
1153 {
1154 fprintf (file, "\t%s %s,[%s,%d]\n",
1155 op, reg_names[regno], base_reg, offset);
1156 offset += UNITS_PER_WORD;
1157 }
1158 }
1159 }
1160 \f
1161 /* Target hook to assemble an integer object. The ARC version needs to
1162 emit a special directive for references to labels and function
1163 symbols. */
1164
1165 static bool
1166 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1167 {
1168 if (size == UNITS_PER_WORD && aligned_p
1169 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1170 || GET_CODE (x) == LABEL_REF))
1171 {
1172 fputs ("\t.word\t%st(", asm_out_file);
1173 output_addr_const (asm_out_file, x);
1174 fputs (")\n", asm_out_file);
1175 return true;
1176 }
1177 return default_assemble_integer (x, size, aligned_p);
1178 }
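/* Example effect (illustrative): emitting the address of a function "foo" as
   word-sized data produces "\t.word\t%st(foo)" rather than a plain .word,
   matching the %st() syntax also used by the 'S' operand code below.  */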
1179 \f
1180 /* Set up the stack and frame pointer (if desired) for the function. */
1181
1182 static void
1183 arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1184 {
1185 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1186 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1187 unsigned int gmask = current_frame_info.gmask;
1188 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1189
1190 /* If this is an interrupt handler, set up our stack frame.
1191 ??? Optimize later. */
1192 if (ARC_INTERRUPT_P (fn_type))
1193 {
1194 fprintf (file, "\t%s interrupt handler\n",
1195 ASM_COMMENT_START);
1196 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1197 }
1198
1199 /* This is only for the human reader. */
1200 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1201 ASM_COMMENT_START, ASM_COMMENT_START,
1202 current_frame_info.var_size,
1203 current_frame_info.reg_size / 4,
1204 current_frame_info.args_size,
1205 current_frame_info.extra_size);
1206
1207 size = ARC_STACK_ALIGN (size);
1208 size = (! current_frame_info.initialized
1209 ? arc_compute_frame_size (size)
1210 : current_frame_info.total_size);
1211
1212 /* These cases shouldn't happen. Catch them now. */
1213 if (size == 0 && gmask)
1214 abort ();
1215
1216 /* Allocate space for register arguments if this is a variadic function. */
1217 if (current_frame_info.pretend_size != 0)
1218 fprintf (file, "\tsub %s,%s,%d\n",
1219 sp_str, sp_str, current_frame_info.pretend_size);
1220
1221 /* The home-grown ABI says link register is saved first. */
1222 if (MUST_SAVE_RETURN_ADDR)
1223 fprintf (file, "\tst %s,[%s,%d]\n",
1224 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1225
1226 /* Set up the previous frame pointer next (if we need to). */
1227 if (frame_pointer_needed)
1228 {
1229 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1230 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1231 }
1232
1233 /* ??? We don't handle the case where the saved regs are more than 252
1234 bytes away from sp. This can be handled by decrementing sp once, saving
1235 the regs, and then decrementing it again. The epilogue doesn't have this
1236 problem as the `ld' insn takes reg+limm values (though it would be more
1237 efficient to avoid reg+limm). */
1238
1239 /* Allocate the stack frame. */
1240 if (size - current_frame_info.pretend_size > 0)
1241 fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1242 sp_str, sp_str, size - current_frame_info.pretend_size);
1243
1244 /* Save any needed call-saved regs (and call-used if this is an
1245 interrupt handler). */
1246 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1247 /* The zeroing of these two bits is unnecessary,
1248 but leave this in for clarity. */
1249 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1250 "st");
1251
1252 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1253 }
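/* For a simple non-variadic function that needs a frame pointer, saves blink,
   and allocates a 16 byte frame, the code above prints roughly (illustrative;
   register names and offsets depend on arc.h):

	st blink,[sp,4]
	st fp,[sp]
	mov fp,sp
	sub sp,sp,16
	st r13,[sp,<reg_offset>]	; only if r13 is in gmask  */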
1254 \f
1255 /* Do any necessary cleanup after a function to restore stack, frame,
1256 and regs. */
1257
1258 static void
1259 arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1260 {
1261 rtx epilogue_delay = current_function_epilogue_delay_list;
1262 int noepilogue = FALSE;
1263 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1264
1265 /* This is only for the human reader. */
1266 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1267
1268 size = ARC_STACK_ALIGN (size);
1269 size = (!current_frame_info.initialized
1270 ? arc_compute_frame_size (size)
1271 : current_frame_info.total_size);
1272
1273 if (size == 0 && epilogue_delay == 0)
1274 {
1275 rtx insn = get_last_insn ();
1276
1277 /* If the last insn was a BARRIER, we don't have to write any code
1278 because a jump (aka return) was put there. */
1279 if (GET_CODE (insn) == NOTE)
1280 insn = prev_nonnote_insn (insn);
1281 if (insn && GET_CODE (insn) == BARRIER)
1282 noepilogue = TRUE;
1283 }
1284
1285 if (!noepilogue)
1286 {
1287 unsigned int pretend_size = current_frame_info.pretend_size;
1288 unsigned int frame_size = size - pretend_size;
1289 int restored, fp_restored_p;
1290 int can_trust_sp_p = !current_function_calls_alloca;
1291 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1292 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1293
1294 /* ??? There are lots of optimizations that can be done here.
1295 EG: Use fp to restore regs if it's closer.
1296 Maybe in time we'll do them all. For now, always restore regs from
1297 sp, but don't restore sp if we don't have to. */
1298
1299 if (!can_trust_sp_p)
1300 {
1301 if (!frame_pointer_needed)
1302 abort ();
1303 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1304 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1305 }
1306
1307 /* Restore any saved registers. */
1308 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1309 /* The zeroing of these two bits is unnecessary,
1310 but leave this in for clarity. */
1311 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1312 "ld");
1313
1314 if (MUST_SAVE_RETURN_ADDR)
1315 fprintf (file, "\tld %s,[%s,%d]\n",
1316 reg_names[RETURN_ADDR_REGNUM],
1317 frame_pointer_needed ? fp_str : sp_str,
1318 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1319
1320 /* Keep track of how much of the stack pointer we've restored.
1321 It makes the following a lot more readable. */
1322 restored = 0;
1323 fp_restored_p = 0;
1324
1325 /* We try to emit the epilogue delay slot insn right after the load
1326 of the return address register so that it can execute with the
1327 stack intact. Secondly, loads are delayed. */
1328 /* ??? If stack intactness is important, always emit now. */
1329 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1330 {
1331 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
1332 epilogue_delay = NULL_RTX;
1333 }
1334
1335 if (frame_pointer_needed)
1336 {
1337 /* Try to restore the frame pointer in the delay slot. We can't,
1338 however, if any of these is true. */
1339 if (epilogue_delay != NULL_RTX
1340 || !SMALL_INT (frame_size)
1341 || pretend_size
1342 || ARC_INTERRUPT_P (fn_type))
1343 {
1344 /* Note that we restore fp and sp here! */
1345 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1346 restored += frame_size;
1347 fp_restored_p = 1;
1348 }
1349 }
1350 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1351 || ARC_INTERRUPT_P (fn_type))
1352 {
1353 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1354 restored += frame_size;
1355 }
1356
1357 /* These must be done before the return insn because the delay slot
1358 does the final stack restore. */
1359 if (ARC_INTERRUPT_P (fn_type))
1360 {
1361 if (epilogue_delay)
1362 {
1363 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1,
1364 NULL);
1365 }
1366 }
1367
1368 /* Emit the return instruction. */
1369 {
1370 static const int regs[4] = {
1371 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1372 };
1373 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1374 }
1375
1376 /* If the only register saved is the return address, we need a
1377 nop, unless we have an instruction to put into it. Otherwise
1378 we don't since reloading multiple registers doesn't reference
1379 the register being loaded. */
1380
1381 if (ARC_INTERRUPT_P (fn_type))
1382 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1383 else if (epilogue_delay != NULL_RTX)
1384 {
1385 if (frame_pointer_needed && !fp_restored_p)
1386 abort ();
1387 if (restored < size)
1388 abort ();
1389 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1, NULL);
1390 }
1391 else if (frame_pointer_needed && !fp_restored_p)
1392 {
1393 if (!SMALL_INT (frame_size))
1394 abort ();
1395 /* Note that we restore fp and sp here! */
1396 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1397 }
1398 else if (restored < size)
1399 {
1400 if (!SMALL_INT (size - restored))
1401 abort ();
1402 fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1403 sp_str, sp_str, size - restored);
1404 }
1405 else
1406 fprintf (file, "\tnop\n");
1407 }
1408
1409 /* Reset state info for each function. */
1410 current_frame_info = zero_frame_info;
1411 arc_compute_function_type (NULL_TREE);
1412 }
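/* The matching epilogue for the prologue example above comes out roughly as
   (illustrative):

	ld blink,[fp,4]
	j.d blink
	ld.a fp,[sp,16]		; delay slot restores fp and pops the frame  */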
1413 \f
1414 /* Define the number of delay slots needed for the function epilogue.
1415
1416 Interrupt handlers can't have any epilogue delay slots (the slot is always
1417 needed for something else, I think). For normal functions, we have to worry about
1418 using call-saved regs as they'll be restored before the delay slot insn.
1419 Functions with non-empty frames already have enough choices for the epilogue
1420 delay slot so for now we only consider functions with empty frames. */
1421
1422 int
1423 arc_delay_slots_for_epilogue (void)
1424 {
1425 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1426 return 0;
1427 if (!current_frame_info.initialized)
1428 (void) arc_compute_frame_size (get_frame_size ());
1429 if (current_frame_info.total_size == 0)
1430 return 1;
1431 return 0;
1432 }
1433
1434 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1435 Any single length instruction which doesn't reference the stack or frame
1436 pointer or any call-saved register is OK. SLOT will always be 0. */
1437
1438 int
1439 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1440 {
1441 if (slot != 0)
1442 abort ();
1443
1444 if (get_attr_length (trial) == 1
1445 /* If registers were saved, presumably there's more than enough
1446 possibilities for the delay slot. The alternative is something
1447 more complicated (of course, if we expanded the epilogue as rtl
1448 this problem would go away). */
1449 /* ??? Note that this will always be true since only functions with
1450 empty frames have epilogue delay slots. See
1451 arc_delay_slots_for_epilogue. */
1452 && current_frame_info.gmask == 0
1453 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1454 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1455 return 1;
1456 return 0;
1457 }
1458 \f
1459 /* PIC */
1460
1461 /* Emit special PIC prologues and epilogues. */
1462
1463 void
1464 arc_finalize_pic (void)
1465 {
1466 /* nothing to do */
1467 }
1468 \f
1469 /* Return true if OP is a shift operator. */
1470
1471 int
1472 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1473 {
1474 switch (GET_CODE (op))
1475 {
1476 case ASHIFTRT:
1477 case LSHIFTRT:
1478 case ASHIFT:
1479 return 1;
1480 default:
1481 return 0;
1482 }
1483 }
1484
1485 /* Output the assembler code for doing a shift.
1486 We go to a bit of trouble to generate efficient code as the ARC only has
1487 single bit shifts. This is taken from the h8300 port. We only have one
1488 mode of shifting and can't access individual bytes like the h8300 can, so
1489 this is greatly simplified (at the expense of not generating hyper-
1490 efficient code).
1491
1492 This function is not used if the variable shift insns are present. */
1493
1494 /* ??? We assume the output operand is the same as operand 1.
1495 This can be optimized (deleted) in the case of 1 bit shifts. */
1496 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1497 using it here will give us a chance to play with it. */
1498
1499 const char *
1500 output_shift (rtx *operands)
1501 {
1502 rtx shift = operands[3];
1503 enum machine_mode mode = GET_MODE (shift);
1504 enum rtx_code code = GET_CODE (shift);
1505 const char *shift_one;
1506
1507 if (mode != SImode)
1508 abort ();
1509
1510 switch (code)
1511 {
1512 case ASHIFT: shift_one = "asl %0,%0"; break;
1513 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1514 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1515 default: abort ();
1516 }
1517
1518 if (GET_CODE (operands[2]) != CONST_INT)
1519 {
1520 if (optimize)
1521 output_asm_insn ("mov lp_count,%2", operands);
1522 else
1523 output_asm_insn ("mov %4,%2", operands);
1524 goto shiftloop;
1525 }
1526 else
1527 {
1528 int n = INTVAL (operands[2]);
1529
1530 /* If the count is negative, make it 0. */
1531 if (n < 0)
1532 n = 0;
1533 /* If the count is too big, truncate it.
1534 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1535 do the intuitive thing. */
1536 else if (n > GET_MODE_BITSIZE (mode))
1537 n = GET_MODE_BITSIZE (mode);
1538
1539 /* First see if we can do them inline. */
1540 if (n <= 8)
1541 {
1542 while (--n >= 0)
1543 output_asm_insn (shift_one, operands);
1544 }
1545 /* See if we can use a rotate/and. */
1546 else if (n == BITS_PER_WORD - 1)
1547 {
1548 switch (code)
1549 {
1550 case ASHIFT :
1551 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1552 break;
1553 case ASHIFTRT :
1554 /* The ARC doesn't have a rol insn. Use something else. */
1555 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1556 break;
1557 case LSHIFTRT :
1558 /* The ARC doesn't have a rol insn. Use something else. */
1559 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1560 break;
1561 default:
1562 break;
1563 }
1564 }
1565 /* Must loop. */
1566 else
1567 {
1568 char buf[100];
1569
1570 if (optimize)
1571 output_asm_insn ("mov lp_count,%c2", operands);
1572 else
1573 output_asm_insn ("mov %4,%c2", operands);
1574 shiftloop:
1575 if (optimize)
1576 {
1577 if (flag_pic)
1578 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1579 ASM_COMMENT_START);
1580 else
1581 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1582 ASM_COMMENT_START);
1583 output_asm_insn (buf, operands);
1584 output_asm_insn ("sr %4,[lp_start]", operands);
1585 output_asm_insn ("add %4,%4,1", operands);
1586 output_asm_insn ("sr %4,[lp_end]", operands);
1587 output_asm_insn ("nop\n\tnop", operands);
1588 if (flag_pic)
1589 fprintf (asm_out_file, "\t%s single insn loop\n",
1590 ASM_COMMENT_START);
1591 else
1592 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1593 ASM_COMMENT_START);
1594 output_asm_insn (shift_one, operands);
1595 }
1596 else
1597 {
1598 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1599 ASM_COMMENT_START);
1600 output_asm_insn ("sub.f %4,%4,1", operands);
1601 output_asm_insn ("nop", operands);
1602 output_asm_insn ("bn.nd 2f", operands);
1603 output_asm_insn (shift_one, operands);
1604 output_asm_insn ("b.nd 1b", operands);
1605 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1606 ASM_COMMENT_START);
1607 }
1608 }
1609 }
1610
1611 return "";
1612 }
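/* Examples of the strategies above (illustrative): a constant shift by 3
   emits "asl %0,%0" three times; a logical right shift by 31 uses the flag
   trick "asl.f 0,%0" / "adc %0,0,0", leaving only the old sign bit in bit 0;
   larger or variable counts fall back to the lp_count/lp_start/lp_end
   zero-overhead loop (or the sub.f/bn.nd loop when not optimizing).  */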
1613 \f
1614 /* Nested function support. */
1615
1616 /* Emit RTL insns to initialize the variable parts of a trampoline.
1617 FNADDR is an RTX for the address of the function's pure code.
1618 CXT is an RTX for the static chain value for the function. */
1619
1620 void
1621 arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1622 rtx fnaddr ATTRIBUTE_UNUSED,
1623 rtx cxt ATTRIBUTE_UNUSED)
1624 {
1625 }
1626 \f
1627 /* Set the cpu type and print out other fancy things,
1628 at the top of the file. */
1629
1630 static void
1631 arc_file_start (void)
1632 {
1633 default_file_start ();
1634 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1635 }
1636 \f
1637 /* Print operand X (an rtx) in assembler syntax to file FILE.
1638 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1639 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1640
1641 void
1642 arc_print_operand (FILE *file, rtx x, int code)
1643 {
1644 switch (code)
1645 {
1646 case '#' :
1647 /* Conditional branches. For now these are equivalent. */
1648 case '*' :
1649 /* Unconditional branches. Output the appropriate delay slot suffix. */
1650 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1651 {
1652 /* There's nothing in the delay slot. */
1653 fputs (".nd", file);
1654 }
1655 else
1656 {
1657 rtx jump = XVECEXP (final_sequence, 0, 0);
1658 rtx delay = XVECEXP (final_sequence, 0, 1);
1659 if (INSN_ANNULLED_BRANCH_P (jump))
1660 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1661 else
1662 fputs (".d", file);
1663 }
1664 return;
1665 case '?' : /* with leading "." */
1666 case '!' : /* without leading "." */
1667 /* This insn can be conditionally executed. See if the ccfsm machinery
1668 says it should be conditionalized. */
1669 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1670 {
1671 /* Is this insn in a delay slot? */
1672 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1673 {
1674 rtx insn = XVECEXP (final_sequence, 0, 1);
1675
1676 /* If the insn is annulled and is from the target path, we need
1677 to invert the condition test. */
1678 if (INSN_ANNULLED_BRANCH_P (insn))
1679 {
1680 if (INSN_FROM_TARGET_P (insn))
1681 fprintf (file, "%s%s",
1682 code == '?' ? "." : "",
1683 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1684 else
1685 fprintf (file, "%s%s",
1686 code == '?' ? "." : "",
1687 arc_condition_codes[arc_ccfsm_current_cc]);
1688 }
1689 else
1690 {
1691 /* This insn is executed for either path, so don't
1692 conditionalize it at all. */
1693 ; /* nothing to do */
1694 }
1695 }
1696 else
1697 {
1698 /* This insn isn't in a delay slot. */
1699 fprintf (file, "%s%s",
1700 code == '?' ? "." : "",
1701 arc_condition_codes[arc_ccfsm_current_cc]);
1702 }
1703 }
1704 return;
1705 case '~' :
1706 /* Output a nop if we're between a set of the condition codes,
1707 and a conditional branch. */
1708 if (last_insn_set_cc_p)
1709 fputs ("nop\n\t", file);
1710 return;
1711 case 'd' :
1712 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1713 return;
1714 case 'D' :
1715 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1716 (get_arc_condition_code (x))],
1717 file);
1718 return;
1719 case 'R' :
1720 /* Write second word of DImode or DFmode reference,
1721 register or memory. */
1722 if (GET_CODE (x) == REG)
1723 fputs (reg_names[REGNO (x)+1], file);
1724 else if (GET_CODE (x) == MEM)
1725 {
1726 fputc ('[', file);
1727 /* Handle possible auto-increment. Since it is pre-increment and
1728 we have already done it, we can just use an offset of four. */
1729 /* ??? This is taken from rs6000.c I think. I don't think it is
1730 currently necessary, but keep it around. */
1731 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1732 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1733 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1734 else
1735 output_address (plus_constant (XEXP (x, 0), 4));
1736 fputc (']', file);
1737 }
1738 else
1739 output_operand_lossage ("invalid operand to %%R code");
1740 return;
1741 case 'S' :
1742 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1743 || GET_CODE (x) == LABEL_REF)
1744 {
1745 fprintf (file, "%%st(");
1746 output_addr_const (file, x);
1747 fprintf (file, ")");
1748 return;
1749 }
1750 break;
1751 case 'H' :
1752 case 'L' :
1753 if (GET_CODE (x) == REG)
1754 {
1755 /* L = least significant word, H = most significant word */
1756 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1757 fputs (reg_names[REGNO (x)], file);
1758 else
1759 fputs (reg_names[REGNO (x)+1], file);
1760 }
1761 else if (GET_CODE (x) == CONST_INT
1762 || GET_CODE (x) == CONST_DOUBLE)
1763 {
1764 rtx first, second;
1765
1766 split_double (x, &first, &second);
1767 fprintf (file, "0x%08lx",
1768 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1769 }
1770 else
1771 output_operand_lossage ("invalid operand to %%H/%%L code");
1772 return;
1773 case 'A' :
1774 {
1775 char str[30];
1776
1777 if (GET_CODE (x) != CONST_DOUBLE
1778 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1779 abort ();
1780
1781 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1782 fprintf (file, "%s", str);
1783 return;
1784 }
1785 case 'U' :
1786 /* Output a load/store with update indicator if appropriate. */
1787 if (GET_CODE (x) == MEM)
1788 {
1789 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1790 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1791 fputs (".a", file);
1792 }
1793 else
1794 output_operand_lossage ("invalid operand to %%U code");
1795 return;
1796 case 'V' :
1797 /* Output cache bypass indicator for a load/store insn. Volatile memory
1798 refs are defined to use the cache bypass mechanism. */
1799 if (GET_CODE (x) == MEM)
1800 {
1801 if (MEM_VOLATILE_P (x))
1802 fputs (".di", file);
1803 }
1804 else
1805 output_operand_lossage ("invalid operand to %%V code");
1806 return;
1807 case 0 :
1808 /* Do nothing special. */
1809 break;
1810 default :
1811 /* Unknown flag. */
1812 output_operand_lossage ("invalid operand output code");
1813 }
1814
1815 switch (GET_CODE (x))
1816 {
1817 case REG :
1818 fputs (reg_names[REGNO (x)], file);
1819 break;
1820 case MEM :
1821 fputc ('[', file);
1822 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1823 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1824 GET_MODE_SIZE (GET_MODE (x))));
1825 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1826 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1827 - GET_MODE_SIZE (GET_MODE (x))));
1828 else
1829 output_address (XEXP (x, 0));
1830 fputc (']', file);
1831 break;
1832 case CONST_DOUBLE :
1833 /* We handle SFmode constants here as output_addr_const doesn't. */
1834 if (GET_MODE (x) == SFmode)
1835 {
1836 REAL_VALUE_TYPE d;
1837 long l;
1838
1839 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1840 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1841 fprintf (file, "0x%08lx", l);
1842 break;
1843 }
1844 /* Fall through. Let output_addr_const deal with it. */
1845 default :
1846 output_addr_const (file, x);
1847 break;
1848 }
1849 }
1850
1851 /* Print a memory address as an operand to reference that memory location. */
1852
1853 void
1854 arc_print_operand_address (FILE *file, rtx addr)
1855 {
1856 register rtx base, index = 0;
1857 int offset = 0;
1858
1859 switch (GET_CODE (addr))
1860 {
1861 case REG :
1862 fputs (reg_names[REGNO (addr)], file);
1863 break;
1864 case SYMBOL_REF :
1865 if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1866 {
1867 fprintf (file, "%%st(");
1868 output_addr_const (file, addr);
1869 fprintf (file, ")");
1870 }
1871 else
1872 output_addr_const (file, addr);
1873 break;
1874 case PLUS :
1875 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1876 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1877 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1878 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1879 else
1880 base = XEXP (addr, 0), index = XEXP (addr, 1);
1881 if (GET_CODE (base) != REG)
1882 abort ();
1883 fputs (reg_names[REGNO (base)], file);
1884 if (index == 0)
1885 {
1886 if (offset != 0)
1887 fprintf (file, ",%d", offset);
1888 }
1889 else if (GET_CODE (index) == REG)
1890 fprintf (file, ",%s", reg_names[REGNO (index)]);
1891 else if (GET_CODE (index) == SYMBOL_REF)
1892 fputc (',', file), output_addr_const (file, index);
1893 else
1894 abort ();
1895 break;
1896 case PRE_INC :
1897 case PRE_DEC :
1898 /* We shouldn't get here, as we've lost the mode of the memory object
1899 (which says how much to inc/dec by). */
1900 abort ();
1901 break;
1902 default :
1903 output_addr_const (file, addr);
1904 break;
1905 }
1906 }
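/* Illustrative examples (editor's note) of the address forms printed by
   arc_print_operand_address:
       (reg r1)                         ->  r1
       (plus (reg r1) (const_int 8))    ->  r1,8
       (plus (reg r1) (reg r2))         ->  r1,r2
       (symbol_ref "foo")               ->  foo
   PRE_INC/PRE_DEC never reach this function; they are handled in
   arc_print_operand, which adds or subtracts the access size itself.  */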
1907
1908 /* Update compare/branch separation marker. */
1909
1910 static void
1911 record_cc_ref (rtx insn)
1912 {
1913 last_insn_set_cc_p = current_insn_set_cc_p;
1914
1915 switch (get_attr_cond (insn))
1916 {
1917 case COND_SET :
1918 case COND_SET_ZN :
1919 case COND_SET_ZNC :
1920 if (get_attr_length (insn) == 1)
1921 current_insn_set_cc_p = 1;
1922 else
1923 current_insn_set_cc_p = 0;
1924 break;
1925 default :
1926 current_insn_set_cc_p = 0;
1927 break;
1928 }
1929 }
1930 \f
1931 /* Conditional execution support.
1932
1933 This is based on the ARM port but for now is much simpler.
1934
1935 A finite state machine takes care of noticing whether or not instructions
1936 can be conditionally executed, which decreases execution time and code
1937 size by deleting branch instructions. The fsm is controlled by
1938 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1939 in the .md file for the branch insns also have a hand in this. */
1940
1941 /* The possible states of the fsm controlling condition codes are:
1942 0: normal, do nothing special
1943 1: don't output this insn
1944 2: don't output this insn
1945 3: make insns conditional
1946 4: make insns conditional
1947
1948 State transitions (state->state by whom, under what condition):
1949 0 -> 1 final_prescan_insn, if insn is conditional branch
1950 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1951 1 -> 3 branch patterns, after having not output the conditional branch
1952 2 -> 4 branch patterns, after having not output the conditional branch
1953 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1954 (the target label has CODE_LABEL_NUMBER equal to
1955 arc_ccfsm_target_label).
1956 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1957
1958 If the jump clobbers the condition codes then we use states 2 and 4.
1959
1960 A similar thing can be done with conditional return insns.
1961
1962 We also handle separating branches from sets of the condition code.
1963 This is done here because knowledge of the ccfsm state is required:
1964 we may not be outputting the branch at all. */
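
/* Illustrative example (editor's sketch; mnemonics, registers and the
   label are hypothetical).  Code of the form

       b<inverse-cc>  .L1
       add            r0,r0,1
   .L1:

   is rewritten by the fsm so that the branch is deleted and the skipped
   insn is output with a condition suffix instead:

       add.<cc>       r0,r0,1

   The same idea, with the condition inverted once more, handles the
   `else' arm when the `then' arm ends with a jump over it.  */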
1965
1966 void
1967 arc_final_prescan_insn (rtx insn,
1968 rtx *opvec ATTRIBUTE_UNUSED,
1969 int noperands ATTRIBUTE_UNUSED)
1970 {
1971 /* BODY will hold the body of INSN. */
1972 register rtx body = PATTERN (insn);
1973
1974 /* This will be 1 if trying to repeat the trick (i.e. do the `else' part of
1975 an if/then/else), and things need to be reversed. */
1976 int reverse = 0;
1977
1978 /* If we start with a return insn, we only succeed if we find another one. */
1979 int seeking_return = 0;
1980
1981 /* START_INSN will hold the insn from where we start looking. This is the
1982 first insn after the following code_label if REVERSE is true. */
1983 rtx start_insn = insn;
1984
1985 /* Update compare/branch separation marker. */
1986 record_cc_ref (insn);
1987
1988 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1989 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1990 final_scan_insn which has `optimize' as a local. */
1991 if (optimize < 2 || TARGET_NO_COND_EXEC)
1992 return;
1993
1994 /* If in state 4, check if the target branch is reached, in order to
1995 change back to state 0. */
1996 if (arc_ccfsm_state == 4)
1997 {
1998 if (insn == arc_ccfsm_target_insn)
1999 {
2000 arc_ccfsm_target_insn = NULL;
2001 arc_ccfsm_state = 0;
2002 }
2003 return;
2004 }
2005
2006 /* If in state 3, it is possible to repeat the trick if this insn is an
2007 unconditional branch to a label, the previous target label (which is
2008 used only once) immediately follows this branch, and the label this
2009 branch jumps to is not too far off. In other words, "we've done the
2010 `then' part; see if we can do the `else' part." */
2011 if (arc_ccfsm_state == 3)
2012 {
2013 if (simplejump_p (insn))
2014 {
2015 start_insn = next_nonnote_insn (start_insn);
2016 if (GET_CODE (start_insn) == BARRIER)
2017 {
2018 /* ??? Isn't this always a barrier? */
2019 start_insn = next_nonnote_insn (start_insn);
2020 }
2021 if (GET_CODE (start_insn) == CODE_LABEL
2022 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2023 && LABEL_NUSES (start_insn) == 1)
2024 reverse = TRUE;
2025 else
2026 return;
2027 }
2028 else if (GET_CODE (body) == RETURN)
2029 {
2030 start_insn = next_nonnote_insn (start_insn);
2031 if (GET_CODE (start_insn) == BARRIER)
2032 start_insn = next_nonnote_insn (start_insn);
2033 if (GET_CODE (start_insn) == CODE_LABEL
2034 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2035 && LABEL_NUSES (start_insn) == 1)
2036 {
2037 reverse = TRUE;
2038 seeking_return = 1;
2039 }
2040 else
2041 return;
2042 }
2043 else
2044 return;
2045 }
2046
2047 if (GET_CODE (insn) != JUMP_INSN)
2048 return;
2049
2050 /* This jump might be paralleled with a clobber of the condition codes;
2051 the jump should always come first. */
2052 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2053 body = XVECEXP (body, 0, 0);
2054
2055 if (reverse
2056 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2057 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2058 {
2059 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2060 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2061 int then_not_else = TRUE;
2062 /* Nonzero if next insn must be the target label. */
2063 int next_must_be_target_label_p;
2064 rtx this_insn = start_insn, label = 0;
2065
2066 /* Register the insn jumped to. */
2067 if (reverse)
2068 {
2069 if (!seeking_return)
2070 label = XEXP (SET_SRC (body), 0);
2071 }
2072 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2073 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2074 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2075 {
2076 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2077 then_not_else = FALSE;
2078 }
2079 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2080 seeking_return = 1;
2081 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2082 {
2083 seeking_return = 1;
2084 then_not_else = FALSE;
2085 }
2086 else
2087 abort ();
2088
2089 /* See how many insns this branch skips, and what kind of insns. If all
2090 insns are okay, and the label or unconditional branch to the same
2091 label is not too far away, succeed. */
2092 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2093 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2094 insns_skipped++)
2095 {
2096 rtx scanbody;
2097
2098 this_insn = next_nonnote_insn (this_insn);
2099 if (!this_insn)
2100 break;
2101
2102 if (next_must_be_target_label_p)
2103 {
2104 if (GET_CODE (this_insn) == BARRIER)
2105 continue;
2106 if (GET_CODE (this_insn) == CODE_LABEL
2107 && this_insn == label)
2108 {
2109 arc_ccfsm_state = 1;
2110 succeed = TRUE;
2111 }
2112 else
2113 fail = TRUE;
2114 break;
2115 }
2116
2117 scanbody = PATTERN (this_insn);
2118
2119 switch (GET_CODE (this_insn))
2120 {
2121 case CODE_LABEL:
2122 /* Succeed if it is the target label, otherwise fail since
2123 control falls in from somewhere else. */
2124 if (this_insn == label)
2125 {
2126 arc_ccfsm_state = 1;
2127 succeed = TRUE;
2128 }
2129 else
2130 fail = TRUE;
2131 break;
2132
2133 case BARRIER:
2134 /* Succeed if the following insn is the target label.
2135 Otherwise fail.
2136 If return insns are used then the last insn in a function
2137 will be a barrier. */
2138 next_must_be_target_label_p = TRUE;
2139 break;
2140
2141 case CALL_INSN:
2142 /* We can handle a call insn if there are no insns after it,
2143 i.e. the next "insn" is the target label. We don't have to
2144 worry about delay slots, as such insns are SEQUENCEs inside
2145 INSNs. ??? It is possible to handle such insns though. */
2146 if (get_attr_cond (this_insn) == COND_CANUSE)
2147 next_must_be_target_label_p = TRUE;
2148 else
2149 fail = TRUE;
2150 break;
2151
2152 case JUMP_INSN:
2153 /* If this is an unconditional branch to the same label, succeed.
2154 If it is to another label, do nothing. If it is conditional,
2155 fail. */
2156 /* ??? Probably, the tests for the SET and the PC are unnecessary. */
2157
2158 if (GET_CODE (scanbody) == SET
2159 && GET_CODE (SET_DEST (scanbody)) == PC)
2160 {
2161 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2162 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2163 {
2164 arc_ccfsm_state = 2;
2165 succeed = TRUE;
2166 }
2167 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2168 fail = TRUE;
2169 }
2170 else if (GET_CODE (scanbody) == RETURN
2171 && seeking_return)
2172 {
2173 arc_ccfsm_state = 2;
2174 succeed = TRUE;
2175 }
2176 else if (GET_CODE (scanbody) == PARALLEL)
2177 {
2178 if (get_attr_cond (this_insn) != COND_CANUSE)
2179 fail = TRUE;
2180 }
2181 break;
2182
2183 case INSN:
2184 /* We can only do this with insns that can use the condition
2185 codes (and don't set them). */
2186 if (GET_CODE (scanbody) == SET
2187 || GET_CODE (scanbody) == PARALLEL)
2188 {
2189 if (get_attr_cond (this_insn) != COND_CANUSE)
2190 fail = TRUE;
2191 }
2192 /* We can't handle other insns like sequences. */
2193 else
2194 fail = TRUE;
2195 break;
2196
2197 default:
2198 break;
2199 }
2200 }
2201
2202 if (succeed)
2203 {
2204 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2205 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2206 else if (seeking_return || arc_ccfsm_state == 2)
2207 {
2208 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2209 {
2210 this_insn = next_nonnote_insn (this_insn);
2211 if (this_insn && (GET_CODE (this_insn) == BARRIER
2212 || GET_CODE (this_insn) == CODE_LABEL))
2213 abort ();
2214 }
2215 if (!this_insn)
2216 {
2217 /* Oh dear! We ran off the end; give up. */
2218 extract_insn_cached (insn);
2219 arc_ccfsm_state = 0;
2220 arc_ccfsm_target_insn = NULL;
2221 return;
2222 }
2223 arc_ccfsm_target_insn = this_insn;
2224 }
2225 else
2226 abort ();
2227
2228 /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
2229 what it was. */
2230 if (!reverse)
2231 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2232 0));
2233
2234 if (reverse || then_not_else)
2235 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2236 }
2237
2238 /* Restore recog_data. Getting the attributes of other insns can
2239 destroy this array, but final.c assumes that it remains intact
2240 across this call. */
2241 extract_insn_cached (insn);
2242 }
2243 }
2244
2245 /* Record that we are currently outputting label NUM with prefix PREFIX.
2246 If it's the label we're looking for, reset the ccfsm machinery.
2247
2248 Called from (*targetm.asm_out.internal_label). */
2249
2250 void
2251 arc_ccfsm_at_label (const char *prefix, int num)
2252 {
2253 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2254 && !strcmp (prefix, "L"))
2255 {
2256 arc_ccfsm_state = 0;
2257 arc_ccfsm_target_insn = NULL_RTX;
2258 }
2259 }
2260
2261 /* See if the current insn, which is a conditional branch, is to be
2262 deleted. */
2263
2264 int
2265 arc_ccfsm_branch_deleted_p (void)
2266 {
2267 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2268 return 1;
2269 return 0;
2270 }
2271
2272 /* Record that a branch isn't output because subsequent insns can be
2273 conditionalized. */
2274
2275 void
2276 arc_ccfsm_record_branch_deleted (void)
2277 {
2278 /* Indicate we're conditionalizing insns now. */
2279 arc_ccfsm_state += 2;
2280
2281 /* If the next insn is a subroutine call, we still need a nop between the
2282 cc setter and its user. We need to undo the effect of calling record_cc_ref
2283 for the just-deleted branch. */
2284 current_insn_set_cc_p = last_insn_set_cc_p;
2285 }
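
/* Illustrative usage (editor's sketch, not the actual arc.md code): a
   conditional branch output template would typically do something like

       if (arc_ccfsm_branch_deleted_p ())
         {
           arc_ccfsm_record_branch_deleted ();
           return "";
         }

   so that the branch is suppressed and the ccfsm advances from state 1/2
   to state 3/4 as described above.  */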
2286 \f
2287 void
2288 arc_va_start (tree valist, rtx nextarg)
2289 {
2290 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2291 if (current_function_args_info < 8
2292 && (current_function_args_info & 1))
2293 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2294
2295 std_expand_builtin_va_start (valist, nextarg);
2296 }
2297
2298 /* This is how to output a definition of an internal numbered label where
2299 PREFIX is the class of label and LABELNO is the number within the class. */
2300
2301 static void
2302 arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2303 {
2304 arc_ccfsm_at_label (prefix, labelno);
2305 default_internal_label (stream, prefix, labelno);
2306 }
2307
2308 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2309
2310 static void
2311 arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2312 {
2313 #if 0
2314 /* On the ARC we want to have libgcc's for multiple cpus in one binary.
2315 We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2316 and we'll get another suffix added on if -mmangle-cpu. */
2317 if (TARGET_MANGLE_CPU_LIBGCC)
2318 {
2319 fprintf (asm_out_file, "\t.rename\t_%s, _%s%s\n",
2320 XSTR (fun, 0), XSTR (fun, 0),
2321 arc_mangle_suffix);
2322 }
2323 #endif
2324 }
2325
2326 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2327
2328 static bool
2329 arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2330 {
2331 if (AGGREGATE_TYPE_P (type))
2332 return true;
2333 else
2334 {
2335 HOST_WIDE_INT size = int_size_in_bytes (type);
2336 return (size == -1 || size > 8);
2337 }
2338 }
2339
2340 /* For ARC, all aggregates and arguments greater than 8 bytes are
2341 passed by reference. */
2342
2343 static bool
2344 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2345 enum machine_mode mode, tree type,
2346 bool named ATTRIBUTE_UNUSED)
2347 {
2348 unsigned HOST_WIDE_INT size;
2349
2350 if (type)
2351 {
2352 if (AGGREGATE_TYPE_P (type))
2353 return true;
2354 size = int_size_in_bytes (type);
2355 }
2356 else
2357 size = GET_MODE_SIZE (mode);
2358
2359 return size > 8;
2360 }
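
/* Illustrative examples (editor's note) of how the two hooks above
   classify some C types:
     struct { int a, b; }   -- aggregate, so it is returned in memory and
                               passed by reference regardless of its size;
     long long (8 bytes)    -- non-aggregate and no larger than 8 bytes,
                               so it is returned in registers and passed
                               by value;
     any non-aggregate wider than 8 bytes, or a variable-sized type
                            -- returned in memory and passed by reference.  */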