1 /* GCC backend functions for C-SKY targets.
2 Copyright (C) 2018 Free Software Foundation, Inc.
3 Contributed by C-SKY Microsystems and Mentor Graphics.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #define IN_TARGET_CODE 1
25 #include "coretypes.h"
34 #include "stringpool.h"
41 #include "c-family/c-common.h"
43 #include "diagnostic-core.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
50 #include "insn-attr.h"
56 #include "sched-int.h"
57 #include "common/common-target.h"
58 #include "langhooks.h"
64 #include "target-globals.h"
66 #include "tm-constrs.h"
68 #include "pass_manager.h"
69 #include "tree-pass.h"
72 /* This file should be included last. */
73 #include "target-def.h"
75 /* Stack and register size macros. */
77 #define CSKY_NUM_WORDS(SIZE) \
78 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
79 #define CSKY_NUM_REGS(MODE) \
80 CSKY_NUM_WORDS (GET_MODE_SIZE (MODE))
81 #define CSKY_STACK_ALIGN(SIZE) \
82 (CSKY_NUM_WORDS (SIZE) * UNITS_PER_WORD)
84 /* Offsets and range macros. */
86 #define CSKY_LD16_MAX_OFFSET(MODE) \
87 (31 * GET_MODE_SIZE (MODE))
88 #define CSKY_LD32_MAX_OFFSET(MODE) \
89 (4095 * GET_MODE_SIZE (MODE))
90 #define CSKY_LD16_OFFSET_MASK(MODE) \
91 (CSKY_LD16_MAX_OFFSET (MODE) + GET_MODE_SIZE (MODE) - 1)
93 #define CSKY_ADDI16_MAX_IMM 256
94 #define CSKY_SUBI16_MAX_IMM 256
96 #define CSKY_CONSTPOOL_LABEL_PREFIX "LCP"
98 /* Array of the smallest class containing reg number REGNO, indexed by
99 REGNO. Used by REGNO_REG_CLASS. */
100 enum reg_class regno_reg_class
[FIRST_PSEUDO_REGISTER
] =
102 /* Registers r0-r7. */
103 MINI_REGS
, MINI_REGS
, MINI_REGS
, MINI_REGS
,
104 MINI_REGS
, MINI_REGS
, MINI_REGS
, MINI_REGS
,
105 /* Registers r8-r15. */
106 LOW_REGS
, LOW_REGS
, LOW_REGS
, LOW_REGS
,
107 LOW_REGS
, LOW_REGS
, SP_REGS
, LOW_REGS
,
108 /* Registers r16-r31. */
109 GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
,
110 GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
,
111 GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
,
112 GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
, GENERAL_REGS
,
115 /* CC,HI,LO registers. */
116 C_REGS
, HI_REGS
, LO_REGS
,
118 RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
,
119 RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
,
120 RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
,
121 RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
, RESERVE_REGS
,
123 V_REGS
, V_REGS
, V_REGS
, V_REGS
,
124 V_REGS
, V_REGS
, V_REGS
, V_REGS
,
125 V_REGS
, V_REGS
, V_REGS
, V_REGS
,
126 V_REGS
, V_REGS
, V_REGS
, V_REGS
,
128 RESERVE_REGS
, RESERVE_REGS
,
133 /* Arrays that map GCC register numbers to debugger register numbers,
134 '-1' means that is INVALID_REGNUM.
135 TODO: which rules according to here ? */
136 const int csky_dbx_regno
[FIRST_PSEUDO_REGISTER
] =
138 0, 1, 2, 3, 4, 5, 6, 7,
139 8, 9, 10, 11, 12, 13, 14, 15,
140 16, 17, 18, 19, 20, 21, 22, 23,
141 24, 25, 26, 27, 28, 29, 30, 31,
142 -1, -1, 36, 37, -1, -1, -1, -1,
143 -1, -1, -1, -1, -1, -1, -1, -1,
144 -1, -1, -1, -1, 56, 57, 58, 59,
145 60, 61, 62, 63, 64, 65, 66, 67,
146 68, 69, 70, 71, -1, -1, 72
149 /* Table of machine attributes. */
150 static tree
csky_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
151 static tree
csky_handle_isr_attribute (tree
*, tree
, tree
, int, bool *);
152 static const struct attribute_spec csky_attribute_table
[] =
154 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
155 affects_type_identity, handler, exclude } */
156 { "naked", 0, 0, true, false, false, false, csky_handle_fndecl_attribute
, NULL
},
157 /* Interrupt Service Routines have special prologue and epilogue requirements. */
158 { "interrupt", 0, 1, false, false, false, false, csky_handle_isr_attribute
, NULL
},
159 { "isr", 0, 1, false, false, false, false, csky_handle_isr_attribute
, NULL
},
160 { NULL
, 0, 0, false, false, false, false, NULL
, NULL
}
163 /* A C structure for machine-specific, per-function data.
164 This is added to the cfun structure. */
165 typedef struct GTY(()) machine_function
167 /* Records if LR has to be saved for far jumps. */
169 /* Records the type of the current function. */
170 unsigned long func_type
;
171 /* Record if the function has a variable argument list. */
172 int uses_anonymous_args
;
174 /* Stack frame layout information. If frame_init_p is true,
175 these fields have been initialized and don't need to be
177 unsigned int reg_mask
; /* non-volatile reg saves */
178 int arg_size
; /* stdarg spills (bytes) */
179 int reg_size
; /* non-volatile reg saves (bytes) */
180 int local_size
; /* locals */
181 int outbound_size
; /* arg overflow on calls out */
182 int frame_size
; /* total static size of stack frame */
190 /* These macros are for the func_type values above. */
191 #define CSKY_FT_TYPE_MASK ((1 << 3) - 1)
192 #define CSKY_FT_UNKNOWN 0 /* Type not been determined */
193 #define CSKY_FT_NORMAL 1 /* Normal function */
194 #define CSKY_FT_ISR 4 /* Interrupt service routine */
195 #define CSKY_FT_FIQ 5 /* Fast interrupt service routine */
196 #define CSKY_FT_EXCEPTION 6 /* Exception handler */
197 #define CSKY_FT_INTERRUPT (1 << 2) /* overlap CSKY_FT_ISR */
198 #define CSKY_FT_NAKED (1 << 3) /* No prologue and epilogue */
199 #define CSKY_FUNCTION_TYPE(t) ((t) & CSKY_FT_TYPE_MASK)
200 #define CSKY_FUNCTION_IS_INTERRUPT(t) ((t) & CSKY_FT_INTERRUPT)
201 #define CSKY_FUNCTION_IS_NAKED(t) ((t) & CSKY_FT_NAKED)
203 struct csky_processors
205 const char *const name
;
206 enum csky_processor_type core
;
208 enum csky_base_architecture base_arch
;
209 enum csky_isa_feature isa_bits
[CSKY_ISA_FEATURE_GET (max
)];
212 static struct csky_processors all_cores
[] =
215 #define CSKY_CORE(NAME, CORE, X, ARCH, ISA) \
216 {NAME, TARGET_CPU_##CORE, #ARCH, CSKY_BASE_ARCH_##ARCH, \
217 {ISA CSKY_ISA_FEATURE_GET (none)}},
218 #include "csky_cores.def"
220 {NULL
, TARGET_CPU_csky_none
, NULL
, CSKY_BASE_ARCH_NONE
, \
221 {CSKY_ISA_FEATURE_GET (none
)}}
224 static struct csky_processors all_architectures
[] =
227 #define CSKY_ARCH(NAME, CORE, ARCH, ISA) \
228 {NAME, TARGET_CPU_##CORE, #ARCH, CSKY_BASE_ARCH_##ARCH, \
229 {ISA CSKY_ISA_FEATURE_GET (none)}},
230 #include "csky_cores.def"
232 {NULL
, TARGET_CPU_csky_none
, NULL
, CSKY_BASE_ARCH_NONE
, \
233 {CSKY_ISA_FEATURE_GET (none
)}}
239 enum csky_isa_feature isa_bits
[CSKY_ISA_FEATURE_GET (max
)];
242 static const struct csky_fpu_desc all_fpus
[] =
245 #define CSKY_FPU(NAME, CNAME, ISA) \
246 {NAME, {ISA CSKY_ISA_FEATURE_GET (none)}},
247 #include "csky_cores.def"
251 /* Active target architecture. */
252 struct csky_build_target
254 /* Name of the target CPU, if known, or NULL if the target CPU was not
255 specified by the user (and inferred from the -march option). */
256 const char *core_name
;
257 /* Name of the target ARCH. NULL if there is a selected CPU. */
258 const char *arch_name
;
259 /* Preprocessor substring (never NULL). */
260 const char *arch_pp_name
;
261 /* CPU identifier for the core we're compiling for (architecturally). */
262 enum csky_processor_type arch_core
;
263 /* The base architecture value. */
264 enum csky_base_architecture base_arch
;
265 /* Bitmap encapsulating the isa_bits for the target environment. */
269 struct csky_build_target csky_active_target
;
271 /* The following are used in the .md file as equivalents to bits. */
272 int csky_arch_isa_features
[CSKY_ISA_FEATURE_GET (max
)] = {0};
274 /* The highest CSKY architecture version supported by the target. */
275 enum csky_base_architecture csky_base_arch
= CSKY_TARGET_ARCH_GET (NONE
);
277 /* Forward definitions of types. */
278 typedef struct minipool_node Mnode
;
279 typedef struct minipool_fixup Mfix
;
281 static GTY(()) int tls_labelno
;
284 /* Maximum constant offset that can be added/subtracted from SP in a
285 single instruction. For ck801, this is for addsp/subsp, otherwise
286 it is the range of addi/subi. */
287 #define CSKY_MAX_SP_ADJUST \
288 (CSKY_TARGET_ARCH (CK801) ? 508 : 4096)
291 /* Implement TARGET_CPU_CPP_BUILTINS. */
293 #define builtin_define(MACRO) cpp_define (pfile, MACRO)
296 csky_cpu_cpp_builtins (cpp_reader
*pfile
)
298 const char *arch_name
= csky_active_target
.arch_pp_name
;
299 char *pp_name
= (char *) alloca (1 + strlen (arch_name
) + 4);
300 sprintf (pp_name
, "__%s__", arch_name
);
301 builtin_define (pp_name
);
303 builtin_define ("__csky__=2");
304 builtin_define ("__CSKY__=2");
305 builtin_define ("__ckcore__=2");
306 builtin_define ("__CKCORE__=2");
308 builtin_define ("__CSKYABIV2__");
309 builtin_define ("__cskyabiv2__");
310 builtin_define ("__CSKYABI__=2");
311 builtin_define ("__cskyabi__=2");
313 if (TARGET_BIG_ENDIAN
)
315 builtin_define ("__ckcoreBE__");
316 builtin_define ("__cskyBE__");
317 builtin_define ("__cskybe__");
318 builtin_define ("__CSKYBE__");
322 builtin_define ("__ckcoreLE__");
323 builtin_define ("__cskyLE__");
324 builtin_define ("__cskyle__");
325 builtin_define ("__CSKYLE__");
328 if (TARGET_HARD_FLOAT
)
330 builtin_define ("__csky_hard_float__");
331 builtin_define ("__CSKY_HARD_FLOAT__");
335 builtin_define ("__csky_soft_float__");
336 builtin_define ("__CSKY_SOFT_FLOAT__");
339 if (CSKY_ISA_FEATURE (fpv2_sf
))
341 builtin_define ("__csky_fpuv2__");
342 builtin_define ("__CSKY_FPUV2__");
347 builtin_define ("__csky_elrw__");
348 builtin_define ("__CSKY_ELRW__");
352 builtin_define ("__csky_istack__");
353 builtin_define ("__CSKY_ISTACK__");
357 builtin_define ("__csky_mp__");
358 builtin_define ("__CSKY_MP__");
362 builtin_define ("__csky_cp__");
363 builtin_define ("__CSKY_CP__");
367 builtin_define ("__csky_cache__");
368 builtin_define ("__CSKY_CACHE__");
372 builtin_define ("__csky_security__");
373 builtin_define ("__CSKY_SECURITY__");
377 builtin_define ("__csky_trust__");
378 builtin_define ("__CSKY_TRUST__");
382 builtin_define ("__csky_dsp__");
383 builtin_define ("__CSKY_DSP__");
387 builtin_define ("__csky_edsp__");
388 builtin_define ("__CSKY_EDSP__");
392 builtin_define ("__csky_vdsp__");
393 builtin_define ("__CSKY_VDSP__");
398 /******************************************************************
400 ******************************************************************/
403 #undef TARGET_PROMOTE_FUNCTION_MODE
404 #define TARGET_PROMOTE_FUNCTION_MODE \
405 default_promote_function_mode_always_promote
407 #undef TARGET_CONSTANT_ALIGNMENT
408 #define TARGET_CONSTANT_ALIGNMENT csky_constant_alignment
411 /******************************************************************
412 * Stack Layout and Calling Conventions *
413 ******************************************************************/
415 #undef TARGET_CAN_ELIMINATE
416 #define TARGET_CAN_ELIMINATE csky_can_eliminate
418 #undef TARGET_FUNCTION_ARG
419 #define TARGET_FUNCTION_ARG csky_function_arg
421 #undef TARGET_FUNCTION_ARG_ADVANCE
422 #define TARGET_FUNCTION_ARG_ADVANCE csky_function_arg_advance
424 #undef TARGET_FUNCTION_VALUE
425 #define TARGET_FUNCTION_VALUE csky_function_value
427 #undef TARGET_LIBCALL_VALUE
428 #define TARGET_LIBCALL_VALUE csky_libcall_value
430 #undef TARGET_FUNCTION_VALUE_REGNO_P
431 #define TARGET_FUNCTION_VALUE_REGNO_P csky_function_value_regno_p
433 #undef TARGET_SPLIT_COMPLEX_ARG
434 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
436 #undef TARGET_PROMOTE_PROTOTYPES
437 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
439 #undef TARGET_MUST_PASS_IN_STACK
440 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
442 #undef TARGET_ARG_PARTIAL_BYTES
443 #define TARGET_ARG_PARTIAL_BYTES csky_arg_partial_bytes
445 #undef TARGET_PASS_BY_REFERENCE
446 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
448 #undef TARGET_ASM_OUTPUT_MI_THUNK
449 #define TARGET_ASM_OUTPUT_MI_THUNK csky_output_mi_thunk
451 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
452 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
453 hook_bool_const_tree_hwi_hwi_const_tree_true
455 #undef TARGET_ASM_FUNCTION_PROLOGUE
456 #define TARGET_ASM_FUNCTION_PROLOGUE csky_output_function_prologue
458 #undef TARGET_ASM_FUNCTION_EPILOGUE
459 #define TARGET_ASM_FUNCTION_EPILOGUE csky_output_function_epilogue
461 #undef TARGET_WARN_FUNC_RETURN
462 #define TARGET_WARN_FUNC_RETURN csky_warn_func_return
464 #undef TARGET_RETURN_IN_MEMORY
465 #define TARGET_RETURN_IN_MEMORY csky_return_in_memory
468 /******************************************************************
469 * Implementing the Varargs Macros *
470 ******************************************************************/
473 #undef TARGET_SETUP_INCOMING_VARARGS
474 #define TARGET_SETUP_INCOMING_VARARGS csky_setup_incoming_varargs
477 /******************************************************************
478 * Implicit Calls to Library Routines *
479 ******************************************************************/
482 #undef TARGET_INIT_LIBFUNCS
483 #define TARGET_INIT_LIBFUNCS csky_init_libfuncs
486 /******************************************************************
487 * Dividing the Output into Sections (Texts, Data, . . . ) *
488 ******************************************************************/
491 #undef TARGET_HAVE_TLS
492 #define TARGET_HAVE_TLS TARGET_CSKY_LINUX
495 /******************************************************************
496 * Defining target-specific uses of __attribute__ *
497 ******************************************************************/
500 #undef TARGET_ATTRIBUTE_TABLE
501 #define TARGET_ATTRIBUTE_TABLE csky_attribute_table
503 #undef TARGET_OPTION_OVERRIDE
504 #define TARGET_OPTION_OVERRIDE csky_option_override
507 /* Implement the BRANCH_COST target macro. */
510 csky_default_branch_cost (bool speed_p ATTRIBUTE_UNUSED
,
511 bool predictable_p ATTRIBUTE_UNUSED
)
513 return csky_branch_cost
;
517 csky_default_logical_op_non_short_circuit (void)
519 return BRANCH_COST (optimize_function_for_speed_p (cfun
), false) >= 2;
522 /******************************************************************
524 ******************************************************************/
526 #undef TARGET_HARD_REGNO_NREGS
527 #define TARGET_HARD_REGNO_NREGS csky_hard_regno_nregs
529 #undef TARGET_HARD_REGNO_MODE_OK
530 #define TARGET_HARD_REGNO_MODE_OK csky_hard_regno_mode_ok
532 #undef TARGET_MODES_TIEABLE_P
533 #define TARGET_MODES_TIEABLE_P csky_modes_tieable_p
535 #undef TARGET_CAN_CHANGE_MODE_CLASS
536 #define TARGET_CAN_CHANGE_MODE_CLASS csky_can_change_mode_class
538 #undef TARGET_CONDITIONAL_REGISTER_USAGE
539 #define TARGET_CONDITIONAL_REGISTER_USAGE csky_conditional_register_usage
541 #undef TARGET_CLASS_LIKELY_SPILLED_P
542 #define TARGET_CLASS_LIKELY_SPILLED_P csky_class_likely_spilled_p
544 #undef TARGET_PREFERRED_RELOAD_CLASS
545 #define TARGET_PREFERRED_RELOAD_CLASS csky_preferred_reload_class
547 #undef TARGET_CLASS_MAX_NREGS
548 #define TARGET_CLASS_MAX_NREGS csky_class_max_nregs
550 #undef TARGET_SECONDARY_RELOAD
551 #define TARGET_SECONDARY_RELOAD csky_secondary_reload
553 #undef TARGET_SPILL_CLASS
554 #define TARGET_SPILL_CLASS csky_spill_class
557 /******************************************************************
559 ******************************************************************/
562 #undef TARGET_CANNOT_FORCE_CONST_MEM
563 #define TARGET_CANNOT_FORCE_CONST_MEM csky_cannot_force_const_mem
565 #undef TARGET_LEGITIMATE_CONSTANT_P
566 #define TARGET_LEGITIMATE_CONSTANT_P csky_legitimate_constant_p
568 #undef TARGET_LEGITIMIZE_ADDRESS
569 #define TARGET_LEGITIMIZE_ADDRESS csky_legitimize_address
571 #undef TARGET_LEGITIMATE_ADDRESS_P
572 #define TARGET_LEGITIMATE_ADDRESS_P csky_legitimate_address_p
575 /******************************************************************
577 ******************************************************************/
580 #undef TARGET_CANNOT_COPY_INSN_P
581 #define TARGET_CANNOT_COPY_INSN_P csky_cannot_copy_insn_p
584 /******************************************************************
586 ******************************************************************/
589 #undef TARGET_PRINT_OPERAND
590 #define TARGET_PRINT_OPERAND csky_print_operand
592 #undef TARGET_PRINT_OPERAND_ADDRESS
593 #define TARGET_PRINT_OPERAND_ADDRESS csky_print_operand_address
595 #undef TARGET_ASM_UNALIGNED_HI_OP
596 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
598 #undef TARGET_ASM_UNALIGNED_SI_OP
599 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
601 #undef TARGET_DWARF_REGISTER_SPAN
602 #define TARGET_DWARF_REGISTER_SPAN csky_dwarf_register_span
605 /******************************************************************
606 * Miscellaneous Parameters *
607 ******************************************************************/
610 #undef TARGET_MACHINE_DEPENDENT_REORG
611 #define TARGET_MACHINE_DEPENDENT_REORG csky_reorg
613 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
614 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS csky_allocate_stack_slots_for_args
616 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
617 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
620 /******************************************************************
621 * Trampolines for Nested Functions *
622 ******************************************************************/
625 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
626 #define TARGET_ASM_TRAMPOLINE_TEMPLATE csky_asm_trampoline_template
627 #undef TARGET_TRAMPOLINE_INIT
628 #define TARGET_TRAMPOLINE_INIT csky_trampoline_init
630 /* The low bit is ignored by jsr and jmp instructions so is safe to use. */
631 #undef TARGET_CUSTOM_FUNCTION_DESCRIPTORS
632 #define TARGET_CUSTOM_FUNCTION_DESCRIPTORS 1
634 /******************************************************************
635 * Describing Relative Costs of Operations *
636 ******************************************************************/
639 #undef TARGET_REGISTER_MOVE_COST
640 #define TARGET_REGISTER_MOVE_COST csky_register_move_cost
642 #undef TARGET_MEMORY_MOVE_COST
643 #define TARGET_MEMORY_MOVE_COST csky_memory_move_cost
645 #undef TARGET_RTX_COSTS
646 #define TARGET_RTX_COSTS csky_rtx_costs
648 #undef TARGET_ADDRESS_COST
649 #define TARGET_ADDRESS_COST csky_address_cost
652 /******************************************************************
654 ******************************************************************/
657 /* FIXME: the max offset is related to mode size, the following is
658 defined according to SImode. How to deal with HImode and
659 QImode, and should the min offset be defined? */
660 #undef TARGET_MAX_ANCHOR_OFFSET
661 #define TARGET_MAX_ANCHOR_OFFSET \
662 ((TARGET_MINI_REGISTERS && optimize_size) ? 127 : 4095)
665 /******************************************************************
666 * Condition Code Status *
667 ******************************************************************/
670 #undef TARGET_FIXED_CONDITION_CODE_REGS
671 #define TARGET_FIXED_CONDITION_CODE_REGS csky_fixed_condition_code_regs
674 /******************************************************************
675 * Adjusting the Instruction Scheduler *
676 ******************************************************************/
679 #undef TARGET_SCHED_ISSUE_RATE
680 #define TARGET_SCHED_ISSUE_RATE csky_sched_issue_rate
682 #undef TARGET_SCHED_ADJUST_COST
683 #define TARGET_SCHED_ADJUST_COST csky_sched_adjust_cost
686 /* The declaration of functions. */
687 static void push_csky_minipool_fix (rtx_insn
*, HOST_WIDE_INT
, rtx
*,
689 static void csky_print_operand (FILE *stream
, rtx x
, int code
);
692 /* Define a table to map ISR attribute arguments onto function type
697 const char *const arg
;
698 const unsigned long return_value
;
699 } isr_attribute_entry
;
701 static const isr_attribute_entry isr_attribute_map
[] =
703 {"irq", CSKY_FT_ISR
},
704 {"IRQ", CSKY_FT_ISR
},
705 {"fiq", CSKY_FT_FIQ
},
706 {"FIQ", CSKY_FT_FIQ
},
707 {NULL
, CSKY_FT_NORMAL
}
711 /* Return the function type of the current function, if it has not been
712 determined, return CSKY_FT_UNKNOWN. */
715 get_csky_isr_type (tree argument
)
717 const isr_attribute_entry
*ptr
;
720 /* if argument is NULL, set default value ISR. */
721 if (argument
== NULL_TREE
)
724 if (TREE_VALUE (argument
) == NULL_TREE
725 || TREE_CODE (TREE_VALUE (argument
)) != STRING_CST
)
726 return CSKY_FT_UNKNOWN
;
728 arg
= TREE_STRING_POINTER (TREE_VALUE (argument
));
730 for (ptr
= isr_attribute_map
; ptr
->arg
!= NULL
; ptr
++)
731 if (strcmp (arg
, ptr
->arg
) == 0)
732 return ptr
->return_value
;
734 return CSKY_FT_UNKNOWN
;
737 /* Classify cfun as a normal function or some sort of interrupt
738 handler, and set the corresponding bits in cfun->machine->func_type. */
741 get_csky_current_func_type (void)
743 if (CSKY_FUNCTION_TYPE (cfun
->machine
->func_type
) == CSKY_FT_UNKNOWN
)
745 unsigned long type
= CSKY_FT_UNKNOWN
;
749 gcc_assert (TREE_CODE (current_function_decl
) == FUNCTION_DECL
);
751 attr
= DECL_ATTRIBUTES (current_function_decl
);
752 a
= lookup_attribute ("naked", attr
);
754 type
|= CSKY_FT_NAKED
;
755 a
= lookup_attribute ("isr", attr
);
757 a
= lookup_attribute ("interrupt", attr
);
759 type
|= CSKY_FT_NORMAL
;
761 type
|= get_csky_isr_type (TREE_VALUE (a
));
763 cfun
->machine
->func_type
= type
;
766 return cfun
->machine
->func_type
;
769 /* These typedefs are located at the start of this file, so that
770 they can be used in the prototypes there. This comment is to
771 remind readers of that fact so that the following structures
772 can be understood more easily.
774 typedef struct minipool_node Mnode;
775 typedef struct minipool_fixup Mfix; */
779 /* Doubly linked chain of entries. */
782 /* The maximum offset into the code that this entry can be placed. While
783 pushing fixes for forward references, all entries are sorted in order
784 of increasing max_address. */
785 HOST_WIDE_INT max_address
;
786 /* Similarly for an entry inserted for a backwards ref. */
787 HOST_WIDE_INT min_address
;
788 /* The number of fixes referencing this entry. This can become zero
789 if we "unpush" an entry. In this case we ignore the entry when we
790 come to emit the code. */
792 /* The offset from the start of the minipool. */
793 HOST_WIDE_INT offset
;
794 /* The value in table. */
796 /* The mode of value. */
798 /* The size of the value. */
802 struct minipool_fixup
806 HOST_WIDE_INT address
;
812 HOST_WIDE_INT forwards
;
813 HOST_WIDE_INT backwards
;
816 static Mnode
*minipool_vector_head
;
817 static Mnode
*minipool_vector_tail
;
818 static rtx minipool_vector_label
;
819 static HOST_WIDE_INT constpool_label_no
= 0;
821 /* Obstack for minipool constant handling. */
822 static struct obstack minipool_obstack
;
823 static char *minipool_startobj
;
824 /* The linked list of all minipool fixes required for this function. */
825 Mfix
*minipool_fix_head
;
826 Mfix
*minipool_fix_tail
;
827 /* The fix entry for the current minipool, once it has been placed. */
828 Mfix
*minipool_barrier
;
830 /* Allow GC scanning of the minipool obstack. */
832 csky_add_gc_roots (void)
834 gcc_obstack_init (&minipool_obstack
);
835 minipool_startobj
= (char *) obstack_alloc (&minipool_obstack
, 0);
838 /* Implement TARGET_CONSTANT_ALIGNMENT.
839 Make strings word-aligned so strcpy from constants will be faster. */
841 csky_constant_alignment (const_tree exp
, HOST_WIDE_INT align
)
843 if (TREE_CODE (exp
) == STRING_CST
845 && align
< BITS_PER_WORD
)
846 return BITS_PER_WORD
;
850 /* Record that there is a natural barrier in the insn stream at
854 push_csky_minipool_barrier (rtx_insn
*insn
, HOST_WIDE_INT address
)
856 Mfix
*fix
= (Mfix
*) obstack_alloc (&minipool_obstack
, sizeof (*fix
));
859 fix
->address
= address
;
862 if (minipool_fix_head
!= NULL
)
863 minipool_fix_tail
->next
= fix
;
865 minipool_fix_head
= fix
;
867 minipool_fix_tail
= fix
;
870 /* Compute the size of a vector jump table. */
873 get_csky_jump_table_size (rtx insn
)
/* ADDR_VECs only take room if read-only data goes into the text
877 if (JUMP_TABLES_IN_TEXT_SECTION
|| readonly_data_section
== text_section
)
879 rtx body
= PATTERN (insn
);
880 int elt
= GET_CODE (body
) == ADDR_DIFF_VEC
? 1 : 0;
882 HOST_WIDE_INT modesize
;
884 modesize
= GET_MODE_SIZE (GET_MODE (body
));
885 size
= modesize
* XVECLEN (body
, elt
);
889 /* Round up size of TBB table to a halfword boundary. */
890 size
= (size
+ 1) & ~(HOST_WIDE_INT
)1;
893 /* No padding necessary for TBH. */
907 /* Scan INSN and note any of its operands that need fixing.
908 If DO_PUSHES is false we do not actually push any of the fixups
909 needed. The function returns TRUE if any fixups were needed/pushed. */
912 note_csky_invalid_constants (rtx_insn
*insn
, HOST_WIDE_INT address
,
918 extract_constrain_insn (insn
);
920 if (recog_data
.n_alternatives
== 0)
923 /* Fill in recog_op_alt with information about the constraints of
925 preprocess_constraints (insn
);
927 const operand_alternative
*op_alt
= which_op_alt ();
928 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
930 /* Things we need to fix can only occur in inputs. */
931 if (recog_data
.operand_type
[opno
] != OP_IN
)
934 /* If this alternative is a memory reference, then any mention
935 of constants in this alternative is really to fool reload
936 into allowing us to accept one there. We need to fix them up
937 now so that we output the right code. */
938 if (op_alt
[opno
].memory_ok
)
940 rtx op
= recog_data
.operand
[opno
];
945 push_csky_minipool_fix (insn
, address
,
946 recog_data
.operand_loc
[opno
],
947 recog_data
.operand_mode
[opno
], op
);
957 /* Add a constant to the minipool for a forward reference. Returns the
958 node added or NULL if the constant will not fit in this pool. */
961 add_csky_minipool_forward_ref (Mfix
*fix
)
963 /* If set, max_mp is the first pool_entry that has a lower
964 constraint than the one we are trying to add. */
965 Mnode
*max_mp
= NULL
;
966 HOST_WIDE_INT max_address
= fix
->address
+ fix
->forwards
;
969 /* If the minipool starts before the end of FIX->INSN then this FIX
970 can not be placed into the current pool. Furthermore, adding the
971 new constant pool entry may cause the pool to start FIX_SIZE bytes
973 if (minipool_vector_head
974 && (fix
->address
+ get_attr_length (fix
->insn
)
975 >= minipool_vector_head
->max_address
- fix
->fix_size
))
978 /* Scan the pool to see if a constant with the same value has
979 already been added. While we are doing this, also note the
980 location where we must insert the constant if it doesn't already
982 for (mp
= minipool_vector_head
; mp
!= NULL
; mp
= mp
->next
)
984 if (GET_CODE (fix
->value
) == GET_CODE (mp
->value
)
985 && fix
->mode
== mp
->mode
986 && (GET_CODE (fix
->value
) != CODE_LABEL
987 || (CODE_LABEL_NUMBER (fix
->value
)
988 == CODE_LABEL_NUMBER (mp
->value
)))
989 && rtx_equal_p (fix
->value
, mp
->value
))
991 /* More than one fix references this entry. */
996 /* Note the insertion point if necessary. */
997 if (max_mp
== NULL
&& mp
->max_address
> max_address
)
1001 /* The value is not currently in the minipool, so we need to create
1002 a new entry for it. If MAX_MP is NULL, the entry will be put on
1003 the end of the list since the placement is less constrained than
1004 any existing entry. Otherwise, we insert the new fix before
1005 MAX_MP and, if necessary, adjust the constraints on the other
1008 mp
->fix_size
= fix
->fix_size
;
1009 mp
->mode
= fix
->mode
;
1010 mp
->value
= fix
->value
;
1012 /* Not yet required for a backwards ref. */
1013 mp
->min_address
= -65536;
1017 mp
->max_address
= max_address
;
1019 mp
->prev
= minipool_vector_tail
;
1021 if (mp
->prev
== NULL
)
1023 minipool_vector_head
= mp
;
1024 minipool_vector_label
1025 = gen_csky_constpool_label (gen_rtx_CONST_INT (VOIDmode
,
1026 constpool_label_no
++));
1029 mp
->prev
->next
= mp
;
1031 minipool_vector_tail
= mp
;
1035 if (max_address
> max_mp
->max_address
- mp
->fix_size
)
1036 mp
->max_address
= max_mp
->max_address
- mp
->fix_size
;
1038 mp
->max_address
= max_address
;
1041 mp
->prev
= max_mp
->prev
;
1043 if (mp
->prev
!= NULL
)
1044 mp
->prev
->next
= mp
;
1046 minipool_vector_head
= mp
;
1049 /* Save the new entry. */
1052 /* Scan over the preceding entries and adjust their addresses as
1054 while (mp
->prev
!= NULL
1055 && mp
->prev
->max_address
> mp
->max_address
- mp
->prev
->fix_size
)
1057 mp
->prev
->max_address
= mp
->max_address
- mp
->prev
->fix_size
;
1065 /* Return the cost of forcibly inserting a barrier after INSN. */
1068 get_csky_barrier_cost (rtx_insn
*insn
)
1070 /* Basing the location of the pool on the loop depth is preferable,
1071 but at the moment, the basic block information seems to be
1072 corrupt by this stage of the compilation. */
1074 rtx next
= next_nonnote_insn (insn
);
1076 if (next
!= NULL
&& GET_CODE (next
) == CODE_LABEL
)
1079 switch (GET_CODE (insn
))
1082 /* It will always be better to place the table before the label, rather
1091 return base_cost
- 10;
1094 return base_cost
+ 10;
1099 /* Find the best place in the insn stream in the range
1100 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
1101 Create the barrier by inserting a jump and add a new fix entry for
1104 create_csky_fix_barrier (Mfix
*fix
, Mfix
*fix_next
,
1105 HOST_WIDE_INT max_address
)
1107 rtx_barrier
*barrier
;
1108 rtx_insn
*from
= (fix
? fix
->insn
: get_insns ());
1109 /* The instruction after which we will insert the jump. */
1110 rtx_insn
*selected
= NULL
;
1112 /* The address at which the jump instruction will be placed. */
1113 HOST_WIDE_INT selected_address
= 0;
1115 HOST_WIDE_INT count
= (fix
? fix
->address
: 0);
1116 HOST_WIDE_INT max_count
= max_address
;
1117 rtx_code_label
*label
= gen_label_rtx ();
1119 selected_cost
= get_csky_barrier_cost (from
);
1121 while (from
&& count
< max_count
)
1124 rtx_jump_table_data
*table
;
1126 /* Count the length of this insn. */
1127 count
+= get_attr_length (from
);
1129 /* If there is a jump table, add its length. */
1130 if (tablejump_p (from
, NULL
, &table
))
1132 count
+= get_csky_jump_table_size (table
);
1134 /* Jump tables aren't in a basic block, so base the cost on
1135 the dispatch insn. If we select this location, we will
1136 still put the pool after the table. */
1137 new_cost
= get_csky_barrier_cost (from
);
1139 if (count
< max_count
1140 && (!selected
|| new_cost
<= selected_cost
))
1143 selected_cost
= new_cost
;
1144 selected_address
= count
;
1147 /* Continue after the dispatch table. */
1148 from
= NEXT_INSN (table
);
1152 new_cost
= get_csky_barrier_cost (from
);
1154 if (count
< max_count
1155 && (!selected
|| new_cost
<= selected_cost
))
1158 selected_cost
= new_cost
;
1159 selected_address
= count
;
1162 from
= NEXT_INSN (from
);
1165 /* Make sure that we found a place to insert the jump. */
1166 gcc_assert (selected
);
1168 /* Create a new JUMP_INSN that branches around a barrier. */
1169 from
= emit_jump_insn_after (gen_jump (label
), selected
);
1170 JUMP_LABEL (from
) = label
;
1171 barrier
= emit_barrier_after (from
);
1172 emit_label_after (label
, barrier
);
1174 /* Create a minipool barrier entry for the new barrier. */
1175 new_fix
= (Mfix
*) obstack_alloc (&minipool_obstack
, sizeof (* new_fix
));
1176 new_fix
->insn
= barrier
;
1177 new_fix
->address
= selected_address
;
1180 new_fix
->next
= fix
->next
;
1181 fix
->next
= new_fix
;
1184 new_fix
->next
= fix_next
;
1190 /* Print a symbolic form of the constant X to the dump file F.
1191 This is used for dump output for -mconstpool in the target-dependent
1195 print_csky_value (FILE *f
, rtx x
)
1197 switch (GET_CODE (x
))
1200 fprintf (f
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (x
));
1204 fprintf (f
, "<0x%lx,0x%lx>", (long)XWINT (x
, 2), (long)XWINT (x
, 3));
1212 for (i
= 0; i
< CONST_VECTOR_NUNITS (x
); i
++)
1214 fprintf (f
, HOST_WIDE_INT_PRINT_HEX
,
1215 INTVAL (CONST_VECTOR_ELT (x
, i
)));
1216 if (i
< (CONST_VECTOR_NUNITS (x
) - 1))
1224 fprintf (f
, "\"%s\"", XSTR (x
, 0));
1228 fprintf (f
, "`%s'", XSTR (x
, 0));
1232 fprintf (f
, "L%d", INSN_UID (XEXP (x
, 0)));
1236 print_csky_value (f
, XEXP (x
, 0));
1240 print_csky_value (f
, XEXP (x
, 0));
1242 print_csky_value (f
, XEXP (x
, 1));
1250 fprintf (f
, "????");
1256 /* Record INSN, which will need fixing up to load a value from the
1257 minipool. ADDRESS is the offset of the insn since the start of the
1258 function; LOC is a pointer to the part of the insn which requires
1259 fixing; VALUE is the constant that must be loaded, which is of type
1263 push_csky_minipool_fix (rtx_insn
*insn
, HOST_WIDE_INT address
, rtx
*loc
,
1264 machine_mode mode
, rtx value
)
1266 #define CSKY_ELRW16_RANGE 1400
1267 #define CSKY_LRW16_RANGE 700
1268 #define CSKY_CONSTANT_POOL_RANGE (TARGET_ELRW ? CSKY_ELRW16_RANGE \
1271 /* Fixes less than a word need padding out to a word boundary. */
1272 #define CSKY_MINIPOOL_FIX_SIZE(mode) \
1273 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
1275 Mfix
*fix
= (Mfix
*) obstack_alloc (&minipool_obstack
, sizeof (*fix
));
1278 fix
->address
= address
;
1281 fix
->fix_size
= CSKY_MINIPOOL_FIX_SIZE (mode
);
1283 fix
->forwards
= CSKY_CONSTANT_POOL_RANGE
;
1285 fix
->minipool
= NULL
;
1287 /* If an insn doesn't have a range defined for it, then it isn't
1288 expecting to be reworked by this code. Better to stop now than
1289 to generate duff assembly code. */
1290 gcc_assert (fix
->forwards
|| fix
->backwards
);
1295 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
1296 GET_MODE_NAME (mode
),
1297 INSN_UID (insn
), (unsigned long) address
,
1298 -1 * (long)fix
->backwards
, (long)fix
->forwards
);
1299 print_csky_value (dump_file
, fix
->value
);
1300 fprintf (dump_file
, "\n");
1303 /* Add it to the chain of fixes. */
1306 if (minipool_fix_head
!= NULL
)
1307 minipool_fix_tail
->next
= fix
;
1309 minipool_fix_head
= fix
;
1311 minipool_fix_tail
= fix
;
1315 /* Fill in the offsets for minipool entries. */
1318 assign_csky_minipool_offsets (Mfix
*barrier
)
1320 HOST_WIDE_INT offset
= 0;
1323 minipool_barrier
= barrier
;
1325 for (mp
= minipool_vector_head
; mp
!= NULL
; mp
= mp
->next
)
1327 mp
->offset
= offset
;
1329 if (mp
->refcount
> 0)
1330 offset
+= mp
->fix_size
;
1335 /* Output the literal table. */
1337 static HOST_WIDE_INT
1338 dump_csky_minipool (rtx_insn
*scan
)
1342 HOST_WIDE_INT pool_length
= 0;
1346 ";; Emitting minipool after insn %u;\
1347 address %ld; align %d (bytes)\n",
1348 INSN_UID (scan
), (unsigned long) minipool_barrier
->address
, 4);
1350 scan
= emit_insn_after (gen_align_4 (), scan
);
1351 scan
= emit_insn_after (minipool_vector_label
, scan
);
1353 for (mp
= minipool_vector_head
; mp
!= NULL
; mp
= nmp
)
1355 if (mp
->refcount
> 0)
1359 fprintf (dump_file
, ";; Offset %u, min %ld, max %ld ",
1360 (unsigned) mp
->offset
, (unsigned long) mp
->min_address
,
1361 (unsigned long) mp
->max_address
);
1362 print_csky_value (dump_file
, mp
->value
);
1363 fputc ('\n', dump_file
);
1366 switch (mp
->fix_size
)
1369 scan
= emit_insn_after (gen_consttable_4 (mp
->value
), scan
);
1373 scan
= emit_insn_after (gen_consttable_8 (mp
->value
), scan
);
1385 minipool_vector_head
= minipool_vector_tail
= NULL
;
1386 scan
= emit_barrier_after (scan
);
1391 /* Return true if INSN is a minipool load or instruction that will be
1392 converted to one. It is assumed that INSN has type attribute "load". */
1395 csky_minipool_load_p (rtx_insn
*insn
)
1399 extract_insn_cached (insn
);
1401 op1
= recog_data
.operand
[1];
1403 /* This is a constant that has not yet been turned into
1405 if (CONSTANT_P (op1
))
1408 /* Constant pool loads are label_refs. */
1409 if (GET_CODE (op1
) == ZERO_EXTEND
|| GET_CODE (op1
) == SIGN_EXTEND
)
1410 op1
= XEXP (op1
, 0);
1411 if (GET_CODE (op1
) != MEM
)
1413 addr
= XEXP (op1
, 0);
1414 if (GET_CODE (addr
) == PLUS
&& CONST_INT_P (XEXP (addr
, 1)))
1415 addr
= XEXP (addr
, 0);
1416 return GET_CODE (addr
) == LABEL_REF
;
1420 /* Compute the attribute "length" of push or pop insn, according to
1421 the registers it uses. */
1424 csky_compute_pushpop_length (rtx
*operands
)
1426 rtx parallel_op
= operands
[2];
1427 /* Initialize to elements number of PARALLEL. */
1428 unsigned indx
= XVECLEN (parallel_op
, 0) - 1;
1429 unsigned first_indx
= 0;
1430 unsigned regno
= REGNO (operands
[1]);
1432 if (regno
> CSKY_LR_REGNUM
)
1435 /* Check each register in the list. */
1436 for (; indx
> first_indx
; indx
--)
1438 regno
= REGNO (XEXP (XVECEXP (parallel_op
, 0, indx
), 0));
1439 /* If a register number higher than 15 is included, a 32-bit insn
1441 if (regno
> CSKY_LR_REGNUM
)
1448 /* Emit constant pools for -mconstpool. */
1450 csky_emit_constant_pools (void)
1453 HOST_WIDE_INT address
= 0;
1456 minipool_fix_head
= minipool_fix_tail
= NULL
;
1458 /* The first insn must always be a note, or the code below won't
1459 scan it properly. */
1460 insn
= get_insns ();
1461 gcc_assert (NOTE_P (insn
));
1463 /* Scan the insns and record the operands that need fixing. */
1464 for (insn
= next_nonnote_insn (insn
); insn
;
1465 insn
= next_nonnote_insn (insn
))
1467 if (BARRIER_P (insn
))
1468 push_csky_minipool_barrier (insn
, address
);
1469 else if (INSN_P (insn
))
1471 rtx_jump_table_data
*table
;
1473 note_csky_invalid_constants (insn
, address
, true);
1474 address
+= get_attr_length (insn
);
1476 /* If the insn is a vector jump, add the size of the table
1477 and skip the table. */
1478 if (tablejump_p (insn
, NULL
, &table
))
1480 address
+= get_csky_jump_table_size (table
);
1486 fix
= minipool_fix_head
;
1488 /* Now scan the fixups and perform the required changes. */
1492 Mfix
*last_added_fix
;
1493 Mfix
*last_barrier
= NULL
;
1496 bool has_pending_const
= false;
1498 /* Check if there is any pending constant not processed. */
1499 for (mp
= minipool_vector_head
; mp
; mp
= mp
->next
)
1500 if (mp
->refcount
> 0)
1502 has_pending_const
= true;
1506 /* If no pending constant, skip over barrier insns. */
1507 if (has_pending_const
== false)
1509 while (fix
&& BARRIER_P (fix
->insn
))
1515 last_added_fix
= NULL
;
1517 for (ftmp
= fix
; ftmp
; ftmp
= ftmp
->next
)
1519 if (BARRIER_P (ftmp
->insn
))
1521 if (minipool_vector_head
1522 && ftmp
->address
>= minipool_vector_head
->max_address
)
1525 last_barrier
= ftmp
;
1529 ftmp
->minipool
= add_csky_minipool_forward_ref (ftmp
);
1530 if (ftmp
->minipool
== NULL
)
1533 last_added_fix
= ftmp
; /* Keep track of the last fix added. */
1536 /* If the last added fix is a barrier, dump minipool after it. */
1537 if (last_added_fix
&& BARRIER_P (last_added_fix
->insn
))
1538 ftmp
= last_barrier
;
1541 /* ftmp is first fix that we can't fit into this pool.
1542 Insert a new barrier in the code somewhere between the previous
1543 fix and this one, and arrange to jump around it. */
1544 HOST_WIDE_INT max_address
;
1546 /* The last item on the list of fixes must be a barrier, so
1547 we can never run off the end of the list of fixes without
1548 last_barrier being set. */
1551 /* Check that there isn't another fix that is in range that
1552 we couldn't fit into this pool because the pool was
1553 already too large: we need to put the pool before such an
1554 instruction. The pool itself may come just after the
1555 fix because create_csky_fix_barrier also allows space for a
1556 jump instruction. */
1557 max_address
= minipool_vector_head
->max_address
;
1558 if (ftmp
->address
< max_address
)
1559 max_address
= ftmp
->address
+ 1;
1560 last_barrier
= create_csky_fix_barrier (last_added_fix
, ftmp
,
1564 assign_csky_minipool_offsets (last_barrier
);
1566 /* Scan over the fixes we have identified for this pool, fixing them
1567 up and adding the constants to the pool itself. */
1568 for (this_fix
= fix
; this_fix
&& ftmp
!= this_fix
;
1569 this_fix
= this_fix
->next
)
1571 if (GET_CODE (this_fix
->insn
) != BARRIER
)
1574 = plus_constant (Pmode
,
1575 gen_rtx_LABEL_REF (VOIDmode
,
1576 minipool_vector_label
),
1577 this_fix
->minipool
->offset
);
1578 rtx insn_body
= PATTERN (this_fix
->insn
);
1579 rtx src
= XEXP (insn_body
, 1);
1580 *this_fix
->loc
= gen_rtx_MEM (this_fix
->mode
, addr
);
1581 if (GET_CODE (this_fix
->value
) == SYMBOL_REF
)
1582 emit_insn_after (gen_rtx_UNSPEC_VOLATILE (VOIDmode
,
1584 VUNSPEC_SYMBOL_REF
),
1588 dump_csky_minipool (last_barrier
->insn
);
1590 if (fix
->next
== NULL
)
1594 /* Free the minipool memory. */
1595 obstack_free (&minipool_obstack
, minipool_startobj
);
1599 /* Implement TARGET_MACHINE_DEPENDENT_REORG. This handles
1600 -mconstpool output. */
1605 if (TARGET_CONSTANT_POOL
)
1606 csky_emit_constant_pools ();
1610 /* Check to see if the current function contains a branch insn with the
1611 far jump attribute set. Such a function uses the LR register. */
1614 csky_far_jump_used_p (void)
1617 if (cfun
->machine
->far_jump_used
)
1620 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1621 if (GET_CODE (insn
) == JUMP_INSN
1622 /* Ignore tablejump patterns. */
1623 && GET_CODE (PATTERN (insn
)) != ADDR_VEC
1624 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
1625 && get_attr_far_jump (insn
) == FAR_JUMP_YES
)
1627 cfun
->machine
->far_jump_used
= 1;
1634 /* Return the mask of registers used by the current function. Set
1635 COUNT to the number of registers used. */
1638 get_csky_live_regs (int *count
)
1641 unsigned int live_regs_mask
= 0;
1644 for (reg
= 0; reg
< CSKY_NGPR_REGS
; reg
++)
1648 /* Ignore unsupported registers. */
1649 if (CSKY_TARGET_ARCH (CK801
) && reg
> 8 && reg
< 13)
1651 if ((CSKY_TARGET_ARCH (CK801
)
1652 || CSKY_TARGET_ARCH (CK802
)
1653 || CSKY_TARGET_ARCH (CK803
))
1657 /* Caller-saved registers marked as used. */
1658 if (df_regs_ever_live_p (reg
) && !call_really_used_regs
[reg
])
1661 /* Frame pointer marked used. */
1662 else if (frame_pointer_needed
&& reg
== FRAME_POINTER_REGNUM
)
1665 /* This is required for CK801/802 where FP is a fixed reg, otherwise
1666 we end up with no FP value available to the DWARF-2 unwinder. */
1667 else if (crtl
->calls_eh_return
&& reg
== FRAME_POINTER_REGNUM
)
1670 /* CK801/802 also need special handling for LR because it's clobbered
1672 else if ((CSKY_TARGET_ARCH (CK801
) || CSKY_TARGET_ARCH (CK802
))
1673 && reg
== CSKY_LR_REGNUM
1674 && (!crtl
->is_leaf
|| csky_far_jump_used_p ()))
1677 /* Register is used for EH data return. */
1678 else if (crtl
->calls_eh_return
1679 && reg
>= CSKY_FIRST_EH_RETDATA_REGNUM
1680 && reg
<= CSKY_LAST_EH_RETDATA_REGNUM
)
1683 /* We need a temporary reg to hold the offset for adjusting the SP
1684 for a large stack frame. */
1685 if (reg
== CSKY_STACKADJUST_REGNUM
1686 && cfun
->machine
->reg_offset
> CSKY_MAX_SP_ADJUST
* 2)
1689 /* Add reg to the mask. */
1693 live_regs_mask
|= (1 << reg
);
1696 return live_regs_mask
;
1699 /* Compute the stack frame layout, storing sizes of the various pieces
1702 Stack frames constructed in the prologue look like:
1703 ... caller's frame ...
1704 incoming SP -> caller's outbound argument overflow
1706 optional FP -> register save
1709 adjusted SP -> outbound argument overflow
1711 with SP/FP pointing at the base (low address) of the respective area,
1712 and each area aligned to a word boundary. */
1715 csky_layout_stack_frame (void)
1717 machine_function
*infp
= cfun
->machine
;
1720 if (infp
->frame_init_p
)
1723 /* Get sizes of local variables & outbound arguments. */
1724 infp
->outbound_size
= CSKY_STACK_ALIGN (crtl
->outgoing_args_size
);
1725 infp
->local_offset
= infp
->outbound_size
;
1726 infp
->local_size
= CSKY_STACK_ALIGN (get_frame_size ());
1727 infp
->reg_offset
= infp
->local_offset
+ infp
->local_size
;
1729 /* Now compute size of argument spill + saved regs. These do not
1730 need explicit alignment since they are already word-sized. */
1731 infp
->reg_mask
= get_csky_live_regs (®_count
);
1732 infp
->reg_size
= reg_count
* UNITS_PER_WORD
;
1733 infp
->arg_offset
= infp
->reg_offset
+ infp
->reg_size
;
1734 infp
->arg_size
= crtl
->args
.pretend_args_size
;
1735 infp
->frame_size
= infp
->arg_offset
+ infp
->arg_size
;
1736 infp
->frame_init_p
= reload_completed
;
1739 /* Implement TARGET_CAN_ELIMINATE. */
1741 csky_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1743 if (to
== STACK_POINTER_REGNUM
)
1744 return !frame_pointer_needed
;
1748 /* Worker function for INITIAL_ELIMINATION_OFFSET macro.
1749 Define the offset between two registers, one to be eliminated, and
1750 the other its replacement, at the start of a routine. */
1753 csky_initial_elimination_offset (int from
, int to
)
1757 csky_layout_stack_frame ();
1759 /* Set OFFSET to the offset to the initial stack pointer. */
1762 case FRAME_POINTER_REGNUM
:
1763 offset
= cfun
->machine
->reg_offset
;
1766 case ARG_POINTER_REGNUM
:
1767 offset
= cfun
->machine
->arg_offset
;
1774 /* If we are asked for the offset to the frame pointer instead,
1775 then subtract the difference between the frame pointer and stack
1777 if (to
== FRAME_POINTER_REGNUM
)
1778 offset
-= cfun
->machine
->reg_offset
;
1783 /* Determine where to put an argument to a function.
1784 Value is zero to push the argument on the stack,
1785 or a hard register in which to store the argument.
1787 MODE is the argument's machine mode.
1788 TYPE is the data type of the argument (as a tree).
1789 This is null for libcalls where that information may
1791 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1792 the preceding args and about the function being called.
1793 NAMED is nonzero if this argument is a named parameter
1794 (otherwise it is an extra parameter matching an ellipsis). */
1796 csky_function_arg (cumulative_args_t pcum_v
, machine_mode mode
,
1797 const_tree type ATTRIBUTE_UNUSED
,
1798 bool named ATTRIBUTE_UNUSED
)
1800 CUMULATIVE_ARGS
*pcum
= get_cumulative_args (pcum_v
);
1802 if (*pcum
< CSKY_NPARM_REGS
)
1803 return gen_rtx_REG (mode
, CSKY_FIRST_PARM_REGNUM
+ *pcum
);
1809 /* Return the number of registers (words) needed to pass an argument of
1813 csky_num_arg_regs (machine_mode mode
, const_tree type
)
1817 if (type
&& mode
== BLKmode
)
1818 size
= int_size_in_bytes (type
);
1820 size
= GET_MODE_SIZE (mode
);
1822 return CSKY_NUM_WORDS (size
);
1826 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
1829 csky_function_arg_advance (cumulative_args_t pcum_v
, machine_mode mode
,
1830 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1832 CUMULATIVE_ARGS
*pcum
= get_cumulative_args (pcum_v
);
1833 int param_size
= csky_num_arg_regs (mode
, type
);
1835 if (*pcum
+ param_size
> CSKY_NPARM_REGS
)
1836 *pcum
= CSKY_NPARM_REGS
;
1838 *pcum
+= param_size
;
1842 /* Implement TARGET_FUNCTION_VALUE. */
1844 csky_function_value (const_tree type
, const_tree func
,
1845 bool outgoing ATTRIBUTE_UNUSED
)
1848 int unsignedp ATTRIBUTE_UNUSED
;
1851 mode
= TYPE_MODE (type
);
1852 size
= int_size_in_bytes (type
);
1854 /* Since we promote return types, we must promote the mode here too. */
1855 if (INTEGRAL_TYPE_P (type
))
1857 mode
= promote_function_mode (type
, mode
, &unsignedp
, func
, 1);
1858 return gen_rtx_REG (mode
, CSKY_FIRST_RET_REGNUM
);
1861 if (mode
== BLKmode
&& size
> UNITS_PER_WORD
1862 && size
<= UNITS_PER_WORD
* 2)
1865 ret_regs
[0] = gen_rtx_EXPR_LIST (SImode
,
1866 gen_rtx_REG (SImode
,
1867 CSKY_FIRST_RET_REGNUM
),
1868 GEN_INT (0 * UNITS_PER_WORD
));
1869 ret_regs
[1] = gen_rtx_EXPR_LIST (SImode
,
1870 gen_rtx_REG (SImode
,
1871 CSKY_FIRST_RET_REGNUM
+ 1),
1872 GEN_INT (1 * UNITS_PER_WORD
));
1874 rtvec vec
= gen_rtvec (2, ret_regs
[0], ret_regs
[1]);
1876 return gen_rtx_PARALLEL (mode
, vec
);
1879 return gen_rtx_REG (mode
, CSKY_FIRST_RET_REGNUM
);
1883 /* Implement TARGET_LIBCALL_VALUE. */
1885 csky_libcall_value (machine_mode mode
,
1886 const_rtx libcall ATTRIBUTE_UNUSED
)
1888 return gen_rtx_REG (mode
, CSKY_FIRST_RET_REGNUM
);
1892 /* Implement TARGET_FUNCTION_VALUE_REGNO_P.
1893 On C-SKY, only r0 can return results. */
1896 csky_function_value_regno_p (const unsigned int regno
)
1898 return (regno
== CSKY_FIRST_RET_REGNUM
);
1902 /* Return an RTX indicating where the return address to the
1903 calling function can be found. */
1905 csky_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
1910 return get_hard_reg_initial_val (Pmode
, CSKY_LR_REGNUM
);
1914 /* Implement TARGET_ARG_PARTIAL_BYTES.
1915 Return the number of bytes at the beginning of an argument
1916 that must be put in registers. The value must be zero for arguments
1917 that are passed entirely in registers or
1918 that are entirely pushed on the stack. */
1920 csky_arg_partial_bytes (cumulative_args_t pcum_v
, machine_mode mode
,
1921 tree type
, bool named ATTRIBUTE_UNUSED
)
1923 CUMULATIVE_ARGS
*pcum
= get_cumulative_args (pcum_v
);
1924 int param_size
= csky_num_arg_regs (mode
, type
);
1926 if (*pcum
< CSKY_NPARM_REGS
1927 && *pcum
+ param_size
> CSKY_NPARM_REGS
)
1928 return (CSKY_NPARM_REGS
- *pcum
) * UNITS_PER_WORD
;
1934 /* Implement TARGET_SETUP_INCOMING_VARARGS.
1935 On C-Sky the copy from the argument registers to the stack is emitted
1936 by the prologue hooks, so here we just have to note how much stack space
1940 csky_setup_incoming_varargs (cumulative_args_t pcum_v
,
1944 int second_time ATTRIBUTE_UNUSED
)
1946 CUMULATIVE_ARGS
*pcum
= get_cumulative_args (pcum_v
);
1947 CUMULATIVE_ARGS local_cum
;
1948 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
1951 cfun
->machine
->uses_anonymous_args
= 1;
1953 csky_function_arg_advance (local_cum_v
, mode
, type
, true);
1954 regs_to_push
= CSKY_NPARM_REGS
- local_cum
;
1956 *pretend_size
= regs_to_push
* UNITS_PER_WORD
;
1960 /* Implement TARGET_ASM_OUTPUT_MI_THUNK.
1961 Output code to add DELTA to the first argument, and then jump
1962 to FUNCTION. Used for C++ multiple inheritance. */
1965 csky_output_mi_thunk (FILE *file
, tree thunk ATTRIBUTE_UNUSED
,
1966 HOST_WIDE_INT delta
,
1967 HOST_WIDE_INT vcall_offset
,
1970 const char *thiz
= "a0";
1971 const char *reg0
= "t0";
1972 const char *reg1
= "t1";
1973 int maxoff
= 4096; /* Constant range for addi/subi. */
1975 final_start_function (emit_barrier (), file
, 1);
1977 rtx fnaddr
= XEXP (DECL_RTL (function
), 0);
1979 if (CSKY_TARGET_ARCH (CK801
))
1981 /* CK801 can't use t registers and has only 16-bit addi/subi. */
1985 if (vcall_offset
> maxoff
|| vcall_offset
< -maxoff
)
1986 fprintf (file
, "\tpush\tl0, l1\n");
1987 else if (delta
> maxoff
|| delta
< -maxoff
)
1988 fprintf (file
, "\tpush\tl0\n");
1991 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
1994 /* Add delta to this_rtx. */
1997 if (delta
> maxoff
|| delta
< -maxoff
)
1999 fprintf (file
, "\tlrw\t%s, %ld\n", reg0
, (long)delta
);
2000 fprintf (file
, "\taddu\t%s, %s, %s\n", thiz
, thiz
, reg0
);
2003 fprintf (file
, "\t%s\t%s, %s, %ld\n",
2004 (delta
> 0 ? "addi" : "subi"), thiz
, thiz
,
2005 (long)(delta
> 0 ? delta
: -delta
));
2008 /* If needed, add *(*this_rtx + vcall_offset) to this_rtx. */
2009 if (vcall_offset
!= 0)
2011 fprintf (file
, "\tld.w\t%s, (%s, 0)\n", reg0
, thiz
);
2013 if (vcall_offset
> maxoff
|| vcall_offset
< -maxoff
)
2015 fprintf (file
, "\tlrw\t%s, %ld\n", reg1
, (long)vcall_offset
);
2016 fprintf (file
, "\taddu\t%s, %s, %s\n", reg0
, reg0
, reg1
);
2019 fprintf (file
, "\t%s\t%s, %s, %ld\n",
2020 (vcall_offset
> 0 ? "addi" : "subi"), reg0
, reg0
,
2021 (long)(vcall_offset
> 0 ? vcall_offset
: -vcall_offset
));
2023 /* Load the offset and add it to this_rtx */
2024 fprintf (file
, "\tld.w\t%s, (%s, 0)\n", reg0
, reg0
);
2025 fprintf (file
, "\taddu\t%s, %s, %s\n", thiz
, thiz
, reg0
);
2028 /* We must pop the scratch regs individually instead of using the
2029 "pop" insn, which also does a return. */
2030 if (CSKY_TARGET_ARCH (CK801
))
2032 if (vcall_offset
> maxoff
|| vcall_offset
< -maxoff
)
2034 fprintf (file
, "\tld.w\tl0, (sp, 0)\n");
2035 fprintf (file
, "\tld.w\tl1, (sp, 4)\n");
2036 fprintf (file
, "\taddi\t sp, sp, 8\n");
2038 else if (delta
> maxoff
|| delta
< -maxoff
)
2040 fprintf (file
, "\tld.w\tl0, (sp, 0)\n");
2041 fprintf (file
, "\taddi\tsp, sp, 4\n");
2045 fprintf (file
, "\tjbr\t");
2046 output_addr_const (file
, fnaddr
);
2047 fprintf (file
, "\n");
2049 final_end_function ();
2053 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE.
2054 Conditionally modify five variables fixed_regs, call_used_regs, global_regs,
2055 reg_names, and reg_class_contents, to take into account any dependence of
2056 these register sets on target flags.
2058 CK801 has registers r0-r8 and r13-r15. CK802 and CK803 have registers
2059 r0-r15 (the "low" registers). Other cpus use registers r0-r31 with
2060 -mhigh-registers, otherwise also only r0-r15.
2062 CK801 only has 16-bit instructions, most of which can only reference
2063 r0-r7 (the "mini" registers). So we mark regs outside that range as
2064 fixed. -msmart can be used on other arch variants to force the same
2065 behavior because it results in smaller code size.
2067 TODO: investigate whether it's beneficial to use r8-r13 as a spill
2068 class when TARGET_MINI_REGISTERS instead of making them unusable by
2069 the register allocator. */
2072 csky_conditional_register_usage (void)
2074 /* Only use mini registers in smart mode or 801. */
2075 if (TARGET_MINI_REGISTERS
)
2079 for (i
= (CSKY_LAST_MINI_REGNUM
+ 1); i
< 32; i
++)
2082 call_used_regs
[i
] = 1;
2083 call_really_used_regs
[i
] = 1;
2086 /* For some targets, the high registers are not supported.
2087 CPUs other than ck801/ck802/ck803 use high registers
2088 depending on -mhigh-registers option. */
2089 else if (CSKY_TARGET_ARCH (CK802
)
2090 || CSKY_TARGET_ARCH (CK803
)
2091 || !TARGET_HIGH_REGISTERS
)
2095 for (i
= CSKY_FIRST_HIGH_REGNUM
; i
<= CSKY_LAST_HIGH_REGNUM
; i
++)
2098 call_used_regs
[i
] = 1;
2099 call_really_used_regs
[i
] = 1;
2103 /* On CK801/CK802 we must mark lr as a fixed register because it is
2104 used to implement far jumps.
2105 FIXME: perhaps there should be a command-line option controlling
2106 use of lr for far jumps on ck802 when !TARGET_MINI_REGS, when
2107 you really want lr to be available to the register allocator and
2108 you know there are no far jumps in the code. */
2109 if (CSKY_TARGET_ARCH (CK801
) || CSKY_TARGET_ARCH (CK802
))
2111 fixed_regs
[CSKY_LR_REGNUM
] = 1;
2112 call_used_regs
[CSKY_LR_REGNUM
] = 1;
2113 call_really_used_regs
[CSKY_LR_REGNUM
] = 0;
2116 /* The hi/lo registers are only supported in dsp mode. */
2119 fixed_regs
[CSKY_HI_REGNUM
] = 1;
2120 call_used_regs
[CSKY_HI_REGNUM
] = 1;
2121 call_really_used_regs
[CSKY_HI_REGNUM
] = 1;
2123 fixed_regs
[CSKY_LO_REGNUM
] = 1;
2124 call_used_regs
[CSKY_LO_REGNUM
] = 1;
2125 call_really_used_regs
[CSKY_LO_REGNUM
] = 1;
2128 /* The V_REGS are only supported in hard float mode. */
2129 if (!TARGET_HARD_FLOAT
)
2133 for (regno
= CSKY_FIRST_VFP_REGNUM
;
2134 regno
<= CSKY_LAST_VFP_REGNUM
; regno
++)
2136 fixed_regs
[regno
] = 1;
2137 call_used_regs
[regno
] = 1;
2138 call_really_used_regs
[regno
] = 1;
2142 /* In pic mode, the gb register is not available for register
2143 allocation. Since gb is not clobbered by function
2144 calls, set its call_really_used_regs to 0. */
2147 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2148 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2149 call_really_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 0;
2153 /* Implement TARGET_HARD_REGNO_NREGS. */
2155 csky_hard_regno_nregs (unsigned int regno
, machine_mode mode
)
2157 if (regno
>= CSKY_FIRST_VFP_REGNUM
&& !CSKY_TARGET_ARCH (CK803
))
2160 return CSKY_NUM_REGS (mode
);
2163 /* Implement TARGET_HARD_REGNO_MODE_OK. Return true if REGNO is a
2164 valid register for holding a quantity of type MODE. */
2167 csky_hard_regno_mode_ok (unsigned int regno
, machine_mode mode
)
2169 int nregs
= CSKY_NUM_REGS (mode
);
2171 /* We can't handle more than doubleword sizes for any register. */
2175 /* For general registers, return true if mode is one word size.
2176 When the size is larger than one word size, there should
2177 be two successive hard registers to put the data. */
2178 if (regno
< CSKY_NGPR_REGS
)
2182 else if (TARGET_MINI_REGISTERS
)
2183 return (regno
< CSKY_LAST_MINI_REGNUM
);
2184 else if (CSKY_TARGET_ARCH (CK802
)
2185 || CSKY_TARGET_ARCH (CK803
)
2186 || !TARGET_HIGH_REGISTERS
)
2187 /* Without high register, r15 cannot hold doubleword data. */
2188 return (regno
< (CSKY_SP_REGNUM
- 1));
2190 return (regno
< (CSKY_SP_REGNUM
- 1)
2191 || (regno
>= CSKY_LR_REGNUM
2192 && regno
< CSKY_LAST_HIGH_UNFIXED_REGNUM
));
2194 else if (regno
== CSKY_CC_REGNUM
)
2195 return (mode
== CCmode
);
2196 else if (regno
== CSKY_HI_REGNUM
|| regno
== CSKY_LO_REGNUM
)
2198 /* Don't allocate hi,lo register for float data even
2199 if in dsp mode, because it will cause high cost
2200 to reload data from hi,lo register. */
2201 if (!TARGET_DSP
|| mode
== SFmode
|| mode
== DFmode
)
2203 else if (nregs
== 2)
2204 return (regno
== CSKY_HI_REGNUM
);
2208 else if (CSKY_VREG_P (regno
) && TARGET_HARD_FLOAT
)
2214 /* Implement TARGET_MODES_TIEABLE_P. We can't tie DFmode with other modes
2215 when V_REGs might be in use because those registers mess with the stored
2218 csky_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
2220 return !(TARGET_HARD_FLOAT
2222 && (mode1
== DFmode
|| mode2
== DFmode
));
2225 /* Implement TARGET_CAN_CHANGE_MODE_CLASS.
2226 V_REG registers can't do subreg as all values are reformatted to
2227 internal precision. */
2229 csky_can_change_mode_class (machine_mode from
,
2233 return (GET_MODE_SIZE (from
) == GET_MODE_SIZE (to
)
2234 || !reg_classes_intersect_p (V_REGS
, rclass
));
2237 /* Implement TARGET_CLASS_LIKELY_SPILLED_P.
2238 We need to define this for MINI_REGS when we only use r0 - r7.
2239 Otherwise we can end up using r0-r4 for function arguments, and don't
2240 have enough left over to do doubleword arithmetic. */
2243 csky_class_likely_spilled_p (reg_class_t rclass
)
2245 if ((TARGET_MINI_REGISTERS
&& rclass
== MINI_REGS
)
2246 || rclass
== C_REGS
)
2253 /* Implement TARGET_PREFERRED_RELOAD_CLASS.
2254 Given an rtx X being reloaded into a reg required to be
2255 in class CLASS, return the class of reg to actually use.
2256 In general this is just CLASS. */
2259 csky_preferred_reload_class (rtx x
, reg_class_t rclass
)
2261 if (TARGET_HARD_FLOAT
2262 && CONST_DOUBLE_P (x
)
2263 && (GET_MODE (x
) == DFmode
|| GET_MODE (x
) == SFmode
)
2264 && rclass
== NO_REGS
)
2265 return GENERAL_REGS
;
2270 /* Implement TARGET_CLASS_MAX_NREGS.
2271 Return the maximum number of consecutive registers of class rclass needed
2272 to hold a value of mode mode.
2273 On the csky, this is the size of MODE in words,
2274 except in the FP regs, where a single reg is always enough. */
2276 static unsigned char
2277 csky_class_max_nregs (reg_class_t rclass
, machine_mode mode
)
2279 if (rclass
== V_REGS
)
2282 return CSKY_NUM_REGS (mode
);
2286 /* Implement TARGET_SECONDARY_RELOAD.
2287 If copying a register of RCLASS from/to X requires an intermediate
2288 register, the hook should return the REGISTER_CLASS required for this
2289 intermediate register.
2290 If no intermediate register is required, it should return NO_REGS.
2291 If more than one intermediate register is required, describe the one
2292 that is closest in the copy chain to the reload register. */
2295 csky_secondary_reload (bool in_p ATTRIBUTE_UNUSED
, rtx x
,
2298 secondary_reload_info
*sri ATTRIBUTE_UNUSED
)
2302 /* Extract the real regno from X. */
2303 if (GET_CODE (x
) == SIGN_EXTEND
)
2310 regno
= true_regnum (x
);
2313 while (GET_CODE (x
) == SUBREG
)
2315 off
+= subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2316 GET_MODE (SUBREG_REG (x
)),
2317 SUBREG_BYTE (x
), GET_MODE (x
));
2321 if (GET_CODE (x
) == REG
)
2322 regno
= REGNO (x
) + off
;
2325 else if (GET_CODE (x
) == REG
|| GET_CODE (x
) == SUBREG
)
2326 regno
= true_regnum (x
);
2328 /* We always require a general register when copying anything to
2329 HI/LO_REGNUM, except when copying an SImode value from HI/LO_REGNUM
2330 to a general register, or when copying from register 0. */
2331 if ((rclass
== HILO_REGS
|| rclass
== LO_REGS
|| rclass
== HI_REGS
)
2332 && !CSKY_GENERAL_REGNO_P (regno
))
2333 return GENERAL_REGS
;
2335 if (rclass
== V_REGS
&& !CSKY_GENERAL_REGNO_P (regno
))
2337 /* Reload between vector reg and memory does not need an
2338 intermediate register. */
2339 if (MEM_P (x
) && (mode
== SFmode
|| mode
== DFmode
))
2342 return GENERAL_REGS
;
2348 /* Implement TARGET_SPILL_CLASS.
2349 Try spilling to a larger register class before spilling to memory. */
2352 csky_spill_class (reg_class_t rclass
, machine_mode mode ATTRIBUTE_UNUSED
)
2354 if ((rclass
== MINI_REGS
&& !TARGET_MINI_REGISTERS
)
2355 || (rclass
== LOW_REGS
&& TARGET_HIGH_REGISTERS
))
2356 return GENERAL_REGS
;
2360 /* Convert a static initializer array of feature bits to sbitmap
2363 csky_initialize_isa (sbitmap isa
, const enum csky_isa_feature
*isa_bits
)
2366 while (*isa_bits
!= CSKY_ISA_FEATURE_GET (none
))
2367 bitmap_set_bit (isa
, *(isa_bits
++));
2371 /* Configure a build target TARGET from the user-specified options OPTS and
2374 csky_configure_build_target (struct csky_build_target
*target
,
2375 struct cl_target_option
*opts
,
2376 struct gcc_options
*opts_set
)
2378 const struct csky_processors
*csky_selected_tune
= NULL
;
2379 struct csky_processors
*csky_selected_cpu
= NULL
;
2380 struct csky_processors
*csky_selected_arch
= NULL
;
2381 sbitmap all_sbits
= sbitmap_alloc (CSKY_ISA_FEATURE_GET (max
));
2382 bitmap_clear (all_sbits
);
2384 bitmap_clear (target
->isa
);
2385 target
->core_name
= NULL
;
2386 target
->arch_name
= NULL
;
2388 if (opts_set
->x_csky_arch_option
)
2389 csky_selected_arch
= &all_architectures
[opts
->x_csky_arch_option
];
2391 if (opts_set
->x_csky_cpu_option
)
2393 csky_selected_cpu
= &all_cores
[opts
->x_csky_cpu_option
];
2394 csky_selected_tune
= &all_cores
[opts
->x_csky_cpu_option
];
2397 if (csky_selected_cpu
)
2399 /* TODO: support combination of features
2400 between different cpu & arch, should based on arch. */
2401 if (csky_selected_arch
2402 && (csky_selected_cpu
->base_arch
!= csky_selected_arch
->base_arch
))
2403 warning (0, "cpu %s is not based on arch %s, ignoring the arch",
2404 csky_selected_cpu
->name
, csky_selected_arch
->name
);
2405 if (!csky_selected_arch
)
2406 csky_selected_arch
= &all_architectures
[csky_selected_cpu
->base_arch
];
2407 csky_initialize_isa (all_sbits
, csky_selected_arch
->isa_bits
);
2408 target
->core_name
= csky_selected_cpu
->name
;
2410 else if (csky_selected_arch
)
2412 csky_selected_cpu
= csky_selected_arch
;
2413 target
->arch_name
= csky_selected_arch
->name
;
2415 else /* If the user did not specify a processor, choose one for them. */
2417 csky_selected_cpu
= &all_cores
[TARGET_CPU_DEFAULT
];
2418 csky_selected_arch
= &all_architectures
[csky_selected_cpu
->base_arch
];
2419 csky_initialize_isa (all_sbits
, csky_selected_arch
->isa_bits
);
2420 target
->core_name
= csky_selected_cpu
->name
;
2423 /* The selected cpu may be an architecture, so lookup tuning by core ID. */
2424 if (!csky_selected_tune
)
2425 csky_selected_tune
= &all_cores
[csky_selected_cpu
->core
];
2426 gcc_assert (csky_selected_tune
);
2428 gcc_assert (csky_selected_arch
);
2429 gcc_assert (csky_selected_cpu
);
2430 csky_initialize_isa (target
->isa
, csky_selected_cpu
->isa_bits
);
2431 bitmap_ior (target
->isa
, target
->isa
, all_sbits
);
2433 /* Finish initializing the target structure. */
2434 target
->arch_pp_name
= csky_selected_cpu
->arch
;
2435 target
->base_arch
= csky_selected_cpu
->base_arch
;
2436 target
->arch_core
= csky_selected_cpu
->core
;
2438 sbitmap_free (all_sbits
);
2442 /* Implement TARGET_OPTION_OVERRIDE. */
2445 csky_option_override (void)
2447 csky_active_target
.isa
= sbitmap_alloc (CSKY_ISA_FEATURE_GET (max
));
2449 /* Create the default target_options structure. We need this early
2450 to configure the overall build target. */
2451 target_option_default_node
= target_option_current_node
2452 = build_target_option_node (&global_options
);
2454 csky_configure_build_target (&csky_active_target
,
2455 TREE_TARGET_OPTION (target_option_default_node
),
2456 &global_options_set
);
2458 #ifdef SUBTARGET_OVERRIDE_OPTIONS
2459 SUBTARGET_OVERRIDE_OPTIONS
;
2462 csky_base_arch
= csky_active_target
.base_arch
;
2464 if (flag_pic
&& !(CSKY_TARGET_ARCH (CK810
) || CSKY_TARGET_ARCH (CK807
)))
2467 warning (0, "%qs is not supported by arch %s",
2468 "-fPIC", csky_active_target
.arch_pp_name
);
2471 /* Check floating-point options for consistency. */
2472 if (TARGET_HARD_FLOAT
)
2474 const struct csky_fpu_desc
*csky_selected_fpu
= NULL
;
2476 if (csky_fpu_index
== TARGET_FPU_auto
)
2478 const char *target_fpu_name
;
2482 #ifdef CSKY_FPUTYPE_DEFAULT
2483 target_fpu_name
= CSKY_FPUTYPE_DEFAULT
;
2485 target_fpu_name
= "fpv2";
2488 if (csky_active_target
.core_name
!= NULL
2489 && !strchr (csky_active_target
.core_name
, 'f'))
2490 target_fpu_name
= "auto";
2491 else if (CSKY_TARGET_ARCH (CK803
) || !TARGET_DOUBLE_FLOAT
)
2492 target_fpu_name
= "fpv2_sf";
2493 else if (TARGET_DOUBLE_FLOAT
&& TARGET_FDIVDU
)
2494 target_fpu_name
= "fpv2_divd";
2496 ok
= opt_enum_arg_to_value (OPT_mfpu_
, target_fpu_name
, &fpu_index
,
2499 csky_fpu_index
= (enum csky_fpu_type
) fpu_index
;
2502 if (CSKY_TARGET_ARCH (CK801
) || CSKY_TARGET_ARCH (CK802
))
2503 error ("%qs is not supported by arch %s",
2504 "-mhard-float", csky_active_target
.arch_pp_name
);
2505 else if (csky_fpu_index
== TARGET_FPU_auto
)
2506 error ("%<-mhard-float%> is not supported by the selected CPU");
2509 csky_selected_fpu
= &all_fpus
[csky_fpu_index
];
2510 sbitmap fpu_bits
= sbitmap_alloc (CSKY_ISA_FEATURE_GET (max
));
2511 csky_initialize_isa (fpu_bits
, csky_selected_fpu
->isa_bits
);
2513 bitmap_ior (csky_active_target
.isa
, csky_active_target
.isa
,
2516 sbitmap_free (fpu_bits
);
2521 if (TARGET_DOUBLE_FLOAT
> 0)
2522 warning (0, "%<-mdouble-float%> ignored without %<-mhard-float%>");
2523 TARGET_DOUBLE_FLOAT
= 0;
2524 if (TARGET_FDIVDU
> 0)
2525 warning (0, "%<-mfdivdu%> ignored without %<-mhard-float%>");
2529 /* Extended LRW instructions are enabled by default on CK801, disabled
2531 if (TARGET_ELRW
== -1)
2532 TARGET_ELRW
= CSKY_TARGET_ARCH (CK801
);
2534 /* DSP is enabled either by the processor feature or -mdsp
2535 command-line option. There is no -mno-dsp option as the assembler
2536 doesn't take one. */
2538 TARGET_DSP
= CSKY_ISA_FEATURE (dsp
);
2540 /* There's both -mdiv and -mno-div. Take default from processor if
2541 neither is specified explicitly. */
2542 if (TARGET_DIV
== -1)
2543 TARGET_DIV
= CSKY_ISA_FEATURE (div
);
2545 /* TARGET_CONSTANT_POOL is mandatory for CK801 and CK802 and optional
2547 The reason why the compiler has to generate constant pools for CK801/2
2548 instead of deferring to the assembler is that these cores don't have a
2549 long branch instruction other than jbsr, which clobbers lr. So for
2550 the compiler to correctly save/restore lr it has to know whether there
2551 are long branches, which depends on having accurate branch length
2552 counts, which in turn depends on having control over where constant
2553 pools are placed. */
2554 if ((CSKY_TARGET_ARCH (CK801
) || CSKY_TARGET_ARCH (CK802
))
2555 && !TARGET_CONSTANT_POOL
)
2556 error ("%qs is not supported by arch %s",
2557 "-mno-constpool", csky_active_target
.arch_pp_name
);
2558 else if (TARGET_CONSTANT_POOL
== -1)
2559 TARGET_CONSTANT_POOL
= (CSKY_TARGET_ARCH (CK801
)
2560 || CSKY_TARGET_ARCH (CK802
));
2562 /* TARGET_MINI_REGISTERS is mandatory for CK801, the default for CK802,
2563 and optional for other CPUs. TARGET_HIGH_REGISTERS is incompatible
2564 with TARGET_MINI_REGISTERS, is not supported by CK801/802/803,
2565 and is the default for other processors.
2566 See csky_conditional_register_usage. */
2567 if (TARGET_MINI_REGISTERS
> 0 && TARGET_HIGH_REGISTERS
> 0)
2568 error ("%<-msmart%> is incompatible with %<-mhigh-registers%>");
2569 else if (CSKY_TARGET_ARCH (CK801
)
2570 || CSKY_TARGET_ARCH (CK802
)
2571 || CSKY_TARGET_ARCH (CK803
))
2573 if (CSKY_TARGET_ARCH (CK801
)
2574 || (CSKY_TARGET_ARCH (CK802
) && TARGET_MINI_REGISTERS
== -1))
2575 TARGET_MINI_REGISTERS
= 1;
2576 else if (TARGET_MINI_REGISTERS
== -1)
2577 TARGET_MINI_REGISTERS
= 0;
2578 if (TARGET_HIGH_REGISTERS
> 0)
2579 warning (0, "%qs is not supported by arch %s",
2580 "-mhigh-registers", csky_active_target
.arch_pp_name
);
2581 TARGET_HIGH_REGISTERS
= 0;
2585 if (TARGET_MINI_REGISTERS
== -1)
2586 TARGET_MINI_REGISTERS
= 0;
2587 if (TARGET_HIGH_REGISTERS
== -1)
2588 TARGET_HIGH_REGISTERS
= !TARGET_MINI_REGISTERS
;
2591 /* -mmultiple-stld is the default for everything but CK801, which
2592 doesn't support it. */
2593 if (CSKY_TARGET_ARCH (CK801
))
2595 if (TARGET_MULTIPLE_STLD
> 0)
2596 warning (0, "%qs is not supported by arch %s",
2597 "-mmultiple-stld", csky_active_target
.arch_pp_name
);
2598 TARGET_MULTIPLE_STLD
= 0;
2601 /* Initialize boolean versions of the architectural flags, for use
2605 #define CSKY_ISA(IDENT, DESC) \
2607 csky_arch_isa_features[CSKY_ISA_FEATURE_GET (IDENT)] = \
2608 bitmap_bit_p (csky_active_target.isa, CSKY_ISA_FEATURE_GET (IDENT)); \
2610 #include "csky_isa.def"
2615 /* Resynchronize the saved target options. */
2616 cl_target_option_save (TREE_TARGET_OPTION (target_option_default_node
),
2619 #ifdef ENABLE_TPF_DEBUG
2620 /* Don't emit DWARF4 unless specifically selected. The TPF
2621 debuggers do not yet support DWARF 3/4. */
2622 if (!global_options_set
.x_dwarf_strict
)
2624 if (!global_options_set
.x_dwarf_version
)
2628 /* Don't run the scheduler before reload by default,
2629 since it tends to increase register pressure. */
2630 if (!global_options_set
.x_flag_schedule_insns
)
2631 flag_schedule_insns
= 0;
2633 csky_add_gc_roots ();
2637 /* Return TRUE if X contains any references to TLS symbols. */
2640 csky_tls_referenced_p (rtx x
)
2645 subrtx_iterator::array_type array
;
2646 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
2648 const_rtx x
= *iter
;
2649 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (x
) != 0)
2652 /* Don't recurse into UNSPEC_TLS looking for TLS symbols; these are
2653 TLS offsets, not real symbol references. */
2654 if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLS
)
2655 iter
.skip_subrtxes ();
2661 /* Implement TARGET_CANNOT_FORCE_CONST_MEM.
2662 Determine if it's legal to put X into the constant pool. This
2663 is not possible for the address of thread-local symbols, which
2664 is checked above. */
2667 csky_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
,
2670 return csky_tls_referenced_p (x
);
2674 /* Implement TARGET_LEGITIMATE_CONSTANT_P. Returns nonzero if the
2675 constant value X is a legitimate general operand.
2676 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2679 csky_legitimate_constant_p (machine_mode mode
, rtx x
)
2681 return (!csky_cannot_force_const_mem (mode
, x
)
2686 /* Return true if X is valid as an CSKY addressing register. */
2689 is_csky_address_register_rtx_p (rtx x
, int strict_p
)
2701 return (CSKY_GENERAL_REGNO_P (regno
)
2702 || CSKY_GENERAL_REGNO_P (reg_renumber
[regno
]));
2704 return CSKY_GENERAL_REGNO_P (regno
) || regno
>= FIRST_PSEUDO_REGISTER
;
2708 /* Return TRUE if X is a thread-local symbol. */
2711 csky_tls_symbol_p (rtx x
)
2716 if (GET_CODE (x
) != SYMBOL_REF
)
2719 return SYMBOL_REF_TLS_MODEL (x
) != 0;
2723 /* Handle lazy initialization of __tls_get_addr libfunc. */
2724 static GTY(()) rtx tls_get_addr_libfunc
;
2727 get_tls_get_addr (void)
2729 if (!tls_get_addr_libfunc
)
2730 tls_get_addr_libfunc
= init_one_libfunc ("__tls_get_addr");
2731 return tls_get_addr_libfunc
;
2735 /* Emit a call to __tls_get_addr. */
2738 csky_call_tls_get_addr (rtx x
, rtx reg
, rtx
*valuep
, int reloc
)
2740 rtx label
, labelno
, unspec
, tmp
;
2745 labelno
= GEN_INT (tls_labelno
++);
2746 label
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, labelno
), UNSPEC_TLS_LABEL
);
2747 unspec
= gen_rtx_UNSPEC (Pmode
,
2748 gen_rtvec (3, x
, GEN_INT (reloc
), label
),
2750 tmp
= gen_reg_rtx (SImode
);
2751 emit_move_insn (reg
, unspec
);
2752 emit_move_insn (tmp
, label
);
2753 emit_insn (gen_addsi3 (reg
, reg
, tmp
));
2754 *valuep
= emit_library_call_value (get_tls_get_addr (),
2755 NULL_RTX
, LCT_PURE
, /* LCT_CONST? */
2757 insns
= get_insns ();
2762 /* Helper function for csky_legitimize_address, to handle the TLS cases.
2763 REG is a scratch register and may be null. */
2766 csky_legitimize_tls_address (rtx x
, rtx reg
)
2768 rtx dest
, tp
, label
, labelno
, unspec
, ret
, eqv
, addend
, tmp
;
2770 unsigned int model
= SYMBOL_REF_TLS_MODEL (x
);
2773 reg
= gen_reg_rtx (SImode
);
2777 case TLS_MODEL_GLOBAL_DYNAMIC
:
2778 insns
= csky_call_tls_get_addr (x
, reg
, &ret
, TLS_GD32
);
2779 dest
= gen_reg_rtx (Pmode
);
2780 emit_libcall_block (insns
, dest
, ret
, x
);
2783 case TLS_MODEL_LOCAL_DYNAMIC
:
2784 insns
= csky_call_tls_get_addr (x
, reg
, &ret
, TLS_LDM32
);
2786 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2787 share the LDM result with other LD model accesses. */
2788 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const1_rtx
), UNSPEC_TLS
);
2789 dest
= gen_reg_rtx (Pmode
);
2790 emit_libcall_block (insns
, dest
, ret
, eqv
);
2792 /* Load the addend. */
2793 addend
= gen_rtx_UNSPEC (Pmode
,
2794 gen_rtvec (2, x
, GEN_INT (TLS_LDO32
)),
2796 addend
= force_reg (SImode
, addend
);
2797 return gen_rtx_PLUS (Pmode
, dest
, addend
);
2799 case TLS_MODEL_INITIAL_EXEC
:
2800 labelno
= GEN_INT (tls_labelno
++);
2801 label
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, labelno
), UNSPEC_TLS_LABEL
);
2802 unspec
= gen_rtx_UNSPEC (Pmode
,
2803 gen_rtvec (3, x
, GEN_INT (TLS_IE32
), label
),
2805 tmp
= gen_reg_rtx (SImode
);
2806 emit_move_insn (reg
, unspec
);
2807 emit_move_insn (tmp
, label
);
2808 emit_insn (gen_addsi3 (reg
, reg
, tmp
));
2809 emit_move_insn (reg
, gen_const_mem (Pmode
, reg
));
2810 tp
= gen_rtx_REG (SImode
, CSKY_TLS_REGNUM
);
2811 return gen_rtx_PLUS (Pmode
, tp
, reg
);
2813 case TLS_MODEL_LOCAL_EXEC
:
2814 unspec
= gen_rtx_UNSPEC (Pmode
,
2815 gen_rtvec (2, x
, GEN_INT (TLS_LE32
)),
2817 emit_move_insn (reg
, unspec
);
2818 tp
= gen_rtx_REG (SImode
, CSKY_TLS_REGNUM
);
2819 return gen_rtx_PLUS (Pmode
, tp
, reg
);
2827 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2830 csky_legitimize_address (rtx x
, rtx orig_x ATTRIBUTE_UNUSED
,
2833 if (csky_tls_symbol_p (x
))
2834 return csky_legitimize_tls_address (x
, NULL_RTX
);
2836 if (GET_CODE (x
) == PLUS
)
2838 rtx xop0
= XEXP (x
, 0);
2839 rtx xop1
= XEXP (x
, 1);
2841 if (is_csky_address_register_rtx_p (xop0
, 0)
2842 && CONST_INT_P (xop1
))
2844 HOST_WIDE_INT offset
= INTVAL (xop1
);
2846 /* Try to replace ld32 rx,(ry, offset), to addi16 rz, oimm8
2847 and ld16 rx,(rz, new_ld_offset) to avoid emitting a
2848 32-bit ld, but this addi has a range limitation. */
2850 && offset
> CSKY_LD16_MAX_OFFSET (mode
)
2851 && offset
<= (CSKY_ADDI16_MAX_IMM
2852 + CSKY_LD16_MAX_OFFSET (mode
)))
2854 HOST_WIDE_INT new_ld_offset
2855 = offset
& CSKY_LD16_OFFSET_MASK (mode
);
2857 xop0
= force_operand (plus_constant (Pmode
, xop0
,
2858 offset
- new_ld_offset
),
2860 x
= plus_constant (Pmode
, xop0
, new_ld_offset
);
2862 else if (offset
< 0 && offset
>= (-CSKY_SUBI16_MAX_IMM
))
2863 x
= force_operand (x
, NULL_RTX
);
2864 else if (offset
> CSKY_LD16_MAX_OFFSET (mode
)
2867 /* For the remaining cases, force the constant into a
2869 xop1
= force_reg (SImode
, xop1
);
2870 x
= gen_rtx_PLUS (SImode
, xop0
, xop1
);
2874 /* If the index is store in register, force the
2875 base to register. */
2876 if (is_csky_address_register_rtx_p (xop1
, 0)
2877 && !is_csky_address_register_rtx_p (xop0
, 0))
2879 xop0
= force_operand (xop0
, NULL_RTX
);
2880 x
= gen_rtx_PLUS (SImode
, xop0
, xop1
);
2883 /* Make sure to take full advantage of the pre-indexed addressing mode
2884 with absolute addresses which often allows for the base register to
2885 be factorized for multiple adjacent memory references, and it might
2886 even allows for the mini pool to be avoided entirely. */
2887 else if (CONST_INT_P (x
) && optimize
> 0)
2889 HOST_WIDE_INT mask
, base
, index
;
2892 mask
= CSKY_LD16_OFFSET_MASK (mode
);
2893 base
= INTVAL (x
) & ~mask
;
2894 index
= INTVAL (x
) & mask
;
2895 base_reg
= force_reg (SImode
, GEN_INT (base
));
2896 x
= plus_constant (Pmode
, base_reg
, index
);
2903 /* Return nonzero if INDEX is valid for an address index operand.
2904 ck801 use 16 bits ld
2905 ck802 use 16 and 32 bits ld
2906 others use ld and ldr. */
2909 ck801_legitimate_index_p (machine_mode mode
, rtx index
,
2910 int strict_p ATTRIBUTE_UNUSED
)
2912 enum rtx_code code
= GET_CODE (index
);
2914 /* When the mode size is larger than 4, we may use two ld instruction
2915 to get data, the index and (index+1) should be valid. */
2916 if (GET_MODE_SIZE (mode
) >= 8)
2917 return (code
== CONST_INT
2918 && INTVAL (index
) < CSKY_LD16_MAX_OFFSET (SImode
)
2919 && INTVAL (index
) >= 0 && (INTVAL (index
) & 3) == 0);
2921 if (code
== CONST_INT
&& GET_MODE_SIZE (mode
) > 0
2922 && INTVAL (index
) <= CSKY_LD16_MAX_OFFSET (mode
)
2923 && INTVAL (index
) >= 0)
2924 return ((INTVAL (index
) % GET_MODE_SIZE (mode
)) == 0);
2931 ck802_legitimate_index_p (machine_mode mode
, rtx index
,
2932 int strict_p ATTRIBUTE_UNUSED
)
2934 enum rtx_code code
= GET_CODE (index
);
2936 /* When the mode size is larger than 4, we may use two ld instruction
2937 to get data, the index and (index+1) should be valid. */
2938 if (GET_MODE_SIZE (mode
) >= 8)
2939 return (code
== CONST_INT
2940 && INTVAL (index
) < CSKY_LD32_MAX_OFFSET (SImode
)
2941 && INTVAL (index
) >= 0 && (INTVAL (index
) & 3) == 0);
2943 if (code
== CONST_INT
&& GET_MODE_SIZE (mode
) > 0
2944 && INTVAL (index
) <= CSKY_LD32_MAX_OFFSET (mode
)
2945 && INTVAL (index
) >= 0)
2946 return ((INTVAL (index
) % GET_MODE_SIZE (mode
)) == 0);
2952 /* The instruction ldr rz, (rx, ry << i), i can be 0,1,2,3.
2953 Check that SHIFT is valid, that the code is MULT, and that
2954 the shift is a power of 2. */
2957 is_ldr_shift_p (HOST_WIDE_INT shift
, enum rtx_code code
)
2960 return (shift
>= 0 && shift
<= 3);
2961 else if (code
== MULT
)
2972 ck810_legitimate_index_p (machine_mode mode
, rtx index
, int strict_p
)
2974 enum rtx_code code
= GET_CODE (index
);
2976 if (TARGET_HARD_FLOAT
2977 && (mode
== SFmode
|| mode
== DFmode
))
2978 return (code
== CONST_INT
&& INTVAL (index
) < 1024
2979 && INTVAL (index
) >= 0
2980 && (INTVAL (index
) & 3) == 0);
2982 if (code
== CONST_INT
)
2984 /* When the mode size is larger than 4, we may use two ld instruction
2985 to get data, the index and (index+1) should be valid. */
2986 if (GET_MODE_SIZE (mode
) >= 8)
2987 return (INTVAL (index
) < CSKY_LD32_MAX_OFFSET (SImode
)
2988 && INTVAL (index
) >= 0 && (INTVAL (index
) & 3) == 0);
2990 if (GET_MODE_SIZE (mode
) > 0
2991 && INTVAL (index
) <= CSKY_LD32_MAX_OFFSET (mode
)
2992 && INTVAL (index
) >= 0)
2993 return ((INTVAL (index
) % GET_MODE_SIZE (mode
)) == 0);
2995 /* Allow ld.w rx, (gb, sym@got) when -fpic specially. */
2996 else if (code
== UNSPEC
)
2997 return (flag_pic
== 1
2998 && (XINT (index
, 1) == UNSPEC_PIC_SYMBOL_PLT
2999 || XINT (index
, 1) == UNSPEC_PIC_SYMBOL_GOT
));
3000 /* The follow index is for ldr instruction, the ldr cannot
3001 load dword data, so the mode size should not be larger than
3003 else if (GET_MODE_SIZE (mode
) <= 4)
3005 if (is_csky_address_register_rtx_p (index
, strict_p
))
3007 else if (code
== MULT
|| code
== ASHIFT
)
3009 rtx xiop0
= XEXP (index
, 0);
3010 rtx xiop1
= XEXP (index
, 1);
3012 /* FIXME can the xiop1 be the reg and xiop0 be the int when mult? */
3013 return (is_csky_address_register_rtx_p (xiop0
, strict_p
)
3014 && CONST_INT_P (xiop1
)
3015 && is_ldr_shift_p (INTVAL (xiop1
), code
));
3024 csky_legitimate_index_p (machine_mode mode
, rtx index
, int strict_p
)
3026 if (CSKY_TARGET_ARCH (CK801
))
3027 return ck801_legitimate_index_p (mode
, index
, strict_p
);
3028 else if (CSKY_TARGET_ARCH (CK802
))
3029 return ck802_legitimate_index_p (mode
, index
, strict_p
);
3031 return ck810_legitimate_index_p (mode
, index
, strict_p
);
3035 /* Implement TARGET_LEGITIMATE_ADDRESS_P.
3036 Recognizes RTL expressions that are valid memory addresses for an
3037 instruction. The MODE argument is the machine mode for the MEM
3038 expression that wants to use this address.
3040 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
3041 convert common non-canonical forms to canonical form so that they will
3045 csky_legitimate_address_p (machine_mode mode
, rtx addr
, bool strict_p
)
3047 enum rtx_code code
= GET_CODE (addr
);
3049 /* Match the RTX form emitted for constant pool references.
3050 After reload constants split into minipools will have addresses
3051 from a LABEL_REF. */
3052 if (reload_completed
3053 && ((code
== LABEL_REF
)
3055 && GET_CODE (XEXP (addr
, 0)) == PLUS
3056 && GET_CODE (XEXP (XEXP (addr
, 0), 0)) == LABEL_REF
3057 && CONST_INT_P (XEXP (XEXP (addr
, 0), 1)))))
3060 if (is_csky_address_register_rtx_p (addr
, strict_p
))
3062 /* It is a pc-relative load, may be generated for constpool. */
3063 else if (GET_CODE (addr
) == LABEL_REF
)
3068 rtx xop0
= XEXP (addr
, 0);
3069 rtx xop1
= XEXP (addr
, 1);
3071 return ((is_csky_address_register_rtx_p (xop0
, strict_p
)
3072 && csky_legitimate_index_p (mode
, xop1
, strict_p
))
3073 || (is_csky_address_register_rtx_p (xop1
, strict_p
)
3074 && csky_legitimate_index_p (mode
, xop0
, strict_p
)));
3081 /* Functions to save and restore machine-specific function data. */
3083 static struct machine_function
*
3084 csky_init_machine_status (void)
3086 struct machine_function
*machine
;
3088 machine
= ggc_cleared_alloc
<machine_function
> ();
3090 #if CSKY_FT_UNKNOWN != 0
3091 machine
->func_type
= CSKY_FT_UNKNOWN
;
3097 /* Implement INIT_EXPANDERS. */
3100 csky_init_expanders (void)
3102 /* Arrange to initialize and mark the machine per-function status. */
3103 init_machine_status
= csky_init_machine_status
;
3107 /* Implement TARGET_CANNOT_COPY_INSN_P.
3108 We must not copy any rtx that uses a pc-relative address. */
3111 csky_cannot_copy_insn_p (rtx_insn
*insn
)
3113 subrtx_iterator::array_type array
;
3114 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), ALL
)
3116 const_rtx x
= *iter
;
3117 if (GET_CODE (x
) == UNSPEC
3118 && (XINT (x
, 1) == UNSPEC_TLS_LABEL
3119 || XINT (x
, 1) == UNSPEC_PIC_SYMBOL_GOTPC_GRS
))
3126 /* Extract the parts of an RTL expression that is a valid memory address
3127 for an instruction. Return FALSE if it is a invalid memory address. */
/* NOTE(review): extraction damage — the "struct csky_address {" header line
   is missing here; the fields below are base register, index register,
   symbolic and label parts, displacement, and index scale factor.  */
3131 rtx base
, index
, symbol
, label
, disp
;
3132 HOST_WIDE_INT scale
;
/* Decompose ADDR into its parts, filling in *OUT.  NOTE(review): the
   extraction of this function is damaged — the early REG/LABEL_REF
   returns, the REG/CONST_INT/UNSPEC switch arms, and the tail that
   stores base/index/scale into *OUT are missing lines.  Code is left
   byte-identical; comments annotate the visible fragments only.  */
3136 decompose_csky_address (rtx addr
, struct csky_address
*out
)
3138 rtx base
= NULL_RTX
, index
= NULL_RTX
, disp
= NULL_RTX
;
3139 HOST_WIDE_INT scale
= 1;
3140 rtx scale_rtx
= NULL_RTX
;
/* Clear all output fields before decomposition.  */
3143 out
->base
= out
->index
= out
->symbol
= out
->label
= out
->disp
= NULL_RTX
;
3152 if (GET_CODE (addr
) == LABEL_REF
)
/* Strip a CONST wrapper to expose the underlying PLUS/SYMBOL_REF.  */
3158 if (GET_CODE (addr
) == CONST
)
3159 addr
= XEXP (addr
, 0);
3161 if (GET_CODE (addr
) == PLUS
)
3165 addends
[0] = XEXP (addr
, 0);
3166 addends
[1] = XEXP (addr
, 1);
/* label + const_int is a constant-pool style reference.  */
3168 if (GET_CODE (addends
[0]) == LABEL_REF
&& CONST_INT_P (addends
[1]))
3170 out
->label
= addends
[0];
3171 out
->disp
= addends
[1];
/* Canonicalize so that a register (if any) comes first.  */
3175 if (!REG_P (addends
[0]))
3176 std::swap (addends
[0], addends
[1]);
3178 for (i
= 0; i
< 2; ++i
)
3181 switch (GET_CODE (op
))
/* MULT case: reg * const scale (operands may arrive swapped).  */
3200 index
= XEXP (op
, 0);
3201 scale_rtx
= XEXP (op
, 1);
3202 if (!CONST_INT_P (index
) && !CONST_INT_P (scale_rtx
))
3204 else if (CONST_INT_P (index
))
3205 std::swap (index
, scale_rtx
);
3206 scale
= INTVAL (scale_rtx
);
/* ASHIFT case: reg << const becomes a power-of-two scale.  */
3211 index
= XEXP (op
, 0);
3212 scale_rtx
= XEXP (op
, 1);
3213 if (!CONST_INT_P (scale_rtx
))
3215 scale
= scale
<< INTVAL (scale_rtx
);
3234 /* Helper function for the csky_simple_mem_operand predicate. Returns
3235 true if OP is an address of the form reg + displacement. */
3238 csky_simple_addr_operand_p (rtx op
)
3240 struct csky_address addr
;
3242 if (!decompose_csky_address (op
, &addr
))
3245 /* FIXME The PIC related code.
3246 Check if load the symbol address from got table. */
3247 if (addr
.disp
&& GET_CODE (addr
.disp
) == UNSPEC
)
3249 if (!addr
.index
&& !addr
.symbol
)
3255 /* Print the UNSPEC operand in X to the STREAM. */
3258 csky_output_pic_addr_const (FILE *stream
, rtx x
, int code
)
3261 if (GET_CODE (x
) != UNSPEC
)
3264 if (UNSPEC_TLS
== XINT (x
, 1))
3266 /* FIXME It is not reached */
3270 csky_print_operand (stream
, XVECEXP (x
, 0, 0), code
);
3272 switch (XINT (x
, 1))
3274 case UNSPEC_PIC_SYMBOL_GOTOFF
:
3275 fputs ("@GOTOFF", stream
);
3277 case UNSPEC_PIC_SYMBOL_PLT
:
3278 fputs ("@PLT", stream
);
3280 case UNSPEC_PIC_SYMBOL_GOT
:
3281 fputs ("@GOT", stream
);
3283 case UNSPEC_PIC_SYMBOL_GOTPC
:
3284 fputs ("@GOTPC", stream
);
3286 case UNSPEC_PIC_SYMBOL_BSR
:
3294 /* Output the constpool label according to the rtx expression X. */
3297 csky_output_constpool_label (FILE *stream
, rtx x
)
3301 gcc_assert (GET_CODE (x
) == LABEL_REF
);
3304 if (GET_CODE (x
) == UNSPEC_VOLATILE
&& XINT (x
, 1) == VUNSPEC_POOL_LABEL
)
3306 ASM_GENERATE_INTERNAL_LABEL (buf
, CSKY_CONSTPOOL_LABEL_PREFIX
,
3307 INTVAL (XVECEXP (x
, 0, 0)));
3308 assemble_name (stream
, buf
);
3313 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
3316 csky_print_operand_address (FILE *stream
,
3317 machine_mode mode ATTRIBUTE_UNUSED
,
3321 struct csky_address addr
;
3323 decompose_csky_address (x
, &addr
);
3325 if (addr
.label
&& addr
.disp
&& GET_CODE (addr
.disp
) == CONST_INT
)
3327 fprintf (stream
, "[");
3328 csky_output_constpool_label (stream
, addr
.label
);
3329 fprintf (stream
, "+%d]", (int) INTVAL (addr
.disp
));
3331 else if (addr
.label
)
3333 fprintf (stream
, "[");
3334 csky_output_constpool_label (stream
, addr
.label
);
3335 fprintf (stream
, "]");
3337 else if (addr
.symbol
&& addr
.disp
&& GET_CODE (addr
.disp
) == CONST_INT
)
3339 fprintf (stream
, "[");
3340 output_addr_const (stream
, addr
.symbol
);
3341 fprintf (stream
, "+%d]", (int) INTVAL (addr
.disp
));
3343 else if (addr
.symbol
)
3345 fprintf (stream
, "[");
3346 output_addr_const (stream
, addr
.symbol
);
3347 fprintf (stream
, "]");
3349 else if (addr
.disp
&& GET_CODE (addr
.disp
) == CONST_INT
)
3350 fprintf (stream
, "(%s, %d)",
3351 reg_names
[REGNO (addr
.base
)], (int) INTVAL (addr
.disp
));
3352 else if (addr
.disp
&& GET_CODE (addr
.disp
) == UNSPEC
)
3354 if (REGNO (addr
.base
) != CSKY_GB_REGNUM
)
3355 fprintf (stream
, "(%s, ", reg_names
[REGNO (addr
.base
)]);
3357 fprintf (stream
, "[");
3358 csky_output_pic_addr_const (stream
, addr
.disp
, 0);
3359 fprintf (stream
, "%s", (REGNO (addr
.base
) != CSKY_GB_REGNUM
)
3362 else if (addr
.index
)
3363 fprintf (stream
, "(%s, %s << %d)",
3364 reg_names
[REGNO (addr
.base
)], reg_names
[REGNO (addr
.index
)],
3365 exact_log2 ((int) (addr
.scale
)));
3367 fprintf (stream
, "(%s, 0)", reg_names
[REGNO (addr
.base
)]);
3371 /* Implement TARGET_PRINT_OPERAND.
3372 Print operand X (an rtx) in assembler syntax to file STREAM
3373 according to modifier CODE.
3375 'N' print the log2(X+1), mainly used for bmaski
3376 'P' print the log2(X)
3377 'Q' print the log2(~X)
3378 'O' print a decimal number
3379 'M' print a decimal number as its negative
3380 'R' print the next register or memory location along, i.e. the lsw in
3382 'H' print the high 16 bits of a constant. */
3385 csky_print_operand (FILE *stream
, rtx x
, int code
)
3390 if ((INTVAL (x
) & 0xffffffff) == 0xffffffff)
3391 fprintf (stream
, "0");
3393 fprintf (stream
, "%d",
3394 (int) exact_log2 ((INTVAL (x
) & 0xffffffff) + 1) % 32);
3397 fprintf (stream
, "%d",
3398 (int) exact_log2 (INTVAL (x
) & 0xffffffff));
3401 fprintf (stream
, "%d",
3402 (int) exact_log2 (~INTVAL (x
) & 0xffffffff));
3405 fprintf (stream
, "%d", (int) INTVAL (x
));
3408 fprintf (stream
, "%d", (int) (-INTVAL (x
)));
3411 /* Next location along in memory or register. */
3412 switch (GET_CODE (x
))
3415 fputs (reg_names
[REGNO (x
) + 1], stream
);
3418 csky_print_operand_address
3419 (stream
, GET_MODE (x
), XEXP (adjust_address (x
, SImode
, 4), 0));
3426 fprintf (stream
, "%ld", (long)((INTVAL (x
) & 0xFFFF0000) >> 16));
3429 switch (GET_CODE (x
))
3432 fputs (reg_names
[REGNO (x
)], stream
);
3435 output_address (GET_MODE (x
), XEXP (x
, 0));
3438 csky_output_pic_addr_const (stream
, x
, code
);
3441 output_addr_const (stream
, x
);
3450 /* Implement TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS. */
3453 csky_allocate_stack_slots_for_args (void)
3455 /* Naked functions should not allocate stack slots for arguments. */
3456 return !CSKY_FUNCTION_IS_NAKED (get_csky_current_func_type ());
3460 /* Can we generate a constant with a single instruction, without using
3464 const_ok_for_cskyv2 (HOST_WIDE_INT value
)
3466 /* Try exact power of two. It can be generated by bgeni. */
3467 if (CSKY_CONST_OK_FOR_Ub (value
))
3470 /* Try exact power of two - 1. It can be generated by bmaski. */
3471 if (CSKY_CONST_OK_FOR_Uc (value
) && value
!= -1)
3474 /* Try if it can be generated by movi. */
3475 if (CSKY_CONST_OK_FOR_I (value
))
3478 /* The constant can be generated by movih.
3479 Notice that movih is a 32-bit instruction. */
3480 if (CSKY_CONST_OK_FOR_MOVIH (value
))
3487 /* Tricks for synthesizing constants from values that can be directly
3488 manipulated by machine instructions. */
/* Classification of the instruction pair used to synthesize a constant
   inline; returned by try_csky_constant_tricks and consumed by
   csky_output_inline_const.  */
3490 enum csky_inline_const_type
3492 IC_UNINLINABLE
= 0, /* Not inlineable */
3493 IC_SINGLE
, /* Single instruction */
3494 IC_APPEND_NOT
, /* Single instruction followed by a not */
3495 IC_APPEND_ADDI
, /* Single insn followed by an addi */
3496 IC_APPEND_SUBI
, /* Single insn followed by a subi */
3497 IC_BGENI_ADDI
, /* Single insn(bgeni) followed by an addi */
3498 IC_BGENI_SUBI
, /* Single insn(bgeni) followed by a subi */
3499 IC_APPEND_BSETI
, /* Single insn followed by bseti */
3500 IC_APPEND_MOVI
, /* Single insn followed by movi */
3501 IC_APPEND_BCLRI
, /* Single insn followed by bclri */
3502 IC_APPEND_ROTLI
, /* Single insn followed by rotli */
3503 IC_APPEND_LSLI
, /* Single insn followed by lsli */
3504 IC_APPEND_IXH
, /* Single insn followed by ixh */
3505 IC_APPEND_IXW
/* Single insn followed by ixw */
3509 /* Try tricks to load a constant inline and return the trick number if
3510 success, or IC_UNINLINABLE. */
/* NOTE(review): extraction damage — the *x/*y stores, loop braces, and
   the rotate/shift bookkeeping inside the rotli/lsli loops are missing
   lines here.  Code is left byte-identical; comments annotate intent.  */
3512 static enum csky_inline_const_type
3513 try_csky_constant_tricks (HOST_WIDE_INT value
, HOST_WIDE_INT
*x
,
3516 HOST_WIDE_INT i
, value_invert
;
3517 unsigned HOST_WIDE_INT bit
, shf
, rot
, lobits
, hibits
;
/* Work on the low 32 bits only.  */
3519 value
&= 0xffffffff;
3520 value_invert
= ~value
& 0xffffffff;
3522 if (const_ok_for_cskyv2 (value
))
3528 /* Since movih is 32 bits, do not use it here, better code may
3529 be generated later. */
3530 if (const_ok_for_cskyv2 (value_invert
)
3531 && !CSKY_CONST_OK_FOR_MOVIH (value_invert
))
3534 return IC_APPEND_NOT
;
3537 /* One immediate generate instruction, and one 16-bit subi or addi. */
3538 for (i
= 1; i
<= 32; i
++)
3540 if (const_ok_for_cskyv2 (value
- i
)
3541 && !CSKY_CONST_OK_FOR_MOVIH (value
- i
))
3545 return IC_APPEND_ADDI
;
3548 if (const_ok_for_cskyv2 (value
+ i
)
3549 && !CSKY_CONST_OK_FOR_MOVIH (value
- i
))
/* NOTE(review): the SUBI branch tests const_ok_for_cskyv2 (value + i)
   but then checks !CSKY_CONST_OK_FOR_MOVIH (value - i).  This looks
   like a copy-paste slip from the ADDI branch above — (value + i) is
   presumably intended.  Confirm against upstream GCC csky.cc.  */
3553 return IC_APPEND_SUBI
;
3557 /* Generate bgeni + addi. */
3558 if (CSKY_CONST_OK_FOR_Ub (value
& 0xfffff000))
3560 *x
= (value
& 0xfffff000);
3561 *y
= (value
& 0xfff);
3562 return IC_BGENI_ADDI
;
3565 /* Generate bgeni + subi. */
3566 lobits
= value
& 0xfff;
3567 hibits
= (unsigned HOST_WIDE_INT
)(value
& 0xfffff000) + (1 << 12);
3568 if (exact_log2 (hibits
) >= 1
3569 && exact_log2 (hibits
) <= 30
3573 *y
= (0x1000 - lobits
);
3574 return IC_BGENI_SUBI
;
3577 /* One immediate generate instruction, and one bseti or bclri. */
3578 bit
= 0x80000000ULL
;
3579 for (i
= 0; i
<= 31; i
++)
3581 if (const_ok_for_cskyv2 (value
& ~bit
)
3582 && !CSKY_CONST_OK_FOR_MOVIH (value
& ~bit
))
3585 *x
= (value
& ~bit
);
3586 return IC_APPEND_BSETI
;
3589 if (const_ok_for_cskyv2 (value
| bit
)
3590 && !CSKY_CONST_OK_FOR_MOVIH (value
| bit
))
3592 *y
= ~bit
& 0xffffffff;
3594 return IC_APPEND_BCLRI
;
3600 /* One immediate generate instruction, and one rotli or lsli. */
3603 for (i
= 1; i
< 31; i
++)
3613 if (const_ok_for_cskyv2 (rot
) && !CSKY_CONST_OK_FOR_MOVIH (rot
))
3617 return IC_APPEND_ROTLI
;
3620 /* Can't use logical shift when low order bit is one. */
3626 if (shf
!= 0 && const_ok_for_cskyv2 (shf
)
3627 && !CSKY_CONST_OK_FOR_MOVIH (shf
))
3631 return IC_APPEND_LSLI
;
3635 /* One immediate generate instruction, and one ixh. */
3636 if (CSKY_ISA_FEATURE (E2
)
3638 && const_ok_for_cskyv2 (value
/ 3)
3639 && !CSKY_CONST_OK_FOR_MOVIH (value
/ 3))
3642 return IC_APPEND_IXH
;
3645 /* One immediate generate instruction, and one ixw. */
3646 if (CSKY_ISA_FEATURE (E2
)
3648 && const_ok_for_cskyv2 (value
/ 5)
3649 && !CSKY_CONST_OK_FOR_MOVIH (value
/ 5))
3652 return IC_APPEND_IXW
;
3655 /* Generate movih + bseti. */
3656 if (CSKY_CONST_OK_FOR_Ub (value
& 0xffff))
3658 *x
= value
& 0xffff0000;
3659 *y
= value
& 0xffff;
3660 return IC_APPEND_BSETI
;
3663 /* Generate movih + not. */
3664 if (CSKY_CONST_OK_FOR_MOVIH (value_invert
))
3667 return IC_APPEND_NOT
;
3670 /* One movih, and one 16bits addi or subi. */
3671 for (i
= 1; i
<= 32; i
++)
3673 if (CSKY_CONST_OK_FOR_MOVIH (value
- i
))
3677 return IC_APPEND_ADDI
;
3680 if (CSKY_CONST_OK_FOR_MOVIH (value
+ i
))
3684 return IC_APPEND_SUBI
;
3688 /* One movih, and one bseti or bclri. */
3689 bit
= 0x80000000ULL
;
3690 for (i
= 0; i
<= 31; i
++)
3692 if (CSKY_CONST_OK_FOR_MOVIH (value
& ~bit
))
3696 return IC_APPEND_BSETI
;
3699 if (CSKY_CONST_OK_FOR_MOVIH (value
| bit
))
3701 *y
= ~bit
& 0xffffffff;
3703 return IC_APPEND_BCLRI
;
3709 /* One movih, and one rotli or lsli. */
3712 for (i
= 1; i
< 31; i
++)
3722 if (CSKY_CONST_OK_FOR_MOVIH (rot
))
3726 return IC_APPEND_ROTLI
;
3729 /* Can't use logical shift when low order bit is one. */
3735 if (shf
!= 0 && CSKY_CONST_OK_FOR_MOVIH (shf
))
3739 return IC_APPEND_LSLI
;
3743 return IC_UNINLINABLE
;
3747 /* Actually output a constant using a trick.
3748 FIXME: I think this would be better handled by a splitter than at the
3749 asm output level. */
/* NOTE(review): extraction damage — the switch header, several case
   labels, break statements, the buf/load_op declarations, and the
   dst_fmt assignments are missing lines.  Code left byte-identical.  */
3752 csky_output_inline_const (machine_mode mode
, rtx operands
[])
3754 HOST_WIDE_INT x
= 0, y
= 0;
3755 enum csky_inline_const_type trick_type
;
3756 rtx out_operands
[3];
3759 const char *dst_fmt
;
3760 HOST_WIDE_INT value
= INTVAL (operands
[1]);
3761 int ivalue
= (int) value
;
3762 unsigned int uvalue
= (unsigned int) value
;
3764 trick_type
= try_csky_constant_tricks (value
, &x
, &y
);
3765 /* lrw's are handled separately: Large inlinable constants never get
3766 turned into lrw's. Our caller uses try_csky_constant_tricks to back
3767 off to an lrw rather than calling this routine. */
3768 gcc_assert (trick_type
!= IC_UNINLINABLE
);
3770 /* Operands: 0 = dst, 1 = load immedate., 2 = adjust immedate. */
3771 out_operands
[0] = operands
[0];
3772 out_operands
[1] = GEN_INT (x
);
3773 if (trick_type
!= IC_SINGLE
&& trick_type
!= IC_APPEND_NOT
)
3774 out_operands
[2] = GEN_INT (y
);
3776 /* Select dst format based on mode. */
3777 if (mode
== DImode
&& TARGET_BIG_ENDIAN
)
3782 /* Try movi16: 0~31,movi32: 0~65535. */
3783 if (CSKY_CONST_OK_FOR_I (x
))
3784 sprintf (load_op
, "movi\t%s, %%1", dst_fmt
);
3785 /* Try exact power of two - 1. */
3786 else if (CSKY_CONST_OK_FOR_Uc (x
))
3787 sprintf (load_op
, "bmaski\t%s, %%N1", dst_fmt
);
3789 else if (CSKY_CONST_OK_FOR_MOVIH (x
))
3790 sprintf (load_op
, "movih\t%s, %%H1", dst_fmt
);
/* Fallback marker: should be unreachable for inlinable tricks.  */
3793 sprintf (load_op
, "BADMOVI-inline_const %s, %%1", dst_fmt
);
3800 strcpy (buf
, load_op
);
3802 /* Add instruction 'not'. */
3804 sprintf (buf
, "%s\n\tnot\t%s, %s\t// %d 0x%x", load_op
, dst_fmt
,
3805 dst_fmt
, ivalue
, uvalue
);
3807 /* Add instruction 'addi'. */
3808 case IC_APPEND_ADDI
:
3809 sprintf (buf
, "%s\n\taddi\t%s, %s, %%2\t// %d 0x%x", load_op
,
3810 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3812 /* Add instruction 'subi'. */
3813 case IC_APPEND_SUBI
:
3814 sprintf (buf
, "%s\n\tsubi\t%s, %s, %%2\t// %d 0x%x", load_op
,
3815 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3817 /* Add instruction 'addi', the last instruction is bgeni. */
3819 sprintf (buf
, "%s\n\taddi\t%s, %s, %%2\t// %d 0x%x", load_op
,
3820 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3822 /* Add instruction 'subi', the last instruction is bgeni. */
3824 sprintf (buf
, "%s\n\tsubi\t%s, %s, %%2\t// %d 0x%x", load_op
,
3825 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3827 /* Add instruction 'bseti'. */
3828 case IC_APPEND_BSETI
:
3829 sprintf (buf
, "%s\n\tbseti\t%s, %s, %%P2\t// %d 0x%x", load_op
,
3830 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3832 /* Add instruction 'movi'. */
3833 case IC_APPEND_MOVI
:
3834 sprintf (buf
, "%s\n\tmovi\t%s, %%2\t// %d 0x%x", load_op
, dst_fmt
,
3837 /* Add instruction 'bclri'. */
3838 case IC_APPEND_BCLRI
:
3839 sprintf (buf
, "%s\n\tbclri\t%s, %s, %%Q2\t// %d 0x%x", load_op
,
3840 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3842 /* Add instruction 'rotli'. */
3843 case IC_APPEND_ROTLI
:
3844 sprintf (buf
, "%s\n\trotli\t%s, %s, %%2\t// %d 0x%x", load_op
,
3845 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3847 /* Add instruction 'lsli'. */
3848 case IC_APPEND_LSLI
:
3849 sprintf (buf
, "%s\n\tlsli\t%s, %s, %%2\t// %d 0x%x", load_op
,
3850 dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3852 /* Add instruction 'ixh'. */
3854 sprintf (buf
, "%s\n\tixh\t%s, %s, %s\t// %d 0x%x", load_op
,
3855 dst_fmt
, dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3857 /* Add instruction 'ixw'. */
3859 sprintf (buf
, "%s\n\tixw\t%s, %s, %s\t// %d 0x%x", load_op
,
3860 dst_fmt
, dst_fmt
, dst_fmt
, ivalue
, uvalue
);
3866 output_asm_insn (buf
, out_operands
);
3871 /* This is a helper function for the Uo constraint for movsi patterns. */
3874 csky_inlinable_constant (HOST_WIDE_INT value
)
3877 return (!(CSKY_TARGET_ARCH (CK802
) || CSKY_TARGET_ARCH (CK801
))
3878 && try_csky_constant_tricks (value
, &x
, &y
));
3882 /* Return true if the constant VAL can be expressed by an 8-bit constant
3883 with a shift value, filling in *BASE and *SHIFT. */
3886 csky_shifted_imm8_constant (unsigned HOST_WIDE_INT val
,
3887 unsigned int *base
, unsigned int *shift
)
3889 unsigned HOST_WIDE_INT mask
= 0xff;
3891 val
= val
& (unsigned HOST_WIDE_INT
) 0xffffffffu
;
3895 for (i
= 0; i
< 25; i
++)
3896 if ((val
& (mask
<< i
)) == val
)
3899 *base
= (unsigned int) (val
>> i
);
3901 *shift
= (unsigned int) i
;
3909 /* Output a move of a word or less value. */
3912 csky_output_move (rtx insn ATTRIBUTE_UNUSED
, rtx operands
[],
3913 machine_mode mode ATTRIBUTE_UNUSED
)
3915 rtx dst
= operands
[0];
3916 rtx src
= operands
[1];
3917 struct csky_address op0
, op1
;
3921 /* The situation mov reg to reg. */
3924 int dstreg
= REGNO (dst
);
3925 int srcreg
= REGNO (src
);
3927 /* hilo registers exchange their places,
3928 and their order of Dimode as same as other
3929 general registers in LITTLE_ENDIAN mode. */
3930 if (TARGET_BIG_ENDIAN
)
3932 if (dstreg
== CSKY_HI_REGNUM
)
3934 else if (dstreg
== CSKY_LO_REGNUM
)
3936 else if (srcreg
== CSKY_HI_REGNUM
)
3938 else if (srcreg
== CSKY_LO_REGNUM
)
3943 if (dstreg
== CSKY_HI_REGNUM
)
3945 else if (dstreg
== CSKY_LO_REGNUM
)
3947 else if (srcreg
== CSKY_HI_REGNUM
)
3949 else if (srcreg
== CSKY_LO_REGNUM
)
3953 if (CSKY_VREG_P (dstreg
) && CSKY_VREG_P (srcreg
))
3954 return "fmovs\t%0, %1";
3955 if (CSKY_VREG_P (dstreg
))
3956 return "fmtvrl\t%0, %1";
3957 if (CSKY_VREG_P (srcreg
))
3958 return "fmfvrl\t%0, %1";
3960 if (REGNO (src
) == CSKY_CC_REGNUM
)
3963 return "mov\t%0, %1";
3965 /* The situation mov memory to reg. */
3966 else if (GET_CODE (src
) == MEM
)
3968 decompose_csky_address (XEXP (src
, 0), &op1
);
3971 switch (GET_MODE (src
))
3974 return "ldr.h\t%0, %1";
3976 return "ldr.b\t%0, %1";
3979 if (CSKY_VREG_P (REGNO (dst
)))
3980 return "fldrs\t%0, %1";
3982 return "ldr.w\t%0, %1";
3986 /* Generate lrw rx, [LABEL]. This happens when the compiler
3987 generates constant pool references and uses lrw to get the
3988 constant into memory. */
3990 return "lrw\t%0, %1";
3991 /* Generate lrs.w rx, [symbol@GOT/PLT]. */
3992 else if (flag_pic
== 1 && op1
.disp
&& GET_CODE (op1
.disp
) == UNSPEC
)
3993 return "lrs.w\t%0, %1";
3995 switch (GET_MODE (src
))
3998 return "ld.h\t%0, %1";
4000 return "ld.b\t%0, %1";
4003 if (CSKY_VREG_P (REGNO (dst
)))
4004 return "flds\t%0, %1";
4006 return "ld.w\t%0, %1";
4011 /* The situation mov integer to reg. */
4012 else if (GET_CODE (src
) == CONST_INT
||
4013 (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
))
4016 const REAL_VALUE_TYPE
*d
;
4019 if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
4021 d
= CONST_DOUBLE_REAL_VALUE (src
);
4022 REAL_VALUE_TO_TARGET_SINGLE (*d
, l
);
4023 operands
[1] = GEN_INT (l
);
4027 if (try_csky_constant_tricks (INTVAL (src
), &x
, &y
))
4028 return csky_output_inline_const (SImode
, operands
);
4029 /* Return '#' to split it. */
4030 else if (CSKY_CONST_OK_FOR_T (INTVAL (src
)))
4033 return "lrw\t%0, %x1\t";
4035 else if (TARGET_ANCHOR
&& GET_CODE (src
) == SYMBOL_REF
)
4037 if (SYMBOL_REF_FUNCTION_P (src
))
4038 return "lrw\t%0, %1@BTEXT";
4040 return "lrw\t%0, %1@BDATA";
4042 else if (GET_CODE (src
) == UNSPEC
4043 && XINT (src
, 1) == UNSPEC_PIC_SYMBOL_GRS
)
4044 return "grs\t%0, %1";
4046 return "lrw\t%0, %1";
4048 else if (GET_CODE (dst
) == MEM
)
4050 decompose_csky_address (XEXP (dst
, 0), &op0
);
4053 switch (GET_MODE (src
))
4056 return "str.h\t%1, %0";
4058 return "str.b\t%1, %0";
4061 if (CSKY_VREG_P (REGNO (src
)))
4062 return "fstrs\t%1, %0";
4064 return "str.w\t%1, %0";
4069 switch (GET_MODE (dst
))
4072 return "st.h\t%1, %0";
4074 return "st.b\t%1, %0";
4077 if (CSKY_VREG_P (REGNO (src
)))
4078 return "fsts\t%1, %0";
4080 return "st.w\t%1, %0";
4090 /* Output a move of a word or less value. Specific for ck801. */
4093 csky_output_ck801_move (rtx insn ATTRIBUTE_UNUSED
, rtx operands
[],
4094 machine_mode mode ATTRIBUTE_UNUSED
)
4096 rtx dst
= operands
[0];
4097 rtx src
= operands
[1];
4098 struct csky_address op1
;
4103 return "mov\t%0, %1";
4104 else if (GET_CODE (src
) == MEM
)
4106 decompose_csky_address (XEXP (src
, 0), &op1
);
4108 /* Generate lrw rx, [LABEL]. This happens when the compiler
4109 generates constant pool references and uses lrw to get the
4110 constant in memory. */
4112 return "lrw\t%0, %1";
4114 switch (GET_MODE (src
))
4117 return "ld.h\t%0, %1";
4119 return "ld.b\t%0, %1";
4122 return "ld.w\t%0, %1";
4127 else if (GET_CODE (src
) == CONST_INT
)
4129 if (REGNO (dst
) > 7)
4130 return "lrw\t%0, %x1\t";
4131 else if (CSKY_CONST_OK_FOR_N (INTVAL (src
) + 1))
4132 return "movi\t%0, %1";
4133 /* Return '#' to split it. */
4134 else if (CSKY_CONST_OK_FOR_T (INTVAL (src
)))
4136 else if (csky_shifted_imm8_constant (INTVAL (src
), NULL
, NULL
))
4139 return "lrw\t%0, %x1\t";
4141 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
4143 const REAL_VALUE_TYPE
*d
;
4146 d
= CONST_DOUBLE_REAL_VALUE (src
);
4147 REAL_VALUE_TO_TARGET_SINGLE (*d
, l
);
4148 operands
[1] = GEN_INT (l
);
4151 if (CSKY_CONST_OK_FOR_N (INTVAL (src
) + 1))
4152 return "movi\t%0, %1";
4154 return "lrw\t%0, %x1\t";
4156 else if (TARGET_ANCHOR
&& GET_CODE (src
) == SYMBOL_REF
)
4158 if (SYMBOL_REF_FUNCTION_P (src
))
4159 return "lrw\t%0, %1@BTEXT";
4161 return "lrw\t%0, %1@BDATA";
4164 return "lrw\t%0, %1";
4166 else if (GET_CODE (dst
) == MEM
)
4167 switch (GET_MODE (dst
))
4170 return "st.h\t%1, %0";
4172 return "st.b\t%1, %0";
4175 return "st.w\t%1, %0";
4184 /* Return a sequence of instructions to perform DI or DF move.
4185 Since the CSKY cannot move a DI or DF in one instruction, we have
4186 to take care when we see overlapping source and dest registers. */
4189 csky_output_movedouble (rtx operands
[],
4190 machine_mode mode ATTRIBUTE_UNUSED
)
4192 rtx dst
= operands
[0];
4193 rtx src
= operands
[1];
4199 int dstreg
= REGNO (dst
);
4200 int srcreg
= REGNO (src
);
4202 if (CSKY_HILO_REG_P (srcreg
))
4204 if (TARGET_BIG_ENDIAN
)
4205 return "mfhi\t%0\n\tmflo\t%R0";
4207 return "mfhi\t%R0\n\tmflo\t%0";
4209 else if (CSKY_HILO_REG_P (dstreg
))
4211 if (TARGET_BIG_ENDIAN
)
4212 return "mthi\t%1\n\tmtlo\t%R1";
4214 return "mthi\t%R1\n\tmtlo\t%1";
4216 else if (CSKY_VREG_P (srcreg
) && CSKY_VREG_P (dstreg
))
4217 return "fmovd\t%0, %1";
4218 else if (CSKY_VREG_P (srcreg
))
4220 /* Since the vector registers in fpuv2_soft processors
4221 like ck803f are 32 bits wide, just one insn is needed
4222 to complete the move operation. */
4223 if (TARGET_SOFT_FPU
)
4224 return "fmfvrl\t%0, %1";
4225 else if (TARGET_BIG_ENDIAN
)
4226 return "fmfvrh\t%0, %1\n\tfmfvrl\t%R0, %1";
4228 return "fmfvrh\t%R0, %1\n\tfmfvrl\t%0, %1";
4230 else if (CSKY_VREG_P (dstreg
))
4232 if (TARGET_SOFT_FPU
)
4233 return "fmtvrl\t%0, %1";
4234 else if (TARGET_BIG_ENDIAN
)
4235 return "fmtvrh\t%0, %1\n\tfmtvrl\t%0, %R1";
4237 return "fmtvrh\t%0, %R1\n\tfmtvrl\t%0, %1";
4240 /* Ensure the second source not overwritten. */
4241 if (srcreg
+ 1 == dstreg
)
4242 return "mov\t%R0, %R1\n\tmov\t%0, %1";
4244 return "mov\t%0, %1\n\tmov\t%R0, %R1";
4246 else if (GET_CODE (src
) == MEM
)
4248 rtx memexp
= XEXP (src
, 0);
4249 int dstreg
= REGNO (dst
);
4251 struct csky_address op0
;
4253 decompose_csky_address (XEXP (src
, 0), &op0
);
4255 if (GET_CODE (memexp
) == LABEL_REF
4256 || (GET_CODE (memexp
) == CONST
4257 && GET_CODE (XEXP (memexp
, 0)) == PLUS
4258 && GET_CODE (XEXP (XEXP (memexp
, 0), 0)) == LABEL_REF
))
4259 return "lrw\t%0, [%1]\n\tlrw\t%R0, [%R1]";
4260 else if (GET_CODE (memexp
) == REG
)
4261 basereg
= REGNO (memexp
);
4262 else if (GET_CODE (memexp
) == PLUS
)
4264 if (GET_CODE (XEXP (memexp
, 0)) == REG
)
4265 basereg
= REGNO (XEXP (memexp
, 0));
4266 else if (GET_CODE (XEXP (memexp
, 1)) == REG
)
4267 basereg
= REGNO (XEXP (memexp
, 1));
4276 if (CSKY_VREG_P (dstreg
))
4279 return "fldrd\t%0, %1";
4281 return "fldd\t%0, %1";
4283 /* FIXME length attribute is wrong here. */
4284 if (dstreg
== basereg
)
4285 /* Just load them in reverse order. */
4286 return "ld.w\t%R0, %R1\n\tld.w\t%0, %1";
4288 return "ld.w\t%0, %1\n\tld.w\t%R0, %R1";
4290 else if (GET_CODE (src
) == CONST_INT
|| GET_CODE (src
) == CONST_DOUBLE
)
4292 split_double (src
, operands
+ 2, operands
+ 3);
4294 if (CSKY_CONST_OK_FOR_I (INTVAL (operands
[2])))
4295 output_asm_insn ("movi\t%0, %2", operands
);
4296 else if (CSKY_CONST_OK_FOR_Uc (INTVAL (operands
[2])))
4297 output_asm_insn ("bmaski\t%0, %N2", operands
);
4298 else if (CSKY_CONST_OK_FOR_Ub (INTVAL (operands
[2])))
4299 output_asm_insn ("bgeni\t%0, %P2", operands
);
4301 output_asm_insn ("lrw\t%0, %2", operands
);
4303 if (CSKY_CONST_OK_FOR_I (INTVAL (operands
[3])))
4304 output_asm_insn ("movi\t%R0, %3", operands
);
4305 else if (CSKY_CONST_OK_FOR_Uc (INTVAL (operands
[3])))
4306 output_asm_insn ("bmaski\t%R0, %N3", operands
);
4308 else if (CSKY_CONST_OK_FOR_Ub (INTVAL (operands
[3])))
4309 output_asm_insn ("bgeni\t%R0, %P3", operands
);
4311 output_asm_insn ("lrw\t%R0, %3", operands
);
4318 else if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == REG
)
4320 rtx memexp
= XEXP (dst
, 0);
4321 int srcreg
= REGNO (src
);
4323 struct csky_address op0
;
4325 decompose_csky_address (XEXP (dst
, 0), &op0
);
4327 if (GET_CODE (memexp
) == REG
)
4328 basereg
= REGNO (memexp
);
4329 else if (GET_CODE (memexp
) == PLUS
)
4331 if (GET_CODE (XEXP (memexp
, 0)) == REG
)
4332 basereg
= REGNO (XEXP (memexp
, 0));
4333 else if (GET_CODE (XEXP (memexp
, 1)) == REG
)
4334 basereg
= REGNO (XEXP (memexp
, 1));
4342 if (CSKY_VREG_P (srcreg
))
4345 return "fstrd\t%1, %0";
4347 return "fstd\t%1, %0";
4349 /* FIXME length attribute is wrong here. */
4350 if (srcreg
== basereg
)
4351 /* Just load them in reverse order. */
4352 return "st.w\t%R1, %R0\n\tst.w\t%1, %0";
4354 return "st.w\t%1, %0\n\tst.w\t%R1, %R0";
4362 csky_output_ck801_movedouble (rtx operands
[],
4363 machine_mode mode ATTRIBUTE_UNUSED
)
4365 rtx dst
= operands
[0];
4366 rtx src
= operands
[1];
4372 int dstreg
= REGNO (dst
);
4373 int srcreg
= REGNO (src
);
4375 /* Ensure the second source not overwritten. */
4376 if (srcreg
+ 1 == dstreg
)
4377 return "mov\t%R0, %R1\n\tmov\t%0, %1";
4379 return "mov\t%0, %1\n\tmov\t%R0, %R1";
4381 else if (GET_CODE (src
) == MEM
)
4383 rtx memexp
= XEXP (src
, 0);
4384 int dstreg
= REGNO (dst
);
4386 struct csky_address op0
;
4388 decompose_csky_address (XEXP (src
, 0), &op0
);
4390 if (GET_CODE (memexp
) == LABEL_REF
4391 || (GET_CODE (memexp
) == CONST
4392 && GET_CODE (XEXP (memexp
, 0)) == PLUS
4393 && GET_CODE (XEXP (XEXP (memexp
, 0), 0)) == LABEL_REF
))
4394 return "lrw\t%0, [%1]\n\tlrw\t%R0, [%R1]";
4395 else if (GET_CODE (memexp
) == REG
)
4396 basereg
= REGNO (memexp
);
4397 else if (GET_CODE (memexp
) == PLUS
)
4399 if (GET_CODE (XEXP (memexp
, 0)) == REG
)
4400 basereg
= REGNO (XEXP (memexp
, 0));
4401 else if (GET_CODE (XEXP (memexp
, 1)) == REG
)
4402 basereg
= REGNO (XEXP (memexp
, 1));
4409 /* FIXME length attribute is wrong here. */
4410 if (dstreg
== basereg
)
4411 /* Just load them in reverse order. */
4412 return "ld.w\t%R0, %R1\n\tld.w\t%0, %1";
4414 return "ld.w\t%0, %1\n\tld.w\t%R0, %R1";
4416 else if (GET_CODE (src
) == CONST_INT
|| GET_CODE (src
) == CONST_DOUBLE
)
4418 split_double (src
, operands
+ 2, operands
+ 3);
4420 if (REGNO (dst
) <= 7
4421 && CSKY_CONST_OK_FOR_N (INTVAL (operands
[2]) + 1))
4422 output_asm_insn ("movi\t%0, %2", operands
);
4424 output_asm_insn ("lrw\t%0, %2", operands
);
4427 if (REGNO (dst
) <= 6
4428 && CSKY_CONST_OK_FOR_N (INTVAL (operands
[3]) + 1))
4429 output_asm_insn ("movi\t%R0, %3", operands
);
4431 output_asm_insn ("lrw\t%R0, %3", operands
);
4440 else if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == REG
)
4442 rtx memexp
= XEXP (dst
, 0);
4443 int srcreg
= REGNO (src
);
4445 struct csky_address op0
;
4447 decompose_csky_address (XEXP (dst
, 0), &op0
);
4449 if (GET_CODE (memexp
) == REG
)
4450 basereg
= REGNO (memexp
);
4451 else if (GET_CODE (memexp
) == PLUS
)
4453 if (GET_CODE (XEXP (memexp
, 0)) == REG
)
4454 basereg
= REGNO (XEXP (memexp
, 0));
4455 else if (GET_CODE (XEXP (memexp
, 1)) == REG
)
4456 basereg
= REGNO (XEXP (memexp
, 1));
4463 /* FIXME length attribute is wrong here. */
4464 if (srcreg
== basereg
)
4465 /* Just load them in reverse order. */
4466 return "st.w\t%R1, %R0\n\tst.w\t%1, %0";
4468 return "st.w\t%1, %0\n\tst.w\t%R1, %R0";
4474 /* Split operands for an AND expression when OPERANDS[2] is a constant.
4475 Note operands[0] is marked earlyclobber in this case and can be
4476 overwritten. Return true if "DONE", false otherwise. */
4478 csky_split_and (rtx
*operands
)
4480 HOST_WIDE_INT mask
= INTVAL (operands
[2]);
4481 rtx not_value
= GEN_INT (~mask
);
4484 /* All zeros or all ones can be handled by a move instruction. */
4487 emit_move_insn (operands
[0], const0_rtx
);
4492 emit_move_insn (operands
[0], operands
[1]);
4496 /* Check for constants that can be handled directly by the 32-bit andi
4498 if (CSKY_ISA_FEATURE (E2
) && csky_arith_O_operand (operands
[2], SImode
))
4501 /* Try to transform to andni instruction. */
4502 if (CSKY_ISA_FEATURE (E2
) && csky_arith_O_operand (not_value
, SImode
))
4504 emit_insn (gen_cskyv2_andnsi3 (operands
[0], not_value
, operands
[1]));
4508 /* If there are only one or two 0 bits in the constant, we can
4509 replace the operation with bclri instructions on those bits.
4510 Note CK801 has only the 16-bit bclri that operates on a single
4511 register, so we must count a move if we are post-reload. */
4512 if (popcount_hwi (~mask
& 0xffffffff)
4513 <= (reload_completed
&& !CSKY_ISA_FEATURE (E2
) ? 1 : 2))
4515 rtx input
= operands
[1];
4517 if (!CSKY_ISA_FEATURE (E2
))
4519 emit_move_insn (operands
[0], input
);
4520 input
= operands
[0];
4523 for (i
= 0; i
< 32; i
++)
4524 if ((mask
& (1 << i
)) == 0x0)
4526 emit_insn (gen_bclri (operands
[0], input
, GEN_INT (i
)));
4527 input
= operands
[0];
4532 /* If the constant mask is outside the [0, 4095] range for
4533 constraint O, or if constraint O is not allowed (ck801),
4534 maybe the constant is a contiguous bit range that we can
4535 handle by bit extract (low bits) or shifts (high bits). */
4536 for (i
= (CSKY_ISA_FEATURE (E2
) ? 13 : 1); i
< 32; i
++)
4538 if ((((HOST_WIDE_INT
) 1) << i
) - 1 == mask
)
4540 if (CSKY_ISA_FEATURE (2E3
))
4541 emit_insn (gen_cskyv2_extzv (operands
[0], operands
[1],
4542 GEN_INT (i
), const0_rtx
));
4545 rtx shift
= GEN_INT (32 - i
);
4546 rtx reg
= (reload_completed
4547 ? operands
[0] : gen_reg_rtx (SImode
));
4549 emit_insn (gen_ashlsi3 (reg
, operands
[1], shift
));
4550 emit_insn (gen_lshrsi3 (operands
[0], reg
, shift
));
4554 else if ((((HOST_WIDE_INT
) 1) << i
) - 1 == ~mask
)
4556 rtx shift
= GEN_INT (i
);
4557 rtx reg
= (reload_completed
4558 ? operands
[0] : gen_reg_rtx (SImode
));
4560 emit_insn (gen_lshrsi3 (reg
, operands
[1], shift
));
4561 emit_insn (gen_ashlsi3 (operands
[0], reg
, shift
));
4566 /* If the constant is a negative number, it seems better to use
4567 andn and copy the NOT_VALUE to a register instead of the
4568 original value, since the NOT_VALUE is always smaller and thus
4569 more likely to be representable as a small constant.
4570 This transformation can only be done before reload because
4571 it requires a temporary. Hopefully register allocation can get
4572 rid of the extra move required for CK801. */
4573 if (!reload_completed
&& INTVAL (operands
[2]) < 0)
4575 rtx reg
= copy_to_mode_reg (SImode
, not_value
);
4577 if (CSKY_ISA_FEATURE (E2
))
4578 emit_insn (gen_cskyv2_andnsi3 (operands
[0], reg
, operands
[1]));
4581 emit_move_insn (operands
[0], operands
[1]);
4582 emit_insn (gen_ck801_andnsi3 (operands
[0], reg
, operands
[0]));
4587 /* If the above ways are all not working, move the constant
4588 to a register. We can clobber operands[0] as it is
4589 marked earlyclobber in the insn constraints, but then we have to
4590 swap operands 1 and 2 to match the constraints on the 2-operand
4591 16-bit and instruction. */
4592 if (reload_completed
)
4594 emit_move_insn (operands
[0], operands
[2]);
4595 operands
[2] = operands
[1];
4596 operands
[1] = operands
[0];
4599 operands
[2] = copy_to_mode_reg (SImode
, operands
[2]);
4603 /* Split operands for an IOR expression when OPERANDS[2] is a constant.
4604 Note operands[0] is marked earlyclobber in this case and can be
4605 overwritten. Return true if "DONE", false otherwise. */
4607 csky_split_ior (rtx
*operands
)
4609 HOST_WIDE_INT mask
= INTVAL (operands
[2]);
4612 /* All zeros or all ones can be handled by a move instruction. */
4615 emit_move_insn (operands
[0], operands
[1]);
4620 emit_move_insn (operands
[0], gen_int_mode (-1, SImode
));
4624 /* Check for constants that can be handled directly by the 32-bit ori
4626 if (CSKY_ISA_FEATURE (E2
) && csky_literal_I_operand (operands
[2], SImode
))
4629 /* If there are only one or two 1 bits in the value, we can replace
4630 the operation with bseti instructions to set those bits.
4631 Note CK801 has only the 16-bit bclri that operates on a single
4632 register, so we must count a move if we are post-reload. */
4633 if (popcount_hwi (mask
& 0xffffffff)
4634 <= (reload_completed
&& !CSKY_ISA_FEATURE (E2
) ? 1 : 2))
4636 rtx input
= operands
[1];
4638 if (!CSKY_ISA_FEATURE (E2
))
4640 emit_move_insn (operands
[0], input
);
4641 input
= operands
[0];
4644 for (i
= 0; i
< 32; i
++)
4645 if (mask
& (1 << i
))
4647 emit_insn (gen_bseti (operands
[0], input
, GEN_INT (i
)));
4648 input
= operands
[0];
4653 /* If the above ways are all not working, move the constant
4654 to a register. We can clobber operands[0] as it is
4655 marked earlyclobber in the insn constraints, but then we have to
4656 swap operands 1 and 2 to match the constraints on the 2-operand
4657 16-bit ior instruction. */
4658 if (reload_completed
)
4660 emit_move_insn (operands
[0], operands
[2]);
4661 operands
[2] = operands
[1];
4662 operands
[1] = operands
[0];
4665 operands
[2] = copy_to_mode_reg (SImode
, operands
[2]);
4670 /* Split operands for an XOR expression when OPERANDS[2] is a constant.
4671 Note operands[0] is marked earlyclobber in this case and can be
4672 overwritten. Return true if "DONE", false otherwise. */
4674 csky_split_xor (rtx
*operands
)
4676 HOST_WIDE_INT mask
= INTVAL (operands
[2]);
4678 /* All zeros can be turned into move instruction. */
4681 emit_move_insn (operands
[0], operands
[1]);
4685 /* All ones can be turned into a bitwise not. */
4688 if (CSKY_ISA_FEATURE (E2
))
4689 emit_insn (gen_cskyv2_one_cmplsi2 (operands
[0], operands
[1]));
4692 emit_move_insn (operands
[0], operands
[1]);
4693 emit_insn (gen_ck801_one_cmplsi2 (operands
[0], operands
[0]));
4698 /* Check for constants that can be handled directly by the 32-bit xori
4700 if (CSKY_ISA_FEATURE (E2
) && csky_arith_O_operand (operands
[2], SImode
))
4703 /* If the above ways are all not working, move the constant
4704 to a register. We can clobber operands[0] as it is
4705 marked earlyclobber in the insn constraints, but then we have to
4706 swap operands 1 and 2 to match the constraints on the 2-operand
4707 16-bit ior instruction. */
4708 if (reload_completed
)
4710 emit_move_insn (operands
[0], operands
[2]);
4711 operands
[2] = operands
[1];
4712 operands
[1] = operands
[0];
4715 operands
[2] = copy_to_mode_reg (SImode
, operands
[2]);
4720 /* Return true if X is an address form involving a symbol or label ref. */
4722 csky_symbolic_address_p (rtx x
)
4724 switch (GET_CODE (x
))
4731 return ((GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
4732 || GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
4733 && GET_CODE (XEXP (x
, 1)) == CONST_INT
);
4740 /* Emit a comparison instruction.
4741 Return true if an inverted comparison is generated. */
4744 csky_emit_compare (enum rtx_code code
, rtx op0
, rtx op1
)
4747 rtx cc_reg
= gen_rtx_REG (CCmode
, CSKY_CC_REGNUM
);
4749 if (GET_CODE (op1
) == CONST_INT
)
4751 HOST_WIDE_INT val
= INTVAL (op1
);
4756 /* Unsigned (GTU 0) is the same as (NE 0); everything else is
4757 converted below to LEU (reversed cmphs). */
4760 /* Check whether (GTU A imm) can become (GEU A imm + 1). */
4761 else if (TARGET_MINI_REGISTERS
4762 ? CSKY_CONST_OK_FOR_J (val
+ 1)
4763 : CSKY_CONST_OK_FOR_Uk (val
+ 1))
4765 op1
= GEN_INT (val
+ 1);
4769 /* Check whether (LE A imm) can become (LT A imm + 1),
4770 or (GT A imm) can become (GE A imm + 1). */
4773 if (TARGET_MINI_REGISTERS
4774 ? CSKY_CONST_OK_FOR_J (val
+ 1)
4775 : CSKY_CONST_OK_FOR_Uk (val
+ 1))
4777 op1
= GEN_INT (val
+ 1);
4778 code
= code
== LE
? LT
: GE
;
4787 if (CONSTANT_P (op1
) && GET_CODE (op1
) != CONST_INT
)
4788 op1
= force_reg (GET_MODE (op1
), op1
);
4790 /* cmpnei: 0-31 (K immediate)
4791 ti: 1-32 (J immediate, 0 using btsti x,31). */
4795 /* Use inverted condition, cmpne. */
4800 /* Use normal condition, cmpne. */
4802 if (GET_CODE (op1
) == CONST_INT
4803 && (TARGET_MINI_REGISTERS
4804 ? !csky_literal_K_operand (op1
, SImode
)
4805 : !csky_literal_I_operand (op1
, SImode
)))
4806 op1
= force_reg (SImode
, op1
);
4809 /* Use inverted condition, reversed cmplt. */
4814 /* Use normal condition, reversed cmplt. */
4816 if (GET_CODE (op1
) == CONST_INT
)
4817 op1
= force_reg (SImode
, op1
);
4820 /* Use inverted condition, cmplt. */
4825 /* Use normal condition, cmplt. */
4827 /* covered by btsti x,31. */
4828 if (GET_CODE (op1
) == CONST_INT
&& INTVAL (op1
) != 0
4829 && (TARGET_MINI_REGISTERS
4830 ? !csky_literal_J_operand (op1
, SImode
)
4831 : !csky_literal_Uk_operand (op1
, SImode
)))
4832 op1
= force_reg (SImode
, op1
);
4835 /* Use inverted condition, cmple. */
4837 /* We coped with unsigned > 0 above. */
4838 gcc_assert (GET_CODE (op1
) != CONST_INT
|| INTVAL (op1
) != 0);
4842 /* Use normal condition, reversed cmphs. */
4844 if (GET_CODE (op1
) == CONST_INT
&& INTVAL (op1
) != 0)
4845 op1
= force_reg (SImode
, op1
);
4848 /* Use inverted condition, cmphs. */
4853 /* Use normal condition, cmphs. */
4855 if (GET_CODE (op1
) == CONST_INT
&& INTVAL (op1
) != 0
4856 && (TARGET_MINI_REGISTERS
4857 ? !csky_literal_J_operand (op1
, SImode
)
4858 : !csky_literal_Uk_operand (op1
, SImode
)))
4859 op1
= force_reg (SImode
, op1
);
4866 emit_insn (gen_rtx_SET (cc_reg
,
4867 gen_rtx_fmt_ee (code
, CCmode
, op0
, op1
)));
4871 /* Return true if push/pop can be used to save/restore all the registers
4872 indicated by MASK. We currently don't attempt to handle situations where
4873 some of the registers could be handled by push/pop and others saved and
4874 restored individually. */
4877 csky_can_use_pushpop (unsigned int mask
)
4882 if (!TARGET_PUSHPOP
)
4888 /* Regs 0-3, 12-14, 18-27, 29-31 cannot be in the mask. */
4889 if (mask
& 0xeffc700f)
4892 /* Regs in the range r4-r11 must be contiguous. */
4893 for (end_reg
= 0, i
= 11; i
>= 4; i
--)
4895 if (!end_reg
&& (mask
& (1 << i
)))
4897 if (end_reg
&& !(mask
& (1 << i
)))
4901 /* Likewise for regs in the range r16-r17. */
4902 for (end_reg
= 0, i
= 17; i
>= 16; i
--)
4904 if (!end_reg
&& (mask
& (1 << i
)))
4906 if (end_reg
&& !(mask
& (1 << i
)))
4914 /* Return true if store/load multiple instructions can be used to
4915 save/restore at least some of the registers indicated by MASK.
4916 Unlike the push/pop case, this does handle partial ranges.
4917 Set *BR and *ER to the beginning and end (respectively) of the
4918 register range that can be handled. */
4921 csky_can_use_ldstm (int mask
, int *br
, int *er
)
4924 int begin_reg
= 0, end_reg
= 0;
4927 if (!TARGET_MULTIPLE_STLD
)
4930 /* We'll only handle registers in the range 4-11, the contiguous range
4931 of caller-saved registers. Higher-numbered registers are handled
4932 individually in addition to this, but we'll give up on doing ldstm
4933 entirely if we need to save/restore the low-numbered EH registers. */
4937 for (regno
= 4; regno
<= 11; regno
++)
4939 if (mask
& 1 << regno
)
4950 if (count
>= CSKY_MIN_MULTIPLE_STLD
&& count
<= CSKY_MAX_MULTIPLE_STLD
)
/* Emit the assembly for a function return.  Naked functions emit
   nothing; interrupt handlers use ipop/nir; otherwise a plain rts.  */

const char *
csky_output_return_instruction (void)
{
  unsigned long func_type = get_csky_current_func_type ();

  if (CSKY_FUNCTION_IS_NAKED (func_type))
    return "";
  if (CSKY_FUNCTION_IS_INTERRUPT (func_type))
    return "ipop\n\tnir\n";
  else
    return "rts";
}
4976 /* Adjust the stack pointer by OFFSET bytes. OFFSET is negative if this
4977 is in the prologue, positive if in the epilogue. This may require
4978 multiple instructions and/or use of CSKY_STACKADJUST_REGNUM as
4979 a scratch register. Emit CFA notes as appropriate. */
4981 expand_csky_stack_adjust (int offset
)
4985 int size
= (offset
> 0 ? offset
: -offset
);
4990 /* If OFFSET is too large for addi/subi, load it into
4991 CSKY_STACKADJUST_REGNUM and use a register add/sub instead.
4992 This case is not mentioned in the ABI documentation, but it is
4993 supported by GDB prologue analysis provided that the instruction(s)
4994 to initialize CSKY_STACKADJUST_REGNUM appear directly before
4995 the sub. Depending on the value of OFFSET, this might be a
4996 lrw instruction or the "tricks" used by csky_output_inline_const to
4997 encode special-case integer constants. */
4998 if (size
> CSKY_MAX_SP_ADJUST
* 2)
5002 /* We should have reserved the scratch register already in
5003 csky_layout_stack_frame. */
5004 gcc_assert (cfun
->machine
->reg_size
!= 0
5005 && (cfun
->machine
->reg_mask
5006 & (1 << CSKY_STACKADJUST_REGNUM
)));
5008 /* Prevent the optimizer from reordering these instructions to
5010 if (!flag_sched_prolog
)
5011 emit_insn (gen_blockage ());
5013 tmp
= gen_rtx_REG (SImode
, CSKY_STACKADJUST_REGNUM
);
5014 emit_move_insn (tmp
, GEN_INT (size
));
5017 set
= gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, tmp
);
5019 set
= gen_subsi3 (stack_pointer_rtx
, stack_pointer_rtx
, tmp
);
5020 insn
= emit_insn (set
);
5021 RTX_FRAME_RELATED_P (insn
) = 1;
5022 dwarf
= gen_rtx_SET (stack_pointer_rtx
,
5023 plus_constant (Pmode
, stack_pointer_rtx
, offset
));
5024 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
5026 /* More make GDB happy. */
5027 if (!flag_sched_prolog
)
5028 emit_insn (gen_blockage ());
5031 /* Use one or two addi or subi insns to adjust stack. */
5035 int delta
= (size
> CSKY_MAX_SP_ADJUST
5036 ? CSKY_MAX_SP_ADJUST
: size
);
5039 set
= gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
5042 set
= gen_subsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
5044 insn
= emit_insn (set
);
5045 RTX_FRAME_RELATED_P (insn
) = 1;
5051 /* Generate and emit an insn that we will recognize as a push_multi.
5052 Unfortunately, since this insn does not reflect very well the actual
5053 semantics of the operation, we need to annotate the insn for the benefit
5054 of DWARF2 frame unwind information. DWARF_REGS_MASK is a subset of
5055 MASK for registers that should be annotated for DWARF2 frame unwind
5059 emit_csky_regs_push (unsigned long mask
)
5066 int dwarf_par_index
;
5068 for (i
= 0; i
< CSKY_NGPR_REGS
; i
++)
5070 if (mask
& (1 << i
))
5074 /* The reg range for push is:r4-r11,r15-r17,r28. */
5075 gcc_assert (num_regs
&& num_regs
<= 12);
5077 /* For the body of the insn we are going to generate an UNSPEC in
5078 parallel with several USEs. This allows the insn to be recognized
5079 by the push_multi pattern in the csky.md file.
5081 The body of the insn looks something like this:
5084 (set (mem:BLK (pre_modify:SI (reg:SI sp)
5085 (const_int:SI <num>)))
5086 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSHPOP_MULT))
5092 For the frame note however, we try to be more explicit and actually
5093 show each register being stored into the stack frame, plus a (single)
5094 decrement of the stack pointer. We do it this way in order to be
5095 friendly to the stack unwinding code, which only wants to see a single
5096 stack decrement per instruction. The RTL we generate for the note looks
5097 something like this:
5100 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
5101 (set (mem:SI (reg:SI sp)) (reg:SI r4))
5102 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI XX))
5103 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI YY))
5107 FIXME:: In an ideal world the PRE_MODIFY would not exist and
5108 instead we'd have a parallel expression detailing all
5109 the stores to the various memory addresses so that debug
5110 information is more up-to-date. Remember however while writing
5111 this to take care of the constraints with the push instruction.
5113 Note also that this has to be taken care of for the VFP registers.
5115 For more see PR43399. */
5117 par
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_regs
));
5118 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (num_regs
+ 1));
5119 dwarf_par_index
= 1;
5121 for (i
= 0; i
< CSKY_NGPR_REGS
; i
++)
5122 if (mask
& (1 << i
))
5124 rtx reg
= gen_rtx_REG (SImode
, i
);
5125 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, -4 * num_regs
);
5126 tmp
= gen_frame_mem (BLKmode
,
5127 gen_rtx_PRE_MODIFY (Pmode
,
5128 stack_pointer_rtx
, addr
));
5131 gen_rtx_UNSPEC (BLKmode
,
5133 UNSPEC_PUSHPOP_MULT
));
5134 tmp
= gen_rtx_SET (gen_frame_mem (SImode
, stack_pointer_rtx
),
5136 RTX_FRAME_RELATED_P (tmp
) = 1;
5137 XVECEXP (dwarf
, 0, dwarf_par_index
++) = tmp
;
5142 for (j
= 1, i
++; j
< num_regs
; i
++)
5143 if (mask
& (1 << i
))
5145 rtx reg
= gen_rtx_REG (SImode
, i
);
5146 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, 4 * j
);
5147 tmp
= gen_rtx_SET (gen_frame_mem (SImode
, addr
), reg
);
5148 RTX_FRAME_RELATED_P (tmp
) = 1;
5149 XVECEXP (par
, 0, j
) = gen_rtx_USE (VOIDmode
, reg
);
5150 XVECEXP (dwarf
, 0, dwarf_par_index
++) = tmp
;
5154 par
= emit_insn (par
);
5156 tmp
= gen_rtx_SET (stack_pointer_rtx
,
5157 plus_constant (Pmode
, stack_pointer_rtx
, -4 * num_regs
));
5158 RTX_FRAME_RELATED_P (tmp
) = 1;
5159 XVECEXP (dwarf
, 0, 0) = tmp
;
5161 add_reg_note (par
, REG_FRAME_RELATED_EXPR
, dwarf
);
5162 RTX_FRAME_RELATED_P (par
) = 1;
5168 /* Generate and emit an insn pattern that we will recognize as a pop_multi.
5169 SAVED_REGS_MASK shows which registers need to be restored.
5171 Unfortunately, since this insn does not reflect very well the actual
5172 semantics of the operation, we need to annotate the insn for the benefit
5173 of DWARF2 frame unwind information. */
5176 emit_csky_regs_pop (unsigned long mask
)
5182 for (i
= 0; i
< CSKY_NGPR_REGS
; i
++)
5183 if (mask
& (1 << i
))
5186 /* The reg range for push is:r4-r11,r15-r17,r28. */
5187 gcc_assert (num_regs
&& num_regs
<= 12);
5189 /* The first element is (return),
5190 the second element is
5191 (set (reg:SI 'first reg number')
5192 (unspec:SI [(mem)] UNSPEC_PUSHPOP_MULT),
5193 the rest elements is (use (reg:SI 'rest reg number')),
5194 so the length should be number of register to be poped
5196 par
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_regs
+ 1));
5198 XVECEXP (par
, 0, 0) = ret_rtx
;
5200 for (i
= 0; i
< CSKY_NGPR_REGS
; i
++)
5201 if (mask
& (1 << i
))
5203 rtx reg
= gen_rtx_REG (SImode
, i
);
5204 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, 4 * num_regs
);
5205 rtx tmp
= gen_frame_mem (SImode
,
5206 gen_rtx_POST_MODIFY (Pmode
,
5207 stack_pointer_rtx
, addr
));
5210 gen_rtx_UNSPEC (SImode
,
5212 UNSPEC_PUSHPOP_MULT
));
5216 for (j
= 2, i
++; j
< (num_regs
+ 1); i
++)
5217 if (mask
& (1 << i
))
5219 rtx reg
= gen_rtx_REG (SImode
, i
);
5220 XVECEXP (par
, 0, j
) = gen_rtx_USE (VOIDmode
, reg
);
5224 par
= emit_jump_insn (par
);
5228 /* Generate the function prologue. */
5231 csky_expand_prologue (void)
5234 unsigned long func_type
= get_csky_current_func_type ();
5235 unsigned int reg_mask
;
5238 if (CSKY_FUNCTION_IS_NAKED (func_type
))
5240 if (flag_stack_usage_info
)
5241 current_function_static_stack_size
= 0;
5245 csky_layout_stack_frame ();
5246 reg_mask
= cfun
->machine
->reg_mask
;
5247 reg_size
= cfun
->machine
->reg_size
;
5249 /* Adjust stack pointer past argument overflow area. */
5250 if (cfun
->machine
->arg_size
!= 0)
5252 int offset
= cfun
->machine
->arg_size
;
5253 expand_csky_stack_adjust (- offset
);
5255 /* If we have a parameter passed partially in regs and partially
5256 in memory, the registers will have been stored to memory already
5257 in function.c. So we only need to copy varargs from registers
5259 if (cfun
->machine
->uses_anonymous_args
)
5261 int rn
= CSKY_FIRST_PARM_REGNUM
+ CSKY_NPARM_REGS
- 1;
5262 for (offset
-= 4; offset
>= 0; offset
-= 4, rn
--)
5264 rtx dst
= gen_frame_mem (SImode
,
5265 plus_constant (Pmode
,
5268 insn
= emit_move_insn (dst
, gen_rtx_REG (SImode
, rn
));
5269 RTX_FRAME_RELATED_P (insn
) = 1;
5274 /* Push caller-saved registers to stack. */
5275 if (csky_can_use_pushpop (reg_mask
))
5276 emit_csky_regs_push (reg_mask
);
5279 int sreg
= -1, ereg
= -1;
5280 bool stm_p
= csky_can_use_ldstm (reg_mask
, &sreg
, &ereg
);
5281 int stm_regs
= stm_p
? ereg
- sreg
+ 1 : 0;
5282 int stm_size
= stm_regs
* 4;
5284 /* First adjust the SP to the low end of the register save area. */
5285 expand_csky_stack_adjust (- reg_size
);
5287 /* Emit individual register saves. Even if we are going to emit an
5288 stm, we may need to save individual registers above that too. */
5289 if (reg_size
> stm_size
)
5291 int offset
= reg_size
- 4;
5293 for ( ; regno
> ereg
; regno
--)
5294 if (reg_mask
& (1 << regno
))
5296 rtx dst
= gen_rtx_MEM (SImode
,
5297 plus_constant (Pmode
,
5300 rtx insn
= emit_insn (gen_movsi (dst
,
5301 gen_rtx_REG (SImode
, regno
)));
5302 RTX_FRAME_RELATED_P (insn
) = 1;
5303 if (offset
== stm_size
)
5309 /* If possible, emit a stm to do a bulk store of sequential
5310 registers to the stack. Note that it is an error in the ABI
5311 documentation that it doesn't list stm as a valid prologue
5315 rtx par
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (stm_regs
));
5317 for (regno
= sreg
, slot
= 0; regno
<= ereg
; regno
++, slot
++)
5319 rtx reg
= gen_rtx_REG (SImode
, regno
);
5320 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, slot
* 4);
5321 rtx set
= gen_rtx_SET (gen_frame_mem (SImode
, addr
), reg
);
5322 RTX_FRAME_RELATED_P (set
) = 1;
5323 XVECEXP (par
, 0, slot
) = set
;
5325 insn
= emit_insn (par
);
5326 RTX_FRAME_RELATED_P (insn
) = 1;
5330 /* Initialize hard frame pointer, if necessary. It points at the base
5331 of the register save area. */
5332 if (frame_pointer_needed
)
5334 insn
= emit_insn (gen_movsi (frame_pointer_rtx
, stack_pointer_rtx
));
5335 RTX_FRAME_RELATED_P (insn
) = 1;
5338 /* Reserve stack space for locals and outgoing args. */
5339 expand_csky_stack_adjust (- cfun
->machine
->reg_offset
);
5341 /* Put the GOT address in reg_gb for PIC, using R13 as a scratch.
5342 See section 4.7.1 in the ABI documentation,
5343 "Function Prologue for PIC". */
5344 if (flag_pic
&& (reg_mask
& (1 << PIC_OFFSET_TABLE_REGNUM
)))
5346 rtx l1
= gen_label_rtx ();
5347 rtx grs_label
= gen_rtx_LABEL_REF (SImode
, l1
);
5348 rtx reg_gb
= gen_rtx_REG (SImode
, PIC_OFFSET_TABLE_REGNUM
);
5349 rtx reg_temp
= gen_rtx_REG (SImode
, 13);
5351 rtx tmp0_unspec
= gen_rtx_UNSPEC (Pmode
,
5352 gen_rtvec (1, grs_label
),
5353 UNSPEC_PIC_SYMBOL_GOTPC_GRS
);
5354 rtx tmp1_unspec
= gen_rtx_UNSPEC (Pmode
,
5355 gen_rtvec (1, grs_label
),
5356 UNSPEC_PIC_SYMBOL_GOTPC
);
5358 emit_insn (gen_prologue_get_pc (tmp0_unspec
));
5359 emit_move_insn (reg_temp
, tmp1_unspec
);
5360 emit_insn (gen_addsi3 (reg_gb
, reg_gb
, reg_temp
));
5363 if (flag_stack_usage_info
)
5364 current_function_static_stack_size
= cfun
->machine
->frame_size
;
5366 if (!flag_sched_prolog
)
5367 emit_insn (gen_blockage ());
5371 csky_expand_epilogue (void)
5373 unsigned long func_type
= get_csky_current_func_type ();
5374 unsigned int reg_mask
;
5379 if (!flag_sched_prolog
)
5380 emit_insn (gen_blockage ());
5382 if (CSKY_FUNCTION_IS_NAKED (func_type
))
5384 emit_jump_insn (gen_simple_return ());
5388 /* Get the frame information. */
5389 csky_layout_stack_frame ();
5390 reg_mask
= cfun
->machine
->reg_mask
;
5391 reg_size
= cfun
->machine
->reg_size
;
5392 adjust
= reg_size
+ cfun
->machine
->arg_size
;
5394 /* Restore the SP to the base of the register save area. */
5395 if (frame_pointer_needed
)
5397 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
5398 RTX_FRAME_RELATED_P (insn
) = 1;
5401 expand_csky_stack_adjust (cfun
->machine
->reg_offset
);
5403 /* Restore the callee-saved registers. */
5404 if (csky_can_use_pushpop (reg_mask
)
5405 && cfun
->machine
->arg_size
== 0
5406 && !CSKY_FUNCTION_IS_INTERRUPT (func_type
)
5407 && !crtl
->calls_eh_return
)
5409 /* Pop includes an implicit return, so we are done. */
5410 emit_csky_regs_pop (reg_mask
);
5415 int sreg
= -1, ereg
= -1;
5416 bool ldm_p
= csky_can_use_ldstm (reg_mask
, &sreg
, &ereg
);
5417 int ldm_regs
= ldm_p
? ereg
- sreg
+ 1 : 0;
5418 int ldm_size
= ldm_regs
* 4;
5420 /* Emit individual register loads. Even if we are going to emit an
5421 ldm, we may need to load individual registers above that too. */
5422 if (reg_size
> ldm_size
)
5424 int offset
= reg_size
- 4;
5426 for ( ; regno
> ereg
; regno
--)
5427 if (reg_mask
& (1 << regno
))
5429 rtx src
= gen_frame_mem (SImode
,
5430 plus_constant (Pmode
,
5433 rtx reg
= gen_rtx_REG (SImode
, regno
);
5434 insn
= emit_move_insn (reg
, src
);
5435 RTX_FRAME_RELATED_P (insn
) = 1;
5436 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
5437 if (offset
== ldm_size
)
5443 /* If possible, emit a ldm to do a bulk load of sequential
5444 registers from the stack. */
5447 rtx par
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (ldm_regs
));
5449 for (regno
= sreg
, slot
= 0; regno
<= ereg
; regno
++, slot
++)
5451 rtx reg
= gen_rtx_REG (SImode
, regno
);
5452 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, slot
* 4);
5453 rtx set
= gen_rtx_SET (reg
, gen_frame_mem (SImode
, addr
));
5454 XVECEXP (par
, 0, slot
) = set
;
5456 insn
= emit_insn (par
);
5457 RTX_FRAME_RELATED_P (insn
) = 1;
5458 for (regno
= sreg
; regno
<= ereg
; regno
++)
5460 rtx reg
= gen_rtx_REG (SImode
, regno
);
5461 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
5466 /* Emit the final stack pointer adjustment to deallocate the saved
5467 registers and incoming argument area. */
5468 expand_csky_stack_adjust (adjust
);
5470 /* Extra stack adjustment for exception handler return. */
5471 if (crtl
->calls_eh_return
)
5472 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
5473 EH_RETURN_STACKADJ_RTX
));
5475 /* Now we can return. */
5476 emit_jump_insn (gen_simple_return ());
5481 csky_output_function_prologue (FILE *f
)
5483 unsigned long func_type
= get_csky_current_func_type ();
5485 switch ((int) CSKY_FUNCTION_TYPE (func_type
))
5488 case CSKY_FT_NORMAL
:
5490 case CSKY_FT_INTERRUPT
:
5492 asm_fprintf (f
, "\t# Interrupt Service Routine.\n");
5493 asm_fprintf (f
, "\tnie\n\tipush\n");
5497 asm_fprintf (f
, "\t# Fast Interrupt Service Routine.\n");
5499 case CSKY_FT_EXCEPTION
:
5500 asm_fprintf (f
, "\t# CSKY Exception Handler.\n");
5503 asm_fprintf (f
, "\t# Naked Function: prologue and epilogue \
5504 provided by programmer.\n");
5508 csky_layout_stack_frame ();
5510 /* Generate .stack_size function-name, size for callgraph;
5511 the default stack size is 0. */
5512 if (TARGET_STACK_SIZE
&& cfun
->machine
->frame_size
> 0)
5514 gcc_assert (current_function_decl
!= NULL
);
5515 const char *func_name
=
5516 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (current_function_decl
));
5517 if (func_name
[0] == '*')
5518 asm_fprintf (f
, "\t.stack_size %s, %d\n",
5519 &func_name
[1], cfun
->machine
->frame_size
);
5521 asm_fprintf (f
, "\t.stack_size %s, %d\n",
5522 func_name
, cfun
->machine
->frame_size
);
5528 csky_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED
)
5534 /* Helper for csky_eh_return splitter: store the call frame exception
5535 handler address in lr. */
5537 csky_set_eh_return_address (rtx source
, rtx scratch
)
5539 HOST_WIDE_INT delta
= 0;
5541 unsigned int reg_mask
;
5543 csky_layout_stack_frame ();
5544 reg_mask
= cfun
->machine
->reg_mask
;
5546 if (reg_mask
& (1 << CSKY_LR_REGNUM
))
5548 /* Find LR in the stack frame. */
5551 if (frame_pointer_needed
)
5553 basereg
= frame_pointer_rtx
;
5558 basereg
= stack_pointer_rtx
;
5559 delta
= cfun
->machine
->reg_offset
;
5562 /* At this point, (basereg + delta) points at the low end of
5563 the reg save area. Regs are saved sequentially from low
5564 to high from this address. */
5565 for (i
= 0; i
< CSKY_LR_REGNUM
; i
++)
5566 if (reg_mask
& (1 << i
))
5569 if ((CSKY_TARGET_ARCH (CK801
) && delta
>= CSKY_LD16_MAX_OFFSET (Pmode
))
5570 || delta
>= CSKY_LD32_MAX_OFFSET (Pmode
))
5572 emit_insn (gen_movsi (scratch
, GEN_INT (delta
)));
5573 emit_insn (gen_addsi3 (scratch
, scratch
, basereg
));
5577 addr
= plus_constant (Pmode
, basereg
, delta
);
5578 emit_move_insn (gen_frame_mem (Pmode
, addr
), source
);
5581 emit_move_insn (gen_rtx_REG (Pmode
, CSKY_LR_REGNUM
), source
);
5584 /* Return TRUE if X references a SYMBOL_REF. */
5587 csky_symbol_mentioned_p (rtx x
)
5592 if (GET_CODE (x
) == SYMBOL_REF
)
5595 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
5596 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
5602 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5603 if (csky_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
5606 else if (fmt
[i
] == 'e' && csky_symbol_mentioned_p (XEXP (x
, i
)))
5613 /* Return TRUE if X references a LABEL_REF. */
5616 csky_label_mentioned_p (rtx x
)
5621 if (GET_CODE (x
) == LABEL_REF
)
5624 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
5625 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
5631 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5632 if (csky_label_mentioned_p (XVECEXP (x
, i
, j
)))
5635 else if (fmt
[i
] == 'e' && csky_label_mentioned_p (XEXP (x
, i
)))
5644 tls_unspec_mentioned_p (rtx x
)
5646 switch (GET_CODE (x
))
5649 return tls_unspec_mentioned_p (XEXP (x
, 0));
5652 if (XINT (x
, 1) == UNSPEC_TLS
)
5662 /* Implement LEGITIMATE_PIC_OPERAND_P. */
5664 csky_legitimate_pic_operand_p (rtx x
)
5666 if (tls_unspec_mentioned_p (x
))
5668 if (csky_symbol_mentioned_p (x
) || csky_label_mentioned_p (x
))
5674 csky_legitimize_pic_address (rtx orig
, rtx reg
, bool gotrel_p
)
5676 rtx pic_reg
= gen_rtx_REG (SImode
, PIC_OFFSET_TABLE_REGNUM
);
5677 bool optimize_p
= false;
5679 if (GET_CODE (orig
) == SYMBOL_REF
|| GET_CODE (orig
) == LABEL_REF
)
5681 rtx pic_ref
, address
, rtx_tmp
;
5683 rtx pic_reg
= gen_rtx_REG (SImode
, PIC_OFFSET_TABLE_REGNUM
);
5688 gcc_assert (can_create_pseudo_p ());
5689 reg
= gen_reg_rtx (Pmode
);
5694 address
= gen_reg_rtx (Pmode
);
5698 if (GET_CODE (orig
) == SYMBOL_REF
&& !SYMBOL_REF_LOCAL_P (orig
))
5700 /* When gotrel_p generate sym@GOT, otherwise generate sym@PLT. */
5701 rtx_tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, orig
),
5703 ? UNSPEC_PIC_SYMBOL_GOT
5704 : UNSPEC_PIC_SYMBOL_PLT
));
5705 optimize_p
= gotrel_p
;
5708 emit_move_insn (address
, rtx_tmp
);
5709 rtx_tmp
= gen_rtx_MULT (Pmode
, address
, GEN_INT (1));
5711 pic_ref
= gen_const_mem (Pmode
,
5712 gen_rtx_PLUS (Pmode
, pic_reg
, rtx_tmp
));
5717 if (flag_pic
== 1 && !gotrel_p
)
5719 pic_ref
= gen_rtx_UNSPEC (Pmode
,
5720 gen_rtvec (1, orig
),
5721 UNSPEC_PIC_SYMBOL_BSR
);
5724 /* grs rx, symbol */
5725 else if (flag_pic
== 1 && (GET_CODE (orig
) == SYMBOL_REF
)
5726 && SYMBOL_REF_FUNCTION_P (orig
))
5728 pic_ref
= gen_rtx_UNSPEC (Pmode
,
5729 gen_rtvec (1, orig
),
5730 UNSPEC_PIC_SYMBOL_GRS
);
5733 /* lrw rx, symbol@GOTOFF; add rx, rx, gb */
5736 rtx_tmp
= gen_rtx_UNSPEC (Pmode
,
5737 gen_rtvec (1, orig
),
5738 UNSPEC_PIC_SYMBOL_GOTOFF
);
5739 emit_move_insn (address
, rtx_tmp
);
5740 pic_ref
= gen_rtx_PLUS (Pmode
, address
, pic_reg
);
5745 insn
= emit_move_insn (reg
, pic_ref
);
5746 /* Put a REG_EQUAL note on this insn,
5747 so that it can be optimized by loop. */
5749 set_unique_reg_note (insn
, REG_EQUAL
, orig
);
5753 else if (GET_CODE (orig
) == CONST
)
5757 if (GET_CODE (XEXP (orig
, 0)) == PLUS
5758 && XEXP (XEXP (orig
, 0), 1) == pic_reg
)
5763 gcc_assert (can_create_pseudo_p ());
5764 reg
= gen_reg_rtx (Pmode
);
5767 gcc_assert (GET_CODE (XEXP (orig
, 0)) == PLUS
);
5769 base
= csky_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
5771 offset
= csky_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
5772 base
== reg
? 0 : reg
, gotrel_p
);
5774 if (GET_CODE (offset
) == CONST_INT
)
5775 return plus_constant (Pmode
, base
, INTVAL (offset
));
5777 return gen_rtx_PLUS (Pmode
, base
, offset
);
5784 /* Functions to output assembly code for a function call. */
5787 csky_output_call (rtx
*operands
, int index
)
5789 static char buffer
[20];
5790 rtx addr
= operands
[index
];
5793 sprintf (buffer
, "jsr\t%%%d", index
);
5794 else if (flag_pic
&& (GET_CODE (addr
) == UNSPEC
))
5795 sprintf (buffer
, "bsr\t%%%d", index
);
5797 sprintf (buffer
, "jbsr\t%%%d", index
);
5803 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.
5804 Output assembler code for a block containing the constant parts
5805 of a trampoline, leaving space for the variable parts.
5806 Note that STATIC_CHAIN_REGNUM is t1 (aka r12) on ck801 and
5807 t1 (r13) otherwise. */
5810 csky_asm_trampoline_template (FILE *f
)
5812 if (CSKY_ISA_FEATURE (2E3
))
5814 fprintf (f
, "\tlrw\t%s, [.Lstatic_chain]\n",
5815 reg_names
[STATIC_CHAIN_REGNUM
]);
5816 fprintf (f
, "\tjmpi\t[.Lfunc_address]\n");
5817 /* 2 32-bit insns = 8 bytes. */
5819 else if (CSKY_TARGET_ARCH (CK801
))
5821 /* It's hard to provide general support for trampolines on this
5822 core. We need a register other than the one holding the
5823 static chain (r13) to hold the function pointer for the
5824 indirect jump to it. But ck801 has such a limited register set
5825 there is no other call-clobbered scratch register available -- in
5826 particular, this core does not have r12, which we use for the
5827 ck802 case below. If we use a callee-saved register like r4,
5828 saving the old value on the stack screws up the stack frame
5829 if there are overflow arguments pushed on the stack
5830 by the caller. In theory we could test for that and handle
5831 limited cases with parameters that all fit in r0-r3 with no
5832 stack overflow, but punt for now. */
5833 sorry ("Nested function trampolines not supported on CK801.");
5837 fprintf (f
, "\tlrw\t%s, [.Lfunc_address]\n",
5838 reg_names
[CSKY_T1_REGNUM
]);
5839 fprintf (f
, "\tlrw\t%s, [.Lstatic_chain]\n",
5840 reg_names
[STATIC_CHAIN_REGNUM
]);
5841 fprintf (f
, "\tjmp\t%s\n",
5842 reg_names
[CSKY_T1_REGNUM
]);
5843 /* To align constant pool on a word boundary. */
5844 fprintf (f
, "\t.align 2\n");
5845 /* 2 32-bit lrw insns + 16-bit jump + 16-bit pad = 12 bytes. */
5848 fprintf (f
, ".Lstatic_chain:\n");
5849 fprintf (f
, "\t.long 0\n");
5850 fprintf (f
, ".Lfunc_address:\n");
5851 fprintf (f
, "\t.long 0\n");
5852 /* 2 words of constant pool = 8 bytes. */
5855 /* Worker function for TARGET_TRAMPOLINE_INIT. */
5858 csky_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
5860 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
5862 int pool
= TRAMPOLINE_SIZE
- 8;
5864 emit_block_move (m_tramp
, assemble_trampoline_template (),
5865 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5867 mem
= adjust_address (m_tramp
, SImode
, pool
);
5868 emit_move_insn (mem
, chain_value
);
5869 mem
= adjust_address (m_tramp
, SImode
, pool
+ 4);
5870 emit_move_insn (mem
, fnaddr
);
5872 a_tramp
= XEXP (m_tramp
, 0);
5873 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__clear_cache"),
5874 LCT_NORMAL
, VOIDmode
, a_tramp
, Pmode
,
5875 plus_constant (Pmode
, a_tramp
, TRAMPOLINE_SIZE
), Pmode
);
5879 /* Emit a comparison insn for float values.
5880 Return true if the comparison is inverted. */
/* NOTE(review): this block is a garbled extraction -- statements are split
   across lines and the interior of the function (the per-rtx-code switch
   between original lines 5890 and 5930, plus declarations, braces, and the
   inversion logic) is missing.  The text is kept byte-identical below;
   restore from upstream gcc/config/csky/csky.c (csky_emit_compare_float)
   before editing.  */
5883 csky_emit_compare_float (enum rtx_code code
, rtx op0
, rtx op1
)
5885 rtx cc_reg
= gen_rtx_REG (CCmode
, CSKY_CC_REGNUM
);
5887 machine_mode mode
= GET_MODE (op1
);
/* Force a non-zero operand into a register; comparisons against
   floating-point zero can use the reg/zero compare forms.  */
5889 if (op1
!= CONST0_RTX (mode
))
5890 op1
= force_reg (mode
, op1
);
5903 if (op1
== CONST0_RTX (mode
))
5904 op1
= force_reg (mode
, op1
);
5907 if (op1
== CONST0_RTX (mode
))
5908 op1
= force_reg (mode
, op1
);
5913 if (op1
== CONST0_RTX (mode
))
/* Emit the CC-setting comparison itself.  */
5930 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_fmt_ee (code
, CCmode
, op0
, op1
)));
5935 /* Support for the Q memory constraint. Returns true if OP is a MEM RTX
5936 with an address consisting of base + index or base + displacement. */
5938 csky_valid_fpuv2_mem_operand (rtx op
)
5940 struct csky_address addr
;
5942 if (GET_CODE (op
) != MEM
)
5945 if (!decompose_csky_address (XEXP (op
, 0), &addr
))
5948 /* Verify base register. */
5949 if (!is_csky_address_register_rtx_p (addr
.base
, 0))
5952 /* Verify index operand. */
5955 if (!is_csky_address_register_rtx_p (addr
.index
, 0))
5958 if (addr
.scale
== 1 || addr
.scale
== 2 || addr
.scale
== 4
5964 /* Verify disp operand. */
5967 rtx disp
= addr
.disp
;
5969 if (!CONST_INT_P (disp
))
5972 if (((unsigned) INTVAL (disp
) % 4) == 0
5973 && (unsigned) INTVAL (disp
) <= (unsigned) 1020)
5982 /* Returns the (interrupt) function type of the current
5983 function, or CSKY_FT_UNKNOWN if the type cannot be determined. */
5985 static unsigned long
5986 csky_isr_value (tree argument
)
5988 const isr_attribute_entry
*ptr
;
5991 /* No argument - default to IRQ. */
5992 if (argument
== NULL_TREE
)
5995 /* Get the value of the argument. */
5996 if (TREE_VALUE (argument
) == NULL_TREE
5997 || TREE_CODE (TREE_VALUE (argument
)) != STRING_CST
)
5998 return CSKY_FT_UNKNOWN
;
6000 arg
= TREE_STRING_POINTER (TREE_VALUE (argument
));
6002 /* Check it against the list of known arguments. */
6003 for (ptr
= isr_attribute_map
; ptr
->arg
!= NULL
; ptr
++)
6004 if (strcmp (arg
, ptr
->arg
) == 0)
6005 return ptr
->return_value
;
6007 /* An unrecognized interrupt type. */
6008 return CSKY_FT_UNKNOWN
;
6011 /* Handle an attribute requiring a FUNCTION_DECL;
6012 arguments as in struct attribute_spec.handler. */
6015 csky_handle_fndecl_attribute (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
6016 int flags ATTRIBUTE_UNUSED
, bool *no_add_attrs
)
6018 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6020 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6022 *no_add_attrs
= true;
6028 /* Handle an "interrupt" or "isr" attribute;
6029 arguments as in struct attribute_spec.handler. */
6032 csky_handle_isr_attribute (tree
*node
, tree name
, tree args
, int flags
,
6038 warning (OPT_Wattributes
, "%qE attribute ignored without -mistack",
6040 *no_add_attrs
= true;
6046 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6048 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
6050 *no_add_attrs
= true;
6055 if (TREE_CODE (*node
) == FUNCTION_TYPE
6056 || TREE_CODE (*node
) == METHOD_TYPE
)
6058 if (csky_isr_value (args
) == CSKY_FT_UNKNOWN
)
6060 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6061 *no_add_attrs
= true;
6064 else if (TREE_CODE (*node
) == POINTER_TYPE
6065 && (TREE_CODE (TREE_TYPE (*node
)) == FUNCTION_TYPE
6066 || TREE_CODE (TREE_TYPE (*node
)) == METHOD_TYPE
)
6067 && csky_isr_value (args
) != CSKY_FT_UNKNOWN
)
6069 *node
= build_variant_type_copy (*node
);
6070 TREE_TYPE (*node
) = build_type_attribute_variant (TREE_TYPE (*node
),
6071 tree_cons (name
, args
, TYPE_ATTRIBUTES (TREE_TYPE (*node
))));
6072 *no_add_attrs
= true;
6074 else if (flags
& ((int)ATTR_FLAG_DECL_NEXT
6075 | (int)ATTR_FLAG_FUNCTION_NEXT
6076 | (int)ATTR_FLAG_ARRAY_NEXT
))
6078 *no_add_attrs
= true;
6079 return tree_cons (name
, args
, NULL_TREE
);
6082 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6088 /* Implement TARGET_REGISTER_MOVE_COST: compute extra cost of moving data
6089 between one register class and another. */
/* NOTE(review): garbled extraction -- the V_REG_CLASS_P macro body, all
   of the numeric cost return statements, and the function's default
   return were dropped.  Kept byte-identical; restore the costs from
   upstream gcc/config/csky/csky.c (csky_register_move_cost).  */
6092 csky_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
6093 reg_class_t from
, reg_class_t to
)
6095 #define GR_REG_CLASS_P(CLASS) \
6096 ((CLASS) == GENERAL_REGS || (CLASS) == MINI_REGS || (CLASS) == SP_REGS \
6097 || (CLASS) == LOW_REGS)
6099 #define HILO_REG_CLASS_P(CLASS) \
6100 ((CLASS) == HI_REGS || (CLASS) == LO_REGS || (CLASS) == HILO_REGS)
6102 #define V_REG_CLASS_P(CLASS) \
/* Moves within/between vector, general, and hi/lo register classes
   each get their own cost; the constants are missing here.  */
6105 if (V_REG_CLASS_P (from
) && V_REG_CLASS_P (to
))
6108 if ((V_REG_CLASS_P (from
) && GR_REG_CLASS_P (to
))
6109 || (GR_REG_CLASS_P (from
) && V_REG_CLASS_P (to
)))
6112 if ((HILO_REG_CLASS_P (from
) && GR_REG_CLASS_P (to
))
6113 || (GR_REG_CLASS_P (from
) && HILO_REG_CLASS_P (to
)))
6116 if (HILO_REG_CLASS_P (from
) && HILO_REG_CLASS_P (to
))
6119 if ((HILO_REG_CLASS_P (from
) && V_REG_CLASS_P (to
))
6120 || (V_REG_CLASS_P (from
) && HILO_REG_CLASS_P (to
)))
6127 /* Implement TARGET_MEMORY_MOVE_COST: compute the cost of moving data
6128 between registers and memory. */
6131 csky_memory_move_cost (machine_mode mode
, reg_class_t rclass
,
6134 return (4 + memory_move_secondary_cost (mode
, rclass
, in
));
6138 /* TARGET_RTX_COSTS helper for ck801/ck802. */
/* NOTE(review): garbled extraction -- the switch's case labels (MEM, AND,
   MULT, NEG, CONST_INT, shift codes, symbolic constants, ...), braces,
   several cost constants, and the return statements were dropped.  Kept
   byte-identical; restore from upstream gcc/config/csky/csky.c
   (ck802_ck801_rtx_costs).  */
6141 ck802_ck801_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
6144 machine_mode mode
= GET_MODE (x
);
6147 /* Accessing memory costs quite a lot for first word; */
6149 *total
= COSTS_N_INSNS (1 + CSKY_NUM_REGS (mode
));
6166 *total
= COSTS_N_INSNS (1);
6171 *total
= COSTS_N_INSNS (CSKY_NUM_REGS (mode
));
6176 enum rtx_code subcode
= GET_CODE (XEXP (x
, 1));
6178 /* If subcode is "not", we'll try to combine it into e.g. "andn"
6179 instruction, so give AND itself zero cost. */
6189 *total
= COSTS_N_INSNS (CSKY_NUM_REGS (mode
));
6193 /* FIXME: is ixw supported on ck801/ck802? */
6194 /* We can use "ix.h/w" insn to replace multiply by 2 or 4.
6195 "ix.h/w" is a 32-bit insn, so let its cost be a little less than
6197 if (REG_P (XEXP (x
, 0)) && CONST_INT_P (XEXP (x
, 1)))
6199 unsigned HOST_WIDE_INT m
6200 = (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)));
6201 if ((m
== 2 || m
== 4) && outer_code
== PLUS
)
6208 /* Because mult is relatively slower than other operations,
6209 we try to use other insns when optimizing for speed.
6210 When optimizing for size, give it lower cost. */
6213 *total
= COSTS_N_INSNS (10 * CSKY_NUM_REGS (mode
));
6222 *total
= COSTS_N_INSNS (1) + cycle
;
6227 *total
= COSTS_N_INSNS (1);
6231 /* Usually, we use subtract from 0 to substitute for neg, and
6232 it costs 1 extra insn to move 0 to a register. */
6233 *total
= COSTS_N_INSNS (2 * CSKY_NUM_REGS (mode
));
6237 *total
= COSTS_N_INSNS (CSKY_NUM_REGS (mode
));
6241 *total
= COSTS_N_INSNS (1);
6246 *total
= COSTS_N_INSNS (CSKY_NUM_REGS (mode
));
/* Bit-field extract: single insn when extracting an aligned byte.  */
6251 if (REG_P (XEXP (x
, 0))
6252 && CONST_INT_P (XEXP (x
, 1))
6253 && CONST_INT_P (XEXP (x
, 2))
6254 && INTVAL (XEXP (x
, 1)) == 8
6255 && INTVAL (XEXP (x
, 2)) % 8 == 0)
6257 *total
= COSTS_N_INSNS (1);
6260 *total
= COSTS_N_INSNS (CSKY_NUM_REGS (mode
));
/* Constant-integer operand costs depend on the consuming operation.  */
6265 unsigned HOST_WIDE_INT t
= (unsigned HOST_WIDE_INT
) (INTVAL (x
));
6267 if (outer_code
== COMPARE
)
6272 *total
= COSTS_N_INSNS (2);
6274 else if (outer_code
== AND
|| outer_code
== IOR
|| outer_code
== XOR
)
6276 /* "andi,xori,ori" are 32-bit insns, so let it cost a
6280 /* Try replacing "andi" by "sextb/h", so let it cost more. */
6281 if (outer_code
== AND
&& (t
== 0xff || t
== 0xffff))
6288 else if (t
< 0x10000)
6289 *total
= COSTS_N_INSNS (1);
6291 *total
= COSTS_N_INSNS (2);
6293 else if (outer_code
== PLUS
|| outer_code
== MINUS
)
6295 /* "addi/subi rx,ry,imm", if imm<9, it is more often a
6296 16-bit insn. If imm>=9, use "movi" insn; it's probably
6297 less than "addi/subi". */
6300 else if (t
< 0x1000)
6302 else if (t
< 0x10000)
6303 *total
= COSTS_N_INSNS (1);
6305 *total
= COSTS_N_INSNS (2);
6307 else if (outer_code
== ROTATE
|| outer_code
== ROTATERT
6308 || outer_code
== LSHIFTRT
|| outer_code
== ASHIFTRT
6309 || outer_code
== ASHIFT
)
6314 *total
= COSTS_N_INSNS (2);
6319 if (outer_code
== SET
&& t
< 256)
6322 *total
= COSTS_N_INSNS (1);
6324 *total
= COSTS_N_INSNS (2);
/* Symbolic constants need a constant-pool load.  */
6332 *total
= COSTS_N_INSNS (3);
6340 /* TARGET_RTX_COSTS helper for ck803. */
/* NOTE(review): garbled extraction -- the switch's case labels (SET,
   MULT, CONST/SYMBOL_REF, ...), braces, and return statements were
   dropped.  Kept byte-identical; restore from upstream
   gcc/config/csky/csky.c (ck803_rtx_costs).  */
6343 ck803_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
6344 int *total
, bool speed ATTRIBUTE_UNUSED
)
/* A SET from memory costs more when the address does not decompose.  */
6349 if (MEM_P (XEXP (x
, 1)))
6351 struct csky_address op1
;
6353 = decompose_csky_address (XEXP (XEXP (x
, 1), 0), &op1
);
6356 *total
= COSTS_N_INSNS (3);
6359 else if (address_valid
)
6361 *total
= COSTS_N_INSNS (1);
/* reg = reg + reg is a single insn.  */
6365 if (REG_P (XEXP (x
, 0)) && (GET_CODE (XEXP (x
, 1)) == PLUS
))
6367 rtx sub_exp
= XEXP (x
, 1);
6368 if (REG_P (XEXP (sub_exp
, 0)) && REG_P (XEXP (sub_exp
, 1)))
6370 *total
= COSTS_N_INSNS (1);
/* Multiply by a small even constant is cheap.  */
6376 if (REG_P (XEXP (x
, 0)) && CONST_INT_P (XEXP (x
, 1)))
6378 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
6379 if (val
% 2 == 0 && val
< 0xffffffff && val
> 0)
6381 *total
= COSTS_N_INSNS (1);
6390 *total
= COSTS_N_INSNS (3);
6397 /* TARGET_RTX_COSTS helper for ck807+ arches. */
/* NOTE(review): garbled extraction -- the switch's case labels (MULT,
   CONST/SYMBOL_REF, ...), braces, and return statements were dropped.
   Kept byte-identical; restore from upstream gcc/config/csky/csky.c
   (ck807_ck810_rtx_costs).  */
6400 ck807_ck810_rtx_costs (rtx x
, int code
,
6401 int outer_code ATTRIBUTE_UNUSED
,
6402 int *total
, bool speed ATTRIBUTE_UNUSED
)
/* Multiply by a small even constant is cheap.  */
6407 if (REG_P (XEXP (x
, 0)) && CONST_INT_P (XEXP (x
, 1)))
6409 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
6410 if (val
% 2 == 0 && val
< 0xffffffff && val
> 0)
6412 *total
= COSTS_N_INSNS (1);
6421 *total
= COSTS_N_INSNS (3);
6429 /* Implement TARGET_RTX_COSTS, to compute a (partial) cost for rtx X.
6430 Return true if the complete cost has been computed, and false if
6431 subexpressions should be scanned. In either case, *TOTAL contains
6435 csky_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
, int outer_code
,
6436 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
6438 int code
= GET_CODE (x
);
6440 if (CSKY_TARGET_ARCH (CK802
) || CSKY_TARGET_ARCH (CK801
))
6441 return ck802_ck801_rtx_costs (x
, code
, outer_code
, total
, speed
);
6442 else if (CSKY_TARGET_ARCH (CK803
))
6443 return ck803_rtx_costs (x
, code
, outer_code
, total
, speed
);
6444 else if (CSKY_TARGET_ARCH (CK807
) || CSKY_TARGET_ARCH (CK810
))
6445 return ck807_ck810_rtx_costs (x
, code
, outer_code
, total
, speed
);
6450 /* Emit assembly code for CASESI. This is only used on CK801 and CK802
6451 when optimizing for size, and uses helper functions in libgcc instead
6452 of doing the control transfer inline. */
6455 csky_output_casesi (rtx
*operands
)
6457 rtx diff_vec
= PATTERN (NEXT_INSN (as_a
<rtx_insn
*> (operands
[0])));
6459 gcc_assert (GET_CODE (diff_vec
) == ADDR_DIFF_VEC
);
6461 switch (GET_MODE (diff_vec
))
6464 return (ADDR_DIFF_VEC_FLAGS (diff_vec
).offset_unsigned
6465 ? "jbsr\t___gnu_csky_case_uqi"
6466 : "jbsr\t___gnu_csky_case_sqi");
6468 return (ADDR_DIFF_VEC_FLAGS (diff_vec
).offset_unsigned
6469 ? "jbsr\t___gnu_csky_case_uhi"
6470 : "jbsr\t___gnu_csky_case_shi");
6472 return "jbsr\t___gnu_csky_case_si";
6478 /* Implement TARGET_SCHED_ISSUE_RATE. Lookup the issue rate in the
6479 per-core tuning structs. */
6481 csky_sched_issue_rate (void)
6483 if (CSKY_TARGET_ARCH (CK810
))
6490 /* This function implements the target macro TARGET_SCHED_ADJUST_COST.
6491 It corrects the value of COST based on the relationship between
6492 INSN and DEP through the dependence DEP_TYPE. It returns the new
/* NOTE(review): garbled extraction -- parts of the signature (dep, cost
   parameters), declarations, braces, and every adjusted-cost return
   statement were dropped.  Kept byte-identical; restore from upstream
   gcc/config/csky/csky.c (csky_sched_adjust_cost).  */
6496 csky_sched_adjust_cost (rtx_insn
*insn
,
6500 unsigned int dw ATTRIBUTE_UNUSED
)
/* Anti/output dependences do not incur the true-dependence penalty.  */
6502 if (dep_type
== REG_DEP_ANTI
|| dep_type
== REG_DEP_OUTPUT
)
6504 /* The REG_DEP_TRUE situation. */
6505 else if (recog_memoized (insn
) >= 0 && recog_memoized (dep
) >= 0)
6507 enum attr_type insn_type
= get_attr_type (insn
);
6508 if (CSKY_TARGET_ARCH (CK803
))
6510 /* The ld or st's base reg depends on the pre insn,
6511 it will delay 1 cycle. */
6512 if (insn_type
== TYPE_LOAD
|| insn_type
== TYPE_STORE
)
6514 rtx pattern
= PATTERN (insn
);
6516 gcc_assert (GET_CODE (pattern
) == SET
);
6517 rtx addr
= (insn_type
== TYPE_LOAD
6518 ? SET_SRC (pattern
) : SET_DEST (pattern
));
6520 enum rtx_code code
= GET_CODE (addr
);
6521 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
6522 addr
= XEXP (addr
, 0);
6523 gcc_assert (GET_CODE (addr
) == MEM
);
6525 rtx base
= XEXP (addr
, 0);
6529 if (GET_CODE (base
) == PLUS
6530 && GET_CODE (XEXP (base
, 0)) == REG
)
6531 reg
= XEXP (base
, 0);
6532 if ((reg
!= NULL_RTX
) && reg_set_p (reg
, PATTERN (dep
)))
6536 else if (CSKY_TARGET_ARCH (CK802
))
/* CK802: jumps/calls depending on a non-load producer, and address
   dependences of ld/st, get adjusted costs (constants missing).  */
6538 if ((insn_type
== TYPE_CALL_JSR
|| insn_type
== TYPE_BRANCH_JMP
)
6539 && get_attr_type (dep
) != TYPE_LOAD
)
6542 if (insn_type
== TYPE_LOAD
|| insn_type
== TYPE_STORE
)
6544 rtx pattern
= PATTERN (insn
);
6546 gcc_assert (GET_CODE (pattern
) == SET
);
6548 rtx addr
= (insn_type
== TYPE_LOAD
6549 ? SET_SRC (pattern
) : SET_DEST (pattern
));
6551 enum rtx_code code
= GET_CODE (addr
);
6552 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
6553 addr
= XEXP (addr
, 0);
6554 gcc_assert (GET_CODE (addr
) == MEM
);
6556 rtx base
= XEXP (addr
, 0);
6560 if (GET_CODE (base
) == PLUS
6561 && GET_CODE (XEXP (base
, 0)) == REG
)
6562 reg
= XEXP (base
, 0);
6563 if ((reg
!= NULL_RTX
) && reg_set_p (reg
, PATTERN (dep
))
6564 && get_attr_type (dep
) != TYPE_LOAD
)
6567 if (insn_type
== TYPE_STORE
6568 && reg_referenced_p (SET_SRC (pattern
), PATTERN (dep
)))
6577 csky_warn_func_return (tree decl
)
6579 /* Naked functions are implemented entirely in assembly, including the
6580 return sequence, so suppress warnings about this. */
6581 return lookup_attribute ("naked", DECL_ATTRIBUTES (decl
)) == NULL_TREE
;
6585 /* Implement TARGET_RETURN_IN_MEMORY to decide whether TYPE should be
6586 returned in memory (true) or in a register (false).
6587 FNTYPE is the type of the function making the call. */
6589 csky_return_in_memory (const_tree type
,
6590 const_tree fntype ATTRIBUTE_UNUSED
)
6592 const HOST_WIDE_INT size
= int_size_in_bytes (type
);
6593 return (size
== -1 || size
> 2 * UNITS_PER_WORD
);
6597 /* Implement TARGET_DWARF_REGISTER_SPAN.
6598 Dwarf models VFP registers as 64-bit or 128-bit registers default.
6599 GCC models tham as 32-bit registers, so we need to describe this to
6600 the DWARF generation code. Other registers can use the default. */
6602 csky_dwarf_register_span (rtx rtl
)
6610 regno
= REGNO (rtl
);
6611 if (!CSKY_VREG_P (regno
))
6614 mode
= GET_MODE (rtl
);
6615 if (GET_MODE_SIZE (mode
) < 8)
6618 if (TARGET_SOFT_FPU
)
6620 nregs
= GET_MODE_SIZE (mode
) / 4;
6621 for (i
= 0; i
< nregs
; i
+= 2)
6622 if (TARGET_BIG_ENDIAN
)
6624 parts
[i
] = gen_rtx_REG (SImode
, regno
+ i
+ 1);
6625 parts
[i
+ 1] = gen_rtx_REG (SImode
, regno
+ i
);
6629 parts
[i
] = gen_rtx_REG (SImode
, regno
+ i
);
6630 parts
[i
+ 1] = gen_rtx_REG (SImode
, regno
+ i
+ 1);
6635 /* FIXME: dwarf2 considers all general registers to be the same
6636 as the CPU bit width. Transform the 64-bit FPU registers to
6637 32 bits here, and we will modify the unwind processing to
6638 fit CSKY architecture later. */
6639 nregs
= GET_MODE_SIZE (mode
) / 8;
6640 for (i
= 0; i
< nregs
; i
++)
6641 parts
[i
] = gen_rtx_REG (SImode
, regno
+ i
);
6644 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nregs
, parts
));
6647 /* Implement TARGET_INIT_LIBFUNCS. */
6650 csky_init_libfuncs (void)
6652 if (TARGET_CSKY_LINUX
)
6653 init_sync_libfuncs (UNITS_PER_WORD
);
6654 if (!TARGET_LIBCCRT
)
6657 #define CSKY_GCC_SYM(sym) "__csky_ccrt_" # sym
6661 /* Arithmetic functions */
6662 set_optab_libfunc (ashl_optab
, DImode
, CSKY_GCC_SYM (ashldi3
));
6663 set_optab_libfunc (ashr_optab
, DImode
, CSKY_GCC_SYM (ashrdi3
));
6664 set_optab_libfunc (sdiv_optab
, SImode
, CSKY_GCC_SYM (divsi3
));
6665 set_optab_libfunc (sdiv_optab
, DImode
, CSKY_GCC_SYM (divdi3
));
6666 set_optab_libfunc (lshr_optab
, DImode
, CSKY_GCC_SYM (lshrdi3
));
6667 set_optab_libfunc (smod_optab
, SImode
, CSKY_GCC_SYM (modsi3
));
6668 set_optab_libfunc (smod_optab
, DImode
, CSKY_GCC_SYM (moddi3
));
6669 set_optab_libfunc (smul_optab
, DImode
, CSKY_GCC_SYM (muldi3
));
6670 set_optab_libfunc (neg_optab
, DImode
, CSKY_GCC_SYM (negdi2
));
6671 set_optab_libfunc (udiv_optab
, SImode
, CSKY_GCC_SYM (udivsi3
));
6672 set_optab_libfunc (udiv_optab
, DImode
, CSKY_GCC_SYM (udivdi3
));
6673 set_optab_libfunc (udivmod_optab
, DImode
, CSKY_GCC_SYM (udivmoddi4
));
6674 set_optab_libfunc (umod_optab
, SImode
, CSKY_GCC_SYM (umodsi3
));
6675 set_optab_libfunc (umod_optab
, DImode
, CSKY_GCC_SYM (umoddi3
));
6677 /* Comparison functions */
6678 set_optab_libfunc (cmp_optab
, DImode
, CSKY_GCC_SYM (cmpdi2
));
6679 set_optab_libfunc (ucmp_optab
, DImode
, CSKY_GCC_SYM (ucmpdi2
));
6681 /* Trapping arithmetic functions */
6682 set_optab_libfunc (absv_optab
, SImode
, CSKY_GCC_SYM (absvsi2
));
6683 set_optab_libfunc (absv_optab
, DImode
, CSKY_GCC_SYM (absvdi2
));
6684 set_optab_libfunc (addv_optab
, SImode
, CSKY_GCC_SYM (addvsi3
));
6685 set_optab_libfunc (addv_optab
, DImode
, CSKY_GCC_SYM (addvdi3
));
6686 set_optab_libfunc (smulv_optab
, SImode
, CSKY_GCC_SYM (mulvsi3
));
6687 set_optab_libfunc (smulv_optab
, DImode
, CSKY_GCC_SYM (mulvdi3
));
6688 set_optab_libfunc (negv_optab
, SImode
, CSKY_GCC_SYM (negvsi2
));
6689 set_optab_libfunc (negv_optab
, DImode
, CSKY_GCC_SYM (negvdi2
));
6690 set_optab_libfunc (subv_optab
, SImode
, CSKY_GCC_SYM (subvsi3
));
6691 set_optab_libfunc (subv_optab
, DImode
, CSKY_GCC_SYM (subvdi3
));
6693 /* Bit operations */
6694 set_optab_libfunc (clz_optab
, SImode
, CSKY_GCC_SYM (clzsi2
));
6695 set_optab_libfunc (clz_optab
, DImode
, CSKY_GCC_SYM (clzdi2
));
6696 set_optab_libfunc (ctz_optab
, SImode
, CSKY_GCC_SYM (ctzsi2
));
6697 set_optab_libfunc (ctz_optab
, DImode
, CSKY_GCC_SYM (ctzdi2
));
6698 set_optab_libfunc (ffs_optab
, DImode
, CSKY_GCC_SYM (ffsdi2
));
6699 set_optab_libfunc (parity_optab
, SImode
, CSKY_GCC_SYM (paritysi2
));
6700 set_optab_libfunc (parity_optab
, DImode
, CSKY_GCC_SYM (paritydi2
));
6701 set_optab_libfunc (popcount_optab
,SImode
, CSKY_GCC_SYM (popcountsi2
));
6702 set_optab_libfunc (popcount_optab
,DImode
, CSKY_GCC_SYM (popcountdi2
));
6703 set_optab_libfunc (bswap_optab
, SImode
, CSKY_GCC_SYM (bswapsi2
));
6704 set_optab_libfunc (bswap_optab
, DImode
, CSKY_GCC_SYM (bswapdi2
));
6708 /* Arithmetic functions */
6709 set_optab_libfunc (add_optab
, SFmode
, CSKY_GCC_SYM (addsf3
));
6710 set_optab_libfunc (add_optab
, DFmode
, CSKY_GCC_SYM (adddf3
));
6711 set_optab_libfunc (sub_optab
, SFmode
, CSKY_GCC_SYM (subsf3
));
6712 set_optab_libfunc (sub_optab
, DFmode
, CSKY_GCC_SYM (subdf3
));
6713 set_optab_libfunc (smul_optab
, SFmode
, CSKY_GCC_SYM (mulsf3
));
6714 set_optab_libfunc (smul_optab
, DFmode
, CSKY_GCC_SYM (muldf3
));
6715 set_optab_libfunc (sdiv_optab
, SFmode
, CSKY_GCC_SYM (divsf3
));
6716 set_optab_libfunc (sdiv_optab
, DFmode
, CSKY_GCC_SYM (divdf3
));
6717 set_optab_libfunc (neg_optab
, SFmode
, CSKY_GCC_SYM (negsf2
));
6718 set_optab_libfunc (neg_optab
, DFmode
, CSKY_GCC_SYM (negdf2
));
6720 /* Conversion functions */
6721 set_conv_libfunc (sext_optab
, DFmode
, SFmode
, CSKY_GCC_SYM (extendsfdf2
));
6722 set_conv_libfunc (trunc_optab
, SFmode
, DFmode
, CSKY_GCC_SYM (truncdfsf2
));
6723 set_conv_libfunc (sfix_optab
, SImode
, SFmode
, CSKY_GCC_SYM (fixsfsi
));
6724 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, CSKY_GCC_SYM (fixdfsi
));
6725 set_conv_libfunc (sfix_optab
, DImode
, SFmode
, CSKY_GCC_SYM (fixsfdi
));
6726 set_conv_libfunc (sfix_optab
, DImode
, DFmode
, CSKY_GCC_SYM (fixdfdi
));
6727 set_conv_libfunc (ufix_optab
, SImode
, SFmode
, CSKY_GCC_SYM (fixunssfsi
));
6728 set_conv_libfunc (ufix_optab
, SImode
, DFmode
, CSKY_GCC_SYM (fixunsdfsi
));
6729 set_conv_libfunc (ufix_optab
, DImode
, SFmode
, CSKY_GCC_SYM (fixunssfdi
));
6730 set_conv_libfunc (ufix_optab
, DImode
, DFmode
, CSKY_GCC_SYM (fixunsdfdi
));
6731 set_conv_libfunc (sfloat_optab
, SFmode
, SImode
, CSKY_GCC_SYM (floatsisf
));
6732 set_conv_libfunc (sfloat_optab
, DFmode
, SImode
, CSKY_GCC_SYM (floatsidf
));
6733 set_conv_libfunc (sfloat_optab
, SFmode
, DImode
, CSKY_GCC_SYM (floatdisf
));
6734 set_conv_libfunc (sfloat_optab
, DFmode
, DImode
, CSKY_GCC_SYM (floatdidf
));
6735 set_conv_libfunc (ufloat_optab
, SFmode
, SImode
, CSKY_GCC_SYM (floatunsisf
));
6736 set_conv_libfunc (ufloat_optab
, DFmode
, SImode
, CSKY_GCC_SYM (floatunsidf
));
6737 set_conv_libfunc (ufloat_optab
, SFmode
, DImode
, CSKY_GCC_SYM (floatundisf
));
6738 set_conv_libfunc (ufloat_optab
, DFmode
, DImode
, CSKY_GCC_SYM (floatundidf
));
6740 /* Comparison functions */
6741 set_optab_libfunc (cmp_optab
, SFmode
, CSKY_GCC_SYM (cmpsf2
));
6742 set_optab_libfunc (cmp_optab
, DFmode
, CSKY_GCC_SYM (cmpdf2
));
6743 set_optab_libfunc (unord_optab
, SFmode
, CSKY_GCC_SYM (unordsf2
));
6744 set_optab_libfunc (unord_optab
, DFmode
, CSKY_GCC_SYM (unorddf2
));
6745 set_optab_libfunc (eq_optab
, SFmode
, CSKY_GCC_SYM (eqsf2
));
6746 set_optab_libfunc (eq_optab
, DFmode
, CSKY_GCC_SYM (eqdf2
));
6747 set_optab_libfunc (ne_optab
, SFmode
, CSKY_GCC_SYM (nesf2
));
6748 set_optab_libfunc (ne_optab
, DFmode
, CSKY_GCC_SYM (nedf2
));
6749 set_optab_libfunc (ge_optab
, SFmode
, CSKY_GCC_SYM (gesf2
));
6750 set_optab_libfunc (ge_optab
, DFmode
, CSKY_GCC_SYM (gedf2
));
6751 set_optab_libfunc (lt_optab
, SFmode
, CSKY_GCC_SYM (ltsf2
));
6752 set_optab_libfunc (lt_optab
, DFmode
, CSKY_GCC_SYM (ltdf2
));
6753 set_optab_libfunc (le_optab
, SFmode
, CSKY_GCC_SYM (lesf2
));
6754 set_optab_libfunc (le_optab
, DFmode
, CSKY_GCC_SYM (ledf2
));
6755 set_optab_libfunc (gt_optab
, SFmode
, CSKY_GCC_SYM (gtsf2
));
6756 set_optab_libfunc (gt_optab
, DFmode
, CSKY_GCC_SYM (gtdf2
));
6760 /* Implement TARGET_ADDRESS_COST to estimate cost of the memory address X.
6761 For C-SKY, (register) and (register + offset) have the same cost.
6762 Other situations cost more. */
6765 csky_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
6766 addr_space_t as ATTRIBUTE_UNUSED
,
6767 bool speed ATTRIBUTE_UNUSED
)
6769 enum rtx_code code
= GET_CODE (x
);
6772 return COSTS_N_INSNS (1);
6774 && REG_P (XEXP (x
, 0))
6775 && CONST_INT_P (XEXP (x
, 1)))
6776 return COSTS_N_INSNS (1);
6778 return COSTS_N_INSNS (3);
6782 /* Implement TARGET_FIXED_CONDITION_CODE_REGS. */
6785 csky_fixed_condition_code_regs (unsigned int *p1
, unsigned int *p2
)
6787 *p1
= CSKY_CC_REGNUM
;
6788 *p2
= INVALID_REGNUM
;
6793 struct gcc_target targetm
= TARGET_INITIALIZER
;
6795 #include "gt-csky.h"