/* GCC backend functions for C-SKY targets.
   Copyright (C) 2018-2024 Free Software Foundation, Inc.
   Contributed by C-SKY Microsystems and Mentor Graphics.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "attribs.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "c-family/c-common.h"
#include "cpplib.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "explow.h"
#include "expr.h"
#include "cfgrtl.h"
#include "sched-int.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "intl.h"
#include "libfuncs.h"
#include "opts.h"
#include "dumpfile.h"
#include "target-globals.h"
#include "builtins.h"
#include "tm-constrs.h"
#include "rtl-iter.h"
#include "pass_manager.h"
#include "tree-pass.h"
#include "context.h"
#include "opts.h"

/* This file should be included last.  */
#include "target-def.h"

/* Stack and register size macros.  */

#define CSKY_NUM_WORDS(SIZE) \
  (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
#define CSKY_NUM_REGS(MODE) \
  CSKY_NUM_WORDS (GET_MODE_SIZE (MODE))
#define CSKY_STACK_ALIGN(SIZE) \
  (CSKY_NUM_WORDS (SIZE) * UNITS_PER_WORD)

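/* Illustrative example only (not part of the original source): assuming the
   usual 32-bit configuration where UNITS_PER_WORD == 4, these macros round
   a byte size up to whole words, e.g.

     CSKY_NUM_WORDS (10)   == (10 + 4 - 1) / 4 == 3
     CSKY_STACK_ALIGN (10) == 3 * 4            == 12 bytes.  */
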
/* Offsets and range macros.  */

#define CSKY_LD16_MAX_OFFSET(MODE) \
  (31 * GET_MODE_SIZE (MODE))
#define CSKY_LD32_MAX_OFFSET(MODE) \
  (4095 * GET_MODE_SIZE (MODE))
#define CSKY_LD16_OFFSET_MASK(MODE) \
  (CSKY_LD16_MAX_OFFSET (MODE) + GET_MODE_SIZE (MODE) - 1)

#define CSKY_ADDI16_MAX_IMM 256
#define CSKY_SUBI16_MAX_IMM 256

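/* Illustrative example only (not part of the original source): for SImode,
   whose GET_MODE_SIZE is 4,

     CSKY_LD16_MAX_OFFSET (SImode)  == 31 * 4   == 124
     CSKY_LD32_MAX_OFFSET (SImode)  == 4095 * 4 == 16380
     CSKY_LD16_OFFSET_MASK (SImode) == 124 + 4 - 1 == 127.  */
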
#define CSKY_CONSTPOOL_LABEL_PREFIX "LCP"

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS.  */
enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER] =
{
  /* Registers r0-r7.  */
  MINI_REGS, MINI_REGS, MINI_REGS, MINI_REGS,
  MINI_REGS, MINI_REGS, MINI_REGS, MINI_REGS,
  /* Registers r8-r15.  */
  LOW_REGS, LOW_REGS, LOW_REGS, LOW_REGS,
  LOW_REGS, LOW_REGS, SP_REGS, LOW_REGS,
  /* Registers r16-r31.  */
  GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
  GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
  GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
  GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
  /* Reserved.  */
  RESERVE_REGS,
  /* CC, HI, LO registers.  */
  C_REGS, HILO_REGS, HILO_REGS,
  /* Reserved.  */
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  /* Vec registers.  */
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  /* Reserved.  */
  RESERVE_REGS, RESERVE_REGS,
  /* Register epc.  */
  OTHER_REGS,
  /* Vec registers.  */
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  V_REGS, V_REGS, V_REGS, V_REGS,
  /* Reserved.  */
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  /* Reserved.  */
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,

  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,

  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,
  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS, RESERVE_REGS,

  RESERVE_REGS, RESERVE_REGS, RESERVE_REGS
};

/* Array mapping GCC register numbers to debugger register numbers;
   -1 means INVALID_REGNUM.
   TODO: document the numbering scheme used here.  */
const int csky_debugger_regno[FIRST_PSEUDO_REGISTER] =
{
  0, 1, 2, 3, 4, 5, 6, 7,
  8, 9, 10, 11, 12, 13, 14, 15,
  16, 17, 18, 19, 20, 21, 22, 23,
  24, 25, 26, 27, 28, 29, 30, 31,
  -1, -1, 36, 37,
  75, 79, 83, 87, 91, 95, 99, 103,
  107, 111, 115, 119, 123, 127, 131, 135,
  74, 78, 82, 86, 90, 94, 98, 102,
  106, 110, 114, 118, 122, 126, 130, 134,
  -1, -1, 72,
  /* vr: 71 - 86 */
  139, 143, 147, 151, 155, 159, 163, 167,
  171, 175, 179, 183, 187, 191, 195, 199,
  138, 142, 146, 150, 154, 158, 162, 166,
  170, 174, 178, 182, 186, 190, 194, 198,
  /* Reserved.  */
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,

  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,

  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1,

  -1, -1, -1
};

/* Table of machine attributes.  */
static tree csky_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree csky_handle_isr_attribute (tree *, tree, tree, int, bool *);
TARGET_GNU_ATTRIBUTES (csky_attribute_table,
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "naked", 0, 0, true, false, false, false, csky_handle_fndecl_attribute, NULL },
  /* Interrupt Service Routines have special prologue and epilogue requirements.  */
  { "interrupt", 0, 1, false, false, false, false, csky_handle_isr_attribute, NULL },
  { "isr", 0, 1, false, false, false, false, csky_handle_isr_attribute, NULL }
});

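/* Illustrative example only (not part of the original source): in user code
   these attributes are applied to function declarations, e.g.

     void handler (void) __attribute__ ((interrupt ("irq")));
     void stub (void) __attribute__ ((naked));

   "interrupt"/"isr" take an optional string argument selecting the handler
   kind (see isr_attribute_map below); "naked" suppresses prologue and
   epilogue generation.  */
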
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct GTY(()) machine_function
{
  /* Records if LR has to be saved for far jumps.  */
  int far_jump_used;
  /* Records the type of the current function.  */
  unsigned long func_type;
  /* Record if the function has a variable argument list.  */
  int uses_anonymous_args;

  /* Stack frame layout information.  If frame_init_p is true,
     these fields have been initialized and don't need to be
     recomputed.  */
  unsigned int reg_mask;   /* non-volatile reg saves */
  int arg_size;            /* stdarg spills (bytes) */
  int reg_size;            /* non-volatile reg saves (bytes) */
  int local_size;          /* locals */
  int outbound_size;       /* arg overflow on calls out */
  int frame_size;          /* total static size of stack frame */
  int local_offset;
  int reg_offset;
  int arg_offset;
  int frame_init_p;

} machine_function;

/* These macros are for the func_type values above.  */
#define CSKY_FT_TYPE_MASK   ((1 << 3) - 1)
#define CSKY_FT_UNKNOWN     0          /* Type not yet determined.  */
#define CSKY_FT_NORMAL      1          /* Normal function.  */
#define CSKY_FT_ISR         4          /* Interrupt service routine.  */
#define CSKY_FT_FIQ         5          /* Fast interrupt service routine.  */
#define CSKY_FT_EXCEPTION   6          /* Exception handler.  */
#define CSKY_FT_INTERRUPT   (1 << 2)   /* Overlaps with CSKY_FT_ISR.  */
#define CSKY_FT_NAKED       (1 << 3)   /* No prologue and epilogue.  */
#define CSKY_FUNCTION_TYPE(t)           ((t) & CSKY_FT_TYPE_MASK)
#define CSKY_FUNCTION_IS_INTERRUPT(t)   ((t) & CSKY_FT_INTERRUPT)
#define CSKY_FUNCTION_IS_NAKED(t)       ((t) & CSKY_FT_NAKED)

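/* Illustrative example only (not part of the original source): func_type
   combines a type code in the low bits with flag bits above it.  A naked
   IRQ handler has func_type == (CSKY_FT_ISR | CSKY_FT_NAKED) == 12, for
   which CSKY_FUNCTION_TYPE() yields CSKY_FT_ISR and both
   CSKY_FUNCTION_IS_INTERRUPT() and CSKY_FUNCTION_IS_NAKED() are nonzero.  */
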
struct csky_processors
{
  const char *const name;
  enum csky_processor_type core;
  const char *arch;
  enum csky_base_architecture base_arch;
  enum csky_isa_feature isa_bits[CSKY_ISA_FEATURE_GET (max)];
};

static struct csky_processors all_cores[] =
{
#undef CSKY_CORE
#define CSKY_CORE(NAME, CORE, X, ARCH, ISA) \
  {NAME, TARGET_CPU_##CORE, #ARCH, CSKY_BASE_ARCH_##ARCH, \
  {ISA CSKY_ISA_FEATURE_GET (none)}},
#include "csky_cores.def"
#undef CSKY_CORE
  {NULL, TARGET_CPU_csky_none, NULL, CSKY_BASE_ARCH_NONE, \
  {CSKY_ISA_FEATURE_GET (none)}}
};

static struct csky_processors all_architectures[] =
{
#undef CSKY_ARCH
#define CSKY_ARCH(NAME, CORE, ARCH, ISA) \
  {NAME, TARGET_CPU_##CORE, #ARCH, CSKY_BASE_ARCH_##ARCH, \
  {ISA CSKY_ISA_FEATURE_GET (none)}},
#include "csky_cores.def"
#undef CSKY_ARCH
  {NULL, TARGET_CPU_csky_none, NULL, CSKY_BASE_ARCH_NONE, \
  {CSKY_ISA_FEATURE_GET (none)}}
};

struct csky_fpu_desc
{
  const char *name;
  enum csky_isa_feature isa_bits[CSKY_ISA_FEATURE_GET (max)];
};

static const struct csky_fpu_desc all_fpus[] =
{
#undef CSKY_FPU
#define CSKY_FPU(NAME, CNAME, ISA) \
  {NAME, {ISA CSKY_ISA_FEATURE_GET (none)}},
#include "csky_cores.def"
#undef CSKY_FPU
};

/* Active target architecture.  */
struct csky_build_target
{
  /* Name of the target CPU, if known, or NULL if the target CPU was not
     specified by the user (and inferred from the -march option).  */
  const char *core_name;
  /* Name of the target ARCH.  NULL if there is a selected CPU.  */
  const char *arch_name;
  /* Preprocessor substring (never NULL).  */
  const char *arch_pp_name;
  /* CPU identifier for the core we're compiling for (architecturally).  */
  enum csky_processor_type arch_core;
  /* The base architecture value.  */
  enum csky_base_architecture base_arch;
  /* Bitmap encapsulating the isa_bits for the target environment.  */
  sbitmap isa;
};

struct csky_build_target csky_active_target;

/* The following are used in the .md file as equivalents to bits.  */
int csky_arch_isa_features[CSKY_ISA_FEATURE_GET (max)] = {0};

/* The highest CSKY architecture version supported by the target.  */
enum csky_base_architecture csky_base_arch = CSKY_TARGET_ARCH_GET (NONE);

/* Forward definitions of types.  */
typedef struct minipool_node Mnode;
typedef struct minipool_fixup Mfix;

static GTY(()) int tls_labelno;


/* Maximum constant offset that can be added/subtracted from SP in a
   single instruction.  For ck801, this is for addsp/subsp, otherwise
   it is the range of addi/subi.  */
#define CSKY_MAX_SP_ADJUST \
  (CSKY_TARGET_ARCH (CK801) ? 508 : 4096)


/* Implement TARGET_CPU_CPP_BUILTINS.  */

#define builtin_define(MACRO) cpp_define (pfile, MACRO)

void
csky_cpu_cpp_builtins (cpp_reader *pfile)
{
  const char *arch_name = csky_active_target.arch_pp_name;
  char *pp_name = (char *) alloca (1 + strlen (arch_name) + 4);
  sprintf (pp_name, "__%s__", arch_name);
  builtin_define (pp_name);

  builtin_define ("__csky__=2");
  builtin_define ("__CSKY__=2");
  builtin_define ("__ckcore__=2");
  builtin_define ("__CKCORE__=2");

  builtin_define ("__CSKYABIV2__");
  builtin_define ("__cskyabiv2__");
  builtin_define ("__CSKYABI__=2");
  builtin_define ("__cskyabi__=2");

  if (TARGET_BIG_ENDIAN)
    {
      builtin_define ("__ckcoreBE__");
      builtin_define ("__cskyBE__");
      builtin_define ("__cskybe__");
      builtin_define ("__CSKYBE__");
    }
  else
    {
      builtin_define ("__ckcoreLE__");
      builtin_define ("__cskyLE__");
      builtin_define ("__cskyle__");
      builtin_define ("__CSKYLE__");
    }

  if (TARGET_HARD_FLOAT)
    {
      builtin_define ("__csky_hard_float__");
      builtin_define ("__CSKY_HARD_FLOAT__");
      if (TARGET_HARD_FLOAT_ABI)
        {
          builtin_define ("__csky_hard_float_abi__");
          builtin_define ("__CSKY_HARD_FLOAT_ABI__");
        }
      else
        {
          builtin_define ("__csky_soft_float_abi__");
          builtin_define ("__CSKY_SOFT_FLOAT_ABI__");
        }
      if (TARGET_SINGLE_FPU)
        {
          builtin_define ("__csky_hard_float_fpu_sf__");
          builtin_define ("__CSKY_HARD_FLOAT_FPU_SF__");
        }
    }
  else
    {
      builtin_define ("__csky_soft_float__");
      builtin_define ("__CSKY_SOFT_FLOAT__");
    }

  if (CSKY_ISA_FEATURE (fpv2_sf))
    {
      builtin_define ("__csky_fpuv2__");
      builtin_define ("__CSKY_FPUV2__");
    }

  if (TARGET_SUPPORT_FPV3)
    {
      builtin_define ("__csky_fpuv3__");
      builtin_define ("__CSKY_FPUV3__");
    }

  if (TARGET_ELRW)
    {
      builtin_define ("__csky_elrw__");
      builtin_define ("__CSKY_ELRW__");
    }
  if (TARGET_ISTACK)
    {
      builtin_define ("__csky_istack__");
      builtin_define ("__CSKY_ISTACK__");
    }
  if (TARGET_MP)
    {
      builtin_define ("__csky_mp__");
      builtin_define ("__CSKY_MP__");
    }
  if (TARGET_CP)
    {
      builtin_define ("__csky_cp__");
      builtin_define ("__CSKY_CP__");
    }
  if (TARGET_CACHE)
    {
      builtin_define ("__csky_cache__");
      builtin_define ("__CSKY_CACHE__");
    }
  if (TARGET_SECURITY)
    {
      builtin_define ("__csky_security__");
      builtin_define ("__CSKY_SECURITY__");
    }
  if (TARGET_TRUST)
    {
      builtin_define ("__csky_trust__");
      builtin_define ("__CSKY_TRUST__");
    }
  if (TARGET_DSP)
    {
      builtin_define ("__csky_dsp__");
      builtin_define ("__CSKY_DSP__");
    }
  if (TARGET_EDSP)
    {
      builtin_define ("__csky_edsp__");
      builtin_define ("__CSKY_EDSP__");
    }
  if (TARGET_VDSP)
    {
      builtin_define ("__csky_vdsp__");
      builtin_define ("__CSKY_VDSP__");
    }
}

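/* Illustrative example only (not part of the original source): user code can
   test these predefined macros, e.g.

     #if defined (__csky__) && defined (__CSKY_HARD_FLOAT__)
     // hard-float C-SKY build
     #endif

   The endianness, ABI, FPU and DSP macros defined above follow the
   corresponding target options.  */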

/******************************************************************
 *                        Storage Layout                          *
 ******************************************************************/

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT csky_constant_alignment

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE csky_mangle_type


/******************************************************************
 *            Stack Layout and Calling Conventions                *
 ******************************************************************/

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE csky_can_eliminate

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG csky_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE csky_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE csky_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE csky_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P csky_function_value_regno_p

#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES csky_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK csky_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE csky_output_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE csky_output_function_epilogue

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN csky_warn_func_return

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY csky_return_in_memory


/******************************************************************
 *              Implementing the Varargs Macros                   *
 ******************************************************************/


#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS csky_setup_incoming_varargs


/******************************************************************
 *            Implicit Calls to Library Routines                  *
 ******************************************************************/


#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS csky_init_libfuncs


/******************************************************************
 *    Dividing the Output into Sections (Texts, Data, . . . )     *
 ******************************************************************/


#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_CSKY_LINUX


/******************************************************************
 *        Defining target-specific uses of __attribute__          *
 ******************************************************************/


#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE csky_attribute_table

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE csky_option_override


/* Implement the BRANCH_COST target macro.  */

int
csky_default_branch_cost (bool speed_p ATTRIBUTE_UNUSED,
                          bool predictable_p ATTRIBUTE_UNUSED)
{
  return csky_branch_cost;
}

bool
csky_default_logical_op_non_short_circuit (void)
{
  return BRANCH_COST (optimize_function_for_speed_p (cfun), false) >= 2;
}

/******************************************************************
 *                       Register Usage                           *
 ******************************************************************/

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS csky_hard_regno_nregs

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK csky_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P csky_modes_tieable_p

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE csky_conditional_register_usage

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P csky_class_likely_spilled_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS csky_preferred_reload_class

#undef TARGET_CLASS_MAX_NREGS
#define TARGET_CLASS_MAX_NREGS csky_class_max_nregs

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD csky_secondary_reload

#undef TARGET_SPILL_CLASS
#define TARGET_SPILL_CLASS csky_spill_class


/******************************************************************
 *                      Addressing Modes                          *
 ******************************************************************/


#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM csky_cannot_force_const_mem

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P csky_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS csky_legitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P csky_legitimate_address_p


/******************************************************************
 *                           Others                               *
 ******************************************************************/


#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P csky_cannot_copy_insn_p


/******************************************************************
 *                      Assembler Format                          *
 ******************************************************************/


#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND csky_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS csky_print_operand_address

#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"

#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN csky_dwarf_register_span


/******************************************************************
 *                  Miscellaneous Parameters                      *
 ******************************************************************/


#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG csky_reorg

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS csky_allocate_stack_slots_for_args

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed


/******************************************************************
 *              Trampolines for Nested Functions                  *
 ******************************************************************/


#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE csky_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT csky_trampoline_init

/* The low bit is ignored by jsr and jmp instructions, so it is safe to use.  */
#undef TARGET_CUSTOM_FUNCTION_DESCRIPTORS
#define TARGET_CUSTOM_FUNCTION_DESCRIPTORS 1

/******************************************************************
 *          Describing Relative Costs of Operations               *
 ******************************************************************/


#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST csky_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST csky_memory_move_cost

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS csky_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST csky_address_cost


/******************************************************************
 *                       Anchor address                           *
 ******************************************************************/


/* FIXME: the maximum offset depends on the mode size; the value below
   is defined for SImode.  How should HImode and QImode be handled, and
   should a minimum offset also be defined?  */
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET \
  ((TARGET_MINI_REGISTERS && optimize_size) ? 127 : 4095)


/******************************************************************
 *                   Condition Code Status                        *
 ******************************************************************/


#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS csky_fixed_condition_code_regs


/******************************************************************
 *           Adjusting the Instruction Scheduler                  *
 ******************************************************************/


#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE csky_sched_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST csky_sched_adjust_cost


/******************************************************************
 *                          Builtin                               *
 ******************************************************************/


#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS csky_init_builtins

/* Forward declarations of functions.  */
static void push_csky_minipool_fix (rtx_insn *, HOST_WIDE_INT, rtx *,
                                    machine_mode, rtx);
static void csky_print_operand (FILE *stream, rtx x, int code);


/* Define a table to map ISR attribute arguments onto function type
   modifiers.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
} isr_attribute_entry;

static const isr_attribute_entry isr_attribute_map[] =
{
  {"irq", CSKY_FT_ISR },
  {"IRQ", CSKY_FT_ISR },
  {"fiq", CSKY_FT_FIQ },
  {"FIQ", CSKY_FT_FIQ },
  {NULL, CSKY_FT_NORMAL }
};


/* Return the function type corresponding to the ISR attribute argument
   list ARGUMENT; return CSKY_FT_UNKNOWN if it cannot be determined.  */

static unsigned long
get_csky_isr_type (tree argument)
{
  const isr_attribute_entry *ptr;
  const char *arg;

  /* If the argument is NULL, default to ISR.  */
  if (argument == NULL_TREE)
    return CSKY_FT_ISR;

  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return CSKY_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  for (ptr = isr_attribute_map; ptr->arg != NULL; ptr++)
    if (strcmp (arg, ptr->arg) == 0)
      return ptr->return_value;

  return CSKY_FT_UNKNOWN;
}

/* Classify cfun as a normal function or some sort of interrupt
   handler, and set the corresponding bits in cfun->machine->func_type.  */

static unsigned long
get_csky_current_func_type (void)
{
  if (CSKY_FUNCTION_TYPE (cfun->machine->func_type) == CSKY_FT_UNKNOWN)
    {
      unsigned long type = CSKY_FT_UNKNOWN;
      tree a;
      tree attr;

      gcc_assert (TREE_CODE (current_function_decl) == FUNCTION_DECL);

      attr = DECL_ATTRIBUTES (current_function_decl);
      a = lookup_attribute ("naked", attr);
      if (a != NULL_TREE)
        type |= CSKY_FT_NAKED;
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);
      if (a == NULL_TREE)
        type |= CSKY_FT_NORMAL;
      else
        type |= get_csky_isr_type (TREE_VALUE (a));

      cfun->machine->func_type = type;
    }

  return cfun->machine->func_type;
}

/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */

struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode *next;
  Mnode *prev;
  /* The maximum offset into the code at which this entry can be placed.
     While pushing fixes for forward references, all entries are sorted
     in order of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in the table.  */
  rtx value;
  /* The mode of the value.  */
  machine_mode mode;
  /* The size of the value.  */
  int fix_size;
};

struct minipool_fixup
{
  Mfix *next;
  rtx_insn *insn;
  HOST_WIDE_INT address;
  rtx *loc;
  machine_mode mode;
  int fix_size;
  rtx value;
  Mnode *minipool;
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};

static Mnode *minipool_vector_head;
static Mnode *minipool_vector_tail;
static rtx minipool_vector_label;
static HOST_WIDE_INT constpool_label_no = 0;

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *minipool_startobj;
/* The linked list of all minipool fixes required for this function.  */
Mfix *minipool_fix_head;
Mfix *minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix *minipool_barrier;

/* Allow GC scanning of the minipool obstack.  */

static void
csky_add_gc_roots (void)
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}

/* Implement TARGET_CONSTANT_ALIGNMENT.
   Make strings word-aligned so strcpy from constants will be faster.  */

static HOST_WIDE_INT
csky_constant_alignment (const_tree exp, HOST_WIDE_INT align)
{
  if (TREE_CODE (exp) == STRING_CST
      && !optimize_size
      && align < BITS_PER_WORD)
    return BITS_PER_WORD;
  return align;
}

/* Record that there is a natural barrier in the insn stream at
   ADDRESS.  */

static void
push_csky_minipool_barrier (rtx_insn *insn, HOST_WIDE_INT address)
{
  Mfix *fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*fix));

  fix->insn = insn;
  fix->address = address;

  fix->next = NULL;
  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}

/* Compute the size of a vector jump table.  */

static HOST_WIDE_INT
get_csky_jump_table_size (rtx insn)
{
  /* ADDR_VECs only take room if read-only data goes into the text
     section.  */
  if (JUMP_TABLES_IN_TEXT_SECTION || readonly_data_section == text_section)
    {
      rtx body = PATTERN (insn);
      int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
      HOST_WIDE_INT size;
      HOST_WIDE_INT modesize;

      modesize = GET_MODE_SIZE (GET_MODE (body));
      size = modesize * XVECLEN (body, elt);
      switch (modesize)
        {
        case 1:
          /* Round up size of TBB table to a halfword boundary.  */
          size = (size + 1) & ~(HOST_WIDE_INT)1;
          break;
        case 2:
          /* No padding necessary for TBH.  */
          break;
        case 4:
          break;
        default:
          gcc_unreachable ();
        }
      return size;
    }

  return 0;
}

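/* Illustrative example only (not part of the original source): for a QImode
   ADDR_DIFF_VEC with 11 entries the raw size is 1 * 11 == 11 bytes, which
   the case-1 rounding pads to 12; an HImode table with 11 entries is simply
   2 * 11 == 22 bytes and needs no padding.  */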

/* Scan INSN and note any of its operands that need fixing.
   If DO_PUSHES is false we do not actually push any of the fixups
   needed.  The function returns TRUE if any fixups were needed/pushed.  */

static bool
note_csky_invalid_constants (rtx_insn *insn, HOST_WIDE_INT address,
                             int do_pushes)
{
  bool result = false;
  int opno;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0)
    return false;

  /* Fill in recog_op_alt with information about the constraints of
     this insn.  */
  preprocess_constraints (insn);

  const operand_alternative *op_alt = which_op_alt ();
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
        continue;

      /* If this alternative is a memory reference, then any mention
         of constants in this alternative is really to fool reload
         into allowing us to accept one there.  We need to fix them up
         now so that we output the right code.  */
      if (op_alt[opno].memory_ok)
        {
          rtx op = recog_data.operand[opno];

          if (CONSTANT_P (op))
            {
              if (do_pushes)
                push_csky_minipool_fix (insn, address,
                                        recog_data.operand_loc[opno],
                                        recog_data.operand_mode[opno], op);
              result = true;
            }
        }
    }

  return result;
}


/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */

static Mnode *
add_csky_minipool_forward_ref (Mfix *fix)
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode *max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode *mp;

  /* If the minipool starts before the end of FIX->INSN then this FIX
     cannot be placed into the current pool.  Furthermore, adding the
     new constant pool entry may cause the pool to start FIX_SIZE bytes
     earlier.  */
  if (minipool_vector_head
      && (fix->address + get_attr_length (fix->insn)
          >= minipool_vector_head->max_address - fix->fix_size))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
          && fix->mode == mp->mode
          && (GET_CODE (fix->value) != CODE_LABEL
              || (CODE_LABEL_NUMBER (fix->value)
                  == CODE_LABEL_NUMBER (mp->value)))
          && rtx_equal_p (fix->value, mp->value))
        {
          /* More than one fix references this entry.  */
          mp->refcount++;
          return mp;
        }

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL && mp->max_address > max_address)
        max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = XNEW (Mnode);
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
        {
          minipool_vector_head = mp;
          minipool_vector_label
            = gen_csky_constpool_label (gen_rtx_CONST_INT (VOIDmode,
                                                           constpool_label_no++));
        }
      else
        mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
        mp->max_address = max_mp->max_address - mp->fix_size;
      else
        mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
        mp->prev->next = mp;
      else
        minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
         && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}


/* Return the cost of forcibly inserting a barrier after INSN.  */

static int
get_csky_barrier_cost (rtx_insn *insn)
{
  /* Basing the location of the pool on the loop depth is preferable,
     but at the moment, the basic block information seems to be
     corrupt by this stage of the compilation.  */
  int base_cost = 50;
  rtx next = next_nonnote_insn (insn);

  if (next != NULL && GET_CODE (next) == CODE_LABEL)
    base_cost -= 20;

  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* It will always be better to place the table before the label, rather
         than after it.  */
      return 50;

    case INSN:
    case CALL_INSN:
      return base_cost;

    case JUMP_INSN:
      return base_cost - 10;

    default:
      return base_cost + 10;
    }
}


/* Find the best place in the insn stream in the range
   (FIX->address, MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  */

static Mfix *
create_csky_fix_barrier (Mfix *fix, Mfix *fix_next,
                         HOST_WIDE_INT max_address)
{
  rtx_barrier *barrier;
  rtx_insn *from = (fix ? fix->insn : get_insns ());
  /* The instruction after which we will insert the jump.  */
  rtx_insn *selected = NULL;
  int selected_cost;
  /* The address at which the jump instruction will be placed.  */
  HOST_WIDE_INT selected_address = 0;
  Mfix *new_fix;
  HOST_WIDE_INT count = (fix ? fix->address : 0);
  HOST_WIDE_INT max_count = max_address;
  rtx_code_label *label = gen_label_rtx ();

  selected_cost = get_csky_barrier_cost (from);

  while (from && count < max_count)
    {
      int new_cost;
      rtx_jump_table_data *table;

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      if (tablejump_p (from, NULL, &table))
        {
          count += get_csky_jump_table_size (table);

          /* Jump tables aren't in a basic block, so base the cost on
             the dispatch insn.  If we select this location, we will
             still put the pool after the table.  */
          new_cost = get_csky_barrier_cost (from);

          if (count < max_count
              && (!selected || new_cost <= selected_cost))
            {
              selected = table;
              selected_cost = new_cost;
              selected_address = count;
            }

          /* Continue after the dispatch table.  */
          from = NEXT_INSN (table);
          continue;
        }

      new_cost = get_csky_barrier_cost (from);

      if (count < max_count
          && (!selected || new_cost <= selected_cost))
        {
          selected = from;
          selected_cost = new_cost;
          selected_address = count;
        }

      from = NEXT_INSN (from);
    }

  /* Make sure that we found a place to insert the jump.  */
  gcc_assert (selected);

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  if (fix)
    {
      new_fix->next = fix->next;
      fix->next = new_fix;
    }
  else
    new_fix->next = fix_next;

  return new_fix;
}


/* Print a symbolic form of the constant X to the dump file F.
   This is used for dump output for -mconstpool in the target-dependent
   reorg pass.  */

static void
print_csky_value (FILE *f, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
      return;

    case CONST_VECTOR:
      {
        int i;

        fprintf (f, "<");
        for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
          {
            fprintf (f, HOST_WIDE_INT_PRINT_HEX,
                     INTVAL (CONST_VECTOR_ELT (x, i)));
            if (i < (CONST_VECTOR_NUNITS (x) - 1))
              fputc (',', f);
          }
        fprintf (f, ">");
      }
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      print_csky_value (f, XEXP (x, 0));
      return;

    case PLUS:
      print_csky_value (f, XEXP (x, 0));
      fprintf (f, "+");
      print_csky_value (f, XEXP (x, 1));
      return;

    case PC:
      fprintf (f, "pc");
      return;

    default:
      fprintf (f, "????");
      return;
    }
}


/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */

static void
push_csky_minipool_fix (rtx_insn *insn, HOST_WIDE_INT address, rtx *loc,
                        machine_mode mode, rtx value)
{
  #define CSKY_ELRW16_RANGE  1400
  #define CSKY_LRW16_RANGE   700
  #define CSKY_CONSTANT_POOL_RANGE (TARGET_ELRW ? CSKY_ELRW16_RANGE \
                                                : CSKY_LRW16_RANGE)

  /* Fixes less than a word need padding out to a word boundary.  */
  #define CSKY_MINIPOOL_FIX_SIZE(mode) \
    (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

  Mfix *fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*fix));

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = CSKY_MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  fix->forwards = CSKY_CONSTANT_POOL_RANGE;
  fix->backwards = 0;
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to stop now than
     to generate duff assembly code.  */
  gcc_assert (fix->forwards || fix->backwards);

  if (dump_file)
    {
      fprintf (dump_file,
               ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
               GET_MODE_NAME (mode),
               INSN_UID (insn), (unsigned long) address,
               -1 * (long)fix->backwards, (long)fix->forwards);
      print_csky_value (dump_file, fix->value);
      fprintf (dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}

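/* Illustrative example only (not part of the original source): with the
   macros above, an SImode fixup gets fix_size == 4 and an HImode fixup is
   padded up to 4, while a DImode constant needs 8 bytes; the forward range
   is 1400 bytes when TARGET_ELRW is set and 700 bytes otherwise.  */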

/* Fill in the offsets for minipool entries.  */

static void
assign_csky_minipool_offsets (Mfix *barrier)
{
  HOST_WIDE_INT offset = 0;
  Mnode *mp;

  minipool_barrier = barrier;

  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;

      if (mp->refcount > 0)
        offset += mp->fix_size;
    }
}


/* Output the literal table.  */

static HOST_WIDE_INT
dump_csky_minipool (rtx_insn *scan)
{
  Mnode *mp;
  Mnode *nmp;
  HOST_WIDE_INT pool_length = 0;

  if (dump_file)
    fprintf (dump_file,
             ";; Emitting minipool after insn %u;\
             address %ld; align %d (bytes)\n",
             INSN_UID (scan), (unsigned long) minipool_barrier->address, 4);

  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_insn_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      if (mp->refcount > 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, ";; Offset %u, min %ld, max %ld ",
                       (unsigned) mp->offset, (unsigned long) mp->min_address,
                       (unsigned long) mp->max_address);
              print_csky_value (dump_file, mp->value);
              fputc ('\n', dump_file);
            }

          switch (mp->fix_size)
            {
            case 4:
              scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
              pool_length += 4;
              break;
            case 8:
              scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
              pool_length += 8;
              break;
            default:
              gcc_unreachable ();
            }
        }

      nmp = mp->next;
      free (mp);
    }

  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_barrier_after (scan);

  return pool_length;
}

/* Return true if INSN is a minipool load or instruction that will be
   converted to one.  It is assumed that INSN has type attribute "load".  */

bool
csky_minipool_load_p (rtx_insn *insn)
{
  rtx op1, addr;

  extract_insn_cached (insn);

  op1 = recog_data.operand[1];

  /* This is a constant that has not yet been turned into
     a minipool load.  */
  if (CONSTANT_P (op1))
    return true;

  /* Constant pool loads are label_refs.  */
  if (GET_CODE (op1) == ZERO_EXTEND || GET_CODE (op1) == SIGN_EXTEND)
    op1 = XEXP (op1, 0);
  if (GET_CODE (op1) != MEM)
    return false;
  addr = XEXP (op1, 0);
  if (GET_CODE (addr) == PLUS && CONST_INT_P (XEXP (addr, 1)))
    addr = XEXP (addr, 0);
  return GET_CODE (addr) == LABEL_REF;
}


/* Compute the "length" attribute of a push or pop insn, according to
   the registers it uses.  */

int
csky_compute_pushpop_length (rtx *operands)
{
  rtx parallel_op = operands[2];
  /* Initialize INDX to the index of the last element of the PARALLEL.  */
  unsigned indx = XVECLEN (parallel_op, 0) - 1;
  unsigned first_indx = 0;
  unsigned regno = REGNO (operands[1]);

  if (regno > CSKY_LR_REGNUM)
    return 4;

  /* Check each register in the list.  */
  for (; indx > first_indx; indx--)
    {
      regno = REGNO (XEXP (XVECEXP (parallel_op, 0, indx), 0));
      /* If a register number higher than 15 is included, a 32-bit insn
         is used.  */
      if (regno > CSKY_LR_REGNUM)
        return 4;
    }

  return 2;
}

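/* Illustrative example only (not part of the original source): a push whose
   register list contains only registers at or below CSKY_LR_REGNUM (r15/lr)
   can use the 16-bit encoding, so the insn length is 2; if any register
   above that is included, the 32-bit form is needed and the length is 4.  */
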
/* Emit constant pools for -mconstpool.  */

static void
csky_emit_constant_pools (void)
{
  rtx_insn *insn;
  HOST_WIDE_INT address = 0;
  Mfix *fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  insn = get_insns ();
  gcc_assert (NOTE_P (insn));

  /* Scan the insns and record the operands that need fixing.  */
  for (insn = next_nonnote_insn (insn); insn;
       insn = next_nonnote_insn (insn))
    {
      if (BARRIER_P (insn))
        push_csky_minipool_barrier (insn, address);
      else if (INSN_P (insn))
        {
          rtx_jump_table_data *table;

          note_csky_invalid_constants (insn, address, true);
          address += get_attr_length (insn);

          /* If the insn is a vector jump, add the size of the table
             and skip the table.  */
          if (tablejump_p (insn, NULL, &table))
            {
              address += get_csky_jump_table_size (table);
              insn = table;
            }
        }
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix *ftmp;
      Mfix *last_added_fix;
      Mfix *last_barrier = NULL;
      Mfix *this_fix;
      Mnode *mp;
      bool has_pending_const = false;

      /* Check if there is any pending constant not processed.  */
      for (mp = minipool_vector_head; mp; mp = mp->next)
        if (mp->refcount > 0)
          {
            has_pending_const = true;
            break;
          }

      /* If no pending constant, skip over barrier insns.  */
      if (has_pending_const == false)
        {
          while (fix && BARRIER_P (fix->insn))
            fix = fix->next;
          if (fix == NULL)
            break;
        }

      last_added_fix = NULL;

      for (ftmp = fix; ftmp; ftmp = ftmp->next)
        {
          if (BARRIER_P (ftmp->insn))
            {
              if (minipool_vector_head
                  && ftmp->address >= minipool_vector_head->max_address)
                break;

              last_barrier = ftmp;
            }
          else
            {
              ftmp->minipool = add_csky_minipool_forward_ref (ftmp);
              if (ftmp->minipool == NULL)
                break;
            }
          last_added_fix = ftmp;  /* Keep track of the last fix added.  */
        }

      /* If the last added fix is a barrier, dump minipool after it.  */
      if (last_added_fix && BARRIER_P (last_added_fix->insn))
        ftmp = last_barrier;
      else
        {
          /* ftmp is first fix that we can't fit into this pool.
             Insert a new barrier in the code somewhere between the previous
             fix and this one, and arrange to jump around it.  */
          HOST_WIDE_INT max_address;

          /* The last item on the list of fixes must be a barrier, so
             we can never run off the end of the list of fixes without
             last_barrier being set.  */
          gcc_assert (ftmp);

          /* Check that there isn't another fix that is in range that
             we couldn't fit into this pool because the pool was
             already too large: we need to put the pool before such an
             instruction.  The pool itself may come just after the
             fix because create_csky_fix_barrier also allows space for a
             jump instruction.  */
          max_address = minipool_vector_head->max_address;
          if (ftmp->address < max_address)
            max_address = ftmp->address + 1;
          last_barrier = create_csky_fix_barrier (last_added_fix, ftmp,
                                                  max_address);
        }

      assign_csky_minipool_offsets (last_barrier);

      /* Scan over the fixes we have identified for this pool, fixing them
         up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
           this_fix = this_fix->next)
        {
          if (GET_CODE (this_fix->insn) != BARRIER)
            {
              rtx addr
                = plus_constant (Pmode,
                                 gen_rtx_LABEL_REF (VOIDmode,
                                                    minipool_vector_label),
                                 this_fix->minipool->offset);
              rtx insn_body = PATTERN (this_fix->insn);
              rtx src = XEXP (insn_body, 1);
              *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
              if (GET_CODE (this_fix->value) == SYMBOL_REF)
                emit_insn_after (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
                                                          gen_rtvec (1, src),
                                                          VUNSPEC_SYMBOL_REF),
                                 this_fix->insn);
            }
        }
      dump_csky_minipool (last_barrier->insn);
      fix = ftmp;
      if (fix->next == NULL)
        break;
    }

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}


/* Implement TARGET_MACHINE_DEPENDENT_REORG.  This handles
   -mconstpool output.  */

static void
csky_reorg (void)
{
  if (TARGET_CONSTANT_POOL)
    csky_emit_constant_pools ();
}


/* Check to see if the current function contains a branch insn with the
   far jump attribute set.  Such a function uses the LR register.  */

static bool
csky_far_jump_used_p (void)
{
  rtx_insn *insn;
  if (cfun->machine->far_jump_used)
    return true;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN
        /* Ignore tablejump patterns.  */
        && GET_CODE (PATTERN (insn)) != ADDR_VEC
        && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
        && get_attr_far_jump (insn) == FAR_JUMP_YES)
      {
        cfun->machine->far_jump_used = 1;
        return true;
      }
  return false;
}

/* Return the mask of registers used by the current function.  Set
   COUNT to the number of registers used.  */

static unsigned int
get_csky_live_regs (int *count)
{
  int reg;
  unsigned int live_regs_mask = 0;

  *count = 0;
  for (reg = 0; reg < CSKY_NGPR_REGS; reg++)
    {
      bool save = false;

      /* Ignore unsupported registers.  */
      if (CSKY_TARGET_ARCH (CK801) && reg > 8 && reg < 13)
        continue;
      if ((CSKY_TARGET_ARCH (CK801)
           || CSKY_TARGET_ARCH (CK802)
           || CSKY_TARGET_ARCH (CK803))
          && reg > 15)
        break;

      /* Call-saved registers marked as used.  */
      if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
        save = true;

      /* Frame pointer marked used.  */
      else if (frame_pointer_needed && reg == HARD_FRAME_POINTER_REGNUM)
        save = true;

      /* This is required for CK801/802 where FP is a fixed reg, otherwise
         we end up with no FP value available to the DWARF-2 unwinder.  */
      else if (crtl->calls_eh_return && reg == HARD_FRAME_POINTER_REGNUM)
        save = true;

      /* CK801/802 also need special handling for LR because it's clobbered
         by far jumps.  */
      else if ((CSKY_TARGET_ARCH (CK801) || CSKY_TARGET_ARCH (CK802))
               && reg == CSKY_LR_REGNUM
               && (!crtl->is_leaf || csky_far_jump_used_p ()))
        save = true;

      /* Register is used for EH data return.  */
      else if (crtl->calls_eh_return
               && reg >= CSKY_FIRST_EH_RETDATA_REGNUM
               && reg <= CSKY_LAST_EH_RETDATA_REGNUM)
        save = true;

      /* We need a temporary reg to hold the offset for adjusting the SP
         for a large stack frame.  */
      if (reg == CSKY_STACKADJUST_REGNUM
          && cfun->machine->reg_offset > CSKY_MAX_SP_ADJUST * 2)
        save = true;

      /* Add reg to the mask.  */
      if (save)
        {
          (*count)++;
          live_regs_mask |= (1 << reg);
        }
    }
  return live_regs_mask;
}

/* Compute the stack frame layout, storing sizes of the various pieces
   in cfun->machine.

   Stack frames constructed in the prologue look like:
                        ... caller's frame ...
     incoming SP ->     caller's outbound argument overflow
                        argument spill
     optional FP ->     register save
                        local variables
                        alloca() space
     adjusted SP ->     outbound argument overflow

   with SP/FP pointing at the base (low address) of the respective area,
   and each area aligned to a word boundary.  */

static void
csky_layout_stack_frame (void)
{
  machine_function *infp = cfun->machine;
  int reg_count;

  if (infp->frame_init_p)
    return;

  /* Get sizes of local variables & outbound arguments.  */
  infp->outbound_size = CSKY_STACK_ALIGN (crtl->outgoing_args_size);
  infp->local_offset = infp->outbound_size;
  infp->local_size = CSKY_STACK_ALIGN (get_frame_size ());
  infp->reg_offset = infp->local_offset + infp->local_size;

  /* Now compute size of argument spill + saved regs.  These do not
     need explicit alignment since they are already word-sized.  */
  infp->reg_mask = get_csky_live_regs (&reg_count);
  infp->reg_size = reg_count * UNITS_PER_WORD;
  infp->arg_offset = infp->reg_offset + infp->reg_size;
  infp->arg_size = crtl->args.pretend_args_size;
  infp->frame_size = infp->arg_offset + infp->arg_size;
  infp->frame_init_p = reload_completed;
}

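/* Illustrative example only (not part of the original source): for a
   function with 24 bytes of outgoing argument overflow, 40 bytes of locals,
   and 3 saved registers (UNITS_PER_WORD == 4), the layout above gives
     outbound_size = 24, local_offset = 24, local_size = 40,
     reg_offset = 64, reg_size = 12, arg_offset = 76,
   and frame_size = 76 plus any pretend (stdarg spill) bytes.  */
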
/* Implement TARGET_CAN_ELIMINATE.  */
static bool
csky_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  if (to == FRAME_POINTER_REGNUM)
    return from != ARG_POINTER_REGNUM;
  if (to == STACK_POINTER_REGNUM)
    return !frame_pointer_needed;
  return true;
}

/* Worker function for INITIAL_ELIMINATION_OFFSET macro.
   Define the offset between two registers, one to be eliminated, and
   the other its replacement, at the start of a routine.  */

HOST_WIDE_INT
csky_initial_elimination_offset (int from, int to)
{
  int offset;

  csky_layout_stack_frame ();

  /* Set OFFSET to the offset to the initial stack pointer.  */
  switch (from)
    {
    case FRAME_POINTER_REGNUM:
    case HARD_FRAME_POINTER_REGNUM:
      offset = cfun->machine->reg_offset;
      break;

    case ARG_POINTER_REGNUM:
      offset = cfun->machine->arg_offset;
      break;

    default:
      gcc_unreachable ();
    }

  /* If we are asked for the offset to the frame pointer instead,
     then subtract the difference between the frame pointer and stack
     pointer.  */
  if (to == FRAME_POINTER_REGNUM || to == HARD_FRAME_POINTER_REGNUM)
    offset -= cfun->machine->reg_offset;
  return offset;
}

1876
1877/* Determine where to put an argument to a function.
1878 Value is zero to push the argument on the stack,
1879 or a hard register in which to store the argument.
1880
cc7232b9
J
1881 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1882 the preceding args and about the function being called.
6783fdb7 1883 ARG is a description of the argument. */
db92bd22 1884
cc7232b9 1885static rtx
6783fdb7 1886csky_function_arg (cumulative_args_t pcum_v, const function_arg_info &arg)
cc7232b9
J
1887{
1888 CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
01d56aea
J
1889 int reg = pcum->reg;
1890 machine_mode mode = arg.mode;
cc7232b9 1891
01d56aea
J
1892 if (FUNCTION_VARG_MODE_P(mode)
1893 && !pcum->is_stdarg)
1894 {
1895 reg = pcum->freg;
1896
1897 if (reg < CSKY_NPARM_FREGS)
1898 return gen_rtx_REG (mode, CSKY_FIRST_VFP_REGNUM + reg);
1899 else
1900 return NULL_RTX;
1901 }
1902
1903 if (reg < CSKY_NPARM_REGS)
1904 return gen_rtx_REG (mode, CSKY_FIRST_PARM_REGNUM + reg);
cc7232b9
J
1905
1906 return NULL_RTX;
1907}
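/* Illustrative behavior (an added example; assumes the usual C-SKY ABI
   with CSKY_NPARM_REGS == 4 argument registers): the first SImode
   argument comes back as a REG for the first parameter register, the
   fifth comes back as NULL_RTX and is passed on the stack; when
   FUNCTION_VARG_MODE_P holds for the mode and the callee is not stdarg,
   the first such argument lands in the first VFP register instead.  */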
1908
1909
1910/* Return the number of registers (words) needed to pass an argument of
1911 MODE and TYPE. */
1912
1913static int
01d56aea 1914csky_num_arg_regs (machine_mode mode, const_tree type, bool is_stdarg)
cc7232b9
J
1915{
1916 int size;
1917
1918 if (type && mode == BLKmode)
1919 size = int_size_in_bytes (type);
1920 else
1921 size = GET_MODE_SIZE (mode);
1922
01d56aea
J
1923 if (TARGET_HARD_FLOAT_ABI
1924 && !is_stdarg)
1925 {
1926 if (CSKY_VREG_MODE_P(mode)
1927 && !TARGET_SINGLE_FPU)
1928 return ((CSKY_NUM_WORDS (size) + 1) / 2);
1929 }
1930
cc7232b9
J
1931 return CSKY_NUM_WORDS (size);
1932}
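/* For example (added illustration, assuming UNITS_PER_WORD == 4): an
   SImode argument needs 1 word and a DImode argument needs 2, while
   with the hard-float ABI a non-stdarg DFmode argument in a vector
   register mode on a double-precision FPU needs (2 + 1) / 2 = 1 FP
   register.  */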
1933
1934
1935/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
1936
1937static void
6930c98c
RS
1938csky_function_arg_advance (cumulative_args_t pcum_v,
1939 const function_arg_info &arg)
cc7232b9
J
1940{
1941 CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
01d56aea
J
1942 int *reg = &pcum->reg;
1943 machine_mode mode = arg.mode;
cc7232b9 1944
01d56aea
J
1945 int param_size = csky_num_arg_regs (mode, arg.type, pcum->is_stdarg);
1946 int param_regs_nums = CSKY_NPARM_REGS;
1947
1948 if (FUNCTION_VARG_MODE_P(mode)
1949 && !pcum->is_stdarg)
1950 {
1951 reg = &pcum->freg;
1952 param_regs_nums = CSKY_NPARM_FREGS;
1953 }
1954
1955 if (*reg + param_size > param_regs_nums)
1956 *reg = param_regs_nums;
cc7232b9 1957 else
01d56aea 1958 *reg += param_size;
cc7232b9
J
1959}
1960
1961
1962/* Implement TARGET_FUNCTION_VALUE. */
1963static rtx
1964csky_function_value (const_tree type, const_tree func,
1965 bool outgoing ATTRIBUTE_UNUSED)
1966{
1967 machine_mode mode;
1968 int unsignedp ATTRIBUTE_UNUSED;
1969 int size;
1970
1971 mode = TYPE_MODE (type);
1972 size = int_size_in_bytes (type);
1973
01d56aea
J
1974 if (FUNCTION_VARG_MODE_P(mode))
1975 {
1976 mode = promote_function_mode (type, mode, &unsignedp, func, 1);
1977 return gen_rtx_REG (mode, CSKY_FIRST_VFP_REGNUM);
1978 }
1979
cc7232b9
J
1980 /* Since we promote return types, we must promote the mode here too. */
1981 if (INTEGRAL_TYPE_P (type))
1982 {
1983 mode = promote_function_mode (type, mode, &unsignedp, func, 1);
1984 return gen_rtx_REG (mode, CSKY_FIRST_RET_REGNUM);
1985 }
1986
1987 if (mode == BLKmode && size > UNITS_PER_WORD
1988 && size <= UNITS_PER_WORD * 2)
1989 {
1990 rtx ret_regs[2];
1991 ret_regs[0] = gen_rtx_EXPR_LIST (SImode,
1992 gen_rtx_REG (SImode,
1993 CSKY_FIRST_RET_REGNUM),
1994 GEN_INT (0 * UNITS_PER_WORD));
1995 ret_regs[1] = gen_rtx_EXPR_LIST (SImode,
1996 gen_rtx_REG (SImode,
1997 CSKY_FIRST_RET_REGNUM + 1),
1998 GEN_INT (1 * UNITS_PER_WORD));
1999
2000 rtvec vec = gen_rtvec (2, ret_regs[0], ret_regs[1]);
2001
2002 return gen_rtx_PARALLEL (mode, vec);
2003 }
2004
2005 return gen_rtx_REG (mode, CSKY_FIRST_RET_REGNUM);
2006}
2007
2008
2009/* Implement TARGET_LIBCALL_VALUE. */
db92bd22 2010
cc7232b9
J
2011static rtx
2012csky_libcall_value (machine_mode mode,
2013 const_rtx libcall ATTRIBUTE_UNUSED)
2014{
01d56aea
J
2015 if (FUNCTION_VARG_MODE_P(mode))
2016 {
2017 return gen_rtx_REG (mode, CSKY_FIRST_VFP_REGNUM);
2018 }
cc7232b9
J
2019 return gen_rtx_REG (mode, CSKY_FIRST_RET_REGNUM);
2020}
2021
2022
2023/* Implement TARGET_FUNCTION_VALUE_REGNO_P.
 2024 On C-SKY, r0 holds return values; with the hard-float ABI, the
 first VFP register can hold them as well. */
2025
2026static bool
2027csky_function_value_regno_p (const unsigned int regno)
2028{
01d56aea
J
2029 if (regno == CSKY_FIRST_RET_REGNUM
2030 || (TARGET_HARD_FLOAT_ABI
2031 && regno == CSKY_FIRST_VFP_REGNUM))
2032 return true;
2033 return false;
cc7232b9
J
2034}
2035
2036
2037/* Return an RTX indicating where the return address to the
2038 calling function can be found. */
db92bd22 2039
cc7232b9
J
2040rtx
2041csky_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
2042{
2043 if (count != 0)
2044 return NULL_RTX;
2045
2046 return get_hard_reg_initial_val (Pmode, CSKY_LR_REGNUM);
2047}
2048
2049
2050/* Implement TARGET_ARG_PARTIAL_BYTES.
2051 Return the number of bytes at the beginning of an argument
2052 that must be put in registers. The value must be zero for arguments
2053 that are passed entirely in registers or
2054 that are entirely pushed on the stack. */
db92bd22 2055
cc7232b9 2056static int
a7c81bc1 2057csky_arg_partial_bytes (cumulative_args_t pcum_v, const function_arg_info &arg)
cc7232b9
J
2058{
2059 CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
01d56aea
J
2060 int param_size = csky_num_arg_regs (arg.mode, arg.type, pcum->is_stdarg);
2061 int reg = pcum->reg;
2062
2063 if (FUNCTION_VARG_MODE_P(arg.mode)
2064 && !pcum->is_stdarg)
2065 return 0;
cc7232b9 2066
01d56aea
J
2067 if (reg < CSKY_NPARM_REGS
2068 && reg + param_size > CSKY_NPARM_REGS)
2069 return (CSKY_NPARM_REGS - reg) * UNITS_PER_WORD;
cc7232b9
J
2070
2071 return 0;
2072}
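/* For example (added illustration, assuming CSKY_NPARM_REGS == 4 and
   UNITS_PER_WORD == 4): with three argument registers already in use
   and a 2-word argument next, the first (4 - 3) * 4 = 4 bytes are
   passed in a register and the rest goes on the stack.  */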
2073
2074
2075/* Implement TARGET_SETUP_INCOMING_VARARGS.
 2076 On C-SKY, the copy from the argument registers to the stack is emitted
2077 by the prologue hooks, so here we just have to note how much stack space
2078 to save. */
2079
2080static void
2081csky_setup_incoming_varargs (cumulative_args_t pcum_v,
e7056ca4 2082 const function_arg_info &arg,
cc7232b9
J
2083 int *pretend_size,
2084 int second_time ATTRIBUTE_UNUSED)
2085{
2086 CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
2087 CUMULATIVE_ARGS local_cum;
2088 cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
2089 int regs_to_push;
2090
2091 cfun->machine->uses_anonymous_args = 1;
2092 local_cum = *pcum;
4fe34cdc
JM
2093 if (!TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (current_function_decl)))
2094 csky_function_arg_advance (local_cum_v, arg);
01d56aea 2095 regs_to_push = CSKY_NPARM_REGS - local_cum.reg;
cc7232b9
J
2096 if (regs_to_push)
2097 *pretend_size = regs_to_push * UNITS_PER_WORD;
2098}
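/* For example (added illustration, assuming CSKY_NPARM_REGS == 4 and
   UNITS_PER_WORD == 4): in a stdarg function with one named SImode
   argument, local_cum.reg advances to 1, so regs_to_push is 3 and
   *pretend_size becomes 12 bytes of stack for the remaining anonymous
   argument registers.  */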
2099
2100
2101/* Implement TARGET_ASM_OUTPUT_MI_THUNK.
2102 Output code to add DELTA to the first argument, and then jump
2103 to FUNCTION. Used for C++ multiple inheritance. */
2104
2105static void
2106csky_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
2107 HOST_WIDE_INT delta,
2108 HOST_WIDE_INT vcall_offset,
2109 tree function)
2110{
f7430263 2111 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));
cc7232b9
J
2112 const char *thiz = "a0";
2113 const char *reg0 = "t0";
2114 const char *reg1 = "t1";
2115 int maxoff = 4096; /* Constant range for addi/subi. */
2116
f7430263 2117 assemble_start_function (thunk, fnname);
cc7232b9
J
2118 final_start_function (emit_barrier (), file, 1);
2119
2120 rtx fnaddr = XEXP (DECL_RTL (function), 0);
2121
2122 if (CSKY_TARGET_ARCH (CK801))
2123 {
2124 /* CK801 can't use t registers and has only 16-bit addi/subi. */
2125 reg0 = "l0";
2126 reg1 = "l1";
2127 maxoff = 256;
2128 if (vcall_offset > maxoff || vcall_offset < -maxoff)
2129 fprintf (file, "\tpush\tl0, l1\n");
2130 else if (delta > maxoff || delta < -maxoff)
2131 fprintf (file, "\tpush\tl0\n");
2132 }
2133
2134 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
2135 thiz = "a1";
2136
2137 /* Add delta to this_rtx. */
2138 if (delta != 0)
2139 {
2140 if (delta > maxoff || delta < -maxoff)
2141 {
2142 fprintf (file, "\tlrw\t%s, %ld\n", reg0, (long)delta);
2143 fprintf (file, "\taddu\t%s, %s, %s\n", thiz, thiz, reg0);
2144 }
2145 else
2146 fprintf (file, "\t%s\t%s, %s, %ld\n",
2147 (delta > 0 ? "addi" : "subi"), thiz, thiz,
2148 (long)(delta > 0 ? delta : -delta));
2149 }
2150
2151 /* If needed, add *(*this_rtx + vcall_offset) to this_rtx. */
2152 if (vcall_offset != 0)
2153 {
2154 fprintf (file, "\tld.w\t%s, (%s, 0)\n", reg0, thiz);
2155
2156 if (vcall_offset > maxoff || vcall_offset < -maxoff)
2157 {
2158 fprintf (file, "\tlrw\t%s, %ld\n", reg1, (long)vcall_offset);
2159 fprintf (file, "\taddu\t%s, %s, %s\n", reg0, reg0, reg1);
2160 }
2161 else
2162 fprintf (file, "\t%s\t%s, %s, %ld\n",
2163 (vcall_offset > 0 ? "addi" : "subi"), reg0, reg0,
2164 (long)(vcall_offset > 0 ? vcall_offset : -vcall_offset));
2165
2166 /* Load the offset and add it to this_rtx */
2167 fprintf (file, "\tld.w\t%s, (%s, 0)\n", reg0, reg0);
2168 fprintf (file, "\taddu\t%s, %s, %s\n", thiz, thiz, reg0);
2169 }
2170
2171 /* We must pop the scratch regs individually instead of using the
2172 "pop" insn, which also does a return. */
2173 if (CSKY_TARGET_ARCH (CK801))
2174 {
2175 if (vcall_offset > maxoff || vcall_offset < -maxoff)
2176 {
2177 fprintf (file, "\tld.w\tl0, (sp, 0)\n");
2178 fprintf (file, "\tld.w\tl1, (sp, 4)\n");
2179 fprintf (file, "\taddi\t sp, sp, 8\n");
2180 }
2181 else if (delta > maxoff || delta < -maxoff)
2182 {
2183 fprintf (file, "\tld.w\tl0, (sp, 0)\n");
2184 fprintf (file, "\taddi\tsp, sp, 4\n");
2185 }
2186 }
2187
2188 fprintf (file, "\tjbr\t");
2189 output_addr_const (file, fnaddr);
2190 fprintf (file, "\n");
2191
2192 final_end_function ();
f7430263 2193 assemble_end_function (thunk, fnname);
cc7232b9
J
2194}
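/* Sketch of the output (added illustration, derived from the templates
   above): for a thunk with delta == 4 and vcall_offset == 0 on a
   non-CK801 target this emits
	addi	a0, a0, 4
	jbr	<function>
   and a small nonzero vcall_offset additionally emits
	ld.w	t0, (a0, 0)
	addi	t0, t0, <vcall_offset>
	ld.w	t0, (t0, 0)
	addu	a0, a0, t0
   before the jump.  */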
2195
2196
2197/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.
2198 Conditionally modify five variables fixed_regs, call_used_regs, global_regs,
2199 reg_names, and reg_class_contents, to take into account any dependence of
2200 these register sets on target flags.
2201
2202 CK801 has registers r0-r8 and r13-r15. CK802 and CK803 have registers
 2203 r0-r15 (the "low" registers). Other CPUs use registers r0-r31 when
 2204 -mhigh-registers is given, and otherwise only r0-r15.
2205
2206 CK801 only has 16-bit instructions, most of which can only reference
2207 r0-r7 (the "mini" registers). So we mark regs outside that range as
2208 fixed. -msmart can be used on other arch variants to force the same
2209 behavior because it results in smaller code size.
2210
2211 TODO: investigate whether it's beneficial to use r8-r13 as a spill
2212 class when TARGET_MINI_REGISTERS instead of making them unusable by
2213 the register allocator. */
2214
2215static void
2216csky_conditional_register_usage (void)
2217{
2218 /* Only use mini registers in smart mode or 801. */
2219 if (TARGET_MINI_REGISTERS)
2220 {
2221 int i;
2222
2223 for (i = (CSKY_LAST_MINI_REGNUM + 1); i < 32; i++)
2224 {
2225 fixed_regs[i] = 1;
2226 call_used_regs[i] = 1;
cc7232b9
J
2227 }
2228 }
2229 /* For some targets, the high registers are not supported.
2230 CPUs other than ck801/ck802/ck803 use high registers
 2231 depending on the -mhigh-registers option. */
2232 else if (CSKY_TARGET_ARCH (CK802)
2233 || CSKY_TARGET_ARCH (CK803)
2234 || !TARGET_HIGH_REGISTERS)
2235 {
2236 int i;
2237
2238 for (i = CSKY_FIRST_HIGH_REGNUM; i <= CSKY_LAST_HIGH_REGNUM; i++)
2239 {
2240 fixed_regs[i] = 1;
2241 call_used_regs[i] = 1;
cc7232b9
J
2242 }
2243 }
2244
2245 /* On CK801/CK802 we must mark lr as a fixed register because it is
2246 used to implement far jumps.
2247 FIXME: perhaps there should be a command-line option controlling
 2248 use of lr for far jumps on ck802 when !TARGET_MINI_REGS, for cases
 2249 where you really want lr to be available to the register allocator and
2250 you know there are no far jumps in the code. */
2251 if (CSKY_TARGET_ARCH (CK801) || CSKY_TARGET_ARCH (CK802))
2252 {
2253 fixed_regs[CSKY_LR_REGNUM] = 1;
d7fb4c31 2254 call_used_regs[CSKY_LR_REGNUM] = 0;
cc7232b9
J
2255 }
2256
2257 /* The hi/lo registers are only supported in dsp mode. */
2258 if (!TARGET_DSP)
2259 {
2260 fixed_regs[CSKY_HI_REGNUM] = 1;
2261 call_used_regs[CSKY_HI_REGNUM] = 1;
cc7232b9
J
2262
2263 fixed_regs[CSKY_LO_REGNUM] = 1;
2264 call_used_regs[CSKY_LO_REGNUM] = 1;
cc7232b9
J
2265 }
2266
2267 /* The V_REGS are only supported in hard float mode. */
2268 if (!TARGET_HARD_FLOAT)
2269 {
2270 int regno;
2271
2272 for (regno = CSKY_FIRST_VFP_REGNUM;
db92bd22
GQ
2273 regno <= CSKY_LAST_VFP3_REGNUM; regno++)
2274 {
2275 fixed_regs[regno] = 1;
2276 call_used_regs[regno] = 1;
2277 }
2278 }
2279
2280 if (!TARGET_SUPPORT_FPV3)
2281 {
2282 int regno;
2283
2284 for (regno = CSKY_FIRST_VFP3_REGNUM;
2285 regno <= CSKY_LAST_VFP3_REGNUM; regno++)
cc7232b9
J
2286 {
2287 fixed_regs[regno] = 1;
2288 call_used_regs[regno] = 1;
cc7232b9
J
2289 }
2290 }
2291
2292 /* In pic mode, the gb register is not available for register
2293 allocation. Since gb is not clobbered by function
d7fb4c31 2294 calls, set its call_used_regs to 0. */
cc7232b9
J
2295 if (flag_pic)
2296 {
2297 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
d7fb4c31 2298 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 0;
cc7232b9
J
2299 }
2300}
2301
2302/* Implement TARGET_HARD_REGNO_NREGS. */
db92bd22 2303
cc7232b9
J
2304static unsigned int
2305csky_hard_regno_nregs (unsigned int regno, machine_mode mode)
2306{
2307 if (regno >= CSKY_FIRST_VFP_REGNUM && !CSKY_TARGET_ARCH (CK803))
2308 return 1;
2309 else
2310 return CSKY_NUM_REGS (mode);
2311}
2312
2313/* Implement TARGET_HARD_REGNO_MODE_OK. Return true if REGNO is a
2314 valid register for holding a quantity of type MODE. */
2315
2316static bool
2317csky_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2318{
2319 int nregs = CSKY_NUM_REGS (mode);
2320
2321 /* We can't handle more than doubleword sizes for any register. */
2322 if (nregs > 2)
2323 return false;
2324
 2325 /* For general registers, return true if the mode is one word in size.
 2326 When the size is larger than one word, there must be two
 2327 successive hard registers to hold the data. */
2328 if (regno < CSKY_NGPR_REGS)
2329 {
2330 if (nregs < 2)
2331 return true;
2332 else if (TARGET_MINI_REGISTERS)
2333 return (regno < CSKY_LAST_MINI_REGNUM);
2334 else if (CSKY_TARGET_ARCH (CK802)
2335 || CSKY_TARGET_ARCH (CK803)
2336 || !TARGET_HIGH_REGISTERS)
 2337 /* Without high registers, r15 cannot hold doubleword data. */
2338 return (regno < (CSKY_SP_REGNUM - 1));
2339 else
2340 return (regno < (CSKY_SP_REGNUM - 1)
2341 || (regno >= CSKY_LR_REGNUM
2342 && regno < CSKY_LAST_HIGH_UNFIXED_REGNUM));
2343 }
2344 else if (regno == CSKY_CC_REGNUM)
2345 return (mode == CCmode);
2346 else if (regno == CSKY_HI_REGNUM || regno == CSKY_LO_REGNUM)
2347 {
 2348 /* Don't allocate the hi/lo registers for floating-point data even
 2349 in DSP mode, because reloading data from the hi/lo registers
 2350 is very costly. */
2351 if (!TARGET_DSP || mode == SFmode || mode == DFmode)
2352 return false;
2353 else if (nregs == 2)
2354 return (regno == CSKY_HI_REGNUM);
2355 else
2356 return true;
2357 }
2358 else if (CSKY_VREG_P (regno) && TARGET_HARD_FLOAT)
2359 return true;
2360
2361 return false;
2362}
2363
2364/* Implement TARGET_MODES_TIEABLE_P. We can't tie DFmode with other modes
2365 when V_REGs might be in use because those registers mess with the stored
2366 bits. */
db92bd22 2367
cc7232b9
J
2368static bool
2369csky_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2370{
2371 return !(TARGET_HARD_FLOAT
2372 && mode1 != mode2
2373 && (mode1 == DFmode || mode2 == DFmode));
2374}
2375
cc7232b9
J
2376/* Implement TARGET_CLASS_LIKELY_SPILLED_P.
2377 We need to define this for MINI_REGS when we only use r0 - r7.
 2378 Otherwise we can end up using r0-r4 for function arguments and not
 2379 have enough left over to do doubleword arithmetic. */
2380
2381static bool
2382csky_class_likely_spilled_p (reg_class_t rclass)
2383{
2384 if ((TARGET_MINI_REGISTERS && rclass == MINI_REGS)
2385 || rclass == C_REGS)
2386 return true;
2387
2388 return false;
2389}
2390
2391
2392/* Implement TARGET_PREFERRED_RELOAD_CLASS.
2393 Given an rtx X being reloaded into a reg required to be
2394 in class CLASS, return the class of reg to actually use.
2395 In general this is just CLASS. */
2396
2397static reg_class_t
2398csky_preferred_reload_class (rtx x, reg_class_t rclass)
2399{
2400 if (TARGET_HARD_FLOAT
2401 && CONST_DOUBLE_P (x)
2402 && (GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2403 && rclass == NO_REGS)
2404 return GENERAL_REGS;
2405 return rclass;
2406}
2407
2408
2409/* Implement TARGET_CLASS_MAX_NREGS.
2410 Return the maximum number of consecutive registers of class rclass needed
2411 to hold a value of mode mode.
 2412 On C-SKY, this is the size of MODE in words,
2413 except in the FP regs, where a single reg is always enough. */
2414
2415static unsigned char
2416csky_class_max_nregs (reg_class_t rclass, machine_mode mode)
2417{
2418 if (rclass == V_REGS)
2419 return 1;
2420 else
2421 return CSKY_NUM_REGS (mode);
2422}
2423
2424
2425/* Implement TARGET_SECONDARY_RELOAD.
2426 If copying a register of RCLASS from/to X requires an intermediate
2427 register, the hook should return the REGISTER_CLASS required for this
2428 intermediate register.
2429 If no intermediate register is required, it should return NO_REGS.
2430 If more than one intermediate register is required, describe the one
2431 that is closest in the copy chain to the reload register. */
2432
2433reg_class_t
2434csky_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x,
2435 reg_class_t rclass,
2436 machine_mode mode,
2437 secondary_reload_info *sri ATTRIBUTE_UNUSED)
2438{
2439 int regno = -1;
2440
2441 /* Extract the real regno from X. */
2442 if (GET_CODE (x) == SIGN_EXTEND)
2443 {
2444 int off = 0;
2445
2446 x = XEXP (x, 0);
2447
2448 if (reg_renumber)
2449 regno = true_regnum (x);
2450 else
2451 {
2452 while (GET_CODE (x) == SUBREG)
2453 {
2454 off += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2455 GET_MODE (SUBREG_REG (x)),
2456 SUBREG_BYTE (x), GET_MODE (x));
2457 x = SUBREG_REG (x);
2458 }
2459
2460 if (GET_CODE (x) == REG)
2461 regno = REGNO (x) + off;
2462 }
2463 }
2464 else if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2465 regno = true_regnum (x);
2466
2467 /* We always require a general register when copying anything to
2468 HI/LO_REGNUM, except when copying an SImode value from HI/LO_REGNUM
2469 to a general register, or when copying from register 0. */
b8a61853 2470 if (rclass == HILO_REGS && !CSKY_GENERAL_REGNO_P (regno))
cc7232b9
J
2471 return GENERAL_REGS;
2472
2473 if (rclass == V_REGS && !CSKY_GENERAL_REGNO_P (regno))
2474 {
2475 /* Reload between vector reg and memory does not need an
2476 intermediate register. */
2477 if (MEM_P (x) && (mode == SFmode || mode == DFmode))
2478 return NO_REGS;
2479 else
2480 return GENERAL_REGS;
2481 }
2482
2483 return NO_REGS;
2484}
2485
2486/* Implement TARGET_SPILL_CLASS.
2487 Try spilling to a larger register class before spilling to memory. */
2488
2489static reg_class_t
2490csky_spill_class (reg_class_t rclass, machine_mode mode ATTRIBUTE_UNUSED)
2491{
2492 if ((rclass == MINI_REGS && !TARGET_MINI_REGISTERS)
2493 || (rclass == LOW_REGS && TARGET_HIGH_REGISTERS))
2494 return GENERAL_REGS;
2495 return NO_REGS;
2496}
2497
2498/* Convert a static initializer array of feature bits to sbitmap
2499 representation. */
db92bd22 2500
cc7232b9
J
2501static void
2502csky_initialize_isa (sbitmap isa, const enum csky_isa_feature *isa_bits)
2503{
2504 bitmap_clear (isa);
2505 while (*isa_bits != CSKY_ISA_FEATURE_GET (none))
2506 bitmap_set_bit (isa, *(isa_bits++));
2507}
2508
2509
2510/* Configure a build target TARGET from the user-specified options OPTS and
2511 OPTS_SET. */
db92bd22 2512
cc7232b9
J
2513static void
2514csky_configure_build_target (struct csky_build_target *target,
2515 struct cl_target_option *opts,
2516 struct gcc_options *opts_set)
2517{
2518 const struct csky_processors *csky_selected_tune = NULL;
2519 struct csky_processors *csky_selected_cpu = NULL;
2520 struct csky_processors *csky_selected_arch = NULL;
2521 sbitmap all_sbits = sbitmap_alloc (CSKY_ISA_FEATURE_GET (max));
2522 bitmap_clear (all_sbits);
2523
2524 bitmap_clear (target->isa);
2525 target->core_name = NULL;
2526 target->arch_name = NULL;
2527
2528 if (opts_set->x_csky_arch_option)
2529 csky_selected_arch = &all_architectures[opts->x_csky_arch_option];
2530
2531 if (opts_set->x_csky_cpu_option)
2532 {
2533 csky_selected_cpu = &all_cores[opts->x_csky_cpu_option];
2534 csky_selected_tune = &all_cores[opts->x_csky_cpu_option];
2535 }
2536
2537 if (csky_selected_cpu)
2538 {
 2539 /* TODO: support combining features
 2540 between different cpu & arch; this should be based on the arch. */
2541 if (csky_selected_arch
2542 && (csky_selected_cpu->base_arch != csky_selected_arch->base_arch))
2543 warning (0, "cpu %s is not based on arch %s, ignoring the arch",
2544 csky_selected_cpu->name, csky_selected_arch->name);
2545 if (!csky_selected_arch)
2546 csky_selected_arch = &all_architectures[csky_selected_cpu->base_arch];
2547 csky_initialize_isa (all_sbits, csky_selected_arch->isa_bits);
2548 target->core_name = csky_selected_cpu->name;
2549 }
2550 else if (csky_selected_arch)
2551 {
2552 csky_selected_cpu = csky_selected_arch;
2553 target->arch_name = csky_selected_arch->name;
2554 }
2555 else /* If the user did not specify a processor, choose one for them. */
2556 {
2557 csky_selected_cpu = &all_cores[TARGET_CPU_DEFAULT];
2558 csky_selected_arch = &all_architectures[csky_selected_cpu->base_arch];
2559 csky_initialize_isa (all_sbits, csky_selected_arch->isa_bits);
2560 target->core_name = csky_selected_cpu->name;
2561 }
2562
 2563 /* The selected cpu may be an architecture, so look up tuning by core ID. */
2564 if (!csky_selected_tune)
2565 csky_selected_tune = &all_cores[csky_selected_cpu->core];
2566 gcc_assert (csky_selected_tune);
2567
2568 gcc_assert (csky_selected_arch);
2569 gcc_assert (csky_selected_cpu);
2570 csky_initialize_isa (target->isa, csky_selected_cpu->isa_bits);
2571 bitmap_ior (target->isa, target->isa, all_sbits);
2572
2573 /* Finish initializing the target structure. */
2574 target->arch_pp_name = csky_selected_cpu->arch;
2575 target->base_arch = csky_selected_cpu->base_arch;
2576 target->arch_core = csky_selected_cpu->core;
2577
2578 sbitmap_free (all_sbits);
2579}
2580
2581
2582/* Implement TARGET_OPTION_OVERRIDE. */
2583
2584static void
2585csky_option_override (void)
2586{
2587 csky_active_target.isa = sbitmap_alloc (CSKY_ISA_FEATURE_GET (max));
2588
2589 /* Create the default target_options structure. We need this early
2590 to configure the overall build target. */
2591 target_option_default_node = target_option_current_node
ba948b37 2592 = build_target_option_node (&global_options, &global_options_set);
cc7232b9
J
2593
2594 csky_configure_build_target (&csky_active_target,
2595 TREE_TARGET_OPTION (target_option_default_node),
2596 &global_options_set);
2597
2598#ifdef SUBTARGET_OVERRIDE_OPTIONS
2599 SUBTARGET_OVERRIDE_OPTIONS;
2600#endif
2601
2602 csky_base_arch = csky_active_target.base_arch;
2603
db92bd22
GQ
2604 if (flag_pic && !(CSKY_TARGET_ARCH (CK807)
2605 || CSKY_TARGET_ARCH (CK810)
2606 || CSKY_TARGET_ARCH (CK860)))
cc7232b9
J
2607 {
2608 flag_pic = 0;
2609 warning (0, "%qs is not supported by arch %s",
2610 "-fPIC", csky_active_target.arch_pp_name);
2611 }
2612
2613 /* Check floating-point options for consistency. */
2614 if (TARGET_HARD_FLOAT)
2615 {
2616 const struct csky_fpu_desc *csky_selected_fpu = NULL;
2617
2618 if (csky_fpu_index == TARGET_FPU_auto)
2619 {
2620 const char *target_fpu_name;
2621 bool ok;
2622 int fpu_index;
2623
cc7232b9
J
2624 if (csky_active_target.core_name != NULL
2625 && !strchr (csky_active_target.core_name, 'f'))
2626 target_fpu_name = "auto";
2627 else if (CSKY_TARGET_ARCH (CK803) || !TARGET_DOUBLE_FLOAT)
2628 target_fpu_name = "fpv2_sf";
db92bd22
GQ
2629 else if (CSKY_TARGET_ARCH (CK860))
2630 target_fpu_name = "fpv3";
cc7232b9
J
2631 else if (TARGET_DOUBLE_FLOAT && TARGET_FDIVDU)
2632 target_fpu_name = "fpv2_divd";
db92bd22
GQ
2633 else
2634#ifdef CSKY_FPUTYPE_DEFAULT
2635 target_fpu_name = CSKY_FPUTYPE_DEFAULT;
2636#else
2637 target_fpu_name = "fpv2";
2638#endif
cc7232b9
J
2639
2640 ok = opt_enum_arg_to_value (OPT_mfpu_, target_fpu_name, &fpu_index,
2641 CL_TARGET);
2642 gcc_assert (ok);
2643 csky_fpu_index = (enum csky_fpu_type) fpu_index;
2644 }
2645
2646 if (CSKY_TARGET_ARCH (CK801) || CSKY_TARGET_ARCH (CK802))
2647 error ("%qs is not supported by arch %s",
2648 "-mhard-float", csky_active_target.arch_pp_name);
2649 else if (csky_fpu_index == TARGET_FPU_auto)
2650 error ("%<-mhard-float%> is not supported by the selected CPU");
2651 else
2652 {
2653 csky_selected_fpu = &all_fpus[csky_fpu_index];
2654 sbitmap fpu_bits = sbitmap_alloc (CSKY_ISA_FEATURE_GET (max));
2655 csky_initialize_isa (fpu_bits, csky_selected_fpu->isa_bits);
2656
2657 bitmap_ior (csky_active_target.isa, csky_active_target.isa,
2658 fpu_bits);
2659
2660 sbitmap_free (fpu_bits);
2661 }
2662 }
2663 else
2664 {
2665 if (TARGET_DOUBLE_FLOAT > 0)
2666 warning (0, "%<-mdouble-float%> ignored without %<-mhard-float%>");
2667 TARGET_DOUBLE_FLOAT = 0;
2668 if (TARGET_FDIVDU > 0)
2669 warning (0, "%<-mfdivdu%> ignored without %<-mhard-float%>");
2670 TARGET_FDIVDU = 0;
2671 }
2672
62fa9cb2
GQ
2673 /* Initialize boolean versions of the architectural flags, for use
2674 in the .md file. */
2675
2676#undef CSKY_ISA
2677#define CSKY_ISA(IDENT, DESC) \
2678 { \
2679 csky_arch_isa_features[CSKY_ISA_FEATURE_GET (IDENT)] = \
2680 bitmap_bit_p (csky_active_target.isa, CSKY_ISA_FEATURE_GET (IDENT)); \
2681 }
2682#include "csky_isa.def"
2683#undef CSKY_ISA
2684
cc7232b9
J
2685 /* Extended LRW instructions are enabled by default on CK801, disabled
2686 otherwise. */
2687 if (TARGET_ELRW == -1)
2688 TARGET_ELRW = CSKY_TARGET_ARCH (CK801);
2689
2690 /* DSP is enabled either by the processor feature or -mdsp
2691 command-line option. There is no -mno-dsp option as the assembler
2692 doesn't take one. */
2693 if (!TARGET_DSP)
2694 TARGET_DSP = CSKY_ISA_FEATURE (dsp);
2695
2696 /* There's both -mdiv and -mno-div. Take default from processor if
2697 neither is specified explicitly. */
2698 if (TARGET_DIV == -1)
2699 TARGET_DIV = CSKY_ISA_FEATURE (div);
2700
2701 /* TARGET_CONSTANT_POOL is mandatory for CK801 and CK802 and optional
2702 for other CPUs.
2703 The reason why the compiler has to generate constant pools for CK801/2
2704 instead of deferring to the assembler is that these cores don't have a
2705 long branch instruction other than jbsr, which clobbers lr. So for
2706 the compiler to correctly save/restore lr it has to know whether there
2707 are long branches, which depends on having accurate branch length
2708 counts, which in turn depends on having control over where constant
2709 pools are placed. */
2710 if ((CSKY_TARGET_ARCH (CK801) || CSKY_TARGET_ARCH (CK802))
2711 && !TARGET_CONSTANT_POOL)
2712 error ("%qs is not supported by arch %s",
2713 "-mno-constpool", csky_active_target.arch_pp_name);
2714 else if (TARGET_CONSTANT_POOL == -1)
2715 TARGET_CONSTANT_POOL = (CSKY_TARGET_ARCH (CK801)
2716 || CSKY_TARGET_ARCH (CK802));
2717
2718 /* TARGET_MINI_REGISTERS is mandatory for CK801, the default for CK802,
2719 and optional for other CPUs. TARGET_HIGH_REGISTERS is incompatible
2720 with TARGET_MINI_REGISTERS, is not supported by CK801/802/803,
2721 and is the default for other processors.
2722 See csky_conditional_register_usage. */
2723 if (TARGET_MINI_REGISTERS > 0 && TARGET_HIGH_REGISTERS > 0)
2724 error ("%<-msmart%> is incompatible with %<-mhigh-registers%>");
2725 else if (CSKY_TARGET_ARCH (CK801)
2726 || CSKY_TARGET_ARCH (CK802)
2727 || CSKY_TARGET_ARCH (CK803))
2728 {
2729 if (CSKY_TARGET_ARCH (CK801)
2730 || (CSKY_TARGET_ARCH (CK802) && TARGET_MINI_REGISTERS == -1))
2731 TARGET_MINI_REGISTERS = 1;
2732 else if (TARGET_MINI_REGISTERS == -1)
2733 TARGET_MINI_REGISTERS = 0;
2734 if (TARGET_HIGH_REGISTERS > 0)
2735 warning (0, "%qs is not supported by arch %s",
2736 "-mhigh-registers", csky_active_target.arch_pp_name);
2737 TARGET_HIGH_REGISTERS = 0;
2738 }
2739 else
2740 {
2741 if (TARGET_MINI_REGISTERS == -1)
2742 TARGET_MINI_REGISTERS = 0;
2743 if (TARGET_HIGH_REGISTERS == -1)
2744 TARGET_HIGH_REGISTERS = !TARGET_MINI_REGISTERS;
2745 }
2746
2747 /* -mmultiple-stld is the default for everything but CK801, which
2748 doesn't support it. */
2749 if (CSKY_TARGET_ARCH (CK801))
2750 {
2751 if (TARGET_MULTIPLE_STLD > 0)
2752 warning (0, "%qs is not supported by arch %s",
2753 "-mmultiple-stld", csky_active_target.arch_pp_name);
2754 TARGET_MULTIPLE_STLD = 0;
2755 }
2756
cc7232b9
J
2757 /* TODO */
2758
2759 /* Resynchronize the saved target options. */
2760 cl_target_option_save (TREE_TARGET_OPTION (target_option_default_node),
ba948b37 2761 &global_options, &global_options_set);
cc7232b9
J
2762
2763#ifdef ENABLE_TPF_DEBUG
2764 /* Don't emit DWARF4 unless specifically selected. The TPF
2765 debuggers do not yet support DWARF 3/4. */
00f34291 2766 if (!OPTION_SET_P (dwarf_strict))
cc7232b9 2767 dwarf_strict = 1;
00f34291 2768 if (!OPTION_SET_P (dwarf_version))
cc7232b9
J
2769 dwarf_version = 3;
2770#endif
2771
2772 /* Don't run the scheduler before reload by default,
2773 since it tends to increase register pressure. */
00f34291 2774 if (!OPTION_SET_P (flag_schedule_insns))
cc7232b9
J
2775 flag_schedule_insns = 0;
2776
2777 csky_add_gc_roots ();
2778}
2779
2780
2781/* Return TRUE if X contains any references to TLS symbols. */
2782
2783bool
2784csky_tls_referenced_p (rtx x)
2785{
2786 if (!TARGET_TLS)
2787 return false;
2788
2789 subrtx_iterator::array_type array;
2790 FOR_EACH_SUBRTX (iter, array, x, ALL)
2791 {
2792 const_rtx x = *iter;
2793 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
2794 return true;
2795
2796 /* Don't recurse into UNSPEC_TLS looking for TLS symbols; these are
2797 TLS offsets, not real symbol references. */
2798 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLS)
2799 iter.skip_subrtxes ();
2800 }
2801 return false;
2802}
2803
2804
2805/* Implement TARGET_CANNOT_FORCE_CONST_MEM.
2806 Determine if it's legal to put X into the constant pool. This
2807 is not possible for the address of thread-local symbols, which
2808 is checked above. */
2809
2810static bool
2811csky_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
2812 rtx x)
2813{
2814 return csky_tls_referenced_p (x);
2815}
2816
2817
2818/* Implement TARGET_LEGITIMATE_CONSTANT_P. Returns nonzero if the
2819 constant value X is a legitimate general operand.
2820 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2821
2822static bool
2823csky_legitimate_constant_p (machine_mode mode, rtx x)
2824{
2825 return (!csky_cannot_force_const_mem (mode, x)
2826 && CONSTANT_P (x));
2827}
2828
2829
2830/* Return true if X is valid as an CSKY addressing register. */
2831
2832static bool
2833is_csky_address_register_rtx_p (rtx x, int strict_p)
2834{
2835 int regno;
2836
2837 if (!x)
2838 return false;
2839 if (!REG_P (x))
2840 return false;
2841
2842 regno = REGNO (x);
2843
2844 if (strict_p)
2845 return (CSKY_GENERAL_REGNO_P (regno)
2846 || CSKY_GENERAL_REGNO_P (reg_renumber[regno]));
2847 else
2848 return CSKY_GENERAL_REGNO_P (regno) || regno >= FIRST_PSEUDO_REGISTER;
2849}
2850
2851
2852/* Return TRUE if X is a thread-local symbol. */
2853
2854static bool
2855csky_tls_symbol_p (rtx x)
2856{
2857 if (!TARGET_TLS)
2858 return false;
2859
2860 if (GET_CODE (x) != SYMBOL_REF)
2861 return false;
2862
2863 return SYMBOL_REF_TLS_MODEL (x) != 0;
2864}
2865
2866
2867/* Handle lazy initialization of __tls_get_addr libfunc. */
2868static GTY(()) rtx tls_get_addr_libfunc;
2869
2870static rtx
2871get_tls_get_addr (void)
2872{
2873 if (!tls_get_addr_libfunc)
2874 tls_get_addr_libfunc = init_one_libfunc ("__tls_get_addr");
2875 return tls_get_addr_libfunc;
2876}
2877
2878
2879/* Emit a call to __tls_get_addr. */
2880
2881static rtx_insn *
2882csky_call_tls_get_addr (rtx x, rtx reg, rtx *valuep, int reloc)
2883{
2884 rtx label, labelno, unspec, tmp;
2885 rtx_insn *insns;
2886
2887 start_sequence ();
2888
2889 labelno = GEN_INT (tls_labelno++);
2890 label = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, labelno), UNSPEC_TLS_LABEL);
2891 unspec = gen_rtx_UNSPEC (Pmode,
2892 gen_rtvec (3, x, GEN_INT (reloc), label),
2893 UNSPEC_TLS);
2894 tmp = gen_reg_rtx (SImode);
2895 emit_move_insn (reg, unspec);
2896 emit_move_insn (tmp, label);
2897 emit_insn (gen_addsi3 (reg, reg, tmp));
2898 *valuep = emit_library_call_value (get_tls_get_addr (),
2899 NULL_RTX, LCT_PURE, /* LCT_CONST? */
2900 Pmode, reg, Pmode);
2901 insns = get_insns ();
2902 end_sequence ();
2903 return insns;
2904}
2905
2906/* Helper function for csky_legitimize_address, to handle the TLS cases.
2907 REG is a scratch register and may be null. */
2908
2909rtx
2910csky_legitimize_tls_address (rtx x, rtx reg)
2911{
2912 rtx dest, tp, label, labelno, unspec, ret, eqv, addend, tmp;
2913 rtx_insn *insns;
2914 unsigned int model = SYMBOL_REF_TLS_MODEL (x);
2915
2916 if (!reg)
2917 reg = gen_reg_rtx (SImode);
2918
2919 switch (model)
2920 {
2921 case TLS_MODEL_GLOBAL_DYNAMIC:
2922 insns = csky_call_tls_get_addr (x, reg, &ret, TLS_GD32);
2923 dest = gen_reg_rtx (Pmode);
2924 emit_libcall_block (insns, dest, ret, x);
2925 return dest;
2926
2927 case TLS_MODEL_LOCAL_DYNAMIC:
2928 insns = csky_call_tls_get_addr (x, reg, &ret, TLS_LDM32);
2929
2930 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2931 share the LDM result with other LD model accesses. */
2932 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const1_rtx), UNSPEC_TLS);
2933 dest = gen_reg_rtx (Pmode);
2934 emit_libcall_block (insns, dest, ret, eqv);
2935
2936 /* Load the addend. */
2937 addend = gen_rtx_UNSPEC (Pmode,
2938 gen_rtvec (2, x, GEN_INT (TLS_LDO32)),
2939 UNSPEC_TLS);
2940 addend = force_reg (SImode, addend);
2941 return gen_rtx_PLUS (Pmode, dest, addend);
2942
2943 case TLS_MODEL_INITIAL_EXEC:
2944 labelno = GEN_INT (tls_labelno++);
2945 label = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, labelno), UNSPEC_TLS_LABEL);
2946 unspec = gen_rtx_UNSPEC (Pmode,
2947 gen_rtvec (3, x, GEN_INT (TLS_IE32), label),
2948 UNSPEC_TLS);
2949 tmp = gen_reg_rtx (SImode);
2950 emit_move_insn (reg, unspec);
2951 emit_move_insn (tmp, label);
2952 emit_insn (gen_addsi3 (reg, reg, tmp));
2953 emit_move_insn (reg, gen_const_mem (Pmode, reg));
2954 tp = gen_rtx_REG (SImode, CSKY_TLS_REGNUM);
2955 return gen_rtx_PLUS (Pmode, tp, reg);
2956
2957 case TLS_MODEL_LOCAL_EXEC:
2958 unspec = gen_rtx_UNSPEC (Pmode,
2959 gen_rtvec (2, x, GEN_INT (TLS_LE32)),
2960 UNSPEC_TLS);
2961 emit_move_insn (reg, unspec);
2962 tp = gen_rtx_REG (SImode, CSKY_TLS_REGNUM);
2963 return gen_rtx_PLUS (Pmode, tp, reg);
2964
2965 default:
2966 abort ();
2967 }
2968}
2969
2970
2971/* Implement TARGET_LEGITIMIZE_ADDRESS. */
2972
2973static rtx
2974csky_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2975 machine_mode mode)
2976{
2977 if (csky_tls_symbol_p (x))
2978 return csky_legitimize_tls_address (x, NULL_RTX);
2979
2980 if (GET_CODE (x) == PLUS)
2981 {
2982 rtx xop0 = XEXP (x, 0);
2983 rtx xop1 = XEXP (x, 1);
2984
2985 if (is_csky_address_register_rtx_p (xop0, 0)
2986 && CONST_INT_P (xop1))
2987 {
2988 HOST_WIDE_INT offset = INTVAL (xop1);
2989
 2990 /* Try to replace ld32 rx,(ry, offset) with addi16 rz, oimm8
2991 and ld16 rx,(rz, new_ld_offset) to avoid emitting a
2992 32-bit ld, but this addi has a range limitation. */
2993 if (optimize_size
2994 && offset > CSKY_LD16_MAX_OFFSET (mode)
2995 && offset <= (CSKY_ADDI16_MAX_IMM
2996 + CSKY_LD16_MAX_OFFSET (mode)))
2997 {
2998 HOST_WIDE_INT new_ld_offset
2999 = offset & CSKY_LD16_OFFSET_MASK (mode);
3000
3001 xop0 = force_operand (plus_constant (Pmode, xop0,
3002 offset - new_ld_offset),
3003 NULL_RTX);
3004 x = plus_constant (Pmode, xop0, new_ld_offset);
3005 }
3006 else if (offset < 0 && offset >= (-CSKY_SUBI16_MAX_IMM))
3007 x = force_operand (x, NULL_RTX);
3008 else if (offset > CSKY_LD16_MAX_OFFSET (mode)
3009 || offset < 0)
3010 {
3011 /* For the remaining cases, force the constant into a
3012 register. */
3013 xop1 = force_reg (SImode, xop1);
3014 x = gen_rtx_PLUS (SImode, xop0, xop1);
3015 }
3016 }
3017
 3018 /* If the index is stored in a register, force the
 3019 base into a register as well. */
3020 if (is_csky_address_register_rtx_p (xop1, 0)
3021 && !is_csky_address_register_rtx_p (xop0, 0))
3022 {
3023 xop0 = force_operand (xop0, NULL_RTX);
3024 x = gen_rtx_PLUS (SImode, xop0, xop1);
3025 }
3026 }
 3027 /* Make sure to take full advantage of the pre-indexed addressing mode
 3028 with absolute addresses, which often allows the base register to
 3029 be shared across multiple adjacent memory references, and may
 3030 even allow the minipool to be avoided entirely. */
3031 else if (CONST_INT_P (x) && optimize > 0)
3032 {
3033 HOST_WIDE_INT mask, base, index;
3034 rtx base_reg;
3035
3036 mask = CSKY_LD16_OFFSET_MASK (mode);
3037 base = INTVAL (x) & ~mask;
3038 index = INTVAL (x) & mask;
3039 base_reg = force_reg (SImode, GEN_INT (base));
3040 x = plus_constant (Pmode, base_reg, index);
3041 }
3042
3043 return x;
3044}
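/* Worked examples (added illustration, assuming UNITS_PER_WORD == 4 so
   that CSKY_LD16_MAX_OFFSET (SImode) == 124 and the offset mask is 127):
   at -Os, (plus (reg ry) (const_int 200)) becomes rz = ry + 128 followed
   by a 16-bit load with offset 72; with optimization, a bare
   (const_int 0x20000034) becomes rb = 0x20000000 plus offset 0x34, so
   neighboring absolute references can share the base register rb.  */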
3045
3046
3047/* Return nonzero if INDEX is valid for an address index operand.
 3048 ck801 uses only the 16-bit ld;
 3049 ck802 uses both 16- and 32-bit ld;
 3050 others use ld and ldr. */
3051
3052static int
3053ck801_legitimate_index_p (machine_mode mode, rtx index,
3054 int strict_p ATTRIBUTE_UNUSED)
3055{
3056 enum rtx_code code = GET_CODE (index);
3057
 3058 /* When the mode size is larger than 4, we may use two ld instructions
 3059 to get the data; both the index and (index+1) should be valid. */
3060 if (GET_MODE_SIZE (mode) >= 8)
3061 return (code == CONST_INT
3062 && INTVAL (index) < CSKY_LD16_MAX_OFFSET (SImode)
3063 && INTVAL (index) >= 0 && (INTVAL (index) & 3) == 0);
3064
3065 if (code == CONST_INT && GET_MODE_SIZE (mode) > 0
3066 && INTVAL (index) <= CSKY_LD16_MAX_OFFSET (mode)
3067 && INTVAL (index) >= 0)
3068 return ((INTVAL (index) % GET_MODE_SIZE (mode)) == 0);
3069
3070 return 0;
3071}
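/* For example (added illustration, assuming UNITS_PER_WORD == 4): a
   CK801 SImode access accepts constant offsets 0, 4, ..., 124
   (31 * GET_MODE_SIZE), while a DImode access needs a word-aligned
   offset strictly below 124 so that the second word is reachable
   too.  */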
3072
3073
3074static int
3075ck802_legitimate_index_p (machine_mode mode, rtx index,
3076 int strict_p ATTRIBUTE_UNUSED)
3077{
3078 enum rtx_code code = GET_CODE (index);
3079
 3080 /* When the mode size is larger than 4, we may use two ld instructions
 3081 to get the data; both the index and (index+1) should be valid. */
3082 if (GET_MODE_SIZE (mode) >= 8)
3083 return (code == CONST_INT
3084 && INTVAL (index) < CSKY_LD32_MAX_OFFSET (SImode)
3085 && INTVAL (index) >= 0 && (INTVAL (index) & 3) == 0);
3086
3087 if (code == CONST_INT && GET_MODE_SIZE (mode) > 0
3088 && INTVAL (index) <= CSKY_LD32_MAX_OFFSET (mode)
3089 && INTVAL (index) >= 0)
3090 return ((INTVAL (index) % GET_MODE_SIZE (mode)) == 0);
3091
3092 return 0;
3093}
3094
3095
3096/* The instruction ldr rz, (rx, ry << i), i can be 0,1,2,3.
 3097 Check that SHIFT is valid for CODE: a shift count of 0-3 for
 3098 ASHIFT, or a power-of-2 scale factor of at most 8 for MULT. */
3099
3100static bool
3101is_ldr_shift_p (HOST_WIDE_INT shift, enum rtx_code code)
3102{
3103 if (code == ASHIFT)
3104 return (shift >= 0 && shift <= 3);
3105 else if (code == MULT)
3106 return (shift == 1
3107 || shift == 2
3108 || shift == 4
3109 || shift == 8);
3110 else
3111 return false;
3112}
3113
3114
3115static int
3116ck810_legitimate_index_p (machine_mode mode, rtx index, int strict_p)
3117{
3118 enum rtx_code code = GET_CODE (index);
3119
db92bd22
GQ
3120 if (code == CONST_INT && TARGET_HARD_FLOAT && CSKY_VREG_MODE_P (mode))
3121 return (INTVAL (index) < 1024 && INTVAL (index) >= 0
cc7232b9
J
3122 && (INTVAL (index) & 3) == 0);
3123
3124 if (code == CONST_INT)
3125 {
 3126 /* When the mode size is larger than 4, we may use two ld instructions
 3127 to get the data; both the index and (index+1) should be valid. */
3128 if (GET_MODE_SIZE (mode) >= 8)
3129 return (INTVAL (index) < CSKY_LD32_MAX_OFFSET (SImode)
3130 && INTVAL (index) >= 0 && (INTVAL (index) & 3) == 0);
3131
3132 if (GET_MODE_SIZE (mode) > 0
3133 && INTVAL (index) <= CSKY_LD32_MAX_OFFSET (mode)
3134 && INTVAL (index) >= 0)
3135 return ((INTVAL (index) % GET_MODE_SIZE (mode)) == 0);
3136 }
 3137 /* As a special case, allow ld.w rx, (gb, sym@got) with -fpic. */
3138 else if (code == UNSPEC)
3139 return (flag_pic == 1
3140 && (XINT (index, 1) == UNSPEC_PIC_SYMBOL_PLT
3141 || XINT (index, 1) == UNSPEC_PIC_SYMBOL_GOT));
 3142 /* The following index forms are for the ldr instruction; ldr cannot
 3143 load doubleword data, so the mode size must not be larger
 3144 than 4. */
41eba35b
GQ
3145 else if (GET_MODE_SIZE (mode) <= 4
3146 || (TARGET_HARD_FLOAT && CSKY_VREG_MODE_P (mode)))
cc7232b9
J
3147 {
3148 if (is_csky_address_register_rtx_p (index, strict_p))
3149 return 1;
3150 else if (code == MULT || code == ASHIFT)
3151 {
3152 rtx xiop0 = XEXP (index, 0);
3153 rtx xiop1 = XEXP (index, 1);
3154
3155 /* FIXME can the xiop1 be the reg and xiop0 be the int when mult? */
3156 return (is_csky_address_register_rtx_p (xiop0, strict_p)
3157 && CONST_INT_P (xiop1)
3158 && is_ldr_shift_p (INTVAL (xiop1), code));
3159 }
3160 }
3161
3162 return 0;
3163}
3164
3165
3166static int
3167csky_legitimate_index_p (machine_mode mode, rtx index, int strict_p)
3168{
3169 if (CSKY_TARGET_ARCH (CK801))
3170 return ck801_legitimate_index_p (mode, index, strict_p);
3171 else if (CSKY_TARGET_ARCH (CK802))
3172 return ck802_legitimate_index_p (mode, index, strict_p);
3173 else
3174 return ck810_legitimate_index_p (mode, index, strict_p);
3175}
3176
3177
3178/* Implement TARGET_LEGITIMATE_ADDRESS_P.
3179 Recognizes RTL expressions that are valid memory addresses for an
3180 instruction. The MODE argument is the machine mode for the MEM
3181 expression that wants to use this address.
3182
 3183 It only recognizes addresses in canonical form. LEGITIMIZE_ADDRESS should
3184 convert common non-canonical forms to canonical form so that they will
3185 be recognized. */
3186
3187static bool
165b1f6a
KL
3188csky_legitimate_address_p (machine_mode mode, rtx addr, bool strict_p,
3189 code_helper = ERROR_MARK)
cc7232b9
J
3190{
3191 enum rtx_code code = GET_CODE (addr);
3192
3193 /* Match the RTX form emitted for constant pool references.
3194 After reload constants split into minipools will have addresses
3195 from a LABEL_REF. */
3196 if (reload_completed
3197 && ((code == LABEL_REF)
3198 || (code == CONST
3199 && GET_CODE (XEXP (addr, 0)) == PLUS
3200 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
3201 && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))))
3202 return 1;
3203
3204 if (is_csky_address_register_rtx_p (addr, strict_p))
3205 return 1;
 3206 /* It is a pc-relative load, possibly generated for the constant pool. */
3207 else if (GET_CODE (addr) == LABEL_REF)
3208 return 1;
3209
3210 if (code == PLUS)
3211 {
3212 rtx xop0 = XEXP (addr, 0);
3213 rtx xop1 = XEXP (addr, 1);
3214
3215 return ((is_csky_address_register_rtx_p (xop0, strict_p)
3216 && csky_legitimate_index_p (mode, xop1, strict_p))
3217 || (is_csky_address_register_rtx_p (xop1, strict_p)
3218 && csky_legitimate_index_p (mode, xop0, strict_p)));
3219 }
3220
3221 return 0;
3222}
3223
3224
3225/* Functions to save and restore machine-specific function data. */
3226
3227static struct machine_function *
3228csky_init_machine_status (void)
3229{
3230 struct machine_function *machine;
3231
3232 machine = ggc_cleared_alloc<machine_function> ();
3233
3234#if CSKY_FT_UNKNOWN != 0
3235 machine->func_type = CSKY_FT_UNKNOWN;
3236#endif
3237 return machine;
3238}
3239
3240
3241/* Implement INIT_EXPANDERS. */
3242
3243void
3244csky_init_expanders (void)
3245{
3246 /* Arrange to initialize and mark the machine per-function status. */
3247 init_machine_status = csky_init_machine_status;
3248}
3249
3250
3251/* Implement TARGET_CANNOT_COPY_INSN_P.
3252 We must not copy any rtx that uses a pc-relative address. */
3253
3254static bool
3255csky_cannot_copy_insn_p (rtx_insn *insn)
3256{
3257 subrtx_iterator::array_type array;
3258 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
3259 {
3260 const_rtx x = *iter;
3261 if (GET_CODE (x) == UNSPEC
3262 && (XINT (x, 1) == UNSPEC_TLS_LABEL
3263 || XINT (x, 1) == UNSPEC_PIC_SYMBOL_GOTPC_GRS))
3264 return true;
3265 }
3266 return false;
3267}
3268
3269
3270/* Extract the parts of an RTL expression that is a valid memory address
 3271 for an instruction. Return FALSE if it is an invalid memory address. */
3272
3273struct csky_address
3274{
3275 rtx base, index, symbol, label, disp;
3276 HOST_WIDE_INT scale;
3277};
3278
3279static bool
3280decompose_csky_address (rtx addr, struct csky_address *out)
3281{
3282 rtx base = NULL_RTX, index = NULL_RTX, disp = NULL_RTX;
db92bd22 3283 HOST_WIDE_INT scale = 0;
cc7232b9
J
3284 rtx scale_rtx = NULL_RTX;
3285 int i;
3286
3287 out->base = out->index = out->symbol = out->label = out->disp = NULL_RTX;
3288 out->scale = 0;
3289
3290 if (REG_P (addr))
3291 {
3292 out->base = addr;
3293 return true;
3294 }
3295
3296 if (GET_CODE (addr) == LABEL_REF)
3297 {
3298 out->label = addr;
3299 return true;
3300 }
3301
3302 if (GET_CODE (addr) == CONST)
3303 addr = XEXP (addr, 0);
3304
3305 if (GET_CODE (addr) == PLUS)
3306 {
3307 rtx addends[2], op;
3308
3309 addends[0] = XEXP (addr, 0);
3310 addends[1] = XEXP (addr, 1);
3311
3312 if (GET_CODE (addends[0]) == LABEL_REF && CONST_INT_P (addends[1]))
3313 {
3314 out->label = addends[0];
3315 out->disp = addends[1];
3316 return true;
3317 }
3318
3319 if (!REG_P (addends[0]))
3320 std::swap (addends[0], addends[1]);
3321
3322 for (i = 0; i < 2; ++i)
3323 {
3324 op = addends[i];
3325 switch (GET_CODE (op))
3326 {
3327 case REG:
3328 if (!base)
3329 base = op;
3330 else if (!index)
db92bd22
GQ
3331 {
3332 index = op;
3333 scale = 1;
3334 }
cc7232b9
J
3335 else
3336 return false;
3337 break;
3338 case CONST_INT:
3339 case UNSPEC:
3340 if (disp)
3341 return false;
3342 disp = op;
3343 break;
3344 case MULT:
3345 if (index)
3346 return false;
3347 index = XEXP (op, 0);
3348 scale_rtx = XEXP (op, 1);
3349 if (!CONST_INT_P (index) && !CONST_INT_P (scale_rtx))
3350 return false;
3351 else if (CONST_INT_P (index))
3352 std::swap (index, scale_rtx);
3353 scale = INTVAL (scale_rtx);
3354 break;
3355 case ASHIFT:
3356 if (index)
3357 return false;
3358 index = XEXP (op, 0);
3359 scale_rtx = XEXP (op, 1);
3360 if (!CONST_INT_P (scale_rtx))
3361 return false;
db92bd22 3362 scale = 1 << INTVAL (scale_rtx);
cc7232b9
J
3363 break;
3364 default:
3365 return false;
3366 }
3367 }
3368 }
3369
3370 if (!base)
3371 return false;
3372
3373 out->base = base;
3374 out->index = index;
3375 out->disp = disp;
3376 out->scale = scale;
3377
3378 return true;
3379}
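/* Examples of the decomposition (added illustration):
     (reg r3)				-> base r3
     (plus (reg r3) (const_int 8))	-> base r3, disp 8
     (plus (reg r3) (mult (reg r4) (const_int 4)))
					-> base r3, index r4, scale 4
     (plus (reg r3) (ashift (reg r4) (const_int 2)))
					-> base r3, index r4, scale 4.  */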
3380
3381/* Helper function for the csky_simple_mem_operand predicate. Returns
3382 true if OP is an address of the form reg + displacement. */
3383
3384bool
3385csky_simple_addr_operand_p (rtx op)
3386{
3387 struct csky_address addr;
3388
3389 if (!decompose_csky_address (op, &addr))
3390 return false;
3391
 3392 /* FIXME: the PIC-related code.
 3393 Check whether the symbol address is loaded from the GOT table. */
3394 if (addr.disp && GET_CODE (addr.disp) == UNSPEC)
3395 return false;
3396 if (!addr.index && !addr.symbol)
3397 return true;
3398 return false;
3399}
3400
3401
3402/* Print the UNSPEC operand in X to the STREAM. */
3403
3404static void
3405csky_output_pic_addr_const (FILE *stream, rtx x, int code)
3406{
3407
3408 if (GET_CODE (x) != UNSPEC)
3409 return;
3410
3411 if (UNSPEC_TLS == XINT (x, 1))
3412 {
3413 /* FIXME It is not reached */
3414 return;
3415 }
3416
3417 csky_print_operand (stream, XVECEXP (x, 0, 0), code);
3418
3419 switch (XINT (x, 1))
3420 {
3421 case UNSPEC_PIC_SYMBOL_GOTOFF:
3422 fputs ("@GOTOFF", stream);
3423 break;
3424 case UNSPEC_PIC_SYMBOL_PLT:
3425 fputs ("@PLT", stream);
3426 break;
3427 case UNSPEC_PIC_SYMBOL_GOT:
3428 fputs ("@GOT", stream);
3429 break;
3430 case UNSPEC_PIC_SYMBOL_GOTPC:
3431 fputs ("@GOTPC", stream);
3432 break;
3433 case UNSPEC_PIC_SYMBOL_BSR:
3434 break;
3435 default:
3436 break;
3437 }
3438}
3439
3440
3441/* Output the constpool label according to the rtx expression X. */
3442
3443static void
3444csky_output_constpool_label (FILE *stream, rtx x)
3445{
3446 char buf[15];
3447
3448 gcc_assert (GET_CODE (x) == LABEL_REF);
3449 x = XEXP (x, 0);
3450
3451 if (GET_CODE (x) == UNSPEC_VOLATILE && XINT (x, 1) == VUNSPEC_POOL_LABEL)
3452 {
3453 ASM_GENERATE_INTERNAL_LABEL (buf, CSKY_CONSTPOOL_LABEL_PREFIX,
3454 INTVAL (XVECEXP (x, 0, 0)));
3455 assemble_name (stream, buf);
3456 }
3457}
3458
3459
3460/* Implement TARGET_PRINT_OPERAND_ADDRESS. */
3461
3462static void
3463csky_print_operand_address (FILE *stream,
3464 machine_mode mode ATTRIBUTE_UNUSED,
3465 rtx x)
3466{
3467
3468 struct csky_address addr;
3469
3470 decompose_csky_address (x, &addr);
3471
3472 if (addr.label && addr.disp && GET_CODE (addr.disp) == CONST_INT)
3473 {
3474 fprintf (stream, "[");
3475 csky_output_constpool_label (stream, addr.label);
3476 fprintf (stream, "+%d]", (int) INTVAL (addr.disp));
3477 }
3478 else if (addr.label)
3479 {
3480 fprintf (stream, "[");
3481 csky_output_constpool_label (stream, addr.label);
3482 fprintf (stream, "]");
3483 }
3484 else if (addr.symbol && addr.disp && GET_CODE (addr.disp) == CONST_INT)
3485 {
3486 fprintf (stream, "[");
3487 output_addr_const (stream, addr.symbol);
3488 fprintf (stream, "+%d]", (int) INTVAL (addr.disp));
3489 }
3490 else if (addr.symbol)
3491 {
3492 fprintf (stream, "[");
3493 output_addr_const (stream, addr.symbol);
3494 fprintf (stream, "]");
3495 }
3496 else if (addr.disp && GET_CODE (addr.disp) == CONST_INT)
3497 fprintf (stream, "(%s, %d)",
3498 reg_names[REGNO (addr.base)], (int) INTVAL (addr.disp));
3499 else if (addr.disp && GET_CODE (addr.disp) == UNSPEC)
3500 {
3501 if (REGNO (addr.base) != CSKY_GB_REGNUM)
3502 fprintf (stream, "(%s, ", reg_names[REGNO (addr.base)]);
3503 else
3504 fprintf (stream, "[");
3505 csky_output_pic_addr_const (stream, addr.disp, 0);
3506 fprintf (stream, "%s", (REGNO (addr.base) != CSKY_GB_REGNUM)
3507 ? ")" : "]");
3508 }
3509 else if (addr.index)
3510 fprintf (stream, "(%s, %s << %d)",
3511 reg_names[REGNO (addr.base)], reg_names[REGNO (addr.index)],
3512 exact_log2 ((int) (addr.scale)));
3513 else
3514 fprintf (stream, "(%s, 0)", reg_names[REGNO (addr.base)]);
3515}
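/* The assembler syntax produced above (added illustration): a reg plus
   constant prints as "(r3, 8)", a scaled index as "(r3, r4 << 2)", a
   constant-pool reference as the LCP label in brackets, optionally with
   "+offset", and a GOT-relative reference with a gb base prints the
   symbol with its @GOT suffix in brackets.  */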
3516
3517
3518/* Implement TARGET_PRINT_OPERAND.
3519 Print operand X (an rtx) in assembler syntax to file STREAM
3520 according to modifier CODE.
3521
3522 'N' print the log2(X+1), mainly used for bmaski
3523 'P' print the log2(X)
3524 'Q' print the log2(~X)
3525 'O' print a decimal number
3526 'M' print a decimal number as its negative
3527 'R' print the next register or memory location along, i.e. the lsw in
3528 a double word value
3529 'H' print the high 16 bits of a constant. */
3530
3531static void
3532csky_print_operand (FILE *stream, rtx x, int code)
3533{
3534 switch (code)
3535 {
3536 case 'N':
3537 if ((INTVAL (x) & 0xffffffff) == 0xffffffff)
3538 fprintf (stream, "0");
3539 else
3540 fprintf (stream, "%d",
3541 (int) exact_log2 ((INTVAL (x) & 0xffffffff) + 1) % 32);
3542 break;
3543 case 'P':
3544 fprintf (stream, "%d",
3545 (int) exact_log2 (INTVAL (x) & 0xffffffff));
3546 break;
3547 case 'Q':
3548 fprintf (stream, "%d",
3549 (int) exact_log2 (~INTVAL (x) & 0xffffffff));
3550 break;
3551 case 'O':
3552 fprintf (stream, "%d", (int) INTVAL (x));
3553 break;
3554 case 'M':
3555 fprintf (stream, "%d", (int) (-INTVAL (x)));
3556 break;
3557 case 'R':
3558 /* Next location along in memory or register. */
3559 switch (GET_CODE (x))
3560 {
3561 case REG:
3562 fputs (reg_names[REGNO (x) + 1], stream);
3563 break;
3564 case MEM:
3565 csky_print_operand_address
3566 (stream, GET_MODE (x), XEXP (adjust_address (x, SImode, 4), 0));
3567 break;
3568 default:
3569 gcc_unreachable ();
3570 }
3571 break;
3572 case 'H':
3573 fprintf (stream, "%ld", (long)((INTVAL (x) & 0xFFFF0000) >> 16));
3574 break;
3575 default:
3576 switch (GET_CODE (x))
3577 {
3578 case REG:
3579 fputs (reg_names[REGNO (x)], stream);
3580 break;
3581 case MEM:
3582 output_address (GET_MODE (x), XEXP (x, 0));
3583 break;
3584 case UNSPEC:
3585 csky_output_pic_addr_const (stream, x, code);
3586 break;
db92bd22
GQ
3587 case CONST_DOUBLE:
3588 {
3589 char fpstr[20];
3590 real_to_decimal ( fpstr, CONST_DOUBLE_REAL_VALUE (x),
3591 sizeof (fpstr), 0, 1);
3592 fprintf (stream, "%s", fpstr);
3593 }
3594 break;
cc7232b9
J
3595 default:
3596 output_addr_const (stream, x);
3597 break;
3598 }
3599 break;
3600 }
3601}
3602
3603
3604
3605/* Implement TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS. */
3606
3607static bool
3608csky_allocate_stack_slots_for_args (void)
3609{
3610 /* Naked functions should not allocate stack slots for arguments. */
3611 return !CSKY_FUNCTION_IS_NAKED (get_csky_current_func_type ());
3612}
3613
3614
3615/* Can we generate a constant with a single instruction, without using
3616 lrw? */
3617
3618static int
3619const_ok_for_cskyv2 (HOST_WIDE_INT value)
3620{
3621 /* Try exact power of two. It can be generated by bgeni. */
3622 if (CSKY_CONST_OK_FOR_Ub (value))
3623 return 1;
3624
3625 /* Try exact power of two - 1. It can be generated by bmaski. */
3626 if (CSKY_CONST_OK_FOR_Uc (value) && value != -1)
3627 return 1;
3628
3629 /* Try if it can be generated by movi. */
3630 if (CSKY_CONST_OK_FOR_I (value))
3631 return 1;
3632
3633 /* The constant can be generated by movih.
3634 Notice that movih is a 32-bit instruction. */
3635 if (CSKY_CONST_OK_FOR_MOVIH (value))
3636 return 1;
3637
3638 return 0;
3639}
3640
3641
3642/* Tricks for synthesizing constants from values that can be directly
3643 manipulated by machine instructions. */
3644
3645enum csky_inline_const_type
3646{
3647 IC_UNINLINABLE = 0, /* Not inlineable */
3648 IC_SINGLE, /* Single instruction */
3649 IC_APPEND_NOT, /* Single instruction followed by a not */
3650 IC_APPEND_ADDI, /* Single insn followed by an addi */
3651 IC_APPEND_SUBI, /* Single insn followed by a subi */
3652 IC_BGENI_ADDI, /* Single insn(bgeni) followed by an addi */
3653 IC_BGENI_SUBI, /* Single insn(bgeni) followed by a subi */
3654 IC_APPEND_BSETI, /* Single insn followed by bseti */
3655 IC_APPEND_MOVI, /* Single insn followed by movi */
3656 IC_APPEND_BCLRI, /* Single insn followed by bclri */
3657 IC_APPEND_ROTLI, /* Single insn followed by rotli */
3658 IC_APPEND_LSLI, /* Single insn followed by lsli */
3659 IC_APPEND_IXH, /* Single insn followed by ixh */
3660 IC_APPEND_IXW /* Single insn followed by ixw */
3661};
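/* Worked examples (added illustration; assumes movi covers only small
   unsigned immediates and movih only constants with a zero low
   halfword): 0xfffffff0 inverts to 0xf, a bmaski constant, so it is
   classified IC_APPEND_NOT; 0x00100800 has a power-of-two upper part
   and is classified IC_BGENI_ADDI, i.e. bgeni of bit 20 followed by
   addi of 0x800.  */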
3662
3663
3664/* Try tricks to load a constant inline and return the trick number if
3665 success, or IC_UNINLINABLE. */
3666
3667static enum csky_inline_const_type
3668try_csky_constant_tricks (HOST_WIDE_INT value, HOST_WIDE_INT *x,
3669 HOST_WIDE_INT *y)
3670{
3671 HOST_WIDE_INT i, value_invert;
3672 unsigned HOST_WIDE_INT bit, shf, rot, lobits, hibits;
3673
3674 value &= 0xffffffff;
3675 value_invert = ~value & 0xffffffff;
3676
3677 if (const_ok_for_cskyv2 (value))
3678 {
3679 *x = value;
3680 return IC_SINGLE;
3681 }
3682
 3683 /* Since movih is 32 bits, do not use it here; better code may
3684 be generated later. */
3685 if (const_ok_for_cskyv2 (value_invert)
3686 && !CSKY_CONST_OK_FOR_MOVIH (value_invert))
3687 {
3688 *x = value_invert;
3689 return IC_APPEND_NOT;
3690 }
3691
 3692 /* One immediate-generating instruction, plus one 16-bit subi or addi. */
3693 for (i = 1; i <= 32; i++)
3694 {
3695 if (const_ok_for_cskyv2 (value - i)
3696 && !CSKY_CONST_OK_FOR_MOVIH (value - i))
3697 {
3698 *x = value - i;
3699 *y = i;
3700 return IC_APPEND_ADDI;
3701 }
3702
3703 if (const_ok_for_cskyv2 (value + i)
 3704 && !CSKY_CONST_OK_FOR_MOVIH (value + i))
3705 {
3706 *x = value + i;
3707 *y = i;
3708 return IC_APPEND_SUBI;
3709 }
3710 }
3711
3712 /* Generate bgeni + addi. */
3713 if (CSKY_CONST_OK_FOR_Ub (value & 0xfffff000))
3714 {
3715 *x = (value & 0xfffff000);
3716 *y = (value & 0xfff);
3717 return IC_BGENI_ADDI;
3718 }
3719
3720 /* Generate bgeni + subi. */
3721 lobits = value & 0xfff;
3722 hibits = (unsigned HOST_WIDE_INT)(value & 0xfffff000) + (1 << 12);
3723 if (exact_log2 (hibits) >= 1
3724 && exact_log2 (hibits) <= 30
3725 && lobits != 0)
3726 {
3727 *x = hibits;
3728 *y = (0x1000 - lobits);
3729 return IC_BGENI_SUBI;
3730 }
3731
 3732 /* One immediate-generating instruction, plus one bseti or bclri. */
3733 bit = 0x80000000ULL;
3734 for (i = 0; i <= 31; i++)
3735 {
3736 if (const_ok_for_cskyv2 (value & ~bit)
3737 && !CSKY_CONST_OK_FOR_MOVIH (value & ~bit))
3738 {
3739 *y = bit;
3740 *x = (value & ~bit);
3741 return IC_APPEND_BSETI;
3742 }
3743
3744 if (const_ok_for_cskyv2 (value | bit)
3745 && !CSKY_CONST_OK_FOR_MOVIH (value | bit))
3746 {
3747 *y = ~bit & 0xffffffff;
3748 *x = value | bit;
3749 return IC_APPEND_BCLRI;
3750 }
3751
3752 bit >>= 1;
3753 }
3754
3755  /* One immediate-generating instruction, and one rotli or lsli. */
3756 shf = value;
3757 rot = value;
3758 for (i = 1; i < 31; i++)
3759 {
3760 int c;
3761
3762 /* Rotate left. */
3763 c = rot << 31;
3764 rot >>= 1;
3765 rot &= 0x7FFFFFFF;
3766 rot |= c;
3767
3768 if (const_ok_for_cskyv2 (rot) && !CSKY_CONST_OK_FOR_MOVIH (rot))
3769 {
3770 *y = i;
3771 *x = rot;
3772 return IC_APPEND_ROTLI;
3773 }
3774
3775 /* Can't use logical shift when low order bit is one. */
3776 if (shf & 1)
3777 shf = 0;
3778 else
3779 shf >>= 1;
3780
3781 if (shf != 0 && const_ok_for_cskyv2 (shf)
3782 && !CSKY_CONST_OK_FOR_MOVIH (shf))
3783 {
3784 *y = i;
3785 *x = shf;
3786 return IC_APPEND_LSLI;
3787 }
3788 }
3789
3790  /* One immediate-generating instruction, and one ixh. */
3791 if (CSKY_ISA_FEATURE (E2)
3792 && (value % 3) == 0
3793 && const_ok_for_cskyv2 (value / 3)
3794 && !CSKY_CONST_OK_FOR_MOVIH (value / 3))
3795 {
3796 *x = value / 3;
3797 return IC_APPEND_IXH;
3798 }
3799
3800  /* One immediate-generating instruction, and one ixw. */
3801 if (CSKY_ISA_FEATURE (E2)
3802 && (value % 5) == 0
3803 && const_ok_for_cskyv2 (value / 5)
3804 && !CSKY_CONST_OK_FOR_MOVIH (value / 5))
3805 {
3806 *x = value / 5;
3807 return IC_APPEND_IXW;
3808 }
3809
3810 /* Generate movih + bseti. */
3811 if (CSKY_CONST_OK_FOR_Ub (value & 0xffff))
3812 {
3813 *x = value & 0xffff0000;
3814 *y = value & 0xffff;
3815 return IC_APPEND_BSETI;
3816 }
3817
3818 /* Generate movih + not. */
3819 if (CSKY_CONST_OK_FOR_MOVIH (value_invert))
3820 {
3821 *x = value_invert;
3822 return IC_APPEND_NOT;
3823 }
3824
3825  /* One movih, and one 16-bit addi or subi. */
3826 for (i = 1; i <= 32; i++)
3827 {
3828 if (CSKY_CONST_OK_FOR_MOVIH (value - i))
3829 {
3830 *x = value - i;
3831 *y = i;
3832 return IC_APPEND_ADDI;
3833 }
3834
3835 if (CSKY_CONST_OK_FOR_MOVIH (value + i))
3836 {
3837 *x = value + i;
3838 *y = i;
3839 return IC_APPEND_SUBI;
3840 }
3841 }
3842
3843 /* One movih, and one bseti or bclri. */
3844 bit = 0x80000000ULL;
3845 for (i = 0; i <= 31; i++)
3846 {
3847 if (CSKY_CONST_OK_FOR_MOVIH (value & ~bit))
3848 {
3849 *y = bit;
3850 *x = value & ~bit;
3851 return IC_APPEND_BSETI;
3852 }
3853
3854 if (CSKY_CONST_OK_FOR_MOVIH (value | bit))
3855 {
3856 *y = ~bit & 0xffffffff;
3857 *x = value | bit;
3858 return IC_APPEND_BCLRI;
3859 }
3860
3861 bit >>= 1;
3862 }
3863
3864 /* One movih, and one rotli or lsli. */
3865 shf = value;
3866 rot = value;
3867 for (i = 1; i < 31; i++)
3868 {
3869 int c;
3870
3871 /* Rotate left. */
3872 c = rot << 31;
3873 rot >>= 1;
3874 rot &= 0x7FFFFFFF;
3875 rot |= c;
3876
3877 if (CSKY_CONST_OK_FOR_MOVIH (rot))
3878 {
3879 *y = i;
3880 *x = rot;
3881 return IC_APPEND_ROTLI;
3882 }
3883
3884 /* Can't use logical shift when low order bit is one. */
3885 if (shf & 1)
3886 shf = 0;
3887 else
3888 shf >>= 1;
3889
3890 if (shf != 0 && CSKY_CONST_OK_FOR_MOVIH (shf))
3891 {
3892 *y = i;
3893 *x = shf;
3894 return IC_APPEND_LSLI;
3895 }
3896 }
3897
3898 return IC_UNINLINABLE;
3899}
3900
3901
3902/* Actually output a constant using a trick.
3903 FIXME: I think this would be better handled by a splitter than at the
3904 asm output level. */
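/* For example, 0xffffff00 cannot be loaded with a single movi, movih,
   bgeni, or bmaski, but its bitwise inverse 0xff can (movi), so the
   expected output here is "movi rd, 255" followed by "not rd, rd"
   (the IC_APPEND_NOT trick).  */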
3905
3906static const char *
3907csky_output_inline_const (machine_mode mode, rtx operands[])
3908{
3909 HOST_WIDE_INT x = 0, y = 0;
3910 enum csky_inline_const_type trick_type;
3911 rtx out_operands[3];
3912 char buf[256];
3913 char load_op[128];
3914 const char *dst_fmt;
3915 HOST_WIDE_INT value = INTVAL (operands[1]);
3916 int ivalue = (int) value;
3917 unsigned int uvalue = (unsigned int) value;
3918
3919 trick_type = try_csky_constant_tricks (value, &x, &y);
3920 /* lrw's are handled separately: Large inlinable constants never get
3921 turned into lrw's. Our caller uses try_csky_constant_tricks to back
3922 off to an lrw rather than calling this routine. */
3923 gcc_assert (trick_type != IC_UNINLINABLE);
3924
3925  /* Operands: 0 = dst, 1 = load immediate, 2 = adjust immediate. */
3926 out_operands[0] = operands[0];
3927 out_operands[1] = GEN_INT (x);
3928 if (trick_type != IC_SINGLE && trick_type != IC_APPEND_NOT)
3929 out_operands[2] = GEN_INT (y);
3930
3931 /* Select dst format based on mode. */
3932 if (mode == DImode && TARGET_BIG_ENDIAN)
3933 dst_fmt = "%R0";
3934 else
3935 dst_fmt = "%0";
3936
3937 /* Try movi16: 0~31,movi32: 0~65535. */
3938 if (CSKY_CONST_OK_FOR_I (x))
3939 sprintf (load_op, "movi\t%s, %%1", dst_fmt);
3940 /* Try exact power of two - 1. */
3941 else if (CSKY_CONST_OK_FOR_Uc (x))
3942 sprintf (load_op, "bmaski\t%s, %%N1", dst_fmt);
3943 /* Try movih. */
3944 else if (CSKY_CONST_OK_FOR_MOVIH (x))
3945 sprintf (load_op, "movih\t%s, %%H1", dst_fmt);
3946 else
3947 {
3948 sprintf (load_op, "BADMOVI-inline_const %s, %%1", dst_fmt);
3949 gcc_unreachable ();
3950 }
3951
3952 switch (trick_type)
3953 {
3954 case IC_SINGLE:
3955 strcpy (buf, load_op);
3956 break;
3957 /* Add instruction 'not'. */
3958 case IC_APPEND_NOT:
3959 sprintf (buf, "%s\n\tnot\t%s, %s\t// %d 0x%x", load_op, dst_fmt,
3960 dst_fmt, ivalue, uvalue);
3961 break;
3962 /* Add instruction 'addi'. */
3963 case IC_APPEND_ADDI:
3964 sprintf (buf, "%s\n\taddi\t%s, %s, %%2\t// %d 0x%x", load_op,
3965 dst_fmt, dst_fmt, ivalue, uvalue);
3966 break;
3967 /* Add instruction 'subi'. */
3968 case IC_APPEND_SUBI:
3969 sprintf (buf, "%s\n\tsubi\t%s, %s, %%2\t// %d 0x%x", load_op,
3970 dst_fmt, dst_fmt, ivalue, uvalue);
3971 break;
3972 /* Add instruction 'addi', the last instruction is bgeni. */
3973 case IC_BGENI_ADDI:
3974 sprintf (buf, "%s\n\taddi\t%s, %s, %%2\t// %d 0x%x", load_op,
3975 dst_fmt, dst_fmt, ivalue, uvalue);
3976 break;
3977 /* Add instruction 'subi', the last instruction is bgeni. */
3978 case IC_BGENI_SUBI:
3979 sprintf (buf, "%s\n\tsubi\t%s, %s, %%2\t// %d 0x%x", load_op,
3980 dst_fmt, dst_fmt, ivalue, uvalue);
3981 break;
3982 /* Add instruction 'bseti'. */
3983 case IC_APPEND_BSETI:
3984 sprintf (buf, "%s\n\tbseti\t%s, %s, %%P2\t// %d 0x%x", load_op,
3985 dst_fmt, dst_fmt, ivalue, uvalue);
3986 break;
3987 /* Add instruction 'movi'. */
3988 case IC_APPEND_MOVI:
3989 sprintf (buf, "%s\n\tmovi\t%s, %%2\t// %d 0x%x", load_op, dst_fmt,
3990 ivalue, uvalue);
3991 break;
3992 /* Add instruction 'bclri'. */
3993 case IC_APPEND_BCLRI:
3994 sprintf (buf, "%s\n\tbclri\t%s, %s, %%Q2\t// %d 0x%x", load_op,
3995 dst_fmt, dst_fmt, ivalue, uvalue);
3996 break;
3997 /* Add instruction 'rotli'. */
3998 case IC_APPEND_ROTLI:
3999 sprintf (buf, "%s\n\trotli\t%s, %s, %%2\t// %d 0x%x", load_op,
4000 dst_fmt, dst_fmt, ivalue, uvalue);
4001 break;
4002 /* Add instruction 'lsli'. */
4003 case IC_APPEND_LSLI:
4004 sprintf (buf, "%s\n\tlsli\t%s, %s, %%2\t// %d 0x%x", load_op,
4005 dst_fmt, dst_fmt, ivalue, uvalue);
4006 break;
4007 /* Add instruction 'ixh'. */
4008 case IC_APPEND_IXH:
4009 sprintf (buf, "%s\n\tixh\t%s, %s, %s\t// %d 0x%x", load_op,
4010 dst_fmt, dst_fmt, dst_fmt, ivalue, uvalue);
4011 break;
4012 /* Add instruction 'ixw'. */
4013 case IC_APPEND_IXW:
4014 sprintf (buf, "%s\n\tixw\t%s, %s, %s\t// %d 0x%x", load_op,
4015 dst_fmt, dst_fmt, dst_fmt, ivalue, uvalue);
4016 break;
4017 default:
4018 return "";
4019 }
4020
4021 output_asm_insn (buf, out_operands);
4022
4023 return "";
4024}
4025
4026/* This is a helper function for the Uo constraint for movsi patterns. */
4027
4028bool
4029csky_inlinable_constant (HOST_WIDE_INT value)
4030{
4031 HOST_WIDE_INT x, y;
4032 return (!(CSKY_TARGET_ARCH (CK802) || CSKY_TARGET_ARCH (CK801))
4033 && try_csky_constant_tricks (value, &x, &y));
4034}
4035
4036
4037/* Return true if the constant VAL can be expressed by an 8-bit constant
4038 with a shift value, filling in *BASE and *SHIFT. */
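/* For example, VAL == 0x1fe0 satisfies this with *BASE == 0xff and
   *SHIFT == 5, since 0xff << 5 == 0x1fe0.  */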
4039
4040bool
4041csky_shifted_imm8_constant (unsigned HOST_WIDE_INT val,
4042 unsigned int *base, unsigned int *shift)
4043{
4044 unsigned HOST_WIDE_INT mask = 0xff;
4045 int i;
4046 val = val & (unsigned HOST_WIDE_INT) 0xffffffffu;
4047 if (val == 0)
4048 return 0;
4049
4050 for (i = 0; i < 25; i++)
4051 if ((val & (mask << i)) == val)
4052 {
4053 if (base)
4054 *base = (unsigned int) (val >> i);
4055 if (shift)
4056 *shift = (unsigned int) i;
4057 return true;
4058 }
4059
4060 return false;
4061}
4062
4063
4064/* Output a move of a word or less value. */
4065
4066const char *
4067csky_output_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
4068 machine_mode mode ATTRIBUTE_UNUSED)
4069{
4070 rtx dst = operands[0];
4071 rtx src = operands[1];
4072 struct csky_address op0, op1;
4073
4074 if (REG_P (dst))
4075 {
4076      /* Register-to-register move. */
4077 if (REG_P (src))
4078 {
4079 int dstreg = REGNO (dst);
4080 int srcreg = REGNO (src);
4081
4082	  /* In little-endian mode the hi/lo registers exchange places so
4083	     that their DImode word ordering matches that of the other
4084	     general registers. */
4085 if (TARGET_BIG_ENDIAN)
4086 {
4087 if (dstreg == CSKY_HI_REGNUM)
4088 return "mthi\t%1";
4089 else if (dstreg == CSKY_LO_REGNUM)
4090 return "mtlo\t%1";
4091 else if (srcreg == CSKY_HI_REGNUM)
4092 return "mfhi\t%0";
4093 else if (srcreg == CSKY_LO_REGNUM)
4094 return "mflo\t%0";
4095 }
4096 else
4097 {
4098 if (dstreg == CSKY_HI_REGNUM)
4099 return "mtlo\t%1";
4100 else if (dstreg == CSKY_LO_REGNUM)
4101 return "mthi\t%1";
4102 else if (srcreg == CSKY_HI_REGNUM)
4103 return "mflo\t%0";
4104 else if (srcreg == CSKY_LO_REGNUM)
4105 return "mfhi\t%0";
4106 }
4107
4108 if (CSKY_VREG_P (dstreg) && CSKY_VREG_P (srcreg))
4109 {
4110 if (CSKY_ISA_FEATURE (fpv2_sf))
4111 return "fmovs\t%0, %1";
4112 else if (CSKY_ISA_FEATURE (fpv3_sf))
4113 return "fmov.32\t%0, %1";
4114 else
4115 gcc_unreachable ();
4116 }
4117 if (CSKY_VREG_P (dstreg))
4118 {
4119 if (CSKY_ISA_FEATURE (fpv2_sf))
4120 return "fmtvrl\t%0, %1";
4121 else if (CSKY_ISA_FEATURE (fpv3_sf))
4122 return "fmtvr.32.1\t%0, %1";
4123 else
4124 gcc_unreachable ();
4125 }
4126 if (CSKY_VREG_P (srcreg))
4127 {
4128 if (CSKY_ISA_FEATURE (fpv2_sf))
4129 return "fmfvrl\t%0, %1";
4130 else if (CSKY_ISA_FEATURE (fpv3_sf))
4131 return "fmfvr.32.1\t%0, %1";
4132 else
4133 gcc_unreachable ();
4134 }
4135 if (REGNO (src) == CSKY_CC_REGNUM)
4136 return "mvc\t%0";
4137 else
4138 return "mov\t%0, %1";
4139 }
4140      /* Memory-to-register move (load). */
4141 else if (GET_CODE (src) == MEM)
4142 {
4143 decompose_csky_address (XEXP (src, 0), &op1);
4144
4145 if (op1.index)
4146 switch (GET_MODE (src))
4147 {
4148 case E_HImode:
4149	case E_HFmode:
4150 return "ldr.h\t%0, %1";
4151 case E_QImode:
4152 return "ldr.b\t%0, %1";
4153 case E_SImode:
4154 case E_SFmode:
4155 if (CSKY_VREG_P (REGNO (dst)))
4156 {
4157 if (CSKY_ISA_FEATURE(fpv2_sf))
4158 return "fldrs\t%0, %1";
4159 else if (CSKY_ISA_FEATURE(fpv3_sf))
4160 return "fldr.32\t%0, %1";
4161 else
4162 gcc_unreachable ();
4163 }
4164 else
4165 return "ldr.w\t%0, %1";
4166 default:
4167 gcc_unreachable ();
4168 }
4169 /* Generate lrw rx, [LABEL]. This happens when the compiler
4170	 generates constant pool references and uses lrw to load the
4171	 constant from the constant pool. */
4172 else if (op1.label)
4173 return "lrw\t%0, %1";
4174 /* Generate lrs.w rx, [symbol@GOT/PLT]. */
4175 else if (flag_pic == 1 && op1.disp && GET_CODE (op1.disp) == UNSPEC)
4176 return "lrs.w\t%0, %1";
4177 else
4178 switch (GET_MODE (src))
4179 {
4180 case E_HImode:
4181	case E_HFmode:
4182 return "ld.h\t%0, %1";
4183 case E_QImode:
4184 return "ld.b\t%0, %1";
4185 case E_SFmode:
4186 case E_SImode:
4187 if (CSKY_VREG_P (REGNO (dst)))
4188 {
4189 if (CSKY_ISA_FEATURE(fpv2_sf))
4190 return "flds\t%0, %1";
4191 else if (CSKY_ISA_FEATURE(fpv3_sf))
4192 return "fld.32\t%0, %1";
4193 else
4194 gcc_unreachable ();
4195 }
4196 else
4197 return "ld.w\t%0, %1";
4198 default:
4199 gcc_unreachable ();
4200 }
4201 }
4202      /* Constant-to-register move (CONST_INT or SFmode CONST_DOUBLE). */
4203 else if (GET_CODE (src) == CONST_INT ||
4204 (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode))
4205 {
4206 HOST_WIDE_INT x, y;
4207 const REAL_VALUE_TYPE *d;
4208 long l;
4209
4210 if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
4211 {
4212 d = CONST_DOUBLE_REAL_VALUE (src);
4213 REAL_VALUE_TO_TARGET_SINGLE (*d, l);
4214 operands[1] = GEN_INT (l);
4215 src = operands[1];
4216 }
4217
4218 if (try_csky_constant_tricks (INTVAL (src), &x, &y))
4219 return csky_output_inline_const (SImode, operands);
4220 /* Return '#' to split it. */
4221 else if (CSKY_CONST_OK_FOR_T (INTVAL (src)))
4222 return "#";
4223 else
4224 return "lrw\t%0, %x1\t";
4225 }
4226 else if (TARGET_ANCHOR && GET_CODE (src) == SYMBOL_REF)
4227 {
4228 if (SYMBOL_REF_FUNCTION_P (src))
4229 return "lrw\t%0, %1@BTEXT";
4230 else
4231 return "lrw\t%0, %1@BDATA";
4232 }
4233 else if (GET_CODE (src) == UNSPEC
4234 && XINT (src, 1) == UNSPEC_PIC_SYMBOL_GRS)
4235 return "grs\t%0, %1";
4236 else
4237 return "lrw\t%0, %1";
4238 }
4239 else if (GET_CODE (dst) == MEM)
4240 {
4241 decompose_csky_address (XEXP (dst, 0), &op0);
4242
4243 if (op0.index)
4244 switch (GET_MODE (src))
4245 {
4246 case E_HImode:
4247 return "str.h\t%1, %0";
4248 case E_QImode:
4249 return "str.b\t%1, %0";
4250 case E_SFmode:
4251 case E_SImode:
4252 if (CSKY_VREG_P (REGNO (src)))
4253 {
4254 if (CSKY_ISA_FEATURE(fpv2_sf))
4255 return "fstrs\t%1, %0";
4256 else if (CSKY_ISA_FEATURE(fpv3_sf))
4257 return "fstr.32\t%1, %0";
4258 else
4259 gcc_unreachable ();
4260 }
4261 else
4262 return "str.w\t%1, %0";
4263 default:
4264 gcc_unreachable ();
4265 }
4266 else
4267 switch (GET_MODE (dst))
4268 {
4269 case E_HImode:
4270 return "st.h\t%1, %0";
4271 case E_QImode:
4272 return "st.b\t%1, %0";
4273 case E_SImode:
4274 case E_SFmode:
4275 if (CSKY_VREG_P (REGNO (src)))
4276 {
4277 if (CSKY_ISA_FEATURE(fpv2_sf))
4278 return "fsts\t%1, %0";
4279 else if (CSKY_ISA_FEATURE(fpv3_sf))
4280 return "fst.32\t%1, %0";
4281 else
4282 gcc_unreachable ();
4283 }
4284 else
4285 return "st.w\t%1, %0";
4286 default:
4287 gcc_unreachable ();
4288 }
4289 }
4290
4291 gcc_unreachable ();
4292}
4293
4294
4295/* Output a move of a word or less value. Specific for ck801. */
4296
4297const char *
4298csky_output_ck801_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
4299 machine_mode mode ATTRIBUTE_UNUSED)
4300{
4301 rtx dst = operands[0];
4302 rtx src = operands[1];
4303 struct csky_address op1;
4304
4305 if (REG_P (dst))
4306 {
4307 if (REG_P (src))
4308 return "mov\t%0, %1";
4309 else if (GET_CODE (src) == MEM)
4310 {
4311 decompose_csky_address (XEXP (src, 0), &op1);
4312
4313 /* Generate lrw rx, [LABEL]. This happens when the compiler
4314	     generates constant pool references and uses lrw to load the
4315	     constant from the constant pool. */
4316 if (op1.label)
4317 return "lrw\t%0, %1";
4318 else
4319 switch (GET_MODE (src))
4320 {
4321 case E_HImode:
4322 return "ld.h\t%0, %1";
4323 case E_QImode:
4324 return "ld.b\t%0, %1";
4325 case E_SFmode:
4326 case E_SImode:
4327 return "ld.w\t%0, %1";
4328 default:
4329 gcc_unreachable ();
4330 }
4331 }
4332 else if (GET_CODE (src) == CONST_INT)
4333 {
4334 if (REGNO (dst) > 7)
4335 return "lrw\t%0, %x1\t";
4336 else if (CSKY_CONST_OK_FOR_N (INTVAL (src) + 1))
4337 return "movi\t%0, %1";
4338 /* Return '#' to split it. */
4339 else if (CSKY_CONST_OK_FOR_T (INTVAL (src)))
4340 return "#";
4341 else if (csky_shifted_imm8_constant (INTVAL (src), NULL, NULL))
4342 return "#";
4343 else
4344 return "lrw\t%0, %x1\t";
4345 }
4346 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
4347 {
4348 const REAL_VALUE_TYPE *d;
4349 long l;
4350
4351 d = CONST_DOUBLE_REAL_VALUE (src);
4352 REAL_VALUE_TO_TARGET_SINGLE (*d, l);
4353 operands[1] = GEN_INT (l);
4354 src = operands[1];
4355
4356 if (CSKY_CONST_OK_FOR_N (INTVAL (src) + 1))
4357 return "movi\t%0, %1";
4358 else
4359 return "lrw\t%0, %x1\t";
4360 }
4361 else if (TARGET_ANCHOR && GET_CODE (src) == SYMBOL_REF)
4362 {
4363 if (SYMBOL_REF_FUNCTION_P (src))
4364 return "lrw\t%0, %1@BTEXT";
4365 else
4366 return "lrw\t%0, %1@BDATA";
4367 }
4368 else
4369 return "lrw\t%0, %1";
4370 }
4371 else if (GET_CODE (dst) == MEM)
4372 switch (GET_MODE (dst))
4373 {
4374 case E_HImode:
4375 return "st.h\t%1, %0";
4376 case E_QImode:
4377 return "st.b\t%1, %0";
4378 case E_SImode:
4379 case E_SFmode:
4380 return "st.w\t%1, %0";
4381 default:
4382 gcc_unreachable ();
4383 }
4384
4385 gcc_unreachable ();
4386}
4387
4388
4389/* Return a sequence of instructions to perform DI or DF move.
4390 Since the CSKY cannot move a DI or DF in one instruction, we have
4391 to take care when we see overlapping source and dest registers. */
4392
4393const char *
4394csky_output_movedouble (rtx operands[],
4395 machine_mode mode ATTRIBUTE_UNUSED)
4396{
4397 rtx dst = operands[0];
4398 rtx src = operands[1];
4399
4400 if (REG_P (dst))
4401 {
4402 if (REG_P (src))
4403 {
4404 int dstreg = REGNO (dst);
4405 int srcreg = REGNO (src);
4406
4407 if (CSKY_HILO_REG_P (srcreg))
4408 {
4409 if (TARGET_BIG_ENDIAN)
4410 return "mfhi\t%0\n\tmflo\t%R0";
4411 else
4412 return "mfhi\t%R0\n\tmflo\t%0";
4413 }
4414 else if (CSKY_HILO_REG_P (dstreg))
4415 {
4416 if (TARGET_BIG_ENDIAN)
4417 return "mthi\t%1\n\tmtlo\t%R1";
4418 else
4419 return "mthi\t%R1\n\tmtlo\t%1";
4420 }
4421 else if (CSKY_VREG_P (srcreg) && CSKY_VREG_P (dstreg))
4422 {
4423 if (CSKY_ISA_FEATURE(fpv2_df))
4424 return "fmovd\t%0, %1";
4425 else if (CSKY_ISA_FEATURE(fpv3_df))
4426 return "fmov.64\t%0, %1";
4427 else
4428 gcc_unreachable ();
4429 }
4430 else if (CSKY_VREG_P (srcreg))
4431 {
4432 /* Since the vector registers in fpuv2_soft processors
4433 like ck803f are 32 bits wide, just one insn is needed
4434 to complete the move operation. */
4435 if (TARGET_SOFT_FPU)
4436 return "fmfvrl\t%0, %1";
4437 else if (TARGET_BIG_ENDIAN)
4438 {
4439 if (CSKY_ISA_FEATURE(fpv2_df))
4440 return "fmfvrh\t%0, %1\n\tfmfvrl\t%R0, %1";
4441 else if (CSKY_ISA_FEATURE(fpv3_df))
4442 return "fmfvr.64\t%R0, %0, %1";
4443 else
4444 gcc_unreachable ();
4445 }
4446	    else
4447 {
4448 if (CSKY_ISA_FEATURE(fpv2_df))
4449 return "fmfvrh\t%R0, %1\n\tfmfvrl\t%0, %1";
4450 else if (CSKY_ISA_FEATURE(fpv3_df))
4451 return "fmfvr.64\t%0, %R0, %1";
4452 else
4453 gcc_unreachable ();
4454 }
4455 }
4456 else if (CSKY_VREG_P (dstreg))
4457 {
4458 if (TARGET_SOFT_FPU)
4459 return "fmtvrl\t%0, %1";
4460 else if (TARGET_BIG_ENDIAN)
4461 {
4462 if (CSKY_ISA_FEATURE(fpv2_df))
4463 return "fmtvrh\t%0, %1\n\tfmtvrl\t%0, %R1";
4464 else if (CSKY_ISA_FEATURE(fpv3_df))
4465 return "fmtvr.64\t%0, %R1, %1";
4466 else
4467 gcc_unreachable ();
4468 }
4469	    else
4470 {
4471 if (CSKY_ISA_FEATURE(fpv2_df))
4472 return "fmtvrh\t%0, %R1\n\tfmtvrl\t%0, %1";
4473 else if (CSKY_ISA_FEATURE(fpv3_df))
4474 return "fmtvr.64\t%0, %1, %R1";
4475 else
4476 gcc_unreachable ();
4477 }
4478 }
4479
4480	  /* Ensure the second source register is not overwritten before it is read. */
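	  /* For example, with source r2/r3 and destination r3/r4, r3 must
	     be copied to r4 before r2 is copied to r3.  */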
4481 if (srcreg + 1 == dstreg)
4482 return "mov\t%R0, %R1\n\tmov\t%0, %1";
4483 else
4484 return "mov\t%0, %1\n\tmov\t%R0, %R1";
4485 }
4486 else if (GET_CODE (src) == MEM)
4487 {
4488 rtx memexp = XEXP (src, 0);
4489 int dstreg = REGNO (dst);
4490 int basereg = -1;
4491 struct csky_address op0;
4492
4493 decompose_csky_address (XEXP (src, 0), &op0);
4494
4495 if (GET_CODE (memexp) == LABEL_REF
4496 || (GET_CODE (memexp) == CONST
4497 && GET_CODE (XEXP (memexp, 0)) == PLUS
4498 && GET_CODE (XEXP (XEXP (memexp, 0), 0)) == LABEL_REF))
4499 return "lrw\t%0, [%1]\n\tlrw\t%R0, [%R1]";
4500 else if (GET_CODE (memexp) == REG)
4501 basereg = REGNO (memexp);
4502 else if (GET_CODE (memexp) == PLUS)
4503 {
4504 if (GET_CODE (XEXP (memexp, 0)) == REG)
4505 basereg = REGNO (XEXP (memexp, 0));
4506 else if (GET_CODE (XEXP (memexp, 1)) == REG)
4507 basereg = REGNO (XEXP (memexp, 1));
4508 else
4509 gcc_unreachable ();
4510 }
4511 else
4512 gcc_unreachable ();
4513
4514
4515	  /* Loading into an FPU register (fpuv2/fpuv3). */
4516 if (CSKY_VREG_P (dstreg))
4517 {
4518 if (op0.index)
4519 {
4520 if (CSKY_ISA_FEATURE(fpv2_df))
4521 return "fldrd\t%0, %1";
4522 else if (CSKY_ISA_FEATURE(fpv3_df))
4523 return "fldr.64\t%0, %1";
4524 else
4525 gcc_unreachable ();
4526 }
4527	    else
4528 {
4529 if (CSKY_ISA_FEATURE(fpv2_df))
4530 return "fldd\t%0, %1";
4531 else if (CSKY_ISA_FEATURE(fpv3_df))
4532 return "fld.64\t%0, %1";
4533 else
4534 gcc_unreachable ();
4535 }
4536 }
4537 /* FIXME length attribute is wrong here. */
4538 if (dstreg == basereg)
4539 /* Just load them in reverse order. */
4540 return "ld.w\t%R0, %R1\n\tld.w\t%0, %1";
4541 else
4542 return "ld.w\t%0, %1\n\tld.w\t%R0, %R1";
4543 }
4544 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
4545 {
4546 split_double (src, operands + 2, operands + 3);
4547
4548 if (CSKY_CONST_OK_FOR_I (INTVAL (operands[2])))
4549 output_asm_insn ("movi\t%0, %2", operands);
4550 else if (CSKY_CONST_OK_FOR_Uc (INTVAL (operands[2])))
4551 output_asm_insn ("bmaski\t%0, %N2", operands);
4552 else if (CSKY_CONST_OK_FOR_Ub (INTVAL (operands[2])))
4553 output_asm_insn ("bgeni\t%0, %P2", operands);
4554 else
4555 output_asm_insn ("lrw\t%0, %2", operands);
4556
4557 if (CSKY_CONST_OK_FOR_I (INTVAL (operands[3])))
4558 output_asm_insn ("movi\t%R0, %3", operands);
4559 else if (CSKY_CONST_OK_FOR_Uc (INTVAL (operands[3])))
4560 output_asm_insn ("bmaski\t%R0, %N3", operands);
4561
4562 else if (CSKY_CONST_OK_FOR_Ub (INTVAL (operands[3])))
4563 output_asm_insn ("bgeni\t%R0, %P3", operands);
4564 else
4565 output_asm_insn ("lrw\t%R0, %3", operands);
4566
4567 return "";
4568 }
4569 else
4570 gcc_unreachable ();
4571 }
4572 else if (GET_CODE (dst) == MEM && GET_CODE (src) == REG)
4573 {
4574 rtx memexp = XEXP (dst, 0);
4575 int srcreg = REGNO (src);
4576 int basereg = -1;
4577 struct csky_address op0;
4578
4579 decompose_csky_address (XEXP (dst, 0), &op0);
4580
4581 if (GET_CODE (memexp) == REG)
4582 basereg = REGNO (memexp);
4583 else if (GET_CODE (memexp) == PLUS)
4584 {
4585 if (GET_CODE (XEXP (memexp, 0)) == REG)
4586 basereg = REGNO (XEXP (memexp, 0));
4587 else if (GET_CODE (XEXP (memexp, 1)) == REG)
4588 basereg = REGNO (XEXP (memexp, 1));
4589 else
4590 gcc_unreachable ();
4591 }
4592 else
4593 gcc_unreachable ();
4594
4595	  /* Storing from an FPU register (fpuv2/fpuv3). */
4596 if (CSKY_VREG_P (srcreg))
4597 {
4598 if (op0.index)
4599 {
4600 if (CSKY_ISA_FEATURE(fpv2_df))
4601 return "fstrd\t%1, %0";
4602 else if (CSKY_ISA_FEATURE(fpv3_df))
4603 return "fstr.64\t%1, %0";
4604 else
4605 gcc_unreachable ();
4606 }
4607	    else
4608 {
4609 if (CSKY_ISA_FEATURE(fpv2_df))
4610 return "fstd\t%1, %0";
4611 else if (CSKY_ISA_FEATURE(fpv3_df))
4612 return "fst.64\t%1, %0";
4613 else
4614 gcc_unreachable ();
4615 }
4616 }
4617 /* FIXME length attribute is wrong here. */
4618 if (srcreg == basereg)
4619	    /* Just store them in reverse order. */
4620 return "st.w\t%R1, %R0\n\tst.w\t%1, %0";
4621 else
4622 return "st.w\t%1, %0\n\tst.w\t%R1, %R0";
4623 }
4624 else
4625 gcc_unreachable ();
4626}
4627
4628
4629const char *
4630csky_output_ck801_movedouble (rtx operands[],
4631 machine_mode mode ATTRIBUTE_UNUSED)
4632{
4633 rtx dst = operands[0];
4634 rtx src = operands[1];
4635
4636 if (REG_P (dst))
4637 {
4638 if (REG_P (src))
4639 {
4640 int dstreg = REGNO (dst);
4641 int srcreg = REGNO (src);
4642
4643	  /* Ensure the second source register is not overwritten before it is read. */
4644 if (srcreg + 1 == dstreg)
4645 return "mov\t%R0, %R1\n\tmov\t%0, %1";
4646 else
4647 return "mov\t%0, %1\n\tmov\t%R0, %R1";
4648 }
4649 else if (GET_CODE (src) == MEM)
4650 {
4651 rtx memexp = XEXP (src, 0);
4652 int dstreg = REGNO (dst);
4653 int basereg = -1;
4654 struct csky_address op0;
4655
4656 decompose_csky_address (XEXP (src, 0), &op0);
4657
4658 if (GET_CODE (memexp) == LABEL_REF
4659 || (GET_CODE (memexp) == CONST
4660 && GET_CODE (XEXP (memexp, 0)) == PLUS
4661 && GET_CODE (XEXP (XEXP (memexp, 0), 0)) == LABEL_REF))
4662 return "lrw\t%0, [%1]\n\tlrw\t%R0, [%R1]";
4663 else if (GET_CODE (memexp) == REG)
4664 basereg = REGNO (memexp);
4665 else if (GET_CODE (memexp) == PLUS)
4666 {
4667 if (GET_CODE (XEXP (memexp, 0)) == REG)
4668 basereg = REGNO (XEXP (memexp, 0));
4669 else if (GET_CODE (XEXP (memexp, 1)) == REG)
4670 basereg = REGNO (XEXP (memexp, 1));
4671 else
4672 gcc_unreachable ();
4673 }
4674 else
4675 gcc_unreachable ();
4676
4677 /* FIXME length attribute is wrong here. */
4678 if (dstreg == basereg)
4679 /* Just load them in reverse order. */
4680 return "ld.w\t%R0, %R1\n\tld.w\t%0, %1";
4681 else
4682 return "ld.w\t%0, %1\n\tld.w\t%R0, %R1";
4683 }
4684 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
4685 {
4686 split_double (src, operands + 2, operands + 3);
4687
4688 if (REGNO (dst) <= 7
4689 && CSKY_CONST_OK_FOR_N (INTVAL (operands[2]) + 1))
4690 output_asm_insn ("movi\t%0, %2", operands);
4691 else
4692 output_asm_insn ("lrw\t%0, %2", operands);
4693
4694
4695 if (REGNO (dst) <= 6
4696 && CSKY_CONST_OK_FOR_N (INTVAL (operands[3]) + 1))
4697 output_asm_insn ("movi\t%R0, %3", operands);
4698 else
4699 output_asm_insn ("lrw\t%R0, %3", operands);
4700
4701 return "";
4702
4703
4704 }
4705 else
4706 gcc_unreachable ();
4707 }
4708 else if (GET_CODE (dst) == MEM && GET_CODE (src) == REG)
4709 {
4710 rtx memexp = XEXP (dst, 0);
4711 int srcreg = REGNO (src);
4712 int basereg = -1;
4713 struct csky_address op0;
4714
4715 decompose_csky_address (XEXP (dst, 0), &op0);
4716
4717 if (GET_CODE (memexp) == REG)
4718 basereg = REGNO (memexp);
4719 else if (GET_CODE (memexp) == PLUS)
4720 {
4721 if (GET_CODE (XEXP (memexp, 0)) == REG)
4722 basereg = REGNO (XEXP (memexp, 0));
4723 else if (GET_CODE (XEXP (memexp, 1)) == REG)
4724 basereg = REGNO (XEXP (memexp, 1));
4725 else
4726 gcc_unreachable ();
4727 }
4728 else
4729 gcc_unreachable ();
4730
4731 /* FIXME length attribute is wrong here. */
4732 if (srcreg == basereg)
4733	/* Just store them in reverse order. */
4734 return "st.w\t%R1, %R0\n\tst.w\t%1, %0";
4735 else
4736 return "st.w\t%1, %0\n\tst.w\t%R1, %R0";
4737 }
4738 else
4739 gcc_unreachable ();
4740}
4741
4742/* Calculate the instruction's length for moving double-word data. */
4743
4744int
4745csky_get_movedouble_length(rtx operands[])
4746{
4747 rtx dst = operands[0];
4748 rtx src = operands[1];
4749
4750 if (REG_P (dst))
4751 {
4752 if (REG_P (src))
4753 {
4754 int dstreg = REGNO (dst);
4755 int srcreg = REGNO (src);
4756
4757 if (CSKY_VREG_P (srcreg) && CSKY_VREG_P (dstreg))
4758 return 4;
4759 else
4760 return 8;
4761 }
4762 else if (GET_CODE (src) == MEM)
4763 {
4764 rtx memexp = XEXP (src, 0);
4765 int dstreg = REGNO (dst);
4766 struct csky_address op0;
4767 decompose_csky_address (XEXP (src, 0), &op0);
4768
4769 if (GET_CODE (memexp) == LABEL_REF)
4770 return 8;
4771 if (CSKY_VREG_P (dstreg))
4772 return 4;
4773 return 8;
4774 }
4775 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
4776 {
4777 split_double (src, operands + 2, operands + 3);
4778 if (CSKY_CONST_OK_FOR_N (INTVAL (operands[2]) + 1)
4779 && CSKY_CONST_OK_FOR_N (INTVAL (operands[3]) + 1)
4780 && REGNO (operands[0]) < 6)
4781 return 4;
4782 else
4783 return 8;
4784 }
4785 }
4786 else if (GET_CODE (dst) == MEM && GET_CODE (src) == REG)
4787 {
4788 rtx memexp = XEXP (dst, 0);
4789 int srcreg = REGNO (src);
4790 int offset = -1;
4791 if (CSKY_VREG_P (srcreg))
4792 return 4;
4793
4794 if (GET_CODE (memexp) == REG)
4795 offset = 0;
4796 else if (GET_CODE (memexp) == PLUS)
4797 {
4798 if (GET_CODE (XEXP (memexp, 0)) == REG)
4799 offset = INTVAL (XEXP (memexp, 1));
4800 else if (GET_CODE (XEXP (memexp, 1)) == REG)
4801 offset = INTVAL (XEXP (memexp, 0));
4802 else
4803 gcc_unreachable ();
4804 }
4805 else
4806 gcc_unreachable ();
4807
4808 if (srcreg <= 6 && offset <= 1020)
4809 return 4;
4810 else if ((srcreg == 7 && offset <= 1024) || (srcreg <= 7 && offset == 1024))
4811 return 6;
4812 else
4813 return 8;
4814 }
4815 else
4816 gcc_unreachable ();
4817
4818 return 0;
4819}
4820
4821/* Output floating-point load/store instructions for fpuv3. */
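/* For example, an SFmode load from a base+index address is emitted as
   "fldr.32", while a base+displacement load is emitted as "fld.32";
   stores use "fstr.32" and "fst.32" in the same way.  */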
4822
4823const char *
4824fpuv3_output_move (rtx *operands)
4825{
4826 rtx reg, mem, addr, ops[2];
4827 bool isload = REG_P (operands[0]);
4828
4829 const char *templ = "f%s%s.%s\t%%0, %%1";
4830 char buff[50];
4831 machine_mode mode;
4832
4833 reg = operands[isload ? 0 : 1];
4834 mem = operands[isload ? 1 : 0];
4835
4836 gcc_assert (REG_P (reg));
4837 gcc_assert (CSKY_VREG_P (REGNO (reg)));
4838 gcc_assert (MEM_P (mem));
4839
4840 mode = GET_MODE (reg);
4841 const char *type = mode == DFmode ? "64" :
4842 mode == SFmode ? "32" :
4843 mode == HFmode ? "16" :
4844 NULL;
4845 gcc_assert(type != NULL);
4846
4847 addr = XEXP (mem, 0);
4848 struct csky_address caddr;
4849 decompose_csky_address (addr, &caddr);
4850
4851 ops[0] = reg;
4852 ops[1] = mem;
4853 sprintf (buff, templ,
4854 isload ? "ld" : "st",
4855 caddr.index ? "r" : "",
4856 type);
4857 output_asm_insn (buff, ops);
4858
4859 return "";
4860}
4861
4862/* Check if a const_double can be used by a VFP fmovi instruction. */
4863
4864int
4865fpuv3_const_double_rtx (rtx x)
4866{
4867 REAL_VALUE_TYPE r, m;
4868 r = *CONST_DOUBLE_REAL_VALUE (x);
4869
4870 /* Fpuv3 doesn't support the following values. */
4871 if (REAL_VALUE_ISINF (r) || REAL_VALUE_ISNAN (r) || REAL_VALUE_MINUS_ZERO (r)
4872 || r.cl == rvc_zero)
4873 return 0;
4874
4875 /* Extract sign, exponent and mantissa. */
4876 int exponent;
4877 r = real_value_abs (&r);
4878 exponent = REAL_EXP (&r);
4879
4880 bool fail;
4881 unsigned HOST_WIDE_INT mantissa, mant_hi;
4882 unsigned HOST_WIDE_INT mask;
4883 int point_pos = 2 * HOST_BITS_PER_WIDE_INT - 1;
4884 real_ldexp (&m, &r, point_pos - exponent);
4885 wide_int w = real_to_integer (&m, &fail, HOST_BITS_PER_WIDE_INT * 2);
4886 mantissa = w.elt (0);
4887 mant_hi = w.elt (1);
4888
4889 exponent -= 1;
4890
4891 if (!IN_RANGE (exponent, -4, 11))
4892 return 0;
4893
4894 /* If there are bits set in the low part of the mantissa, these values are
4895 not supported. */
4896 if (mantissa != 0)
4897 return 0;
4898
4899 /* Now, make the mantissa contain the most-significant bits, and the
4900 point_pos indicates the number of these bits. */
4901 point_pos -= HOST_BITS_PER_WIDE_INT;
4902 mantissa = mant_hi;
4903
4904  /* We can only allow a mantissa of 9 significant bits, the top one of which is always 1. */
4905 mask = ((unsigned HOST_WIDE_INT)1 << (point_pos - 9)) - 1;
4906 if ((mantissa & mask) != 0)
4907 return 0;
4908
4909 return 1;
4910}
4911
4912
4913/* Split operands for an AND expression when OPERANDS[2] is a constant.
4914 Note operands[0] is marked earlyclobber in this case and can be
4915 overwritten. Return true if "DONE", false otherwise. */
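/* For example, an AND whose mask clears only bits 0 and 20 can be
   rewritten as two bclri instructions instead of loading the mask into
   a register.  */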
4916
4917bool
4918csky_split_and (rtx *operands)
4919{
4920 HOST_WIDE_INT mask = INTVAL (operands[2]);
4921 rtx not_value = GEN_INT (~mask);
4922 int i;
4923
4924 /* All zeros or all ones can be handled by a move instruction. */
4925 if (mask == 0)
4926 {
4927 emit_move_insn (operands[0], const0_rtx);
4928 return true;
4929 }
4930 if (mask == -1)
4931 {
4932 emit_move_insn (operands[0], operands[1]);
4933 return true;
4934 }
4935
4936 /* Check for constants that can be handled directly by the 32-bit andi
4937 instruction. */
4938 if (CSKY_ISA_FEATURE (E2) && csky_arith_O_operand (operands[2], SImode))
4939 return false;
4940
4941  /* Try to transform this into an andni instruction. */
4942 if (CSKY_ISA_FEATURE (E2) && csky_arith_O_operand (not_value, SImode))
4943 {
4944 emit_insn (gen_cskyv2_andnsi3 (operands[0], not_value, operands[1]));
4945 return true;
4946 }
4947
4948 /* If there are only one or two 0 bits in the constant, we can
4949 replace the operation with bclri instructions on those bits.
4950 Note CK801 has only the 16-bit bclri that operates on a single
4951 register, so we must count a move if we are post-reload. */
4952 if (popcount_hwi (~mask & 0xffffffff)
4953 <= (reload_completed && !CSKY_ISA_FEATURE (E2) ? 1 : 2))
4954 {
4955 rtx input = operands[1];
4956
4957 if (!CSKY_ISA_FEATURE (E2))
4958 {
4959 emit_move_insn (operands[0], input);
4960 input = operands[0];
4961 }
4962
4963 for (i = 0; i < 32; i++)
4964 if ((mask & (1 << i)) == 0x0)
4965 {
4966 emit_insn (gen_bclri (operands[0], input, GEN_INT (i)));
4967 input = operands[0];
4968 }
4969 return true;
4970 }
4971
4972 /* If the constant mask is outside the [0, 4095] range for
4973 constraint O, or if constraint O is not allowed (ck801),
4974 maybe the constant is a contiguous bit range that we can
4975 handle by bit extract (low bits) or shifts (high bits). */
4976 for (i = (CSKY_ISA_FEATURE (E2) ? 13 : 1); i < 32; i++)
4977 {
4978 if ((((HOST_WIDE_INT) 1) << i) - 1 == mask)
4979 {
4980 if (CSKY_ISA_FEATURE (2E3))
4981 emit_insn (gen_cskyv2_extzv (operands[0], operands[1],
4982 GEN_INT (i), const0_rtx));
4983 else
4984 {
4985 rtx shift = GEN_INT (32 - i);
4986 rtx reg = (reload_completed
4987 ? operands[0] : gen_reg_rtx (SImode));
4988
4989 emit_insn (gen_ashlsi3 (reg, operands[1], shift));
4990 emit_insn (gen_lshrsi3 (operands[0], reg, shift));
4991 }
4992 return true;
4993 }
4994 else if ((((HOST_WIDE_INT) 1) << i) - 1 == ~mask)
4995 {
4996 rtx shift = GEN_INT (i);
4997 rtx reg = (reload_completed
4998 ? operands[0] : gen_reg_rtx (SImode));
4999
5000 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
5001 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
5002 return true;
5003 }
5004 }
5005
5006 /* If the constant is a negative number, it seems better to use
5007 andn and copy the NOT_VALUE to a register instead of the
5008 original value, since the NOT_VALUE is always smaller and thus
5009 more likely to be representable as a small constant.
5010 This transformation can only be done before reload because
5011 it requires a temporary. Hopefully register allocation can get
5012 rid of the extra move required for CK801. */
5013 if (!reload_completed && INTVAL (operands[2]) < 0)
5014 {
5015 rtx reg = copy_to_mode_reg (SImode, not_value);
5016
5017 if (CSKY_ISA_FEATURE (E2))
5018 emit_insn (gen_cskyv2_andnsi3 (operands[0], reg, operands[1]));
5019 else
5020 {
5021 emit_move_insn (operands[0], operands[1]);
5022 emit_insn (gen_ck801_andnsi3 (operands[0], reg, operands[0]));
5023 }
5024 return true;
5025 }
5026
5027  /* If none of the above tricks work, move the constant
5028 to a register. We can clobber operands[0] as it is
5029 marked earlyclobber in the insn constraints, but then we have to
5030 swap operands 1 and 2 to match the constraints on the 2-operand
5031 16-bit and instruction. */
5032 if (reload_completed)
5033 {
5034 emit_move_insn (operands[0], operands[2]);
5035 operands[2] = operands[1];
5036 operands[1] = operands[0];
5037 }
5038 else
5039 operands[2] = copy_to_mode_reg (SImode, operands[2]);
5040 return false;
5041}
5042
5043/* Split operands for an IOR expression when OPERANDS[2] is a constant.
5044 Note operands[0] is marked earlyclobber in this case and can be
5045 overwritten. Return true if "DONE", false otherwise. */
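/* For example, an IOR with 0x100001 (setting only bits 0 and 20) can be
   rewritten as two bseti instructions instead of loading the constant
   into a register.  */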
5046
5047bool
5048csky_split_ior (rtx *operands)
5049{
5050 HOST_WIDE_INT mask = INTVAL (operands[2]);
5051 int i;
5052
5053 /* All zeros or all ones can be handled by a move instruction. */
5054 if (mask == 0)
5055 {
5056 emit_move_insn (operands[0], operands[1]);
5057 return true;
5058 }
5059 if (mask == -1)
5060 {
5061 emit_move_insn (operands[0], gen_int_mode (-1, SImode));
5062 return true;
5063 }
5064
5065 /* Check for constants that can be handled directly by the 32-bit ori
5066 instruction. */
5067 if (CSKY_ISA_FEATURE (E2) && csky_literal_I_operand (operands[2], SImode))
5068 return false;
5069
5070 /* If there are only one or two 1 bits in the value, we can replace
5071 the operation with bseti instructions to set those bits.
5072     Note CK801 has only the 16-bit bseti that operates on a single
5073 register, so we must count a move if we are post-reload. */
5074 if (popcount_hwi (mask & 0xffffffff)
5075 <= (reload_completed && !CSKY_ISA_FEATURE (E2) ? 1 : 2))
5076 {
5077 rtx input = operands[1];
5078
5079 if (!CSKY_ISA_FEATURE (E2))
5080 {
5081 emit_move_insn (operands[0], input);
5082 input = operands[0];
5083 }
5084
5085 for (i = 0; i < 32; i++)
5086 if (mask & (1 << i))
5087 {
5088 emit_insn (gen_bseti (operands[0], input, GEN_INT (i)));
5089 input = operands[0];
5090 }
5091 return true;
5092 }
5093
5094  /* If none of the above tricks work, move the constant
5095 to a register. We can clobber operands[0] as it is
5096 marked earlyclobber in the insn constraints, but then we have to
5097 swap operands 1 and 2 to match the constraints on the 2-operand
5098 16-bit ior instruction. */
5099 if (reload_completed)
5100 {
5101 emit_move_insn (operands[0], operands[2]);
5102 operands[2] = operands[1];
5103 operands[1] = operands[0];
5104 }
5105 else
5106 operands[2] = copy_to_mode_reg (SImode, operands[2]);
5107 return false;
5108}
5109
5110
5111/* Split operands for an XOR expression when OPERANDS[2] is a constant.
5112 Note operands[0] is marked earlyclobber in this case and can be
5113 overwritten. Return true if "DONE", false otherwise. */
5114
5115bool
5116csky_split_xor (rtx *operands)
5117{
5118 HOST_WIDE_INT mask = INTVAL (operands[2]);
5119
5120  /* All zeros can be turned into a move instruction. */
5121 if (mask == 0)
5122 {
5123 emit_move_insn (operands[0], operands[1]);
5124 return true;
5125 }
5126
5127 /* All ones can be turned into a bitwise not. */
5128 if (mask == -1)
5129 {
5130 if (CSKY_ISA_FEATURE (E2))
5131 emit_insn (gen_cskyv2_one_cmplsi2 (operands[0], operands[1]));
5132 else
5133 {
5134 emit_move_insn (operands[0], operands[1]);
5135 emit_insn (gen_ck801_one_cmplsi2 (operands[0], operands[0]));
5136 }
5137 return true;
5138 }
5139
5140 /* Check for constants that can be handled directly by the 32-bit xori
5141 instruction. */
5142 if (CSKY_ISA_FEATURE (E2) && csky_arith_O_operand (operands[2], SImode))
5143 return false;
5144
5145  /* If none of the above tricks work, move the constant
5146     to a register.  We can clobber operands[0] as it is
5147     marked earlyclobber in the insn constraints, but then we have to
5148     swap operands 1 and 2 to match the constraints on the 2-operand
5149     16-bit xor instruction. */
5150 if (reload_completed)
5151 {
5152 emit_move_insn (operands[0], operands[2]);
5153 operands[2] = operands[1];
5154 operands[1] = operands[0];
5155 }
5156 else
5157 operands[2] = copy_to_mode_reg (SImode, operands[2]);
5158 return false;
5159}
5160
5161
5162/* Return true if X is an address form involving a symbol or label ref. */
5163
5164bool
5165csky_symbolic_address_p (rtx x)
5166{
5167 switch (GET_CODE (x))
5168 {
5169 case SYMBOL_REF:
5170 case LABEL_REF:
5171 return 1;
5172 case CONST:
5173 x = XEXP (x, 0);
5174 return ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5175 || GET_CODE (XEXP (x, 0)) == LABEL_REF)
5176 && GET_CODE (XEXP (x, 1)) == CONST_INT);
5177 default:
5178 return 0;
5179 }
5180}
5181
5182
5183/* Emit a comparison instruction.
5184 Return true if an inverted comparison is generated. */
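/* For example, an EQ comparison is emitted as its inverse (cmpne), and
   the function returns true so that the caller uses the inverted
   condition.  */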
5185
5186bool
5187csky_emit_compare (enum rtx_code code, rtx op0, rtx op1)
5188{
5189 bool invert;
5190 rtx cc_reg = gen_rtx_REG (CCmode, CSKY_CC_REGNUM);
5191
5192 if (GET_MODE_CLASS(GET_MODE (op0)) == MODE_FLOAT)
5193 return csky_emit_compare_float(code, op0, op1);
5194
5195 if (GET_CODE (op1) == CONST_INT)
5196 {
5197 HOST_WIDE_INT val = INTVAL (op1);
5198
5199 switch (code)
5200 {
5201 case GTU:
5202 /* Unsigned (GTU 0) is the same as (NE 0); everything else is
5203 converted below to LEU (reversed cmphs). */
5204 if (val == 0)
5205 code = NE;
5206 /* Check whether (GTU A imm) can become (GEU A imm + 1). */
5207 else if (TARGET_MINI_REGISTERS
5208 ? CSKY_CONST_OK_FOR_J (val + 1)
5209 : CSKY_CONST_OK_FOR_Uk (val + 1))
5210 {
5211 op1 = GEN_INT (val + 1);
5212 code = GEU;
5213 }
5214 break;
5215 /* Check whether (LE A imm) can become (LT A imm + 1),
5216 or (GT A imm) can become (GE A imm + 1). */
5217 case GT:
5218 case LE:
5219 if (TARGET_MINI_REGISTERS
5220 ? CSKY_CONST_OK_FOR_J (val + 1)
5221 : CSKY_CONST_OK_FOR_Uk (val + 1))
5222 {
5223 op1 = GEN_INT (val + 1);
5224 code = code == LE ? LT : GE;
5225 }
5226 break;
5227
5228 default:
5229 break;
5230 }
5231 }
5232
5233 if (CONSTANT_P (op1) && GET_CODE (op1) != CONST_INT)
5234 op1 = force_reg (GET_MODE (op1), op1);
5235
5236  /* cmpnei: 0-31 (K immediate)
5237     cmplti: 1-32 (J immediate, 0 using btsti x,31). */
5238 invert = false;
5239 switch (code)
5240 {
5241 /* Use inverted condition, cmpne. */
5242 case EQ:
5243 code = NE;
5244 invert = true;
5245 /* Fall through. */
5246 /* Use normal condition, cmpne. */
5247 case NE:
5248 if (GET_CODE (op1) == CONST_INT
5249 && (TARGET_MINI_REGISTERS
5250 ? !csky_literal_K_operand (op1, SImode)
5251 : !csky_literal_I_operand (op1, SImode)))
5252 op1 = force_reg (SImode, op1);
5253 break;
5254
5255 /* Use inverted condition, reversed cmplt. */
5256 case LE:
5257 code = GT;
5258 invert = true;
5259 /* Fall through. */
5260 /* Use normal condition, reversed cmplt. */
5261 case GT:
5262 if (GET_CODE (op1) == CONST_INT)
5263 op1 = force_reg (SImode, op1);
5264 break;
5265
5266 /* Use inverted condition, cmplt. */
5267 case GE:
5268 code = LT;
5269 invert = true;
5270 /* Fall through. */
5271 /* Use normal condition, cmplt. */
5272 case LT:
5273      /* Comparison against zero is covered by btsti x, 31. */
5274 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0
5275 && (TARGET_MINI_REGISTERS
5276 ? !csky_literal_J_operand (op1, SImode)
5277 : !csky_literal_Uk_operand (op1, SImode)))
5278 op1 = force_reg (SImode, op1);
5279 break;
5280
5281 /* Use inverted condition, cmple. */
5282 case GTU:
5283 /* We coped with unsigned > 0 above. */
5284 gcc_assert (GET_CODE (op1) != CONST_INT || INTVAL (op1) != 0);
5285 code = LEU;
5286 invert = true;
5287 /* Fall through. */
5288 /* Use normal condition, reversed cmphs. */
5289 case LEU:
5290 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0)
5291 op1 = force_reg (SImode, op1);
5292 break;
5293
5294 /* Use inverted condition, cmphs. */
5295 case LTU:
5296 code = GEU;
5297 invert = true;
5298 /* Fall through. */
5299 /* Use normal condition, cmphs. */
5300 case GEU:
5301 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0
5302 && (TARGET_MINI_REGISTERS
5303 ? !csky_literal_J_operand (op1, SImode)
5304 : !csky_literal_Uk_operand (op1, SImode)))
5305 op1 = force_reg (SImode, op1);
5306 break;
5307
5308 default:
5309 break;
5310 }
5311
5312 emit_insn (gen_rtx_SET (cc_reg,
5313 gen_rtx_fmt_ee (code, CCmode, op0, op1)));
5314 return invert;
5315}
5316
5317/* Return true if push/pop can be used to save/restore all the registers
5318 indicated by MASK. We currently don't attempt to handle situations where
5319 some of the registers could be handled by push/pop and others saved and
5320 restored individually. */
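/* For example, when TARGET_PUSHPOP is set, a mask covering r4-r6 plus
   r15 can use push/pop, but a mask containing r4 and r6 without r5
   cannot, because the r4-r11 portion must be contiguous.  */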
5321
5322static bool
5323csky_can_use_pushpop (unsigned int mask)
5324{
5325 int i;
5326 int end_reg;
5327
5328 if (!TARGET_PUSHPOP)
5329 return false;
5330
5331 if (mask == 0)
5332 return false;
5333
5334 /* Regs 0-3, 12-14, 18-27, 29-31 cannot be in the mask. */
5335 if (mask & 0xeffc700f)
5336 return false;
5337
5338 /* Regs in the range r4-r11 must be contiguous. */
5339 for (end_reg = 0, i = 11; i >= 4; i--)
5340 {
5341 if (!end_reg && (mask & (1 << i)))
5342 end_reg = i;
5343 if (end_reg && !(mask & (1 << i)))
5344 return false;
5345 }
5346
5347 /* Likewise for regs in the range r16-r17. */
5348 for (end_reg = 0, i = 17; i >= 16; i--)
5349 {
5350 if (!end_reg && (mask & (1 << i)))
5351 end_reg = i;
5352 if (end_reg && !(mask & (1 << i)))
5353 return false;
5354 }
5355
5356 return true;
5357}
5358
5359
5360/* Return true if store/load multiple instructions can be used to
5361 save/restore at least some of the registers indicated by MASK.
5362 Unlike the push/pop case, this does handle partial ranges.
5363 Set *BR and *ER to the beginning and end (respectively) of the
5364 register range that can be handled. */
5365
5366static bool
5367csky_can_use_ldstm (int mask, int *br, int *er)
5368{
5369 int regno;
5370 int begin_reg = 0, end_reg = 0;
5371 int count = 0;
5372
5373 if (!TARGET_MULTIPLE_STLD)
5374 return false;
5375
5376 /* We'll only handle registers in the range 4-11, the contiguous range
5377     of callee-saved registers.  Higher-numbered registers are handled
5378 individually in addition to this, but we'll give up on doing ldstm
5379 entirely if we need to save/restore the low-numbered EH registers. */
5380 if (mask & 0xf)
5381 return false;
5382
5383 for (regno = 4; regno <= 11; regno++)
5384 {
5385 if (mask & 1 << regno)
5386 {
5387 if (!begin_reg)
5388 begin_reg = regno;
5389 end_reg = regno;
5390 count++;
5391 }
5392 else if (begin_reg)
5393 break;
5394 }
5395
5396 if (count >= CSKY_MIN_MULTIPLE_STLD && count <= CSKY_MAX_MULTIPLE_STLD)
5397 {
5398 if (br)
5399 *br = begin_reg;
5400 if (er)
5401 *er = end_reg;
5402 return true;
5403 }
5404 return false;
5405}
5406
5407
5408const char *
5409csky_output_return_instruction (void)
5410{
5411 unsigned long func_type = get_csky_current_func_type ();
5412
5413 if (CSKY_FUNCTION_IS_NAKED (func_type))
5414 return "";
5415 if (CSKY_FUNCTION_IS_INTERRUPT (func_type))
5416 return "ipop\n\tnir\n";
5417 else
5418 return "rts\n";
5419}
5420
5421
5422/* Adjust the stack pointer by OFFSET bytes. OFFSET is negative if this
5423 is in the prologue, positive if in the epilogue. This may require
5424 multiple instructions and/or use of CSKY_STACKADJUST_REGNUM as
5425 a scratch register. Emit CFA notes as appropriate. */
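/* For example, a small prologue adjustment becomes one or two subi
   instructions on the stack pointer, while an adjustment larger than
   2 * CSKY_MAX_SP_ADJUST first loads the size into
   CSKY_STACKADJUST_REGNUM and then uses a register subtract.  */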
5426static void
5427expand_csky_stack_adjust (int offset)
5428{
5429 rtx set;
5430 rtx_insn *insn;
5431 int size = (offset > 0 ? offset : -offset);
5432
5433 if (offset == 0)
5434 return;
5435
5436 /* If OFFSET is too large for addi/subi, load it into
5437 CSKY_STACKADJUST_REGNUM and use a register add/sub instead.
5438 This case is not mentioned in the ABI documentation, but it is
5439 supported by GDB prologue analysis provided that the instruction(s)
5440 to initialize CSKY_STACKADJUST_REGNUM appear directly before
5441 the sub. Depending on the value of OFFSET, this might be a
5442 lrw instruction or the "tricks" used by csky_output_inline_const to
5443 encode special-case integer constants. */
5444 if (size > CSKY_MAX_SP_ADJUST * 2)
5445 {
5446 rtx tmp, dwarf;
5447
5448 /* We should have reserved the scratch register already in
5449 csky_layout_stack_frame. */
5450 gcc_assert (cfun->machine->reg_size != 0
5451 && (cfun->machine->reg_mask
5452 & (1 << CSKY_STACKADJUST_REGNUM)));
5453
5454 /* Prevent the optimizer from reordering these instructions to
5455 keep GDB happy. */
5456 if (!flag_sched_prolog)
5457 emit_insn (gen_blockage ());
5458
5459 tmp = gen_rtx_REG (SImode, CSKY_STACKADJUST_REGNUM);
5460 emit_move_insn (tmp, GEN_INT (size));
5461
5462 if (offset > 0)
5463 set = gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp);
5464 else
5465 set = gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp);
5466 insn = emit_insn (set);
5467 RTX_FRAME_RELATED_P (insn) = 1;
5468 dwarf = gen_rtx_SET (stack_pointer_rtx,
5469 plus_constant (Pmode, stack_pointer_rtx, offset));
5470 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
5471
5472      /* More to keep GDB happy. */
5473 if (!flag_sched_prolog)
5474 emit_insn (gen_blockage ());
5475 }
5476
5477 /* Use one or two addi or subi insns to adjust stack. */
5478 else
5479 while (size)
5480 {
5481 int delta = (size > CSKY_MAX_SP_ADJUST
5482 ? CSKY_MAX_SP_ADJUST : size);
5483
5484 if (offset > 0)
5485 set = gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5486 GEN_INT (delta));
5487 else
5488 set = gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx,
5489 GEN_INT (delta));
5490 insn = emit_insn (set);
5491 RTX_FRAME_RELATED_P (insn) = 1;
5492 size -= delta;
5493 }
5494}
5495
5496
5497/* Generate and emit an insn that we will recognize as a push_multi.
5498 Unfortunately, since this insn does not reflect very well the actual
5499 semantics of the operation, we need to annotate the insn for the benefit
5500 of DWARF2 frame unwind information. DWARF_REGS_MASK is a subset of
5501 MASK for registers that should be annotated for DWARF2 frame unwind
5502 information. */
5503
5504static rtx
5505emit_csky_regs_push (unsigned long mask)
5506{
5507 int num_regs = 0;
5508 int i, j;
5509 rtx par;
5510 rtx dwarf;
5511 rtx tmp;
5512 int dwarf_par_index;
5513
5514 for (i = 0; i < CSKY_NGPR_REGS; i++)
5515 {
5516 if (mask & (1 << i))
5517 num_regs++;
5518 }
5519
5520  /* The reg range for push is r4-r11, r15-r17, r28. */
5521 gcc_assert (num_regs && num_regs <= 12);
5522
5523 /* For the body of the insn we are going to generate an UNSPEC in
5524 parallel with several USEs. This allows the insn to be recognized
5525 by the push_multi pattern in the csky.md file.
5526
5527 The body of the insn looks something like this:
5528
5529 (parallel [
5530 (set (mem:BLK (pre_modify:SI (reg:SI sp)
5531 (const_int:SI <num>)))
5532 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSHPOP_MULT))
5533 (use (reg:SI XX))
5534 (use (reg:SI YY))
5535 ...
5536 ])
5537
5538 For the frame note however, we try to be more explicit and actually
5539 show each register being stored into the stack frame, plus a (single)
5540 decrement of the stack pointer. We do it this way in order to be
5541 friendly to the stack unwinding code, which only wants to see a single
5542 stack decrement per instruction. The RTL we generate for the note looks
5543 something like this:
5544
5545 (sequence [
5546 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
5547 (set (mem:SI (reg:SI sp)) (reg:SI r4))
5548 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI XX))
5549 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI YY))
5550 ...
5551 ])
5552
5553 FIXME:: In an ideal world the PRE_MODIFY would not exist and
5554 instead we'd have a parallel expression detailing all
5555 the stores to the various memory addresses so that debug
5556 information is more up-to-date. Remember however while writing
5557 this to take care of the constraints with the push instruction.
5558
5559 Note also that this has to be taken care of for the VFP registers.
5560
5561 For more see PR43399. */
5562
5563 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
5564 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
5565 dwarf_par_index = 1;
5566
5567 for (i = 0; i < CSKY_NGPR_REGS; i++)
5568 if (mask & (1 << i))
5569 {
5570 rtx reg = gen_rtx_REG (SImode, i);
5571 rtx addr = plus_constant (Pmode, stack_pointer_rtx, -4 * num_regs);
5572 tmp = gen_frame_mem (BLKmode,
5573 gen_rtx_PRE_MODIFY (Pmode,
5574 stack_pointer_rtx, addr));
5575 XVECEXP (par, 0, 0)
5576 = gen_rtx_SET (tmp,
5577 gen_rtx_UNSPEC (BLKmode,
5578 gen_rtvec (1, reg),
5579 UNSPEC_PUSHPOP_MULT));
5580 tmp = gen_rtx_SET (gen_frame_mem (SImode, stack_pointer_rtx),
5581 reg);
5582 RTX_FRAME_RELATED_P (tmp) = 1;
5583 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
5584
5585 break;
5586 }
5587
5588 for (j = 1, i++; j < num_regs; i++)
5589 if (mask & (1 << i))
5590 {
5591 rtx reg = gen_rtx_REG (SImode, i);
5592 rtx addr = plus_constant (Pmode, stack_pointer_rtx, 4 * j);
5593 tmp = gen_rtx_SET (gen_frame_mem (SImode, addr), reg);
5594 RTX_FRAME_RELATED_P (tmp) = 1;
5595 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
5596 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
5597 j++;
5598 }
5599
5600 par = emit_insn (par);
5601
5602 tmp = gen_rtx_SET (stack_pointer_rtx,
5603 plus_constant (Pmode, stack_pointer_rtx, -4 * num_regs));
5604 RTX_FRAME_RELATED_P (tmp) = 1;
5605 XVECEXP (dwarf, 0, 0) = tmp;
5606
5607 add_reg_note (par, REG_FRAME_RELATED_EXPR, dwarf);
5608 RTX_FRAME_RELATED_P (par) = 1;
5609
5610 return par;
5611}
5612
5613
5614/* Generate and emit an insn pattern that we will recognize as a pop_multi.
5615 SAVED_REGS_MASK shows which registers need to be restored.
5616
5617 Unfortunately, since this insn does not reflect very well the actual
5618 semantics of the operation, we need to annotate the insn for the benefit
5619 of DWARF2 frame unwind information. */
5620
5621static void
5622emit_csky_regs_pop (unsigned long mask)
5623{
5624 int num_regs = 0;
5625 int i, j;
5626 rtx par;
5627
5628 for (i = 0; i < CSKY_NGPR_REGS; i++)
5629 if (mask & (1 << i))
5630 num_regs++;
5631
5632  /* The reg range for pop is r4-r11, r15-r17, r28. */
5633 gcc_assert (num_regs && num_regs <= 12);
5634
5635 /* The first element is (return),
5636 the second element is
5637 (set (reg:SI 'first reg number')
5638 (unspec:SI [(mem)] UNSPEC_PUSHPOP_MULT),
5639     the remaining elements are (use (reg:SI 'rest reg number')),
5640     so the length should be the number of registers to be popped
5641     plus one. */
5642 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs + 1));
5643
5644 XVECEXP (par, 0, 0) = ret_rtx;
5645
5646 for (i = 0; i < CSKY_NGPR_REGS; i++)
5647 if (mask & (1 << i))
5648 {
5649 rtx reg = gen_rtx_REG (SImode, i);
5650 rtx addr = plus_constant (Pmode, stack_pointer_rtx, 4 * num_regs);
5651 rtx tmp = gen_frame_mem (SImode,
5652 gen_rtx_POST_MODIFY (Pmode,
5653 stack_pointer_rtx, addr));
5654 XVECEXP (par, 0, 1)
5655 = gen_rtx_SET (reg,
5656 gen_rtx_UNSPEC (SImode,
5657 gen_rtvec (1, tmp),
5658 UNSPEC_PUSHPOP_MULT));
5659 break;
5660 }
5661
5662 for (j = 2, i++; j < (num_regs + 1); i++)
5663 if (mask & (1 << i))
5664 {
5665 rtx reg = gen_rtx_REG (SImode, i);
5666 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
5667 j++;
5668 }
5669
5670 par = emit_jump_insn (par);
5671}
5672
5673
5674/* Generate the function prologue. */
5675
5676void
5677csky_expand_prologue (void)
5678{
5679 rtx_insn *insn;
5680 unsigned long func_type = get_csky_current_func_type ();
5681 unsigned int reg_mask;
5682 int reg_size;
5683
5684 if (CSKY_FUNCTION_IS_NAKED (func_type))
5685 {
5686 if (flag_stack_usage_info)
5687 current_function_static_stack_size = 0;
5688 return;
5689 }
5690
5691 csky_layout_stack_frame ();
5692 reg_mask = cfun->machine->reg_mask;
5693 reg_size = cfun->machine->reg_size;
5694
5695 /* Adjust stack pointer past argument overflow area. */
5696 if (cfun->machine->arg_size != 0)
5697 {
5698 int offset = cfun->machine->arg_size;
5699 expand_csky_stack_adjust (- offset);
5700
5701 /* If we have a parameter passed partially in regs and partially
5702 in memory, the registers will have been stored to memory already
5703	 in function.cc.  So we only need to copy varargs from registers
5704 to stack. */
5705 if (cfun->machine->uses_anonymous_args)
5706 {
5707 int rn = CSKY_FIRST_PARM_REGNUM + CSKY_NPARM_REGS - 1;
5708 for (offset -= 4; offset >= 0; offset -= 4, rn--)
5709 {
5710 rtx dst = gen_frame_mem (SImode,
5711 plus_constant (Pmode,
5712 stack_pointer_rtx,
5713 offset));
5714 insn = emit_move_insn (dst, gen_rtx_REG (SImode, rn));
5715 RTX_FRAME_RELATED_P (insn) = 1;
5716 }
5717 }
5718 }
5719
5720  /* Push callee-saved registers to the stack. */
5721 if (csky_can_use_pushpop (reg_mask))
5722 emit_csky_regs_push (reg_mask);
5723 else if (reg_size)
5724 {
5725 int sreg = -1, ereg = -1;
5726 bool stm_p = csky_can_use_ldstm (reg_mask, &sreg, &ereg);
5727 int stm_regs = stm_p ? ereg - sreg + 1 : 0;
5728 int stm_size = stm_regs * 4;
5729
5730 /* First adjust the SP to the low end of the register save area. */
5731 expand_csky_stack_adjust (- reg_size);
5732
5733 /* Emit individual register saves. Even if we are going to emit an
5734 stm, we may need to save individual registers above that too. */
5735 if (reg_size > stm_size)
5736 {
5737 int offset = reg_size - 4;
5738 int regno = 31;
5739 for ( ; regno > ereg; regno--)
5740 if (reg_mask & (1 << regno))
5741 {
5742 rtx dst = gen_rtx_MEM (SImode,
5743 plus_constant (Pmode,
5744 stack_pointer_rtx,
5745 offset));
5746 rtx insn = emit_insn (gen_movsi (dst,
5747 gen_rtx_REG (SImode, regno)));
5748 RTX_FRAME_RELATED_P (insn) = 1;
5749 if (offset == stm_size)
5750 break;
5751 offset -= 4;
5752 }
5753 }
5754
5755 /* If possible, emit a stm to do a bulk store of sequential
5756 registers to the stack. Note that it is an error in the ABI
5757 documentation that it doesn't list stm as a valid prologue
5758 instruction. */
5759 if (stm_p)
5760 {
5761 rtx par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (stm_regs));
5762 int regno, slot;
5763 for (regno = sreg, slot = 0; regno <= ereg; regno++, slot++)
5764 {
5765 rtx reg = gen_rtx_REG (SImode, regno);
5766 rtx addr = plus_constant (Pmode, stack_pointer_rtx, slot * 4);
5767 rtx set = gen_rtx_SET (gen_frame_mem (SImode, addr), reg);
5768 RTX_FRAME_RELATED_P (set) = 1;
5769 XVECEXP (par, 0, slot) = set;
5770 }
5771 insn = emit_insn (par);
5772 RTX_FRAME_RELATED_P (insn) = 1;
5773 }
5774 }
5775
5776 /* Initialize hard frame pointer, if necessary. It points at the base
5777 of the register save area. */
5778 if (frame_pointer_needed)
5779 {
d19a00c6 5780 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
5781 RTX_FRAME_RELATED_P (insn) = 1;
5782 }
5783
5784 /* Reserve stack space for locals and outgoing args. */
5785 expand_csky_stack_adjust (- cfun->machine->reg_offset);
5786
5787 /* Put the GOT address in reg_gb for PIC, using R13 as a scratch.
5788 See section 4.7.1 in the ABI documentation,
5789 "Function Prologue for PIC". */
5790 if (flag_pic && (reg_mask & (1 << PIC_OFFSET_TABLE_REGNUM)))
5791 {
5792 rtx l1 = gen_label_rtx ();
5793 rtx grs_label = gen_rtx_LABEL_REF (SImode, l1);
5794 rtx reg_gb = gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM);
5795 rtx reg_temp = gen_rtx_REG (SImode, 13);
5796
5797 rtx tmp0_unspec = gen_rtx_UNSPEC (Pmode,
5798 gen_rtvec (1, grs_label),
5799 UNSPEC_PIC_SYMBOL_GOTPC_GRS);
5800 rtx tmp1_unspec = gen_rtx_UNSPEC (Pmode,
5801 gen_rtvec (1, grs_label),
5802 UNSPEC_PIC_SYMBOL_GOTPC);
5803
5804 emit_insn (gen_prologue_get_pc (tmp0_unspec));
5805 emit_move_insn (reg_temp, tmp1_unspec);
5806 emit_insn (gen_addsi3 (reg_gb, reg_gb, reg_temp));
5807 }
5808
5809 if (flag_stack_usage_info)
5810 current_function_static_stack_size = cfun->machine->frame_size;
5811
5812 if (!flag_sched_prolog)
5813 emit_insn (gen_blockage ());
5814}
5815
5816void
5817csky_expand_epilogue (void)
5818{
5819 unsigned long func_type = get_csky_current_func_type ();
5820 unsigned int reg_mask;
5821 int reg_size;
5822 int adjust;
5823 rtx_insn *insn;
5824
5825 if (!flag_sched_prolog)
5826 emit_insn (gen_blockage ());
5827
5828 if (CSKY_FUNCTION_IS_NAKED (func_type))
5829 {
5830 emit_jump_insn (gen_simple_return ());
5831 return;
5832 }
5833
5834 /* Get the frame information. */
5835 csky_layout_stack_frame ();
5836 reg_mask = cfun->machine->reg_mask;
5837 reg_size = cfun->machine->reg_size;
5838 adjust = reg_size + cfun->machine->arg_size;
5839
5840 /* Restore the SP to the base of the register save area. */
5841 if (frame_pointer_needed)
5842 {
d19a00c6 5843 insn = emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
5844 RTX_FRAME_RELATED_P (insn) = 1;
5845 }
5846 else
5847 expand_csky_stack_adjust (cfun->machine->reg_offset);
5848
5849 /* Restore the callee-saved registers. */
5850 if (csky_can_use_pushpop (reg_mask)
5851 && cfun->machine->arg_size == 0
5852 && !CSKY_FUNCTION_IS_INTERRUPT (func_type)
5853 && !crtl->calls_eh_return)
5854 {
5855 /* Pop includes an implicit return, so we are done. */
5856 emit_csky_regs_pop (reg_mask);
5857 return;
5858 }
5859 else if (reg_size)
5860 {
5861 int sreg = -1, ereg = -1;
5862 bool ldm_p = csky_can_use_ldstm (reg_mask, &sreg, &ereg);
5863 int ldm_regs = ldm_p ? ereg - sreg + 1 : 0;
5864 int ldm_size = ldm_regs * 4;
5865
5866 /* Emit individual register loads. Even if we are going to emit an
5867 ldm, we may need to load individual registers above that too. */
5868 if (reg_size > ldm_size)
5869 {
5870 int offset = reg_size - 4;
5871 int regno = 31;
5872 for ( ; regno > ereg; regno--)
5873 if (reg_mask & (1 << regno))
5874 {
5875 rtx src = gen_frame_mem (SImode,
5876 plus_constant (Pmode,
5877 stack_pointer_rtx,
5878 offset));
5879 rtx reg = gen_rtx_REG (SImode, regno);
5880 insn = emit_move_insn (reg, src);
5881 RTX_FRAME_RELATED_P (insn) = 1;
5882 add_reg_note (insn, REG_CFA_RESTORE, reg);
5883 if (offset == ldm_size)
5884 break;
5885 offset -= 4;
5886 }
5887 }
5888
5889 /* If possible, emit a ldm to do a bulk load of sequential
5890 registers from the stack. */
5891 if (ldm_p)
5892 {
5893 rtx par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (ldm_regs));
5894 int regno, slot;
5895 for (regno = sreg, slot = 0; regno <= ereg; regno++, slot++)
5896 {
5897 rtx reg = gen_rtx_REG (SImode, regno);
5898 rtx addr = plus_constant (Pmode, stack_pointer_rtx, slot * 4);
5899 rtx set = gen_rtx_SET (reg, gen_frame_mem (SImode, addr));
5900 XVECEXP (par, 0, slot) = set;
5901 }
5902 insn = emit_insn (par);
5903 RTX_FRAME_RELATED_P (insn) = 1;
5904 for (regno = sreg; regno <= ereg; regno++)
5905 {
5906 rtx reg = gen_rtx_REG (SImode, regno);
5907 add_reg_note (insn, REG_CFA_RESTORE, reg);
5908 }
5909 }
5910 }
5911
5912 /* Emit the final stack pointer adjustment to deallocate the saved
5913 registers and incoming argument area. */
5914 expand_csky_stack_adjust (adjust);
5915
5916 /* Extra stack adjustment for exception handler return. */
5917 if (crtl->calls_eh_return)
5918 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5919 EH_RETURN_STACKADJ_RTX));
5920
5921 /* Now we can return. */
5922 emit_jump_insn (gen_simple_return ());
5923}
5924
5925
5926static void
5927csky_output_function_prologue (FILE *f)
5928{
5929 unsigned long func_type = get_csky_current_func_type ();
5930
5931 switch ((int) CSKY_FUNCTION_TYPE (func_type))
5932 {
5933 default:
5934 case CSKY_FT_NORMAL:
5935 break;
5936 case CSKY_FT_INTERRUPT:
5937 {
5938 asm_fprintf (f, "\t# Interrupt Service Routine.\n");
5939 asm_fprintf (f, "\tnie\n\tipush\n");
5940 break;
5941 }
5942 case CSKY_FT_FIQ:
5943 asm_fprintf (f, "\t# Fast Interrupt Service Routine.\n");
5944 break;
5945 case CSKY_FT_EXCEPTION:
5946 asm_fprintf (f, "\t# CSKY Exception Handler.\n");
5947 break;
5948 case CSKY_FT_NAKED:
5949 asm_fprintf (f, "\t# Naked Function: prologue and epilogue \
5950 provided by programmer.\n");
5951 return;
5952 }
5953
5954 csky_layout_stack_frame ();
5955
5956 /* Generate .stack_size function-name, size for callgraph;
5957 the default stack size is 0. */
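  /* For example, a function "foo" with a 64-byte frame is annotated with
     ".stack_size foo, 64".  */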
5958 if (TARGET_STACK_SIZE && cfun->machine->frame_size > 0)
5959 {
5960 gcc_assert (current_function_decl != NULL);
5961 const char *func_name =
5962 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (current_function_decl));
5963 if (func_name[0] == '*')
5964 asm_fprintf (f, "\t.stack_size %s, %d\n",
5965 &func_name[1], cfun->machine->frame_size);
5966 else
5967 asm_fprintf (f, "\t.stack_size %s, %d\n",
5968 func_name, cfun->machine->frame_size);
5969 }
5970}
5971
5972
5973static void
5974csky_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED)
5975{
5976
5977}
5978
5979
5980/* Helper for csky_eh_return splitter: store the call frame exception
5981 handler address in lr. */
5982void
5983csky_set_eh_return_address (rtx source, rtx scratch)
5984{
5985 HOST_WIDE_INT delta = 0;
5986 rtx basereg, addr;
5987 unsigned int reg_mask;
5988
5989 csky_layout_stack_frame ();
5990 reg_mask = cfun->machine->reg_mask;
5991
5992 if (reg_mask & (1 << CSKY_LR_REGNUM))
5993 {
5994 /* Find LR in the stack frame. */
5995 int i = 0;
5996
5997 if (frame_pointer_needed)
5998 {
d19a00c6 5999 basereg = hard_frame_pointer_rtx;
6000 delta = 0;
6001 }
6002 else
6003 {
6004 basereg = stack_pointer_rtx;
6005 delta = cfun->machine->reg_offset;
6006 }
6007
6008 /* At this point, (basereg + delta) points at the low end of
6009 the reg save area. Regs are saved sequentially from low
6010 to high from this address. */
6011 for (i = 0; i < CSKY_LR_REGNUM; i++)
6012 if (reg_mask & (1 << i))
6013 delta += 4;
6014
6015 if ((CSKY_TARGET_ARCH (CK801) && delta >= CSKY_LD16_MAX_OFFSET (Pmode))
6016 || delta >= CSKY_LD32_MAX_OFFSET (Pmode))
6017 {
6018 emit_insn (gen_movsi (scratch, GEN_INT (delta)));
6019 emit_insn (gen_addsi3 (scratch, scratch, basereg));
6020 addr = scratch;
6021 }
6022 else
6023 addr = plus_constant (Pmode, basereg, delta);
6024 emit_move_insn (gen_frame_mem (Pmode, addr), source);
6025 }
6026 else
6027 emit_move_insn (gen_rtx_REG (Pmode, CSKY_LR_REGNUM), source);
6028}
6029
6030/* Return TRUE if X references a SYMBOL_REF. */
6031
6032bool
6033csky_symbol_mentioned_p (rtx x)
6034{
6035 const char *fmt;
6036 int i;
6037
6038 if (GET_CODE (x) == SYMBOL_REF)
6039 return true;
6040
6041 fmt = GET_RTX_FORMAT (GET_CODE (x));
6042 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6043 {
6044 if (fmt[i] == 'E')
6045 {
6046 int j;
6047
6048 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6049 if (csky_symbol_mentioned_p (XVECEXP (x, i, j)))
6050 return true;
6051 }
6052 else if (fmt[i] == 'e' && csky_symbol_mentioned_p (XEXP (x, i)))
6053 return true;
6054 }
6055 return false;
6056}
6057
6058
6059/* Return TRUE if X references a LABEL_REF. */
6060
6061bool
6062csky_label_mentioned_p (rtx x)
6063{
6064 const char *fmt;
6065 int i;
6066
6067 if (GET_CODE (x) == LABEL_REF)
6068 return true;
6069
6070 fmt = GET_RTX_FORMAT (GET_CODE (x));
6071 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6072 {
6073 if (fmt[i] == 'E')
6074 {
6075 int j;
6076
6077 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6078 if (csky_label_mentioned_p (XVECEXP (x, i, j)))
6079 return true;
6080 }
6081 else if (fmt[i] == 'e' && csky_label_mentioned_p (XEXP (x, i)))
6082 return true;
6083 }
6084
6085 return false;
6086}
6087
6088
6089static bool
6090tls_unspec_mentioned_p (rtx x)
6091{
6092 switch (GET_CODE (x))
6093 {
6094 case CONST:
6095 return tls_unspec_mentioned_p (XEXP (x, 0));
6096
6097 case UNSPEC:
6098 if (XINT (x, 1) == UNSPEC_TLS)
6099 return true;
6100
6101 /* Fall through. */
6102 default:
6103 return false;
6104 }
6105}
6106
6107
6108/* Implement LEGITIMATE_PIC_OPERAND_P. */
db92bd22 6109
6110bool
6111csky_legitimate_pic_operand_p (rtx x)
6112{
6113 if (tls_unspec_mentioned_p (x))
6114 return true;
6115 if (csky_symbol_mentioned_p (x) || csky_label_mentioned_p (x))
6116 return false;
6117 return true;
6118}
6119
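/* Legitimize PIC address ORIG, using scratch register REG if it is
   nonnull (otherwise a new pseudo is allocated).  When GOTREL_P is set,
   prefer GOT-relative references for symbols; otherwise PLT/branch-style
   references may be used.  Return the legitimized address.  */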
6120rtx
6121csky_legitimize_pic_address (rtx orig, rtx reg, bool gotrel_p)
6122{
6123 rtx pic_reg = gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM);
6124 bool optimize_p = false;
6125
6126 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
6127 {
6128 rtx pic_ref, address, rtx_tmp;
6129 rtx insn;
6130 rtx pic_reg = gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM);
6131 int subregs = 0;
6132
6133 if (reg == 0)
6134 {
6135 gcc_assert (can_create_pseudo_p ());
6136 reg = gen_reg_rtx (Pmode);
6137 subregs = 1;
6138 }
6139
6140 if (subregs)
6141 address = gen_reg_rtx (Pmode);
6142 else
6143 address = reg;
6144
6145 if (GET_CODE (orig) == SYMBOL_REF && !SYMBOL_REF_LOCAL_P (orig))
6146 {
 6147	  /* When GOTREL_P is set, generate sym@GOT; otherwise generate sym@PLT. */
6148 rtx_tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, orig),
6149 (gotrel_p
6150 ? UNSPEC_PIC_SYMBOL_GOT
6151 : UNSPEC_PIC_SYMBOL_PLT));
6152 optimize_p = gotrel_p;
6153 if (flag_pic != 1)
6154 {
6155 emit_move_insn (address, rtx_tmp);
6156 rtx_tmp = gen_rtx_MULT (Pmode, address, GEN_INT (1));
6157 }
6158 pic_ref = gen_const_mem (Pmode,
6159 gen_rtx_PLUS (Pmode, pic_reg, rtx_tmp));
6160 }
6161 else
6162 {
6163 /* bsr symbol */
6164 if (flag_pic == 1 && !gotrel_p)
6165 {
6166 pic_ref = gen_rtx_UNSPEC (Pmode,
6167 gen_rtvec (1, orig),
6168 UNSPEC_PIC_SYMBOL_BSR);
6169 return pic_ref;
6170 }
6171 /* grs rx, symbol */
6172 else if (flag_pic == 1 && (GET_CODE (orig) == SYMBOL_REF)
6173 && SYMBOL_REF_FUNCTION_P (orig))
6174 {
6175 pic_ref = gen_rtx_UNSPEC (Pmode,
6176 gen_rtvec (1, orig),
6177 UNSPEC_PIC_SYMBOL_GRS);
6178 return pic_ref;
6179 }
6180 /* lrw rx, symbol@GOTOFF; add rx, rx, gb */
6181 else
6182 {
6183 rtx_tmp = gen_rtx_UNSPEC (Pmode,
6184 gen_rtvec (1, orig),
6185 UNSPEC_PIC_SYMBOL_GOTOFF);
6186 emit_move_insn (address, rtx_tmp);
6187 pic_ref = gen_rtx_PLUS (Pmode, address, pic_reg);
6188 optimize_p = true;
6189 }
6190 }
6191
6192 insn = emit_move_insn (reg, pic_ref);
 6193      /* Put a REG_EQUAL note on this insn,
 6194	  so that it can be optimized by the loop optimizer. */
6195 if (optimize_p)
6196 set_unique_reg_note (insn, REG_EQUAL, orig);
6197
6198 return reg;
6199 }
6200 else if (GET_CODE (orig) == CONST)
6201 {
6202 rtx base, offset;
6203
6204 if (GET_CODE (XEXP (orig, 0)) == PLUS
6205 && XEXP (XEXP (orig, 0), 1) == pic_reg)
6206 return orig;
6207
6208 if (reg == 0)
6209 {
6210 gcc_assert (can_create_pseudo_p ());
6211 reg = gen_reg_rtx (Pmode);
6212 }
6213
6214 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
6215
6216 base = csky_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
6217 reg, gotrel_p);
6218 offset = csky_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
6219 base == reg ? 0 : reg, gotrel_p);
6220
6221 if (GET_CODE (offset) == CONST_INT)
6222 return plus_constant (Pmode, base, INTVAL (offset));
6223
6224 return gen_rtx_PLUS (Pmode, base, offset);
6225 }
6226
6227 return orig;
6228}
6229
6230
6231/* Functions to output assembly code for a function call. */
6232
6233char *
6234csky_output_call (rtx *operands, int index)
6235{
6236 static char buffer[20];
6237 rtx addr = operands[index];
6238
6239 if (REG_P (addr))
6240 sprintf (buffer, "jsr\t%%%d", index);
6241 else if (flag_pic && (GET_CODE (addr) == UNSPEC))
6242 sprintf (buffer, "bsr\t%%%d", index);
6243 else
6244 sprintf (buffer, "jbsr\t%%%d", index);
6245
6246 return buffer;
6247}
6248
6249
6250/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.
6251 Output assembler code for a block containing the constant parts
6252 of a trampoline, leaving space for the variable parts.
6253 Note that STATIC_CHAIN_REGNUM is t1 (aka r12) on ck801 and
6254 t1 (r13) otherwise. */
6255
6256static void
6257csky_asm_trampoline_template (FILE *f)
6258{
6259 if (CSKY_ISA_FEATURE (2E3))
6260 {
6261 fprintf (f, "\tlrw\t%s, [.Lstatic_chain]\n",
6262 reg_names[STATIC_CHAIN_REGNUM]);
6263 fprintf (f, "\tjmpi\t[.Lfunc_address]\n");
6264 /* 2 32-bit insns = 8 bytes. */
6265 }
6266 else if (CSKY_TARGET_ARCH (CK801))
6267 {
6268 /* It's hard to provide general support for trampolines on this
6269 core. We need a register other than the one holding the
6270 static chain (r13) to hold the function pointer for the
6271 indirect jump to it. But ck801 has such a limited register set
6272 there is no other call-clobbered scratch register available -- in
6273 particular, this core does not have r12, which we use for the
6274 ck802 case below. If we use a callee-saved register like r4,
6275 saving the old value on the stack screws up the stack frame
6276 if there are overflow arguments pushed on the stack
6277 by the caller. In theory we could test for that and handle
6278 limited cases with parameters that all fit in r0-r3 with no
6279 stack overflow, but punt for now. */
6280 sorry ("Nested function trampolines not supported on CK801.");
6281 }
6282 else
6283 {
6284 fprintf (f, "\tlrw\t%s, [.Lfunc_address]\n",
6285 reg_names[CSKY_T1_REGNUM]);
6286 fprintf (f, "\tlrw\t%s, [.Lstatic_chain]\n",
6287 reg_names[STATIC_CHAIN_REGNUM]);
6288 fprintf (f, "\tjmp\t%s\n",
6289 reg_names[CSKY_T1_REGNUM]);
6290 /* To align constant pool on a word boundary. */
6291 fprintf (f, "\t.align 2\n");
6292 /* 2 32-bit lrw insns + 16-bit jump + 16-bit pad = 12 bytes. */
6293 }
6294
6295 fprintf (f, ".Lstatic_chain:\n");
6296 fprintf (f, "\t.long 0\n");
6297 fprintf (f, ".Lfunc_address:\n");
6298 fprintf (f, "\t.long 0\n");
6299 /* 2 words of constant pool = 8 bytes. */
6300}
6301
6302/* Worker function for TARGET_TRAMPOLINE_INIT. */
6303
6304static void
6305csky_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
6306{
6307 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6308 rtx mem, a_tramp;
6309 int pool = TRAMPOLINE_SIZE - 8;
6310
6311 emit_block_move (m_tramp, assemble_trampoline_template (),
6312 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
6313
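  /* The template ends with a two-word constant pool; POOL is its byte
     offset within the trampoline.  Store the static chain value in the
     first word and the target function's address in the second.  */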
6314 mem = adjust_address (m_tramp, SImode, pool);
6315 emit_move_insn (mem, chain_value);
6316 mem = adjust_address (m_tramp, SImode, pool + 4);
6317 emit_move_insn (mem, fnaddr);
6318
6319 a_tramp = XEXP (m_tramp, 0);
6320 maybe_emit_call_builtin___clear_cache (a_tramp,
6321 plus_constant (Pmode,
6322 a_tramp,
6323 TRAMPOLINE_SIZE));
6324}
6325
6326
6327/* Emit a comparison insn for float values.
6328 Return true if the comparison is inverted. */
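/* For example, an EQ comparison is emitted as NE and ORDERED as
   UNORDERED, and the return value tells the caller to invert the sense
   of the final test.  */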
6329
6330bool
6331csky_emit_compare_float (enum rtx_code code, rtx op0, rtx op1)
6332{
6333 rtx cc_reg = gen_rtx_REG (CCmode, CSKY_CC_REGNUM);
6334 bool invert;
6335 machine_mode mode = GET_MODE (op1);
6336
6337 if (op1 != CONST0_RTX (mode))
6338 op1 = force_reg (mode, op1);
6339
6340 invert = false;
db92bd22 6341
6342 switch (code)
6343 {
6344 case EQ:
6345 code = NE;
6346 invert = true;
6347 break;
cc7232b9 6348 case GT:
cc7232b9 6349 case LT:
db92bd22 6350 case LE:
eae270be 6351 if (op1 == CONST0_RTX (mode) && TARGET_SUPPORT_FPV2)
db92bd22 6352 op1 = force_reg (mode, op1);
6353 break;
6354 case ORDERED:
6355 code = UNORDERED;
6356 invert = true;
6357 break;
6358
6359 default:
6360 break;
6361 }
6362
6363 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_fmt_ee (code, CCmode, op0, op1)));
6364
6365 return invert;
6366}
6367
6368/* Support for the Q or W memory constraint. Returns true if OP is a MEM
6369 RTX with an address consisting of base + index or base + displacement. */
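/* For instance, a base + scaled-index address (scale 1, 2, 4, or 8)
   satisfies both 'Q' and 'W'; a base + displacement address satisfies
   only 'Q', with the displacement a multiple of 4 no larger than 1020;
   a bare base register also satisfies 'Q'.  */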
6370
cc7232b9 6371bool
db92bd22 6372csky_valid_mem_constraint_operand (rtx op, const char *constraint)
6373{
6374 struct csky_address addr;
6375
6376 if (GET_CODE (op) != MEM)
6377 return false;
6378
6379 if (!decompose_csky_address (XEXP (op, 0), &addr))
6380 return false;
6381
6382 /* Verify base register. */
6383 if (!is_csky_address_register_rtx_p (addr.base, 0))
6384 return false;
6385
6386 /* Verify index operand. */
db92bd22 6387 if (addr.index && (constraint[0] == 'Q' || constraint[0] == 'W'))
6388 {
6389 if (!is_csky_address_register_rtx_p (addr.index, 0))
6390 return false;
6391
6392 if (addr.scale == 1 || addr.scale == 2 || addr.scale == 4
6393 || addr.scale == 8)
6394 return true;
6395
6396 return false;
6397 }
6398 /* Verify disp operand. */
db92bd22 6399 else if (addr.disp && constraint[0] == 'Q')
6400 {
6401 rtx disp = addr.disp;
6402
6403 if (!CONST_INT_P (disp))
6404 return false;
6405
6406 if (((unsigned) INTVAL (disp) % 4) == 0
6407 && (unsigned) INTVAL (disp) <= (unsigned) 1020)
6408 return true;
6409
6410 return false;
6411 }
6412 else if (constraint[0] == 'Q')
6413 /* Single reg is valid for 'Q'. */
6414 return true;
6415
6416 return false;
6417}
6418
6419
6420/* Returns the (interrupt) function type of the current
6421 function, or CSKY_FT_UNKNOWN if the type cannot be determined. */
6422
6423static unsigned long
6424csky_isr_value (tree argument)
6425{
6426 const isr_attribute_entry *ptr;
6427 const char *arg;
6428
6429 /* No argument - default to IRQ. */
6430 if (argument == NULL_TREE)
6431 return CSKY_FT_ISR;
6432
6433 /* Get the value of the argument. */
6434 if (TREE_VALUE (argument) == NULL_TREE
6435 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
6436 return CSKY_FT_UNKNOWN;
6437
6438 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
6439
6440 /* Check it against the list of known arguments. */
6441 for (ptr = isr_attribute_map; ptr->arg != NULL; ptr++)
6442 if (strcmp (arg, ptr->arg) == 0)
6443 return ptr->return_value;
6444
6445 /* An unrecognized interrupt type. */
6446 return CSKY_FT_UNKNOWN;
6447}
6448
6449/* Handle an attribute requiring a FUNCTION_DECL;
6450 arguments as in struct attribute_spec.handler. */
6451
6452static tree
6453csky_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
6454 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6455{
6456 if (TREE_CODE (*node) != FUNCTION_DECL)
6457 {
6458 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6459 name);
6460 *no_add_attrs = true;
6461 }
6462
6463 return NULL_TREE;
6464}
6465
6466/* Handle an "interrupt" or "isr" attribute;
6467 arguments as in struct attribute_spec.handler. */
6468
6469static tree
6470csky_handle_isr_attribute (tree *node, tree name, tree args, int flags,
6471 bool *no_add_attrs)
6472{
6473
6474 if (!TARGET_ISTACK)
6475 {
a3f9f006 6476 warning (OPT_Wattributes, "%qE attribute ignored without %<-mistack%>",
6477 name);
6478 *no_add_attrs = true;
6479 return NULL_TREE;
6480 }
6481
6482 if (DECL_P (*node))
6483 {
6484 if (TREE_CODE (*node) != FUNCTION_DECL)
6485 {
6486 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6487 name);
6488 *no_add_attrs = true;
6489 }
6490 }
6491 else
6492 {
9907413a 6493 if (FUNC_OR_METHOD_TYPE_P (*node))
6494 {
6495 if (csky_isr_value (args) == CSKY_FT_UNKNOWN)
6496 {
6497 warning (OPT_Wattributes, "%qE attribute ignored", name);
6498 *no_add_attrs = true;
6499 }
6500 }
6501 else if (TREE_CODE (*node) == POINTER_TYPE
9907413a 6502 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (*node))
6503 && csky_isr_value (args) != CSKY_FT_UNKNOWN)
6504 {
6505 *node = build_variant_type_copy (*node);
6506 TREE_TYPE (*node) = build_type_attribute_variant (TREE_TYPE (*node),
6507 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
6508 *no_add_attrs = true;
6509 }
6510 else if (flags & ((int)ATTR_FLAG_DECL_NEXT
6511 | (int)ATTR_FLAG_FUNCTION_NEXT
6512 | (int)ATTR_FLAG_ARRAY_NEXT))
6513 {
6514 *no_add_attrs = true;
6515 return tree_cons (name, args, NULL_TREE);
6516 }
6517 else
6518 warning (OPT_Wattributes, "%qE attribute ignored", name);
6519 }
6520 return NULL_TREE;
6521}
6522
6523
6524/* Implement TARGET_REGISTER_MOVE_COST: compute extra cost of moving data
6525 between one register class and another. */
6526
6527int
6528csky_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6529 reg_class_t from, reg_class_t to)
6530{
6531#define GR_REG_CLASS_P(CLASS) \
6532 ((CLASS) == GENERAL_REGS || (CLASS) == MINI_REGS || (CLASS) == SP_REGS \
6533 || (CLASS) == LOW_REGS)
6534
6535#define HILO_REG_CLASS_P(CLASS) \
b8a61853 6536 ((CLASS) == HILO_REGS)
6537
6538#define V_REG_CLASS_P(CLASS) \
6539 ((CLASS) == V_REGS)
6540
6541 if (V_REG_CLASS_P (from) && V_REG_CLASS_P (to))
6542 return 2;
6543
6544 if ((V_REG_CLASS_P (from) && GR_REG_CLASS_P (to))
6545 || (GR_REG_CLASS_P (from) && V_REG_CLASS_P (to)))
6546 return 6;
6547
6548 if ((HILO_REG_CLASS_P (from) && GR_REG_CLASS_P (to))
6549 || (GR_REG_CLASS_P (from) && HILO_REG_CLASS_P (to)))
6550 return 16;
6551
6552 if (HILO_REG_CLASS_P (from) && HILO_REG_CLASS_P (to))
6553 return 32;
6554
6555 if ((HILO_REG_CLASS_P (from) && V_REG_CLASS_P (to))
6556 || (V_REG_CLASS_P (from) && HILO_REG_CLASS_P (to)))
6557 return 64;
6558
6559 return 2;
6560}
6561
6562
6563/* Implement TARGET_MEMORY_MOVE_COST: compute the cost of moving data
6564 between registers and memory. */
6565
6566int
6567csky_memory_move_cost (machine_mode mode, reg_class_t rclass,
6568 bool in)
6569{
6570 return (4 + memory_move_secondary_cost (mode, rclass, in));
6571}
6572
6573
6574/* TARGET_RTX_COSTS helper for ck801/ck802. */
6575
6576static bool
6577ck802_ck801_rtx_costs (rtx x, int code, int outer_code, int *total,
6578 bool speed)
6579{
6580 machine_mode mode = GET_MODE (x);
6581 switch (code)
6582 {
 6583    /* Accessing memory costs quite a lot: one insn plus one per word
	    transferred. */
6584 case MEM:
6585 *total = COSTS_N_INSNS (1 + CSKY_NUM_REGS (mode));
6586 return false;
6587 case DIV:
6588 case UDIV:
6589 case MOD:
6590 case UMOD:
6591 *total = 100;
6592 return true;
6593
6594 case ROTATE:
6595 case ROTATERT:
6596 case ASHIFT:
6597 case LSHIFTRT:
6598 case ASHIFTRT:
6599 if (speed)
6600 *total = 2;
6601 else
6602 *total = COSTS_N_INSNS (1);
6603 return false;
6604
6605 case MINUS:
6606 case PLUS:
6607 *total = COSTS_N_INSNS (CSKY_NUM_REGS (mode));
6608 return false;
6609
6610 case AND:
6611 {
6612 enum rtx_code subcode = GET_CODE (XEXP (x, 1));
6613
6614 /* If subcode is "not", we'll try to combine it into e.g. "andn"
6615 instruction, so give AND itself zero cost. */
6616 if (subcode == NOT)
6617 {
6618 *total = 0;
6619 return false;
6620 }
6621 }
6622 /* Fall through. */
6623 case XOR:
6624 case IOR:
6625 *total = COSTS_N_INSNS (CSKY_NUM_REGS (mode));
6626 return false;
6627
6628 case MULT:
6629 /* FIXME: is ixw supported on ck801/ck802? */
6630 /* We can use "ix.h/w" insn to replace multiply by 2 or 4.
6631 "ix.h/w" is a 32-bit insn, so let its cost be a little less than
6632 "mult" insn. */
6633 if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)))
6634 {
6635 unsigned HOST_WIDE_INT m
6636 = (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)));
6637 if ((m == 2 || m == 4) && outer_code == PLUS)
6638 {
6639 *total = 2;
6640 return true;
6641 }
6642 else
6643 {
 6644	      /* Because mult is relatively slow compared to other operations,
 6645		 we try to use other insns when optimizing for speed.
 6646		 When optimizing for size, give it a lower cost. */
6647 if (speed)
6648 {
6649 *total = COSTS_N_INSNS (10 * CSKY_NUM_REGS (mode));
6650 return true;
6651 }
6652 int cycle = 0;
6653 while (m)
6654 {
6655 m >>= 2;
6656 cycle++;
6657 }
6658 *total = COSTS_N_INSNS (1) + cycle;
6659 return false;
6660 }
6661 }
6662 if (!speed)
6663 *total = COSTS_N_INSNS (1);
6664 return false;
6665
6666 case NEG:
6667 /* Usually, we use subtract from 0 to substitute for neg, and
6668 it costs 1 extra insn to move 0 to a register. */
6669 *total = COSTS_N_INSNS (2 * CSKY_NUM_REGS (mode));
6670 return false;
6671
6672 case NOT:
6673 *total = COSTS_N_INSNS (CSKY_NUM_REGS (mode));
6674 return false;
6675
6676 case COMPARE:
6677 *total = COSTS_N_INSNS (1);
6678 return false;
6679
6680 case SIGN_EXTEND:
6681 case ZERO_EXTEND:
6682 *total = COSTS_N_INSNS (CSKY_NUM_REGS (mode));
6683 return false;
6684
6685 case SIGN_EXTRACT:
6686 case ZERO_EXTRACT:
6687 if (REG_P (XEXP (x, 0))
6688 && CONST_INT_P (XEXP (x, 1))
6689 && CONST_INT_P (XEXP (x, 2))
6690 && INTVAL (XEXP (x, 1)) == 8
6691 && INTVAL (XEXP (x, 2)) % 8 == 0)
6692 {
6693 *total = COSTS_N_INSNS (1);
6694 return true;
6695 }
6696 *total = COSTS_N_INSNS (CSKY_NUM_REGS (mode));
6697 return false;
6698
6699 case CONST_INT:
6700 {
6701 unsigned HOST_WIDE_INT t = (unsigned HOST_WIDE_INT) (INTVAL (x));
6702
6703 if (outer_code == COMPARE)
6704 {
6705 if (t < 0x10000)
6706 *total = 0;
6707 else
6708 *total = COSTS_N_INSNS (2);
6709 }
6710 else if (outer_code == AND || outer_code == IOR || outer_code == XOR)
6711 {
6712 /* "andi,xori,ori" are 32-bit insns, so let it cost a
6713 little more. */
6714 if (t < 0x1000)
6715 {
6716 /* Try replacing "andi" by "sextb/h", so let it cost more. */
6717 if (outer_code == AND && (t == 0xff || t == 0xffff))
6718 {
6719 *total = 8;
6720 return true;
6721 }
6722 *total = 2;
6723 }
6724 else if (t < 0x10000)
6725 *total = COSTS_N_INSNS (1);
6726 else
6727 *total = COSTS_N_INSNS (2);
6728 }
6729 else if (outer_code == PLUS || outer_code == MINUS)
6730 {
6731 /* "addi/subi rx,ry,imm", if imm<9, it is more often a
6732 16-bit insn. If imm>=9, use "movi" insn; it's probably
6733 less than "addi/subi". */
6734 if (t < 9)
6735 *total = 0;
6736 else if (t < 0x1000)
6737 *total = 2;
6738 else if (t < 0x10000)
6739 *total = COSTS_N_INSNS (1);
6740 else
6741 *total = COSTS_N_INSNS (2);
6742 }
6743 else if (outer_code == ROTATE || outer_code == ROTATERT
6744 || outer_code == LSHIFTRT || outer_code == ASHIFTRT
6745 || outer_code == ASHIFT)
6746 {
6747 if (t < 32)
6748 *total = 0;
6749 else
6750 *total = COSTS_N_INSNS (2);
6751 }
6752 else
6753 {
6754 if (t < 0x10000)
6755 if (outer_code == SET && t < 256)
6756 *total = 0;
6757 else
6758 *total = COSTS_N_INSNS (1);
6759 else
6760 *total = COSTS_N_INSNS (2);
6761 }
6762 }
6763 return true;
6764
6765 case CONST:
6766 case LABEL_REF:
6767 case SYMBOL_REF:
6768 *total = COSTS_N_INSNS (3);
6769 return true;
6770 default:
6771 return false;
6772 }
6773}
6774
6775
6776/* TARGET_RTX_COSTS helper for ck803. */
6777
6778static bool
6779ck803_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
6780 int *total, bool speed ATTRIBUTE_UNUSED)
6781{
6782 switch (code)
6783 {
6784 case SET:
6785 if (MEM_P (XEXP (x, 1)))
6786 {
6787 struct csky_address op1;
6788 bool address_valid
6789 = decompose_csky_address (XEXP (XEXP (x, 1), 0), &op1);
6790 if (op1.index)
6791 {
6792 *total = COSTS_N_INSNS (3);
6793 return true;
6794 }
6795 else if (address_valid)
6796 {
6797 *total = COSTS_N_INSNS (1);
6798 return true;
6799 }
6800 }
6801 if (REG_P (XEXP (x, 0)) && (GET_CODE (XEXP (x, 1)) == PLUS))
6802 {
6803 rtx sub_exp = XEXP (x, 1);
6804 if (REG_P (XEXP (sub_exp, 0)) && REG_P (XEXP (sub_exp, 1)))
6805 {
6806 *total = COSTS_N_INSNS (1);
6807 return true;
6808 }
6809 }
6810 return false;
6811 case MULT:
6812 if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)))
6813 {
6814 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
6815 if (val % 2 == 0 && val < 0xffffffff && val > 0)
6816 {
6817 *total = COSTS_N_INSNS (1);
6818 return true;
6819 }
6820 }
6821 return false;
6822
6823 case CONST:
6824 case LABEL_REF:
6825 case SYMBOL_REF:
6826 *total = COSTS_N_INSNS (3);
6827 return true;
6828 default:
6829 return false;
6830 }
6831}
6832
db92bd22 6833/* TARGET_RTX_COSTS helper for ck807/ck810 arches. */
6834
6835static bool
6836ck807_ck810_rtx_costs (rtx x, int code,
6837 int outer_code ATTRIBUTE_UNUSED,
6838 int *total, bool speed ATTRIBUTE_UNUSED)
6839{
6840 switch (code)
6841 {
6842 case MULT:
6843 if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)))
6844 {
6845 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
6846 if (val % 2 == 0 && val < 0xffffffff && val > 0)
6847 {
6848 *total = COSTS_N_INSNS (1);
6849 return true;
6850 }
6851 }
6852 return false;
6853
6854 case CONST:
6855 case LABEL_REF:
6856 case SYMBOL_REF:
6857 *total = COSTS_N_INSNS (3);
6858 return true;
6859 default:
6860 return false;
6861 }
6862}
6863
6864/* TARGET_RTX_COSTS helper for ck860 arches. */
6865
6866static bool
6867ck860_rtx_costs (rtx x, int code, machine_mode mode,
6868 int outer_code ATTRIBUTE_UNUSED,
6869 int *total, bool speed ATTRIBUTE_UNUSED)
6870{
6871 switch (code)
6872 {
6873 case PLUS:
 6874      /* The cost of a mula is 1 more than that of a mult. */
6875 if (GET_CODE (XEXP (x, 0)) == MULT && REG_P (XEXP (x, 1)) && speed)
6876 {
6877 rtx mul_op0 = XEXP (XEXP (x, 0), 0);
6878 rtx mul_op1 = XEXP (XEXP (x, 0), 1);
6879 if (REG_P (mul_op0) && REG_P (mul_op1))
6880 {
6881 *total = COSTS_N_INSNS (1);
6882 *total += rtx_cost (XEXP (x, 0), mode,
6883 (enum rtx_code) code, 0, speed);
6884 return true;
6885 }
6886 }
6887 return false;
6888 case MULT:
6889 if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)))
6890 {
6891 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
6892 if (val % 2 == 0 && val < 0xffffffff && val > 0)
6893 {
6894 *total = COSTS_N_INSNS (1);
6895 return true;
6896 }
6897 }
6898 return false;
6899
6900 case CONST:
6901 case LABEL_REF:
6902 case SYMBOL_REF:
6903 *total = COSTS_N_INSNS (3);
6904 return true;
6905 default:
6906 return false;
6907 }
6908}
6909
6910
6911/* Implement TARGET_RTX_COSTS, to compute a (partial) cost for rtx X.
6912 Return true if the complete cost has been computed, and false if
6913 subexpressions should be scanned. In either case, *TOTAL contains
6914 the cost result. */
6915
6916static bool
6917csky_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED, int outer_code,
6918 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
6919{
6920 int code = GET_CODE (x);
6921
6922 if (CSKY_TARGET_ARCH (CK802) || CSKY_TARGET_ARCH (CK801))
6923 return ck802_ck801_rtx_costs (x, code, outer_code, total, speed);
6924 else if (CSKY_TARGET_ARCH (CK803))
6925 return ck803_rtx_costs (x, code, outer_code, total, speed);
6926 else if (CSKY_TARGET_ARCH (CK807) || CSKY_TARGET_ARCH (CK810))
6927 return ck807_ck810_rtx_costs (x, code, outer_code, total, speed);
6928 else if (CSKY_TARGET_ARCH (CK860))
6929 return ck860_rtx_costs (x, code, mode, outer_code, total, speed);
6930 else
6931 gcc_unreachable ();
6932}
6933
6934/* Emit assembly code for CASESI. This is only used on CK801 and CK802
6935 when optimizing for size, and uses helper functions in libgcc instead
6936 of doing the control transfer inline. */
6937
6938const char *
6939csky_output_casesi (rtx *operands)
6940{
6941 rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[0])));
6942
6943 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
6944
6945 switch (GET_MODE (diff_vec))
6946 {
6947 case E_QImode:
6948 return (ADDR_DIFF_VEC_FLAGS (diff_vec).offset_unsigned
6949 ? "jbsr\t___gnu_csky_case_uqi"
6950 : "jbsr\t___gnu_csky_case_sqi");
6951 case E_HImode:
6952 return (ADDR_DIFF_VEC_FLAGS (diff_vec).offset_unsigned
6953 ? "jbsr\t___gnu_csky_case_uhi"
6954 : "jbsr\t___gnu_csky_case_shi");
6955 case E_SImode:
6956 return "jbsr\t___gnu_csky_case_si";
6957 default:
6958 gcc_unreachable ();
6959 }
6960}
6961
 6962 /* Implement TARGET_SCHED_ISSUE_RATE.  CK810 is dual-issue; all other
 6963    cores are single-issue. */
6964static int
6965csky_sched_issue_rate (void)
6966{
6967 if (CSKY_TARGET_ARCH (CK810))
6968 return 2;
6969 else
6970 return 1;
6971}
6972
6973
6974/* This function implements the target macro TARGET_SCHED_ADJUST_COST.
6975 It corrects the value of COST based on the relationship between
6976 INSN and DEP through the dependence DEP_TYPE. It returns the new
6977 value. */
6978
6979static int
6980csky_sched_adjust_cost (rtx_insn *insn,
6981 int dep_type,
6982 rtx_insn *dep,
6983 int cost,
6984 unsigned int dw ATTRIBUTE_UNUSED)
6985{
6986 if (dep_type == REG_DEP_ANTI || dep_type == REG_DEP_OUTPUT)
6987 return 0;
6988 /* The REG_DEP_TRUE situation. */
6989 else if (recog_memoized (insn) >= 0 && recog_memoized (dep) >= 0)
6990 {
6991 enum attr_type insn_type = get_attr_type (insn);
6992 if (CSKY_TARGET_ARCH (CK803))
6993 {
 6994	      /* A load or store whose base register is set by the previous
 6995		 insn incurs an extra cycle of delay. */
6996 if (insn_type == TYPE_LOAD || insn_type == TYPE_STORE)
6997 {
6998 rtx pattern = PATTERN (insn);
6999
7000 gcc_assert (GET_CODE (pattern) == SET);
7001 rtx addr = (insn_type == TYPE_LOAD
7002 ? SET_SRC (pattern) : SET_DEST (pattern));
7003
7004 enum rtx_code code = GET_CODE (addr);
7005 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
7006 addr = XEXP (addr, 0);
7007 gcc_assert (GET_CODE (addr) == MEM);
7008
7009 rtx base = XEXP (addr, 0);
7010 rtx reg = NULL_RTX;
7011 if (REG_P (base))
7012 reg = base;
7013 if (GET_CODE (base) == PLUS
7014 && GET_CODE (XEXP (base, 0)) == REG)
7015 reg = XEXP (base, 0);
7016 if ((reg != NULL_RTX) && reg_set_p (reg, PATTERN (dep)))
7017 return 2;
7018 }
7019 }
7020 else if (CSKY_TARGET_ARCH (CK802))
7021 {
7022 if ((insn_type == TYPE_CALL_JSR || insn_type == TYPE_BRANCH_JMP)
7023 && get_attr_type (dep) != TYPE_LOAD)
7024 return 1;
7025
7026 if (insn_type == TYPE_LOAD || insn_type == TYPE_STORE)
7027 {
7028 rtx pattern = PATTERN (insn);
7029
7030 gcc_assert (GET_CODE (pattern) == SET);
7031
7032 rtx addr = (insn_type == TYPE_LOAD
7033 ? SET_SRC (pattern) : SET_DEST (pattern));
7034
7035 enum rtx_code code = GET_CODE (addr);
7036 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
7037 addr = XEXP (addr, 0);
7038 gcc_assert (GET_CODE (addr) == MEM);
7039
7040 rtx base = XEXP (addr, 0);
7041 rtx reg = NULL_RTX;
7042 if (REG_P (base))
7043 reg = base;
7044 if (GET_CODE (base) == PLUS
7045 && GET_CODE (XEXP (base, 0)) == REG)
7046 reg = XEXP (base, 0);
7047 if ((reg != NULL_RTX) && reg_set_p (reg, PATTERN (dep))
7048 && get_attr_type (dep) != TYPE_LOAD)
7049 return 1;
7050
7051 if (insn_type == TYPE_STORE
7052 && reg_referenced_p (SET_SRC (pattern), PATTERN (dep)))
7053 return 1;
7054 }
7055 }
7056 }
7057 return cost;
7058}
7059
7060static bool
7061csky_warn_func_return (tree decl)
7062{
7063 /* Naked functions are implemented entirely in assembly, including the
7064 return sequence, so suppress warnings about this. */
7065 return lookup_attribute ("naked", DECL_ATTRIBUTES (decl)) == NULL_TREE;
7066}
7067
7068
7069/* Implement TARGET_RETURN_IN_MEMORY to decide whether TYPE should be
7070 returned in memory (true) or in a register (false).
7071 FNTYPE is the type of the function making the call. */
db92bd22 7072
7073static bool
7074csky_return_in_memory (const_tree type,
7075 const_tree fntype ATTRIBUTE_UNUSED)
7076{
7077 const HOST_WIDE_INT size = int_size_in_bytes (type);
7078 return (size == -1 || size > 2 * UNITS_PER_WORD);
7079}
7080
7081
7082/* Implement TARGET_DWARF_REGISTER_SPAN.
 7083   Dwarf models VFP registers as 64-bit or 128-bit registers by default.
 7084   GCC models them as 32-bit registers, so we need to describe this to
7085 the DWARF generation code. Other registers can use the default. */
db92bd22 7086
7087static rtx
7088csky_dwarf_register_span (rtx rtl)
7089{
7090 machine_mode mode;
7091 unsigned regno;
7092 rtx parts[16];
7093 int nregs;
7094 int i;
7095
7096 regno = REGNO (rtl);
7097 if (!CSKY_VREG_P (regno))
7098 return NULL_RTX;
7099
7100 if (CSKY_VREG_HI_P (regno))
7101 regno += 16;
7102
7103 mode = GET_MODE (rtl);
7104 if (GET_MODE_SIZE (mode) < 8)
7105 return NULL_RTX;
7106
7107
7108 if (TARGET_SINGLE_FPU)
7109 {
7110 nregs = GET_MODE_SIZE (mode) / 4;
7111 for (i = 0; i < nregs; i += 2)
7112 if (TARGET_BIG_ENDIAN)
7113 {
7114 parts[i] = gen_rtx_REG (SImode, regno + i + 1);
7115 parts[i + 1] = gen_rtx_REG (SImode, regno + i);
7116 }
7117 else
7118 {
7119 parts[i] = gen_rtx_REG (SImode, regno + i);
7120 parts[i + 1] = gen_rtx_REG (SImode, regno + i + 1);
7121 }
7122 }
7123 else
7124 {
 7125      /* FIXME: dwarf2 assumes all general registers are the same width
 7126	 as the CPU word.  Transform the 64-bit FPU registers to 32-bit
 7127	 pieces here; the unwind processing will be adapted to fit the
 7128	 C-SKY architecture later. */
7129 nregs = GET_MODE_SIZE (mode) / 4;
7130 for (i = 0; i < nregs; i += 2)
7131 if (TARGET_BIG_ENDIAN)
7132 {
7133 parts[i] = gen_rtx_REG (SImode, regno + i - 16);
7134 parts[i + 1] = gen_rtx_REG (SImode, regno + i);
7135 }
7136 else
7137 {
7138 parts[i] = gen_rtx_REG (SImode, regno + i);
7139 parts[i + 1] = gen_rtx_REG (SImode, regno + i - 16);
7140 }
7141 }
7142
7143 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nregs , parts));
7144}
7145
7146/* Implement TARGET_INIT_LIBFUNCS. */
7147
7148static void
7149csky_init_libfuncs (void)
7150{
7151 if (TARGET_CSKY_LINUX)
7152 init_sync_libfuncs (UNITS_PER_WORD);
7153 if (!TARGET_LIBCCRT)
7154 return;
7155
7156 #define CSKY_GCC_SYM(sym) "__csky_ccrt_" # sym
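  /* For example, the DImode shift libcall is emitted as
     "__csky_ccrt_ashldi3" instead of the default "__ashldi3".  */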
7157
7158 /* int */
7159
7160 /* Arithmetic functions */
7161 set_optab_libfunc (ashl_optab, DImode, CSKY_GCC_SYM (ashldi3));
7162 set_optab_libfunc (ashr_optab, DImode, CSKY_GCC_SYM (ashrdi3));
7163 set_optab_libfunc (sdiv_optab, SImode, CSKY_GCC_SYM (divsi3));
7164 set_optab_libfunc (sdiv_optab, DImode, CSKY_GCC_SYM (divdi3));
7165 set_optab_libfunc (lshr_optab, DImode, CSKY_GCC_SYM (lshrdi3));
7166 set_optab_libfunc (smod_optab, SImode, CSKY_GCC_SYM (modsi3));
7167 set_optab_libfunc (smod_optab, DImode, CSKY_GCC_SYM (moddi3));
7168 set_optab_libfunc (smul_optab, DImode, CSKY_GCC_SYM (muldi3));
7169 set_optab_libfunc (neg_optab, DImode, CSKY_GCC_SYM (negdi2));
7170 set_optab_libfunc (udiv_optab, SImode, CSKY_GCC_SYM (udivsi3));
7171 set_optab_libfunc (udiv_optab, DImode, CSKY_GCC_SYM (udivdi3));
7172 set_optab_libfunc (udivmod_optab, DImode, CSKY_GCC_SYM (udivmoddi4));
7173 set_optab_libfunc (umod_optab, SImode, CSKY_GCC_SYM (umodsi3));
7174 set_optab_libfunc (umod_optab, DImode, CSKY_GCC_SYM (umoddi3));
7175
7176 /* Comparison functions */
7177 set_optab_libfunc (cmp_optab, DImode, CSKY_GCC_SYM (cmpdi2));
7178 set_optab_libfunc (ucmp_optab, DImode, CSKY_GCC_SYM (ucmpdi2));
7179
7180 /* Trapping arithmetic functions */
7181 set_optab_libfunc (absv_optab, SImode, CSKY_GCC_SYM (absvsi2));
7182 set_optab_libfunc (absv_optab, DImode, CSKY_GCC_SYM (absvdi2));
7183 set_optab_libfunc (addv_optab, SImode, CSKY_GCC_SYM (addvsi3));
7184 set_optab_libfunc (addv_optab, DImode, CSKY_GCC_SYM (addvdi3));
7185 set_optab_libfunc (smulv_optab, SImode, CSKY_GCC_SYM (mulvsi3));
7186 set_optab_libfunc (smulv_optab, DImode, CSKY_GCC_SYM (mulvdi3));
7187 set_optab_libfunc (negv_optab, SImode, CSKY_GCC_SYM (negvsi2));
7188 set_optab_libfunc (negv_optab, DImode, CSKY_GCC_SYM (negvdi2));
7189 set_optab_libfunc (subv_optab, SImode, CSKY_GCC_SYM (subvsi3));
7190 set_optab_libfunc (subv_optab, DImode, CSKY_GCC_SYM (subvdi3));
7191
7192 /* Bit operations */
7193 set_optab_libfunc (clz_optab, SImode, CSKY_GCC_SYM (clzsi2));
7194 set_optab_libfunc (clz_optab, DImode, CSKY_GCC_SYM (clzdi2));
7195 set_optab_libfunc (ctz_optab, SImode, CSKY_GCC_SYM (ctzsi2));
7196 set_optab_libfunc (ctz_optab, DImode, CSKY_GCC_SYM (ctzdi2));
7197 set_optab_libfunc (ffs_optab, DImode, CSKY_GCC_SYM (ffsdi2));
7198 set_optab_libfunc (parity_optab, SImode, CSKY_GCC_SYM (paritysi2));
7199 set_optab_libfunc (parity_optab, DImode, CSKY_GCC_SYM (paritydi2));
7200 set_optab_libfunc (popcount_optab,SImode, CSKY_GCC_SYM (popcountsi2));
7201 set_optab_libfunc (popcount_optab,DImode, CSKY_GCC_SYM (popcountdi2));
7202 set_optab_libfunc (bswap_optab, SImode, CSKY_GCC_SYM (bswapsi2));
7203 set_optab_libfunc (bswap_optab, DImode, CSKY_GCC_SYM (bswapdi2));
7204
7205 /* float */
7206
7207 /* Arithmetic functions */
7208 set_optab_libfunc (add_optab, SFmode, CSKY_GCC_SYM (addsf3));
7209 set_optab_libfunc (add_optab, DFmode, CSKY_GCC_SYM (adddf3));
7210 set_optab_libfunc (sub_optab, SFmode, CSKY_GCC_SYM (subsf3));
7211 set_optab_libfunc (sub_optab, DFmode, CSKY_GCC_SYM (subdf3));
7212 set_optab_libfunc (smul_optab, SFmode, CSKY_GCC_SYM (mulsf3));
7213 set_optab_libfunc (smul_optab, DFmode, CSKY_GCC_SYM (muldf3));
7214 set_optab_libfunc (sdiv_optab, SFmode, CSKY_GCC_SYM (divsf3));
7215 set_optab_libfunc (sdiv_optab, DFmode, CSKY_GCC_SYM (divdf3));
7216 set_optab_libfunc (neg_optab, SFmode, CSKY_GCC_SYM (negsf2));
7217 set_optab_libfunc (neg_optab, DFmode, CSKY_GCC_SYM (negdf2));
7218
7219 /* Conversion functions */
7220 set_conv_libfunc (sext_optab, DFmode, SFmode, CSKY_GCC_SYM (extendsfdf2));
7221 set_conv_libfunc (trunc_optab, SFmode, DFmode, CSKY_GCC_SYM (truncdfsf2));
7222 set_conv_libfunc (sfix_optab, SImode, SFmode, CSKY_GCC_SYM (fixsfsi));
7223 set_conv_libfunc (sfix_optab, SImode, DFmode, CSKY_GCC_SYM (fixdfsi));
7224 set_conv_libfunc (sfix_optab, DImode, SFmode, CSKY_GCC_SYM (fixsfdi));
7225 set_conv_libfunc (sfix_optab, DImode, DFmode, CSKY_GCC_SYM (fixdfdi));
7226 set_conv_libfunc (ufix_optab, SImode, SFmode, CSKY_GCC_SYM (fixunssfsi));
7227 set_conv_libfunc (ufix_optab, SImode, DFmode, CSKY_GCC_SYM (fixunsdfsi));
7228 set_conv_libfunc (ufix_optab, DImode, SFmode, CSKY_GCC_SYM (fixunssfdi));
7229 set_conv_libfunc (ufix_optab, DImode, DFmode, CSKY_GCC_SYM (fixunsdfdi));
7230 set_conv_libfunc (sfloat_optab, SFmode, SImode, CSKY_GCC_SYM (floatsisf));
7231 set_conv_libfunc (sfloat_optab, DFmode, SImode, CSKY_GCC_SYM (floatsidf));
7232 set_conv_libfunc (sfloat_optab, SFmode, DImode, CSKY_GCC_SYM (floatdisf));
7233 set_conv_libfunc (sfloat_optab, DFmode, DImode, CSKY_GCC_SYM (floatdidf));
7234 set_conv_libfunc (ufloat_optab, SFmode, SImode, CSKY_GCC_SYM (floatunsisf));
7235 set_conv_libfunc (ufloat_optab, DFmode, SImode, CSKY_GCC_SYM (floatunsidf));
7236 set_conv_libfunc (ufloat_optab, SFmode, DImode, CSKY_GCC_SYM (floatundisf));
7237 set_conv_libfunc (ufloat_optab, DFmode, DImode, CSKY_GCC_SYM (floatundidf));
7238
7239 /* Comparison functions */
7240 set_optab_libfunc (cmp_optab, SFmode, CSKY_GCC_SYM (cmpsf2));
7241 set_optab_libfunc (cmp_optab, DFmode, CSKY_GCC_SYM (cmpdf2));
7242 set_optab_libfunc (unord_optab, SFmode, CSKY_GCC_SYM (unordsf2));
7243 set_optab_libfunc (unord_optab, DFmode, CSKY_GCC_SYM (unorddf2));
7244 set_optab_libfunc (eq_optab, SFmode, CSKY_GCC_SYM (eqsf2));
7245 set_optab_libfunc (eq_optab, DFmode, CSKY_GCC_SYM (eqdf2));
7246 set_optab_libfunc (ne_optab, SFmode, CSKY_GCC_SYM (nesf2));
7247 set_optab_libfunc (ne_optab, DFmode, CSKY_GCC_SYM (nedf2));
7248 set_optab_libfunc (ge_optab, SFmode, CSKY_GCC_SYM (gesf2));
7249 set_optab_libfunc (ge_optab, DFmode, CSKY_GCC_SYM (gedf2));
7250 set_optab_libfunc (lt_optab, SFmode, CSKY_GCC_SYM (ltsf2));
7251 set_optab_libfunc (lt_optab, DFmode, CSKY_GCC_SYM (ltdf2));
7252 set_optab_libfunc (le_optab, SFmode, CSKY_GCC_SYM (lesf2));
7253 set_optab_libfunc (le_optab, DFmode, CSKY_GCC_SYM (ledf2));
7254 set_optab_libfunc (gt_optab, SFmode, CSKY_GCC_SYM (gtsf2));
7255 set_optab_libfunc (gt_optab, DFmode, CSKY_GCC_SYM (gtdf2));
7256}
7257
7258
7259/* Implement TARGET_ADDRESS_COST to estimate cost of the memory address X.
7260 For C-SKY, (register) and (register + offset) have the same cost.
7261 Other situations cost more. */
7262
7263static int
7264csky_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
7265 addr_space_t as ATTRIBUTE_UNUSED,
7266 bool speed ATTRIBUTE_UNUSED)
7267{
7268 enum rtx_code code = GET_CODE (x);
7269
7270 if (code == REG)
7271 return COSTS_N_INSNS (1);
7272 if (code == PLUS
7273 && REG_P (XEXP (x, 0))
7274 && CONST_INT_P (XEXP (x, 1)))
7275 return COSTS_N_INSNS (1);
7276
7277 return COSTS_N_INSNS (3);
7278}
7279
7280
7281/* Implement TARGET_FIXED_CONDITION_CODE_REGS. */
7282
7283static bool
7284csky_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
7285{
7286 *p1 = CSKY_CC_REGNUM;
7287 *p2 = INVALID_REGNUM;
7288 return true;
7289}
7290
7291void
7292csky_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
7293 rtx libname ATTRIBUTE_UNUSED,
7294 tree fndecl ATTRIBUTE_UNUSED)
7295{
7296 memset(pcum, 0, sizeof(*pcum));
7297 if (stdarg_p (fntype))
7298 pcum->is_stdarg = true;
7299}
cc7232b9 7300
7301
7302/* Implement the TARGET_INIT_BUILTINS target macro. */
7303
7304void
7305csky_init_builtins (void)
7306{
61786edf 7307 /* Init fp16. */
7308 static tree csky_floatHF_type_node = make_node (REAL_TYPE);
7309 TYPE_PRECISION (csky_floatHF_type_node) = GET_MODE_PRECISION (HFmode);
7310 layout_type (csky_floatHF_type_node);
7311 (*lang_hooks.types.register_builtin_type) (csky_floatHF_type_node, "__fp16");
7312}
7313
7314
7315/* Implement TARGET_MANGLE_TYPE. */
7316
7317static const char *
7318csky_mangle_type (const_tree type)
7319{
9907413a 7320 if (SCALAR_FLOAT_TYPE_P (type)
7321 && TYPE_PRECISION (type) == 16
7322 && TYPE_MAIN_VARIANT (type) != float16_type_node)
7323 return "Dh";
7324
7325 /* Use the default mangling. */
7326 return NULL;
7327}
7328
7329struct gcc_target targetm = TARGET_INITIALIZER;
7330
7331#include "gt-csky.h"