1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
47 #include "dominance.h"
53 #include "cfgcleanup.h"
54 #include "basic-block.h"
55 #include "diagnostic-core.h"
58 #include "target-def.h"
60 #include "langhooks.h"
65 #include "stor-layout.h"
70 /* Forward function declarations. */
/* Decide whether the prologue must save hard register REGNO
   (consulted by the frame-layout code). */
71 static bool prologue_saved_reg_p (unsigned);
/* Emit code to initialize the PIC (GOT pointer) register. */
72 static void nios2_load_pic_register (void);
/* Record that custom-instruction code N (0-255) is claimed for the
   given use kind; used to detect conflicting reuse of a code. */
73 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
/* Map an UNSPEC index to its relocation name; NULL if unrecognized. */
74 static const char *nios2_unspec_reloc_name (int);
/* NOTE(review): presumably records the FUNCTION_DECL for builtin
   number FCODE -- the definition is not visible here; confirm. */
75 static void nios2_register_builtin_fndecl (unsigned, tree
);
77 /* Threshold for data being put into the small data/bss area, instead
78 of the normal data area (references to the small data/bss area take
79 1 instruction, and use the global pointer, references to the normal
80 data area takes 2 instructions).
   Overridden from the -G<value> switch in nios2_option_override. */
81 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
83 struct GTY (()) machine_function
85 /* Current frame information, to be filled in by nios2_compute_frame_layout
86 with register save masks, and offsets for the current function. */
88 /* Mask of registers to save. */
89 unsigned int save_mask
;
90 /* Number of bytes that the entire frame takes up. */
92 /* Number of bytes that variables take up. */
94 /* Number of bytes that outgoing arguments take up. */
96 /* Number of bytes needed to store registers in frame. */
98 /* Offset from new stack pointer to store registers. */
100 /* Offset from save_regs_offset to store frame pointer register. */
102 /* != 0 if frame layout already calculated. */
106 /* State to track the assignment of custom codes to FPU/custom builtins.
    Indexed by custom-instruction code (0-255): custom_code_status records
    how each code is currently being used (e.g. CCS_FPU), and
    custom_code_index records which insn/builtin claimed it.  Both are
    filled in by nios2_register_custom_code. */
107 static enum nios2_ccs_code custom_code_status
[256];
108 static int custom_code_index
[256];
109 /* Set to true if any conflicts (re-use of a code between 0-255) are found.
    Checked in nios2_custom_check_insns, which issues a fatal error. */
110 static bool custom_code_conflict
= false;
113 /* Definition of builtin function types for nios2. */
117 N2_FTYPE(1, (VOID)) \
118 N2_FTYPE(2, (DF, DF)) \
119 N2_FTYPE(3, (DF, DF, DF)) \
120 N2_FTYPE(2, (DF, SF)) \
121 N2_FTYPE(2, (DF, SI)) \
122 N2_FTYPE(2, (DF, UI)) \
123 N2_FTYPE(2, (SF, DF)) \
124 N2_FTYPE(2, (SF, SF)) \
125 N2_FTYPE(3, (SF, SF, SF)) \
126 N2_FTYPE(2, (SF, SI)) \
127 N2_FTYPE(2, (SF, UI)) \
128 N2_FTYPE(2, (SI, CVPTR)) \
129 N2_FTYPE(2, (SI, DF)) \
130 N2_FTYPE(3, (SI, DF, DF)) \
131 N2_FTYPE(2, (SI, SF)) \
132 N2_FTYPE(3, (SI, SF, SF)) \
133 N2_FTYPE(2, (SI, SI)) \
134 N2_FTYPE(2, (UI, CVPTR)) \
135 N2_FTYPE(2, (UI, DF)) \
136 N2_FTYPE(2, (UI, SF)) \
137 N2_FTYPE(2, (VOID, DF)) \
138 N2_FTYPE(2, (VOID, SF)) \
139 N2_FTYPE(3, (VOID, SI, SI)) \
140 N2_FTYPE(3, (VOID, VPTR, SI))
/* Paste together enumerator names of the form N2_FTYPE_<RET>_<ARG...>
   from a (return-type, arg-types...) tuple.  The OP<N> suffix counts
   the return type plus the arguments, matching the N2_FTYPE(N, ...)
   entries in the builtin function-type list above. */
142 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
143 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
144 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
146 /* Expand ftcode enumeration. */
148 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
154 /* Return the tree function type, based on the ftcode. */
156 nios2_ftype (enum nios2_ftcode ftcode
)
158 static tree types
[(int) N2_FTYPE_MAX
];
160 tree N2_TYPE_SF
= float_type_node
;
161 tree N2_TYPE_DF
= double_type_node
;
162 tree N2_TYPE_SI
= integer_type_node
;
163 tree N2_TYPE_UI
= unsigned_type_node
;
164 tree N2_TYPE_VOID
= void_type_node
;
166 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
169 /* const volatile void *. */
171 = build_pointer_type (build_qualified_type (void_type_node
,
173 | TYPE_QUAL_VOLATILE
)));
174 /* volatile void *. */
176 = build_pointer_type (build_qualified_type (void_type_node
,
177 TYPE_QUAL_VOLATILE
));
179 if (types
[(int) ftcode
] == NULL_TREE
)
182 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
183 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
184 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
185 #define N2_FTYPE(N,ARGS) \
186 case N2_FTYPE_OP ## N ARGS: \
187 types[(int) ftcode] \
188 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
192 default: gcc_unreachable ();
194 return types
[(int) ftcode
];
198 /* Definition of FPU instruction descriptions. */
200 struct nios2_fpu_insn_info
203 int num_operands
, *optvar
;
/* Insn requires a double-precision FPU configuration; enabling any
   double insn without these produces an error (see
   nios2_custom_check_insns). */
206 #define N2F_DFREQ 0x2
/* Insn is only used when -funsafe-math-optimizations is in effect;
   otherwise a warning is issued. */
207 #define N2F_UNSAFE 0x4
/* Insn is only used when -ffinite-math-only is in effect (fmin/fmax). */
208 #define N2F_FINITE 0x8
/* Insn is only used when -fno-math-errno is in effect (rounding). */
209 #define N2F_NO_ERRNO 0x10
211 enum insn_code icode
;
212 enum nios2_ftcode ftcode
;
215 /* Base macro for defining FPU instructions.  Builds one table entry:
    the insn's name string, its operand count NOP, a pointer to the
    -mcustom-<insn>= option variable (&nios2_custom_<insn>), the
    enabling and disabling switch enumerators, its N2F_* flags, the
    matching CODE_FOR_* insn code, and its function-type code. */
216 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
217 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
218 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
219 N2_FTYPE_OP ## nop args }
221 /* Arithmetic and math functions; 2 or 3 operand FP operations.
    The m/M arguments select the mode letter pair: s/S for single,
    d/D for double precision. */
222 #define N2FPU_OP2(mode) (mode, mode)
223 #define N2FPU_OP3(mode) (mode, mode, mode)
224 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
225 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
226 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
227 #define N2FPU_INSN_SF(code, nop, flags) \
228 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
/* Double-precision variant; N2F_DF marks the entry as a double insn. */
229 #define N2FPU_INSN_DF(code, nop, flags) \
230 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
232 /* Compare instructions, 3 operand FP operation with a SI result. */
233 #define N2FPU_CMP_DEF(code, flags, m, M) \
234 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
235 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
236 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
237 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
239 /* The order of definition needs to be maintained consistent with
240 enum n2fpu_code in nios2-opts.h. */
241 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
243 /* Single precision instructions. */
244 N2FPU_INSN_SF (add
, 3, 0),
245 N2FPU_INSN_SF (sub
, 3, 0),
246 N2FPU_INSN_SF (mul
, 3, 0),
247 N2FPU_INSN_SF (div
, 3, 0),
248 /* Due to textual difference between min/max and smin/smax. */
249 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
250 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
251 N2FPU_INSN_SF (neg
, 2, 0),
252 N2FPU_INSN_SF (abs
, 2, 0),
253 N2FPU_INSN_SF (sqrt
, 2, 0),
254 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
255 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
256 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
257 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
258 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
259 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
260 /* Single precision compares. */
261 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
262 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
263 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
265 /* Double precision instructions. */
266 N2FPU_INSN_DF (add
, 3, 0),
267 N2FPU_INSN_DF (sub
, 3, 0),
268 N2FPU_INSN_DF (mul
, 3, 0),
269 N2FPU_INSN_DF (div
, 3, 0),
270 /* Due to textual difference between min/max and smin/smax. */
271 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
272 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
273 N2FPU_INSN_DF (neg
, 2, 0),
274 N2FPU_INSN_DF (abs
, 2, 0),
275 N2FPU_INSN_DF (sqrt
, 2, 0),
276 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
277 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
278 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
279 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
280 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
281 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
282 /* Double precision compares. */
283 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
284 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
285 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
287 /* Conversion instructions. */
288 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
289 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
290 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
291 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
292 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
293 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
294 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
295 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
296 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
297 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
298 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
300 /* X, Y access instructions. */
301 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
302 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
303 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
304 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
305 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
308 /* Some macros for ease of access to the nios2_fpu_insn[] table,
    indexed by an enum n2fpu_code value. */
309 #define N2FPU(code) nios2_fpu_insn[(int) code]
/* An insn is enabled iff its -mcustom-<insn>= code is >= 0;
    -1 means disabled (the -mno-custom-<insn> state). */
310 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
/* The custom-instruction code assigned via the option variable. */
311 #define N2FPU_N(code) (*N2FPU(code).optvar)
312 #define N2FPU_NAME(code) (N2FPU(code).name)
313 #define N2FPU_ICODE(code) (N2FPU(code).icode)
314 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
/* Flag tests; see the N2F_* flag definitions above. */
315 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
316 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
317 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
318 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
319 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
321 /* Same as above, but for cases where using only the op part is shorter. */
322 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
323 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
324 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
326 /* Export the FPU insn enabled predicate to nios2.md. */
328 nios2_fpu_insn_enabled (enum n2fpu_code code
)
330 return N2FPU_ENABLED_P (code
);
333 /* Return true if COND comparison for mode MODE is enabled under current
337 nios2_fpu_compare_enabled (enum rtx_code cond
, machine_mode mode
)
342 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
343 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
344 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
345 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
346 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
347 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
350 else if (mode
== DFmode
)
353 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
354 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
355 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
356 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
357 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
358 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
364 /* Stack layout and calling conventions. */
/* Round LOC (a byte count) up to the next multiple of the preferred
   stack boundary, expressed in bytes. */
366 #define NIOS2_STACK_ALIGN(LOC) \
367 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
368 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
370 /* Return the bytes needed to compute the frame pointer from the current
373 nios2_compute_frame_layout (void)
376 unsigned int save_mask
= 0;
382 if (cfun
->machine
->initialized
)
383 return cfun
->machine
->total_size
;
385 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
386 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
387 total_size
= var_size
+ out_args_size
;
389 /* Calculate space needed for gp registers. */
391 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
392 if (prologue_saved_reg_p (regno
))
394 save_mask
|= 1 << regno
;
398 /* If we call eh_return, we need to save the EH data registers. */
399 if (crtl
->calls_eh_return
)
404 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
405 if (!(save_mask
& (1 << r
)))
412 cfun
->machine
->fp_save_offset
= 0;
413 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
415 int fp_save_offset
= 0;
416 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
417 if (save_mask
& (1 << regno
))
420 cfun
->machine
->fp_save_offset
= fp_save_offset
;
423 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
424 total_size
+= save_reg_size
;
425 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
427 /* Save other computed information. */
428 cfun
->machine
->save_mask
= save_mask
;
429 cfun
->machine
->total_size
= total_size
;
430 cfun
->machine
->var_size
= var_size
;
431 cfun
->machine
->args_size
= out_args_size
;
432 cfun
->machine
->save_reg_size
= save_reg_size
;
433 cfun
->machine
->initialized
= reload_completed
;
434 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
439 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
440 prologue/epilogue expand routines. */
442 save_reg (int regno
, unsigned offset
)
444 rtx reg
= gen_rtx_REG (SImode
, regno
);
445 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
446 gen_int_mode (offset
, Pmode
));
447 rtx insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
448 RTX_FRAME_RELATED_P (insn
) = 1;
452 restore_reg (int regno
, unsigned offset
)
454 rtx reg
= gen_rtx_REG (SImode
, regno
);
455 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
456 gen_int_mode (offset
, Pmode
));
457 rtx insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
458 /* Tag epilogue unwind note. */
459 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
460 RTX_FRAME_RELATED_P (insn
) = 1;
463 /* Emit conditional trap for checking stack limit. */
465 nios2_emit_stack_limit_check (void)
467 if (REG_P (stack_limit_rtx
))
468 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
,
470 stack_pointer_rtx
, stack_limit_rtx
, GEN_INT (3)));
472 sorry ("only register based stack limit is supported");
475 /* Temp regno used inside prologue/epilogue (r8): serves as a scratch
    register for stack-pointer adjustments too large to fit in an
    immediate operand. */
476 #define TEMP_REG_NUM 8
479 nios2_expand_prologue (void)
482 int total_frame_size
, save_offset
;
483 int sp_offset
; /* offset from base_reg to final stack value. */
484 int save_regs_base
; /* offset from base_reg to register save area. */
487 total_frame_size
= nios2_compute_frame_layout ();
489 if (flag_stack_usage_info
)
490 current_function_static_stack_size
= total_frame_size
;
492 /* Decrement the stack pointer. */
493 if (!SMALL_INT (total_frame_size
))
495 /* We need an intermediary point, this will point at the spill block. */
497 (gen_add2_insn (stack_pointer_rtx
,
498 gen_int_mode (cfun
->machine
->save_regs_offset
499 - total_frame_size
, Pmode
)));
500 RTX_FRAME_RELATED_P (insn
) = 1;
502 sp_offset
= -cfun
->machine
->save_regs_offset
;
504 else if (total_frame_size
)
506 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
507 gen_int_mode (-total_frame_size
,
509 RTX_FRAME_RELATED_P (insn
) = 1;
510 save_regs_base
= cfun
->machine
->save_regs_offset
;
514 save_regs_base
= sp_offset
= 0;
516 if (crtl
->limit_stack
)
517 nios2_emit_stack_limit_check ();
519 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
521 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
522 if (cfun
->machine
->save_mask
& (1 << regno
))
525 save_reg (regno
, save_offset
);
528 if (frame_pointer_needed
)
530 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
531 insn
= emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
533 gen_int_mode (fp_save_offset
, Pmode
)));
534 RTX_FRAME_RELATED_P (insn
) = 1;
540 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
541 plus_constant (Pmode
, stack_pointer_rtx
, sp_offset
));
542 if (SMALL_INT (sp_offset
))
543 insn
= emit_insn (sp_adjust
);
546 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
547 emit_move_insn (tmp
, gen_int_mode (sp_offset
, Pmode
));
548 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
549 /* Attach the sp_adjust as a note indicating what happened. */
550 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, sp_adjust
);
552 RTX_FRAME_RELATED_P (insn
) = 1;
554 if (crtl
->limit_stack
)
555 nios2_emit_stack_limit_check ();
558 /* Load the PIC register if needed. */
559 if (crtl
->uses_pic_offset_table
)
560 nios2_load_pic_register ();
562 /* If we are profiling, make sure no instructions are scheduled before
563 the call to mcount. */
565 emit_insn (gen_blockage ());
569 nios2_expand_epilogue (bool sibcall_p
)
572 int total_frame_size
;
573 int sp_adjust
, save_offset
;
576 if (!sibcall_p
&& nios2_can_use_return_insn ())
578 emit_jump_insn (gen_return ());
582 emit_insn (gen_blockage ());
584 total_frame_size
= nios2_compute_frame_layout ();
585 if (frame_pointer_needed
)
587 /* Recover the stack pointer. */
588 insn
= emit_insn (gen_add3_insn
589 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
590 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
)));
591 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
593 - cfun
->machine
->save_regs_offset
));
594 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
595 RTX_FRAME_RELATED_P (insn
) = 1;
598 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
600 else if (!SMALL_INT (total_frame_size
))
602 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
603 emit_move_insn (tmp
, gen_int_mode (cfun
->machine
->save_regs_offset
,
605 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
606 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
607 plus_constant (Pmode
, stack_pointer_rtx
,
608 cfun
->machine
->save_regs_offset
));
609 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
610 RTX_FRAME_RELATED_P (insn
) = 1;
612 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
616 save_offset
= cfun
->machine
->save_regs_offset
;
617 sp_adjust
= total_frame_size
;
620 save_offset
+= cfun
->machine
->save_reg_size
;
622 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
623 if (cfun
->machine
->save_mask
& (1 << regno
))
626 restore_reg (regno
, save_offset
);
631 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
632 gen_int_mode (sp_adjust
, Pmode
)));
633 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
634 plus_constant (Pmode
, stack_pointer_rtx
,
636 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
637 RTX_FRAME_RELATED_P (insn
) = 1;
640 /* Add in the __builtin_eh_return stack adjustment. */
641 if (crtl
->calls_eh_return
)
642 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
645 emit_jump_insn (gen_simple_return ());
648 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
649 back to a previous frame. */
651 nios2_get_return_address (int count
)
656 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
659 /* Emit code to change the current function's return address to
660 ADDRESS. SCRATCH is available as a scratch register, if needed.
661 ADDRESS and SCRATCH are both word-mode GPRs. */
663 nios2_set_return_address (rtx address
, rtx scratch
)
665 nios2_compute_frame_layout ();
666 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
668 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
671 if (frame_pointer_needed
)
672 base
= hard_frame_pointer_rtx
;
675 base
= stack_pointer_rtx
;
676 offset
+= cfun
->machine
->save_regs_offset
;
678 if (!SMALL_INT (offset
))
680 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
681 emit_insn (gen_add2_insn (scratch
, base
));
687 base
= plus_constant (Pmode
, base
, offset
);
688 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
691 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
694 /* Implement FUNCTION_PROFILER macro. */
696 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
698 fprintf (file
, "\tmov\tr8, ra\n");
701 fprintf (file
, "\tnextpc\tr2\n");
702 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
703 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
704 fprintf (file
, "\tadd\tr2, r2, r3\n");
705 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
706 fprintf (file
, "\tcallr\tr2\n");
708 else if (flag_pic
== 2)
710 fprintf (file
, "\tnextpc\tr2\n");
711 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
712 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
713 fprintf (file
, "\tadd\tr2, r2, r3\n");
714 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
715 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
716 fprintf (file
, "\tadd\tr3, r2, r3\n");
717 fprintf (file
, "\tldw\tr2, 0(r3)\n");
718 fprintf (file
, "\tcallr\tr2\n");
721 fprintf (file
, "\tcall\t_mcount\n");
722 fprintf (file
, "\tmov\tra, r8\n");
725 /* Dump stack layout. */
727 nios2_dump_frame_layout (FILE *file
)
729 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
730 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
731 cfun
->machine
->total_size
);
732 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
733 cfun
->machine
->var_size
);
734 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
735 cfun
->machine
->args_size
);
736 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
737 cfun
->machine
->save_reg_size
);
738 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
739 cfun
->machine
->initialized
);
740 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
741 cfun
->machine
->save_regs_offset
);
742 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
744 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
745 frame_pointer_needed
);
746 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
747 crtl
->args
.pretend_args_size
);
750 /* Return true if REGNO should be saved in the prologue. */
752 prologue_saved_reg_p (unsigned regno
)
754 gcc_assert (GP_REG_P (regno
));
756 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
759 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
762 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
765 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
771 /* Implement TARGET_CAN_ELIMINATE. */
773 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
775 if (to
== STACK_POINTER_REGNUM
)
776 return !frame_pointer_needed
;
780 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
782 nios2_initial_elimination_offset (int from
, int to
)
786 nios2_compute_frame_layout ();
788 /* Set OFFSET to the offset from the stack pointer. */
791 case FRAME_POINTER_REGNUM
:
792 offset
= cfun
->machine
->args_size
;
795 case ARG_POINTER_REGNUM
:
796 offset
= cfun
->machine
->total_size
;
797 offset
-= crtl
->args
.pretend_args_size
;
804 /* If we are asked for the frame pointer offset, then adjust OFFSET
805 by the offset from the frame pointer to the stack pointer. */
806 if (to
== HARD_FRAME_POINTER_REGNUM
)
807 offset
-= (cfun
->machine
->save_regs_offset
808 + cfun
->machine
->fp_save_offset
);
813 /* Return nonzero if this function is known to have a null epilogue.
814 This allows the optimizer to omit jumps to jumps if no stack
817 nios2_can_use_return_insn (void)
819 if (!reload_completed
|| crtl
->profile
)
822 return nios2_compute_frame_layout () == 0;
826 /* Check and signal some warnings/errors on FPU insn options. */
828 nios2_custom_check_insns (void)
833 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
834 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
836 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
837 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
839 error ("switch %<-mcustom-%s%> is required for double "
840 "precision floating point", N2FPU_NAME (j
));
846 /* Warn if the user has certain exotic operations that won't get used
847 without -funsafe-math-optimizations. See expand_builtin () in
849 if (!flag_unsafe_math_optimizations
)
850 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
851 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
852 warning (0, "switch %<-mcustom-%s%> has no effect unless "
853 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
855 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
856 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
858 if (!flag_finite_math_only
)
859 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
860 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
861 warning (0, "switch %<-mcustom-%s%> has no effect unless "
862 "-ffinite-math-only is specified", N2FPU_NAME (i
));
864 /* Warn if the user is trying to use a custom rounding instruction
865 that won't get used without -fno-math-errno. See
866 expand_builtin_int_roundingfn_2 () in builtins.c. */
868 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
869 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
870 warning (0, "switch %<-mcustom-%s%> has no effect unless "
871 "-fno-math-errno is specified", N2FPU_NAME (i
));
873 if (errors
|| custom_code_conflict
)
874 fatal_error ("conflicting use of -mcustom switches, target attributes, "
875 "and/or __builtin_custom_ functions");
879 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
881 if (override_p
|| N2FPU_N (code
) == -1)
883 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
886 /* Type to represent a standard FPU config. */
887 struct nios2_fpu_config
890 bool set_sp_constants
;
891 int code
[n2fpu_code_num
];
/* Table of the standard -mcustom-fpu-cfg= configurations, filled in
   by nios2_init_fpu_configs and searched by
   nios2_match_custom_fpu_cfg. */
894 #define NIOS2_FPU_CONFIG_NUM 3
895 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
898 nios2_init_fpu_configs (void)
900 struct nios2_fpu_config
* cfg
;
902 #define NEXT_FPU_CONFIG \
904 cfg = &custom_fpu_config[i++]; \
905 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
910 cfg
->set_sp_constants
= true;
911 cfg
->code
[n2fpu_fmuls
] = 252;
912 cfg
->code
[n2fpu_fadds
] = 253;
913 cfg
->code
[n2fpu_fsubs
] = 254;
917 cfg
->set_sp_constants
= true;
918 cfg
->code
[n2fpu_fmuls
] = 252;
919 cfg
->code
[n2fpu_fadds
] = 253;
920 cfg
->code
[n2fpu_fsubs
] = 254;
921 cfg
->code
[n2fpu_fdivs
] = 255;
925 cfg
->set_sp_constants
= true;
926 cfg
->code
[n2fpu_floatus
] = 243;
927 cfg
->code
[n2fpu_fixsi
] = 244;
928 cfg
->code
[n2fpu_floatis
] = 245;
929 cfg
->code
[n2fpu_fcmpgts
] = 246;
930 cfg
->code
[n2fpu_fcmples
] = 249;
931 cfg
->code
[n2fpu_fcmpeqs
] = 250;
932 cfg
->code
[n2fpu_fcmpnes
] = 251;
933 cfg
->code
[n2fpu_fmuls
] = 252;
934 cfg
->code
[n2fpu_fadds
] = 253;
935 cfg
->code
[n2fpu_fsubs
] = 254;
936 cfg
->code
[n2fpu_fdivs
] = 255;
938 #undef NEXT_FPU_CONFIG
939 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
942 static struct nios2_fpu_config
*
943 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
946 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
948 bool match
= !(endp
!= NULL
949 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
951 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
953 return &custom_fpu_config
[i
];
958 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
959 OVERRIDE is true if loaded config codes should overwrite current state. */
961 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
964 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
968 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
969 if (cfg
->code
[i
] >= 0)
970 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
972 if (cfg
->set_sp_constants
)
973 flag_single_precision_constant
= 1;
976 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
977 "value %<%s%>", cfgname
);
979 /* Guard against errors in the standard configurations. */
980 nios2_custom_check_insns ();
983 /* Check individual FPU insn options, and register custom code. */
985 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
987 int param
= N2FPU_N (fpu_insn_index
);
989 if (0 <= param
&& param
<= 255)
990 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
992 /* Valid values are 0-255, but also allow -1 so that the
993 -mno-custom-<opt> switches work. */
994 else if (param
!= -1)
995 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
996 N2FPU_NAME (fpu_insn_index
), param
);
999 /* Allocate a chunk of memory for per-function machine-dependent data. */
1000 static struct machine_function
*
1001 nios2_init_machine_status (void)
1003 return ggc_cleared_alloc
<machine_function
> ();
1006 /* Implement TARGET_OPTION_OVERRIDE. */
1008 nios2_option_override (void)
1012 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1013 SUBTARGET_OVERRIDE_OPTIONS
;
1016 /* Check for unsupported options. */
1017 if (flag_pic
&& !TARGET_LINUX_ABI
)
1018 sorry ("position-independent code requires the Linux ABI");
1020 /* Function to allocate machine-dependent function status. */
1021 init_machine_status
= &nios2_init_machine_status
;
1023 nios2_section_threshold
1024 = (global_options_set
.x_g_switch_value
1025 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1027 /* Default to -mgpopt unless -fpic or -fPIC. */
1028 if (TARGET_GPOPT
== -1 && flag_pic
)
1031 /* If we don't have mul, we don't have mulx either! */
1032 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1033 target_flags
&= ~MASK_HAS_MULX
;
1035 /* Initialize default FPU configurations. */
1036 nios2_init_fpu_configs ();
1038 /* Set up default handling for floating point custom instructions.
1040 Putting things in this order means that the -mcustom-fpu-cfg=
1041 switch will always be overridden by individual -mcustom-fadds=
1042 switches, regardless of the order in which they were specified
1043 on the command line.
1045 This behavior of prioritization of individual -mcustom-<insn>=
1046 options before the -mcustom-fpu-cfg= switch is maintained for
1048 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1049 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1051 /* Handle options for individual FPU insns. */
1052 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1053 nios2_handle_custom_fpu_insn_option (i
);
1055 nios2_custom_check_insns ();
1057 /* Save the initial options in case the user does function specific
1059 target_option_default_node
= target_option_current_node
1060 = build_target_option_node (&global_options
);
1064 /* Return true if CST is a constant within range of movi/movui/movhi. */
1066 nios2_simple_const_p (const_rtx cst
)
1068 HOST_WIDE_INT val
= INTVAL (cst
);
1069 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1072 /* Compute a (partial) cost for rtx X. Return true if the complete
1073 cost has been computed, and false if subexpressions should be
1074 scanned. In either case, *TOTAL contains the cost result. */
1076 nios2_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
1077 int opno ATTRIBUTE_UNUSED
,
1078 int *total
, bool speed ATTRIBUTE_UNUSED
)
1083 if (INTVAL (x
) == 0)
1085 *total
= COSTS_N_INSNS (0);
1088 else if (nios2_simple_const_p (x
))
1090 *total
= COSTS_N_INSNS (2);
1095 *total
= COSTS_N_INSNS (4);
1104 *total
= COSTS_N_INSNS (4);
1110 /* Recognize 'nor' insn pattern. */
1111 if (GET_CODE (XEXP (x
, 0)) == NOT
1112 && GET_CODE (XEXP (x
, 1)) == NOT
)
1114 *total
= COSTS_N_INSNS (1);
1122 *total
= COSTS_N_INSNS (1);
1127 *total
= COSTS_N_INSNS (3);
1132 *total
= COSTS_N_INSNS (1);
1141 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1143 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1145 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1148 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1149 RET is an RTX for the return value location. The entire insn sequence
1151 static GTY(()) rtx nios2_tls_symbol
;
1154 nios2_call_tls_get_addr (rtx ti
)
1156 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1157 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1160 if (!nios2_tls_symbol
)
1161 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1163 emit_move_insn (arg
, ti
);
1164 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1165 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1166 RTL_CONST_CALL_P (insn
) = 1;
1167 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1168 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1173 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1175 nios2_large_offset_p (int unspec
)
1177 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1180 /* FIXME: TLS GOT offset relocations will eventually also get this
1181 treatment, after binutils support for those are also completed. */
1182 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1185 /* 'gotoff' offsets are always hiadj/lo. */
1186 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1192 /* Return true for conforming unspec relocations. Also used in
1193 constraints.md and predicates.md. */
1195 nios2_unspec_reloc_p (rtx op
)
1197 return (GET_CODE (op
) == CONST
1198 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1199 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1202 /* Helper to generate unspec constant. */
1204 nios2_unspec_offset (rtx loc
, int unspec
)
1206 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1210 /* Generate GOT pointer based address with large offset. */
1212 nios2_large_got_address (rtx offset
)
1214 rtx addr
= gen_reg_rtx (Pmode
);
1215 emit_insn (gen_add3_insn (addr
, pic_offset_table_rtx
,
1216 force_reg (Pmode
, offset
)));
1220 /* Generate a GOT pointer based address. */
1222 nios2_got_address (rtx loc
, int unspec
)
1224 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1225 crtl
->uses_pic_offset_table
= 1;
1227 if (nios2_large_offset_p (unspec
))
1228 return nios2_large_got_address (offset
);
1230 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1233 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1234 return value will be a valid address and move_operand (either a REG
1237 nios2_legitimize_tls_address (rtx loc
)
1240 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1244 case TLS_MODEL_GLOBAL_DYNAMIC
:
1245 tmp
= gen_reg_rtx (Pmode
);
1246 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1247 return nios2_call_tls_get_addr (tmp
);
1249 case TLS_MODEL_LOCAL_DYNAMIC
:
1250 tmp
= gen_reg_rtx (Pmode
);
1251 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1252 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1253 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1255 case TLS_MODEL_INITIAL_EXEC
:
1256 tmp
= gen_reg_rtx (Pmode
);
1257 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1258 emit_move_insn (tmp
, mem
);
1259 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1260 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1262 case TLS_MODEL_LOCAL_EXEC
:
1263 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1264 return gen_rtx_PLUS (Pmode
, tp
,
1265 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1273 If -O3 is used, we want to output a table lookup for
1274 divides between small numbers (both num and den >= 0
1275 and < 0x10). The overhead of this method in the worst
1276 case is 40 bytes in the text section (10 insns) and
1277 256 bytes in the data section. Additional divides do
1278 not incur additional penalties in the data section.
1280 Code speed is improved for small divides by about 5x
1281 when using this method in the worse case (~9 cycles
1282 vs ~45). And in the worst case divides not within the
1283 table are penalized by about 10% (~5 cycles vs ~45).
1284 However in the typical case the penalty is not as bad
1285 because doing the long divide in only 45 cycles is
1288 ??? would be nice to have some benchmarks other
1289 than Dhrystone to back this up.
1291 This bit of expansion is to create this instruction
1298 add $12, $11, divide_table
1304 # continue here with result in $2
1306 ??? Ideally I would like the libcall block to contain all
1307 of this code, but I don't know how to do that. What it
1308 means is that if the divide can be eliminated, it may not
1309 completely disappear.
1311 ??? The __divsi3_table label should ideally be moved out
1312 of this block and into a global. If it is placed into the
1313 sdata section we can save even more cycles by doing things
1316 nios2_emit_expensive_div (rtx
*operands
, machine_mode mode
)
1318 rtx or_result
, shift_left_result
;
1320 rtx_code_label
*lab1
, *lab3
;
1327 /* It may look a little generic, but only SImode is supported for now. */
1328 gcc_assert (mode
== SImode
);
1329 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1331 lab1
= gen_label_rtx ();
1332 lab3
= gen_label_rtx ();
1334 or_result
= expand_simple_binop (SImode
, IOR
,
1335 operands
[1], operands
[2],
1336 0, 0, OPTAB_LIB_WIDEN
);
1338 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1339 GET_MODE (or_result
), 0, lab3
);
1340 JUMP_LABEL (get_last_insn ()) = lab3
;
1342 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1343 operands
[1], GEN_INT (4),
1344 0, 0, OPTAB_LIB_WIDEN
);
1346 lookup_value
= expand_simple_binop (SImode
, IOR
,
1347 shift_left_result
, operands
[2],
1348 0, 0, OPTAB_LIB_WIDEN
);
1349 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1350 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1351 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1353 tmp
= emit_jump_insn (gen_jump (lab1
));
1354 JUMP_LABEL (tmp
) = lab1
;
1358 LABEL_NUSES (lab3
) = 1;
1361 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1362 LCT_CONST
, SImode
, 2,
1363 operands
[1], SImode
,
1364 operands
[2], SImode
);
1366 insns
= get_insns ();
1368 emit_libcall_block (insns
, operands
[0], final_result
,
1369 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1372 LABEL_NUSES (lab1
) = 1;
1376 /* Branches and compares. */
1378 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1379 comparison, e.g. >= 1 into > 0. */
1381 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1382 enum rtx_code
*alt_code
, rtx
*alt_op
,
/* NOTE(review): extraction dropped lines here (the trailing MODE
   parameter and the opening of the function body); verify against the
   complete source before further edits.  */
1385 HOST_WIDE_INT opval
= INTVAL (op
);
1386 enum rtx_code scode
= signed_condition (code
);
/* Decrement cases are LT/GE (signed form); others increment.  */
1387 bool dec_p
= (scode
== LT
|| scode
== GE
);
/* EQ/NE have no useful +/-1 alternate; presumably returned unchanged
   (the body of this early case is missing from the extraction).  */
1389 if (code
== EQ
|| code
== NE
)
/* Build the adjusted constant: opval-1 for decrement cases, opval+1
   otherwise (the assignment's left-hand side is missing above).  */
1397 ? gen_int_mode (opval
- 1, mode
)
1398 : gen_int_mode (opval
+ 1, mode
));
1400 /* The required conversion between [>,>=] and [<,<=] is captured
1401 by a reverse + swap of condition codes. */
1402 *alt_code
= reverse_condition (swap_condition (code
));
1405 /* Test if the incremented/decremented value crosses the over/underflow
1406 boundary. Supposedly, such boundary cases should already be transformed
1407 into always-true/false or EQ conditions, so use an assertion here. */
1408 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
/* NOTE(review): presumably the sign-bit flip below is conditional on a
   signed comparison in the full source — confirm before relying on it.  */
1410 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1411 alt_opval
&= GET_MODE_MASK (mode
);
1412 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1416 /* Return true if the constant comparison is supported by nios2. */
1418 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1422 case EQ
: case NE
: case GE
: case LT
:
1423 return SMALL_INT (INTVAL (op
));
1425 return SMALL_INT_UNSIGNED (INTVAL (op
));
1431 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1432 the current configuration. Perform modifications if MODIFY_P is true.
1433 Returns true if FPU compare can be done. */
1436 nios2_validate_fpu_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
/* NOTE(review): the extraction dropped the trailing MODIFY_P parameter
   line and several body lines (operand-swap handling and the early
   returns); verify against the complete source before editing.  */
1440 enum rtx_code code
= GET_CODE (*cmp
);
/* If CODE is not available on this FPU configuration, try the swapped
   condition, which exchanges the operand order.  */
1442 if (!nios2_fpu_compare_enabled (code
, mode
))
1444 code
= swap_condition (code
);
1445 if (nios2_fpu_compare_enabled (code
, mode
))
/* Rebuild the comparison with both operands forced into registers.  */
1459 *op1
= force_reg (mode
, *op1
);
1460 *op2
= force_reg (mode
, *op2
);
1461 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1466 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1467 nios2 supported form. Returns true if success. */
1469 nios2_validate_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1471 enum rtx_code code
= GET_CODE (*cmp
);
1472 enum rtx_code alt_code
;
1475 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1476 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1478 if (!reg_or_0_operand (*op2
, mode
))
1480 /* Create alternate constant compare. */
1481 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1483 /* If alterate op2 is zero(0), we can use it directly, possibly
1484 swapping the compare code. */
1485 if (alt_op2
== const0_rtx
)
1489 goto check_rebuild_cmp
;
1492 /* Check if either constant compare can be used. */
1493 if (nios2_valid_compare_const_p (code
, *op2
))
1495 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1502 /* We have to force op2 into a register now. Try to pick one
1503 with a lower cost. */
1504 if (! nios2_simple_const_p (*op2
)
1505 && nios2_simple_const_p (alt_op2
))
1510 *op2
= force_reg (SImode
, *op2
);
1513 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1515 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1516 code
= swap_condition (code
);
1519 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1524 /* Addressing Modes. */
1526 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1528 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1531 split_const (x
, &base
, &offset
);
1532 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1535 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1537 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1539 return nios2_legitimate_constant_p (mode
, x
) == false;
1542 /* Return true if register REGNO is a valid base register.
1543 STRICT_P is true if REG_OK_STRICT is in effect. */
1546 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1548 if (!HARD_REGISTER_NUM_P (regno
))
1556 regno
= reg_renumber
[regno
];
1559 /* The fake registers will be eliminated to either the stack or
1560 hard frame pointer, both of which are usually valid base registers.
1561 Reload deals with the cases where the eliminated form isn't valid. */
1562 return (GP_REG_P (regno
)
1563 || regno
== FRAME_POINTER_REGNUM
1564 || regno
== ARG_POINTER_REGNUM
);
1567 /* Return true if the address expression formed by BASE + OFFSET is
1570 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1572 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1573 base
= SUBREG_REG (base
);
1574 return (REG_P (base
)
1575 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1576 && (offset
== NULL_RTX
1577 || const_arith_operand (offset
, Pmode
)
1578 || nios2_unspec_reloc_p (offset
)));
1581 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1583 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
1584 rtx operand
, bool strict_p
)
1586 switch (GET_CODE (operand
))
1590 if (SYMBOL_REF_TLS_MODEL (operand
))
1593 if (nios2_symbol_ref_in_small_data_p (operand
))
1596 /* Else, fall through. */
1603 /* Register indirect. */
1605 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
1607 /* Register indirect with displacement. */
1610 rtx op0
= XEXP (operand
, 0);
1611 rtx op1
= XEXP (operand
, 1);
1613 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
1614 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
/* Return true if SECTION is a small section name, i.e. .sbss/.sdata
   or one of their dotted sub-sections.  */
static bool
nios2_small_section_name_p (const char *section)
{
  if (strcmp (section, ".sbss") == 0 || strcmp (section, ".sdata") == 0)
    return true;
  return (strncmp (section, ".sbss.", 6) == 0
	  || strncmp (section, ".sdata.", 7) == 0);
}
1633 /* Return true if EXP should be placed in the small data section. */
1635 nios2_in_small_data_p (const_tree exp
)
1637 /* We want to merge strings, so we never consider them small data. */
1638 if (TREE_CODE (exp
) == STRING_CST
)
1641 if (TREE_CODE (exp
) == VAR_DECL
)
1643 if (DECL_SECTION_NAME (exp
))
1645 const char *section
= DECL_SECTION_NAME (exp
);
1646 if (nios2_section_threshold
> 0
1647 && nios2_small_section_name_p (section
))
1652 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
1654 /* If this is an incomplete type with size 0, then we can't put it
1655 in sdata because it might be too big when completed. */
1657 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
1665 /* Return true if symbol is in small data section. */
1668 nios2_symbol_ref_in_small_data_p (rtx sym
)
/* Only SYMBOL_REFs may be queried here.  */
1670 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
/* NOTE(review): the leading conjunct(s) of the return expression were
   lost in extraction; the visible tail shows the conditions that must
   all hold for GP-relative (small-data) access.  */
1673 /* GP-relative access cannot be used for externally defined symbols,
1674 because the compilation unit that defines the symbol may place it
1675 in a section that cannot be reached from GP. */
1676 && !SYMBOL_REF_EXTERNAL_P (sym
)
1677 /* True if a symbol is both small and not weak. */
1678 && SYMBOL_REF_SMALL_P (sym
)
1679 && !(SYMBOL_REF_DECL (sym
) && DECL_WEAK (SYMBOL_REF_DECL (sym
)))
1680 /* TLS variables are not accessed through the GP. */
1681 && SYMBOL_REF_TLS_MODEL (sym
) == 0);
1685 /* Implement TARGET_SECTION_TYPE_FLAGS. */
1688 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
1692 flags
= default_section_type_flags (decl
, name
, reloc
);
1694 if (nios2_small_section_name_p (name
))
1695 flags
|= SECTION_SMALL
;
1700 /* Return true if SYMBOL_REF X binds locally. */
1703 nios2_symbol_binds_local_p (const_rtx x
)
1705 return (SYMBOL_REF_DECL (x
)
1706 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
1707 : SYMBOL_REF_LOCAL_P (x
));
1710 /* Position independent code related. */
1712 /* Emit code to load the PIC register. */
1714 nios2_load_pic_register (void)
1716 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
1718 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
1719 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
1722 /* Generate a PIC address as a MEM rtx. */
1724 nios2_load_pic_address (rtx sym
, int unspec
)
1727 && GET_CODE (sym
) == SYMBOL_REF
1728 && nios2_symbol_binds_local_p (sym
))
1729 /* Under -fPIC, generate a GOTOFF address for local symbols. */
1730 return nios2_got_address (sym
, UNSPEC_PIC_GOTOFF_SYM
);
1732 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
1735 /* Nonzero if the constant value X is a legitimate general operand
1736 when generating PIC code. It is given that flag_pic is on and
1737 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1739 nios2_legitimate_pic_operand_p (rtx x
)
1741 if (GET_CODE (x
) == CONST
1742 && GET_CODE (XEXP (x
, 0)) == UNSPEC
1743 && nios2_large_offset_p (XINT (XEXP (x
, 0), 1)))
1746 return ! (GET_CODE (x
) == SYMBOL_REF
1747 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
1750 /* Return TRUE if X is a thread-local symbol. */
1752 nios2_tls_symbol_p (rtx x
)
1754 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
1755 && SYMBOL_REF_TLS_MODEL (x
) != 0);
1758 /* Legitimize addresses that are CONSTANT_P expressions. */
1760 nios2_legitimize_constant_address (rtx addr
)
1763 split_const (addr
, &base
, &offset
);
1765 if (nios2_tls_symbol_p (base
))
1766 base
= nios2_legitimize_tls_address (base
);
1768 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
);
1772 if (offset
!= const0_rtx
)
1774 gcc_assert (can_create_pseudo_p ());
1775 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
1776 (CONST_INT_P (offset
)
1777 ? (SMALL_INT (INTVAL (offset
))
1778 ? offset
: force_reg (Pmode
, offset
))
1784 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1786 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1787 machine_mode mode ATTRIBUTE_UNUSED
)
1790 return nios2_legitimize_constant_address (x
);
1792 /* For the TLS LE (Local Exec) model, the compiler may try to
1793 combine constant offsets with unspec relocs, creating address RTXs
1795 (plus:SI (reg:SI 23 r23)
1798 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
1799 (const_int 48 [0x30]))))
1801 This usually happens when 'var' is a thread-local struct variable,
1802 and access of a field in var causes the addend.
1804 We typically want this combining, so transform the above into this
1805 form, which is allowed:
1806 (plus:SI (reg:SI 23 r23)
1810 (plus:SI (symbol_ref:SI ("var"))
1811 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
1813 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
1814 if (GET_CODE (x
) == PLUS
1815 && GET_CODE (XEXP (x
, 0)) == REG
1816 && GET_CODE (XEXP (x
, 1)) == CONST
)
1818 rtx unspec
, offset
, reg
= XEXP (x
, 0);
1819 split_const (XEXP (x
, 1), &unspec
, &offset
);
1820 if (GET_CODE (unspec
) == UNSPEC
1821 && !nios2_large_offset_p (XINT (unspec
, 1))
1822 && offset
!= const0_rtx
)
1824 unspec
= copy_rtx (unspec
);
1825 XVECEXP (unspec
, 0, 0)
1826 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
1827 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
1835 nios2_delegitimize_address (rtx x
)
1837 x
= delegitimize_mem_from_attrs (x
);
1839 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
1841 switch (XINT (XEXP (x
, 0), 1))
1843 case UNSPEC_PIC_SYM
:
1844 case UNSPEC_PIC_CALL_SYM
:
1845 case UNSPEC_PIC_GOTOFF_SYM
:
1846 case UNSPEC_ADD_TLS_GD
:
1847 case UNSPEC_ADD_TLS_LDM
:
1848 case UNSPEC_LOAD_TLS_IE
:
1849 case UNSPEC_ADD_TLS_LE
:
1850 x
= XVECEXP (XEXP (x
, 0), 0, 0);
1851 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
1858 /* Main expander function for RTL moves. */
1860 nios2_emit_move_sequence (rtx
*operands
, machine_mode mode
)
1862 rtx to
= operands
[0];
1863 rtx from
= operands
[1];
/* A non-register destination with a non-register source needs FROM
   copied into a register first; only possible while pseudos exist.  */
1865 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
1867 gcc_assert (can_create_pseudo_p ());
1868 from
= copy_to_mode_reg (mode
, from
);
/* Symbolic constants (but not unspec relocations, which are already in
   legitimate form) must be legitimized before the move.  */
1871 if (GET_CODE (from
) == SYMBOL_REF
|| GET_CODE (from
) == LABEL_REF
1872 || (GET_CODE (from
) == CONST
1873 && GET_CODE (XEXP (from
, 0)) != UNSPEC
))
1874 from
= nios2_legitimize_constant_address (from
);
/* NOTE(review): the function tail (writing back operands and the
   return value) was lost in extraction; consult the full source.  */
1881 /* The function with address *ADDR is being called. If the address
1882 needs to be loaded from the GOT, emit the instruction to do so and
1883 update *ADDR to point to the rtx for the loaded value. */
1885 nios2_adjust_call_address (rtx
*call_op
)
1888 gcc_assert (MEM_P (*call_op
));
1889 addr
= XEXP (*call_op
, 0);
1890 if (flag_pic
&& CONSTANT_P (addr
))
1892 rtx reg
= gen_reg_rtx (Pmode
);
1893 emit_move_insn (reg
, nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
));
1894 XEXP (*call_op
, 0) = reg
;
1899 /* Output assembly language related definitions. */
1901 /* Print the operand OP to file stream FILE modified by LETTER.
1902 LETTER can be one of:
1904 i: print "i" if OP is an immediate, except 0
1905 o: print "io" if OP is volatile
1906 z: for const0_rtx print $0 instead of 0
1909 U: for upper half of 32 bit value
1910 D: for the upper 32-bits of a 64-bit double value
1911 R: prints reverse condition.
1914 nios2_print_operand (FILE *file
, rtx op
, int letter
)
1920 if (CONSTANT_P (op
) && op
!= const0_rtx
)
1921 fprintf (file
, "i");
1925 if (GET_CODE (op
) == MEM
1926 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
1927 || TARGET_BYPASS_CACHE
))
1928 fprintf (file
, "io");
1935 if (comparison_operator (op
, VOIDmode
))
1937 enum rtx_code cond
= GET_CODE (op
);
1940 fprintf (file
, "%s", GET_RTX_NAME (cond
));
1945 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
1950 switch (GET_CODE (op
))
1953 if (letter
== 0 || letter
== 'z')
1955 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1958 else if (letter
== 'D')
1960 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
1966 if (INTVAL (op
) == 0 && letter
== 'z')
1968 fprintf (file
, "zero");
1974 HOST_WIDE_INT val
= INTVAL (op
);
1975 val
= (val
>> 16) & 0xFFFF;
1976 output_addr_const (file
, gen_int_mode (val
, SImode
));
1979 /* Else, fall through. */
1985 if (letter
== 0 || letter
== 'z')
1987 output_addr_const (file
, op
);
1990 else if (letter
== 'H' || letter
== 'L')
1992 fprintf (file
, "%%");
1993 if (GET_CODE (op
) == CONST
1994 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
1996 rtx unspec
= XEXP (op
, 0);
1997 int unspec_reloc
= XINT (unspec
, 1);
1998 gcc_assert (nios2_large_offset_p (unspec_reloc
));
1999 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
2000 op
= XVECEXP (unspec
, 0, 0);
2002 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
2003 output_addr_const (file
, op
);
2004 fprintf (file
, ")");
2013 output_address (op
);
2021 output_addr_const (file
, op
);
2030 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
2034 /* Return true if this is a GP-relative accessible reference. */
2036 gprel_constant_p (rtx op
)
2038 if (GET_CODE (op
) == SYMBOL_REF
2039 && nios2_symbol_ref_in_small_data_p (op
))
2041 else if (GET_CODE (op
) == CONST
2042 && GET_CODE (XEXP (op
, 0)) == PLUS
)
2043 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
2048 /* Return the name string for a supported unspec reloc offset. */
2050 nios2_unspec_reloc_name (int unspec
)
2054 case UNSPEC_PIC_SYM
:
2056 case UNSPEC_PIC_CALL_SYM
:
2058 case UNSPEC_PIC_GOTOFF_SYM
:
2060 case UNSPEC_LOAD_TLS_IE
:
2062 case UNSPEC_ADD_TLS_LE
:
2064 case UNSPEC_ADD_TLS_GD
:
2066 case UNSPEC_ADD_TLS_LDM
:
2068 case UNSPEC_ADD_TLS_LDO
:
2075 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2077 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2080 gcc_assert (GET_CODE (op
) == UNSPEC
);
2082 /* Support for printing out const unspec relocations. */
2083 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2086 fprintf (file
, "%%%s(", name
);
2087 output_addr_const (file
, XVECEXP (op
, 0, 0));
2088 fprintf (file
, ")");
2094 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2096 nios2_print_operand_address (FILE *file
, rtx op
)
2098 switch (GET_CODE (op
))
2105 if (gprel_constant_p (op
))
2107 fprintf (file
, "%%gprel(");
2108 output_addr_const (file
, op
);
2109 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2117 rtx op0
= XEXP (op
, 0);
2118 rtx op1
= XEXP (op
, 1);
2120 if (REG_P (op0
) && CONSTANT_P (op1
))
2122 output_addr_const (file
, op1
);
2123 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2126 else if (REG_P (op1
) && CONSTANT_P (op0
))
2128 output_addr_const (file
, op0
);
2129 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2136 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2141 rtx base
= XEXP (op
, 0);
2142 nios2_print_operand_address (file
, base
);
2149 fprintf (stderr
, "Missing way to print address\n");
2154 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2156 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2158 gcc_assert (size
== 4);
2159 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2160 output_addr_const (file
, x
);
2161 fprintf (file
, ")");
2164 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2166 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2168 if (flag_verbose_asm
|| flag_debug_asm
)
2170 nios2_compute_frame_layout ();
2171 nios2_dump_frame_layout (file
);
2175 /* Emit assembly of custom FPU instructions. */
2177 nios2_fpu_insn_asm (enum n2fpu_code code
)
2179 static char buf
[256];
2180 const char *op1
, *op2
, *op3
;
2181 int ln
= 256, n
= 0;
2183 int N
= N2FPU_N (code
);
2184 int num_operands
= N2FPU (code
).num_operands
;
2185 const char *insn_name
= N2FPU_NAME (code
);
2186 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2187 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2188 machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2190 /* Prepare X register for DF input operands. */
2191 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2192 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2193 N2FPU_N (n2fpu_fwrx
));
2195 if (src_mode
== SFmode
)
2197 if (dst_mode
== VOIDmode
)
2199 /* The fwry case. */
2206 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2208 op3
= (num_operands
== 2 ? "zero" : "%2");
2211 else if (src_mode
== DFmode
)
2213 if (dst_mode
== VOIDmode
)
2215 /* The fwrx case. */
2223 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2224 op2
= (num_operands
== 2 ? "%1" : "%2");
2225 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2228 else if (src_mode
== VOIDmode
)
2230 /* frdxlo, frdxhi, frdy cases. */
2231 gcc_assert (dst_mode
== SFmode
);
2235 else if (src_mode
== SImode
)
2237 /* Conversion operators. */
2238 gcc_assert (num_operands
== 2);
2239 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2246 /* Main instruction string. */
2247 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2248 N
, op1
, op2
, op3
, insn_name
,
2249 (num_operands
>= 2 ? ", %1" : ""),
2250 (num_operands
== 3 ? ", %2" : ""));
2252 /* Extraction of Y register for DF results. */
2253 if (dst_mode
== DFmode
)
2254 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2255 N2FPU_N (n2fpu_frdy
));
2261 /* Function argument related. */
2263 /* Define where to put the arguments to a function. Value is zero to
2264 push the argument on the stack, or a hard register in which to
2267 MODE is the argument's machine mode.
2268 TYPE is the data type of the argument (as a tree).
2269 This is null for libcalls where that information may
2271 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2272 the preceding args and about the function being called.
2273 NAMED is nonzero if this argument is a named parameter
2274 (otherwise it is an extra parameter matching an ellipsis). */
2277 nios2_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
2278 const_tree type ATTRIBUTE_UNUSED
,
2279 bool named ATTRIBUTE_UNUSED
)
2281 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2282 rtx return_rtx
= NULL_RTX
;
2284 if (cum
->regs_used
< NUM_ARG_REGS
)
2285 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2290 /* Return number of bytes, at the beginning of the argument, that must be
2291 put in registers. 0 is the argument is entirely in registers or entirely
2295 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2296 machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2297 bool named ATTRIBUTE_UNUSED
)
2299 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2300 HOST_WIDE_INT param_size
;
2302 if (mode
== BLKmode
)
2304 param_size
= int_size_in_bytes (type
);
2305 gcc_assert (param_size
>= 0);
2308 param_size
= GET_MODE_SIZE (mode
);
2310 /* Convert to words (round up). */
2311 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2313 if (cum
->regs_used
< NUM_ARG_REGS
2314 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2315 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2320 /* Update the data in CUM to advance over an argument of mode MODE
2321 and data type TYPE; TYPE is null for libcalls where that information
2322 may not be available. */
2325 nios2_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2326 const_tree type ATTRIBUTE_UNUSED
,
2327 bool named ATTRIBUTE_UNUSED
)
2329 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2330 HOST_WIDE_INT param_size
;
2332 if (mode
== BLKmode
)
2334 param_size
= int_size_in_bytes (type
);
2335 gcc_assert (param_size
>= 0);
2338 param_size
= GET_MODE_SIZE (mode
);
2340 /* Convert to words (round up). */
2341 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2343 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2344 cum
->regs_used
= NUM_ARG_REGS
;
2346 cum
->regs_used
+= param_size
;
2350 nios2_function_arg_padding (machine_mode mode
, const_tree type
)
2352 /* On little-endian targets, the first byte of every stack argument
2353 is passed in the first byte of the stack slot. */
2354 if (!BYTES_BIG_ENDIAN
)
2357 /* Otherwise, integral types are padded downward: the last byte of a
2358 stack argument is passed in the last byte of the stack slot. */
2360 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2361 : GET_MODE_CLASS (mode
) == MODE_INT
)
2364 /* Arguments smaller than a stack slot are padded downward. */
2365 if (mode
!= BLKmode
)
2366 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
2368 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2369 ? upward
: downward
);
2373 nios2_block_reg_padding (machine_mode mode
, tree type
,
2374 int first ATTRIBUTE_UNUSED
)
2376 return nios2_function_arg_padding (mode
, type
);
2379 /* Emit RTL insns to initialize the variable parts of a trampoline.
2380 FNADDR is an RTX for the address of the function's pure code.
2381 CXT is an RTX for the static chain value for the function.
2382 On Nios II, we handle this by a library call. */
2384 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2386 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2387 rtx ctx_reg
= force_reg (Pmode
, cxt
);
2388 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
2390 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
2391 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
2395 /* Implement TARGET_FUNCTION_VALUE. */
2397 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
2398 bool outgoing ATTRIBUTE_UNUSED
)
2400 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
2403 /* Implement TARGET_LIBCALL_VALUE. */
2405 nios2_libcall_value (machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
2407 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
2410 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
2412 nios2_function_value_regno_p (const unsigned int regno
)
2414 return regno
== FIRST_RETVAL_REGNO
;
2417 /* Implement TARGET_RETURN_IN_MEMORY. */
2419 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2421 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
2422 || int_size_in_bytes (type
) == -1);
2425 /* TODO: It may be possible to eliminate the copyback and implement
2428 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
2429 machine_mode mode
, tree type
,
2430 int *pretend_size
, int second_time
)
2432 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2433 CUMULATIVE_ARGS local_cum
;
2434 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
/* Advance a private copy of CUM past the last named argument to find
   how many argument registers remain for the anonymous ones.
   NOTE(review): the copy into LOCAL_CUM and some declarations were
   lost in extraction.  */
2439 nios2_function_arg_advance (local_cum_v
, mode
, type
, 1);
2441 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
/* On the first pass, spill the remaining argument registers to the
   incoming-args area, bracketed by blockages so the stores are not
   moved or deleted.  */
2443 if (!second_time
&& regs_to_push
> 0)
2445 rtx ptr
= virtual_incoming_args_rtx
;
2446 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
2447 emit_insn (gen_blockage ());
2448 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
/* NOTE(review): the register-count argument of the call above was lost
   in extraction.  */
2450 emit_insn (gen_blockage ());
/* Report the pretend-args size back to the middle end.  */
2453 pret_size
= regs_to_push
* UNITS_PER_WORD
;
2455 *pretend_size
= pret_size
;
2460 /* Init FPU builtins. */
2462 nios2_init_fpu_builtins (int start_code
)
2465 char builtin_name
[64] = "__builtin_custom_";
2466 unsigned int i
, n
= strlen ("__builtin_custom_");
2468 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2470 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
2471 "%s", N2FPU_NAME (i
));
2473 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
2474 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2475 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
2479 /* Helper function for expanding FPU builtins. */
2481 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
2483 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2484 enum insn_code icode
= N2FPU_ICODE (code
);
2485 int nargs
, argno
, opno
= 0;
2486 int num_operands
= N2FPU (code
).num_operands
;
2487 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
2488 bool has_target_p
= (dst_mode
!= VOIDmode
);
2490 if (N2FPU_N (code
) < 0)
2491 fatal_error ("Cannot call %<__builtin_custom_%s%> without specifying switch"
2492 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
2494 create_output_operand (&ops
[opno
++], target
, dst_mode
);
2496 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
2498 nargs
= call_expr_nargs (exp
);
2499 for (argno
= 0; argno
< nargs
; argno
++)
2501 tree arg
= CALL_EXPR_ARG (exp
, argno
);
2502 create_input_operand (&ops
[opno
++], expand_normal (arg
),
2503 TYPE_MODE (TREE_TYPE (arg
)));
2505 if (!maybe_expand_insn (icode
, num_operands
, ops
))
2507 error ("invalid argument to built-in function");
2508 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2510 return has_target_p
? ops
[0].value
: const0_rtx
;
/* Nios II has custom instruction built-in functions of the forms:
     __builtin_custom_n
     __builtin_custom_nX
     __builtin_custom_nXX
     __builtin_custom_Xn
     __builtin_custom_XnX
     __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.  */

#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))

/* Saved "XnXX" suffix strings, indexed by builtin code offset; the
   longest suffix ("XnXX") is 4 chars plus the NUL terminator.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
2529 nios2_init_custom_builtins (int start_code
)
2531 tree builtin_ftype
, ret_type
, fndecl
;
2532 char builtin_name
[32] = "__builtin_custom_";
2533 int n
= strlen ("__builtin_custom_");
2534 int builtin_code
= 0;
2535 int lhs
, rhs1
, rhs2
;
2537 struct { tree type
; const char *c
; } op
[4];
2538 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
2539 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
2540 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
2541 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
2543 /* We enumerate through the possible operand types to create all the
2544 __builtin_custom_XnXX function tree types. Note that these may slightly
2545 overlap with the function types created for other fixed builtins. */
2547 for (lhs
= 0; lhs
< 4; lhs
++)
2548 for (rhs1
= 0; rhs1
< 4; rhs1
++)
2549 for (rhs2
= 0; rhs2
< 4; rhs2
++)
2551 if (rhs1
== 0 && rhs2
!= 0)
2553 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
2555 = build_function_type_list (ret_type
, integer_type_node
,
2556 op
[rhs1
].type
, op
[rhs2
].type
,
2558 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
2559 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
2560 /* Save copy of parameter string into custom_builtin_name[]. */
2561 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
2563 add_builtin_function (builtin_name
, builtin_ftype
,
2564 start_code
+ builtin_code
,
2565 BUILT_IN_MD
, NULL
, NULL_TREE
);
2566 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
2571 /* Helper function for expanding custom builtins. */
2573 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
2575 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
2576 machine_mode tmode
= VOIDmode
;
2578 rtx value
, insn
, unspec_args
[3];
2584 tmode
= TYPE_MODE (TREE_TYPE (exp
));
2585 if (!target
|| GET_MODE (target
) != tmode
2587 target
= gen_reg_rtx (tmode
);
2590 nargs
= call_expr_nargs (exp
);
2591 for (argno
= 0; argno
< nargs
; argno
++)
2593 arg
= CALL_EXPR_ARG (exp
, argno
);
2594 value
= expand_normal (arg
);
2595 unspec_args
[argno
] = value
;
2598 if (!custom_insn_opcode (value
, VOIDmode
))
2599 error ("custom instruction opcode must be compile time "
2600 "constant in the range 0-255 for __builtin_custom_%s",
2601 custom_builtin_name
[index
]);
2604 /* For other arguments, force into a register. */
2605 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
2606 unspec_args
[argno
]);
2608 /* Fill remaining unspec operands with zero. */
2609 for (; argno
< 3; argno
++)
2610 unspec_args
[argno
] = const0_rtx
;
2612 insn
= (has_target_p
2613 ? gen_rtx_SET (VOIDmode
, target
,
2614 gen_rtx_UNSPEC_VOLATILE (tmode
,
2615 gen_rtvec_v (3, unspec_args
),
2616 UNSPECV_CUSTOM_XNXX
))
2617 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
2618 UNSPECV_CUSTOM_NXX
));
2620 return has_target_p
? target
: const0_rtx
;
2626 /* Main definition of built-in functions. Nios II has a small number of fixed
2627 builtins, plus a large number of FPU insn builtins, and builtins for
2628 generating custom instructions. */
2630 struct nios2_builtin_desc
2632 enum insn_code icode
;
2633 enum nios2_ftcode ftype
;
2637 #define N2_BUILTINS \
2638 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
2639 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
2640 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
2641 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
2642 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
2643 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
2644 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
2645 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
2646 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
2647 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
2648 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
2650 enum nios2_builtin_code
{
2651 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
2653 #undef N2_BUILTIN_DEF
2654 NUM_FIXED_NIOS2_BUILTINS
2657 static const struct nios2_builtin_desc nios2_builtins
[] = {
2658 #define N2_BUILTIN_DEF(name, ftype) \
2659 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
2661 #undef N2_BUILTIN_DEF
2664 /* Start/ends of FPU/custom insn builtin index ranges. */
2665 static unsigned int nios2_fpu_builtin_base
;
2666 static unsigned int nios2_custom_builtin_base
;
2667 static unsigned int nios2_custom_builtin_end
;
2669 /* Implement TARGET_INIT_BUILTINS. */
2671 nios2_init_builtins (void)
2675 /* Initialize fixed builtins. */
2676 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
2678 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
2680 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
2681 BUILT_IN_MD
, NULL
, NULL
);
2682 nios2_register_builtin_fndecl (i
, fndecl
);
2685 /* Initialize FPU builtins. */
2686 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
2687 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
2689 /* Initialize custom insn builtins. */
2690 nios2_custom_builtin_base
2691 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
2692 nios2_custom_builtin_end
2693 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
2694 nios2_init_custom_builtins (nios2_custom_builtin_base
);
2697 /* Array of fndecls for TARGET_BUILTIN_DECL. */
2698 #define NIOS2_NUM_BUILTINS \
2699 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
2700 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
2703 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
2705 nios2_builtin_decls
[code
] = fndecl
;
2708 /* Implement TARGET_BUILTIN_DECL. */
2710 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
2712 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
2714 if (code
>= nios2_custom_builtin_end
)
2715 return error_mark_node
;
2717 if (code
>= nios2_fpu_builtin_base
2718 && code
< nios2_custom_builtin_base
2719 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
2720 return error_mark_node
;
2722 return nios2_builtin_decls
[code
];
2726 /* Low-level built-in expand routine. */
2728 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
2729 struct expand_operand
*ops
, bool has_target_p
)
2731 if (maybe_expand_insn (d
->icode
, n
, ops
))
2732 return has_target_p
? ops
[0].value
: const0_rtx
;
2735 error ("invalid argument to built-in function %s", d
->name
);
2736 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2740 /* Expand ldio/stio form load-store instruction builtins. */
2742 nios2_expand_ldstio_builtin (tree exp
, rtx target
,
2743 const struct nios2_builtin_desc
*d
)
2747 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2748 machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
2750 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2751 mem
= gen_rtx_MEM (mode
, addr
);
2753 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
2756 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2757 if (CONST_INT_P (val
))
2758 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
2759 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
2760 create_output_operand (&ops
[0], mem
, mode
);
2761 create_input_operand (&ops
[1], val
, mode
);
2762 has_target_p
= false;
2767 create_output_operand (&ops
[0], target
, mode
);
2768 create_input_operand (&ops
[1], mem
, mode
);
2769 has_target_p
= true;
2771 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2774 /* Expand rdctl/wrctl builtins. */
2776 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
2777 const struct nios2_builtin_desc
*d
)
2779 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
2780 == register_operand
);
2781 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2782 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2783 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
2785 error ("Control register number must be in range 0-31 for %s",
2787 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
2791 create_output_operand (&ops
[0], target
, SImode
);
2792 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
2796 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2797 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
2798 create_input_operand (&ops
[1], val
, SImode
);
2800 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2803 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
2804 a built-in function, with result going to TARGET if that's convenient
2805 (and in mode MODE if that's convenient).
2806 SUBTARGET may be used as the target for computing one of EXP's operands.
2807 IGNORE is nonzero if the value is to be ignored. */
2810 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
2811 machine_mode mode ATTRIBUTE_UNUSED
,
2812 int ignore ATTRIBUTE_UNUSED
)
2814 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2815 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2817 if (fcode
< nios2_fpu_builtin_base
)
2819 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
2823 case NIOS2_BUILTIN_sync
:
2824 emit_insn (gen_sync ());
2827 case NIOS2_BUILTIN_ldbio
:
2828 case NIOS2_BUILTIN_ldbuio
:
2829 case NIOS2_BUILTIN_ldhio
:
2830 case NIOS2_BUILTIN_ldhuio
:
2831 case NIOS2_BUILTIN_ldwio
:
2832 case NIOS2_BUILTIN_stbio
:
2833 case NIOS2_BUILTIN_sthio
:
2834 case NIOS2_BUILTIN_stwio
:
2835 return nios2_expand_ldstio_builtin (exp
, target
, d
);
2837 case NIOS2_BUILTIN_rdctl
:
2838 case NIOS2_BUILTIN_wrctl
:
2839 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
2845 else if (fcode
< nios2_custom_builtin_base
)
2846 /* FPU builtin range. */
2847 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
2849 else if (fcode
< nios2_custom_builtin_end
)
2850 /* Custom insn builtin range. */
2851 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
2857 /* Implement TARGET_INIT_LIBFUNCS. */
2859 nios2_init_libfuncs (void)
2861 /* For Linux, we have access to kernel support for atomic operations. */
2862 if (TARGET_LINUX_ABI
)
2863 init_sync_libfuncs (UNITS_PER_WORD
);
2868 /* Register a custom code use, and signal error if a conflict was found. */
2870 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
2873 gcc_assert (N
<= 255);
2875 if (status
== CCS_FPU
)
2877 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
2879 custom_code_conflict
= true;
2880 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
2881 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
2883 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
2885 custom_code_conflict
= true;
2886 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2887 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
2888 N2FPU_NAME (index
));
2891 else if (status
== CCS_BUILTIN_CALL
)
2893 if (custom_code_status
[N
] == CCS_FPU
)
2895 custom_code_conflict
= true;
2896 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2897 "%<-mcustom-%s%>", custom_builtin_name
[index
],
2898 N2FPU_NAME (custom_code_index
[N
]));
2902 /* Note that code conflicts between different __builtin_custom_xnxx
2903 calls are not checked. */
2909 custom_code_status
[N
] = status
;
2910 custom_code_index
[N
] = index
;
2913 /* Mark a custom code as not in use. */
2915 nios2_deregister_custom_code (unsigned int N
)
2919 custom_code_status
[N
] = CCS_UNUSED
;
2920 custom_code_index
[N
] = 0;
2924 /* Target attributes can affect per-function option state, so we need to
2925 save/restore the custom code tracking info using the
2926 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
2929 nios2_option_save (struct cl_target_option
*ptr
,
2930 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
2933 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2934 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
2935 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
2936 sizeof (custom_code_status
));
2937 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
2938 sizeof (custom_code_index
));
2942 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
2943 struct cl_target_option
*ptr
)
2946 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2947 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
2948 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
2949 sizeof (custom_code_status
));
2950 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
2951 sizeof (custom_code_index
));
/* NOTE(review): the text below is a mangled extract of
   nios2_valid_target_attribute_rec; many interior lines (braces,
   pointer-advance statements, the code/i declarations, returns) were
   dropped by the extraction, so the body is preserved verbatim rather
   than reconstructed.  The function parses the string(s) of a
   target(...) attribute: it recurses over TREE_LISTs, and for a
   STRING_CST splits on commas, handling "no-" prefixes,
   "custom-fpu-cfg=<cfg>", and "custom-<insn>=<N>" clauses, reporting
   errors for malformed input.  Verify any edits against upstream
   gcc/config/nios2/nios2.c.
   Also flagged for a future fix: the `custom-%s=' diagnostic uses
   old-style quotes instead of %<...%>, and "recognised" is a
   non-canonical spelling for GCC diagnostics.  */
2954 /* Inner function to process the attribute((target(...))), take an argument and
2955 set the current options from the argument. If we have a list, recursively
2956 go over the list. */
2959 nios2_valid_target_attribute_rec (tree args
)
2961 if (TREE_CODE (args
) == TREE_LIST
)
2964 for (; args
; args
= TREE_CHAIN (args
))
2965 if (TREE_VALUE (args
)
2966 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
2970 else if (TREE_CODE (args
) == STRING_CST
)
2972 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
2973 while (argstr
&& *argstr
!= '\0')
2975 bool no_opt
= false, end_p
= false;
2976 char *eq
= NULL
, *p
;
2977 while (ISSPACE (*argstr
))
2980 while (*p
!= '\0' && *p
!= ',')
2982 if (!eq
&& *p
== '=')
2992 if (!strncmp (argstr
, "no-", 3))
2997 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
3002 error ("custom-fpu-cfg option does not support %<no-%>");
3007 error ("custom-fpu-cfg option requires configuration"
3011 /* Increment and skip whitespace. */
3012 while (ISSPACE (*(++eq
))) ;
3013 /* Decrement and skip to before any trailing whitespace. */
3014 while (ISSPACE (*(--end_eq
))) ;
3016 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
3018 else if (!strncmp (argstr
, "custom-", 7))
3022 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3023 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
3024 strlen (N2FPU_NAME (i
))))
3036 error ("%<no-custom-%s%> does not accept arguments",
3040 /* Disable option by setting to -1. */
3041 nios2_deregister_custom_code (N2FPU_N (code
));
3042 N2FPU_N (code
) = -1;
3048 while (ISSPACE (*(++eq
))) ;
3051 error ("%<custom-%s=%> requires argument",
3055 for (t
= eq
; t
!= p
; ++t
)
3061 error ("`custom-%s=' argument requires "
3062 "numeric digits", N2FPU_NAME (code
));
3066 /* Set option to argument. */
3067 N2FPU_N (code
) = atoi (eq
);
3068 nios2_handle_custom_fpu_insn_option (code
);
3073 error ("%<custom-%s=%> is not recognised as FPU instruction",
3080 error ("%<%s%> is unknown", argstr
);
3095 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3098 nios2_valid_target_attribute_tree (tree args
)
3100 if (!nios2_valid_target_attribute_rec (args
))
3102 nios2_custom_check_insns ();
3103 return build_target_option_node (&global_options
);
3106 /* Hook to validate attribute((target("string"))). */
3109 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3110 tree args
, int ARG_UNUSED (flags
))
3112 struct cl_target_option cur_target
;
3114 tree old_optimize
= build_optimization_node (&global_options
);
3115 tree new_target
, new_optimize
;
3116 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3118 /* If the function changed the optimization levels as well as setting target
3119 options, start with the optimizations specified. */
3120 if (func_optimize
&& func_optimize
!= old_optimize
)
3121 cl_optimization_restore (&global_options
,
3122 TREE_OPTIMIZATION (func_optimize
));
3124 /* The target attributes may also change some optimization flags, so update
3125 the optimization options if necessary. */
3126 cl_target_option_save (&cur_target
, &global_options
);
3127 new_target
= nios2_valid_target_attribute_tree (args
);
3128 new_optimize
= build_optimization_node (&global_options
);
3135 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3137 if (old_optimize
!= new_optimize
)
3138 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
3141 cl_target_option_restore (&global_options
, &cur_target
);
3143 if (old_optimize
!= new_optimize
)
3144 cl_optimization_restore (&global_options
,
3145 TREE_OPTIMIZATION (old_optimize
));
3149 /* Remember the last target of nios2_set_current_function. */
3150 static GTY(()) tree nios2_previous_fndecl
;
3152 /* Establish appropriate back-end context for processing the function
3153 FNDECL. The argument might be NULL to indicate processing at top
3154 level, outside of any function scope. */
3156 nios2_set_current_function (tree fndecl
)
3158 tree old_tree
= (nios2_previous_fndecl
3159 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
3162 tree new_tree
= (fndecl
3163 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3166 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3168 nios2_previous_fndecl
= fndecl
;
3169 if (old_tree
== new_tree
)
3174 cl_target_option_restore (&global_options
,
3175 TREE_TARGET_OPTION (new_tree
));
3181 struct cl_target_option
*def
3182 = TREE_TARGET_OPTION (target_option_current_node
);
3184 cl_target_option_restore (&global_options
, def
);
3190 /* Hook to validate the current #pragma GCC target and set the FPU custom
3191 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3194 nios2_pragma_target_parse (tree args
, tree pop_target
)
3199 cur_tree
= ((pop_target
)
3201 : target_option_default_node
);
3202 cl_target_option_restore (&global_options
,
3203 TREE_TARGET_OPTION (cur_tree
));
3207 cur_tree
= nios2_valid_target_attribute_tree (args
);
3212 target_option_current_node
= cur_tree
;
3216 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3217 We are just using this hook to add some additional error checking to
3218 the default behavior. GCC does not provide a target hook for merging
3219 the target options, and only correctly handles merging empty vs non-empty
3220 option data; see merge_decls() in c-decl.c.
3221 So here we require either that at least one of the decls has empty
3222 target options, or that the target options/data be identical. */
3224 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
3226 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3227 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3228 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3230 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3231 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3232 if (oldtree
&& newtree
&& oldtree
!= newtree
)
3234 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3235 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3236 if (olddata
!= newdata
3237 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3238 error ("%qE redeclared with conflicting %qs attributes",
3239 DECL_NAME (newdecl
), "target");
3242 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
3243 DECL_ATTRIBUTES (newdecl
));
3247 /* Initialize the GCC target structure. */
3248 #undef TARGET_ASM_FUNCTION_PROLOGUE
3249 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
3251 #undef TARGET_IN_SMALL_DATA_P
3252 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
3254 #undef TARGET_SECTION_TYPE_FLAGS
3255 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
3257 #undef TARGET_INIT_BUILTINS
3258 #define TARGET_INIT_BUILTINS nios2_init_builtins
3259 #undef TARGET_EXPAND_BUILTIN
3260 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
3261 #undef TARGET_BUILTIN_DECL
3262 #define TARGET_BUILTIN_DECL nios2_builtin_decl
3264 #undef TARGET_INIT_LIBFUNCS
3265 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
3267 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3268 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
3270 #undef TARGET_CAN_ELIMINATE
3271 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
3273 #undef TARGET_FUNCTION_ARG
3274 #define TARGET_FUNCTION_ARG nios2_function_arg
3276 #undef TARGET_FUNCTION_ARG_ADVANCE
3277 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
3279 #undef TARGET_ARG_PARTIAL_BYTES
3280 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
3282 #undef TARGET_TRAMPOLINE_INIT
3283 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
3285 #undef TARGET_FUNCTION_VALUE
3286 #define TARGET_FUNCTION_VALUE nios2_function_value
3288 #undef TARGET_LIBCALL_VALUE
3289 #define TARGET_LIBCALL_VALUE nios2_libcall_value
3291 #undef TARGET_FUNCTION_VALUE_REGNO_P
3292 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
3294 #undef TARGET_RETURN_IN_MEMORY
3295 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
3297 #undef TARGET_PROMOTE_PROTOTYPES
3298 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3300 #undef TARGET_SETUP_INCOMING_VARARGS
3301 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
3303 #undef TARGET_MUST_PASS_IN_STACK
3304 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
3306 #undef TARGET_LEGITIMATE_CONSTANT_P
3307 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
3309 #undef TARGET_LEGITIMIZE_ADDRESS
3310 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
3312 #undef TARGET_DELEGITIMIZE_ADDRESS
3313 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
3315 #undef TARGET_LEGITIMATE_ADDRESS_P
3316 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
3318 #undef TARGET_PREFERRED_RELOAD_CLASS
3319 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
3321 #undef TARGET_RTX_COSTS
3322 #define TARGET_RTX_COSTS nios2_rtx_costs
3324 #undef TARGET_HAVE_TLS
3325 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
3327 #undef TARGET_CANNOT_FORCE_CONST_MEM
3328 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
3330 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
3331 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
3333 #undef TARGET_PRINT_OPERAND
3334 #define TARGET_PRINT_OPERAND nios2_print_operand
3336 #undef TARGET_PRINT_OPERAND_ADDRESS
3337 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
3339 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3340 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
3342 #undef TARGET_OPTION_OVERRIDE
3343 #define TARGET_OPTION_OVERRIDE nios2_option_override
3345 #undef TARGET_OPTION_SAVE
3346 #define TARGET_OPTION_SAVE nios2_option_save
3348 #undef TARGET_OPTION_RESTORE
3349 #define TARGET_OPTION_RESTORE nios2_option_restore
3351 #undef TARGET_SET_CURRENT_FUNCTION
3352 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
3354 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
3355 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
3357 #undef TARGET_OPTION_PRAGMA_PARSE
3358 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
3360 #undef TARGET_MERGE_DECL_ATTRIBUTES
3361 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
3363 struct gcc_target targetm
= TARGET_INITIALIZER
;
3365 #include "gt-nios2.h"